diff --git a/src/main/java/org/hivedb/meta/persistence/CachingDataSourceProvider.java b/src/main/java/org/hivedb/meta/persistence/CachingDataSourceProvider.java index cb8d343..fd26998 100644 --- a/src/main/java/org/hivedb/meta/persistence/CachingDataSourceProvider.java +++ b/src/main/java/org/hivedb/meta/persistence/CachingDataSourceProvider.java @@ -1,44 +1,47 @@ /** - * + * */ package org.hivedb.meta.persistence; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.hivedb.meta.Node; -import org.hivedb.util.database.DriverLoader; -import org.springframework.jdbc.datasource.LazyConnectionDataSourceProxy; import javax.sql.DataSource; import java.util.HashMap; import java.util.Map; public class CachingDataSourceProvider implements HiveDataSourceProvider { - - private static CachingDataSourceProvider INSTANCE = new CachingDataSourceProvider(); - - private Map<String, LazyConnectionDataSourceProxy> cache = new HashMap<String, LazyConnectionDataSourceProxy>(); - - private CachingDataSourceProvider() { - } - - public DataSource getDataSource(Node node) { - return getDataSource(node.getUri()); - } - - public DataSource getDataSource(String uri) { - LazyConnectionDataSourceProxy ds = cache.get(uri); - if (ds == null) { - DriverLoader.initializeDriver(uri); - ds = new LazyConnectionDataSourceProxy(createDataSource(uri)); - cache.put(uri, ds); - } - return ds; - } - - protected DataSource createDataSource(String uri) { - return new HiveBasicDataSource(uri); - } - - public static CachingDataSourceProvider getInstance() { - return INSTANCE; - } + private static final Log log = LogFactory.getLog(CachingDataSourceProvider.class); + + private static CachingDataSourceProvider INSTANCE = new CachingDataSourceProvider(); + + private Map<String, DataSource> cache = new HashMap<String, DataSource>(); + + private HiveBasicDataSourceProvider delegate; + + private CachingDataSourceProvider() { + this.delegate = new HiveBasicDataSourceProvider(); + } + + public DataSource getDataSource(Node node) { + return getDataSource(node.getUri()); + } + + public void 
close() { + delegate.close(); + } + + public DataSource getDataSource(String uri) { + DataSource ds = cache.get(uri); + if (ds == null) { + ds = delegate.getDataSource(uri); + cache.put(uri, ds); + } + return ds; + } + + public static CachingDataSourceProvider getInstance() { + return INSTANCE; + } } diff --git a/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSource.java b/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSource.java index 2f32d85..47933f0 100644 --- a/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSource.java +++ b/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSource.java @@ -1,131 +1,133 @@ package org.hivedb.meta.persistence; +import com.mchange.v2.c3p0.ComboPooledDataSource; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hivedb.util.HiveUtils; + +import javax.sql.DataSource; import java.io.PrintWriter; import java.sql.Connection; import java.sql.SQLException; -import javax.sql.DataSource; +public class HiveBasicDataSource implements DataSource, Cloneable { + private Log log = LogFactory.getLog(HiveBasicDataSource.class); + private ComboPooledDataSource comboPooledDataSource; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.hivedb.util.HiveUtils; + public HiveBasicDataSource() { + comboPooledDataSource = new ComboPooledDataSource(); + } -import com.mchange.v2.c3p0.ComboPooledDataSource; + public void close() { + comboPooledDataSource.close(); + } -public class HiveBasicDataSource implements DataSource, Cloneable { - private Log log = LogFactory.getLog(HiveBasicDataSource.class); - private ComboPooledDataSource comboPooledDataSource; - - public HiveBasicDataSource() { - comboPooledDataSource = new ComboPooledDataSource(); - } - - public HiveBasicDataSource(String hiveUri) { - this(); - comboPooledDataSource.setJdbcUrl(hiveUri); - log.debug(String.format("HiveBasicDataSource created: %s", 
comboPooledDataSource.toString())); - } - - public String getUrl() { //publicize for testing - return comboPooledDataSource.getJdbcUrl(); - } - - public void setUrl(String url) { - comboPooledDataSource.setJdbcUrl(url); - } - - @Override - public HiveBasicDataSource clone() throws CloneNotSupportedException { - HiveBasicDataSource clone = new HiveBasicDataSource(); - clone.setMaxActive(this.getMaxActive()); - clone.setPassword(this.getPassword()); - clone.setUrl(this.getUrl()); - clone.setUsername(this.getUsername()); - clone.setValidationQuery(this.getValidationQuery()); - return clone; - } - - @Override - public int hashCode() { - return HiveUtils.makeHashCode( - this.getMaxActive(), - this.getPassword(), - this.getUrl(), - this.getUsername(), - this.getValidationQuery() - ); - } - - public Connection getConnection() throws SQLException { - Connection connection = comboPooledDataSource.getConnection(); - log.debug("Loaned connection, current active connections: " + this.getNumActive()); - return connection; - } - - public Connection getConnection(String username, String password) - throws SQLException { - Connection connection = comboPooledDataSource.getConnection(username,password); - log.debug("Loaned connection, current active connections: " + this.getNumActive()); - return connection; - } - - public PrintWriter getLogWriter() throws SQLException { - return comboPooledDataSource.getLogWriter(); - } - - public int getLoginTimeout() throws SQLException { - return comboPooledDataSource.getLoginTimeout(); - } - - public void setLogWriter(PrintWriter out) throws SQLException { - comboPooledDataSource.setLogWriter(out); - } - - public void setLoginTimeout(int seconds) throws SQLException { - comboPooledDataSource.setLoginTimeout(seconds); - } - - public int getMaxActive() { - return comboPooledDataSource.getMaxPoolSize(); - } - - public void setMaxActive(int maxActive) { - comboPooledDataSource.setMaxPoolSize(maxActive); - } - - private int getNumActive() throws 
SQLException { - return comboPooledDataSource.getNumBusyConnections(); - } - - public String getUsername() { - return comboPooledDataSource.getUser(); - } - - public void setUsername(String username) { - comboPooledDataSource.setUser(username); - } - - public String getPassword() { - return comboPooledDataSource.getPassword(); - } - - public void setPassword(String password) { - comboPooledDataSource.setPassword(password); - } - - public String getValidationQuery() { - return comboPooledDataSource.getPreferredTestQuery(); - } - - public void setValidationQuery(String validationQuery) { - comboPooledDataSource.setPreferredTestQuery(validationQuery); - } - - public <T> T unwrap(Class<T> iface) throws SQLException { - throw new UnsupportedOperationException(); - } - - public boolean isWrapperFor(Class<?> iface) { + public HiveBasicDataSource(String hiveUri) { + this(); + comboPooledDataSource.setJdbcUrl(hiveUri); + log.debug(String.format("HiveBasicDataSource created: %s", comboPooledDataSource.toString())); + } + + public String getUrl() { //publicize for testing + return comboPooledDataSource.getJdbcUrl(); + } + + public void setUrl(String url) { + comboPooledDataSource.setJdbcUrl(url); + } + + @Override + public HiveBasicDataSource clone() throws CloneNotSupportedException { + HiveBasicDataSource clone = new HiveBasicDataSource(); + clone.setMaxActive(this.getMaxActive()); + clone.setPassword(this.getPassword()); + clone.setUrl(this.getUrl()); + clone.setUsername(this.getUsername()); + clone.setValidationQuery(this.getValidationQuery()); + return clone; + } + + @Override + public int hashCode() { + return HiveUtils.makeHashCode( + this.getMaxActive(), + this.getPassword(), + this.getUrl(), + this.getUsername(), + this.getValidationQuery() + ); + } + + public Connection getConnection() throws SQLException { + Connection connection = comboPooledDataSource.getConnection(); + log.debug("Loaned connection, current active connections: " + this.getNumActive()); + return connection; 
+ } + + public Connection getConnection(String username, String password) + throws SQLException { + Connection connection = comboPooledDataSource.getConnection(username, password); + log.debug("Loaned connection, current active connections: " + this.getNumActive()); + return connection; + } + + public PrintWriter getLogWriter() throws SQLException { + return comboPooledDataSource.getLogWriter(); + } + + public int getLoginTimeout() throws SQLException { + return comboPooledDataSource.getLoginTimeout(); + } + + public void setLogWriter(PrintWriter out) throws SQLException { + comboPooledDataSource.setLogWriter(out); + } + + public void setLoginTimeout(int seconds) throws SQLException { + comboPooledDataSource.setLoginTimeout(seconds); + } + + public int getMaxActive() { + return comboPooledDataSource.getMaxPoolSize(); + } + + public void setMaxActive(int maxActive) { + comboPooledDataSource.setMaxPoolSize(maxActive); + } + + private int getNumActive() throws SQLException { + return comboPooledDataSource.getNumBusyConnections(); + } + + public String getUsername() { + return comboPooledDataSource.getUser(); + } + + public void setUsername(String username) { + comboPooledDataSource.setUser(username); + } + + public String getPassword() { + return comboPooledDataSource.getPassword(); + } + + public void setPassword(String password) { + comboPooledDataSource.setPassword(password); + } + + public String getValidationQuery() { + return comboPooledDataSource.getPreferredTestQuery(); + } + + public void setValidationQuery(String validationQuery) { + comboPooledDataSource.setPreferredTestQuery(validationQuery); + } + + public <T> T unwrap(Class<T> iface) throws SQLException { + throw new UnsupportedOperationException(); + } + + public boolean isWrapperFor(Class<?> iface) { return false; } } diff --git a/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSourceProvider.java b/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSourceProvider.java index 6a942f0..a1d437e 100644 
--- a/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSourceProvider.java +++ b/src/main/java/org/hivedb/meta/persistence/HiveBasicDataSourceProvider.java @@ -1,48 +1,49 @@ /** - * + * */ package org.hivedb.meta.persistence; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hivedb.HiveRuntimeException; import org.hivedb.meta.Node; import org.springframework.jdbc.datasource.LazyConnectionDataSourceProxy; import javax.sql.DataSource; +import java.util.Collection; +import java.util.HashSet; public class HiveBasicDataSourceProvider implements HiveDataSourceProvider { - private Long connectionTimeoutInMillis = 0l; - private Long socketTimeoutInMillis = 0l; - - public HiveBasicDataSourceProvider(long connection, long socket) { - this.connectionTimeoutInMillis = connection; - this.socketTimeoutInMillis = socket; - } - - public HiveBasicDataSourceProvider(long timeout) { - this(timeout,timeout); - } - - public DataSource getDataSource(Node node) { - return getDataSource(node.getUri()); - } - - public long getTimeout() { - return connectionTimeoutInMillis; - } - - public void setTimeout(long timeout) { - this.connectionTimeoutInMillis = timeout; - } - - public DataSource getDataSource(String uri) { - HiveBasicDataSource ds = new HiveBasicDataSource(uri); - return new LazyConnectionDataSourceProxy(ds); - } - - public Long getSocketTimeout() { - return socketTimeoutInMillis; - } - - public void setSocketTimeout(Long socketTimeoutInMillis) { - this.socketTimeoutInMillis = socketTimeoutInMillis; - } + private Log log = LogFactory.getLog(HiveBasicDataSourceProvider.class); + + private Collection<HiveBasicDataSource> dataSourcesToClose; + + public HiveBasicDataSourceProvider() { + dataSourcesToClose = new HashSet<HiveBasicDataSource>(); + } + + public DataSource getDataSource(Node node) { + return getDataSource(node.getUri()); + } + + public void close() { + HiveRuntimeException exceptionWhileClosing = null; + for (HiveBasicDataSource dataSource : dataSourcesToClose) { + 
try { + dataSource.close(); + } catch (Exception e) { + exceptionWhileClosing = new HiveRuntimeException("Error closing datasources. Possibly more than one cause.", e); + } + } + if (exceptionWhileClosing != null) { + throw exceptionWhileClosing; + } + } + + public DataSource getDataSource(String uri) { + HiveBasicDataSource ds = new HiveBasicDataSource(uri); + LazyConnectionDataSourceProxy dataSourceProxy = new LazyConnectionDataSourceProxy(ds); + dataSourcesToClose.add(ds); + return dataSourceProxy; + } } \ No newline at end of file diff --git a/src/main/java/org/hivedb/meta/persistence/HiveDataSourceProvider.java b/src/main/java/org/hivedb/meta/persistence/HiveDataSourceProvider.java index 09b0b2f..e77fa06 100644 --- a/src/main/java/org/hivedb/meta/persistence/HiveDataSourceProvider.java +++ b/src/main/java/org/hivedb/meta/persistence/HiveDataSourceProvider.java @@ -5,13 +5,15 @@ package org.hivedb.meta.persistence; -import javax.sql.DataSource; import org.hivedb.meta.Node; +import javax.sql.DataSource; + /** - * * @author mellwanger */ public interface HiveDataSourceProvider extends DataSourceProvider { - public DataSource getDataSource(Node node); + public DataSource getDataSource(Node node); + + public void close(); }