Skip to content

Commit

Permalink
* added "close" to the HiveDataSourceProvider interface
Browse files Browse the repository at this point in the history
* implemented close in HiveBasicDataSourceProvider
* removed duplicate code from CachingDataSourceProvider and made it delegate to HiveBasicDataSourceProvider
  • Loading branch information
Dave Peckham committed Oct 29, 2008
1 parent 8030ffd commit 143f831
Show file tree
Hide file tree
Showing 4 changed files with 199 additions and 191 deletions.
@@ -1,44 +1,47 @@
/**
*
*
*/
package org.hivedb.meta.persistence;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hivedb.meta.Node;
import org.hivedb.util.database.DriverLoader;
import org.springframework.jdbc.datasource.LazyConnectionDataSourceProxy;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * A {@link HiveDataSourceProvider} that caches one {@link DataSource} per URI.
 * Creation of the underlying data sources is delegated to a
 * {@link HiveBasicDataSourceProvider}, so pooling and shutdown logic live in
 * one place. Exposed as a process-wide singleton via {@link #getInstance()}.
 */
public class CachingDataSourceProvider implements HiveDataSourceProvider {
	private static final Log log = LogFactory.getLog(CachingDataSourceProvider.class);

	// Singleton so every caller shares the same URI -> DataSource cache.
	private static CachingDataSourceProvider INSTANCE = new CachingDataSourceProvider();

	// Lazily populated; entries are never evicted.
	// NOTE(review): HashMap is not thread-safe — confirm getDataSource is only
	// called from a single thread, or synchronize/use ConcurrentHashMap.
	private Map<String, DataSource> cache = new HashMap<String, DataSource>();

	// All data-source construction and closing is delegated here.
	private HiveBasicDataSourceProvider delegate;

	private CachingDataSourceProvider() {
		this.delegate = new HiveBasicDataSourceProvider();
	}

	/**
	 * Returns the cached data source for the node's URI, creating it on first use.
	 *
	 * @param node the node whose URI identifies the data source
	 * @return the shared {@link DataSource} for the node's URI
	 */
	public DataSource getDataSource(Node node) {
		return getDataSource(node.getUri());
	}

	/**
	 * Closes all data sources created by this provider by closing the delegate.
	 * Cached references are NOT removed, so entries returned after close()
	 * point at closed pools.
	 */
	public void close() {
		delegate.close();
	}

	/**
	 * Returns the cached data source for the URI, creating and caching it
	 * via the delegate on first use.
	 *
	 * @param uri the JDBC URI identifying the data source
	 * @return the shared {@link DataSource} for the URI
	 */
	public DataSource getDataSource(String uri) {
		DataSource ds = cache.get(uri);
		if (ds == null) {
			ds = delegate.getDataSource(uri);
			cache.put(uri, ds);
		}
		return ds;
	}

	/** @return the process-wide singleton instance */
	public static CachingDataSourceProvider getInstance() {
		return INSTANCE;
	}
}
240 changes: 121 additions & 119 deletions src/main/java/org/hivedb/meta/persistence/HiveBasicDataSource.java
@@ -1,131 +1,133 @@
package org.hivedb.meta.persistence;

import com.mchange.v2.c3p0.ComboPooledDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hivedb.util.HiveUtils;

import javax.sql.DataSource;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;

import javax.sql.DataSource;
public class HiveBasicDataSource implements DataSource, Cloneable {
private Log log = LogFactory.getLog(HiveBasicDataSource.class);
private ComboPooledDataSource comboPooledDataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hivedb.util.HiveUtils;
public HiveBasicDataSource() {
comboPooledDataSource = new ComboPooledDataSource();
}

import com.mchange.v2.c3p0.ComboPooledDataSource;
public void close() {
comboPooledDataSource.close();
}

public class HiveBasicDataSource implements DataSource, Cloneable {
private Log log = LogFactory.getLog(HiveBasicDataSource.class);
private ComboPooledDataSource comboPooledDataSource;

public HiveBasicDataSource() {
comboPooledDataSource = new ComboPooledDataSource();
}

public HiveBasicDataSource(String hiveUri) {
this();
comboPooledDataSource.setJdbcUrl(hiveUri);
log.debug(String.format("HiveBasicDataSource created: %s", comboPooledDataSource.toString()));
}

public String getUrl() { //publicize for testing
return comboPooledDataSource.getJdbcUrl();
}

public void setUrl(String url) {
comboPooledDataSource.setJdbcUrl(url);
}

@Override
public HiveBasicDataSource clone() throws CloneNotSupportedException {
HiveBasicDataSource clone = new HiveBasicDataSource();
clone.setMaxActive(this.getMaxActive());
clone.setPassword(this.getPassword());
clone.setUrl(this.getUrl());
clone.setUsername(this.getUsername());
clone.setValidationQuery(this.getValidationQuery());
return clone;
}

@Override
public int hashCode() {
return HiveUtils.makeHashCode(
this.getMaxActive(),
this.getPassword(),
this.getUrl(),
this.getUsername(),
this.getValidationQuery()
);
}

public Connection getConnection() throws SQLException {
Connection connection = comboPooledDataSource.getConnection();
log.debug("Loaned connection, current active connections: " + this.getNumActive());
return connection;
}

public Connection getConnection(String username, String password)
throws SQLException {
Connection connection = comboPooledDataSource.getConnection(username,password);
log.debug("Loaned connection, current active connections: " + this.getNumActive());
return connection;
}

public PrintWriter getLogWriter() throws SQLException {
return comboPooledDataSource.getLogWriter();
}

public int getLoginTimeout() throws SQLException {
return comboPooledDataSource.getLoginTimeout();
}

public void setLogWriter(PrintWriter out) throws SQLException {
comboPooledDataSource.setLogWriter(out);
}

public void setLoginTimeout(int seconds) throws SQLException {
comboPooledDataSource.setLoginTimeout(seconds);
}

public int getMaxActive() {
return comboPooledDataSource.getMaxPoolSize();
}

public void setMaxActive(int maxActive) {
comboPooledDataSource.setMaxPoolSize(maxActive);
}

private int getNumActive() throws SQLException {
return comboPooledDataSource.getNumBusyConnections();
}

public String getUsername() {
return comboPooledDataSource.getUser();
}

public void setUsername(String username) {
comboPooledDataSource.setUser(username);
}

public String getPassword() {
return comboPooledDataSource.getPassword();
}

public void setPassword(String password) {
comboPooledDataSource.setPassword(password);
}

public String getValidationQuery() {
return comboPooledDataSource.getPreferredTestQuery();
}

public void setValidationQuery(String validationQuery) {
comboPooledDataSource.setPreferredTestQuery(validationQuery);
}

public <T> T unwrap(Class<T> iface) throws SQLException {
throw new UnsupportedOperationException();
}

public boolean isWrapperFor(Class<?> iface) {
public HiveBasicDataSource(String hiveUri) {
this();
comboPooledDataSource.setJdbcUrl(hiveUri);
log.debug(String.format("HiveBasicDataSource created: %s", comboPooledDataSource.toString()));
}

public String getUrl() { //publicize for testing
return comboPooledDataSource.getJdbcUrl();
}

public void setUrl(String url) {
comboPooledDataSource.setJdbcUrl(url);
}

@Override
public HiveBasicDataSource clone() throws CloneNotSupportedException {
HiveBasicDataSource clone = new HiveBasicDataSource();
clone.setMaxActive(this.getMaxActive());
clone.setPassword(this.getPassword());
clone.setUrl(this.getUrl());
clone.setUsername(this.getUsername());
clone.setValidationQuery(this.getValidationQuery());
return clone;
}

@Override
public int hashCode() {
return HiveUtils.makeHashCode(
this.getMaxActive(),
this.getPassword(),
this.getUrl(),
this.getUsername(),
this.getValidationQuery()
);
}

public Connection getConnection() throws SQLException {
Connection connection = comboPooledDataSource.getConnection();
log.debug("Loaned connection, current active connections: " + this.getNumActive());
return connection;
}

public Connection getConnection(String username, String password)
throws SQLException {
Connection connection = comboPooledDataSource.getConnection(username, password);
log.debug("Loaned connection, current active connections: " + this.getNumActive());
return connection;
}

public PrintWriter getLogWriter() throws SQLException {
return comboPooledDataSource.getLogWriter();
}

public int getLoginTimeout() throws SQLException {
return comboPooledDataSource.getLoginTimeout();
}

public void setLogWriter(PrintWriter out) throws SQLException {
comboPooledDataSource.setLogWriter(out);
}

public void setLoginTimeout(int seconds) throws SQLException {
comboPooledDataSource.setLoginTimeout(seconds);
}

public int getMaxActive() {
return comboPooledDataSource.getMaxPoolSize();
}

public void setMaxActive(int maxActive) {
comboPooledDataSource.setMaxPoolSize(maxActive);
}

private int getNumActive() throws SQLException {
return comboPooledDataSource.getNumBusyConnections();
}

public String getUsername() {
return comboPooledDataSource.getUser();
}

public void setUsername(String username) {
comboPooledDataSource.setUser(username);
}

public String getPassword() {
return comboPooledDataSource.getPassword();
}

public void setPassword(String password) {
comboPooledDataSource.setPassword(password);
}

public String getValidationQuery() {
return comboPooledDataSource.getPreferredTestQuery();
}

public void setValidationQuery(String validationQuery) {
comboPooledDataSource.setPreferredTestQuery(validationQuery);
}

public <T> T unwrap(Class<T> iface) throws SQLException {
throw new UnsupportedOperationException();
}

public boolean isWrapperFor(Class<?> iface) {
return false;
}
}

0 comments on commit 143f831

Please sign in to comment.