3 changes: 3 additions & 0 deletions client/conf/db.properties.in
@@ -101,3 +101,6 @@ db.usage.autoReconnectForPools=true
db.usage.secondsBeforeRetryMaster=3600
db.usage.queriesBeforeRetryMaster=5000
db.usage.initialTimeout=3600

# Connection Pool - Valid options: dbcp, hikari
db.cloud.connPool=hikari
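
The new key is an ordinary java.util.Properties entry, with dbcp remaining the other valid value. A minimal sketch of reading the key with a dbcp fallback, so older db.properties files without it keep their current pool; the file path and class name below are illustrative only and not part of this change:

import java.io.FileInputStream;
import java.util.Properties;

public class ConnPoolSetting {
    public static void main(String[] args) throws Exception {
        Properties dbProps = new Properties();
        // Illustrative path; CloudStack loads db.properties from its management config directory.
        try (FileInputStream in = new FileInputStream("/etc/cloudstack/management/db.properties")) {
            dbProps.load(in);
        }
        // Fall back to "dbcp" when the key is absent so existing installs are unaffected.
        String connPool = dbProps.getProperty("db.cloud.connPool", "dbcp");
        System.out.println("db.cloud.connPool = " + connPool);
    }
}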
5 changes: 5 additions & 0 deletions framework/db/pom.xml
@@ -19,6 +19,11 @@
<relativePath>../pom.xml</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>2.7.8</version>
</dependency>
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
@@ -31,6 +31,7 @@
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
@@ -388,6 +389,7 @@ public List<T> searchIncludingRemoved(SearchCriteria<T> sc, final Filter filter,
addFilter(str, filter);

final TransactionLegacy txn = TransactionLegacy.currentTxn();

if (lock != null) {
assert (txn.dbTxnStarted() == true) : "As nice as I can here now....how do you lock when there's no DB transaction? Review your db 101 course from college.";
str.append(lock ? FOR_UPDATE_CLAUSE : SHARE_MODE_CLAUSE);
@@ -398,6 +400,8 @@ public List<T> searchIncludingRemoved(SearchCriteria<T> sc, final Filter filter,
PreparedStatement pstmt = null;
final List<T> result = new ArrayList<T>();
try {
Connection currentConnection = txn.getCurrentConnection();
s_logger.info("Connection: " + (currentConnection.isClosed() ? "closed" : "open"));
pstmt = txn.prepareAutoCloseStatement(sql);
int i = 1;
if (clause != null) {
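The two lines added above log whether the transaction's live JDBC connection is still open before the statement is prepared. A hypothetical standalone helper expressing the same diagnostic; TransactionLegacy.currentTxn() and getCurrentConnection() are taken from this diff, while the class itself is not part of the PR and the import locations are assumed from the surrounding codebase:

import java.sql.Connection;
import java.sql.SQLException;

import org.apache.log4j.Logger;

import com.cloud.utils.db.TransactionLegacy;

public class ConnectionStateLogger {
    private static final Logger LOGGER = Logger.getLogger(ConnectionStateLogger.class);

    // Log whether the current transaction's connection is open, closed, or absent.
    public static void logState() {
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        try {
            Connection conn = txn.getCurrentConnection();
            LOGGER.info("Connection: " + (conn == null ? "none" : (conn.isClosed() ? "closed" : "open")));
        } catch (SQLException e) {
            LOGGER.info("Could not determine connection state: " + e.getLocalizedMessage());
        }
    }
}
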
@@ -33,6 +33,8 @@

import javax.sql.DataSource;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.commons.dbcp.ConnectionFactory;
import org.apache.commons.dbcp.DriverManagerConnectionFactory;
import org.apache.commons.dbcp.PoolableConnectionFactory;
@@ -426,9 +428,6 @@ public void start() {
protected void closePreviousStatement() {
if (_stmt != null) {
try {
if (s_stmtLogger.isTraceEnabled()) {
s_stmtLogger.trace("Closing: " + _stmt.toString());
}
try {
ResultSet rs = _stmt.getResultSet();
if (rs != null && _stmt.getResultSetHoldability() != ResultSet.HOLD_CURSORS_OVER_COMMIT) {
@@ -1062,6 +1061,7 @@ public static void initDataSource(Properties dbProps) {
final long cloudMinEvcitableIdleTimeMillis = Long.parseLong(dbProps.getProperty("db.cloud.minEvictableIdleTimeMillis"));
final boolean cloudPoolPreparedStatements = Boolean.parseBoolean(dbProps.getProperty("db.cloud.poolPreparedStatements"));
final String url = dbProps.getProperty("db.cloud.url.params");
final String connPool = dbProps.getProperty("db.cloud.connPool");

String cloudDbHAParams = null;
String cloudSlaves = null;
@@ -1088,6 +1088,55 @@ public static void initDataSource(Properties dbProps) {
(s_dbHAEnabled ? "&" + cloudDbHAParams : "") + (s_dbHAEnabled ? "&loadBalanceStrategy=" + loadBalanceStrategy : "");
DriverLoader.loadDriver(cloudDriver);

if (connPool.equalsIgnoreCase("hikari")) {
HikariConfig hikariConfig = new HikariConfig();
hikariConfig.setJdbcUrl(cloudConnectionUri);
hikariConfig.setUsername(cloudUsername);
hikariConfig.setPassword(cloudPassword);
hikariConfig.addDataSourceProperty("cachePrepStmts", "true");
hikariConfig.addDataSourceProperty("prepStmtCacheSize", "250");
hikariConfig.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
hikariConfig.setMinimumIdle(100);
hikariConfig.setMaximumPoolSize(200);
hikariConfig.setAutoCommit(false);

s_ds = new HikariDataSource(hikariConfig);

// Configure the usage db
final int usageMaxActive = Integer.parseInt(dbProps.getProperty("db.usage.maxActive"));
final int usageMaxIdle = Integer.parseInt(dbProps.getProperty("db.usage.maxIdle"));
final long usageMaxWait = Long.parseLong(dbProps.getProperty("db.usage.maxWait"));
final String usageUsername = dbProps.getProperty("db.usage.username");
final String usagePassword = dbProps.getProperty("db.usage.password");
final String usageHost = dbProps.getProperty("db.usage.host");
final String usageDriver = dbProps.getProperty("db.usage.driver");
final int usagePort = Integer.parseInt(dbProps.getProperty("db.usage.port"));
final String usageDbName = dbProps.getProperty("db.usage.name");
final boolean usageAutoReconnect = Boolean.parseBoolean(dbProps.getProperty("db.usage.autoReconnect"));
final String usageUrl = dbProps.getProperty("db.usage.url.params");

final GenericObjectPool usageConnectionPool =
new GenericObjectPool(null, usageMaxActive, GenericObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION, usageMaxWait, usageMaxIdle);

final String usageConnectionUri = usageDriver + "://" + usageHost + (s_dbHAEnabled ? "," + dbProps.getProperty("db.cloud.slaves") : "") + ":" + usagePort +
"/" + usageDbName + "?autoReconnect=" + usageAutoReconnect + (usageUrl != null ? "&" + usageUrl : "") +
(s_dbHAEnabled ? "&" + getDBHAParams("usage", dbProps) : "") + (s_dbHAEnabled ? "&loadBalanceStrategy=" + loadBalanceStrategy : "");
DriverLoader.loadDriver(usageDriver);

HikariConfig hikariConfigUsage = new HikariConfig();
hikariConfigUsage.setJdbcUrl(usageConnectionUri);
hikariConfigUsage.setUsername(usageUsername);
hikariConfigUsage.setPassword(usagePassword);
hikariConfigUsage.addDataSourceProperty("cachePrepStmts", "true");
hikariConfigUsage.addDataSourceProperty("prepStmtCacheSize", "250");
hikariConfigUsage.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
hikariConfigUsage.setIdleTimeout(600000l);
hikariConfigUsage.setMinimumIdle(5);
s_usageDS = new HikariDataSource(hikariConfigUsage);
return;

}

final ConnectionFactory cloudConnectionFactory = new DriverManagerConnectionFactory(cloudConnectionUri, cloudUsername, cloudPassword);

final KeyedObjectPoolFactory poolableObjFactory = (cloudPoolPreparedStatements ? new StackKeyedObjectPoolFactory() : null);
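For context on the new branch above: the HikariDataSource is handed out as a plain javax.sql.DataSource, so callers borrow a connection with getConnection() and return it to the pool by closing it, and because the pool is built with auto-commit off they must commit explicitly. A minimal standalone sketch with placeholder connection details; none of these values come from this PR:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class HikariUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder settings; the PR builds the real JDBC URL and credentials from db.properties.
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:mysql://localhost:3306/cloud");
        config.setUsername("cloud");
        config.setPassword("secret");
        config.setAutoCommit(false);      // same choice as the PR: callers drive commit/rollback
        config.setMaximumPoolSize(10);

        try (HikariDataSource ds = new HikariDataSource(config);
             Connection conn = ds.getConnection();                        // borrow from the pool
             PreparedStatement pstmt = conn.prepareStatement("SELECT 1");
             ResultSet rs = pstmt.executeQuery()) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
            conn.commit();                // required because auto-commit is disabled
        }                                 // closing conn returns it to the pool; closing ds shuts the pool down
    }
}
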
@@ -16,6 +16,7 @@
// under the License.
package org.apache.cloudstack.framework.jobs.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Date;
@@ -161,19 +162,19 @@ public void expungeLeftoverWorkJobs(final long msid) {
sc.setParameters("msid", msid);

expunge(sc);
*/
*/
Transaction.execute(new TransactionCallbackNoReturn() {
@Override
public void doInTransactionWithoutResult(TransactionStatus status) {
TransactionLegacy txn = TransactionLegacy.currentTxn();

try (
PreparedStatement pstmt = txn
.prepareAutoCloseStatement(
"DELETE FROM vm_work_job WHERE id IN (SELECT id FROM async_job WHERE (job_dispatcher='VmWorkJobPlaceHolder' OR job_dispatcher='VmWorkJobDispatcher') AND job_init_msid=?)");
) {
try {
PreparedStatement pstmt = txn
.prepareAutoCloseStatement(
"DELETE FROM vm_work_job WHERE id IN (SELECT id FROM async_job WHERE (job_dispatcher='VmWorkJobPlaceHolder' OR job_dispatcher='VmWorkJobDispatcher') AND job_init_msid=?)");
Connection currentConnection = txn.getCurrentConnection();
s_logger.info("Connection: " + (currentConnection.isClosed() ? "closed" : "open"));
pstmt.setLong(1, msid);

pstmt.execute();
} catch (SQLException e) {
s_logger.info("[ignored]"
@@ -183,12 +184,12 @@ public void doInTransactionWithoutResult(TransactionStatus status) {
+ "caught an error during delete vm work job: " + e.getLocalizedMessage());
}

try (
PreparedStatement pstmt = txn.prepareAutoCloseStatement(
try {
PreparedStatement pstmt = txn.prepareAutoCloseStatement(
"DELETE FROM async_job WHERE (job_dispatcher='VmWorkJobPlaceHolder' OR job_dispatcher='VmWorkJobDispatcher') AND job_init_msid=?");
) {
Connection currentConnection = txn.getCurrentConnection();
s_logger.info("Connection: " + (currentConnection.isClosed() ? "closed" : "open"));
pstmt.setLong(1, msid);

pstmt.execute();
} catch (SQLException e) {
s_logger.info("[ignored]"