Monday, October 22, 2007

Connection pool example

I was really surprised that the Oracle documentation contains no working examples, even simple ones. Below is a simple connection pool (with cached statements, which eliminate soft parsing)...


import java.sql.*;
import oracle.jdbc.pool.*;

class oracleConnMgr {
    OracleDataSource ods;

    oracleConnMgr() {
        try {
            ods = new OracleDataSource();

            // Connection cache settings: start with 3 connections,
            // never shrink below 3, never grow beyond 5.
            java.util.Properties prop = new java.util.Properties();
            prop.setProperty("InitialLimit", "3");
            prop.setProperty("MinLimit", "3");
            prop.setProperty("MaxLimit", "5");

            // Cache up to 20 statements per connection.
            prop.setProperty("MaxStatementsLimit", "20");

            String url = "jdbc:oracle:thin:@wcasbuild01.prod.oclc.org:2484/wcasbld.prod.oclc.org";
            ods.setImplicitCachingEnabled(true);   // turn on the statement cache
            ods.setURL(url);
            ods.setUser("scott");                  // substitute your own credentials
            ods.setPassword("tiger");
            ods.setConnectionCachingEnabled(true);
            ods.setConnectionCacheProperties(prop);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String args[]) {
        try {
            oracleConnMgr cm = new oracleConnMgr();
            for (int i = 1; i <= 30; i++) {
                // "Open" a connection -- the cache hands back a pooled one.
                Connection conn = cm.ods.getConnection();

                // Grab the session id so we can see which cached session we got.
                Statement st1 = conn.createStatement();
                ResultSet rst1 = st1.executeQuery("select sys_context('userenv','sid') from dual");
                String sid = "";
                while (rst1.next()) {
                    sid = rst1.getString(1);
                }
                rst1.close();
                st1.close();

                // This statement should be parsed once and then served from
                // the statement cache on every later iteration.
                PreparedStatement pst = conn.prepareStatement("select * from t");
                ResultSet rst = pst.executeQuery();
                rst.close();
                pst.close();

                // Check parse vs. execution counts for the cached statement.
                Statement st2 = conn.createStatement();
                ResultSet rset = st2.executeQuery("select parse_calls, executions from v$sql where sql_id = '89km4qj1thh13'");
                while (rset.next()) {
                    System.out.println("SID " + sid + " running, statement has been parsed " + rset.getString(1) + " times and executed " + rset.getString(2) + " times.");
                }
                rset.close();
                st2.close();

                // Returns the connection to the cache rather than really closing it.
                conn.close();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Sunday, October 14, 2007

Below is a simple set of classes for performing your own statistical analysis. As noted, they are simple (no multiple regression analysis, etc.)


class myStats {
    // Sample standard deviation: sqrt(sum((x - avg)^2) / (n - 1)).
    double standardDeviation(double[] array) {
        double av = average(array);
        double d = 0;
        for (int i = 0; i < array.length; i++) {
            d = d + (array[i] - av) * (array[i] - av);
        }
        return Math.sqrt(d / (array.length - 1));
    }

    // Pearson correlation: sum((y - avgY)(x - avgX)) / ((n - 1) * sdY * sdX).
    double correlation(double[] array1, double[] array2) {
        double av1 = average(array1);
        double av2 = average(array2);
        double d = 0;
        for (int i = 0; i < array1.length; i++) {
            d = d + (array1[i] - av1) * (array2[i] - av2);
        }
        double sd1 = standardDeviation(array1);
        double sd2 = standardDeviation(array2);
        return (d / (sd1 * sd2)) / (array1.length - 1);
    }

    // Least-squares slope of y (array1) regressed on x (array2):
    // slope = correlation * (sdY / sdX).
    double slope(double[] array1, double[] array2) {
        double sd1 = standardDeviation(array1);
        double sd2 = standardDeviation(array2);
        return (sd1 / sd2) * correlation(array1, array2);
    }

    // Intercept of the regression line: avgY - slope * avgX.
    double yIntercept(double array1[], double array2[]) {
        double sl = slope(array1, array2);
        double yAvg = average(array1);
        double xAvg = average(array2);
        return yAvg - (sl * xAvg);
    }

    // Arithmetic mean.
    double average(double array[]) {
        double total = 0;
        for (int i = 0; i < array.length; i++) {
            total = total + array[i];
        }
        return total / array.length;
    }

    // R-squared: 1 - (sum of squared residuals / total sum of squares).
    double lineFit(double array1[], double array2[]) {
        double yAvg = average(array1);
        double sl = slope(array1, array2);
        double yi = yIntercept(array1, array2);
        double squaredResidual = 0;
        double squaredYVariance = 0;
        for (int i = 0; i < array1.length; i++) {
            squaredYVariance = squaredYVariance + ((array1[i] - yAvg) * (array1[i] - yAvg));
            double predicted = (array2[i] * sl) + yi;
            squaredResidual = squaredResidual + ((array1[i] - predicted) * (array1[i] - predicted));
        }
        return 1 - (squaredResidual / squaredYVariance);
    }
}

class runMyStats {
    public static void main(String args[]) {
        // Note the argument order in the calls below: the first array (i)
        // is treated as y, the second (i2) as x.
        double[] i2 = {5,10,20,8,4,6,12,15};
        double[] i = {27,46,73,40,30,28,46,59};
        //double[] i2 = {3,6,9,12};
        //double[] i = {15,30,45,60};
        myStats m = new myStats();
        System.out.println("AVG = " + m.average(i));
        System.out.println("STDEV = " + m.standardDeviation(i));
        System.out.println("Correlation = " + m.correlation(i,i2));
        System.out.println("Slope = " + m.slope(i,i2));
        System.out.println("YIntercept = " + m.yIntercept(i,i2));
        System.out.println("lineFit = " + m.lineFit(i,i2));
    }
}

Thursday, October 11, 2007

Limitations of wait event analysis?

OK, so we have come full circle: we know that hit ratio tuning doesn't work (for everything), and now wait event analysis has been deemed to have limitations...but what are they? I sense a *great* interview question coming on for a senior DBA position.

Quite frankly, there is no silver bullet in tuning. All that matters is how your application is performing. Can you risk walking down the hall to the restroom without a user "accidentally" tripping you on the way? Having said that, I do think wait event analysis is an excellent place to start. Humans measure most things by time ("the system is really slow today..."), so the wait event interface is a great place to at least see what is, well...waiting :)
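
For instance, a minimal sketch of that first look (assuming 10.2 or later, where v$system_event carries time_waited_micro and a wait_class column for filtering out idle waits):

select event,
       total_waits,
       round(time_waited_micro / 1000000, 1) seconds_waited
  from v$system_event
 where wait_class <> 'Idle'
 order by time_waited_micro desc;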

Here is an example where it is genuinely useful. User A comes into the DBA's cube and says, "We installed a new version of the application software this weekend, and now the system is crawling."

The DBA immediately goes to his handy Grid Control interface and runs ADDM for the last 30 minutes. He sees right away that I/O is consuming nearly 60% of user time (db file scattered read). He looks at the top SQL and sees there is an index that needs to be created. Voila, problem solved.
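
Grid Control aside, the same drill-down can be sketched in plain SQL. Multiblock full scans surface as db file scattered read, so the top SQL by disk reads is usually where a missing index hides (a hedged sketch against v$sql as of 10g):

select *
  from (select sql_id,
               disk_reads,
               executions,
               substr(sql_text, 1, 60) sql_text
          from v$sql
         order by disk_reads desc)
 where rownum <= 10;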

Now let's look at a different example. While being proactive one Thursday afternoon, the DBA reviews the top waits of the past 60 days in his database. "What's this?! We have quadrupled our waits on "direct path read" in the last 30 days. What happened?!"

Since his restroom trips have proven largely uneventful in terms of being tripped, he checks other statistics. He finds a corresponding increase in "physical reads direct (lob)" over the same time period.
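
A minimal sketch of that check, assuming a licensed AWR repository: trend a system statistic across snapshots by joining dba_hist_sysstat to dba_hist_snapshot. The value column is cumulative since instance startup, so it is the trend, not the absolute number, that matters:

select sn.begin_interval_time,
       st.value
  from dba_hist_sysstat st,
       dba_hist_snapshot sn
 where st.snap_id = sn.snap_id
   and st.dbid = sn.dbid
   and st.instance_number = sn.instance_number
   and st.stat_name = 'physical reads direct (lob)'
 order by sn.snap_id;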

He finally drills down into the historical SQL detail and finds a new statement that began running 30 days ago (right after a software install). He compares the time the statement spends performing I/O, and it correlates almost perfectly with his increase in direct path read waits. He finds it does an EXISTSNODE call on a cached XMLTYPE column. Instant answer! But where is the problem? There may or may not be one.
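
A sketch of that last drill-down, again assuming AWR access: dba_hist_sqlstat keeps per-snapshot deltas, so a statement whose I/O suddenly appears in the history stands out:

select sn.begin_interval_time,
       sq.sql_id,
       sq.disk_reads_delta,
       round(sq.elapsed_time_delta / 1000000, 1) elapsed_secs
  from dba_hist_sqlstat sq,
       dba_hist_snapshot sn
 where sq.snap_id = sn.snap_id
   and sq.dbid = sn.dbid
   and sq.instance_number = sn.instance_number
   and sq.disk_reads_delta > 0
 order by sq.disk_reads_delta desc;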

Stay tuned for more later...
Cool little function to compute the statistical correlation between two wait events. It helped us identify performance drivers in our database. Anything above 60 (i.e., a correlation coefficient of 0.6) is generally considered significant, meaning the two are correlated: if one goes up, so does the other.

create or replace function corr_events (p_event1 in varchar2,
                                        p_event2 in varchar2)
return number is
  l_num number;
begin
  -- For each event, turn the cumulative time_waited_micro into a
  -- per-snapshot delta (current value minus the previous snapshot's),
  -- then correlate the two delta series across matching snapshots.
  select round((corr(val1, val2) * 100), 0)
    into l_num
    from (select snap_id,
                 time_waited_micro - min(time_waited_micro)
                   over (order by snap_id
                         rows between 1 preceding and current row) val1
            from dba_hist_system_event
           where event_name = p_event1) a,
         (select snap_id,
                 time_waited_micro - min(time_waited_micro)
                   over (order by snap_id
                         rows between 1 preceding and current row) val2
            from dba_hist_system_event
           where event_name = p_event2) b
   where a.snap_id = b.snap_id;
  return l_num;
end;
/
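
Usage is a single call with any two event names recorded in dba_hist_system_event, e.g. (the event pair here is just an illustration):

select corr_events('direct path read', 'direct path write') correlation
  from dual;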