Search This Blog

Friday 6 July 2012

Displaying Total Memory Footprint in SQLFire

The example below shows how you can view table / index memory footprint within your SQLFire distributed system.

1. Start the servers, ensuring you add sqlfire.jar as shown below.

Locator:

sqlf locator start -J-javaagent:/Users/papicella/sqlfire/SQLFire102_35992/lib/sqlfire.jar -peer-discovery-address=localhost -peer-discovery-port=41111 -client-bind-address=localhost -client-port=1527

SQLFire Nodes:

sqlf server start -J-Xms2024m -J-Xmx2024m -J-javaagent:/Users/papicella/sqlfire/SQLFire1021/lib/sqlfire.jar -server-groups=MYGROUP -locators=localhost[41111] -client-bind-address=localhost -client-port=1528 -dir=server1 &
sqlf server start -J-Xms2024m -J-Xmx2024m -J-javaagent:/Users/papicella/sqlfire/SQLFire1021/lib/sqlfire.jar -server-groups=MYGROUP -locators=localhost[41111] -client-bind-address=localhost -client-port=1529 -dir=server2 &

2. Query the system as shown below, using the sizerHints=withMemoryFootPrint hint with your query.

SQL:

select sqlentity, memory, substr(id, 1, 50) "Id" FROM sys.memoryAnalytics -- SQLFIRE-PROPERTIES sizerHints=withMemoryFootPrint;

  
sqlf> select sqlentity, memory, substr(id, 1, 50) "Id" FROM sys.memoryAnalytics -- SQLFIRE-PROPERTIES sizerHints=withMemoryFootPrint;
SQLENTITY                                                       |MEMORY                                                                                                                          |Id                                                
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
APP.EMP (Entry Size, Value Size, Row Count)                     |208000000,50997494,2000000                                                                                                      |10.117.85.62(9090)<v1>:43540/60442                
APP.EMP (sqlfire,gemfire,others)                                |64426568,61261504,52987760 (178675832 = 170.4 mb)                                                                               |10.117.85.62(9090)<v1>:43540/60442                
APP.EMP (Entry Size, Value Size, Row Count)                     |208000000,50997494,2000000                                                                                                      |10.117.85.62(9091)<v0>:41971/60439                
APP.EMP (sqlfire,gemfire,others)                                |133766368,129748968,307527368 (571042704 = 544.59 mb)                                                                           |10.117.85.62(9091)<v0>:41971/60439

For more information, see the link below.

http://pubs.vmware.com/vfabric5/index.jsp?topic=/com.vmware.vfabric.sqlfire.1.0/getting_started/book_intro.html

Thursday 5 July 2012

Difference between a GemFire Client and a SQLFire client

Often I get asked how a client accessing data in a GemFire region differs from one accessing a SQLFire table. Both are elastic data fabric caches, BUT SQLFire uses a compliant JDBC/ADO.NET interface whereas the other is a key-value store with a proprietary interface.

The best way to demonstrate this is by showing what the client code looks like in GemFire accessing a region versus what it would look like using a SQLFire table with a JDBC-compliant driver. The two code samples below use a Dept object and a Dept row with the same attributes/columns.

GemFire Code
  
package pas.au.vmware.se.dept;

import java.util.Collection;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.client.ClientCache;
import com.gemstone.gemfire.cache.client.ClientCacheFactory;
import com.gemstone.gemfire.cache.query.FunctionDomainException;
import com.gemstone.gemfire.cache.query.NameResolutionException;
import com.gemstone.gemfire.cache.query.Query;
import com.gemstone.gemfire.cache.query.QueryInvocationTargetException;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.TypeMismatchException;

/**
 * Demonstrates a GemFire client querying the /dept region with OQL and
 * printing each Dept entry. The client cache is configured from
 * config/client.xml.
 */
public class ViewDeps 
{
 private ClientCache cache = null;
 private Logger logger = Logger.getLogger(this.getClass().getSimpleName());
 
 /**
  * Creates the client cache from config/client.xml.
  */
 public ViewDeps()
 {
  ClientCacheFactory ccf = new ClientCacheFactory();
  ccf.set("cache-xml-file", "config/client.xml");
  cache = ccf.create();  
 }
 
 /**
  * Executes "SELECT * FROM /dept" via the query service and prints each
  * result as a Dept.
  *
  * @throws Exception if the query fails (propagated from query.execute())
  */
 public void run() throws Exception
 {
  try
  {
   QueryService queryService = cache.getQueryService(); 
   Query query = queryService.newQuery("SELECT * FROM /dept");
   logger.log (Level.INFO, "\nExecuting query:\n\t" + query.getQueryString());
   
   Object result = query.execute();
   
   // Results arrive untyped; each element is expected to be a Dept.
   Collection<?> collection = ((SelectResults<?>)result).asList();
   for (Object element : collection)
   {
    Dept dept = (Dept) element;
    System.out.println(dept);
   }
  }
  finally
  {
   // Always close the cache, even when the query or iteration throws,
   // so client connections to the distributed system are released.
   cache.close();
  }
 }
 
 /**
  * Entry point: creates the client and runs the query.
  *
  * @param args unused
  * @throws Exception on any query or cache failure
  */
 public static void main(String[] args) throws Exception 
 {
  ViewDeps test = new ViewDeps();
  test.run();
 }

}

Output

INFO:
Executing query:
    SELECT * FROM /dept

[info 2012/07/05 21:02:03.460 EST
tid=0x1] Adding: PdxType[

      id=1, name=pas.au.vmware.se.dept.Dept, fields=[
          deptno:int:0
          name:String:1]]

[info 2012/07/05 21:02:03.473 EST
tid=0x1] Auto serializer generating type for class pas.au.vmware.se.dept.Dept for fields:

      deptno: private int pas.au.vmware.se.dept.Dept.deptno
      name: private java.lang.String pas.au.vmware.se.dept.Dept.name
 
Dept [deptno=20, name=RESEARCH]
Dept [deptno=30, name=SALES]
Dept [deptno=10, name=ACCOUNTING]
Dept [deptno=40, name=OPERATION]

SQLFire Code
  
package pas.au.vmware.sqlfire;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Demonstrates a SQLFire thin (JDBC) client: connects to a locator on
 * localhost:1527, runs "select * from dept order by 1" and prints each row.
 */
public class TestThinClient 
{
 private Logger logger = Logger.getLogger(this.getClass().getSimpleName());
 private String url = "jdbc:sqlfire://localhost:1527/";
 
 public TestThinClient() 
 {
 }

 /**
  * Connects via DriverManager, runs the query and prints each row in the
  * form Dept[deptno, dname, loc]. SQL errors during the query are logged
  * at SEVERE; errors while closing resources are propagated.
  *
  * @throws SQLException if closing a resource fails
  */
 public void run() throws SQLException
 {
  Connection conn = null;
  Statement stmt = null;
  ResultSet rset = null;
  
  logger.log (Level.INFO, String.format("Connecting to SQLFire with url %s", url));
  
  try
  {
   conn = DriverManager.getConnection(url);
   logger.log(Level.INFO, conn.toString());
   stmt = conn.createStatement();
   rset = stmt.executeQuery("select * from dept order by 1");
   while (rset.next())
   {
    System.out.println(
                    String.format("Dept[%s, %s, %s]", 
                           rset.getInt(1), rset.getString(2), rset.getString(3))); 
   }
  }
  catch (SQLException se)
  {
   // Log with the exception attached so the stack trace is not lost.
   logger.log(Level.SEVERE, se.getMessage(), se);
  }
  finally
  {
   // Close in reverse order of acquisition (rset, stmt, conn), each
   // guarded individually so one failing close cannot leak the others.
   if (rset != null)
   {
    try { rset.close(); } catch (SQLException ignored) { /* already closing */ }
   }
   
   if (stmt != null)
   {
    try { stmt.close(); } catch (SQLException ignored) { /* already closing */ }
   }
   
   if (conn != null)
   {
    conn.close();
   }
  }
  
 }
 
 /**
  * Entry point: runs the demo query and prints any failure.
  *
  * @param args unused
  */
 public static void main(String[] args) 
 {
  TestThinClient test = new TestThinClient();
  try 
  {
   test.run();
  } 
  catch (SQLException e) 
  {
   e.printStackTrace();
  }
 }

}

Output

Jul 5, 2012 9:03:17 PM pas.au.vmware.sqlfire.TestThinClient run
INFO: Connecting to SQLFire with url jdbc:sqlfire://localhost:1527/
Jul 5, 2012 9:03:17 PM pas.au.vmware.sqlfire.TestThinClient run
INFO: com.vmware.sqlfire.internal.client.net.NetConnection40@2acdb06e
Dept[10, ACCOUNTING, NEW YORK]
Dept[20, RESEARCH, DALLAS]
Dept[30, SALES, CHICAGO]
Dept[40, OPERATIONS, BRISBANE]

Tuesday 3 July 2012

Importing data from Oracle 11g into SQLFire

Note for myself:

build.xml extract which ensures we only query data from a specific schema — in this case the HR schema. DDLUtils has issues with Oracle unless you clearly specify which schema to use.

  
<!-- Dumps the Oracle schema structure (and optionally its data) to XML via
     Apache DDLUtils, restricted to the HR schema with schemapattern. -->
<target name="writeDDLToXML" description="Dumps the database structure">
  <taskdef name="databaseToDdl"
           classname="org.apache.ddlutils.task.DatabaseToDdlTask">
    <classpath refid="runtime-classpath"/>

  </taskdef>
  <!-- schemapattern limits DDLUtils to the HR schema; without it, Oracle
       metadata from every visible schema is scanned. -->
  <databaseToDdl verbosity="debug" schemapattern="HR">
    <database url="jdbc:oracle:thin:@172.16.101.70:1521/linux11gr2"
              driverClassName="oracle.jdbc.OracleDriver"
              username="hr"
              password="hr" />

    <!-- Table definitions are written as a DDLUtils schema XML file. -->
    <writeSchemaToFile outputFile="db-schema1.xml" failonerror="false" />

    <!-- Comment line below if the source DB is too big -->
    <writeDataToFile outputFile="data.xml" determineschema="false"/>
  </databaseToDdl>

</target>