statement.setParameters( getEnv() );
//String jpaQueryStr = statement.getQueryString();
String sqlQuery = statement.getSqlQueryString();
UserProfile userProfile = (UserProfile)getEnv().get(EngineConstants.ENV_USER_PROFILE);
// STEP 3: transform the sql query
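// The transformer rewrites the plain SQL produced by the statement: the requested group-by
// fields become GROUP BY columns and the selected measures are wrapped in their aggregation
// functions, so the grouping is performed by the database rather than by the engine.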
GroupByQueryTransformer transformer = new GroupByQueryTransformer();
List selectFields = SqlUtils.getSelectFields(sqlQuery);
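// Note: each entry of selectFields is assumed to be a String[] pair where, judging from the
// f[1] != null ? f[1] : f[0] usage below, f[0] is the bare column name and f[1] an optional
// qualified/aliased form to prefer when present.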
for(int i = 0; i < groupFields.length(); i++) {
	String groupByField = groupFields.getString(i);
	int fieldIndex = query.getSelectFieldIndex(groupByField);
	Assert.assertTrue(fieldIndex >= 0 && fieldIndex < selectFields.size(), "Impossible to group on field [" + groupByField + "]");
	String[] f = (String[])selectFields.get(fieldIndex);
	transformer.addGrouByColumn(f[1] != null ? f[1] : f[0], query.getSelectFieldByIndex(fieldIndex).getAlias());
}
// count column
transformer.addAggregateColumn("*"/*f[1]!=null? f[1]:f[0]*/, "COUNT", "Records");
// aggregate measures
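// Only select fields that carry an aggregation function other than NONE become aggregate
// columns in the rewritten query; the remaining fields were handled as group-by columns above.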
List dataMartSelectFields = query.getDataMartSelectFields(true);
Iterator it = dataMartSelectFields.iterator();
while (it.hasNext()) {
	DataMartSelectField field = (DataMartSelectField) it.next();
	int fieldIndex = query.getSelectFieldIndex(field.getUniqueName());
	String[] f = (String[])selectFields.get(fieldIndex);
	IAggregationFunction aggregationFunction = field.getFunction();
	if (aggregationFunction != null && aggregationFunction != AggregationFunctions.NONE_FUNCTION) {
		transformer.addAggregateColumn(f[1] != null ? f[1] : f[0], aggregationFunction.getName(), field.getAlias());
	}
}
sqlQuery = (String)transformer.transformQuery(sqlQuery);
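// Illustrative sketch only (the exact output shape of GroupByQueryTransformer is not shown here):
// a query like
//   SELECT customer, amount FROM sales
// grouped on "customer" with SUM(amount) would be rewritten into something along the lines of
//   SELECT customer, COUNT(*) AS Records, SUM(amount) FROM ( SELECT customer, amount FROM sales ) GROUP BY customer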
// STEP 4: execute the query
try {
	logger.debug("Executing query: [" + sqlQuery + "]");
	auditlogger.info("[" + userProfile.getUserId() + "]:: SQL: " + sqlQuery);
	// build a JDBC dataset on the fly, reusing the connection settings of the engine's data source
	dataSet = new JDBCDataSet();
	ConnectionDescriptor connection = (ConnectionDescriptor)getDataSource().getConfiguration().loadDataSourceProperties().get("connection");
	DataSource dataSource = new DataSource();
	dataSource.setJndi(connection.getJndiName());
	dataSource.setHibDialectName(connection.getDialect());
	dataSource.setUrlConnection(connection.getUrl());
	dataSource.setDriver(connection.getDriverClass());
	dataSource.setUser(connection.getUsername());
	dataSource.setPwd(connection.getPassword());
	dataSet.setDataSource(dataSource);
	dataSet.setQuery(sqlQuery);
	// load the requested page of results; the third argument is presumably the max-results cap (-1 = no cap)
	dataSet.loadData(start, limit, -1);
	dataStore = dataSet.getDataStore();
} catch (Exception e) {
	logger.debug("Query execution aborted because of an internal exception");
	String message = "An error occurred in " + getActionName() + " service while executing query: [" + statement.getQueryString() + "]";
	SpagoBIEngineServiceException exception = new SpagoBIEngineServiceException(getActionName(), message, e);
	exception.addHint("Check if the query is properly formed: [" + statement.getQueryString() + "]");
	exception.addHint("Check connection configuration");
	exception.addHint("Check the qbe jar file");
	throw exception;
}
logger.debug("Query executed succesfully");
//dataStore.getMetaData().setProperty("resultNumber", new Integer( (int)dataStore.getRecordsCount() ));
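// "resultNumber" is expected to be set on the data store metadata by loadData and to hold
// the total number of matching records, independently of the start/limit paging window.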
resultNumber = (Integer)dataStore.getMetaData().getProperty("resultNumber");
Assert.assertNotNull(resultNumber, "property [resultNumber] of the dataStore returned by loadData method of the class [" + dataSet.getClass().getName()+ "] cannot be null");
logger.debug("Total records: " + resultNumber);
boolean overflow = maxSize != null && resultNumber >= maxSize;
if (overflow) {
	logger.warn("Query results number [" + resultNumber + "] exceeds the max result limit [" + maxSize + "]");
	auditlogger.info("[" + userProfile.getUserId() + "]:: max result limit [" + maxSize + "] exceeded with SQL: " + sqlQuery);
}
dataSetWriter = new JSONDataWriter();
gridDataFeed = (JSONObject)dataSetWriter.write(dataStore);
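// gridDataFeed now holds the JSON serialization of the data store (presumably the result
// metadata plus the rows) ready to be sent back to the client-side grid.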