parameterValues);
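// allFieldNames collects the query's field names so the LOB and key
// lookups below (getBlobFieldName/getKeyFieldName) can resolve them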
HashMap allFieldNames = prepareGetFieldName(queryStm);
// retrieve the field type information for the query
SOSImExportTableFieldTypes fieldTypes = getFieldTypes(queryStm.toString());
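// fieldTypes supplies the type name, typeID, length and scale that are
// written as attributes of each <field> element below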
// retrieve the result rows
ArrayList result = getArray(queryStm.toString());
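// each result row comes back as a HashMap of column name -> value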
output.append(indent(1) + "<" + normalizeTagName(_xmlTagname + "_package id=\"")
        + _queries.get(queryId).getTag() + "\">\n");
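// an empty result still yields the (empty) package element opened above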
if (!result.isEmpty()) {
    //// META START ////
    output.append(indent(1) + "<" + normalizeTagName(_xmlTagname + "_meta") + ">\n");
    output.append(indent(0) + "<" + normalizeTagName("table name=\"")
            + _queries.get(queryId).getTag() + "\" />\n");
    // key fields
    output.append(indent(1) + "<" + normalizeTagName("key_fields") + ">\n");
    for (int i = 0; i < _queries.get(queryId).getKeyCnt(); i++) {
        if (_log != null)
            _log.debug6("key_field[" + i + "]=\"" + _queries.get(queryId).getKey(i) + "\"");
        String keyField = normalizeFieldName(_queries.get(queryId).getKey(i));
        output.append(indent() + "<" + normalizeTagName("field name=\"") + keyField + "\"");
        output.append(" type=\"" + fieldTypes.getTypeName(keyField) + "\"");
        output.append(" typeID=\"" + fieldTypes.getTypeId(keyField) + "\"");
        output.append(" len=\"" + fieldTypes.getLength(keyField) + "\"");
        output.append(" scale=\"" + fieldTypes.getScale(keyField) + "\"");
        output.append(" />\n");
    }
    output.append(indent(-1) + normalizeTagName("</key_fields>") + "\n");
    // fields
    output.append(indent(1) + normalizeTagName("<fields>") + "\n");
    Object[] fields = ((HashMap) result.get(0)).keySet().toArray();
    for (int i = 0; i < fields.length; i++) {
        String fieldName = normalizeFieldName((String) fields[i]);
        output.append(indent() + "<" + normalizeTagName("field name=\"") + fieldName + "\"");
        output.append(" type=\"" + fieldTypes.getTypeName(fieldName) + "\"");
        output.append(" typeID=\"" + fieldTypes.getTypeId(fieldName) + "\"");
        output.append(" len=\"" + fieldTypes.getLength(fieldName) + "\"");
        output.append(" scale=\"" + fieldTypes.getScale(fieldName) + "\"");
        output.append(" />\n");
    }
    fields = null;
    output.append(indent(-1) + normalizeTagName("</fields>") + "\n");
    output.append(indent(-1) + "</" + normalizeTagName(_xmlTagname + "_meta") + ">\n");
    //// META END ////
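    // data section: one <..._record> element per result row, its columns nested inside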
    // write the records
    output.append(indent(1) + "<" + normalizeTagName(_xmlTagname + "_data") + ">\n");
    // rows
    for (int i = 0; i < result.size(); i++) {
        HashMap record = (HashMap) result.get(i);
        if (_log != null)
            _log.debug9("get: " + _queries.get(queryId).getTag() + " query_id=" + queryId);
        output.append(indent(1) + "<" + normalizeTagName(_xmlTagname + "_record name=\"")
                + _queries.get(queryId).getTag() + "\">\n");
        output.append(indent(1) + "<" + normalizeTagName(_xmlTagname + "_fields") + ">\n");
        // columns (alphabetical sorting is disabled)
        //Vector record_vector = new Vector(record.keySet());
        //Collections.sort(record_vector);
        //for (Iterator it = record_vector.iterator(); it.hasNext(); ) {
        for (Iterator it = record.keySet().iterator(); it.hasNext();) {
            String key = it.next().toString();
            String lobType = null;
            switch (fieldTypes.getTypeId(normalizeFieldName(key))) {
            case Types.LONGVARCHAR:
            case Types.CLOB:
                lobType = "clob";
                // fall through: all LOB types share the hex export below
            case Types.BINARY:
            case Types.BLOB:
            case Types.LONGVARBINARY:
            case Types.VARBINARY:
                if (lobType == null) lobType = "blob";
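                // the generic result map apparently does not carry usable LOB
                // content, so the column is re-read with a dedicated SELECT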
                //// treat as binary and convert to hex ////
                // build a statement that selects only the LOB column
                String stmLower = queryStm.toString().toLowerCase();
                int posBegin = stmLower.indexOf("from");
                int posEnd = stmLower.indexOf(" ", posBegin + 5);
                StringBuffer queryBlobStm = new StringBuffer();
                String blobFieldName = getBlobFieldName(allFieldNames, key);
                queryBlobStm.append("SELECT " + normalizeFieldName(blobFieldName) + " ");
                if (posEnd < posBegin) posEnd = queryStm.length(); // no blank after the table name: take the rest
                queryBlobStm.append(queryStm.substring(posBegin, posEnd));
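                // constrain the LOB select to the current row via its key fields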
                String and = " WHERE ";
                for (int j = 0; j < _queries.get(queryId).getKeyCnt(); j++) {
                    String normalizedKey = normalizeFieldName(_queries.get(queryId).getKey(j));
                    String keyFieldName = getKeyFieldName(allFieldNames, _queries.get(queryId).getKey(j));
                    queryBlobStm.append(and + normalizeFieldName(keyFieldName) + " =");
                    queryBlobStm.append(quote(fieldTypes.getTypeId(normalizedKey),
                            (String) record.get(normalizedKey)));
                    and = " AND ";
                }
                // read the LOB and write it hex-encoded with line wrapping
                byte[] blob = null;
                if (lobType.equals("blob")) {
                    blob = _conn.getBlob(queryBlobStm.toString());
                } else {
                    blob = str2bin(_conn.getClob(queryBlobStm.toString()));
                }
                output.append(indent() + "<" + normalizeTagName(key) + " null=");
                if (blob != null && blob.length > 0) {
                    indent(1);
                    output.append("\"false\">\n"
                            + toHexString(blob, indent(), _lineWrap) + "\n"
                            + indent(-1));
                } else {
                    output.append("\"true\">");
                }
                output.append("</" + normalizeTagName(key) + ">\n");
                break;
            // ...otherwise write the value as an XML string
            default:
                // TODO workaround - strip trailing milliseconds from date/timestamp values
                if (record.get(key) != null) {
                    switch (fieldTypes.getTypeId(normalizeFieldName(key))) {
                    case Types.DATE:
                    case Types.TIMESTAMP:
                        String val = record.get(key).toString();
                        if (val.endsWith(".0")) {