* simpler and concise, just switch all column names to lower-case.
* This is ok since we acknowledge up front that DSV import/export
* assume no special characters or escaping in column names. */
// Raw bytes of the entire DSV input file (allocated and filled below).
byte[] bfr = null;
File file = new File(filePath);
// Maps lower-cased constant-column names to the literal value which will
// be appended to every inserted row.  TreeMap so that the values()
// iteration order later on matches the keySet() order used for headers.
SortedMap constColMap = null;
if (dsvConstCols != null) {
// We trim col. names, but not values. Must allow users to
// specify values as spaces, empty string, null.
// Hence \s* only AFTER the delimiter here (trims the next name's
// leading whitespace without touching the previous value).
constColMap = new TreeMap();
String[] constPairs = dsvConstCols.split("\\Q"
+ dsvColDelim + "\\E\\s*");
int firstEq;
String n;
for (int i = 0; i < constPairs.length; i++) {
firstEq = constPairs[i].indexOf('=');
// NOTE(review): if a pair contains no '=', firstEq is -1 and the
// substring() below throws StringIndexOutOfBoundsException instead
// of a friendly SqlToolError -- confirm input is validated upstream.
n = constPairs[i].substring(0, firstEq).trim().toLowerCase();
if (n.trim().length() < 1) {
throw new SqlToolError(
rb.getString(SqltoolRB.DSV_CONSTCOLS_NULLCOL));
}
constColMap.put(n, constPairs[i].substring(firstEq + 1));
}
}
// Lower-cased names of input columns to be ignored entirely.
Set skipCols = null;
if (dsvSkipCols != null) {
skipCols = new HashSet();
String[] skipColsArray = dsvSkipCols.split("\\s*\\Q"
+ dsvColDelim + "\\E\\s*");
for (int i = 0; i < skipColsArray.length; i++) {
skipCols.add(skipColsArray[i].toLowerCase());
}
}
// Refuse unreadable files up front with a friendly message, rather than
// letting the stream constructor below throw FileNotFoundException.
if (!file.canRead()) {
throw new SqlToolError(rb.getString(SqltoolRB.FILE_READFAIL,
file.toString()));
}
try {
// One buffer for the whole file.  For files > Integer.MAX_VALUE
// bytes the (int) cast of the long length goes negative, the array
// allocation throws, and we report the file as too big.
bfr = new byte[(int) file.length()];
} catch (RuntimeException re) {
throw new SqlToolError(rb.getString(SqltoolRB.READ_TOOBIG), re);
}
// Slurp the file.  Loop because a single read() call may return fewer
// bytes than requested.
int bytesread = 0;
int retval;
InputStream is = null;
try {
is = new FileInputStream(file);
while (bytesread < bfr.length &&
(retval = is.read(bfr, bytesread, bfr.length - bytesread))
> 0) {
bytesread += retval;
}
} catch (IOException ioe) {
throw new SqlToolError(ioe);
} finally {
// A close failure is only reported, not fatal:  by this point the
// data has been read (or a read failure has already been thrown).
if (is != null) try {
is.close();
} catch (IOException ioe) {
errprintln(rb.getString(SqltoolRB.INPUTFILE_CLOSEFAIL)
+ ": " + ioe);
}
}
if (bytesread != bfr.length) {
throw new SqlToolError(rb.getString(SqltoolRB.READ_PARTIAL,
bytesread, bfr.length));
}
// Decode the bytes into one big String:  user-specified charset if
// given, else the platform default encoding.
String string = null;
String dateString;
try {
string = ((charset == null)
? (new String(bfr)) : (new String(bfr, charset)));
} catch (UnsupportedEncodingException uee) {
// NOTE(review): presumably charset names are validated before this
// point, making an unsupported name an internal error -- confirm.
throw new RuntimeException(uee);
} catch (RuntimeException re) {
throw new SqlToolError(rb.getString(SqltoolRB.READ_CONVERTFAIL),
re);
}
// Column names parsed from the header line (lower-cased); null entries
// will mark columns that are not read from data records.
List headerList = new ArrayList();
String tableName = dsvTargetTable;
// N.b. ENDs are the index of 1 PAST the current item
int recEnd = -1000; // Recognizable value incase something goes
// horrifically wrong.
int colStart;
int colEnd;
// First read one until we get one header line
int lineCount = 0; // Assume a 1 line header?
int recStart = -1;
String trimmedLine = null;
// True while inside a "headerswitch{ ... }" block, where each line is a
// "matcher:header" pair and only the line matching the target table
// (or "*") supplies the header.
boolean switching = false;
while (true) {
recStart = (recStart < 0) ? 0 : (recEnd + dsvRowDelim.length());
if (recStart > string.length() - 2) {
// Ran out of input without finding a usable header line.
throw new SqlToolError(rb.getString(SqltoolRB.DSV_HEADER_NONE));
}
recEnd = string.indexOf(dsvRowDelim, recStart);
lineCount++; // Increment when we have line start and end
if (recEnd < 0) {
// Last line in file. No data records.
recEnd = string.length();
}
trimmedLine = string.substring(recStart, recEnd).trim();
if (trimmedLine.length() < 1
|| (skipPrefix != null
&& trimmedLine.startsWith(skipPrefix))) {
continue; // Blank line or commented-out line before the header
}
if (trimmedLine.startsWith("targettable=")) {
// In-file table name setting; an externally set dsvTargetTable
// takes precedence over it.
if (tableName == null) {
tableName = trimmedLine.substring(
"targettable=".length()).trim();
}
continue;
}
if (trimmedLine.equals("headerswitch{")) {
// Switch blocks need the target table known up front, since
// each matcher is compared against it below.
if (tableName == null) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_HEADER_NOSWITCHTARG, lineCount));
}
switching = true;
continue;
}
if (trimmedLine.equals("}")) {
// Either a '}' with no open "headerswitch{", or the block ended
// without any matcher matching the target table.
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_HEADER_NOSWITCHMATCH, lineCount));
}
if (!switching) {
break; // This line is the header line
}
// Inside a switch block:  line must look like "matcher:headerline".
int colonAt = trimmedLine.indexOf(':');
if (colonAt < 1 || colonAt == trimmedLine.length() - 1) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_HEADER_NONSWITCHED, lineCount));
}
String matcher = trimmedLine.substring(0, colonAt).trim();
// Need to be sure here that tableName is not null (in
// which case it would be determined later on by the file name).
if (matcher.equals("*") || matcher.equalsIgnoreCase(tableName)){
// Use everything after the ':' as the header line.
recStart = 1 + string.indexOf(':', recStart);
break;
}
// Skip non-matched header line
}
// Full header line text, preserved verbatim for the reject file below.
String headerLine = string.substring(recStart, recEnd);
// Parse the header line into individual column names.
colStart = recStart;
colEnd = -1;
String colName;
while (true) {
if (colEnd == recEnd) {
// We processed final column last time through loop
break;
}
colEnd = string.indexOf(dsvColDelim, colStart);
if (colEnd < 0 || colEnd > recEnd) {
colEnd = recEnd; // Final column of the header line
}
if (colEnd - colStart < 1) {
// Empty column name in the header
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_NOCOLHEADER,
headerList.size() + 1, lineCount));
}
colName = string.substring(colStart, colEnd).trim().toLowerCase();
// Store null for any column not to be read from data records:
// explicit "-" placeholders, user-skipped columns, and columns
// whose value is overridden by a constant.
headerList.add(
(colName.equals("-")
|| (skipCols != null
&& skipCols.remove(colName))
|| (constColMap != null
&& constColMap.containsKey(colName))
)
? ((String) null)
: colName);
colStart = colEnd + dsvColDelim.length();
// (The unconventional "} if" formatting below is original.)
} if (skipCols != null && skipCols.size() > 0) {
// Matched skip cols were remove()d above, so leftovers mean the
// user named columns which are not present in the header.
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_SKIPCOLS_MISSING, skipCols.toString()));
}
// Require at least one real (non-null) input column.
boolean oneCol = false; // At least 1 non-null column
for (int i = 0; i < headerList.size(); i++) {
if (headerList.get(i) != null) {
oneCol = true;
break;
}
}
if (oneCol == false) {
// Difficult call, but I think in any real-world situation, the
// user will want to know if they are inserting records with no
// data from their input file.
throw new SqlToolError(rb.getString(SqltoolRB.DSV_NOCOLSLEFT,
dsvSkipCols));
}
// Number of columns physically present in each DSV record.
int inputColHeadCount = headerList.size();
// Constant columns are appended after all file columns.
if (constColMap != null) {
headerList.addAll(constColMap.keySet());
}
String[] headers = (String[]) headerList.toArray(new String[0]);
// headers contains input headers + all constCols, some of these
// values may be nulls.
if (tableName == null) {
// Fall back to the input file's base name, extension stripped.
tableName = file.getName();
int i = tableName.lastIndexOf('.');
if (i > 0) {
tableName = tableName.substring(0, i);
}
}
// Build the comma-separated target column list (tmpSb) and the parallel
// list of insert column names (tmpList), skipping null header entries.
StringBuffer tmpSb = new StringBuffer();
List tmpList = new ArrayList();
int skippers = 0; // Count of null (skipped) header entries
for (int i = 0; i < headers.length; i++) {
if (headers[i] == null) {
skippers++;
continue;
}
if (tmpSb.length() > 0) {
tmpSb.append(", ");
}
tmpSb.append(headers[i]);
tmpList.add(headers[i]);
}
// Per-insert-column behavior flags, set from table metadata below:
//   autonulls[i]  true  => trim value and insert null for all-whitespace
//   parseDate[i]  true  => value must be parsed as date/time/timestamp
//   parseBool[i]  true  => value must be parsed as a boolean
boolean[] autonulls = new boolean[headers.length - skippers];
boolean[] parseDate = new boolean[autonulls.length];
boolean[] parseBool = new boolean[autonulls.length];
String[] insertFieldName = (String[]) tmpList.toArray(new String[] {});
// Remember that the headers array has all columns in DSV file,
// even skipped columns.
// The autonulls array only has columns that we will insert into.
StringBuffer sb = new StringBuffer("INSERT INTO " + tableName + " ("
+ tmpSb + ") VALUES (");
// "WHERE 1 = 2" selects no rows; we run this only for the metadata.
StringBuffer typeQuerySb = new StringBuffer("SELECT " + tmpSb
+ " FROM " + tableName + " WHERE 1 = 2");
try {
ResultSetMetaData rsmd = curConn.createStatement().executeQuery(
typeQuerySb.toString()).getMetaData();
if (rsmd.getColumnCount() != autonulls.length) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_METADATA_MISMATCH));
// Don't know if it's possible to get here.
// If so, it's probably a SqlTool problem, not a user or
// data problem.
// Should be researched and either return a user-friendly
// message or a RuntimeExceptin.
}
for (int i = 0; i < autonulls.length; i++) {
autonulls[i] = true;
parseDate[i] = false;
parseBool[i] = false;
switch(rsmd.getColumnType(i + 1)) {
case java.sql.Types.BOOLEAN:
parseBool[i] = true;
break;
case java.sql.Types.VARBINARY :
case java.sql.Types.VARCHAR :
case java.sql.Types.ARRAY :
// Guessing at how to handle ARRAY.
case java.sql.Types.BLOB :
case java.sql.Types.CLOB :
case java.sql.Types.LONGVARBINARY :
case java.sql.Types.LONGVARCHAR :
autonulls[i] = false;
// This means to preserve white space and to insert
// "" for "". Otherwise we trim white space and
// insert null for \s*.
break;
case java.sql.Types.DATE:
case java.sql.Types.TIME:
case java.sql.Types.TIMESTAMP:
parseDate[i] = true;
}
}
} catch (SQLException se) {
throw new SqlToolError(rb.getString(
SqltoolRB.QUERY_METADATAFAIL,
typeQuerySb.toString()), se);
}
// Append one '?' placeholder per insert column to the INSERT statement.
for (int i = 0; i < autonulls.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append('?');
}
// Initialize REJECT file(s)
int rejectCount = 0;
File rejectFile = null;
File rejectReportFile = null;
PrintWriter rejectWriter = null;
PrintWriter rejectReportWriter = null;
// Raw reject file:  seeded here with the original header line so it can
// be re-imported directly after the bad records are fixed.
if (dsvRejectFile != null) try {
rejectFile = new File(dsvRejectFile);
rejectWriter = new PrintWriter((charset == null)
? (new OutputStreamWriter(new FileOutputStream(rejectFile)))
: (new OutputStreamWriter(new FileOutputStream(rejectFile),
charset)));
// Replace with just "(new FileOutputStream(file), charset)"
// once use defaultCharset from Java 1.5 in charset init.
// above.
rejectWriter.print(headerLine + dsvRowDelim);
} catch (IOException ioe) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_REJECTFILE_SETUPFAIL, dsvRejectFile), ioe);
}
// Reject report file:  human-readable report, opened with a banner
// naming the import time, input file, and reject file.
if (dsvRejectReport != null) try {
rejectReportFile = new File(dsvRejectReport);
rejectReportWriter = new PrintWriter((charset == null)
? (new OutputStreamWriter(
new FileOutputStream(rejectReportFile)))
: (new OutputStreamWriter(
new FileOutputStream(rejectReportFile), charset)));
// Replace with just "(new FileOutputStream(file), charset)"
// once use defaultCharset from Java 1.5 in charset init.
// above.
rejectReportWriter.println(rb.getString(
SqltoolRB.REJECTREPORT_TOP, new String[] {
(new java.util.Date()).toString(),
file.getPath(),
((rejectFile == null) ? rb.getString(SqltoolRB.NONE)
: rejectFile.getPath()),
// NOTE(review): this element repeats rejectFile.getPath()
// (or null).  Possibly rejectReportFile.getPath() was
// intended -- confirm against the REJECTREPORT_TOP template.
((rejectFile == null) ? null : rejectFile.getPath()),
}));
} catch (IOException ioe) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_REJECTREPORT_SETUPFAIL, dsvRejectReport),
ioe);
}
int recCount = 0;  // Data records attempted (counts inserts + rejects)
int skipCount = 0; // Records skipped because of skipPrefix
PreparedStatement ps = null;
boolean importAborted = false;
try {
try {
// sb holds "INSERT INTO t (cols) VALUES (?, ..."; close the paren.
ps = curConn.prepareStatement(sb.toString() + ')');
} catch (SQLException se) {
throw new SqlToolError(rb.getString(
SqltoolRB.INSERTION_PREPAREFAIL, sb.toString()), se);
}
String[] dataVals = new String[autonulls.length];
// Length is number of cols to insert INTO, not nec. # in DSV file.
int readColCount;  // Columns read from the current record
int storeColCount; // Columns actually stored into dataVals
String currentFieldName = null;
// Insert data rows 1-row-at-a-time
while (true) try { try {
recStart = recEnd + dsvRowDelim.length();
if (recStart >= string.length()) {
break; // Past end of input:  all records processed
}
recEnd = string.indexOf(dsvRowDelim, recStart);
lineCount++; // Increment when we have line start and end
if (recEnd < 0) {
// Last record
recEnd = string.length();
}
trimmedLine = string.substring(recStart, recEnd).trim();
if (trimmedLine.length() < 1) {
continue; // Silently skip blank lines
}
if (skipPrefix != null
&& trimmedLine.startsWith(skipPrefix)) {
skipCount++;
continue;
}
// Non-selected headerswitch branch lines may follow the chosen
// header; skip them (and the closing "}") here.
if (switching) {
if (trimmedLine.equals("}")) {
switching = false;
continue;
}
int colonAt = trimmedLine.indexOf(':');
if (colonAt < 1 || colonAt == trimmedLine.length() - 1) {
throw new SqlToolError(rb.getString(
SqltoolRB.DSV_HEADER_MATCHERNONHEAD,
lineCount));
}
continue;
}
// Finally we will attempt to add a record!
recCount++;
// Remember that recCount counts both inserts + rejects
colStart = recStart;
colEnd = -1;
readColCount = 0;
storeColCount = 0;
// Split the record into column values, storing values only for
// columns with non-null header entries.
while (true) {
if (colEnd == recEnd) {
// We processed final column last time through loop
break;
}
colEnd = string.indexOf(dsvColDelim, colStart);
if (colEnd < 0 || colEnd > recEnd) {
colEnd = recEnd; // Final column of this record
}
if (readColCount == inputColHeadCount) {
// Record has more columns than the header
throw new RowError(rb.getString(
SqltoolRB.DSV_COLCOUNT_MISMATCH,
inputColHeadCount, 1 + readColCount));
}
if (headers[readColCount++] != null) {
dataVals[storeColCount++] =
string.substring(colStart, colEnd);
}
colStart = colEnd + dsvColDelim.length();
}
if (readColCount < inputColHeadCount) {
// Record has fewer columns than the header
throw new RowError(rb.getString(
SqltoolRB.DSV_COLCOUNT_MISMATCH,
inputColHeadCount, readColCount));
}
/* Already checked for readColCount too high in prev. block */
// Append constant-column values.  constColMap is a TreeMap, so
// values() iterates in the same key order used when the constant
// column names were appended to headers above.
if (constColMap != null) {
Iterator it = constColMap.values().iterator();
while (it.hasNext()) {
dataVals[storeColCount++] = (String) it.next();
}
}
if (storeColCount != dataVals.length) {