Examples of CSVFileWriter


Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

        row.put("OwnerId", userId);
        row.put("Subject", TASK_SUBJECT);
        row.put(nullFieldName, nullFieldValue);
        if (id != null) row.put("Id", id);

        CSVFileWriter writer = null;
        try {
            writer = new CSVFileWriter(CSV_FILE_PATH, getController().getConfig());
            writer.open();
            writer.setColumnNames(new ArrayList<String>(row.keySet()));
            writer.writeRow(row);
        } finally {
            if (writer != null) writer.close();
        }
    }
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

    @Test
    public void testCSVWriteBasic() throws Exception {
        File f = new File(getTestDataDir(), "csvtestTemp.csv");
        String path = f.getAbsolutePath();
        CSVFileWriter writer = new CSVFileWriter(path, getController().getConfig());
        List<Row> rowList = new ArrayList<Row>();

        rowList.add(row1);
        rowList.add(row2);

        writer.open();
        writer.setColumnNames(writeHeader);

        writer.writeRowList(rowList);
        writer.close();

        compareWriterFile(path);

        f.delete();
    }
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

        logger.info(Messages.getFormattedString("DataAccessObjectFactory.creatingDao", new String[] {config.getString(Config.DAO_NAME), daoType}));

        if (CSV_READ_TYPE.equalsIgnoreCase(daoType)) {
            dao = new CSVFileReader(config);
        } else if (CSV_WRITE_TYPE.equalsIgnoreCase(daoType)) {
            dao = new CSVFileWriter(config.getString(Config.DAO_NAME), config);
        } else if (DATABASE_READ_TYPE.equalsIgnoreCase(daoType)) {
            dao = new DatabaseReader(config);
        } else if (DATABASE_WRITE_TYPE.equalsIgnoreCase(daoType)) {
            dao = new DatabaseWriter(config);
        } else {
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

        if (csvFile.exists()) {
            boolean deleteCsvFileOk = csvFile.delete();
            assertTrue("Could not delete existing CSV file: " + CSV_FILE_PATH, deleteCsvFileOk);
        }

        CSVFileWriter writer = null;
        try {
            writer = new CSVFileWriter(CSV_FILE_PATH, getController().getConfig());
            writer.open();
            writer.setColumnNames(new ArrayList<String>(rows[0].keySet()));
            writer.writeRowList(Arrays.asList(rows));
        } finally {
            if (writer != null) {
                writer.close();
            }
        }
    }
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

                }
                inputRows.add(row);
                idx++;
            }
            final String inputPath = new File(getTestDataDir(), inputFileName).getAbsolutePath();
            final CSVFileWriter inputWriter = new CSVFileWriter(inputPath, getController().getConfig());
            try {
                inputWriter.open();
                inputWriter.setColumnNames(templateReader.getColumnNames());
                inputWriter.writeRowList(inputRows);
                return inputPath;
            } finally {
                inputWriter.close();
            }
        } finally {
            templateReader.close();
        }
    }
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

    private DataWriter createErrorWriter() throws DataAccessObjectInitializationException {
        final String filename = getConfig().getString(Config.OUTPUT_ERROR);
        if (filename == null || filename.length() == 0)
            throw new DataAccessObjectInitializationException(getMessage("errorMissingErrorFile"));
        // TODO: Make sure that specific DAO is not mentioned: use DataReader, DataWriter, or DataAccessObject
        return new CSVFileWriter(filename, getConfig());
    }
View Full Code Here

Examples of com.salesforce.dataloader.dao.csv.CSVFileWriter

    private DataWriter createSuccesWriter() throws DataAccessObjectInitializationException {
        final String filename = getConfig().getString(Config.OUTPUT_SUCCESS);
        if (filename == null || filename.length() == 0)
            throw new DataAccessObjectInitializationException(getMessage("errorMissingSuccessFile"));
        // TODO: Make sure that specific DAO is not mentioned: use DataReader, DataWriter, or DataAccessObject
        return new CSVFileWriter(filename, getConfig());
    }
View Full Code Here

Examples of dmt.tools.CSVFileWriter

        }

        line = reader.readLine();
      }

      CSVFileWriter out = new CSVFileWriter("output/" + clustersCount
          + "clusterCentersMarina.csv", ',');
      out.writeFields(headers);
      for (int i = 0; i < clustersCount; i++)
      {
        Vector<String> fields = clusterCenters.get(i);
        fields.add(0, "Cluster" + i);
        out.writeFields(fields);
      }
      out.close();

    }
  }
View Full Code Here

Examples of dmt.tools.CSVFileWriter

        .hierarchicalCluster(TextInstance.textInstancesSet);
    int k = 30;
    Set<Set<TextInstance>> clResponsePartition = completeLinkDendrogram
        .partitionK(k);
    Object[] clusters = clResponsePartition.toArray();
    CSVFileWriter out = new CSVFileWriter("completeLinkClustersCSV.csv",
        ',');
    Vector<String> header = new Vector<String>();
    header.add("Cluster label");
    out.writeFields(header);
    Object[] textInstancesArray = TextInstance.textInstancesSet.toArray();
    for (int i = 0; i < textInstancesArray.length; i++)
    {
      for (int j = 0; j < clusters.length; j++)
      {
        if (((Set<TextInstance>) clusters[j])
            .contains(textInstancesArray[i]))
        {
          Vector<String> clusterLabel = new Vector<String>();
          clusterLabel.add((j + 1) + "");
          out.writeFields(clusterLabel);
        }
      }
    }
    out.close();

    HierarchicalClusterer<TextInstance> slClusterer = new SingleLinkClusterer<TextInstance>(
        TextInstance.EUCLIDEAN_DISTANCE);
    Dendrogram<TextInstance> singleLinkDendrogram = slClusterer
        .hierarchicalCluster(TextInstance.textInstancesSet);
    Set<Set<TextInstance>> slResponsePartition = singleLinkDendrogram
        .partitionK(k);
    clusters = slResponsePartition.toArray();
    out = new CSVFileWriter("singleLinkClustersCSV.csv", ',');
    out.writeFields(header);
    for (int i = 0; i < textInstancesArray.length; i++)
    {
      for (int j = 0; j < clusters.length; j++)
      {
        if (((Set<TextInstance>) clusters[j])
            .contains(textInstancesArray[i]))
        {
          Vector<String> clusterLabel = new Vector<String>();
          clusterLabel.add((j + 1) + "");
          out.writeFields(clusterLabel);
        }
      }
    }
    out.close();

  }
View Full Code Here

Examples of dmt.tools.CSVFileWriter

    }
    ClusterAnalysis jca = new ClusterAnalysis(5, 100, dataPoints);
    jca.startAnalysis();

    Vector<Vector<DataPoint>> clusters = jca.getClusterOutput();
    CSVFileWriter out = new CSVFileWriter("kMeansClustersCSV.csv",
        ',');
    Vector<String> header = new Vector<String>();
    header.add("Document ID");
    header.add("XPath");
    header.add("Cluster label");
    out.writeFields(header);
    for (int i = 0; i < clusters.size(); i++)
    {
      for (int j = 0; j < clusters.get(i).size(); j++)
      {
        DataPoint point = clusters.get(i).get(j);
        String documentId = point.getId().substring(0, point.getId().indexOf("/"));
        String XPath = point.getId().substring(point.getId().indexOf("/"), point.getId().length());
        Vector<String> fields = new Vector<String>();
        fields.add(documentId);
        fields.add(XPath);
        fields.add(i+"");
        out.writeFields(fields);
      }
    }
    out.close();
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.