Package org.geotools.data.shapefile.dbf

Examples of org.geotools.data.shapefile.dbf.DbaseFileHeader$DbaseField

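All of the excerpts below revolve around DbaseFileHeader, whose addColumn and getField* methods manage the per-column DbaseField entries of a DBF file. As a quick orientation, here is a minimal, self-contained sketch of that API, assuming the GeoTools shapefile module is on the classpath; the column names and the output file name scratch.dbf are made up for illustration.

import java.io.FileOutputStream;
import java.nio.channels.WritableByteChannel;
import org.geotools.data.shapefile.dbf.DbaseFileHeader;

public class DbaseHeaderSketch {
    public static void main(String[] args) throws Exception {
        // each addColumn call adds one DbaseField entry to the header
        DbaseFileHeader header = new DbaseFileHeader();
        header.addColumn("name", 'C', 20, 0);   // character field, 20 chars
        header.addColumn("count", 'N', 9, 0);   // numeric field, integer-sized
        header.addColumn("ratio", 'N', 20, 5);  // numeric field, 5 decimal places

        // inspect the per-field metadata the header now carries
        for (int i = 0; i < header.getNumFields(); i++) {
            System.out.println(header.getFieldName(i) + " -> "
                    + header.getFieldClass(i).getSimpleName()
                    + ", length " + header.getFieldLength(i));
        }

        // write just the header to an otherwise empty DBF file
        header.setNumRecords(0);
        FileOutputStream fos = new FileOutputStream("scratch.dbf");
        WritableByteChannel channel = fos.getChannel();
        try {
            header.writeHeader(channel);
        } finally {
            channel.close();
        }
    }
}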

  // Body of the getAttributeNames(dbfFile) helper used in the next excerpt:
  // it returns the column names declared in the DBF header.
  {
    FileInputStream fis = new FileInputStream(dbfFile);
    DbaseFileReader dbfReader = new DbaseFileReader(fis.getChannel(), false, Charset.forName("ISO-8859-1"));

    // the header describes the columns
    DbaseFileHeader dbfHeader = dbfReader.getHeader();

    // collect the field names from the header
    List<String> names = new Vector<String>();
    int n = dbfHeader.getNumFields();
    for (int i = 0; i < n; i++)
      names.add(dbfHeader.getFieldName(i));

    // close the reader (this also closes the underlying channel)
    dbfReader.close();
    return names;
  }


    List<String> allFields = getAttributeNames(dbfFile);
    FileInputStream fis = new FileInputStream(dbfFile);
    DbaseFileReader dbfReader = new DbaseFileReader(fis.getChannel(), false, Charset.forName("ISO-8859-1"));
   
    //contains the header columns
    DbaseFileHeader dbfHeader = dbfReader.getHeader();
   
    List<Object[]> rowsList = new Vector<Object[]>();
   

    while(dbfReader.hasNext())
    {
      Object[] row;
      if (fieldNames != null)
      {
        dbfReader.read();
        row = new Object[fieldNames.length];
        for (int i = 0; i < fieldNames.length; i++)
        {
          int index = allFields.indexOf(fieldNames[i]);
          if (index < 0)
            row[i] = "";
          else
            row[i] = dbfReader.readField(index);
        }
      }
      else
      {
        row = dbfReader.readEntry();
      }
      rowsList.add(row);
    }
   
   
   
    int numOfCol = dbfHeader.getNumFields();
   
    Object[][] dataRows = new Object[rowsList.size()][numOfCol];
   
    for(int i=0; i < rowsList.size();i++)
    {

    public void testNulls() throws IOException {
        File tmp = File.createTempFile("test", ".dbf");
        if (!tmp.delete()) {
            throw new IllegalStateException("Unable to clear temp file");
        }
        DbaseFileHeader header = new DbaseFileHeader();
        for (int i = 0; i < types.length; i++) {
            header.addColumn(""+types[i], types[i], sizes[i], decimals[i]);
        }
        header.setNumRecords(values.length);
        FileOutputStream fos = new FileOutputStream(tmp);
        WritableByteChannel channel = fos.getChannel();
        tmp.deleteOnExit();
        DbaseFileWriter writer = new DbaseFileWriter(header, channel, cs, tz);
        // write records such that the i-th row has nulls in every column except the i-th column

            writer.close();
            assert !shpChannel.isOpen();
            assert !shxChannel.isOpen();
        }

        DbaseFileHeader dbfheader = createDbaseHeader(featureType);

        dbfheader.setNumRecords(0);

        WritableByteChannel dbfChannel = dbfStoragefile.getWriteChannel();

        try {
            dbfheader.writeHeader(dbfChannel);
        } finally {
            dbfChannel.close();
        }

        if (crs != null) {

     * @throws DbaseFileException if a column cannot be added to the header
     */
    protected static DbaseFileHeader createDbaseHeader(SimpleFeatureType featureType)
            throws IOException, DbaseFileException {

        DbaseFileHeader header = new DbaseFileHeader();

        for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
            AttributeDescriptor type = featureType.getDescriptor(i);

            Class<?> colType = type.getType().getBinding();
            String colName = type.getLocalName();

            int fieldLen = FeatureTypes.getFieldLength(type);
            if (fieldLen == FeatureTypes.ANY_LENGTH)
                fieldLen = 255;
            if ((colType == Integer.class) || (colType == Short.class) || (colType == Byte.class)) {
                header.addColumn(colName, 'N', Math.min(fieldLen, 9), 0);
            } else if (colType == Long.class) {
                header.addColumn(colName, 'N', Math.min(fieldLen, 19), 0);
            } else if (colType == BigInteger.class) {
                header.addColumn(colName, 'N', Math.min(fieldLen, 33), 0);
            } else if (Number.class.isAssignableFrom(colType)) {
                int l = Math.min(fieldLen, 33);
                int d = Math.max(l - 2, 0);
                header.addColumn(colName, 'N', l, d);
                // This check has to come before the Date one or it is never reached
                // also, this field is only activated with the following system property:
                // org.geotools.shapefile.datetime=true
            } else if (java.util.Date.class.isAssignableFrom(colType)
                    && Boolean.getBoolean("org.geotools.shapefile.datetime")) {
                header.addColumn(colName, '@', fieldLen, 0);
            } else if (java.util.Date.class.isAssignableFrom(colType)
                    || Calendar.class.isAssignableFrom(colType)) {
                header.addColumn(colName, 'D', fieldLen, 0);
            } else if (colType == Boolean.class) {
                header.addColumn(colName, 'L', 1, 0);
            } else if (CharSequence.class.isAssignableFrom(colType) || colType == java.util.UUID.class) {
                // Possible fix for GEOT-42 : ArcExplorer doesn't like 0 length
                // ensure that maxLength is at least 1
                header.addColumn(colName, 'C', Math.min(254, fieldLen), 0);
            } else if (Geometry.class.isAssignableFrom(colType)) {
                continue;
            //skip binary data types
            } else if (colType == byte[].class) {
                continue;
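For orientation, when the attributes carry no explicit length restriction (so fieldLen falls back to 255), the branches above turn an Integer into 'N' with length 9, a Double into 'N' with length 33 and 31 decimals, and a String into 'C' capped at 254, while geometry and byte[] columns are skipped. Below is a hedged sketch that builds the equivalent header by hand, using made-up column names (addColumn may log warnings about the wide numeric field).

import org.geotools.data.shapefile.dbf.DbaseFileHeader;

// Builds by hand the header that the createDbaseHeader(...) excerpt above would
// produce for a schema such as "the_geom:Point,name:String,age:Integer,price:Double"
// (illustrative names; the lengths follow the branches shown above).
public class MappingResultSketch {
    public static void main(String[] args) throws Exception {
        DbaseFileHeader header = new DbaseFileHeader();
        header.addColumn("name", 'C', 254, 0);  // String -> 'C', capped at 254 characters
        header.addColumn("age", 'N', 9, 0);     // Integer -> 'N', at most 9 digits
        header.addColumn("price", 'N', 33, 31); // Double (generic Number) -> 'N', 33 wide, 31 decimals
        // the Point geometry column is skipped by the helper above

        System.out.println("fields: " + header.getNumFields()
                + ", record length: " + header.getRecordLength());
    }
}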

        dbf2.close();
    }

    @Test
    public void testHeader() throws Exception {
        DbaseFileHeader header = new DbaseFileHeader();

        Level before = LOGGER.getLevel();
        try {
            LOGGER.setLevel(Level.INFO);

            header.addColumn("emptyString", 'C', 20, 0);
            header.addColumn("emptyInt", 'N', 20, 0);
            header.addColumn("emptyDouble", 'N', 20, 5);
            header.addColumn("emptyFloat", 'F', 20, 5);
            header.addColumn("emptyLogical", 'L', 1, 0);
            header.addColumn("emptyDate", 'D', 20, 0);
            int length = header.getRecordLength();
            header.removeColumn("emptyDate");
            assertTrue(length != header.getRecordLength());
            header.addColumn("emptyDate", 'D', 20, 0);
            assertTrue(length == header.getRecordLength());
            header.removeColumn("billy");
            assertTrue(length == header.getRecordLength());
        } finally {
            LOGGER.setLevel(before);
        }
    }

        }
    }

    @Test
    public void testAddColumn() throws Exception {
        DbaseFileHeader header = new DbaseFileHeader();

        Level before = LOGGER.getLevel();
        try {
            LOGGER.setLevel(Level.INFO);

            header.addColumn("emptyInt", 'N', 9, 0);
            assertSame(Integer.class, header.getFieldClass(0));
            assertEquals(9, header.getFieldLength(0));

            header.addColumn("emptyString", 'C', 20, 0);
            assertSame(String.class, header.getFieldClass(1));
            assertEquals(20, header.getFieldLength(1));
        } finally {
            LOGGER.setLevel(before);
        }
    }

        }
    }

    @Test
    public void testEmptyFields() throws Exception {
        DbaseFileHeader header = new DbaseFileHeader();
        header.addColumn("emptyString", 'C', 20, 0);
        header.addColumn("emptyInt", 'N', 20, 0);
        header.addColumn("emptyDouble", 'N', 20, 5);
        header.addColumn("emptyFloat", 'F', 20, 5);
        header.addColumn("emptyLogical", 'L', 1, 0);
        header.addColumn("emptyDate", 'D', 20, 0);
        header.setNumRecords(20);
        File f = new File(System.getProperty("java.io.tmpdir"),
                "scratchDBF.dbf");
        f.deleteOnExit();
        FileOutputStream fout = new FileOutputStream(f);
        DbaseFileWriter dbf = new DbaseFileWriter(header, fout.getChannel(), Charset.defaultCharset());
        for (int i = 0; i < header.getNumRecords(); i++) {
            dbf.write(new Object[6]);
        }
        dbf.close();
        ShpFiles tempShpFiles = new ShpFiles(f);
        DbaseFileReader r = new DbaseFileReader(tempShpFiles, false,

        if (dbf != null) {
            // build the list of dbf indexes we have to read taking into consideration the
            // duplicated dbf field names issue
            List<AttributeDescriptor> atts = schema.getAttributeDescriptors();
            dbfindexes = new int[atts.size()];
            DbaseFileHeader head = dbf.getHeader();
            for (int i = 0; i < atts.size(); i++) {
                AttributeDescriptor att = atts.get(i);
                if (att instanceof GeometryDescriptor) {
                    dbfindexes[i] = -1;
                } else {
                    String attName = att.getLocalName();
                    int count = 0;
                    Map<Object, Object> userData = att.getUserData();
                    if (userData.get(ShapefileDataStore.ORIGINAL_FIELD_NAME) != null) {
                        attName = (String) userData.get(ShapefileDataStore.ORIGINAL_FIELD_NAME);
                        count = (Integer) userData
                                .get(ShapefileDataStore.ORIGINAL_FIELD_DUPLICITY_COUNT);
                    }

                    boolean found = false;
                    for (int j = 0; j < head.getNumFields(); j++) {
                        if (head.getFieldName(j).equals(attName) && count-- <= 0) {
                            dbfindexes[i] = j;
                            found = true;
                            break;
                        }
                    }

            usedNames.add(BasicFeatureTypes.GEOMETRY_ATTRIBUTE_NAME);

            // take care of the case where no dbf and query wants all =>
            // geometry only
            if (dbf != null) {
                DbaseFileHeader header = dbf.getHeader();
                for (int i = 0, ii = header.getNumFields(); i < ii; i++) {
                    Class attributeClass = header.getFieldClass(i);
                    String name = header.getFieldName(i);
                    if (usedNames.contains(name)) {
                        String original = name;
                        int count = 1;
                        name = name + count;
                        while (usedNames.contains(name)) {
                            count++;
                            name = original + count;
                        }
                        build.addUserData(ShapefileDataStore.ORIGINAL_FIELD_NAME, original);
                        build.addUserData(ShapefileDataStore.ORIGINAL_FIELD_DUPLICITY_COUNT, count);
                    }
                    usedNames.add(name);
                    int length = header.getFieldLength(i);

                    build.setNillable(true);
                    build.setLength(length);
                    build.setBinding(attributeClass);
                    attributes.add(build.buildDescriptor(name));
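These last two excerpts cooperate: the one directly above renames duplicated DBF column names while the schema is built (two columns both called NAME become NAME and NAME1) and records the original name plus a duplicity count in the attribute's user data, and the earlier excerpt uses that user data to locate the matching DBF column index again when reading. Here is a minimal, GeoTools-free sketch of just the renaming scheme, with invented column names.

import java.util.HashSet;
import java.util.Set;

public class UniqueNameSketch {
    public static void main(String[] args) {
        // pretend these came from DbaseFileHeader.getFieldName(i)
        String[] dbfNames = {"NAME", "VALUE", "NAME", "NAME"};

        Set<String> usedNames = new HashSet<String>();
        for (String original : dbfNames) {
            String name = original;
            int count = 0;
            // same scheme as the excerpt above: append an increasing counter until unique
            while (usedNames.contains(name)) {
                count++;
                name = original + count;
            }
            usedNames.add(name);
            // prints: NAME -> NAME, VALUE -> VALUE, NAME -> NAME1, NAME -> NAME2
            System.out.println(original + " -> " + name + " (duplicity count " + count + ")");
        }
    }
}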
