Package org.geotools.coverage.grid.io

Examples of org.geotools.coverage.grid.io.GranuleSource


           
            //
            // Case A: input reader is a StructuredGridCoverage2DReader. We can get granules from a source
            //
            // Getting granule source and its input granules
            final GranuleSource source = ((StructuredGridCoverage2DReader) inputReader).getGranules(coverageName, true);
            final SimpleFeatureCollection originCollection = source.getGranules(null);
            final DefaultProgressListener listener = new DefaultProgressListener();
           
            // Getting the attribute structure to be filled
            final Collection<Property> destProps = feature.getProperties();
            final Set<Name> destAttributes = new HashSet<Name>();
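A GranuleSource gives read access to the granule index of a structured reader, exposing each granule as a SimpleFeature. The fragment above stops right after obtaining the source; below is a minimal, self-contained sketch of the same pattern, assuming a structured reader (e.g. an ImageMosaicReader) and a granule schema with a "location" attribute. Import locations match the GeoTools series these snippets were taken from.

import org.geotools.coverage.grid.io.GranuleSource;
import org.geotools.coverage.grid.io.StructuredGridCoverage2DReader;
import org.geotools.data.Query;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.opengis.feature.simple.SimpleFeature;

public class GranuleSourceBasics {

    // Lists the "location" attribute of every granule in the given coverage.
    // Assumes the granule schema exposes a "location" attribute, as in the
    // snippets on this page.
    static void printGranuleLocations(StructuredGridCoverage2DReader reader, String coverageName)
            throws java.io.IOException {
        GranuleSource source = reader.getGranules(coverageName, true); // true = read-only view
        SimpleFeatureCollection granules = source.getGranules(Query.ALL);
        SimpleFeatureIterator it = granules.features();
        try {
            while (it.hasNext()) {
                SimpleFeature granule = it.next();
                System.out.println(granule.getAttribute("location"));
            }
        } finally {
            it.close(); // feature iterators hold resources and must always be closed
        }
    }
}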


            assertEquals("true", reader.getMetadataValue("HAS_TIME_DOMAIN"));
            assertEquals("2008-11-01T00:00:00.000Z/2008-11-04T00:00:00.000Z/PT1S", reader.getMetadataValue(metadataNames[0]));
           
            // check the granule catalog
            String coverageName = reader.getGridCoverageNames()[0];
            GranuleSource granules = reader.getGranules(coverageName, true);
            assertEquals(1, granules.getCount(Query.ALL));
            Query q = new Query(Query.ALL);
            SimpleFeatureIterator fi = granules.getGranules(q).features();
            try {
                assertTrue(fi.hasNext());
                SimpleFeature f = fi.next();
                String expected = "../singleHarvest1/temp_020_099_20081101T000000_20081104T000000.tiff".replace('/', File.separatorChar);
                assertEquals(expected, f.getAttribute("location"));
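The try block above is cut off before its matching finally. A hedged sketch of the complete pattern, adding an illustrative CQL filter on the "location" attribute (the helper name and the filter are assumptions, not part of the test above):

import org.geotools.coverage.grid.io.GranuleSource;
import org.geotools.data.Query;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.filter.text.cql2.CQL;
import org.opengis.feature.simple.SimpleFeature;

public class GranuleCatalogQuery {

    // Returns the first granule whose "location" equals the given file name,
    // or null if the catalog has no such granule. The attribute name follows
    // the snippets on this page; adjust it to your own mosaic index.
    static SimpleFeature findGranuleByLocation(GranuleSource granules, String fileName)
            throws Exception {
        Query q = new Query(Query.ALL);
        q.setFilter(CQL.toFilter("location = '" + fileName + "'"));
        SimpleFeatureIterator fi = granules.getGranules(q).features();
        try {
            return fi.hasNext() ? fi.next() : null;
        } finally {
            fi.close(); // close even on the early return above
        }
    }
}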

    public void granuleSourceTest() throws Exception {
   
        final AbstractGridFormat format = TestUtils.getFormat(timeAdditionalDomainsRangeURL);
        ImageMosaicReader reader = TestUtils.getReader(timeAdditionalDomainsRangeURL, format);
   
        GranuleSource source = ((StructuredGridCoverage2DReader)reader).getGranules("time_domainsRanges", true);
        final int granules = source.getCount(null);
        final SimpleFeatureType type = source.getSchema();
        assertEquals("SimpleFeatureTypeImpl time_domainsRanges identified extends polygonFeature(the_geom:MultiPolygon,location:location,time:time,endtime:endtime,date:date,lowz:lowz,highz:highz,loww:loww,highw:highw)",type.toString());
        assertEquals(12, granules);
       
        final String[] metadataNames = reader.getMetadataNames();
        assertNotNull(metadataNames);
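Instead of comparing the whole type against its toString() output, the same schema can be inspected attribute by attribute. A small sketch, independent of any concrete mosaic:

import org.geotools.coverage.grid.io.GranuleSource;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

public class GranuleSchemaInspector {

    // Prints the structure of the granule index: one line per attribute with its binding.
    // Extra dimensions (time, elevation, custom domains) show up as extra attributes.
    static void describe(GranuleSource source) throws java.io.IOException {
        SimpleFeatureType schema = source.getSchema();
        System.out.println("Granule type: " + schema.getTypeName());
        for (AttributeDescriptor ad : schema.getAttributeDescriptors()) {
            System.out.println("  " + ad.getLocalName() + " : "
                    + ad.getType().getBinding().getSimpleName());
        }
        // null query = count everything, as in the fragment above
        System.out.println("Granule count: " + source.getCount(null));
    }
}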

            assertEquals("true", reader.getMetadataValue("HAS_TIME_DOMAIN"));
            assertEquals("2004-02-01T00:00:00.000Z,2004-05-01T00:00:00.000Z", reader.getMetadataValue(metadataNames[0]));
           
            // check the granule catalog
            String coverageName = reader.getGridCoverageNames()[0];
            GranuleSource granules = reader.getGranules(coverageName, true);
            assertEquals(2, granules.getCount(Query.ALL));
            Query q = new Query(Query.ALL);
            SimpleFeatureIterator fi = granules.getGranules(q).features();
            try {
                assertTrue(fi.hasNext());
                SimpleFeature f = fi.next();
                assertEquals("world.200402.3x5400x2700.tiff", f.getAttribute("location"));
                assertEquals("2004-02-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
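The metadata keys checked above ("HAS_TIME_DOMAIN" and the time domain value) are also available as constants. A hedged sketch, assuming a GeoTools series where GridCoverage2DReader declares HAS_TIME_DOMAIN and TIME_DOMAIN:

import org.geotools.coverage.grid.io.GridCoverage2DReader;

public class TimeDomainMetadata {

    // Prints the time domain advertised by the reader, using the same metadata
    // keys the assertions above check.
    static void printTimeDomain(GridCoverage2DReader reader) throws java.io.IOException {
        if ("true".equalsIgnoreCase(reader.getMetadataValue(GridCoverage2DReader.HAS_TIME_DOMAIN))) {
            System.out.println("Time domain: "
                    + reader.getMetadataValue(GridCoverage2DReader.TIME_DOMAIN));
        } else {
            System.out.println("This coverage has no time dimension");
        }
    }
}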

            assertEquals(renamed.getCanonicalFile(), ((File) hf.getSource()).getCanonicalFile());
            assertTrue(hf.success());

            // check the granule catalog
            String coverageName = reader.getGridCoverageNames()[0];
            GranuleSource granules = reader.getGranules(coverageName, true);
            assertEquals(2, granules.getCount(Query.ALL));
            Query q = new Query(Query.ALL);
            SimpleFeatureIterator fi = granules.getGranules(q).features();
            try {
                assertTrue(fi.hasNext());
                SimpleFeature f = fi.next();
                assertEquals("passA2006128194218.tiff", f.getAttribute("location"));
                f = fi.next();
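The harvest checks above assume success. A hedged sketch of the same call with explicit error reporting (the helper name is illustrative; a null target coverage lets the reader decide where the new granules belong, as in most of the tests on this page):

import java.io.File;
import java.util.List;
import org.geotools.coverage.grid.io.HarvestedSource;
import org.geotools.coverage.grid.io.StructuredGridCoverage2DReader;

public class HarvestHelper {

    // Harvests a single file into the mosaic and reports the outcome of each
    // harvested source.
    static void harvestFile(StructuredGridCoverage2DReader reader, File file)
            throws java.io.IOException {
        List<HarvestedSource> summary = reader.harvest(null, file, null);
        for (HarvestedSource hs : summary) {
            if (hs.success()) {
                System.out.println("Harvested: " + hs.getSource());
            } else {
                System.out.println("Failed to harvest " + hs.getSource() + ": " + hs.getMessage());
            }
        }
    }
}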

            assertEquals("true", reader.getMetadataValue("HAS_TIME_DOMAIN"));
            assertEquals("2004-02-01T00:00:00.000Z,2004-05-01T00:00:00.000Z", reader.getMetadataValue(metadataNames[0]));

            // check the granule catalog
            String coverageName = reader.getGridCoverageNames()[0];
            GranuleSource granules = reader.getGranules(coverageName, true);
            assertEquals(2, granules.getCount(Query.ALL));
            Query q = new Query(Query.ALL);
            SimpleFeatureIterator fi = granules.getGranules(q).features();
            try {
                assertTrue(fi.hasNext());
                SimpleFeature f = fi.next();
                assertEquals("world.200402.3x5400x2700.tiff", f.getAttribute("location"));
                assertEquals("2004-02-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
View Full Code Here

            String[] names = reader.getGridCoverageNames();
            assertEquals(1, names.length);
            assertEquals("O3", names[0]);
           
            // check we have the two granules we expect
            GranuleSource source = reader.getGranules("O3", true);
            FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
            Query q = new Query(Query.ALL);
            q.setSortBy(new SortBy[] {ff.sort("time", SortOrder.ASCENDING)});
            SimpleFeatureCollection granules = source.getGranules(q);
            assertEquals(2, granules.size());
            it = granules.features();
            assertTrue(it.hasNext());
            SimpleFeature f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            assertTrue(it.hasNext());
            f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(1, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            it.close();
           
            // now add another netcdf and harvest it
            File nc2 = TestData.file(this,"polyphemus_20130302_test.nc");
            FileUtils.copyFileToDirectory(nc2, mosaic);
            File fileToHarvest = new File(mosaic, "polyphemus_20130302_test.nc");
            List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null);
            assertEquals(1, harvestSummary.size());
            HarvestedSource hf = harvestSummary.get(0);
            assertEquals("polyphemus_20130302_test.nc", ((File) hf.getSource()).getName());
            assertTrue(hf.success());
            assertEquals(1, reader.getGridCoverageNames().length);
           
            // check that we have four times now
            granules = source.getGranules(q);
            assertEquals(4, granules.size());
            it = granules.features();
            f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
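The sorted query used above can be isolated into a small helper. A sketch that builds a Query ordered on an arbitrary attribute (the CommonFactoryFinder import matches the GeoTools series of these snippets; newer releases moved it to org.geotools.util.factory):

import org.geotools.data.Query;
import org.geotools.factory.CommonFactoryFinder;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.sort.SortBy;
import org.opengis.filter.sort.SortOrder;

public class SortedGranuleQuery {

    // Builds a Query that returns every granule sorted on the given attribute;
    // sortedBy("time", SortOrder.ASCENDING) reproduces the ordering used above.
    static Query sortedBy(String attribute, SortOrder order) {
        FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
        Query q = new Query(Query.ALL);
        q.setSortBy(new SortBy[] { ff.sort(attribute, order) });
        return q;
    }
}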

            String[] names = reader.getGridCoverageNames();
            assertEquals(1, names.length);
            assertEquals("O3", names[0]);
           
            // check we have the two granules we expect
            GranuleSource source = reader.getGranules("O3", true);
            FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
            Query q = new Query(Query.ALL);
            q.setSortBy(new SortBy[] {ff.sort("time", SortOrder.ASCENDING)});
            SimpleFeatureCollection granules = source.getGranules(q);
            assertEquals(2, granules.size());
            it = granules.features();
            assertTrue(it.hasNext());
            SimpleFeature f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            assertTrue(it.hasNext());
            f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(1, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            it.close();
           
            // close the reader and re-open it
            reader.dispose();
            reader = format.getReader(mosaic);
            source = reader.getGranules("O3", true);
           
            // wait a bit, we have to make sure the old indexes are recognized as old
            Thread.sleep(1000);
           
            // now replace the netcdf file with a more up to date version of the same
            File nc2 = TestData.file(this,"polyphemus_20130301_test_more_times.nc");
            File target = new File(mosaic, "polyphemus_20130301_test.nc");
            FileUtils.copyFile(nc2, target, false);
            File fileToHarvest = new File(mosaic, "polyphemus_20130301_test.nc");
            List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null);
            assertEquals(1, harvestSummary.size());
            HarvestedSource hf = harvestSummary.get(0);
            assertEquals("polyphemus_20130301_test.nc", ((File) hf.getSource()).getName());
            assertTrue(hf.success());
            assertEquals(1, reader.getGridCoverageNames().length);
           
            // check that we have four times now
            source = reader.getGranules("O3", true);
            granules = source.getGranules(q);
            assertEquals(4, granules.size());
            it = granules.features();
            f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
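After reader.dispose() the old GranuleSource must not be reused, which is why source is re-acquired from the reopened reader above. A hedged sketch of that dispose-and-reopen step in isolation (format, mosaic file and coverage name are placeholders):

import java.io.File;
import org.geotools.coverage.grid.io.AbstractGridFormat;
import org.geotools.coverage.grid.io.GranuleSource;
import org.geotools.coverage.grid.io.StructuredGridCoverage2DReader;

public class ReopenReader {

    // Disposes the current reader, reopens the same mosaic and returns a fresh
    // GranuleSource for the given coverage.
    static GranuleSource reopen(StructuredGridCoverage2DReader oldReader,
            AbstractGridFormat format, File mosaic, String coverageName)
            throws java.io.IOException {
        oldReader.dispose();
        StructuredGridCoverage2DReader reader =
                (StructuredGridCoverage2DReader) format.getReader(mosaic);
        return reader.getGranules(coverageName, true);
    }
}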

            String[] names = reader.getGridCoverageNames();
            assertEquals(1, names.length);
            assertEquals("O3", names[0]);
           
            // check we have the two granules we expect
            GranuleSource source = reader.getGranules("O3", true);
            FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
            Query q = new Query(Query.ALL);
            q.setSortBy(new SortBy[] {ff.sort("time", SortOrder.ASCENDING)});
            SimpleFeatureCollection granules = source.getGranules(q);
            assertEquals(2, granules.size());
            it = granules.features();
            assertTrue(it.hasNext());
            SimpleFeature f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            assertTrue(it.hasNext());
            f = it.next();
            assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location"));
            assertEquals(1, f.getAttribute("imageindex"));
            assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class));
            it.close();
           
            // now add another netcdf and harvest it
            File nc2 = TestData.file(this,"polyphemus_20130301_NO2.nc");
            FileUtils.copyFileToDirectory(nc2, mosaic);
            File fileToHarvest = new File(mosaic, "polyphemus_20130301_NO2.nc");
            List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null);
            assertEquals(1, harvestSummary.size());
            HarvestedSource hf = harvestSummary.get(0);
            assertEquals("polyphemus_20130301_NO2.nc", ((File) hf.getSource()).getName());
            assertTrue(hf.success());
            // check we have two coverages now
            names = reader.getGridCoverageNames();
            Arrays.sort(names);
            assertEquals(2, names.length);
            assertEquals("NO2", names[0]);
            assertEquals("O3", names[1]);
           
            // test the newly ingested granules, which are in a separate coverage
            q.setTypeName("NO2");
            granules = source.getGranules(q);
            assertEquals(2, granules.size());
            it = granules.features();
            f = it.next();
            assertEquals("polyphemus_20130301_NO2.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
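Once a harvest introduces a second coverage, each coverage has its own granule type, as the q.setTypeName("NO2") call above shows. A hedged sketch that counts the granules indexed for every coverage of a structured reader:

import org.geotools.coverage.grid.io.GranuleSource;
import org.geotools.coverage.grid.io.StructuredGridCoverage2DReader;
import org.geotools.data.Query;

public class PerCoverageGranuleCount {

    // Prints how many granules are indexed for each coverage of the reader.
    static void printCounts(StructuredGridCoverage2DReader reader) throws java.io.IOException {
        for (String coverageName : reader.getGridCoverageNames()) {
            GranuleSource source = reader.getGranules(coverageName, true);
            System.out.println(coverageName + ": " + source.getCount(Query.ALL) + " granules");
        }
    }
}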

            String[] names = reader.getGridCoverageNames();
            assertEquals(1, names.length);
            assertEquals("NO2", names[0]);

            // check we have the one granule we expect
            GranuleSource source = reader.getGranules("NO2", true);
            FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
            Query q = new Query(Query.ALL);
            q.setSortBy(new SortBy[] { ff.sort("time", SortOrder.DESCENDING) });
            SimpleFeatureCollection granules = source.getGranules(q);
            assertEquals(1, granules.size());
            it = granules.features();
            assertTrue(it.hasNext());
            SimpleFeature f = it.next();
            assertEquals("20130101.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
            assertEquals("2013-01-01T00:00:00.000Z",
                    ConvertersHack.convert(f.getAttribute("time"), String.class));
            it.close();

            // now add another netcdf and harvest it
            File nc2 =  TestData.file(this,"20130116.METOPA.GOME2.NO2.DUMMY.nc");
            FileUtils.copyFileToDirectory(nc2, mosaic);
            File fileToHarvest = new File(mosaic, "20130116.METOPA.GOME2.NO2.DUMMY.nc");
            List<HarvestedSource> harvestSummary = reader.harvest("NO2", fileToHarvest, null);
            assertEquals(1, harvestSummary.size());
            granules = source.getGranules(q);
            assertEquals(2, granules.size());
           
            HarvestedSource hf = harvestSummary.get(0);
            assertEquals("20130116.METOPA.GOME2.NO2.DUMMY.nc", ((File) hf.getSource()).getName());
            assertTrue(hf.success());
            assertEquals(1, reader.getGridCoverageNames().length);

            File nc3 =  TestData.file(this,"20130108.METOPA.GOME2.NO2.DUMMY.nc");
            FileUtils.copyFileToDirectory(nc3, mosaic);
            fileToHarvest = new File(mosaic, "20130108.METOPA.GOME2.NO2.DUMMY.nc");
            harvestSummary = reader.harvest("NO2", fileToHarvest, null);
            assertEquals(1, harvestSummary.size());
            hf = harvestSummary.get(0);
            assertEquals("20130108.METOPA.GOME2.NO2.DUMMY.nc", ((File) hf.getSource()).getName());
            assertTrue(hf.success());
            assertEquals(1, reader.getGridCoverageNames().length);

            // check that we have 3 granules now
            granules = source.getGranules(q);
            assertEquals(3, granules.size());
            it = granules.features();
            f = it.next();
            assertEquals("20130116.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location"));
            assertEquals(0, f.getAttribute("imageindex"));
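Unlike the earlier tests, this one harvests into an explicitly named target coverage. A hedged sketch of that sequence, copying the new granule file into the mosaic directory first (the helper and its parameters are illustrative):

import java.io.File;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.geotools.coverage.grid.io.HarvestedSource;
import org.geotools.coverage.grid.io.StructuredGridCoverage2DReader;

public class HarvestIntoCoverage {

    // Copies a new granule file into the mosaic directory and harvests it into
    // the named target coverage, returning true only if the harvest succeeded.
    static boolean harvestInto(StructuredGridCoverage2DReader reader, File mosaicDir,
            File newGranule, String targetCoverage) throws java.io.IOException {
        FileUtils.copyFileToDirectory(newGranule, mosaicDir);
        File toHarvest = new File(mosaicDir, newGranule.getName());
        List<HarvestedSource> summary = reader.harvest(targetCoverage, toHarvest, null);
        return summary.size() == 1 && summary.get(0).success();
    }
}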
