Package org.eobjects.metamodel.schema

Examples of org.eobjects.metamodel.schema.Table
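
The snippets below come from DataCleaner / AnalyzerBeans code that works with MetaModel's Table type: previewing tables, writing CSV and Excel output, and building analysis jobs. For orientation, here is a minimal, self-contained sketch of the basic pattern they all share, obtaining a Table from a DataContext's default schema and querying its columns; the file name people.csv is made up for illustration.

import java.io.File;

import org.eobjects.metamodel.DataContext;
import org.eobjects.metamodel.DataContextFactory;
import org.eobjects.metamodel.data.DataSet;
import org.eobjects.metamodel.query.Query;
import org.eobjects.metamodel.schema.Table;

public class TableQueryExample {

  public static void main(String[] args) {
    // hypothetical input file; any CSV readable by MetaModel will do
    DataContext dc = DataContextFactory.createCsvDataContext(new File("people.csv"));

    // a CSV data context exposes one table per file in its default schema
    Table table = dc.getDefaultSchema().getTables()[0];

    // select all columns of the table and iterate the resulting data set
    Query q = dc.query().from(table).select(table.getColumns()).toQuery();
    DataSet dataSet = dc.executeQuery(q);
    try {
      while (dataSet.next()) {
        System.out.println(dataSet.getRow());
      }
    } finally {
      dataSet.close();
    }
  }
}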


    if (tables.length != 1) {
      throw new IllegalStateException("Transformer is expected to contain columns originating from 1 table, found "
          + tables.length);
    }

    final Table table = tables[0];

    // build a preview query over the physical source columns, limited to DEFAULT_PREVIEW_ROWS rows
    final DataContextProvider dataContextProvider = _analysisJobBuilder.getDataContextProvider();
    final DataContext dc = dataContextProvider.getDataContext();
    final Query q = dc.query().from(table).select(physicalColumns.toArray(new Column[physicalColumns.size()])).toQuery();
    q.setMaxRows(DEFAULT_PREVIEW_ROWS);


  }

  @Override
  public Table getPreviewTable(Datastore datastore) {
    DataContextProvider dcp = datastore.getDataContextProvider();
    // use the first table of the datastore's default schema as the preview table
    Table table = dcp.getDataContext().getDefaultSchema().getTables()[0];
    dcp.close();
    return table;
  }

            }
            tableBuilder.execute();
          }
        });

        Table table = dataContext.getDefaultSchema().getTables()[0];

        // first writer for this file: register the data context and start a usage counter
        dataContexts.put(filename, dataContext);
        counters.put(filename, new AtomicInteger(1));
        outputWriter = new CsvOutputWriter(dataContext, filename, table, columns);

        // write the headers
      } else {
        // reuse the existing data context for this file and bump its usage counter
        Table table = dataContext.getDefaultSchema().getTables()[0];
        outputWriter = new CsvOutputWriter(dataContext, filename, table, columns);
        counters.get(filename).incrementAndGet();
      }
    }

    // Schemas are likely cached, and the table is likely newly created, so refresh the
    // schemas before looking the table up by name.
    dc.refreshSchemas();

    Table table = dc.getDefaultSchema().getTableByName(_tableName);
    dcp.close();
    return table;
  }

  }

  @Override
  public Table getPreviewTable(Datastore datastore) {
    DataContextProvider dcp = datastore.getDataContextProvider();
    // each sheet of the Excel workbook is exposed as a table, so look the sheet up by name
    Table table = dcp.getDataContext().getDefaultSchema().getTableByName(_sheetName);
    dcp.close();
    return table;
  }

      if (dataContext == null) {

        // first writer for this file: open it as an Excel data context
        File file = new File(filename);
        dataContext = new ExcelDataContext(file);

        Table table = getTable(dataContext, sheetName, columns);

        dataContexts.put(filename, dataContext);
        counters.put(filename, new AtomicInteger(1));
        outputWriter = new ExcelOutputWriter(dataContext, filename, table, columns);

        // write the headers
      } else {
        // reuse the existing data context for this file and bump its usage counter
        Table table = getTable(dataContext, sheetName, columns);
        outputWriter = new ExcelOutputWriter(dataContext, filename, table, columns);
        counters.get(filename).incrementAndGet();
      }
    }

    return outputWriter;
  }

  private static Table getTable(UpdateableDataContext dataContext, final String sheetName, final InputColumn<?>[] columns) {
    final Schema schema = dataContext.getDefaultSchema();
    Table table = schema.getTableByName(sheetName);
    if (table == null) {
      // the sheet/table does not exist yet; create it within an update script
      final MutableRef<Table> tableRef = new MutableRef<Table>();
      dataContext.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback callback) {
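The getTable(...) helper above is shown only up to the start of its UpdateScript. A rough, self-contained sketch of the same create-if-missing pattern, assuming MetaModel's table-creation builder and the MutableRef utility used above, could look like the following; the id and name columns are purely illustrative and not taken from the original helper, which derives its columns from the InputColumn array.

import org.eobjects.metamodel.UpdateCallback;
import org.eobjects.metamodel.UpdateScript;
import org.eobjects.metamodel.UpdateableDataContext;
import org.eobjects.metamodel.schema.ColumnType;
import org.eobjects.metamodel.schema.Schema;
import org.eobjects.metamodel.schema.Table;
import org.eobjects.metamodel.util.MutableRef;

public class TableCreationSketch {

  public static Table getOrCreateTable(final UpdateableDataContext dataContext, final String tableName) {
    final Schema schema = dataContext.getDefaultSchema();
    Table table = schema.getTableByName(tableName);
    if (table == null) {
      final MutableRef<Table> tableRef = new MutableRef<Table>();
      dataContext.executeUpdate(new UpdateScript() {
        @Override
        public void run(UpdateCallback callback) {
          // illustrative columns only; the original helper derives them from its InputColumn[] argument
          Table created = callback.createTable(schema, tableName)
              .withColumn("id").ofType(ColumnType.INTEGER)
              .withColumn("name").ofType(ColumnType.VARCHAR)
              .execute();
          tableRef.set(created);
        }
      });
      table = tableRef.get();
    }
    return table;
  }
}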

    scenarioHelper.writeExampleData(writer);
    writer.close();
   
    // read the written file back with MetaModel and select all columns of its single table
    DataContext dc = DataContextFactory.createCsvDataContext(new File(filename));
    Table table = dc.getDefaultSchema().getTables()[0];
    Query q = dc.query().from(table).select(table.getColumns()).toQuery();
    DataSet dataSet = dc.executeQuery(q);
   
    scenarioHelper.performAssertions(dataSet, false);
  }

        assertEquals("my datastore", datastore.getName());

        DataContextProvider dcp = datastore.getDataContextProvider();
        DataContext dc = dcp.getDataContext();

        // query all columns of the datastore's single table and assert on the resulting data set
        Table table = dc.getDefaultSchema().getTables()[0];
        Query q = dc.query().from(table).select(table.getColumns()).toQuery();
        DataSet dataSet = dc.executeQuery(q);

        scenarioHelper.performAssertions(dataSet, true);

        dcp.close();

    final AnalyzerBeansConfiguration conf = new JaxbConfigurationReader().create(new File(DataCleanerHome.get(),
        "conf.xml"));
    final AnalysisJobBuilder ajb = new AnalysisJobBuilder(conf);
    Datastore ds = conf.getDatastoreCatalog().getDatastore("orderdb");
    DataContextProvider dcp = ds.getDataContextProvider();
    // resolve the CUSTOMERS table via the schema navigator and use its literal columns as source columns
    Table table = dcp.getSchemaNavigator().convertToTable("PUBLIC.CUSTOMERS");
    ajb.setDatastore(ds);
    ajb.addSourceColumns(table.getLiteralColumns());
    ajb.addRowProcessingAnalyzer(PatternFinderAnalyzer.class).addInputColumns(ajb.getSourceColumns())
        .setName("Ungrouped pattern finders");

    final RowProcessingAnalyzerJobBuilder<PatternFinderAnalyzer> groupedPatternFinder = ajb.addRowProcessingAnalyzer(
        PatternFinderAnalyzer.class).setName("Grouped PF");
