Package org.eobjects.metamodel

Examples of org.eobjects.metamodel.DataContext
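The snippets below are fragments from larger classes and will not compile on their own. For orientation, here is a minimal, self-contained sketch of the typical DataContext query pattern; the file name and the printed column are placeholders, not values taken from the snippets:

  import java.io.File;

  import org.eobjects.metamodel.DataContext;
  import org.eobjects.metamodel.DataContextFactory;
  import org.eobjects.metamodel.data.DataSet;
  import org.eobjects.metamodel.data.Row;
  import org.eobjects.metamodel.query.Query;
  import org.eobjects.metamodel.schema.Column;
  import org.eobjects.metamodel.schema.Table;

  public class DataContextQueryExample {

    public static void main(String[] args) {
      // the file name is a placeholder - any CSV file with a header row will do
      DataContext dc = DataContextFactory.createCsvDataContext(new File("example.csv"));

      // navigate the schema model: default schema -> first table -> all columns
      Table table = dc.getDefaultSchema().getTables()[0];
      Column[] columns = table.getColumns();

      // build a SELECT query with the fluent query builder and execute it
      Query q = dc.query().from(table).select(columns).toQuery();
      DataSet dataSet = dc.executeQuery(q);
      try {
        while (dataSet.next()) {
          Row row = dataSet.getRow();
          System.out.println(row.getValue(columns[0]));
        }
      } finally {
        dataSet.close();
      }
    }
  }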


    }

    final Table table = tables[0];

    final DataContextProvider dataContextProvider = _analysisJobBuilder.getDataContextProvider();
    final DataContext dc = dataContextProvider.getDataContext();
    final Query q = dc.query().from(table).select(physicalColumns.toArray(new Column[physicalColumns.size()])).toQuery();
    q.setMaxRows(DEFAULT_PREVIEW_ROWS);

    for (TransformerJobBuilder<?> tjb : transformerJobs) {
      initialize(tjb);
    }

    // getting the output columns can be an expensive call, so we do it
    // up front instead of once per row.
    final Map<TransformerJobBuilder<?>, List<MutableInputColumn<?>>> outputColumns = new LinkedHashMap<TransformerJobBuilder<?>, List<MutableInputColumn<?>>>();
    for (TransformerJobBuilder<?> tjb : transformerJobs) {
      List<MutableInputColumn<?>> cols = tjb.getOutputColumns();
      outputColumns.put(tjb, cols);
    }

    final List<InputRow> result = new ArrayList<InputRow>();
    final DataSet dataSet = dc.executeQuery(q);
    int rowNumber = 0;
    while (dataSet.next()) {
      Row row = dataSet.getRow();
      InputRow inputRow = new MetaModelInputRow(rowNumber, row);


          cols.add(col.getPhysicalColumn());
        }
      }
      columns = cols.toArray(new Column[cols.size()]);
    }
    DataContext dc = _dataContextProvider.getDataContext();
    Query q = dc.query().from(columns[0].getTable()).select(columns).toQuery();

    DataSetWindow window = new DataSetWindow(q, dc, PAGE_SIZE, _windowContext);
    window.setVisible(true);
  }

  }

  @Override
  public Table getPreviewTable(Datastore datastore) {
    DataContextProvider dcp = datastore.getDataContextProvider();
    DataContext dc = dcp.getDataContext();

    // Schemas are likely cached, and since the table is probably newly
    // created, we refresh the schemas before looking it up.
    dc.refreshSchemas();

    Table table = dc.getDefaultSchema().getTableByName(_tableName);
    dcp.close();
    return table;
  }

    // make table name safe
    tableName = DatastoreOutputUtils.safeName(tableName);

    synchronized (DatastoreOutputWriter.class) {
      final DataContext dc = DataContextFactory.createJdbcDataContext(_connection);
      dc.refreshSchemas();
      final String[] tableNames = dc.getDefaultSchema().getTableNames();

      if (truncateExisting) {
        _tableName = tableName;

        for (String existingTableName : tableNames) {
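The fragment above is cut off inside the loop over the existing table names. A minimal, self-contained sketch of the same JDBC pattern; the H2 in-memory URL is only a placeholder, any JDBC connection will do:

  import java.sql.Connection;
  import java.sql.DriverManager;

  import org.eobjects.metamodel.DataContext;
  import org.eobjects.metamodel.DataContextFactory;

  public class JdbcTableNamesExample {

    public static void main(String[] args) throws Exception {
      // the JDBC URL is a placeholder
      Connection connection = DriverManager.getConnection("jdbc:h2:mem:example");
      try {
        DataContext dc = DataContextFactory.createJdbcDataContext(connection);

        // schemas may be cached, so refresh before reading the table names
        dc.refreshSchemas();

        for (String tableName : dc.getDefaultSchema().getTableNames()) {
          System.out.println(tableName);
        }
      } finally {
        connection.close();
      }
    }
  }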

    OutputWriter writer = CsvOutputWriterFactory.getWriter(filename, scenarioHelper.getColumns());

    scenarioHelper.writeExampleData(writer);
    writer.close();
   
    DataContext dc = DataContextFactory.createCsvDataContext(new File(filename));
    Table table = dc.getDefaultSchema().getTables()[0];
    Query q = dc.query().from(table).select(table.getColumns()).toQuery();
    DataSet dataSet = dc.executeQuery(q);
   
    scenarioHelper.performAssertions(dataSet, false);
  }

    }
    assertEquals(9, datastoreCount.get());

    assertNotNull(_datastore);
    DataContextProvider dataContextProvider = _datastore.getDataContextProvider();
    DataContext dc = dataContextProvider.getDataContext();
    dc.refreshSchemas();
    String[] tableNames = dc.getDefaultSchema().getTableNames();
    Arrays.sort(tableNames);

    assertEquals("[TAB_1, TAB_2, TAB_3, TAB_4, TAB_5, TAB_6, TAB_7, TAB_8, TAB_9]", Arrays.toString(tableNames));
  }

      public void createDatastore(Datastore datastore) {
        _datastoreCreated = true;
        assertEquals("my datastore", datastore.getName());

        DataContextProvider dcp = datastore.getDataContextProvider();
        DataContext dc = dcp.getDataContext();

        Table table = dc.getDefaultSchema().getTables()[0];
        Query q = dc.query().from(table).select(table.getColumns()).toQuery();
        DataSet dataSet = dc.executeQuery(q);

        scenarioHelper.performAssertions(dataSet, true);

        dcp.close();
      }

    final URL url = resourceManager.getUrl("licenses/dependency-licenses.csv");
    if (url == null) {
      throw new IllegalStateException("Could not find dependencies file");
    }
    try {
      DataContext dc = DataContextFactory.createCsvDataContext(url.openStream(), ',', '"', false);
      Table table = dc.getDefaultSchema().getTables()[0];
      Column projectColumn = table.getColumnByName("Project");
      Column websiteColumn = table.getColumnByName("Website");
      Column licenseColumn = table.getColumnByName("License");
      Query q = dc.query().from(table).select(table.getColumns()).orderBy(projectColumn).asc().toQuery();
      DataSet ds = dc.executeQuery(q);
      while (ds.next()) {
        final LicensedProject licensedProject = new LicensedProject();
        final Row row = ds.getRow();
        final String licenseName = row.getValue(licenseColumn).toString();
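A condensed, self-contained sketch of the same approach, reading a CSV resource from an InputStream and ordering the result; the file name and column name echo the snippet above, and the factory overload (separator, quote character, final flag) is the same one it uses:

  import java.io.FileInputStream;
  import java.io.InputStream;

  import org.eobjects.metamodel.DataContext;
  import org.eobjects.metamodel.DataContextFactory;
  import org.eobjects.metamodel.data.DataSet;
  import org.eobjects.metamodel.query.Query;
  import org.eobjects.metamodel.schema.Column;
  import org.eobjects.metamodel.schema.Table;

  public class CsvStreamQueryExample {

    public static void main(String[] args) throws Exception {
      // the file name is a placeholder; arguments mirror the snippet above
      InputStream in = new FileInputStream("dependency-licenses.csv");
      DataContext dc = DataContextFactory.createCsvDataContext(in, ',', '"', false);

      Table table = dc.getDefaultSchema().getTables()[0];
      Column projectColumn = table.getColumnByName("Project");

      // select all columns, ordered ascending by the "Project" column
      Query q = dc.query().from(table).select(table.getColumns()).orderBy(projectColumn).asc().toQuery();

      DataSet ds = dc.executeQuery(q);
      try {
        while (ds.next()) {
          System.out.println(ds.getRow().getValue(projectColumn));
        }
      } finally {
        ds.close();
      }
    }
  }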

      logger.info("Not displaying preview table because isPreviewDataAvailable() returned false");
      return null;
    }
    D datastore = getPreviewDatastore(filename);
    DataContextProvider dcp = datastore.getDataContextProvider();
    DataContext dc = dcp.getDataContext();
    Table table = getPreviewTable(dc);
    Column[] columns = table.getColumns();
    if (columns.length > getPreviewColumns()) {
      // limit the preview to at most getPreviewColumns() columns
      columns = Arrays.copyOf(columns, getPreviewColumns());
    }
    Query q = dc.query().from(table).select(columns).toQuery();
    q.setMaxRows(7);

    DataSet dataSet = dc.executeQuery(q);

    dcp.close();

    return dataSet;
  }
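The preview method above caps both the number of columns and the number of rows. A stand-alone sketch of that capping pattern, assuming a CSV datastore; the limits (10 columns, 7 rows) are illustrative:

  import java.io.File;
  import java.util.Arrays;

  import org.eobjects.metamodel.DataContext;
  import org.eobjects.metamodel.DataContextFactory;
  import org.eobjects.metamodel.data.DataSet;
  import org.eobjects.metamodel.query.Query;
  import org.eobjects.metamodel.schema.Column;
  import org.eobjects.metamodel.schema.Table;

  public class PreviewDataSetExample {

    // illustrative limits
    private static final int MAX_PREVIEW_COLUMNS = 10;
    private static final int MAX_PREVIEW_ROWS = 7;

    public static DataSet preview(File csvFile) {
      DataContext dc = DataContextFactory.createCsvDataContext(csvFile);
      Table table = dc.getDefaultSchema().getTables()[0];

      // cap the number of columns included in the preview
      Column[] columns = table.getColumns();
      if (columns.length > MAX_PREVIEW_COLUMNS) {
        columns = Arrays.copyOf(columns, MAX_PREVIEW_COLUMNS);
      }

      // cap the number of rows by setting a max-rows clause on the query
      Query q = dc.query().from(table).select(columns).toQuery();
      q.setMaxRows(MAX_PREVIEW_ROWS);

      return dc.executeQuery(q);
    }
  }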
