Examples of BlockMetaData


Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

    public Configuration createConfiguration( final Object entry )
        throws Exception
    {
        final BlockMetaData metaData = getMetaDataFor( entry );
        final String name = metaData.getName();
        try
        {
            return m_context.getConfiguration( name );
        }
        catch( final ConfigurationException ce )

Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

        throws AssemblyException
    {
        final ArrayList blockSet = new ArrayList();
        for( int i = 0; i < blocks.length; i++ )
        {
            final BlockMetaData blockMetaData =
                buildBlock( blocks[ i ], classLoader );
            blockSet.add( blockMetaData );
        }

        return (BlockMetaData[])blockSet.toArray( new BlockMetaData[ 0 ] );

Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

            final DependencyMetaData[] roles = buildDependencyMetaDatas( provides );
            final BlockInfo info = getBlockInfo( name, classname, classLoader );


            return new BlockMetaData( name, roles, info );
        }
        catch( final ConfigurationException ce )
        {
            final String message =
                REZ.getString( "block-entry-malformed", block.getLocation(), ce.getMessage() );

Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

    private void verifyNoCircularDependencies( final BlockMetaData[] blocks )
        throws VerifyException
    {
        for( int i = 0; i < blocks.length; i++ )
        {
            final BlockMetaData block = blocks[ i ];

            final Stack stack = new Stack();
            stack.push( block );
            verifyNoCircularDependencies( block, blocks, stack );
            stack.pop();

Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

    {
        final BlockMetaData[] dependencies = getDependencies( block, blocks );

        for( int i = 0; i < dependencies.length; i++ )
        {
            final BlockMetaData dependency = dependencies[ i ];
            if( stack.contains( dependency ) )
            {
                final String trace = getDependencyTrace( dependency, stack );
                final String message =
                    REZ.getString( "dependency-circular", block.getName(), trace );

Examples of org.apache.avalon.phoenix.metadata.BlockMetaData

        final String name = block.getName();
        final int size = stack.size();
        final int top = size - 1;
        for( int i = top; i >= 0; i-- )
        {
            final BlockMetaData other = (BlockMetaData)stack.get( i );
            if( top != i )
            {
                sb.append( ", " );
            }
            sb.append( other.getName() );

            if( other.getName().equals( name ) )
            {
                break;
            }
        }
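The three Phoenix snippets above implement a stack-based, depth-first check for circular dependencies between blocks, plus a helper that renders the offending cycle as a readable trace. The sketch below shows the same technique on a plain name-to-dependencies map; the class and method names (DependencyGraph, verify) are illustrative only and are not part of the Phoenix API.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;

// Illustrative stack-based cycle detection, mirroring the
// verifyNoCircularDependencies()/getDependencyTrace() logic above.
public class DependencyGraph
{
    private final Map<String, List<String>> m_dependencies =
        new HashMap<String, List<String>>();

    public void addBlock( final String name, final String... dependsOn )
    {
        m_dependencies.put( name, Arrays.asList( dependsOn ) );
    }

    public void verifyNoCircularDependencies()
    {
        for( final String block : m_dependencies.keySet() )
        {
            final Stack<String> stack = new Stack<String>();
            stack.push( block );
            verify( block, stack );
            stack.pop();
        }
    }

    private void verify( final String block, final Stack<String> stack )
    {
        final List<String> dependencies = m_dependencies.get( block );
        if( null == dependencies )
        {
            return;
        }
        for( final String dependency : dependencies )
        {
            if( stack.contains( dependency ) )
            {
                // The stack holds the path from the starting block to the cycle,
                // which is what getDependencyTrace() formats into a message above.
                throw new IllegalStateException(
                    "Circular dependency: " + stack + " -> " + dependency );
            }
            stack.push( dependency );
            verify( dependency, stack );
            stack.pop();
        }
    }
}

For a graph such as a -> b, b -> a, the exception message lists the path [a, b] plus the repeating block a, analogous to the trace built above.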

Examples of parquet.hadoop.metadata.BlockMetaData

      CodecFactoryExposer codecFactoryExposer = new CodecFactoryExposer(conf);
      FileSystem fs = FileSystem.get(conf);
      Path filePath = new Path(entry.getPath());

      BlockMetaData blockMetaData = footer.getBlocks().get(entry.getRowGroupIndex());

      recordCount = (int) blockMetaData.getRowCount();

      ColumnChunkIncReadStore pageReadStore = new ColumnChunkIncReadStore(recordCount,
              codecFactoryExposer.getCodecFactory(), fs, filePath);

      for (String[] path : schema.getPaths()) {
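In parquet-mr, a BlockMetaData describes one row group of a file and is carried in the footer, which is where the snippet above gets it from. Below is a minimal sketch of reading a footer and walking its row groups; ParquetFileReader.readFooter(conf, path) is assumed from the pre-Apache parquet.hadoop API, while the getters are the ones used in these snippets.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import parquet.hadoop.ParquetFileReader;
import parquet.hadoop.metadata.BlockMetaData;
import parquet.hadoop.metadata.ColumnChunkMetaData;
import parquet.hadoop.metadata.ParquetMetadata;

public class RowGroupDump {

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path(args[0]);

    // Read only the footer; it carries one BlockMetaData per row group.
    ParquetMetadata footer = ParquetFileReader.readFooter(conf, file);

    for (BlockMetaData block : footer.getBlocks()) {
      System.out.println("row group: " + block.getRowCount() + " rows, "
          + block.getTotalByteSize() + " bytes");
      // Each row group is made up of one column chunk per column.
      for (ColumnChunkMetaData column : block.getColumns()) {
        System.out.println("  " + column.getPath() + " -> "
            + column.getTotalSize() + " bytes");
      }
    }
  }
}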

Examples of parquet.hadoop.metadata.BlockMetaData

    writeKeyValues(out, extraMetadata);
    writeKeyValues(out, readSupportMetadata);
  }

  private BlockMetaData readBlock(DataInput in) throws IOException {
    final BlockMetaData block = new BlockMetaData();
    int size = in.readInt();
    for (int i = 0; i < size; i++) {
      block.addColumn(readColumn(in));
    }
    block.setRowCount(in.readLong());
    block.setTotalByteSize(in.readLong());
    if (!in.readBoolean()) {
      block.setPath(in.readUTF().intern());
    }
    return block;
  }
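readBlock above rebuilds a BlockMetaData from a DataInput in a fixed order: column count, the columns themselves, row count, total byte size, and an optional path guarded by a boolean flag. A symmetric writer would emit the fields in the same order. The sketch below is only an illustration of that wire format: writeColumn is a hypothetical mirror of the readColumn used above, and it assumes BlockMetaData exposes a getPath() matching the setPath() call in readBlock.

  private void writeBlock(DataOutput out, BlockMetaData block) throws IOException {
    // Same field order that readBlock consumes above.
    List<ColumnChunkMetaData> columns = block.getColumns();
    out.writeInt(columns.size());
    for (ColumnChunkMetaData column : columns) {
      writeColumn(out, column); // hypothetical counterpart of readColumn
    }
    out.writeLong(block.getRowCount());
    out.writeLong(block.getTotalByteSize());
    String path = block.getPath();
    out.writeBoolean(path == null); // readBlock only reads the path when this is false
    if (path != null) {
      out.writeUTF(path);
    }
  }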

Examples of parquet.hadoop.metadata.BlockMetaData

   */
  public PageReadStore readNextRowGroup() throws IOException {
    if (currentBlock == blocks.size()) {
      return null;
    }
    BlockMetaData block = blocks.get(currentBlock);
    if (block.getRowCount() == 0) {
      throw new RuntimeException("Illegal row group of 0 rows");
    }
    ColumnChunkPageReadStore columnChunkPageReadStore = new ColumnChunkPageReadStore(block.getRowCount());
    // prepare the list of consecutive chunks to read them in one scan
    List<ConsecutiveChunkList> allChunks = new ArrayList<ConsecutiveChunkList>();
    ConsecutiveChunkList currentChunks = null;
    for (ColumnChunkMetaData mc : block.getColumns()) {
      ColumnPath pathKey = mc.getPath();
      BenchmarkCounter.incrementTotalBytes(mc.getTotalSize());
      ColumnDescriptor columnDescriptor = paths.get(pathKey);
      if (columnDescriptor != null) {
        long startingPos = getStartingPos(mc);
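readNextRowGroup() above assembles a PageReadStore for one row group at a time and returns null once every BlockMetaData in the footer has been consumed, so a typical caller loops until that null. In the sketch below, the ParquetFileReader constructor taking the footer's blocks and the schema's columns is an assumption about the old parquet-mr API, not something shown in these snippets.

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import parquet.column.ColumnDescriptor;
import parquet.column.page.PageReadStore;
import parquet.hadoop.ParquetFileReader;
import parquet.hadoop.metadata.BlockMetaData;
import parquet.hadoop.metadata.ParquetMetadata;
import parquet.schema.MessageType;

public class RowGroupScan {

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path(args[0]);

    ParquetMetadata footer = ParquetFileReader.readFooter(conf, file);
    MessageType schema = footer.getFileMetaData().getSchema();
    List<BlockMetaData> blocks = footer.getBlocks();
    List<ColumnDescriptor> columns = schema.getColumns();

    // Assumed constructor: (conf, path, row groups to read, columns to project).
    ParquetFileReader reader = new ParquetFileReader(conf, file, blocks, columns);
    try {
      PageReadStore rowGroup;
      while ((rowGroup = reader.readNextRowGroup()) != null) {
        // One PageReadStore per row group; its row count matches BlockMetaData.getRowCount().
        System.out.println("read row group with " + rowGroup.getRowCount() + " rows");
      }
    } finally {
      reader.close();
    }
  }
}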

Examples of parquet.hadoop.metadata.BlockMetaData

   */
  public void startBlock(long recordCount) throws IOException {
    state = state.startBlock();
    if (DEBUG) LOG.debug(out.getPos() + ": start block");
//    out.write(MAGIC); // TODO: add a magic delimiter
    currentBlock = new BlockMetaData();
    currentRecordCount = recordCount;
  }