Package org.apache.cassandra.db

Examples of org.apache.cassandra.db.IColumn
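IColumn is the interface implemented by Column, ExpiringColumn and SuperColumn; the excerpts below exercise its name(), value(), timestamp(), getSubColumns() and related accessors. As a minimal, hypothetical sketch of the same API (the ColumnDumper class below is not from the Cassandra codebase; it only uses accessors that appear in the excerpts on this page):

import java.nio.ByteBuffer;

import org.apache.cassandra.db.ColumnFamily;
import org.apache.cassandra.db.IColumn;
import org.apache.cassandra.utils.ByteBufferUtil;

// Hypothetical helper: dump every column of an in-memory ColumnFamily.
public final class ColumnDumper
{
    public static void dump(ColumnFamily cf)
    {
        for (IColumn column : cf.getSortedColumns())
        {
            ByteBuffer name = column.name();
            ByteBuffer value = column.value();
            System.out.println(ByteBufferUtil.bytesToHex(name)
                               + " = " + ByteBufferUtil.bytesToHex(value)
                               + " @ " + column.timestamp());
        }
    }
}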


            totalRead++;
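            // Convert the Thrift KeySlice into a sorted map of IColumns keyed by column name, paired with the row key.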
            KeySlice ks = rows.get(i++);
            SortedMap<ByteBuffer, IColumn> map = new TreeMap<ByteBuffer, IColumn>(comparator);
            for (ColumnOrSuperColumn cosc : ks.columns)
            {
                IColumn column = unthriftify(cosc);
                map.put(column.name(), column);
            }
            return new Pair<ByteBuffer, SortedMap<ByteBuffer, IColumn>>(ks.key, map);

        }


                while (true)
                {
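                    // Return the next column of the current row as a single-entry sorted map;
                    // when the row is exhausted, fall back to the next row or signal endOfData().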
                    if (columns.hasNext())
                    {
                        ColumnOrSuperColumn cosc = columns.next();
                        IColumn column = unthriftify(cosc);
                        ImmutableSortedMap<ByteBuffer, IColumn> map = ImmutableSortedMap.of(column.name(), column);
                        return Pair.<ByteBuffer, SortedMap<ByteBuffer, IColumn>>create(currentRow.key, map);
                    }

                    if (!rows.hasNext())
                        return endOfData();

    {
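        // Return the empty column list for a null or empty ColumnFamily; when only subcolumns are
        // requested, thriftify the subcolumns of the first (super) column instead.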
        if (cf == null || cf.getColumnsMap().size() == 0)
            return EMPTY_COLUMNS;
        if (subcolumnsOnly)
        {
            IColumn column = cf.getColumnsMap().values().iterator().next();
            Collection<IColumn> subcolumns = column.getSubColumns();
            if (subcolumns == null || subcolumns.isEmpty())
                return EMPTY_COLUMNS;
            else
                return thriftifyColumns(subcolumns, reverseOrder);
        }

        Iterator<IColumn> iter = cols.iterator();
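        // Print each column as [hex name, hex value, timestamp, isMarkedForDelete];
        // ExpiringColumns also get their TTL and local deletion time.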
        while (iter.hasNext())
        {
            outs.print("[");
            IColumn column = iter.next();
            outs.print(quote(bytesToHex(column.name())));
            outs.print(", ");
            outs.print(quote(bytesToHex(column.value())));
            outs.print(", ");
            outs.print(column.timestamp());
            outs.print(", ");
            outs.print(column.isMarkedForDelete());
            if (column instanceof ExpiringColumn)
            {
                outs.print(", ");
                outs.print(((ExpiringColumn) column).getTimeToLive());
                outs.print(", ");
                outs.print(column.getLocalDeletionTime());
            }
            outs.print("]");
            if (iter.hasNext())
                outs.print(", ");
        }

            outs.print("{ ");
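            // For each (super) column, print its name, its deletion timestamp and its serialized subcolumns.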

            Iterator<IColumn> iter = cf.getSortedColumns().iterator();
            while (iter.hasNext())
            {
                IColumn column = iter.next();
                outs.print(asKey(bytesToHex(column.name())));
                outs.print("{");
                outs.print(asKey("deletedAt"));
                outs.print(column.getMarkedForDeleteAt());
                outs.print(", ");
                outs.print(asKey("subColumns"));
                serializeColumns(outs, column.getSubColumns(), comparator);
                outs.print("}");
                if (iter.hasNext())
                    outs.print(", ");
            }
           

            sourceColumn = ByteBuffer.wrap(context.getConfiguration().get(CONF_COLUMN_NAME).getBytes());
        }

        public void map(ByteBuffer key, SortedMap<ByteBuffer, IColumn> columns, Context context) throws IOException, InterruptedException
        {
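            // Look up the configured source column for this row; skip rows that lack it,
            // otherwise decode the value as a String and tokenize it.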
            IColumn column = columns.get(sourceColumn);
            if (column == null)
                return;
            String value = ByteBufferUtil.string(column.value());
            logger.debug("read " + key + ":" + value + " from " + context.getInputSplit());

            StringTokenizer itr = new StringTokenizer(value);
            while (itr.hasMoreTokens())
            {

            // columns
            int columns = in1.readInt();
            assert columns == in2.readInt();
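            // Deserialize the columns pairwise from both streams and assert they are equal.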
            for (int i = 0; i < columns; i++)
            {
                IColumn c1 = cf1.getColumnSerializer().deserialize(in1);
                IColumn c2 = cf2.getColumnSerializer().deserialize(in2);
                assert c1.equals(c2);
            }
            // that should be everything
            assert in1.available() == 0;
            assert in2.available() == 0;

    {
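        // Deserialize each column in turn, keeping only those whose names were requested,
        // and stop early once enough matches have been added.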
        int columns = file.readInt();
        int n = 0;
        for (int i = 0; i < columns; i++)
        {
            IColumn column = cf.getColumnSerializer().deserialize(file);
            if (columnNames.contains(column.name()))
            {
                cf.addColumn(column);
                if (n++ > filteredColumnNames.size())
                    break;
            }

            long curOffset = file.skipBytes((int) indexInfo.offset);
            assert curOffset == indexInfo.offset;
            // TODO only completely deserialize columns we are interested in
            while (file.bytesPastMark(mark) < indexInfo.offset + indexInfo.width)
            {
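                // Deserialize every column in this index block, keeping only those whose names were requested.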
                IColumn column = cf.getColumnSerializer().deserialize(file);
                // we check vs the original Set, not the filtered List, for efficiency
                if (columnNames.contains(column.name()))
                {
                    cf.addColumn(column);
                }
            }
        }

    protected IColumn computeNext()
    {
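        // AbstractIterator-style computeNext(): return the next buffered column that passes the filter,
        // fetching another block when the buffer runs dry.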
        while (true)
        {
            IColumn column = blockColumns.poll();
            if (column != null && isColumnNeeded(column))
                return column;
            try
            {
                if (column == null && !fetcher.getNextBlock())


