@doanduyhai
Created April 24, 2016 18:15
commit a1cd27e406b2671fd43600ab3bed060197348844
Author: DuyHai DOAN <doanduyhai@gmail.com>
Date:   Sat Apr 23 22:56:37 2016 +0200

    Debug SASI OnDiskIndex structure
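The patch follows one pattern throughout: each binary serializer (serialize(ByteBuffer), write(DataOutputPlus)) gains a text-mode twin that writes the same fields, human-readable, to a FileOutputStream pointed at /tmp/debug_SASI.txt, flushing after every write, presumably so a crash mid-build still leaves a readable trace. A minimal sketch of that idea, using illustrative names rather than Cassandra's classes:

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

class HeaderSketch
{
    long nodeMinToken = 1L;
    long nodeMaxToken = 42L;

    // Production path: compact binary layout.
    void serialize(ByteBuffer buf)
    {
        buf.putLong(nodeMinToken).putLong(nodeMaxToken);
    }

    // Debug path: the same fields rendered as text, flushed immediately.
    void serialize(FileOutputStream debugFile) throws IOException
    {
        String line = "min token : " + nodeMinToken + ", max token : " + nodeMaxToken + "\n";
        debugFile.write(line.getBytes("UTF-8"));
        debugFile.flush();
    }
}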
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
index 9a1f7f1..d874467 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
@@ -18,11 +18,13 @@
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import java.util.StringJoiner;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.utils.AbstractIterator;
@@ -44,6 +46,19 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
protected long treeMinToken;
protected long treeMaxToken;
+ protected void writeToDebugAndFlush(FileOutputStream debugFile, String string)
+ {
+ try
+ {
+ debugFile.write(string.getBytes("UTF-8"));
+ debugFile.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
+ }
+
public void add(TokenTreeBuilder other)
{
add(other.iterator());
@@ -99,6 +114,33 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
}
}
+ public void write(FileOutputStream bufferedWriter) throws IOException
+ {
+ Iterator<Node> levelIterator = root.levelIterator();
+ long childBlockIndex = 1;
+
+ while (levelIterator != null)
+ {
+ Node firstChild = null;
+ while (levelIterator.hasNext())
+ {
+ Node block = levelIterator.next();
+
+ if (firstChild == null && !block.isLeaf())
+ firstChild = ((InteriorNode) block).children.get(0);
+
+ if (block.isSerializable())
+ {
+ block.serialize(childBlockIndex, bufferedWriter);
+ }
+
+ childBlockIndex += block.childCount();
+ }
+
+ levelIterator = (firstChild == null) ? null : firstChild.levelIterator();
+ }
+ }
+
protected abstract void constructTree();
protected void flushBuffer(ByteBuffer buffer, DataOutputPlus o, boolean align) throws IOException
@@ -126,6 +168,7 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
public abstract boolean isSerializable();
public abstract void serialize(long childBlockIndex, ByteBuffer buf);
+ public abstract void serialize(long childBlockIndex, FileOutputStream bufferedWriter);
public abstract int childCount();
public abstract int tokenCount();
@@ -179,6 +222,23 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
alignBuffer(buf, BLOCK_HEADER_BYTES);
}
+ protected void serializeHeader(FileOutputStream bufferedWriter)
+ {
+ Header header;
+ if (isRoot()) {
+ header = new RootHeader();
+ writeToDebugAndFlush(bufferedWriter, "Root Header -- ");
+ } else if (!isLeaf()) {
+ header = new InteriorNodeHeader();
+ writeToDebugAndFlush(bufferedWriter, "InteriorNode Header -- ");
+ } else {
+ header = new LeafHeader();
+ writeToDebugAndFlush(bufferedWriter, "Leaf Header -- ");
+ }
+
+ header.serialize(bufferedWriter);
+ }
+
private abstract class Header
{
public void serialize(ByteBuffer buf)
@@ -189,6 +249,14 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putLong(nodeMaxToken);
}
+ public void serialize(FileOutputStream bufferedWriter)
+ {
+ writeToDebugAndFlush(bufferedWriter, "Infobyte : " + infoByte() + ", ");
+ writeToDebugAndFlush(bufferedWriter, "tokens count : " + tokenCount() + ", ");
+ writeToDebugAndFlush(bufferedWriter, "min token : " + nodeMinToken + ", ");
+ writeToDebugAndFlush(bufferedWriter, "max token : " + nodeMaxToken + "\n");
+ }
+
protected abstract byte infoByte();
}
@@ -203,6 +271,15 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putLong(treeMaxToken);
}
+ public void serialize(FileOutputStream bufferedWriter)
+ {
+ super.serialize(bufferedWriter);
+// writeMagic(bufferedWriter);
+// writeToDebugAndFlush(bufferedWriter, "tokens count : " + tokenCount() + ", ");
+// writeToDebugAndFlush(bufferedWriter, "min token : " + nodeMinToken + ", ");
+// writeToDebugAndFlush(bufferedWriter, "max token : " + nodeMaxToken + "\n");
+ }
+
protected byte infoByte()
{
// if leaf, set leaf indicator and last leaf indicator (bits 0 & 1)
@@ -223,6 +300,19 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
}
}
+
+ protected void writeMagic(FileOutputStream bufferedWriter)
+ {
+ switch (Descriptor.CURRENT_VERSION)
+ {
+ case Descriptor.VERSION_AB:
+ writeToDebugAndFlush(bufferedWriter, "AB Magic 0x5A51, ");
+ break;
+
+ default:
+ break;
+ }
+ }
}
private class InteriorNodeHeader extends Header
@@ -270,6 +360,18 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
buf.putLong(offset.value);
}
+ protected void serializeOverflowCollisions(FileOutputStream bufferedWriter)
+ {
+ if (overflowCollisions != null)
+ {
+ writeToDebugAndFlush(bufferedWriter, "OverflowCollisions -- ");
+ StringJoiner joiner = new StringJoiner(", ", "Offsets : [", "]\n");
+ for (LongCursor offset : overflowCollisions)
+ joiner.add(offset.value + "");
+ writeToDebugAndFlush(bufferedWriter, joiner.toString());
+ }
+ }
+
public void serialize(long childBlockIndex, ByteBuffer buf)
{
serializeHeader(buf);
@@ -277,7 +379,15 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
serializeOverflowCollisions(buf);
}
+ public void serialize(long childBlockIndex, FileOutputStream bufferedWriter)
+ {
+ serializeHeader(bufferedWriter);
+ serializeData(bufferedWriter);
+ serializeOverflowCollisions(bufferedWriter);
+ }
+
protected abstract void serializeData(ByteBuffer buf);
+ protected abstract void serializeData(FileOutputStream bufferedWriter);
protected LeafEntry createEntry(final long tok, final LongSet offsets)
{
@@ -342,6 +452,13 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putInt(offsetData());
}
+ public void serialize(FileOutputStream bufferedWriter)
+ {
+ writeToDebugAndFlush(bufferedWriter, "\t\tEntryType : " + type().name() + ", ");
+ writeToDebugAndFlush(bufferedWriter, "offset extra : " + offsetExtra() + ", ");
+ writeToDebugAndFlush(bufferedWriter, "token : " + token + ", ");
+ writeToDebugAndFlush(bufferedWriter, "offset data : " + offsetData() + "\n");
+ }
}
@@ -492,6 +609,13 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
serializeChildOffsets(childBlockIndex, buf);
}
+ public void serialize(long childBlockIndex, FileOutputStream bufferedWriter)
+ {
+ serializeHeader(bufferedWriter);
+ serializeTokens(bufferedWriter);
+ serializeChildOffsets(childBlockIndex, bufferedWriter);
+ }
+
public int childCount()
{
return children.size();
@@ -634,11 +758,27 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
tokens.forEach(buf::putLong);
}
+ private void serializeTokens(FileOutputStream bufferedWriter)
+ {
+ StringJoiner joiner = new StringJoiner(", ", "Tokens: [", "]\n");
+ tokens.forEach(token -> joiner.add(token.toString()));
+ writeToDebugAndFlush(bufferedWriter, joiner.toString());
+ }
+
private void serializeChildOffsets(long childBlockIndex, ByteBuffer buf)
{
for (int i = 0; i < children.size(); i++)
buf.putLong((childBlockIndex + i) * BLOCK_BYTES);
}
+
+ private void serializeChildOffsets(long childBlockIndex, FileOutputStream bufferedWriter)
+ {
+ StringJoiner joiner = new StringJoiner(", ", "Child offsets: [", "]\n");
+ for (int i = 0; i < children.size(); i++)
+ joiner.add(((childBlockIndex + i) * BLOCK_BYTES) + "");
+
+ writeToDebugAndFlush(bufferedWriter, joiner.toString());
+ }
}
public static class LevelIterator extends AbstractIterator<Node>
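The debug write(FileOutputStream) above mirrors the binary write(DataOutputPlus): a level-order walk that exhausts one level of the token tree, remembers the first child of the first interior node it saw, and restarts from there. A self-contained sketch of that traversal, with a toy Node in place of Cassandra's (illustrative; assumes levelIterator() yields every node on the current level):

import java.util.Iterator;
import java.util.List;

class LevelWalkSketch
{
    interface Node
    {
        boolean isLeaf();
        List<Node> children();              // only meaningful for interior nodes
        Iterator<Node> levelIterator();     // all nodes on this node's level, left to right
    }

    static void walk(Node root)
    {
        Iterator<Node> level = root.levelIterator();
        while (level != null)
        {
            Node firstChild = null;
            while (level.hasNext())
            {
                Node block = level.next();
                // remember where the next level down starts
                if (firstChild == null && !block.isLeaf())
                    firstChild = block.children().get(0);
                System.out.println(block);  // stand-in for block.serialize(...)
            }
            // descend, or stop once a level contains only leaves
            level = (firstChild == null) ? null : firstChild.levelIterator();
        }
    }
}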
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
index 2ddfd89..e8f1166 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.util.*;
@@ -185,5 +187,25 @@ public class DynamicTokenTreeBuilder extends AbstractTokenTreeBuilder
createEntry(entry.getKey(), entry.getValue()).serialize(buf);
}
+ protected void serializeData(FileOutputStream bufferedWriter)
+ {
+
+ for (Map.Entry<Long, LongSet> entry : tokens.entrySet())
+ {
+// createEntry(entry.getKey(), entry.getValue()).serialize(bufferedWriter);
+ final LongSet longSet = entry.getValue();
+ final LeafEntry leafEntry = createEntry(entry.getKey(), longSet);
+ leafEntry.serialize(bufferedWriter);
+// StringJoiner joiner = new StringJoiner(", ", "[", "]");
+// final Iterator<LongCursor> iterator = longSet.iterator();
+// while (iterator.hasNext())
+// {
+// final LongCursor next = iterator.next();
+// joiner.add("[index : " + next.index + ", value : " + next.value+ "]");
+// }
+
+ //writeToDebugAndFlush(bufferedWriter, "\t\tDynamic Leaf entry [token: " + entry.getKey() + ", offsets: " + joiner.toString() + "] \n");
+ }
+ }
}
}
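The readable output leans heavily on java.util.StringJoiner, which handles the separator, prefix, and suffix so each debug line comes out as one bracketed list. A quick standalone demonstration of the exact construction used above:

import java.util.StringJoiner;

public class JoinerDemo
{
    public static void main(String[] args)
    {
        // Same constructor arguments as the patch: separator, prefix, suffix.
        StringJoiner joiner = new StringJoiner(", ", "Tokens: [", "]\n");
        for (long token : new long[] { 10L, 20L, 30L })
            joiner.add(Long.toString(token));
        System.out.print(joiner);  // prints: Tokens: [10, 20, 30]
    }
}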
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
index 8acbb05..9dec8a0 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
@@ -18,6 +18,7 @@
package org.apache.cassandra.index.sasi.disk;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
@@ -49,6 +50,8 @@ public class OnDiskIndexBuilder
{
private static final Logger logger = LoggerFactory.getLogger(OnDiskIndexBuilder.class);
+ private FileOutputStream DEBUG_FILE;
+
public enum Mode
{
PREFIX(EnumSet.of(Op.EQ, Op.MATCH, Op.PREFIX, Op.NOT_EQ, Op.RANGE)),
@@ -158,6 +161,15 @@ public class OnDiskIndexBuilder
this.termSize = TermSize.sizeOf(comparator);
this.mode = mode;
this.marksPartials = marksPartials;
+
+ try
+ {
+ DEBUG_FILE = new FileOutputStream(new File("/tmp/debug_SASI.txt"), false);
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
}
public OnDiskIndexBuilder add(ByteBuffer term, DecoratedKey key, long keyPosition)
@@ -265,49 +277,74 @@ public class OnDiskIndexBuilder
{
out = new SequentialWriter(file, BLOCK_SIZE, BufferType.ON_HEAP);
+ writeToDebugAndFlush("descriptor : "+ descriptor.version.toString() + "\n");
out.writeUTF(descriptor.version.toString());
+ writeToDebugAndFlush("termSize.size : "+ termSize.size + "\n");
out.writeShort(termSize.size);
// min, max term (useful to find initial scan range from search expressions)
+
+ writeToDebugAndFlush("min term : %s \n",terms.minTerm());
ByteBufferUtil.writeWithShortLength(terms.minTerm(), out);
+
+ writeToDebugAndFlush("max term : %s \n",terms.maxTerm());
ByteBufferUtil.writeWithShortLength(terms.maxTerm(), out);
// min, max keys covered by index (useful when searching across multiple indexes)
+
+ writeToDebugAndFlush("min range : " + keyComparator.getString(range.left) + "\n");
ByteBufferUtil.writeWithShortLength(range.left, out);
+
+ writeToDebugAndFlush("max range : " + keyComparator.getString(range.right) + "\n");
ByteBufferUtil.writeWithShortLength(range.right, out);
+ writeToDebugAndFlush("index mode : "+ mode.toString() + "\n");
out.writeUTF(mode.toString());
- out.writeBoolean(marksPartials);
+
+ writeToDebugAndFlush("marks partials ? "+ marksPartials + "\n");
+ out.writeBoolean(marksPartials);
out.skipBytes((int) (BLOCK_SIZE - out.position()));
- dataLevel = mode == Mode.SPARSE ? new DataBuilderLevel(out, new MutableDataBlock(termComparator, mode))
- : new MutableLevel<>(out, new MutableDataBlock(termComparator, mode));
+ dataLevel = mode == Mode.SPARSE ? new DataBuilderLevel(out, new MutableDataBlock(termComparator, mode, DEBUG_FILE))
+ : new MutableLevel<>(out, new MutableDataBlock(termComparator, mode, DEBUG_FILE));
+ writeToDebugAndFlush("DATA BLOCK \n");
while (terms.hasNext())
{
Pair<IndexedTerm, TokenTreeBuilder> term = terms.next();
addTerm(new InMemoryDataTerm(term.left, term.right), out);
}
+ writeToDebugAndFlush("Final DATA BLOCK \n");
dataLevel.finalFlush();
+
+ writeToDebugAndFlush("POINTERS BLOCKS \n");
for (MutableLevel l : levels)
l.flush(); // flush all of the buffers
// and finally write levels index
final long levelIndexPosition = out.position();
+ writeToDebugAndFlush("Levels count : " + levels.size() + "\n");
out.writeInt(levels.size());
+ writeToDebugAndFlush("-------------- \n");
+ writeToDebugAndFlush("POINTER BLOCKS META \n");
for (int i = levels.size() - 1; i >= 0; i--)
+ {
levels.get(i).flushMetadata();
-
+ }
+ writeToDebugAndFlush("-------------- \n");
+ writeToDebugAndFlush("DATA BLOCKS META \n");
dataLevel.flushMetadata();
+ writeToDebugAndFlush("Index position : " + levelIndexPosition + "\n");
out.writeLong(levelIndexPosition);
// sync contents of the output and disk,
// since it's not done implicitly on close
out.sync();
+ closeDebugFile();
}
catch (IOException e)
{
@@ -322,13 +359,13 @@ public class OnDiskIndexBuilder
private MutableLevel<InMemoryPointerTerm> getIndexLevel(int idx, SequentialWriter out)
{
if (levels.size() == 0)
- levels.add(new MutableLevel<>(out, new MutableBlock<>()));
+ levels.add(new MutableLevel<>(out, new MutableBlock<>(DEBUG_FILE, termComparator)));
if (levels.size() - 1 < idx)
{
int toAdd = idx - (levels.size() - 1);
for (int i = 0; i < toAdd; i++)
- levels.add(new MutableLevel<>(out, new MutableBlock<>()));
+ levels.add(new MutableLevel<>(out, new MutableBlock<>(DEBUG_FILE, termComparator)));
}
return levels.get(idx);
@@ -411,11 +448,28 @@ public class OnDiskIndexBuilder
private final MutableBlock<T> inProcessBlock;
private InMemoryPointerTerm lastTerm;
+ protected FileOutputStream DEBUG_FILE;
+ protected AbstractType<?> termComparator;
public MutableLevel(SequentialWriter out, MutableBlock<T> block)
{
this.out = out;
this.inProcessBlock = block;
+ this.DEBUG_FILE = block.DEBUG_FILE;
+ this.termComparator = block.termComparator;
+ }
+
+ protected void writeToDebugAndFlush(String string)
+ {
+ try
+ {
+ DEBUG_FILE.write(string.getBytes("UTF-8"));
+ DEBUG_FILE.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
}
/**
@@ -450,14 +504,28 @@ public class OnDiskIndexBuilder
public void flushMetadata() throws IOException
{
- flushMetadata(blockOffsets);
+ flushMetadata(blockOffsets, false);
}
- protected void flushMetadata(LongArrayList longArrayList) throws IOException
+ protected void flushMetadata(LongArrayList longArrayList, boolean superBlock) throws IOException
{
+ StringJoiner builder;
+ if(superBlock)
+ {
+ writeToDebugAndFlush("Super Block count : " + longArrayList.size() + ", ");
+ builder = new StringJoiner(", ", "Super Block offsets : [", "]\n");
+ } else {
+ writeToDebugAndFlush("Block count : " + longArrayList.size() + ", ");
+ builder = new StringJoiner(", ", "Block offsets : [", "]\n");
+ }
+
out.writeInt(longArrayList.size());
for (int i = 0; i < longArrayList.size(); i++)
+ {
+ builder.add(longArrayList.get(i) + "");
out.writeLong(longArrayList.get(i));
+ }
+ writeToDebugAndFlush(builder.toString());
}
}
@@ -493,7 +561,10 @@ public class OnDiskIndexBuilder
if (dataBlocksCnt == SUPER_BLOCK_SIZE || (force && !superBlockTree.isEmpty()))
{
superBlockOffsets.add(out.position());
- superBlockTree.finish().write(out);
+ writeToDebugAndFlush("Super TokenTree Block for SPARSE mode \n");
+ final TokenTreeBuilder finish = superBlockTree.finish();
+ finish.write(out);
+ finish.write(DEBUG_FILE);
alignToBlock(out);
dataBlocksCnt = 0;
@@ -510,14 +581,18 @@ public class OnDiskIndexBuilder
public void flushMetadata() throws IOException
{
super.flushMetadata();
- flushMetadata(superBlockOffsets);
+ writeToDebugAndFlush("SPARSE MODE \n");
+ flushMetadata(superBlockOffsets, true);
}
}
private static class MutableBlock<T extends InMemoryTerm>
{
protected final DataOutputBufferFixed buffer;
+ protected StringBuilder debugBuffer = new StringBuilder();
protected final ShortArrayList offsets;
+ public FileOutputStream DEBUG_FILE;
+ public AbstractType<?> termComparator;
public MutableBlock()
{
@@ -525,6 +600,36 @@ public class OnDiskIndexBuilder
offsets = new ShortArrayList();
}
+ public MutableBlock(FileOutputStream bufferedWriter, AbstractType<?> termComparator)
+ {
+ this();
+ this.DEBUG_FILE = bufferedWriter;
+ this.termComparator = termComparator;
+ }
+
+ protected void writeToDebugAndFlush(String string)
+ {
+ try
+ {
+ DEBUG_FILE.write(string.getBytes("UTF-8"));
+ DEBUG_FILE.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
+ }
+
+ protected void writeToDebugBuffer(String string)
+ {
+ debugBuffer.append(string);
+ }
+
+ protected void writeToDebugBuffer(String string, ByteBuffer byteBuffer)
+ {
+ debugBuffer.append(String.format(string, termComparator.getString(byteBuffer)));
+ }
+
public final void add(T term) throws IOException
{
offsets.add((short) buffer.position());
@@ -533,6 +638,7 @@ public class OnDiskIndexBuilder
protected void addInternal(T term) throws IOException
{
+ writeTermToDebugBuffer(term);
term.serialize(buffer);
}
@@ -554,9 +660,16 @@ public class OnDiskIndexBuilder
public void flushAndClear(SequentialWriter out) throws IOException
{
out.writeInt(offsets.size());
- for (int i = 0; i < offsets.size(); i++)
- out.writeShort(offsets.get(i));
+ writeToDebugAndFlush("Term count : " + offsets.size() + ", ");
+ StringJoiner joiner = new StringJoiner(", ", "Offsets [", "]\n");
+ for (int i = 0; i < offsets.size(); i++) {
+ out.writeShort(offsets.get(i));
+ joiner.add(offsets.get(i) + "");
+ }
+ writeToDebugAndFlush(joiner.toString());
+ writeToDebugAndFlush(debugBuffer.toString());
+ debugBuffer = new StringBuilder();
out.write(buffer.buffer());
alignToBlock(out);
@@ -564,6 +677,23 @@ public class OnDiskIndexBuilder
offsets.clear();
buffer.clear();
}
+
+ protected void writeTermToDebugBuffer(InMemoryTerm term)
+ {
+ if(term instanceof InMemoryPointerTerm)
+ {
+ InMemoryPointerTerm pointerTerm = (InMemoryPointerTerm) term;
+ writeToDebugBuffer("Pointer Term (partial ? "+pointerTerm.term.isPartial()+") : %s, ", pointerTerm.term.getBytes());
+ writeToDebugBuffer("Block number : " + pointerTerm.blockCnt + ".\n");
+ } else if(term instanceof InMemoryDataTerm)
+ {
+ writeToDebugBuffer("Data Term (partial ? "+term.term.isPartial()+") : %s. ", term.term.getBytes());
+ } else {
+ writeToDebugBuffer("Normal Term (partial ? "+term.term.isPartial()+"): %s.\n", term.term.getBytes());
+ }
+ }
+
+
}
private static class MutableDataBlock extends MutableBlock<InMemoryDataTerm>
@@ -578,11 +708,13 @@ public class OnDiskIndexBuilder
private final List<TokenTreeBuilder> containers = new ArrayList<>();
private TokenTreeBuilder combinedIndex;
- public MutableDataBlock(AbstractType<?> comparator, Mode mode)
+ public MutableDataBlock(AbstractType<?> comparator, Mode mode, FileOutputStream debugFile)
{
this.comparator = comparator;
this.mode = mode;
this.combinedIndex = initCombinedIndex();
+ super.DEBUG_FILE = debugFile;
+ super.termComparator = comparator;
}
protected void addInternal(InMemoryDataTerm term) throws IOException
@@ -617,17 +749,28 @@ public class OnDiskIndexBuilder
public void flushAndClear(SequentialWriter out) throws IOException
{
super.flushAndClear(out);
-
+ writeToDebugAndFlush("Offset : " + (mode == Mode.SPARSE ? offset : -1) + "\n");
out.writeInt(mode == Mode.SPARSE ? offset : -1);
if (containers.size() > 0)
{
+ writeToDebugAndFlush("TOKEN TREES BLOCK \n");
for (TokenTreeBuilder tokens : containers)
+ {
tokens.write(out);
+ tokens.write(DEBUG_FILE);
+ }
}
+ writeToDebugAndFlush("\n");
+
if (mode == Mode.SPARSE && combinedIndex != null)
- combinedIndex.finish().write(out);
+ {
+ final TokenTreeBuilder finish = combinedIndex.finish();
+ finish.write(out);
+ writeToDebugAndFlush("SPARSE TOKEN TREE BLOCK \n");
+ finish.write(DEBUG_FILE);
+ }
alignToBlock(out);
@@ -647,14 +790,25 @@ public class OnDiskIndexBuilder
private void writeTerm(InMemoryTerm term, TokenTreeBuilder keys) throws IOException
{
term.serialize(buffer);
+ writeToDebugBuffer("SPARSE mode ");
+ writeTermToDebugBuffer(term);
+
+ writeToDebugBuffer("Token count : " + keys.getTokenCount() + ", ");
+ StringJoiner joiner = new StringJoiner(", ", "Tokens [", "]\n");
buffer.writeByte((byte) keys.getTokenCount());
for (Pair<Long, LongSet> key : keys)
+ {
+ joiner.add(key.left + "");
buffer.writeLong(key.left);
+ }
+ writeToDebugBuffer(joiner.toString());
}
private void writeTerm(InMemoryTerm term, int offset) throws IOException
{
term.serialize(buffer);
+ writeTermToDebugBuffer(term);
+ writeToDebugBuffer("0x0, TokenTree offset : " + offset + "\n");
buffer.writeByte(0x0);
buffer.writeInt(offset);
}
@@ -664,4 +818,44 @@ public class OnDiskIndexBuilder
return mode == Mode.SPARSE ? new DynamicTokenTreeBuilder() : null;
}
}
+
+ private void writeToDebugAndFlush(String string)
+ {
+ try
+ {
+ DEBUG_FILE.write(string.getBytes("UTF-8"));
+ DEBUG_FILE.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ closeDebugFile();
+ }
+ }
+
+ private void writeToDebugAndFlush(String string, ByteBuffer byteBuffer)
+ {
+ try
+ {
+ DEBUG_FILE.write(String.format(string, termComparator.getString(byteBuffer)).getBytes("UTF-8"));
+ DEBUG_FILE.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ closeDebugFile();
+ }
+ }
+
+ private void closeDebugFile()
+ {
+ try
+ {
+ DEBUG_FILE.close();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
+ }
}
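OnDiskIndexBuilder opens the debug stream once and threads the same FileOutputStream through MutableLevel, MutableBlock, and MutableDataBlock by constructor injection, so all debug lines land in one file in write order. A stripped-down sketch of that wiring (illustrative names, not Cassandra's):

import java.io.FileOutputStream;
import java.io.IOException;

class BuilderSketch
{
    private final FileOutputStream debugFile;

    BuilderSketch(String path) throws IOException
    {
        // false = truncate, so each build starts with a fresh trace
        this.debugFile = new FileOutputStream(path, false);
    }

    BlockSketch newBlock()
    {
        return new BlockSketch(debugFile);  // share the sink, never reopen it
    }

    static final class BlockSketch
    {
        private final FileOutputStream debugFile;

        BlockSketch(FileOutputStream debugFile)
        {
            this.debugFile = debugFile;
        }

        void debug(String line) throws IOException
        {
            debugFile.write(line.getBytes("UTF-8"));
            debugFile.flush();  // flush per line, matching the patch's behavior
        }
    }
}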
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
index 7a41b38..d265856 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
@@ -197,6 +199,10 @@ public class StaticTokenTreeBuilder extends AbstractTokenTreeBuilder
{
throw new UnsupportedOperationException();
}
+ public void serializeData(FileOutputStream bufferedWriter)
+ {
+ throw new UnsupportedOperationException();
+ }
public boolean isSerializable()
{
@@ -244,6 +250,15 @@ public class StaticTokenTreeBuilder extends AbstractTokenTreeBuilder
}
}
+ public void serializeData(FileOutputStream bufferedWriter)
+ {
+ while (tokens.hasNext())
+ {
+ Token entry = tokens.next();
+ createEntry(entry.get(), entry.getOffsets()).serialize(bufferedWriter);
+ }
+ }
+
public boolean isSerializable()
{
return true;
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
index 2210964..d78dd0a 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
@@ -73,4 +75,5 @@ public interface TokenTreeBuilder extends Iterable<Pair<Long, LongSet>>
int serializedSize();
void write(DataOutputPlus out) throws IOException;
+ void write(FileOutputStream bufferedWriter) throws IOException;
}
\ No newline at end of file
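Adding write(FileOutputStream) directly to the TokenTreeBuilder interface forces every implementation to supply a debug serializer. On Java 8, a default method would be a lighter-touch alternative that keeps the hook optional; a hypothetical sketch (not what the patch does):

import java.io.FileOutputStream;
import java.io.IOException;

interface DebuggableTokenTreeBuilder
{
    // the real binary writer would live alongside this, as in the patch
    default void write(FileOutputStream debugFile) throws IOException
    {
        throw new UnsupportedOperationException("no debug serializer for " + getClass());
    }
}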