Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
commit c0fd82ca3555586767307456fd5d52ad1f70e666
Author: DuyHai DOAN <doanduyhai@gmail.com>
Date: Thu Jun 23 11:41:54 2016 +0200
Cassandra 3.7 patch for SASI debug
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
index 9a1f7f1..fe34073 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/AbstractTokenTreeBuilder.java
@@ -18,11 +18,13 @@
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import java.util.StringJoiner;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.utils.AbstractIterator;
@@ -44,6 +46,48 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
protected long treeMinToken;
protected long treeMaxToken;
+ protected void writeToDebugAndFlush(FileOutputStream DEBUG_FILE, String string)
+ {
+ try
+ {
+ DEBUG_FILE.write(string.getBytes());
+ DEBUG_FILE.flush();
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
+ }
+
+
/**
 * Debug counterpart of write(DataOutputPlus): walks the token tree breadth-first
 * (root level first) and serializes a textual description of every serializable
 * node, mirroring the traversal order of the binary on-disk layout.
 *
 * @param bufferedWriter debug output stream (a FileOutputStream, despite the name)
 * @throws IOException declared for interface parity; the helpers swallow IO errors
 */
public void write(FileOutputStream bufferedWriter) throws IOException
{
    Iterator<Node> levelIterator = root.levelIterator();
    // Child block indices start at 1; index 0 is the current (root) block.
    long childBlockIndex = 1;

    while (levelIterator != null)
    {
        Node firstChild = null;
        while (levelIterator.hasNext())
        {
            Node block = levelIterator.next();

            // Remember the first child so we can descend one level after this
            // level has been fully emitted.
            if (firstChild == null && !block.isLeaf())
                firstChild = ((InteriorNode) block).children.get(0);

            if (block.isSerializable())
            {
                block.serialize(childBlockIndex, bufferedWriter);
            }

            childBlockIndex += block.childCount();
        }

        // Descend, or stop once the just-written level contained only leaves.
        levelIterator = (firstChild == null) ? null : firstChild.levelIterator();
    }
}
+
+
public void add(TokenTreeBuilder other)
{
add(other.iterator());
@@ -126,6 +170,7 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
public abstract boolean isSerializable();
public abstract void serialize(long childBlockIndex, ByteBuffer buf);
+ public abstract void serialize(long childBlockIndex, FileOutputStream bufferWriter);
public abstract int childCount();
public abstract int tokenCount();
@@ -179,6 +224,23 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
alignBuffer(buf, BLOCK_HEADER_BYTES);
}
+ protected void serializeHeader(FileOutputStream bufferedWriter)
+ {
+ Header header;
+ if (isRoot()) {
+ header = new RootHeader();
+ writeToDebugAndFlush(bufferedWriter, "Root Header -- ");
+ } else if (!isLeaf()) {
+ header = new InteriorNodeHeader();
+ writeToDebugAndFlush(bufferedWriter, "InteriorNode Header -- ");
+ } else {
+ header = new LeafHeader();
+ writeToDebugAndFlush(bufferedWriter, "Leaf Header -- ");
+ }
+
+ header.serialize(bufferedWriter);
+ }
+
private abstract class Header
{
public void serialize(ByteBuffer buf)
@@ -189,6 +251,14 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putLong(nodeMaxToken);
}
/**
 * Debug counterpart of serialize(ByteBuffer): prints the same four header
 * fields (info byte, token count, min token, max token) as readable text.
 */
public void serialize(FileOutputStream bufferedWriter)
{
    writeToDebugAndFlush(bufferedWriter, "Infobyte : " + infoByte() + ", ");
    writeToDebugAndFlush(bufferedWriter, "tokens count : " + tokenCount() + ", ");
    writeToDebugAndFlush(bufferedWriter, "min token : " + nodeMinToken + ", ");
    writeToDebugAndFlush(bufferedWriter, "max token : " + nodeMaxToken + "\n");
}
+
protected abstract byte infoByte();
}
@@ -203,6 +273,11 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putLong(treeMaxToken);
}
// NOTE(review): delegates straight to Header.serialize(FileOutputStream).
// Unlike the ByteBuffer overload it emits neither the magic nor the extended
// root fields (writeMagic below is never invoked from here) — confirm that
// this omission from the debug trace is intentional.
public void serialize(FileOutputStream bufferedWriter)
{
    super.serialize(bufferedWriter);
}
+
protected byte infoByte()
{
// if leaf, set leaf indicator and last leaf indicator (bits 0 & 1)
@@ -223,6 +298,20 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
}
}
+
+
/**
 * Debug counterpart of the binary writeMagic: prints a label for the magic
 * marker the current descriptor version would write. Only VERSION_AB is
 * labelled; other versions emit nothing, mirroring the binary writer's shape.
 * NOTE(review): appears to be unreferenced within this patch — dead code?
 */
protected void writeMagic(FileOutputStream bufferedWriter)
{
    switch (Descriptor.CURRENT_VERSION)
    {
        case Descriptor.VERSION_AB:
            writeToDebugAndFlush(bufferedWriter, "AB Magic 0x5A51, ");
            break;

        default:
            break;
    }
}
}
private class InteriorNodeHeader extends Header
@@ -270,6 +359,18 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
buf.putLong(offset.value);
}
+ protected void serializeOverflowCollisions(FileOutputStream bufferedWriter)
+ {
+ if (overflowCollisions != null)
+ {
+ writeToDebugAndFlush(bufferedWriter, "OverflowCollisions -- ");
+ StringJoiner joiner = new StringJoiner(", ", "Offsets : [", "]\n");
+ for (LongCursor offset : overflowCollisions)
+ joiner.add(offset.value + "");
+ writeToDebugAndFlush(bufferedWriter, joiner.toString());
+ }
+ }
+
public void serialize(long childBlockIndex, ByteBuffer buf)
{
serializeHeader(buf);
@@ -277,7 +378,15 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
serializeOverflowCollisions(buf);
}
/**
 * Debug counterpart of serialize(long, ByteBuffer) for leaf nodes:
 * header, then entries, then overflow collisions.
 * childBlockIndex is unused here, matching the ByteBuffer overload.
 */
public void serialize(long childBlockIndex, FileOutputStream bufferedWriter)
{
    serializeHeader(bufferedWriter);
    serializeData(bufferedWriter);
    serializeOverflowCollisions(bufferedWriter);
}
+
protected abstract void serializeData(ByteBuffer buf);
+ protected abstract void serializeData(FileOutputStream bufferedWriter);
protected LeafEntry createEntry(final long tok, final LongSet offsets)
{
@@ -342,6 +451,14 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
.putInt(offsetData());
}
/**
 * Debug dump of one leaf entry: entry type, extra-offset field, token and
 * packed offset data — the same fields the ByteBuffer overload writes in binary.
 */
public void serialize(FileOutputStream bufferedWriter)
{
    writeToDebugAndFlush(bufferedWriter, "\t\tEntryType : " + type().name() + ", ");
    writeToDebugAndFlush(bufferedWriter, "offset extra : " + offsetExtra() + ", ");
    writeToDebugAndFlush(bufferedWriter, "token : " + token + ", ");
    writeToDebugAndFlush(bufferedWriter, "offset data : " + offsetData() + "\n");
}
+
}
@@ -492,6 +609,13 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
serializeChildOffsets(childBlockIndex, buf);
}
/**
 * Debug counterpart of serialize(long, ByteBuffer) for interior nodes:
 * header, token list, then absolute child block offsets.
 */
public void serialize(long childBlockIndex, FileOutputStream bufferedWriter)
{
    serializeHeader(bufferedWriter);
    serializeTokens(bufferedWriter);
    serializeChildOffsets(childBlockIndex, bufferedWriter);
}
+
public int childCount()
{
return children.size();
@@ -634,11 +758,27 @@ public abstract class AbstractTokenTreeBuilder implements TokenTreeBuilder
tokens.forEach(buf::putLong);
}
/** Debug counterpart of serializeTokens(ByteBuffer): lists this node's tokens on one line. */
private void serializeTokens(FileOutputStream bufferedWriter)
{
    StringJoiner joiner = new StringJoiner(", ", "Tokens: [", "]\n");
    tokens.forEach(token -> joiner.add(token.toString()));
    writeToDebugAndFlush(bufferedWriter, joiner.toString());
}
+
private void serializeChildOffsets(long childBlockIndex, ByteBuffer buf)
{
for (int i = 0; i < children.size(); i++)
buf.putLong((childBlockIndex + i) * BLOCK_BYTES);
}
+
+ private void serializeChildOffsets(long childBlockIndex, FileOutputStream bufferedWriter)
+ {
+ StringJoiner joiner = new StringJoiner(", ", "Child offsets: [", "]\n");
+ for (int i = 0; i < children.size(); i++)
+ joiner.add(((childBlockIndex + i) * BLOCK_BYTES) + "");
+
+ writeToDebugAndFlush(bufferedWriter, joiner.toString());
+ }
}
public static class LevelIterator extends AbstractIterator<Node>
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
index 2ddfd89..eca52de 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/DynamicTokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.util.*;
@@ -185,5 +186,15 @@ public class DynamicTokenTreeBuilder extends AbstractTokenTreeBuilder
createEntry(entry.getKey(), entry.getValue()).serialize(buf);
}
+ protected void serializeData(FileOutputStream bufferedWriter)
+ {
+
+ for (Map.Entry<Long, LongSet> entry : tokens.entrySet())
+ {
+ final LongSet longSet = entry.getValue();
+ final LeafEntry leafEntry = createEntry(entry.getKey(), longSet);
+ leafEntry.serialize(bufferedWriter);
+ }
+ }
}
}
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
index 8acbb05..d4ca77b 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/OnDiskIndexBuilder.java
@@ -18,6 +18,7 @@
package org.apache.cassandra.index.sasi.disk;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
@@ -49,6 +50,8 @@ public class OnDiskIndexBuilder
{
private static final Logger logger = LoggerFactory.getLogger(OnDiskIndexBuilder.class);
+ private FileOutputStream DEBUG_FILE;
+
public enum Mode
{
PREFIX(EnumSet.of(Op.EQ, Op.MATCH, Op.PREFIX, Op.NOT_EQ, Op.RANGE)),
@@ -158,6 +161,15 @@ public class OnDiskIndexBuilder
this.termSize = TermSize.sizeOf(comparator);
this.mode = mode;
this.marksPartials = marksPartials;
+
+ try
+ {
+ DEBUG_FILE = new FileOutputStream(new File("/tmp/debug_SASI.txt"), false);
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ }
}
public OnDiskIndexBuilder add(ByteBuffer term, DecoratedKey key, long keyPosition)
@@ -265,49 +277,74 @@ public class OnDiskIndexBuilder
{
out = new SequentialWriter(file, BLOCK_SIZE, BufferType.ON_HEAP);
+ writeToDebugAndFlush("descriptor : "+ descriptor.version.toString() + "\n");
out.writeUTF(descriptor.version.toString());
+ writeToDebugAndFlush("termSize.size : "+ termSize.size + "\n");
out.writeShort(termSize.size);
// min, max term (useful to find initial scan range from search expressions)
+
+ writeToDebugAndFlush("min term : %s \n",terms.minTerm());
ByteBufferUtil.writeWithShortLength(terms.minTerm(), out);
+
+ writeToDebugAndFlush("max term : %s \n",terms.maxTerm());
ByteBufferUtil.writeWithShortLength(terms.maxTerm(), out);
// min, max keys covered by index (useful when searching across multiple indexes)
+
+ writeToDebugAndFlush("min range : " + keyComparator.getString(range.left) + "\n");
ByteBufferUtil.writeWithShortLength(range.left, out);
+
+ writeToDebugAndFlush("max range : " + keyComparator.getString(range.right) + "\n");
ByteBufferUtil.writeWithShortLength(range.right, out);
+ writeToDebugAndFlush("index mode : "+ mode.toString() + "\n");
out.writeUTF(mode.toString());
+
+ writeToDebugAndFlush("marks partials ? "+ marksPartials + "\n");
out.writeBoolean(marksPartials);
out.skipBytes((int) (BLOCK_SIZE - out.position()));
- dataLevel = mode == Mode.SPARSE ? new DataBuilderLevel(out, new MutableDataBlock(termComparator, mode))
- : new MutableLevel<>(out, new MutableDataBlock(termComparator, mode));
+ dataLevel = mode == Mode.SPARSE ? new DataBuilderLevel(out, new MutableDataBlock(termComparator, mode, DEBUG_FILE))
+ : new MutableLevel<>(out, new MutableDataBlock(termComparator, mode, DEBUG_FILE));
+ writeToDebugAndFlush("DATA BLOCK \n");
while (terms.hasNext())
{
Pair<IndexedTerm, TokenTreeBuilder> term = terms.next();
addTerm(new InMemoryDataTerm(term.left, term.right), out);
}
+ writeToDebugAndFlush("Final DATA BLOCK \n");
dataLevel.finalFlush();
+
+ writeToDebugAndFlush("POINTERS BLOCKS \n");
for (MutableLevel l : levels)
l.flush(); // flush all of the buffers
// and finally write levels index
final long levelIndexPosition = out.position();
+ writeToDebugAndFlush("Levels count : " + levels.size() + "\n");
out.writeInt(levels.size());
+ writeToDebugAndFlush("-------------- \n");
+ writeToDebugAndFlush("POINTER BLOCKS META \n");
+
for (int i = levels.size() - 1; i >= 0; i--)
levels.get(i).flushMetadata();
+ writeToDebugAndFlush("-------------- \n");
+ writeToDebugAndFlush("DATA BLOCKS META \n");
dataLevel.flushMetadata();
+ writeToDebugAndFlush("Index position : " + levelIndexPosition + "\n");
out.writeLong(levelIndexPosition);
// sync contents of the output and disk,
// since it's not done implicitly on close
out.sync();
+ closeDebugFile();
}
catch (IOException e)
{
@@ -322,13 +359,13 @@ public class OnDiskIndexBuilder
private MutableLevel<InMemoryPointerTerm> getIndexLevel(int idx, SequentialWriter out)
{
if (levels.size() == 0)
- levels.add(new MutableLevel<>(out, new MutableBlock<>()));
+ levels.add(new MutableLevel<>(out, new MutableBlock<>(DEBUG_FILE, termComparator)));
if (levels.size() - 1 < idx)
{
int toAdd = idx - (levels.size() - 1);
for (int i = 0; i < toAdd; i++)
- levels.add(new MutableLevel<>(out, new MutableBlock<>()));
+ levels.add(new MutableLevel<>(out, new MutableBlock<>(DEBUG_FILE, termComparator)));
}
return levels.get(idx);
@@ -411,11 +448,28 @@ public class OnDiskIndexBuilder
private final MutableBlock<T> inProcessBlock;
private InMemoryPointerTerm lastTerm;
+ protected FileOutputStream DEBUG_FILE;
+ protected AbstractType<?> termComparator;
public MutableLevel(SequentialWriter out, MutableBlock<T> block)
{
this.out = out;
this.inProcessBlock = block;
+ this.DEBUG_FILE = block.DEBUG_FILE;
+ this.termComparator = block.termComparator;
+ }
+
// Debug helper: UTF-8-encode, write, flush immediately; IO errors are only
// printed. NOTE(review): duplicated in MutableBlock and OnDiskIndexBuilder —
// a shared static helper would avoid three copies.
protected void writeToDebugAndFlush(String string)
{
    try
    {
        DEBUG_FILE.write(string.getBytes("UTF-8"));
        DEBUG_FILE.flush();
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
}
/**
@@ -450,14 +504,27 @@ public class OnDiskIndexBuilder
public void flushMetadata() throws IOException
{
- flushMetadata(blockOffsets);
+ flushMetadata(blockOffsets, false);
}
- protected void flushMetadata(LongArrayList longArrayList) throws IOException
+ protected void flushMetadata(LongArrayList longArrayList, boolean superBlock) throws IOException
{
+ StringJoiner builder;
+ if(superBlock)
+ {
+ writeToDebugAndFlush("Super Block count : " + longArrayList.size() + ", ");
+ builder = new StringJoiner(", ", "Super Block offsets : [", "]\n");
+ } else {
+ writeToDebugAndFlush("Block count : " + longArrayList.size() + ", ");
+ builder = new StringJoiner(", ", "Block offsets : [", "]\n");
+ }
+
out.writeInt(longArrayList.size());
for (int i = 0; i < longArrayList.size(); i++)
+ {
+ builder.add(longArrayList.get(i) + "");
out.writeLong(longArrayList.get(i));
+ }
}
}
@@ -493,7 +560,10 @@ public class OnDiskIndexBuilder
if (dataBlocksCnt == SUPER_BLOCK_SIZE || (force && !superBlockTree.isEmpty()))
{
superBlockOffsets.add(out.position());
- superBlockTree.finish().write(out);
+ writeToDebugAndFlush("Super TokenTree Block for SPARSE mode \n");
+ final TokenTreeBuilder finish = superBlockTree.finish();
+ finish.write(out);
+ finish.write(DEBUG_FILE);
alignToBlock(out);
dataBlocksCnt = 0;
@@ -510,14 +580,18 @@ public class OnDiskIndexBuilder
public void flushMetadata() throws IOException
{
super.flushMetadata();
- flushMetadata(superBlockOffsets);
+ writeToDebugAndFlush("SPARSE MODE \n");
+ flushMetadata(superBlockOffsets, true);
}
}
private static class MutableBlock<T extends InMemoryTerm>
{
protected final DataOutputBufferFixed buffer;
+ protected StringBuilder debugBuffer = new StringBuilder();
protected final ShortArrayList offsets;
+ public FileOutputStream DEBUG_FILE;
+ public AbstractType<?> termComparator;
public MutableBlock()
{
@@ -525,6 +599,37 @@ public class OnDiskIndexBuilder
offsets = new ShortArrayList();
}
/**
 * Debug-enabled constructor: same as the no-arg form, plus the shared debug
 * stream and the comparator used to render term bytes as readable strings.
 */
public MutableBlock(FileOutputStream bufferedWriter, AbstractType<?> termComparator)
{
    this();
    this.DEBUG_FILE = bufferedWriter;
    this.termComparator = termComparator;
}
+
// Debug helper: UTF-8-encode, write, flush immediately; IO errors are only
// printed so debug output never fails index construction.
protected void writeToDebugAndFlush(String string)
{
    try
    {
        DEBUG_FILE.write(string.getBytes("UTF-8"));
        DEBUG_FILE.flush();
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
}
+
/** Accumulates debug text in memory; it is written out by flushAndClear. */
protected void writeToDebugBuffer(String string)
{
    debugBuffer.append(string);
}
+
/** Accumulates a formatted line, rendering the term bytes via the term comparator (%s slot). */
protected void writeToDebugBuffer(String string, ByteBuffer byteBuffer)
{
    debugBuffer.append(String.format(string, termComparator.getString(byteBuffer)));
}
+
+
public final void add(T term) throws IOException
{
offsets.add((short) buffer.position());
@@ -533,6 +638,7 @@ public class OnDiskIndexBuilder
protected void addInternal(T term) throws IOException
{
+ writeTermToDebugBuffer(term);
term.serialize(buffer);
}
@@ -554,8 +660,16 @@ public class OnDiskIndexBuilder
public void flushAndClear(SequentialWriter out) throws IOException
{
out.writeInt(offsets.size());
- for (int i = 0; i < offsets.size(); i++)
+
+ writeToDebugAndFlush("Term count : " + offsets.size() + ", ");
+ StringJoiner joiner = new StringJoiner(", ", "Offsets [", "]\n");
+ for (int i = 0; i < offsets.size(); i++) {
out.writeShort(offsets.get(i));
+ joiner.add(offsets.get(i) + "");
+ }
+ writeToDebugAndFlush(joiner.toString());
+ writeToDebugAndFlush(debugBuffer.toString());
+ debugBuffer = new StringBuilder();
out.write(buffer.buffer());
@@ -564,6 +678,21 @@ public class OnDiskIndexBuilder
offsets.clear();
buffer.clear();
}
+
/**
 * Appends a one-line description of a term to the in-memory debug buffer,
 * distinguishing pointer, data and plain terms.
 * NOTE(review): the instanceof order matters if one term class extends the
 * other — confirm the InMemoryTerm hierarchy before reordering.
 */
protected void writeTermToDebugBuffer(InMemoryTerm term)
{
    if(term instanceof InMemoryPointerTerm)
    {
        InMemoryPointerTerm pointerTerm = (InMemoryPointerTerm) term;
        writeToDebugBuffer("Pointer Term (partial ? "+pointerTerm.term.isPartial()+") : %s, ", pointerTerm.term.getBytes());
        writeToDebugBuffer("Block number : " + pointerTerm.blockCnt + ".\n");
    } else if(term instanceof InMemoryDataTerm)
    {
        writeToDebugBuffer("Data Term (partial ? "+term.term.isPartial()+") : %s. ", term.term.getBytes());
    } else {
        writeToDebugBuffer("Normal Term (partial ? "+term.term.isPartial()+"): %s.\n", term.term.getBytes());
    }
}
}
private static class MutableDataBlock extends MutableBlock<InMemoryDataTerm>
@@ -578,11 +707,13 @@ public class OnDiskIndexBuilder
private final List<TokenTreeBuilder> containers = new ArrayList<>();
private TokenTreeBuilder combinedIndex;
- public MutableDataBlock(AbstractType<?> comparator, Mode mode)
+ public MutableDataBlock(AbstractType<?> comparator, Mode mode, FileOutputStream debugFile)
{
this.comparator = comparator;
this.mode = mode;
this.combinedIndex = initCombinedIndex();
+ super.DEBUG_FILE = debugFile;
+ super.termComparator = comparator;
}
protected void addInternal(InMemoryDataTerm term) throws IOException
@@ -618,16 +749,30 @@ public class OnDiskIndexBuilder
{
super.flushAndClear(out);
+ writeToDebugAndFlush("Offset : " + (mode == Mode.SPARSE ? offset : -1) + "\n");
+
out.writeInt(mode == Mode.SPARSE ? offset : -1);
if (containers.size() > 0)
{
+ writeToDebugAndFlush("TOKEN TREES BLOCK \n");
for (TokenTreeBuilder tokens : containers)
+ {
tokens.write(out);
+ tokens.write(DEBUG_FILE);
+ }
}
+ writeToDebugAndFlush("\n");
+
if (mode == Mode.SPARSE && combinedIndex != null)
- combinedIndex.finish().write(out);
+ {
+ final TokenTreeBuilder finish = combinedIndex.finish();
+ finish.write(out);
+ writeToDebugAndFlush("SPARSE TOKEN TREE BLOCK \n");
+ finish.write(DEBUG_FILE);
+ }
+
alignToBlock(out);
@@ -647,14 +792,26 @@ public class OnDiskIndexBuilder
private void writeTerm(InMemoryTerm term, TokenTreeBuilder keys) throws IOException
{
term.serialize(buffer);
+
+ writeToDebugBuffer("SPARSE mode ");
+ writeTermToDebugBuffer(term);
+ writeToDebugBuffer("Token count : " + keys.getTokenCount() + ", ");
+ StringJoiner joiner = new StringJoiner(", ", "Tokens [", "]\n");
+
buffer.writeByte((byte) keys.getTokenCount());
for (Pair<Long, LongSet> key : keys)
+ {
+ joiner.add(key.left + "");
buffer.writeLong(key.left);
+ }
+ writeToDebugBuffer(joiner.toString());
}
private void writeTerm(InMemoryTerm term, int offset) throws IOException
{
term.serialize(buffer);
+ writeTermToDebugBuffer(term);
+ writeToDebugBuffer("0x0, TokenTree offset : " + offset + "\n");
buffer.writeByte(0x0);
buffer.writeInt(offset);
}
@@ -664,4 +821,44 @@ public class OnDiskIndexBuilder
return mode == Mode.SPARSE ? new DynamicTokenTreeBuilder() : null;
}
}
+
// Debug helper: UTF-8-encode, write, flush immediately.
// NOTE(review): on IOException this closes DEBUG_FILE, yet later debug calls
// will still attempt to write to the closed stream (each then only prints a
// stack trace) — consider a null/closed guard instead of closing here.
private void writeToDebugAndFlush(String string)
{
    try
    {
        DEBUG_FILE.write(string.getBytes("UTF-8"));
        DEBUG_FILE.flush();
    }
    catch (IOException e)
    {
        e.printStackTrace();
        closeDebugFile();
    }
}
+
// Formatted variant: renders the term bytes through the key comparator into
// the %s slot of {@code string} before writing. Same error handling (and the
// same close-on-error caveat) as the plain-string overload above.
private void writeToDebugAndFlush(String string, ByteBuffer byteBuffer)
{
    try
    {
        DEBUG_FILE.write(String.format(string, termComparator.getString(byteBuffer)).getBytes("UTF-8"));
        DEBUG_FILE.flush();
    }
    catch (IOException e)
    {
        e.printStackTrace();
        closeDebugFile();
    }
}
+
/** Closes the debug stream, printing (not propagating) any IO failure. */
private void closeDebugFile()
{
    try
    {
        DEBUG_FILE.close();
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
}
}
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
index 7a41b38..8636836 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/StaticTokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
@@ -198,6 +199,11 @@ public class StaticTokenTreeBuilder extends AbstractTokenTreeBuilder
throw new UnsupportedOperationException();
}
/** Debug serialization is unsupported for this node, matching the ByteBuffer overload above. */
public void serializeData(FileOutputStream bufferedWriter)
{
    throw new UnsupportedOperationException();
}
+
public boolean isSerializable()
{
return false;
@@ -244,6 +250,15 @@ public class StaticTokenTreeBuilder extends AbstractTokenTreeBuilder
}
}
/**
 * Debug counterpart of serializeData(ByteBuffer): drains the token iterator,
 * emitting one textual leaf entry per token.
 * NOTE(review): this consumes {@code tokens}; the debug write is invoked after
 * the binary write on the same builder elsewhere in this patch, so the second
 * pass may find the iterator already exhausted — verify the tree output is not
 * empty in the debug file.
 */
public void serializeData(FileOutputStream bufferedWriter)
{
    while (tokens.hasNext())
    {
        Token entry = tokens.next();
        createEntry(entry.get(), entry.getOffsets()).serialize(bufferedWriter);
    }
}
+
public boolean isSerializable()
{
return true;
diff --git a/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java b/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
index 2210964..c505b15 100644
--- a/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
+++ b/src/java/org/apache/cassandra/index/sasi/disk/TokenTreeBuilder.java
@@ -17,6 +17,7 @@
*/
package org.apache.cassandra.index.sasi.disk;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
@@ -73,4 +74,5 @@ public interface TokenTreeBuilder extends Iterable<Pair<Long, LongSet>>
int serializedSize();
void write(DataOutputPlus out) throws IOException;
+ void write(FileOutputStream bufferedWriter) throws IOException;
}
\ No newline at end of file
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment