
@beobal
Created October 25, 2012 11:36
DSE Search Test Example
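
// DseSearchTest.java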
package com.datastax.demo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.nio.ByteBuffer;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.TBinaryProtocol;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.datastax.bdp.server.DseDaemon;
public class DseSearchTest
{
    private static DseDaemon daemon;

    // these could be read from file
    static String schemaXml =
        "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n"
        + "<schema name=\"simple_schema\" version=\"1.1\">\n"
        + " <types>\n"
        + " <fieldType name=\"string\" class=\"solr.StrField\"/>\n"
        + " </types>\n"
        + " <fields>\n"
        + " <field name=\"id\" type=\"string\" indexed=\"true\" stored=\"true\"/>\n"
        + " <field name=\"name\" stored=\"true\" type=\"string\" multiValued=\"false\" indexed=\"true\"/>"
        + " </fields>\n"
        + " <uniqueKey>id</uniqueKey>\n"
        + " <defaultSearchField>name</defaultSearchField>\n"
        + "</schema>\n";
    static String solrconfigXml =
        "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n"
        + "<config>\n"
        + " <luceneMatchVersion>LUCENE_40</luceneMatchVersion>\n"
        + " <dataDir>${solr.data.dir:}</dataDir>\n"
        + " <directoryFactory name=\"DirectoryFactory\" \n"
        + " class=\"${solr.directoryFactory:solr.StandardDirectoryFactory}\"/>\n"
        + " <indexDefaults>\n"
        + " <useCompoundFile>false</useCompoundFile>\n"
        + " <mergeFactor>10</mergeFactor>\n"
        + " <ramBufferSizeMB>32</ramBufferSizeMB>\n"
        + " <maxFieldLength>10000</maxFieldLength>\n"
        + " <writeLockTimeout>1000</writeLockTimeout>\n"
        + " <commitLockTimeout>10000</commitLockTimeout>\n"
        + " <lockType>native</lockType>\n"
        + " </indexDefaults>\n"
        + " <mainIndex>\n"
        + " <useCompoundFile>false</useCompoundFile>\n"
        + " <ramBufferSizeMB>32</ramBufferSizeMB>\n"
        + " <mergeFactor>10</mergeFactor>\n"
        + " <unlockOnStartup>false</unlockOnStartup>\n"
        + " <reopenReaders>true</reopenReaders>\n"
        + " <deletionPolicy class=\"solr.SolrDeletionPolicy\">\n"
        + " <str name=\"maxCommitsToKeep\">1</str>\n"
        + " <str name=\"maxOptimizedCommitsToKeep\">0</str>\n"
        + " </deletionPolicy>\n"
        + " <infoStream file=\"INFOSTREAM.txt\">false</infoStream> \n"
        + " </mainIndex>\n"
        + " <autoCommit>\n"
        + " <maxDocs>1</maxDocs>\n"
        + " <maxTime>1</maxTime>\n"
        + " </autoCommit>\n"
        + " <updateHandler class=\"solr.DirectUpdateHandler2\">\n"
        + " </updateHandler>\n"
        + " <query>\n"
        + " <maxBooleanClauses>1024</maxBooleanClauses>\n"
        + " <filterCache class=\"solr.FastLRUCache\"\n"
        + " size=\"512\"\n"
        + " initialSize=\"512\"\n"
        + " autowarmCount=\"0\"/>\n"
        + " <queryResultCache class=\"solr.LRUCache\"\n"
        + " size=\"512\"\n"
        + " initialSize=\"512\"\n"
        + " autowarmCount=\"0\"/>\n"
        + " <documentCache class=\"solr.LRUCache\"\n"
        + " size=\"512\"\n"
        + " initialSize=\"512\"\n"
        + " autowarmCount=\"0\"/>\n"
        + " <enableLazyFieldLoading>true</enableLazyFieldLoading>\n"
        + " <queryResultWindowSize>20</queryResultWindowSize>\n"
        + " <queryResultMaxDocsCached>200</queryResultMaxDocsCached>\n"
        + " </query>\n"
        + " <requestDispatcher handleSelect=\"true\" >\n"
        + " <requestParsers enableRemoteStreaming=\"true\" \n"
        + " multipartUploadLimitInKB=\"2048000\" />\n"
        + " <httpCaching never304=\"true\" />\n"
        + " </requestDispatcher>\n"
        + " <requestHandler name=\"search\" class=\"solr.SearchHandler\" default=\"true\">\n"
        + " <lst name=\"defaults\">\n"
        + " <str name=\"echoParams\">explicit</str>\n"
        + " <int name=\"rows\">10</int>\n"
        + " </lst>\n"
        + " </requestHandler>\n"
        + " <requestHandler name=\"/get\" class=\"solr.RealTimeGetHandler\">\n"
        + " <lst name=\"defaults\">\n"
        + " <str name=\"omitHeader\">true</str>\n"
        + " </lst>\n"
        + " </requestHandler>\n"
        + " <admin>\n"
        + " <defaultQuery>*:*</defaultQuery>\n"
        + " </admin>\n"
        + "</config>\n";

    static String keyspace = "testks";
    static String columnFamily = "testcf";
    static String indexName = keyspace + "." + columnFamily;
    static int solrPort = 8983;
    @BeforeClass
    public static void setupServer()
    {
        try
        {
            // start DSE
            daemon = DseSearchTestSupport.startupServer();
            DseSearchTestSupport.addSolrConfig(indexName, solrconfigXml, solrPort);
            DseSearchTestSupport.addSchema(indexName, schemaXml, solrPort);
        }
        catch (Exception e)
        {
            e.printStackTrace();
            fail();
        }
    }

    @AfterClass
    public static void shutdownServer()
    {
        daemon.stop();
    }
    @Test
    public void insertDataAndSearchForIt() throws Exception
    {
        Cassandra.Client client = getClient();
        ByteBuffer key = ByteBufferUtil.bytes("key1");
        ColumnParent cp = new ColumnParent(columnFamily);
        Column col = new Column();
        col.setName("name".getBytes("utf-8"));
        col.setValue("test".getBytes("utf-8"));
        col.setTimestamp(System.currentTimeMillis());

        // insert a key + column
        client.set_keyspace(keyspace);
        client.insert(key, cp, col, ConsistencyLevel.ONE);

        // search for the column value
        CommonsHttpSolrServer solrClient = new CommonsHttpSolrServer("http://localhost:" + solrPort + "/solr/" + indexName);
        SolrQuery q = new SolrQuery().setQuery("name:test");
        QueryResponse r = solrClient.query(q);
        SolrDocumentList results = r.getResults();
        assertEquals(1, results.size());
        assertEquals("key1", results.get(0).getFieldValue("id"));
    }
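
    // open a framed Thrift connection to the Cassandra instance started by this test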
    private Cassandra.Client getClient() throws TTransportException
    {
        TTransport tr = new TSocket("localhost", 9160);
        TProtocol proto = new TBinaryProtocol(new TFramedTransport(tr));
        Cassandra.Client client = new Cassandra.Client(proto);
        tr.open();
        return client;
    }
}
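
// DseSearchTestSupport.java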
package com.datastax.demo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import com.datastax.bdp.hadoop.mapred.SchemaTool;
import com.datastax.bdp.server.DseDaemon;
public class DseSearchTestSupport
{
    // Start DSE Server with Solr enabled
    public static synchronized DseDaemon startupServer() throws Exception
    {
        System.setProperty("cassandra-foreground", "1");
        System.setProperty("search-service", "true");
        final ObservableDseDaemon daemon = new ObservableDseDaemon();
        try
        {
            // run in own thread
            new Thread(new Runnable() {
                public void run()
                {
                    daemon.activate();
                }
            }).start();
        }
        catch (Throwable e)
        {
            throw new RuntimeException(e);
        }

        // wait for startup to complete
        try
        {
            daemon.startupLatch.await(60, TimeUnit.SECONDS);
            SchemaTool.init();
            return daemon;
        }
        catch (InterruptedException e1)
        {
            throw new RuntimeException("Error starting server");
        }
    }
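
    // DseDaemon subclass that counts down a latch once startup has completed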
    static class ObservableDseDaemon extends DseDaemon
    {
        CountDownLatch startupLatch = new CountDownLatch(1);

        @Override
        public void start()
        {
            super.start();
            startupLatch.countDown();
        }
    }
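
    // upload the index resources (solrconfig.xml and schema.xml) to DSE's Solr resource endpoint over HTTP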
    public static void addSolrConfig(String indexName, String solrconfigXml, int port) throws MalformedURLException
    {
        URL url = new URL("http://localhost:" + port + "/solr/resource/" + indexName + "/solrconfig.xml");
        submitSolrResource(url, solrconfigXml);
    }

    public static void addSchema(String indexName, String schemaXml, int port) throws MalformedURLException
    {
        URL url = new URL("http://localhost:" + port + "/solr/resource/" + indexName + "/schema.xml");
        submitSolrResource(url, schemaXml);
    }

    public static void submitSolrResource(URL url, String resource)
    {
        try
        {
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setDoOutput(true);
            OutputStreamWriter wr = new OutputStreamWriter(conn.getOutputStream());
            wr.write(resource);
            wr.flush();
            wr.close();
            assertEquals(200, conn.getResponseCode());
        }
        catch (IOException e)
        {
            e.printStackTrace();
            fail();
        }
    }
}

beobal commented Oct 25, 2012

To compile and run the tests, you need to reference the jars in DSE's runtime classpath. In your DSE install location, you'll need all the jars from

  • resources/cassandra/lib
  • resources/dse/lib
  • resources/hadoop
  • resources/solr/lib
  • resources/tomcat/lib

on your classpath.

When running the tests, you need to specify paths to a few configuration locations using system properties:

  • cassandra.yaml, via the cassandra.config system property
  • dse.yaml, via the dse.config system property
  • the directory containing the Tomcat configuration, via the catalina.base system property

Examples of all of these configuration files can be found in the DSE install location.
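
If it's more convenient than passing -D flags on the java command line, the same properties can be set programmatically before the daemon is started. A minimal sketch, with placeholder paths standing in for wherever your copies of the config files actually live:

// Hypothetical paths -- substitute the locations of your own config files.
// Run these lines before DseSearchTestSupport.startupServer() is called,
// e.g. at the top of DseSearchTest.setupServer().
System.setProperty("cassandra.config", "file:///path/to/cassandra.yaml"); // typically a URL or classpath resource
System.setProperty("dse.config", "/path/to/dse.yaml");
System.setProperty("catalina.base", "/path/to/tomcat");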

Also, as with tests that use an embedded instance of regular Cassandra, you'll need to ensure that the configured data directories are cleaned out somewhere in test setup.
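
A rough sketch of one way to do that; the directory names below are placeholders and should match the data_file_directories, commitlog_directory and saved_caches_directory entries in the cassandra.yaml the tests point at:

// Call this before startupServer(); the paths are hypothetical -- use the ones from your cassandra.yaml.
private static void cleanDataDirectories()
{
    String[] dirs = { "/tmp/dse-test/data", "/tmp/dse-test/commitlog", "/tmp/dse-test/saved_caches" };
    for (String dir : dirs)
        deleteRecursive(new java.io.File(dir));
}

private static void deleteRecursive(java.io.File f)
{
    if (!f.exists())
        return;
    if (f.isDirectory())
        for (java.io.File child : f.listFiles())
            deleteRecursive(child);
    f.delete();
}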
