Skip to content

Instantly share code, notes, and snippets.

View rajanim's full-sized avatar

Rajani Maski rajanim

  • New York
View GitHub Profile
o.a.s.h.RequestHandlerBase org.apache.solr.common.SolrException: org.apache.lucene.index.ExitableDirectoryReader$ExitingReaderException: The request took too long to iterate over terms. Timeout: timeoutAt: 946228173449902 (System.nanoTime(): 946228228181282), TermsEnum=org.apache.lucene.codecs.lucene90.blocktree.SegmentTermsEnum@691855df => org.apache.solr.common.SolrException: org.apache.lucene.index.ExitableDirectoryReader$ExitingReaderException: The request took too long to iterate over terms. Timeout: timeoutAt: 946228173449902 (System.nanoTime(): 946228228181282), TermsEnum=org.apache.lucene.codecs.lucene90.blocktree.SegmentTermsEnum@691855df
at org.apache.solr.search.ReRankCollector.topDocs(ReRankCollector.java:163)
org.apache.solr.common.SolrException: org.apache.lucene.index.ExitableDirectoryReader$ExitingReaderException: The request took too long to iterate over terms. Timeout: timeoutAt: 946228173449902 (System.nanoTime(): 946228228181282), TermsEnum=org.apache.lucene.codecs.lucene90.blocktr
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.Transaction;
import java.util.List;
public class TransactionOpsExample {
public static void main(String[] args) throws Exception {
@rajanim
rajanim / prom_metrix_to_csv.py
Created February 19, 2021 16:04 — forked from aneeshkp/prom_metrix_to_csv.py
Export data from Prometheus to csv file
import csv
import requests
import sys
def GetMetrixNames(url):
    """Return the list of metric names exposed by the Prometheus server at `url`."""
    # Prometheus label-values API: GET <url>/api/v1/label/__name__/values
    endpoint = '%s/api/v1/label/__name__/values' % url
    payload = requests.get(endpoint).json()
    # Metric names are carried under the 'data' key of the JSON envelope.
    return payload['data']
import org.apache.spark.sql.functions._
import org.apache.spark.sql.SaveMode
import org.apache.hadoop.fs._
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
import java.io._
import spark.implicits._
import java.util.Calendar
import java.text.SimpleDateFormat
// Running total of records written by this job.
// NOTE(review): presumably incremented from a SparkListener on SparkListenerTaskEnd
// (see imports above) — the updating code is outside this excerpt; confirm.
var recordsWrittenCount = 0L
import com.google.common.util.concurrent.MoreExecutors;
import com.lucidworks.apollo.component.ExecutorComponent;
import com.lucidworks.apollo.pipeline.*;
import com.lucidworks.apollo.pipeline.async.AbstractAsyncStageConfig;
import com.lucidworks.apollo.pipeline.async.AsyncStage;
import com.lucidworks.apollo.pipeline.async.AsyncStageConfig;
import java.util.concurrent.ExecutorService;
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
package org.sfsu.cs.selectivesearch.common.distance
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
/**
* Created by rajanishivarajmaski1 on 4/2/17.
* fork from spark-scala apis
 * The cosine distance between two points: cosineDistance(a,b) = (a dot b)/(norm(a) * norm(b))
*/
@rajanim
rajanim / solrj_example
Created March 18, 2016 05:06
solrj_cloudsolrclient
solr_j_index_sample
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrInputDocument;
import java.io.IOException;
import java.text.DecimalFormat;
<searchComponent name="suggest" class="solr.SuggestComponent">
<lst name="suggester">
<str name="name">mySuggester</str>
<str name="lookupImpl">FuzzyLookupFactory</str>
<str name="storeDir">suggester_fuzzy_dir</str>
<str name="weightField">price</str>
<!-- Substitute these for the two above for another "flavor"
<str name="lookupImpl">AnalyzingInfixLookupFactory</str>
<str name="indexPath">suggester_infix_dir</str>
<requestHandler name="/select" class="solr.SearchHandler">
<lst name="defaults">
<str name="echoParams">explicit</str>
<int name="rows">10</int>
<!-- Spellcheck default. Solr will use suggestions from both the 'default' spellchecker
and from the 'wordbreak' spellchecker and combine them.
collations (re-written queries) can include a combination of
corrections from both spellcheckers -->
<str name="spellcheck.dictionary">default</str>
<str name="spellcheck.dictionary">wordbreak</str>