Elasticsearch function score query
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
  "size" : 200,
  "query" : {
    "function_score" : {
      "functions" : [ {
        "filter" : {
          "term" : {
            "loginState" : "all"
          }
        },
        "boost_factor" : 2.0
      }, {
        "filter" : {
          "term" : {
            "geoCode" : "all"
          }
        },
        "boost_factor" : 2.0
      }, {
        "filter" : {
          "term" : {
            "channel" : "all"
          }
        },
        "boost_factor" : 2.0
      }, {
        "filter" : {
          "term" : {
            "keyword" : "all"
          }
        },
        "boost_factor" : 2.0
      } ],
      "score_mode" : "sum"
    }
  },
  "post_filter" : {
    "bool" : {
      "must" : [ {
        "term" : {
          "pageId" : "page1"
        }
      }, {
        "range" : {
          "releaseNormal" : {
            "from" : null,
            "to" : 100000000,
            "include_lower" : true,
            "include_upper" : true
          }
        }
      }, {
        "range" : {
          "pubDate" : {
            "from" : null,
            "to" : "2014-12-11T23:07:42.235Z",
            "include_lower" : true,
            "include_upper" : true
          }
        }
      }, {
        "range" : {
          "expDate" : {
            "from" : "2014-12-11T23:07:42.235Z",
            "to" : null,
            "include_lower" : false,
            "include_upper" : true
          }
        }
      } ]
    }
  },
  "sort" : [ {
    "releaseNormal" : {
      "order" : "desc"
    }
  }, {
    "_score" : {
      "order" : "desc"
    }
  } ]
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
public List<Content> fetchContentForPlacements(Date curDate, String relString, String pageId, Metadata meta) throws ContentServiceException { | |
// check for required parameters | |
if (pageId == null) { | |
throw new MissingParameterException(Constants.MSG_PAGE_ID_REQUIRED); | |
} | |
// default the current date to now if it wasn't provided | |
if (curDate == null) { | |
curDate = new Date(); | |
} | |
// normalize the release string (and default it if it wasn't provided) | |
Long release = 0L; | |
if (relString != null) { | |
release = normalizeRelease(relString); // returns a Long | |
} | |
String index = getDeliveryIndex(); | |
// BUILD THE QUERY | |
// Need two sets of filters: | |
// * Must filters for pageID and date range | |
// * Terms filters with a factorFunction for everything else | |
// build a list of filters to apply | |
List<FilterBuilder> filtersToApply = new ArrayList<FilterBuilder>(); | |
// must have the page ID | |
FilterBuilder pageIdFilter = FilterBuilders.termFilter(Constants.PROP_PAGE_ID, pageId); | |
filtersToApply.add(pageIdFilter); | |
// current release | |
FilterBuilder relFilter = FilterBuilders.rangeFilter(Constants.PROP_RELEASE_NORMAL).lte(release); | |
filtersToApply.add(relFilter); | |
// effective date filter | |
FilterBuilder pubDateFilter = FilterBuilders.rangeFilter(Constants.PROP_PUB_DATE).lte(curDate); | |
filtersToApply.add(pubDateFilter); | |
FilterBuilder expDateFilter = FilterBuilders.rangeFilter(Constants.PROP_EXP_DATE).gt(curDate); | |
filtersToApply.add(expDateFilter); | |
// if placementIds were passed in, treat those as a must | |
if (meta != null && meta.getPlacementId() != null) { | |
filtersToApply.add(FilterBuilders.termsFilter(Constants.PROP_PLACEMENT_ID, meta.getPlacementId())); | |
} | |
// if isDefault was passed in, treat that as a must | |
if (meta != null && meta.isDefault() != null) { | |
filtersToApply.add(FilterBuilders.termFilter(Constants.PROP_IS_DEFAULT, meta.isDefault())); | |
} | |
// combine all of the filters | |
BoolFilterBuilder boolFilter = FilterBuilders.boolFilter(); | |
for (FilterBuilder filter : filtersToApply) { | |
boolFilter.must(filter); | |
} | |
// now build the function score filter | |
// for the optional arguments that may have been passed in | |
FunctionScoreQueryBuilder fsqb = QueryBuilders.functionScoreQuery(); | |
// add an "all" boost for all searchable metadata. this helps the "all" | |
// fragments fill in before non-all frags | |
fsqb.add(FilterBuilders.termFilter(Constants.PROP_LOGIN_STATE, Constants.ALL), factorFunction(2.0f)); | |
fsqb.add(FilterBuilders.termFilter(Constants.PROP_GEO_CODE, Constants.ALL), factorFunction(2.0f)); | |
fsqb.add(FilterBuilders.termFilter(Constants.PROP_CHANNEL, Constants.ALL), factorFunction(2.0f)); | |
fsqb.add(FilterBuilders.termFilter(Constants.PROP_KEYWORD, Constants.ALL), factorFunction(2.0f)); | |
// now add a higher boost for the optional metadata. this makes the | |
// requested context sort higher | |
if (meta != null) { | |
if (meta.getLoginState() != null) { | |
fsqb.add(FilterBuilders.termsFilter(Constants.PROP_LOGIN_STATE, meta.getLoginState()), factorFunction(3.0f)); | |
} | |
if (meta.getGeoCode() != null) { | |
fsqb.add(FilterBuilders.termsFilter(Constants.PROP_GEO_CODE, meta.getGeoCode()), factorFunction(3.0f)); | |
} | |
if (meta.getChannel() != null) { | |
fsqb.add(FilterBuilders.termsFilter(Constants.PROP_CHANNEL, meta.getChannel()), factorFunction(3.0f)); | |
} | |
if (meta.getKeyword() != null) { | |
fsqb.add(FilterBuilders.termsFilter(Constants.PROP_KEYWORD, meta.getKeyword()), factorFunction(3.0f)); | |
} | |
} | |
// set the scoreMode to sum | |
fsqb.scoreMode("sum"); | |
// do the search | |
SearchResponse response = null; | |
try { | |
SearchRequestBuilder srb = getReadClient().prepareSearch(index) | |
.setSearchType(SearchType.QUERY_AND_FETCH) | |
.setQuery(fsqb) | |
.setPostFilter(boolFilter) | |
.addSort(Constants.PROP_RELEASE_NORMAL, SortOrder.DESC) | |
.addSort("_score", SortOrder.DESC) | |
.setSize(getPlacementSize()); | |
if (logger.isDebugEnabled()) { logger.debug(srb.toString()); } | |
response = srb | |
.execute() | |
.actionGet(); | |
} catch (ElasticsearchException ee) { | |
throw new ContentServiceException(ee.getMessage()); | |
} | |
SearchHit[] results = response.getHits().getHits(); | |
ArrayList<Content> contentList = new ArrayList<Content>(); | |
for (SearchHit hit : results) { | |
String source = hit.getSourceAsString(); | |
if (source == null) { | |
throw new SourceNotFoundException(Constants.MSG_COULD_NOT_READ_SOURCE); | |
} | |
Content content = null; | |
try { | |
content = this.mapper.readValue(source, Content.class); | |
} catch (IOException ioe) { | |
throw new DataFormatException(ioe.getMessage()); | |
} | |
contentList.add(content); | |
} | |
return contentList; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment