@guobingkun
Last active November 19, 2015 21:12
package io.druid.client.indexing;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import io.druid.segment.indexing.granularity.GranularitySpec;
import org.joda.time.Interval;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Jackson-serializable description of an "index_hadoop" task: the constructor arguments are
 * folded into a Hadoop ingestion spec (dataSchema + ioConfig + tuningConfig) that is exposed
 * under the "spec" property.
 */
public class ClientHadoopIndexQuery
{
  private final String id;
  private final Map<String, Object> tuningConfig;
  private final List<String> hadoopDependencyCoordinates;
  private final Map<String, Object> hadoopIngestionSpec;

  @JsonCreator
  public ClientHadoopIndexQuery(
      @JsonProperty("id") String id,
      @JsonProperty("dataSource") String dataSource,
      @JsonProperty("intervals") List<Interval> intervalsToReindex,
      @JsonProperty("metricsSpec") AggregatorFactory[] aggregators,
      @JsonProperty("queryGranularity") QueryGranularity queryGranularity,
      @JsonProperty("tuningConfig") Map<String, Object> tuningConfig,
      @JsonProperty("hadoopDependencyCoordinates") List<String> hadoopDependencyCoordinates
  )
  {
    this.id = id;
    this.tuningConfig = tuningConfig;
    this.hadoopDependencyCoordinates = hadoopDependencyCoordinates;

    // Note: the GranularitySpec is stored as a plain Object value inside an untyped map.
    // As the standalone test and its output below suggest, its "type" id can be dropped
    // when such a map is serialized.
    final Map<String, Object> dataSchema = ImmutableMap.<String, Object>builder()
        .put("dataSource", dataSource)
        .put("metricsSpec", aggregators)
        .put(
            "granularitySpec",
            new ArbitraryGranularitySpec(
                queryGranularity,
                intervalsToReindex
            )
        ).build();

    final Map<String, Object> hadoopIOConfig = ImmutableMap.<String, Object>builder().put("type", "hadoop").build();

    this.hadoopIngestionSpec = ImmutableMap.<String, Object>builder()
        .put("dataSchema", dataSchema)
        .put("ioConfig", hadoopIOConfig)
        .put("tuningConfig", tuningConfig)
        .build();
  }
  @JsonProperty
  public String getType()
  {
    return "index_hadoop";
  }

  @JsonProperty
  public String getId()
  {
    return id;
  }

  @JsonProperty
  public List<String> getHadoopDependencyCoordinates()
  {
    return hadoopDependencyCoordinates;
  }

  @JsonProperty
  public Map<String, Object> getTuningConfig()
  {
    return tuningConfig;
  }

  @JsonProperty("spec")
  public Map<String, Object> getHadoopIngestionSpec()
  {
    return hadoopIngestionSpec;
  }
}
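For context, a hypothetical usage sketch (not part of the original gist): it builds a query with made-up values (task id, datasource, a single count metric, a minimal tuningConfig) and prints the JSON that Druid's DefaultObjectMapper would produce. It assumes the usual test imports (org.junit.Test, ObjectMapper, DefaultObjectMapper, ImmutableList, ImmutableMap, CountAggregatorFactory, Interval).

@Test
public void testSerializeClientHadoopIndexQuery() throws Exception
{
  ObjectMapper mapper = new DefaultObjectMapper();

  // All argument values below are illustrative only.
  ClientHadoopIndexQuery query = new ClientHadoopIndexQuery(
      "reindex_example_task",
      "example_datasource",
      ImmutableList.of(Interval.parse("2012-01-01/2012-01-03")),
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      QueryGranularity.NONE,
      ImmutableMap.<String, Object>of("type", "hadoop"),
      null
  );

  // Inspect the JSON that would be posted to the indexing service.
  System.out.println(mapper.writeValueAsString(query));
}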
The following test serializes an ArbitraryGranularitySpec both directly and wrapped in a Map<String, Object>, to compare the two results:

@Test
public void test2() throws JsonProcessingException
{
  ObjectMapper mapper = new DefaultObjectMapper();

  // INTERVAL_LIST is a List<Interval> defined elsewhere in the test class.
  GranularitySpec granularitySpec = new ArbitraryGranularitySpec(QueryGranularity.NONE, INTERVAL_LIST);
  Map<String, Object> granularitySpecMap = ImmutableMap.<String, Object>of("granularitySpec", granularitySpec);

  // Serialize the spec directly, then wrapped inside an untyped map.
  System.out.println(mapper.writeValueAsString(granularitySpec));
  System.out.println(mapper.writeValueAsString(granularitySpecMap));
}
Output:
{"type":"arbitrary","queryGranularity":{"type":"none"},"intervals":["2012-01-01T00:00:00.000Z/2012-01-03T00:00:00.000Z","2012-01-05T00:00:00.000Z/2012-01-08T00:00:00.000Z","2012-01-10T00:00:00.000Z/2012-01-14T00:00:00.000Z"]}
{"granularitySpec":{"queryGranularity":{"type":"none"},"intervals":["2012-01-01T00:00:00.000Z/2012-01-03T00:00:00.000Z","2012-01-05T00:00:00.000Z/2012-01-08T00:00:00.000Z","2012-01-10T00:00:00.000Z/2012-01-14T00:00:00.000Z"]}}