-- Tune reducer parallelism if needed:
-- set hive.exec.reducers.bytes.per.reducer=<number>
-- set hive.exec.reducers.max=<number>
-- set mapred.reduce.tasks=<number>

-- Allow every partition column to be resolved dynamically at insert time.
set hive.exec.dynamic.partition.mode=nonstrict;

-- Enable predicate pushdown so filters are applied as early as possible.
set hive.optimize.ppd=true;

add jar /opt/mapr/hadoop/hadoop-0.20.2/contrib/streaming/hadoop-0.20.2-dev-streaming.jar;
add jar /opt/jars/parquet-hive-bundle-1.2.10.jar;

-- Run jobs at low priority and print column headers in the CLI output.
set mapred.job.priority=VERY_LOW;
set hive.cli.print.header=true;

-- see https://cwiki.apache.org/confluence/display/Hive/Vectorized+Query+Execution
set hive.vectorized.execution.enabled=true;

-- Enforce sort order and bucketing when inserting into bucketed tables.
set hive.enforce.sorting=true;
set hive.enforce.bucketing=true;

-- and perhaps set these, per
-- http://answers.mapr.com/questions/2913/if-maprfs-compresses-files-transparently-is-it-unnecessary-to-compress-output-in-hive
-- set mapred.output.compress=true;
-- set mapred.compress.map.output=true;
-- set mapred.map.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;
-- set mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;
-- set mapred.output.compression.type=BLOCK;
-- set mapreduce.maprfs.use.compression=true;
-- set hive.exec.compress.output=true;
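
-- Usage sketch: the database, table, and column names below are hypothetical
-- and not part of the original gist. With the settings above, a
-- dynamic-partition insert into a bucketed Parquet table could look like the
-- following. Depending on the Hive version you may also need
-- `set hive.exec.dynamic.partition=true;`, and on older versions that rely on
-- the parquet-hive bundle jar the DDL would name the Parquet SerDe and
-- input/output format classes explicitly instead of `stored as parquet`.
--
-- create table if not exists example_db.events_parquet (
--   event_id string,
--   payload  string
-- )
-- partitioned by (dt string)
-- clustered by (event_id) sorted by (event_id) into 32 buckets
-- stored as parquet;
--
-- -- The dynamic partition column (dt) must be the last column selected.
-- insert overwrite table example_db.events_parquet partition (dt)
-- select event_id, payload, dt
-- from example_db.events_raw;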