Last active
August 29, 2015 14:08
-
-
Save cjmatta/3bdb5196c82f06ea54ee to your computer and use it in GitHub Desktop.
configureOozie_mapr.sh
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Copyright (C) 2013 by Teradata Corporation.
# All Rights Reserved.
#
# This script installs tdch for Oozie transfers with Hadoop
#
# Version : $Id$
# MapR Notes
# Since MapR doesn't need a nameNode, we've removed it from this script
# It's also useful to run this on the host you have installed Oozie, and Hive on
# as it's trying to copy libraries from the oozie install (commons-dbcp and commons-pool)
# NOTE(review): this USAGE string is never printed (showUsageAndExit builds its
# own message) and still advertises the nn=/jt=/nnPort= options that the
# argument parser below rejects -- it is stale and kept only for reference.
USAGE="Usage: $0 oozie=oozieHost [nn=maprfs://] [jt=maprfs://] [nnPort=nameNodePortNum] [jtPort=jobTrackerPortNum] [ooziePort=ooziePortNum] [webhcatPort=webhcatPortNum] [webhdfsPort=webhdfsPortNum] [hiveClientMetastorePort=hiveClientMetastorePortNum]"
# Pull in site-specific environment defaults when they exist.
if [[ -e /etc/default/hadoop ]]
then
source /etc/default/hadoop
fi
if [[ -e /etc/default/hcatalog ]]
then
source /etc/default/hcatalog
fi
# find_home BIN_PATH
# Resolve a tool's install home from the path of its launcher binary
# (e.g. /usr/bin/hive -> "<dir of binary>/../"). Follows one level of
# symlink before taking the directory.
# Outputs: the home directory path (ending in "/../") on stdout.
function find_home() {
# BUGFIX: was target="$2", but every call site passes a single argument,
# which left target empty and made the function return "./../".
target="$1"
# file exists and is a symbolic link
if [ -h "$target" ]
then
target=$(readlink "$target")
fi
# find home
home_dir=$(dirname "$target") # points to bin dir
home_dir="$home_dir/../"
echo "$home_dir"
}
# Derive product install homes from their launcher binaries.
HIVE_HOME=$(find_home "/usr/bin/hive")
HCAT_PREFIX="$HIVE_HOME/hcatalog"
HADOOP_HOME=$(find_home "/usr/bin/hadoop")
# MapR installs the oozie launcher under /opt/mapr/oozie; locate it via find.
# Quote the substitution (no word-splitting) and take the first match only --
# an unquoted multi-file result was passed as several arguments before.
OOZIE_HOME=$(find_home "$(find /opt/mapr/oozie -type f -name "oozie" 2> /dev/null | head -1)")
# Print the full usage text (option list plus per-option descriptions) and
# terminate the script.
# $1 - exit status to leave with.
# NOTE(review): the defaults quoted in this text (jtPort 50300, webhdfsPort
# 50070) do not match the values actually assigned below -- confirm upstream.
function showUsageAndExit {
local usage_head="Usage: $0"
local usage_body=$'oozie=oozieHost [nnPort=nameNodePortNum] [jtPort=jobTrackerPortNum] [ooziePort=ooziePortNum] [webhcatPort=webhcatPortNum] [webhdfsPort=webhdfsPortNum] [hiveClientMetastorePort=hiveClientMetastorePortNum]\n\n oozie - The Oozie host name \n nnPort - The Name node port number (8020 if omitted)\n jtPort - The Job Tracker port number (50300 if omitted)\n ooziePort - The Oozie port number (11000 if omitted)\n webhcatPort - The WebHCatalog port number (50111 if omitted)\n webhdfsPort - The WebHDFS port number (50070 if omitted)\n hiveClientMetastorePort - The URI port for hive client to connect to metastore server (9083 if omitted)'
echo "$usage_head" "$usage_body"
exit $1
}
# Regex matching a string made up entirely of digits.
numexpr='^[0-9]+$'
# Validate that $1 is a non-empty, all-digit port value.
# $1 - candidate port; $2 - parameter name used in the error message.
# Prints a diagnostic and exits (via showUsageAndExit 1) on bad input.
function testPortNumber {
if [[ $1 =~ $numexpr ]]
then
return 0
fi
echo Invalid port number $1 for parameter $2
showUsageAndExit 1
}
# Verify that host $1 answers a single ICMP ping; $2 names the parameter
# being validated (for the error message). Exits 2 when unreachable.
function testhost {
if ! ping -c 1 "$1" > /dev/null 2>&1
then
echo Host defined by parameter $2 could not be reached.
exit 2
fi
}
# Fold a dotted version string (e.g. "1.3.2") into a sortable integer:
# major*1000000 + minor*1000. Sets globals part0, part1 and intvalue.
function makeint {
IFS=. read -r part0 part1 _ <<< "$1"
intvalue=$(( $part0*1000000 + $part1*1000 ))
}
# Locate the newest Teradata Connector for Hadoop jar in
# /usr/lib/tdch/1.3/lib and record it in globals:
#   connectorjar         - path of the chosen jar
#   connectorversion     - its major.minor encoded as an integer (via makeint)
#   fullconnectorversion - the full version string (e.g. "1.3.2")
# Exits 3 when no connector jar is installed, 4 when the newest is < 1.3.0.
function findconnector {
numconnectors=`ls -1 /usr/lib/tdch/1.3/lib/teradata-connector-*.jar 2> /dev/null | wc -l`
if [ $numconnectors -eq 0 ] ;
then
echo "ERROR: The Teradata Connector for Hadoop needs to be installed in /usr/lib/tdch"
exit 3
else
connectorversion=0
connectorjar=""
for f in /usr/lib/tdch/1.3/lib/teradata-connector-*.jar
do
# Fall back to the first jar seen, in case no file name matches the
# version-extracting pattern below.
if [ -z "$connectorjar" ] ;
then
connectorjar=$f
fi
# fullversionpart: complete version text (e.g. "1.3.2");
# versionpart: just major.minor, which makeint turns into an integer.
fullversionpart=`echo $f | sed -e 's/^.*teradata-connector-\([0-9][0-9]*\.[0-9][0-9]*.*\)\.jar$/\1/'`
versionpart=`echo $f | sed -e 's/^.*teradata-connector-\([0-9][0-9]*\.[0-9][0-9]*\).*$/\1/'`
# sed leaves its input unchanged on no match, so inequality here means a
# real version was extracted from the file name.
if [ "$f" != "$versionpart" ] ;
then
makeint $versionpart
if [ $intvalue -gt $connectorversion ] ;
then
fullconnectorversion=$fullversionpart
connectorversion=$intvalue
connectorjar=$f
fi
fi
done
# 1003000 == makeint 1.3: require connector 1.3.0 or newer.
if [ $connectorversion -lt 1003000 ] ;
then
echo "ERROR: The version of the Teradata Connector for Hadoop jar in /usr/lib/tdch needs to be 1.3.0"
exit 4
fi
fi
}
findconnector
# Locate a JDK "jar" command: standard path first, then the sandbox JDK
# layout, then the TDH JDK layout. Exits 5 when none is found.
# Quoted tests replace the old unquoted [ -z $jarcmd ], which only worked by
# accident when the variable was empty and broke on paths with spaces.
#jarcmd=`ls -1 /usr/hadoop-jdk*/bin/jar 2> /dev/null | tail -1`
jarcmd=$(ls -1 /usr/bin/jar 2> /dev/null | tail -1)
if [ -z "$jarcmd" ]
then
#try sandbox location of JDK
jarcmd=$(ls -1 /usr/jdk*/jdk*/bin/jar 2> /dev/null | tail -1)
if [ -z "$jarcmd" ]
then
#try TDH location
jarcmd=$(ls -1 /opt/teradata/jvm64/jdk6/bin/jar 2> /dev/null | tail -1)
if [ -z "$jarcmd" ]
then
echo "ERROR: No Hadoop JDK jar command found"
exit 5
fi
fi
fi
# Default service endpoints and ports (MapR: nameNode/jobTracker are maprfs://).
nameNode="maprfs://"
jobTracker="maprfs://"
oozieServer=""
jtPort=9001
ooziePort=11000
webhcatPort=50111
webhdfsPort=8080
hiveClientMetastorePort=9083
# BUGFIX: use an arithmetic comparison -- [[ $# < 1 ]] compares lexically --
# and name the parameter that is actually required: "oozie". (The old message
# said "nn", a parameter that was removed for MapR and is rejected below.)
if (( $# < 1 ))
then
echo No parameters specified, parameter \"oozie\" is required.
showUsageAndExit 6
fi
# Parse name=value command-line parameters.
# BUGFIX: (( $# > 0 )) replaces the lexical [[ $# > 0 ]] string comparison,
# and the value is split on the FIRST '=' only, so values may themselves
# contain '=' (the old tr-based split silently truncated such values).
while (( $# > 0 )); do
# Reject arguments that lack the name=value shape outright.
if [[ $1 != *=* ]]
then
echo Unexpected parameter: $1
showUsageAndExit 7
fi
part0=${1%%=*}
part1=${1#*=}
if [ -z "$part1" ]
then
echo Unexpected parameter: $1
showUsageAndExit 7
fi
case "$part0" in
"oozie")
oozieServer=$part1
;;
"jtPort")
jtPort=$part1
;;
"ooziePort")
ooziePort=$part1
;;
"webhcatPort")
webhcatPort=$part1
;;
"webhdfsPort")
webhdfsPort=$part1
;;
"hiveClientMetastorePort")
hiveClientMetastorePort=$part1
;;
*)
echo Unexpected parameter: $1
showUsageAndExit 8
;;
esac
shift
done
# Required-parameter check, then validate every host/port parsed above.
if [ -z "$oozieServer" ]
then
echo Missing \"oozie\" \(Oozie host\) parameter
showUsageAndExit 9
fi
testhost "$oozieServer" oozie
testPortNumber "$jtPort" jtPort
testPortNumber "$ooziePort" ooziePort
testPortNumber "$webhcatPort" webhcatPort
testPortNumber "$webhdfsPort" webhdfsPort
testPortNumber "$hiveClientMetastorePort" hiveClientMetastorePort
#user='hdfs'
user='mapr'
# NOTE(review): this overwrites the shell-maintained PWD variable, and every
# later "cd" rewrites PWD again -- so a later "cd $PWD" cannot restore this
# directory. Kept byte-for-byte for compatibility; do not rely on it.
PWD=$(pwd)
echo "Verifying cluster is up (dfsadmin -report)"
if ! su - $user -c "hadoop dfsadmin -report"
then
echo "ERROR: Please Verify Hadoop is up and running"
exit 10
fi
# Jar inventories consulted later for distro detection / optional copies.
hivebuiltins=$(ls -1 $HIVE_HOME/lib/hive-builtins-*.jar 2> /dev/null)
mysqljars=$(ls -1 $HIVE_HOME/lib/mysql-connector-*.jar 2> /dev/null)
echo "Creating teradata directories in hdfs"
# Clear artifacts from any previous run (rmr of a missing path fails
# harmlessly, exactly as the individual commands did before).
for stale in /teradata/hadoop/lib /teradata/tdch/services.json /teradata/tdch/1.3
do
su - $user -c "hadoop fs -rmr $stale"
done
# Build the directory tree parent-first; the final two entries
# (jobOutputFolder, hive) are the Data Mover additions.
for dir in \
/teradata \
/teradata/hadoop \
/teradata/hadoop/lib \
/teradata/tdch \
/teradata/tdch/1.3 \
/teradata/tdch/1.3/lib \
/teradata/tdch/1.3/oozieworkflows \
/teradata/tdch/1.3/oozieworkflows/oozieexport \
/teradata/tdch/1.3/oozieworkflows/oozieimport \
/teradata/tdch/1.3/oozieworkflows/ooziehadooptoteradata \
/teradata/tdch/1.3/oozieworkflows/oozieteradatatoexistinghadoop \
/teradata/tdch/1.3/oozieworkflows/oozieteradatatonewhadoop \
/teradata/tdch/1.3/oozieworkflows/jobOutputFolder \
/teradata/tdch/1.3/oozieworkflows/hive
do
su - $user -c "hadoop fs -mkdir $dir"
done
echo "Copying hive jars into /teradata/hadoop/lib"
# Client-side Hive/Hadoop/Oozie artifacts the workflows need in MapR-FS.
# Glob patterns stay literal here and are expanded by the shell "su" spawns,
# exactly as in the individual commands this loop replaces. The last two
# entries (hive-service, jline) are the Data Mover additions.
hadooplibs=(
"$HIVE_HOME/conf/hive-site.xml"
"$HADOOP_HOME/lib/slf4j-api-*.jar"
"$HIVE_HOME/lib/libthrift-*.jar"
"$HIVE_HOME/lib/libfb303-*.jar"
"$HIVE_HOME/lib/hive-metastore-*.jar"
"$HIVE_HOME/lib/hive-exec-*.jar"
"$HIVE_HOME/lib/hive-cli-*.jar"
"$HIVE_HOME/lib/datanucleus-rdbms-*.jar"
"$HIVE_HOME/lib/datanucleus-core-*.jar"
"$OOZIE_HOME/libtools/commons-pool-*.jar"
"$OOZIE_HOME/libtools/commons-dbcp-*.jar"
"$HIVE_HOME/lib/antlr-runtime-*.jar"
"$HIVE_HOME/lib/datanucleus-api-jdo-*.jar"
"$HIVE_HOME/lib/hive-service-*.jar"
"$HIVE_HOME/lib/jline-*.jar"
)
# jdo2-api was historically copied too; left disabled as in the original:
# su - $user -c "hadoop fs -put $HIVE_HOME/lib/jdo2-api-*.jar /teradata/hadoop/lib"
for lib in "${hadooplibs[@]}"
do
su - $user -c "hadoop fs -put $lib /teradata/hadoop/lib"
done
# Distro detection: older Hive layouts ship hive-builtins; newer ones use
# hive-hcatalog-core. BUGFIX: quote the test -- $hivebuiltins can hold several
# newline-separated paths, which made the unquoted [ -z ... ] raise a
# "too many arguments" error (it fell into the right branch only by accident).
if [ -z "$hivebuiltins" ]
then
su - $user -c "hadoop fs -put $HCAT_PREFIX/share/hcatalog/hive-hcatalog-core-*.jar /teradata/hadoop/lib"
distVersion=1.3.2
else
su - $user -c "hadoop fs -put $HCAT_PREFIX/share/hcatalog/hive-hcatalog-[0-9]*.jar /teradata/hadoop/lib"
su - $user -c "hadoop fs -put $HIVE_HOME/lib/hive-builtins-*.jar /teradata/hadoop/lib"
distVersion=1.1
fi
# Ship the MySQL JDBC driver only when Hive actually bundles one.
if [ -n "$mysqljars" ]
then
su - $user -c "hadoop fs -put $HIVE_HOME/lib/mysql-connector-*.jar /teradata/hadoop/lib"
fi
echo "Copying Teradata Connector"
su - $user -c "hadoop fs -put $connectorjar /teradata/tdch/1.3/lib"
# Unpack the connector jar so its bundled libs can be pushed individually.
setupconnector=/tmp/setupconnector
rm -rf $setupconnector
mkdir $setupconnector
# Save the starting directory ourselves: the script's earlier PWD=$(pwd) is
# clobbered by cd (cd rewrites PWD), so "cd $PWD" here never went back.
savedir=$(pwd)
cd $setupconnector
${jarcmd} -xf $connectorjar
su - $user -c "hadoop fs -put $setupconnector/lib/* /teradata/tdch/1.3/lib"
cd "$savedir"
# BUGFIX: original ran "rm -rf $setupsconnector" (note the typo), which
# expanded to nothing and leaked /tmp/setupconnector.
rm -rf $setupconnector
# Emit the Hadoop-services descriptor consumed by the Oozie workflow tooling.
# The heredoc delimiter is deliberately NOT quoted: $distVersion,
# $fullconnectorversion, $oozieServer and the port variables must expand here.
cat<< XXX > /tmp/services.json
{
"Distribution":"HDP",
"DistributionVersion":"$distVersion",
"TeradataConnectorForHadoopVersion":"$fullconnectorversion",
"WebHCatalog":"$oozieServer",
"WebHCatalogPort":$webhcatPort,
"WebHDFS":"$oozieServer",
"WebHDFSPort":$webhdfsPort,
"JobTracker":"$jobTracker",
"JobTrackerPort":$jtPort,
"NameNode":"$nameNode",
"Oozie":"$oozieServer",
"OoziePort":$ooziePort,
"HiveClientMetastorePort":$hiveClientMetastorePort
}
XXX
# NOTE(review): "Distribution" still says HDP even though this variant targets
# MapR -- presumably downstream tooling keys on that value; confirm before
# changing it.
echo The following is the specification of the Hadoop services used by the Oozie workflows:
cat /tmp/services.json
echo
echo
# Oozie workflow template: Hive -> Teradata export (whole table).
# The delimiter is quoted ('XXX') so every ${...} Oozie EL expression is
# written literally, without the error-prone backslash escaping used before.
cat <<'XXX' > /tmp/exportworkflow.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataExportTool-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorExportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libjars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${jdbcURL}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-batchsize</arg>
<arg>10000</arg>
<arg>-sourcedatabase</arg>
<arg>${sourceTableDatabase}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveconf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobOutputFile}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Oozie workflow template: Teradata -> Hive import (new table, schema given).
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/importworkflow.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportTool-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libjars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${jdbcURL}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-batchsize</arg>
<arg>10000</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumnName}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targettableschema</arg>
<arg>${targetTableSchema}</arg>
<arg>-targetdatabase</arg>
<arg>${targetTableDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveconf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobOutputFile}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Oozie workflow template: Hive -> Teradata export restricted to named fields.
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziehadooptoteradata.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataExportToolByFields-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorExportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libjars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${jdbcURL}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-batchsize</arg>
<arg>10000</arg>
<arg>-sourcedatabase</arg>
<arg>${sourceTableDatabase}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceColumnNames}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetColumnNames}</arg>
<arg>-hiveconf</arg>
<arg>${hiveconf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobOutputFile}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Oozie workflow template: Teradata -> existing Hive table (field mapping).
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/oozieteradatatoexistinghadoop.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolToExisting-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libjars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${jdbcURL}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-batchsize</arg>
<arg>10000</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumnName}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-targetdatabase</arg>
<arg>${targetTableDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-hiveconf</arg>
<arg>${hiveconf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobOutputFile}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Oozie workflow template: Teradata query -> new Hive table.
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/oozieteradatatonewhadoop.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libjars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${jdbcURL}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-batchsize</arg>
<arg>10000</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumnName}</arg>
<arg>-sourcequery</arg>
<arg>${sourceQuery}</arg>
<arg>-targettableschema</arg>
<arg>${targetTableSchema}</arg>
<arg>-targetdatabase</arg>
<arg>${targetTableDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveconf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobOutputFile}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
#DATA MOVER WORKFLOW FILES
#Import Files
# Data Mover: Teradata -> existing Hive table (no -separator arg here).
# NOTE(review): workflow-app name duplicates "TeradataImportToolByQuery-wf"
# from the template above -- preserved as-is; confirm whether intentional.
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziedmteradatatohadooptableexists.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-batchsize</arg>
<arg>${batchSize}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targetdatabase</arg>
<arg>${targetDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumn}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Data Mover: Teradata -> new non-PPI Hive table (schema supplied).
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziedmteradatatohadoopnewtablenonppi.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-batchsize</arg>
<arg>${batchSize}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targettableschema</arg>
<arg>${targetTableSchema}</arg>
<arg>-targetdatabase</arg>
<arg>${targetDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumn}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Data Mover: Teradata -> new PPI Hive table (adds -targetpartitionschema).
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziedmteradatatohadoopnewtableppi.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-batchsize</arg>
<arg>${batchSize}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targettableschema</arg>
<arg>${targetTableSchema}</arg>
<arg>-targetdatabase</arg>
<arg>${targetDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-targetpartitionschema</arg>
<arg>${targetPartitionSchema}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumn}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Data Mover: Teradata -> new Hive table, partial copy (-sourceconditions).
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziedmteradatatohadoopnewtablepartial.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-batchsize</arg>
<arg>${batchSize}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targettableschema</arg>
<arg>${targetTableSchema}</arg>
<arg>-targetdatabase</arg>
<arg>${targetDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-sourceconditions</arg>
<arg>${sourceConditions}</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumn}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Data Mover: Teradata -> existing Hive table, partial copy with field maps.
# Quoted delimiter: ${...} EL expressions are literal, no backslash escapes.
cat <<'XXX' > /tmp/ooziedmteradatatohadooptableexistpartial.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataImportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorImportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-batchsize</arg>
<arg>${batchSize}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-targetdatabase</arg>
<arg>${targetDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-sourceconditions</arg>
<arg>${sourceConditions}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-splitbycolumn</arg>
<arg>${splitByColumn}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
#Export Files | |
# Data Mover export workflow: TDCH ConnectorExportTool, Hive -> existing Teradata table.
# The delimiter is quoted ('XXX') so the Oozie EL expressions ${...} are written
# literally without needing backslash escapes.
cat <<'XXX' > /tmp/ooziedmhadooptoteradatatableexists.xml
<!--
Copyright (C) 2013 by Teradata Corporation. All rights reserved.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="TeradataExportToolByQuery-wf">
<start to="java-node"/>
<action name="java-node">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.child.java.opts</name>
<value>-Djava.security.egd=file:/dev/./urandom</value>
</property>
</configuration>
<main-class>com.teradata.connector.common.tool.ConnectorExportTool</main-class>
<java-opts>-Xmx1024m -Djava.security.egd=file:/dev/./urandom</java-opts>
<arg>-libjars</arg>
<arg>${libJars}</arg>
<arg>-jobtype</arg>
<arg>hive</arg>
<arg>-fileformat</arg>
<arg>${fileFormat}</arg>
<arg>-separator</arg>
<arg>${separator}</arg>
<arg>-classname</arg>
<arg>com.teradata.jdbc.TeraDriver</arg>
<arg>-url</arg>
<arg>${url}</arg>
<arg>-username</arg>
<arg>${userName}</arg>
<arg>-password</arg>
<arg>${password}</arg>
<arg>-method</arg>
<arg>${method}</arg>
<arg>-nummappers</arg>
<arg>${numMappers}</arg>
<arg>-sourcetable</arg>
<arg>${sourceTable}</arg>
<arg>-sourcedatabase</arg>
<arg>${sourceDatabase}</arg>
<arg>-targettable</arg>
<arg>${targetTable}</arg>
<arg>-hiveconf</arg>
<arg>${hiveConf}</arg>
<arg>-jobclientoutput</arg>
<arg>${jobClientOutput}</arg>
<arg>-targetfieldnames</arg>
<arg>${targetFieldNames}</arg>
<arg>-sourcefieldnames</arg>
<arg>${sourceFieldNames}</arg>
<arg>-usexviews</arg>
<arg>${useXViews}</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Java failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Hive Files | |
# Oozie Hive-action workflow used by Data Mover (runs ${script} against ${database}.${table}).
# Quoted delimiter ('XXX') keeps the ${...} Oozie EL placeholders literal,
# so no backslash escaping is required inside the heredoc.
cat <<'XXX' > /tmp/ooziedmhiveworkflow.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="hive-wf">
<start to="hive-node"/>
<action name="hive-node">
<hive xmlns="uri:oozie:hive-action:0.2">
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<job-xml>${hiveConf}</job-xml>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
</configuration>
<script>${script}</script>
<param>DATABASE=${database}</param>
<param>TABLE=${table}</param>
<param>OUTPUT=${jobClientOutput}</param>
</hive>
<ok to="end"/>
<error to="fail"/>
</action>
<kill name="fail">
<message>Hive failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
XXX
# Parameterized Hive script that truncates ${DATABASE}.${TABLE}; the DATABASE/TABLE
# placeholders are filled in by the Oozie hive action's <param> elements.
# Quoted delimiter ('XXX') writes ${...} literally, so no \$ escapes are needed.
cat <<'XXX' > /tmp/dmhivescripttruncate.sql
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
use ${DATABASE};
truncate table ${TABLE};
XXX
echo "Copy Oozie workflow xml files into hdfs"
# Upload each generated workflow definition into MapR-FS under the TDCH 1.3 layout.
# $user is set earlier in the script; quote it so su never word-splits the name.
su - "$user" -c "hadoop fs -put /tmp/exportworkflow.xml /teradata/tdch/1.3/oozieworkflows/oozieexport/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/importworkflow.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/ooziehadooptoteradata.xml /teradata/tdch/1.3/oozieworkflows/ooziehadooptoteradata/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/oozieteradatatoexistinghadoop.xml /teradata/tdch/1.3/oozieworkflows/oozieteradatatoexistinghadoop/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/oozieteradatatonewhadoop.xml /teradata/tdch/1.3/oozieworkflows/oozieteradatatonewhadoop/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/services.json /teradata/tdch/"
#Data Mover addition
su - "$user" -c "hadoop fs -put /tmp/ooziedmteradatatohadooptableexists.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow_dm_table_exist.xml"
su - "$user" -c "hadoop fs -put /tmp/ooziedmteradatatohadoopnewtablenonppi.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow_dm_table_new_non_ppi.xml"
su - "$user" -c "hadoop fs -put /tmp/ooziedmteradatatohadoopnewtableppi.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow_dm_table_new_ppi.xml"
# BUGFIX: the two "partial" workflows previously had their destinations swapped
# (newtablepartial.xml was uploaded as workflow_dm_table_exist_partial.xml and
# tableexistpartial.xml as workflow_dm_table_new_partial.xml). Corrected so each
# source lands under the matching destination name.
su - "$user" -c "hadoop fs -put /tmp/ooziedmteradatatohadoopnewtablepartial.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow_dm_table_new_partial.xml"
su - "$user" -c "hadoop fs -put /tmp/ooziedmteradatatohadooptableexistpartial.xml /teradata/tdch/1.3/oozieworkflows/oozieimport/workflow_dm_table_exist_partial.xml"
su - "$user" -c "hadoop fs -put /tmp/ooziedmhiveworkflow.xml /teradata/tdch/1.3/oozieworkflows/hive/workflow.xml"
su - "$user" -c "hadoop fs -put /tmp/dmhivescripttruncate.sql /teradata/tdch/1.3/oozieworkflows/hive/script_truncate.q"
su - "$user" -c "hadoop fs -put /tmp/ooziedmhadooptoteradatatableexists.xml /teradata/tdch/1.3/oozieworkflows/oozieexport/workflow_dm_table_exist.xml"
# Workflows must be world-readable for Oozie; the job output folder needs to be
# world-writable so launched jobs can drop results there.
su - "$user" -c "hadoop fs -chmod -R 755 /teradata"
#Data Mover addition
su - "$user" -c "hadoop fs -chmod -R 777 /teradata/tdch/1.3/oozieworkflows/jobOutputFolder"
echo "All Done"
exit 0
# NOTE(review): the following lines are GitHub page boilerplate that was captured
# along with the gist; commented out so they can never be parsed as commands.
# Sign up for free to join this conversation on GitHub.
# Already have an account? Sign in to comment