- Failover to secondary namenode
$ sudo -u hdfs hdfs haadmin -failover nn1 nn2
- Restart nn1 from Ambari
- Fail back to nn1 (makes the original namenode active again)
$ sudo -u hdfs hdfs haadmin -failover nn2 nn1
// Jenkins script-console snippet: for every job, keep only the 3 most recent
// builds and delete the rest. Prints each job name and every deleted build number.
import hudson.model.*

Jenkins.instance.items.each { job ->
    println(job.name)
    def builds = job.builds            // RunList is ordered newest-first
    if (builds.size() > 3) {
        // Everything past index 2 is older than the 3 builds we keep.
        def toBeDeleted = builds.subList(3, builds.size())
        toBeDeleted.each { build ->
            println("\t" + build.number)
            build.delete()
        }
    }
}
#!/usr/bin/env bash
# Download Apache Phoenix 4.12.0 (HBase 1.1 build) and install it under the HDP
# stack directory. Exits immediately if any step (download, extract, copy) fails.
set -euo pipefail

HDP_INSTALL_PATH=/usr/hdp/2.5.5.0-157
PHOENIX_INSTALL_DIR=${HDP_INSTALL_PATH}/apache-phoenix-4.12.0

cd /tmp
# NOTE(review): plain-HTTP third-party mirror, no checksum verification —
# consider https://archive.apache.org plus a SHA512 check.
wget http://redrockdigimark.com/apachemirror/phoenix/apache-phoenix-4.12.0-HBase-1.1/bin/apache-phoenix-4.12.0-HBase-1.1-bin.tar.gz
tar -xvf apache-phoenix-4.12.0-HBase-1.1-bin.tar.gz
cp -r apache-phoenix-4.12.0-HBase-1.1-bin "${PHOENIX_INSTALL_DIR}"
chmod -R 755 "${PHOENIX_INSTALL_DIR}"

# Clean up the tarball and the scratch extraction directory.
rm -rf apache-phoenix-4.12.0-HBase-1.1-bin.tar.gz apache-phoenix-4.12.0-HBase-1.1-bin
from __future__ import print_function | |
import requests | |
from bs4 import BeautifulSoup | |
import xlrd | |
import shutil | |
import os | |
import codecs | |
# Fetch the RBI content page for scraping. requests has NO default timeout, so
# without one a stalled server would hang this script forever.
page = requests.get(
    "https://www.rbi.org.in/Scripts/bs_viewcontent.aspx?Id=2009",
    timeout=30,
)
# NOTE(review): consider page.raise_for_status() here so a 4xx/5xx fails fast
# instead of feeding an error page to BeautifulSoup — confirm with downstream code.
#!/usr/bin/env bash
# Publish the Tez runtime tarball to its well-known HDFS location for this HDP
# version, then lock the permissions down (read/execute only for the tree,
# read-only for the tarball itself). Exits on the first failed hdfs command.
set -euo pipefail

HDP_VERSION=2.6.5.0-292

hdfs dfs -put /usr/hdp/${HDP_VERSION}/apache-tez-0.9.1/share/tez.tar.gz /hdp/apps/${HDP_VERSION}/tez/
hdfs dfs -chown -R hdfs:hadoop /hdp
hdfs dfs -chmod -R 555 /hdp/apps/${HDP_VERSION}/tez
hdfs dfs -chmod -R 444 /hdp/apps/${HDP_VERSION}/tez/tez.tar.gz
# fstab entries for the two HDFS data disks: xfs, access-time updates disabled
# for throughput, dump enabled (1), fsck pass 2 (checked after the root fs).
# NOTE(review): the "nobarrier" xfs mount option is deprecated and was removed
# in Linux 4.19+ — on newer kernels these mounts will fail; confirm the kernel
# version before reusing.
/dev/sdb1 /data/hdp01 xfs defaults,noatime,nodiratime,nobarrier 1 2
/dev/sdc1 /data/hdp02 xfs defaults,noatime,nodiratime,nobarrier 1 2
# Rundeck command-log appender with daily time-based rotation.
# TimeBasedRollingPolicy is only honored by org.apache.log4j.rolling.RollingFileAppender
# (from the apache-log4j-extras jar); the plain org.apache.log4j.RollingFileAppender
# silently ignores rollingPolicy.* settings, so the original config never rotated by date.
log4j.appender.cmd-logger=org.apache.log4j.rolling.RollingFileAppender
log4j.appender.cmd-logger.append=true
log4j.appender.cmd-logger.layout=org.apache.log4j.PatternLayout
log4j.appender.cmd-logger.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c - %m%n
log4j.appender.cmd-logger.File=/var/log/rundeck/command.log
log4j.appender.cmd-logger.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
log4j.appender.cmd-logger.rollingPolicy.ActiveFileName=/var/log/rundeck/command.log
log4j.appender.cmd-logger.rollingPolicy.FileNamePattern=/var/log/rundeck/command-%d{yyyy-MM-dd}.log
# NOTE(review): MaxBackupIndex belongs to the non-extras appender and is not used by
# TimeBasedRollingPolicy; prune old daily files externally (e.g. a cron/tmpwatch job).
log4j.appender.cmd-logger.MaxBackupIndex=7
// Jenkins script-console snippet: for every Maven job that already has a
// LogRotator build discarder, replace it with one that keeps builds for 365
// days (unlimited build count and artifact retention).
import hudson.model.*
import hudson.maven.MavenModuleSet
import hudson.tasks.*

Hudson.instance.items.each {
    if (it instanceof MavenModuleSet) {
        if (it.getBuildDiscarder() != null && it.getBuildDiscarder() instanceof LogRotator) {
            // LogRotator(daysToKeep, numToKeep, artifactDaysToKeep, artifactNumToKeep); -1 = unlimited
            def rotator = new LogRotator(365, -1, -1, -1)
            // Fixed typo: original called setBuildDIscarder, which would throw
            // a MissingMethodException at runtime.
            it.setBuildDiscarder(rotator)
            it.save()  // persist the config change to disk
        }
    }
}
// Jenkins script-console snippet: iterate every job using a Git SCM and collect
// its configured branch names, presumably to check them against validBranches.
// NOTE(review): this snippet is truncated — the body of the inner `each { b -> }`
// closure and all closing braces are missing, so the intended check/action on
// each branch cannot be determined from what is visible here.
import hudson.model.*
import hudson.maven.MavenModuleSet
import hudson.tasks.*
import hudson.plugins.git.GitSCM
//List valid branches that can be built
def validBranches = ["develop","master"]
Hudson.instance.items.each {
if(it.getScm() instanceof GitSCM) {
Set branchNames = []
it.getScm().getBranches().each { b ->
// Jenkins script-console snippet: wipe out the workspace of every
// AbstractProject on the controller, continuing past individual failures.
import jenkins.model.*
import hudson.model.*  // AbstractProject lives in hudson.model, not jenkins.model

Jenkins.instance.getAllItems(AbstractProject.class)
        .each {
            try {
                println("Wiping workspace for " + it.fullName)
                it.doDoWipeOutWorkspace()
            } catch (Exception e) {
                // Keep going for the remaining jobs, but report why this one failed
                // (the original swallowed the reason entirely).
                println("Error wiping workspace for " + it.fullName + ": " + e.message)
            }
        }