这是个列表
- one
- two
这是个有序列表
1. 哈哈
2. 二
// NOTE(review): this block is a garbled extraction fragment, not compilable Java.
// The trailing "| |" tokens are table-cell residue, the Javadoc opened at /** is
// never closed (an import statement even appears inside it), and the class body is
// truncated mid-catch. Recover the original source file before attempting to build.
package com; | |
import java.io.BufferedReader; | |
import java.io.File; | |
import java.io.FileFilter; | |
import java.io.FileReader; | |
import java.io.IOException; | |
/** | |
* @author Administrator |
import java.util.ArrayList; | |
public class RunTimer { | |
// NOTE(review): presumably RunTimer(String tag) starts a tagged timer — the
// constructor is not visible in this fragment; confirm against the full source.
public static void main(String[] args) { | |
RunTimer timer = new RunTimer("Tag"); | |
try { | |
// Sleep 1s so the timer has a measurable interval.
Thread.sleep(1000); | |
} catch (InterruptedException e) { | |
// NOTE(review): should also re-interrupt (Thread.currentThread().interrupt()).
e.printStackTrace(); |
#!/bin/bash
# Install Oracle Java 8 via the webupd8team PPA, download Hadoop 2.9.2, and
# create a dedicated hadoop group/user for it.
# NOTE(review): removed the stray "| |" table-residue tokens that broke the
# pipelines in the extracted copy; commands are otherwise unchanged.
# NOTE(review): the webupd8team PPA and oracle-java8-installer are long
# discontinued — this will fail on current Ubuntu; consider openjdk-8-jdk.

# Tooling needed by the steps below.
sudo apt install -y wget unzip software-properties-common dirmngr

# Register the Java PPA (deb + deb-src) and its signing key.
echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" | \
sudo tee /etc/apt/sources.list.d/webupd8team-java.list
echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" | \
sudo tee -a /etc/apt/sources.list.d/webupd8team-java.list
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886
sudo apt-get update
sudo apt-get -y install oracle-java8-installer

# Fetch the Hadoop 2.9.2 binary distribution.
wget "https://www-us.apache.org/dist/hadoop/common/hadoop-2.9.2/hadoop-2.9.2.tar.gz"

# Dedicated hadoop group plus hduser account; passwd prompts interactively.
# NOTE(review): these run as root in the original (no sudo prefix) — confirm.
groupadd hadoop
useradd -s /bin/bash -d /home/hduser -m hduser -g hadoop
passwd hduser
adduser hduser sudo
su hduser
hello world
hello hadoop
hello java
hello oracle
hello android
hello yarn
hello spark
hello zookeeper
#!/bin/bash
# Bring the system up to date, install base tooling, and register the
# webupd8team Java PPA (deb + deb-src) with its signing key.
# NOTE(review): removed the stray "| |" table-residue tokens that broke the
# pipelines in the extracted copy; commands are otherwise unchanged.
sudo apt update
sudo apt -y dist-upgrade
sudo apt install -y wget gedit git subversion unzip software-properties-common dirmngr
echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" | \
sudo tee /etc/apt/sources.list.d/webupd8team-java.list
echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" | \
sudo tee -a /etc/apt/sources.list.d/webupd8team-java.list
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886
sudo apt-get update
// NOTE(review): truncated extraction fragment with "| |" table residue — the body
// is cut off inside the second catch clause, so the closing braces and any
// fallback return are missing from this view. Not compilable as shown.
// Parses a hex string (spaces allowed) into bytes and reflectively constructs a
// deviceAuthClass instance via its IoBuffer constructor.
private static <T> T hex2Bean(String str, Class<T> deviceAuthClass) { | |
try { | |
// Strip spaces so "AA BB CC" style input parses as one hex number.
str = str.replace(" ", ""); | |
// NOTE(review): BigInteger.toByteArray may prepend a sign byte or drop leading
// zero bytes — verify this matches the wire format the IoBuffer ctor expects.
byte[] array = new BigInteger(str, 16).toByteArray(); | |
IoBuffer buffer = IoBuffer.wrap(array); | |
Constructor<T> cons = deviceAuthClass.getConstructor(IoBuffer.class); | |
return cons.newInstance(buffer); | |
} catch (NoSuchMethodException e) { | |
// NOTE(review): swallowing reflection failures with printStackTrace hides the
// cause from callers — prefer wrapping in a runtime exception.
e.printStackTrace(); | |
} catch (IllegalAccessException e) { |
// NOTE(review): truncated extraction fragment with "| |" table residue — the
// while-loop body, reader cleanup, and return statement are cut off in this view.
// Reads a text file from line `startLine` onward into a single string
// (presumably — only the setup and loop header are visible here).
public static String readStr(String filepath, int startLine) { | |
File file = new File(filepath); | |
// Missing file: return empty string rather than throwing.
if (!file.exists()) return ""; | |
// Clamp so callers passing 0 or negatives start at the first line.
startLine = Math.max(startLine, 1); | |
LineNumberReader reader = null; | |
StringBuilder sb = new StringBuilder(); | |
String line; | |
try { | |
// NOTE(review): FileReader uses the platform charset pre-Java 18 — confirm the
// intended encoding (UTF-8?) and prefer Files.newBufferedReader.
reader = new LineNumberReader(new FileReader(file)); | |
while ((line = reader.readLine()) != null) { |
#!/usr/bin/env bash
# Install Spark 2.4.0 (without-hadoop build) into ./spark and wire it to the
# local Hadoop 2.9.2 classpath.
# NOTE(review): removed the stray "| |" table-residue tokens; added -f to rm so a
# missing previous install does not produce an error on first run.
rm -rf ./spark ./spark-2.4.0-bin-without-hadoop.tgz*
wget https://archive.apache.org/dist/spark/spark-2.4.0/spark-2.4.0-bin-without-hadoop.tgz
tar -zxf spark-2.4.0-bin-without-hadoop.tgz -C . && rm spark-2.4.0-bin-without-hadoop.tgz
mv ./spark-2.4.0-bin-without-hadoop/ ./spark
sudo chown -R hduser:hadoop ./spark
cp ./spark/conf/spark-env.sh.template ./spark/conf/spark-env.sh
# Point Spark at Hadoop's jars; $(hadoop classpath) expands when spark-env.sh runs.
sed -i '1i\export SPARK_DIST_CLASSPATH=$(~/hadoop-2.9.2/bin/hadoop classpath)' ./spark/conf/spark-env.sh
echo "export SPARK_HOME=/home/hduser/spark" >> ~/.bashrc
# NOTE(review): the trailing ':' leaves an empty PATH entry (current directory) —
# consider dropping it; kept byte-identical here to preserve behavior.
echo "export PATH=\$PATH:\${SPARK_HOME}/bin:" >> ~/.bashrc