- Docker build and run
git clone https://github.com/hibuz/ubuntu-docker
cd ubuntu-docker/hadoop
docker compose up hadoop-dev --no-build
- See the Dockerfile for build details.
Hadoop Build Order
# bash
ubuntu-docker$ docker compose build bash-base
# hadoop
ubuntu-docker/hadoop$ docker compose build hadoop-base
ubuntu-docker/hadoop$ docker build -t hibuz/hadoop-dev .
# hbase|spark|hive|flink
ubuntu-docker/hadoop/(hbase|spark|hive|flink)$ docker compose up --build
# flink-base for zeppelin
ubuntu-docker/hadoop/zeppelin$ docker compose build flink-base
# zeppelin
ubuntu-docker/hadoop/zeppelin$ docker compose up --build
docker exec -it hadoop bash
# Make the HDFS directories
hdfs dfs -mkdir -p /user/hadoop/input
# Copy the input files
hdfs dfs -put $HADOOP_HOME/etc/hadoop/*.xml input
# Run example wordcount job:
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar wordcount input output
# View the output files on the distributed filesystem:
hdfs dfs -cat output/*
# Remove the output dir:
hdfs dfs -rm -r output
# Run example grep job:
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar grep input output 'dfs[a-z.]+'
# View the output files on the distributed filesystem:
hdfs dfs -cat output/*
# Result of the output files
1 dfsadmin
1 dfs.replication
docker compose down -v
[+] Running 3/3
✔ Container hbase Removed
✔ Volume hbase_hbase-vol Removed
✔ Network hbase_default Removed
- Hadoop Dashboard: http://localhost:9870
- Yarn Dashboard: http://localhost:8088 (run start-yarn.sh or uncomment command props in docker-compose.yml)
- Hadoop Job History: http://localhost:19888