// settings.gradle
rootProject.name = 'hadoop-hdfs-app'

// build.gradle
plugins {
    id 'java'
}

group 'de.example.hadoop.hdfs'
version '1.0-SNAPSHOT'

repositories {
    mavenCentral()
}

dependencies {
    implementation 'org.apache.hadoop:hadoop-common:3.2.1'
    implementation 'org.apache.hadoop:hadoop-hdfs-client:3.2.1'
}

test {
    useJUnitPlatform()
}
package de.example.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class InputReadAndFileWriter {

    public static void main(String[] args) {
        if (args.length != 2) {
            System.err.println("Usage: InputReadAndFileWriter <filename> <content>");
            System.exit(1);
        }

        String filePath = args[0];
        String contents = args[1];

        try {
            Configuration configuration = new Configuration();
            FileSystem hdfs = FileSystem.get(configuration);

            // Check the path and delete it if it already exists
            Path path = new Path(filePath);
            if (hdfs.exists(path)) {
                hdfs.delete(path, true);
                System.out.println("#-#-# " + filePath + " is deleted.");
            }

            // Write the contents as a new file
            FSDataOutputStream outputStream = hdfs.create(path);
            outputStream.writeUTF(contents);
            outputStream.close();

            // Read the file back and print what was saved
            FSDataInputStream inputStream = hdfs.open(path);
            String result = inputStream.readUTF();
            inputStream.close();
            System.out.println("#-#-# Saved contents: " + result);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
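One caveat with the example above: writeUTF() (inherited from java.io.DataOutputStream) stores the string in modified UTF-8 with a two-byte length prefix, so the file is not plain text and hdfs dfs -cat or -head will show two extra bytes before the content. If the file should be readable as plain text, a minimal sketch along the following lines works; the class name PlainTextWriter and its structure are illustrative, not part of the original example.

package de.example.hadoop.hdfs;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative variant: store the content as raw UTF-8 bytes so that
// hdfs dfs -cat / -head print exactly the text that was written.
public class PlainTextWriter {

    public static void main(String[] args) throws Exception {
        Path path = new Path(args[0]);
        String contents = args[1];

        Configuration configuration = new Configuration();
        FileSystem hdfs = FileSystem.get(configuration);

        // create(path, true) overwrites an existing file, replacing the explicit exists/delete check
        try (FSDataOutputStream out = hdfs.create(path, true)) {
            out.write(contents.getBytes(StandardCharsets.UTF_8));
        }

        // Read the whole file back; the length comes from the file status
        byte[] data = new byte[(int) hdfs.getFileStatus(path).getLen()];
        try (FSDataInputStream in = hdfs.open(path)) {
            in.readFully(data);
        }
        System.out.println("#-#-# Saved contents: " + new String(data, StandardCharsets.UTF_8));
    }
}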
Connect to the primary node and create the target directory.
mkdir -p /home/hadoop/example/jars
Copy the local jar to the primary node with scp.
scp -i $key $project_path/build/libs/hadoop-hdfs-app-1.0-SNAPSHOT.jar hadoop@$primary_node:~/example/jars/.
Run the application on the cluster with hadoop jar.
hadoop jar $jar_file $main_classname $args
hadoop jar hadoop-hdfs-app-1.0-SNAPSHOT.jar de.example.hadoop.hdfs.InputReadAndFileWriter /data/example/input.txt 'Hello world, hello hdfs!'
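When launched through hadoop jar on the cluster, FileSystem.get(configuration) resolves fs.defaultFS from the cluster's core-site.xml, which is why the bare path /data/example/input.txt ends up in HDFS. To run the same kind of client from outside the cluster, the NameNode URI can be passed explicitly; a minimal sketch, assuming the placeholder address hdfs://namenode-host:8020 (the class name is also illustrative):

package de.example.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative: connect to an explicit NameNode instead of relying on the
// core-site.xml picked up from the hadoop jar classpath.
public class ExplicitNameNodeCheck {

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        // Placeholder address; replace with the cluster's actual fs.defaultFS value.
        FileSystem hdfs = FileSystem.get(URI.create("hdfs://namenode-host:8020"), configuration);

        Path path = new Path("/data/example/input.txt");
        System.out.println("#-#-# exists? " + hdfs.exists(path));
    }
}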
Check the input.txt file with the hdfs commands.
hdfs dfs -ls /data/example
hdfs dfs -head /data/example/input.txt
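The same check can also be done through the client API. A minimal sketch that mirrors hdfs dfs -ls (the class name ListExampleDir is illustrative):

package de.example.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative: list /data/example and print each entry, similar to hdfs dfs -ls.
public class ListExampleDir {

    public static void main(String[] args) throws Exception {
        FileSystem hdfs = FileSystem.get(new Configuration());

        for (FileStatus status : hdfs.listStatus(new Path("/data/example"))) {
            System.out.println((status.isDirectory() ? "d" : "-") + "\t"
                    + status.getLen() + "\t" + status.getPath());
        }
    }
}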