1. pom.xml configuration
<!-- Build properties -->
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <hadoop.version>3.1.3</hadoop.version>
</properties>
<!-- Dependencies -->
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
</dependencies>
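Since FileSystem implements java.io.Closeable, the handle cleanup done in the finally blocks of the examples below can also be written with try-with-resources. A minimal sketch, assuming the same NameNode address used throughout this article (the class name HdfsQuickCheck and the existence check on "/" are only for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class HdfsQuickCheck {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // NameNode address reused from the examples below; adjust to your cluster
        conf.set("fs.defaultFS", "hdfs://192.168.50.102:9000");
        // FileSystem implements Closeable, so try-with-resources releases the handle automatically
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println("/ exists: " + fs.exists(new Path("/")));
        }
    }
}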
2. Creating and deleting files
// Imports
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public static void main(String[] args) {
    // Initialize the Hadoop Configuration object
    Configuration conf = new Configuration();
    // Point the client at the HDFS NameNode
    conf.set("fs.defaultFS", "hdfs://192.168.50.102:9000");
    // Handle to the Hadoop file system
    FileSystem fs = null;
    try {
        // Obtain the file system handle from the configuration
        fs = FileSystem.get(conf);
        // HDFS path to operate on
        final String pathName = "/test/kb16/hadoop/ratings.csv";
        Path path = new Path(pathName);
        if (fs.exists(path)) {
            // Delete the path if it already exists
            System.out.println("delete " + fs.delete(path, true));
        } else {
            // Otherwise create it
            System.out.println("create " + fs.create(path));
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Release the handle if it has not been closed yet
        if (fs != null) {
            try {
                fs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
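Note that fs.create(path) returns an FSDataOutputStream; the snippet above only prints it, but in practice you would normally write to the stream and close it. A minimal sketch under that assumption (the helper name writeSmallFile and the content are placeholders; fs is a handle obtained as above):

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Hypothetical helper: create (or overwrite) a small file on HDFS and write text into it
static void writeSmallFile(FileSystem fs, String hdfsPath, String content) throws IOException {
    // The second argument of create() enables overwriting an existing file
    try (FSDataOutputStream out = fs.create(new Path(hdfsPath), true)) {
        out.write(content.getBytes(StandardCharsets.UTF_8));
    }
}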
3. File upload
// Imports
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.File;
import java.io.IOException;

public static void main(String[] args) {
    // Local file to upload
    final String fromPath = "e:\\ratings.csv";
    // Abort if the local file does not exist
    if (!new File(fromPath).exists()) {
        System.out.println(fromPath + " doesn't exist");
        return;
    }
    // Initialize the Hadoop Configuration object
    Configuration conf = new Configuration();
    // Point the client at the HDFS NameNode
    conf.set("fs.defaultFS", "hdfs://192.168.50.102:9000");
    // Handle to the Hadoop file system
    FileSystem fs = null;
    try {
        // Obtain the file system handle from the configuration
        fs = FileSystem.get(conf);
        // Target directory on HDFS
        final String toPath = "/test/kb16/hive";
        Path to = new Path(toPath);
        // If the target directory does not exist and cannot be created, abort
        if (!fs.exists(to) && !fs.mkdirs(to)) {
            System.out.println(toPath + " doesn't exist and can't be created");
            return;
        }
        // Local source path
        Path from = new Path(fromPath);
        // Copy the local file into HDFS
        fs.copyFromLocalFile(from, to);
        System.out.println("succeed in copying from " + fromPath + " to " + toPath);
    } catch (IOException e) {
        e.printStackTrace();
        System.out.println("failure");
    } finally {
        // Release the file system handle if it is still open
        if (null != fs) {
            try {
                fs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
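copyFromLocalFile also has an overload that controls whether the local source is deleted after the copy and whether an existing target is overwritten. A minimal sketch reusing the paths from the example above (fs is the same FileSystem handle):

// delSrc = false keeps the local file; overwrite = true replaces an existing HDFS copy
fs.copyFromLocalFile(false, true, new Path("e:\\ratings.csv"), new Path("/test/kb16/hive"));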
4. File download
// Imports
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.File;
import java.io.IOException;

public class Download {
    public static void main(String[] args) {
        // Local directory to download into
        final String toPath = "c:\\users\\jialin\\desktop";
        File to = new File(toPath);
        // If the local directory does not exist and cannot be created, abort
        if (!to.exists() && !to.mkdirs()) {
            System.err.println(toPath + " doesn't exist and can't be created");
            return;
        }
        // Initialize the Hadoop Configuration object
        Configuration config = new Configuration();
        // Point the client at the HDFS NameNode
        config.set("fs.defaultFS", "hdfs://192.168.50.102:9000");
        // Handle to the Hadoop file system
        FileSystem fs = null;
        try {
            // Obtain the file system handle from the configuration
            fs = FileSystem.get(config);
            // HDFS file to download
            final String fromPath = "/test/kb16/hive/ratings.csv";
            Path from = new Path(fromPath);
            // Abort if the HDFS file does not exist
            if (!fs.exists(from)) {
                System.err.println(fromPath + " doesn't exist");
                return;
            }
            // Local destination path
            Path _to = new Path(toPath);
            // Copy the HDFS file to the local directory
            fs.copyToLocalFile(from, _to);
            System.out.println("succeed in downloading from " + fromPath + " to " + toPath);
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("failure");
        } finally {
            // Release the file system handle if it is still open
            if (null != fs) {
                try {
                    fs.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
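If the download fails on a Windows machine that lacks the native Hadoop libraries (winutils), a commonly used workaround is the four-argument overload of copyToLocalFile, which writes through the raw local file system and skips the local .crc checksum file. A minimal sketch with the variables from the example above (fs, from, and _to are the same as there):

// delSrc = false keeps the HDFS copy; useRawLocalFileSystem = true skips the .crc checksum file,
// which avoids depending on the native Hadoop libraries on Windows
fs.copyToLocalFile(false, from, _to, true);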
That covers the details of how to implement HDFS file upload and download in Java.