
Hadoop Basics - 04 - HDFS API Programming

Source code: https://github.com/hiszm/hadoop-train

HDFS API Programming

Development environment

pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>org.example</groupId>
  <artifactId>untitled4</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>untitled4</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>



  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
    <hadoop.version>2.6.0-cdh5.15.1</hadoop.version>
  </properties>



  <repositories>
    <repository>
      <id>cloudera</id>
      <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
  </repositories>

  <dependencies>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>

  </dependencies>

  <build>
    <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
      <plugins>
        <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>

Wait for the dependency download to finish.

If the download is too slow, route Maven through a proxy.

This is what the project looks like once the dependencies are installed.

HelloWorld

package org.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

// Use the Java API to operate on the HDFS file system
public class HDFSapp {

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();

        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop000:8020"), configuration);
        Path path = new Path("/hdfsapi/test");
        boolean result = fileSystem.mkdirs(path);
        System.out.println(result);
    }

}


Running it for the first time throws a Permission denied error:

Exception in thread "main" org.apache.hadoop.security.AccessControlException: Permission denied: user=jacksun, access=WRITE, inode="/":hadoop:supergroup:drwxr-xr-x
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:279)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.chec

-rw-r--r--   1 hadoop supergroup       1366 2020-08-17 21:35 /README.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-17 21:48 /hdfs-test
-rw-r--r--   1 hadoop supergroup  181367942 2020-08-17 21:59 /jdk-8u91-linux-x64.tar.gz

The reason: we are running as the local user jacksun, which is not in the supergroup group, so it has no write permission on /.

The fix is to act as the hadoop user by passing it to FileSystem.get:

package org.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

// Use the Java API to operate on the HDFS file system
public class HDFSapp {

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop000:8020"), configuration, "hadoop");
        Path path = new Path("/hdfsapi/test");
        boolean result = fileSystem.mkdirs(path);
        System.out.println(result);
    }

}
true

Process finished with exit code 0

[hadoop@hadoop000 sbin]$ hadoop fs -ls /
Found 4 items
-rw-r--r--   1 hadoop supergroup       1366 2020-08-17 21:35 /README.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-17 21:48 /hdfs-test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi
-rw-r--r--   1 hadoop supergroup  181367942 2020-08-17 21:59 /jdk-8u91-linux-x64.tar.gz

Encapsulating with JUnit

package org.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.net.URI;

// Use the Java API to operate on the HDFS file system
public class HDFSapp {

    public static final String HDFS_PATH = "hdfs://hadoop000:8020";
    FileSystem fileSystem = null;
    Configuration configuration = null;

    @Before
    public void setUp() throws Exception {
        System.out.println("=======setUp=======");
        // FileSystem.get parameters: the HDFS URI, the client-side
        // configuration, and the user name the client should act as
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");
    }

    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/hdfsapi/test2"));
    }

    @After
    public void tearDown() {
        configuration = null;
        fileSystem = null;
        System.out.println("=======tearDown=======");
    }

//    public static void main(String[] args) throws Exception {
//        Configuration configuration = new Configuration();
//        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop000:8020"), configuration, "hadoop");
//        Path path = new Path("/hdfsapi/test");
//        boolean result = fileSystem.mkdirs(path);
//        System.out.println(result);
//    }

}



[hadoop@hadoop000 sbin]$ hadoop fs -ls /hdfsapi
Found 2 items
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2

Viewing the contents of an HDFS file

    // Print the contents of an HDFS file to stdout
    @Test
    public void text() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/README.txt"));
        IOUtils.copyBytes(in, System.out, 1024);
    }
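
If you want the file's contents as a String rather than streamed to stdout, plain JDK streams over fileSystem.open work too. A minimal sketch, assuming the file is small enough to hold in memory (the helper name readToString is mine, not from the original; it needs java.io.BufferedReader, java.io.IOException, java.io.InputStreamReader and java.nio.charset.StandardCharsets imports):

    // Sketch: read a small HDFS text file fully into a String.
    private String readToString(Path path) throws IOException {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fileSystem.open(path), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append('\n');
            }
        }
        return sb.toString();
    }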

Creating a file

    // Create a file and write to it
    @Test
    public void create() throws Exception {
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/b.txt"));
        out.writeUTF("hello world replication");
        out.flush();
        out.close();
    }
[hadoop@hadoop000 sbin]$ hadoop dfs -ls /hdfsapi
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.

Found 4 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:48 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
[hadoop@hadoop000 sbin]$ 
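
A detail worth noting: b.txt is 25 bytes even though "hello world replication" is only 23 characters, because writeUTF prefixes the payload with a 2-byte length field. To write the raw bytes only, a sketch (the path b_raw.txt is illustrative, not from the original):

    // Sketch: write plain UTF-8 bytes with no length prefix,
    // so the file size matches the string length.
    @Test
    public void createRaw() throws Exception {
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/b_raw.txt"));
        out.write("hello world replication".getBytes(StandardCharsets.UTF_8));
        out.close();
    }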

Analyzing the replication factor

[hadoop@hadoop000 sbin]$ hadoop fs -ls /
Found 4 items
-rw-r--r--   1 hadoop supergroup       1366 2020-08-17 21:35 /README.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-17 21:48 /hdfs-test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 10:07 /hdfsapi
-rw-r--r--   1 hadoop supergroup  181367942 2020-08-17 21:59 /jdk-8u91-linux-x64.tar.gz
[hadoop@hadoop000 sbin]$ hadoop fs -ls /hdfsapi
Found 3 items
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/a.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2

We notice a detail here:
files created on the server over ssh have a replication factor of 1,

but files created through the Java API have a replication factor of 3.

That is because the Java client does not read the server's hdfs-site.xml; it falls back to the default dfs.replication of 3 shipped with the client jars loaded in IDEA.

Adding configuration.set("dfs.replication","1") to the code
gives the client control over the replication factor:

    @Before
    public void setUp() throws Exception {
        System.out.println("=======setUp=======");
        // FileSystem.get parameters: the HDFS URI, the client-side
        // configuration, and the user name the client should act as
        configuration = new Configuration();
        configuration.set("dfs.replication", "1");
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");
    }
[hadoop@hadoop000 hadoop]$ hadoop fs -ls /hdfsapi
Found 4 items
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/a.txt
-rw-r--r--   1 hadoop supergroup          0 2020-08-19 10:46 /hdfsapi/b.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
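
The effective replication factor can also be read back from code instead of the shell; a small sketch against one of the files created above:

    // Sketch: read back the replication factor of a file we created.
    @Test
    public void checkReplication() throws Exception {
        short replication = fileSystem.getFileStatus(new Path("/hdfsapi/b.txt"))
                .getReplication();
        System.out.println("replication factor: " + replication);
    }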

Renaming a file

    // Rename a file
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/hdfsapi/a.txt");
        Path newPath = new Path("/hdfsapi/c.txt");
        boolean result = fileSystem.rename(oldPath, newPath);
        System.out.println(result);
    }

[hadoop@hadoop000 sbin]$ hadoop fs -ls /hdfsapi/
Found 4 items
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/a.txt
-rw-r--r--   1 hadoop supergroup         25 2020-08-19 10:46 /hdfsapi/b.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
[hadoop@hadoop000 sbin]$ hadoop fs -ls /hdfsapi/
Found 4 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-19 10:46 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
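
rename also doubles as a move: pointing the destination at a path inside another directory relocates the file. A sketch (the destination path is illustrative, not from the original):

    // Sketch: rename doubles as a move when the destination
    // sits in a different directory.
    @Test
    public void move() throws Exception {
        boolean moved = fileSystem.rename(new Path("/hdfsapi/c.txt"),
                new Path("/hdfsapi/test/c.txt"));
        System.out.println(moved);
    }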


Uploading a local file to HDFS

    // Upload a local file to HDFS
    @Test
    public void copyFromLocalFile() throws Exception {
        Path src = new Path("/Users/jacksun/data/local.txt");
        Path dst = new Path("/hdfsapi/");
        fileSystem.copyFromLocalFile(src, dst);
    }
[hadoop@hadoop000 sbin]$ hadoop dfs -ls /hdfsapi
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.

Found 4 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:48 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
[hadoop@hadoop000 sbin]$ hadoop dfs -ls /hdfsapi
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.

Found 5 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:48 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
-rw-r--r--   1 hadoop supergroup         18 2020-08-21 09:52 /hdfsapi/local.txt
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
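
copyFromLocalFile also has an overload taking delSrc and overwrite flags, which is handy when re-running the test against an existing destination. A sketch (the flag values are illustrative):

    // Sketch: delSrc=false keeps the local copy,
    // overwrite=true replaces an existing destination file.
    @Test
    public void copyFromLocalFileOverwrite() throws Exception {
        fileSystem.copyFromLocalFile(false, true,
                new Path("/Users/jacksun/data/local.txt"), new Path("/hdfsapi/"));
    }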

Uploading large files with a progress indicator


    // Upload a large local file to HDFS, printing a crude progress indicator
    @Test
    public void copyFromLocalBigFile() throws Exception {
        InputStream in = new BufferedInputStream(
                new FileInputStream(new File("/Users/jacksun/data/music.ape")));
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/music.ape"),
                new Progressable() {
                    @Override
                    public void progress() {
                        System.out.print("-");
                    }
                }
        );
        IOUtils.copyBytes(in, out, 1024);
    }


[hadoop@hadoop000 sbin]$ hadoop dfs -ls /hdfsapi
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.

Found 6 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:48 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
-rw-r--r--   1 hadoop supergroup         18 2020-08-21 09:52 /hdfsapi/local.txt
-rw-r--r--   1 hadoop supergroup   48097077 2020-08-21 09:52 /hdfsapi/music.ape
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
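
IOUtils.copyBytes also has a four-argument overload that closes both streams once the copy finishes, saving the explicit flush/close bookkeeping. A sketch of the same upload using it (the destination name music2.ape and the 4096 buffer are illustrative):

    // Sketch: the final 'true' asks copyBytes to close both streams
    // when the copy completes (or fails).
    @Test
    public void copyFromLocalBigFileAutoClose() throws Exception {
        InputStream in = new BufferedInputStream(
                new FileInputStream(new File("/Users/jacksun/data/music.ape")));
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/music2.ape"));
        IOUtils.copyBytes(in, out, 4096, true);
    }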

Downloading a file from HDFS

    // Download a file from HDFS to the local file system
    @Test
    public void copyToLocalFile() throws Exception {
        Path src = new Path("/hdfsapi/hello.txt");
        Path dst = new Path("/Users/jacksun/data");
        fileSystem.copyToLocalFile(src, dst);
    }
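
copyToLocalFile goes through the checksummed local file system and can leave a .crc file next to the download; the four-argument overload can route through the raw local file system to skip it. A sketch (the flag values are illustrative):

    // Sketch: useRawLocalFileSystem=true avoids the .crc checksum
    // file next to the downloaded copy.
    @Test
    public void copyToLocalFileRaw() throws Exception {
        fileSystem.copyToLocalFile(false, new Path("/hdfsapi/hello.txt"),
                new Path("/Users/jacksun/data"), true);
    }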

Listing everything under a directory

    // List files and directories
    @Test
    public void listFiles() throws Exception {
        FileStatus[] statuses = fileSystem.listStatus(new Path("/hdfsapi"));
        for (FileStatus file : statuses) {
            printFileStatus(file);
        }
    }

    private void printFileStatus(FileStatus file) {
        String isDir = file.isDirectory() ? "directory" : "file";
        String permission = file.getPermission().toString();
        short replication = file.getReplication();
        long length = file.getLen();
        String path = file.getPath().toString();

        System.out.println(isDir + "\t" + permission + "\t" +
                replication + "\t" + length + "\t" + path);
    }

=======setUp=======
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
file	rw-r--r--	1	25	hdfs://hadoop000:8020/hdfsapi/b.txt
file	rw-r--r--	3	13	hdfs://hadoop000:8020/hdfsapi/c.txt
file	rw-r--r--	1	18	hdfs://hadoop000:8020/hdfsapi/local.txt
file	rw-r--r--	1	48097077	hdfs://hadoop000:8020/hdfsapi/music.ape
directory	rwxr-xr-x	0	0	hdfs://hadoop000:8020/hdfsapi/test
directory	rwxr-xr-x	0	0	hdfs://hadoop000:8020/hdfsapi/test2
=======tearDown=======


Recursively listing all files under a directory

    // Recursively list all files
    @Test
    public void listFileRecursive() throws Exception {
        RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(
                new Path("/hdfsapi"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            printFileStatus(file);
        }
    }

=======setUp=======
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
file	rw-r--r--	1	25	hdfs://hadoop000:8020/hdfsapi/b.txt
file	rw-r--r--	3	13	hdfs://hadoop000:8020/hdfsapi/c.txt
file	rw-r--r--	1	18	hdfs://hadoop000:8020/hdfsapi/local.txt
file	rw-r--r--	1	48097077	hdfs://hadoop000:8020/hdfsapi/music.ape

Note that listFiles returns only files, never directories, which is why the test and test2 directories from the listStatus output above are absent here.

Viewing file block information

    // View block information for a file (needs import java.util.Arrays)
    @Test
    public void getFileBlockLocations() throws Exception {
        FileStatus fileStatus = fileSystem.getFileStatus(
                new Path("/hdfsapi/test/a.txt"));
        BlockLocation[] blocks = fileSystem.getFileBlockLocations(fileStatus,
                0, fileStatus.getLen());

        for (BlockLocation block : blocks) {
            for (String name : block.getNames()) {
                // Arrays.toString because getHosts() returns a String[];
                // printing the array directly would show only its hash code
                System.out.println(name + ":" + block.getOffset() + ":" +
                        block.getLength() + ":" + Arrays.toString(block.getHosts()));
            }
        }
    }

Deleting a file


    // Delete a file (the second argument enables recursive deletion
    // when the path is a directory)
    @Test
    public void delete() throws Exception {
        fileSystem.delete(new Path("/hdfsapi/test/idea.dmg"), true);
    }

Found 6 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:48 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
-rw-r--r--   1 hadoop supergroup         18 2020-08-21 09:52 /hdfsapi/local.txt
-rw-r--r--   1 hadoop supergroup   48097077 2020-08-21 09:52 /hdfsapi/music.ape
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
[hadoop@hadoop000 sbin]$ hadoop dfs -ls /hdfsapi
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.

Found 5 items
-rw-r--r--   1 hadoop supergroup         25 2020-08-21 09:57 /hdfsapi/b.txt
-rw-r--r--   3 hadoop supergroup         13 2020-08-19 10:07 /hdfsapi/c.txt
-rw-r--r--   1 hadoop supergroup   48097077 2020-08-21 09:57 /hdfsapi/music.ape
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:08 /hdfsapi/test
drwxr-xr-x   - hadoop supergroup          0 2020-08-19 09:44 /hdfsapi/test2
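
Since delete simply returns false for a missing path, it can be worth guarding it with an existence check. A sketch (the target path is illustrative):

    // Sketch: guard the delete with an existence check.
    @Test
    public void deleteIfExists() throws Exception {
        Path target = new Path("/hdfsapi/test/idea.dmg");
        if (fileSystem.exists(target)) {
            boolean deleted = fileSystem.delete(target, true);
            System.out.println(deleted);
        }
    }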

Full code listing


package org.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import java.util.Arrays;

// Use the Java API to operate on the HDFS file system
public class HDFSapp {

    public static final String HDFS_PATH = "hdfs://hadoop000:8020";
    FileSystem fileSystem = null;
    Configuration configuration = null;

    @Before
    public void setUp() throws Exception {
        System.out.println("=======setUp=======");
        // FileSystem.get parameters: the HDFS URI, the client-side
        // configuration, and the user name the client should act as
        configuration = new Configuration();
        configuration.set("dfs.replication", "1");
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");
    }

    // Create a directory
    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/hdfsapi/test2"));
    }

    // Print the contents of an HDFS file to stdout
    @Test
    public void text() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/README.txt"));
        IOUtils.copyBytes(in, System.out, 1024);
    }

    //========================================

    // Create a file and write to it
    @Test
    public void create() throws Exception {
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/b.txt"));
        out.writeUTF("hello world replication");
        out.flush();
        out.close();
    }

    // Rename a file
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/hdfsapi/a.txt");
        Path newPath = new Path("/hdfsapi/c.txt");
        boolean result = fileSystem.rename(oldPath, newPath);
        System.out.println(result);
    }

    // Upload a local file to HDFS
    @Test
    public void copyFromLocalFile() throws Exception {
        Path src = new Path("/Users/jacksun/data/local.txt");
        Path dst = new Path("/hdfsapi/");
        fileSystem.copyFromLocalFile(src, dst);
    }

    // Upload a large local file to HDFS, printing a crude progress indicator
    @Test
    public void copyFromLocalBigFile() throws Exception {
        InputStream in = new BufferedInputStream(
                new FileInputStream(new File("/Users/jacksun/data/music.ape")));
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/music.ape"),
                new Progressable() {
                    @Override
                    public void progress() {
                        System.out.print("-");
                    }
                }
        );
        IOUtils.copyBytes(in, out, 1024);
    }

    // Download a file from HDFS to the local file system
    @Test
    public void copyToLocalFile() throws Exception {
        Path src = new Path("/hdfsapi/hello.txt");
        Path dst = new Path("/Users/jacksun/data");
        fileSystem.copyToLocalFile(src, dst);
    }

    // List files and directories
    @Test
    public void listFiles() throws Exception {
        FileStatus[] statuses = fileSystem.listStatus(new Path("/hdfsapi"));
        for (FileStatus file : statuses) {
            printFileStatus(file);
        }
    }

    private void printFileStatus(FileStatus file) {
        String isDir = file.isDirectory() ? "directory" : "file";
        String permission = file.getPermission().toString();
        short replication = file.getReplication();
        long length = file.getLen();
        String path = file.getPath().toString();

        System.out.println(isDir + "\t" + permission + "\t" +
                replication + "\t" + length + "\t" + path);
    }

    // Recursively list all files
    @Test
    public void listFileRecursive() throws Exception {
        RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(
                new Path("/hdfsapi"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            printFileStatus(file);
        }
    }

    // View block information for a file
    @Test
    public void getFileBlockLocations() throws Exception {
        FileStatus fileStatus = fileSystem.getFileStatus(
                new Path("/hdfsapi/test/a.txt"));
        BlockLocation[] blocks = fileSystem.getFileBlockLocations(fileStatus,
                0, fileStatus.getLen());

        for (BlockLocation block : blocks) {
            for (String name : block.getNames()) {
                // Arrays.toString because getHosts() returns a String[]
                System.out.println(name + ":" + block.getOffset() + ":" +
                        block.getLength() + ":" + Arrays.toString(block.getHosts()));
            }
        }
    }

    // Delete a file (true enables recursive deletion for directories)
    @Test
    public void delete() throws Exception {
        fileSystem.delete(new Path("/hdfsapi/local.txt"), true);
    }

    @After
    public void tearDown() {
        configuration = null;
        fileSystem = null;
        System.out.println("=======tearDown=======");
    }

//    public static void main(String[] args) throws Exception {
//        Configuration configuration = new Configuration();
//        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop000:8020"), configuration, "hadoop");
//        Path path = new Path("/hdfsapi/test");
//        boolean result = fileSystem.mkdirs(path);
//        System.out.println(result);
//    }

}


Copyright notice
This article was created by [Sun zhongming]. Please include the original link when reposting. Thanks.
https://cdmana.com/2021/10/20211013211945111A.html
