Manage Azure Data Lake Analytics using a Java app

Important

Azure Data Lake Analytics was retired on 29 February 2024. Learn more in this announcement.

For data analytics, your organization can use Azure Synapse Analytics or Microsoft Fabric.

This article describes how to manage Azure Data Lake Analytics accounts, data sources, users, and jobs by using an app written with the Azure Java SDK.

Prerequisites

  • IntelliJ IDEA or another Java development environment.
  • An Azure subscription.

Authenticate with Microsoft Entra ID

The code later in this article provides noninteractive authentication, where the application supplies its own credentials.
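For example, with the azure-identity library, a service principal credential for noninteractive authentication can be built as follows. This is a minimal sketch with placeholder values; the full program later in this article shows it in context:

import com.azure.identity.ClientSecretCredential;
import com.azure.identity.ClientSecretCredentialBuilder;

// Minimal sketch: build a service principal (client) credential.
// The placeholder values are assumptions; supply your own.
ClientSecretCredential credential = new ClientSecretCredentialBuilder()
        .clientId("<CLIENT-ID>")
        .tenantId("<TENANT-ID>")
        .clientSecret("<CLIENT-SECRET>")
        .build();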

Create a Java application

  1. Open IntelliJ and create a Java project using the Command-Line App template.
  2. Right-click the project on the left side of your screen and select Add Framework Support. Choose Maven and select OK.
  3. Open the newly created "pom.xml" file and add the following snippet of text between the </version> tag and the </project> tag:
<dependencies>
    <dependency>
      <groupId>com.azure.resourcemanager</groupId>
      <artifactId>azure-resourcemanager-datalakeanalytics</artifactId>
      <version>1.0.0-beta.1</version>
    </dependency>
    <dependency>
      <groupId>com.azure.resourcemanager</groupId>
      <artifactId>azure-resourcemanager-datalakestore</artifactId>
      <version>1.0.0-beta.1</version>
    </dependency>
    <dependency>
      <groupId>com.azure</groupId>
      <artifactId>azure-storage-file-datalake</artifactId>
      <version>12.7.2</version>
    </dependency>
    <dependency>
      <groupId>com.azure</groupId>
      <artifactId>azure-identity</artifactId>
      <version>1.4.1</version>
    </dependency>
</dependencies>

Go to File > Settings > Build, Execution, Deployment. Select Build Tools > Maven > Importing. Then select Import Maven projects automatically.

Open Main.java and replace the existing code block with the following code:

import com.azure.core.credential.TokenCredential;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.ClientSecretCredential;
import com.azure.identity.ClientSecretCredentialBuilder;
import com.azure.resourcemanager.datalakeanalytics.DataLakeAnalyticsManager;
import com.azure.resourcemanager.datalakeanalytics.models.DataLakeAnalyticsAccount;
import com.azure.resourcemanager.datalakestore.DataLakeStoreManager;
import com.azure.resourcemanager.datalakestore.models.DataLakeStoreAccount;
import com.azure.storage.file.datalake.DataLakeFileClient;
import com.azure.storage.file.datalake.DataLakeFileSystemClient;
import com.azure.storage.file.datalake.DataLakeServiceClient;
import com.azure.storage.file.datalake.DataLakeServiceClientBuilder;
import com.azure.storage.file.datalake.models.PathAccessControl;
import com.azure.storage.file.datalake.models.PathPermissions;

import java.io.ByteArrayInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.UUID;

public class Main {
    private static String adlsAccountName;
    private static String adlaAccountName;
    private static String resourceGroupName;
    private static String location;

    private static String tenantId;
    private static String subscriptionId;
    private static String clientId;
    private static String clientSecret;
    private static String fileSystemName;
    private static String localFolderPath;

    private static DataLakeAnalyticsManager analyticsManager;
    private static DataLakeStoreManager storeManager;
    private static DataLakeStoreAccount storeAccount;
    private static DataLakeAnalyticsAccount analyticsAccount;
    private static DataLakeServiceClient serviceClient;
    private static DataLakeFileSystemClient fileSystemClient;
    private static DataLakeFileClient fileClient;

    public static void main(String[] args) throws Exception {
        adlsAccountName = "<DATA-LAKE-STORE-NAME>";
        adlaAccountName = "<DATA-LAKE-ANALYTICS-NAME>";
        resourceGroupName = "<RESOURCE-GROUP-NAME>";
        location = "East US 2";

        tenantId = "<TENANT-ID>";
        subscriptionId = "<SUBSCRIPTION-ID>";
        clientId = "<CLIENT-ID>";
        clientSecret = "<CLIENT-SECRET>";
        fileSystemName = "<DATALAKE-FILE-SYSTEM-NAME>";

        localFolderPath = "C:\\local_path\\";

        // ----------------------------------------
        // Authenticate
        // ----------------------------------------
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        ClientSecretCredential creds = new ClientSecretCredentialBuilder()
                .clientId(clientId).tenantId(tenantId).clientSecret(clientSecret)
                .authorityHost("https://login.microsoftonline.com/") // authority host only; the tenant ID is set via tenantId above
                .build();
        setupClients(creds, profile);

        // ----------------------------------------
        // List Data Lake Store and Analytics accounts that this app can access
        // ----------------------------------------
        System.out.println(String.format("All ADL Store accounts that this app can access in subscription %s:", subscriptionId));
        storeManager.accounts().list().forEach(acct -> System.out.println(acct.name()));

        System.out.println(String.format("All ADL Analytics accounts that this app can access in subscription %s:", subscriptionId));
        analyticsManager.accounts().list().forEach(acct -> System.out.println(acct.name()));
        waitForNewline("Accounts displayed.", "Creating files.");

        // ----------------------------------------
        // Create a file in Data Lake Store: input1.csv
        // ----------------------------------------
        createFile("input1.csv", "123,abc", true);
        waitForNewline("File created.", "Submitting a job.");

        // ----------------------------------------
        // Submit a job to Data Lake Analytics
        // ----------------------------------------
        String script = "@input = EXTRACT Row1 string, Row2 string FROM \"/input1.csv\" USING Extractors.Csv(); OUTPUT @input TO @\"/output1.csv\" USING Outputters.Csv();";
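        // NOTE: submitJobByScript is not defined in the original article
        // snippet; a placeholder stub is provided near the end of this class.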
        UUID jobId = submitJobByScript(script, "testJob", creds);
        waitForNewline("Job submitted.", "Getting job status.");

        // ----------------------------------------
        // Download job output from Data Lake Store
        // ----------------------------------------
        downloadFile("output1.csv", localFolderPath + "output1.csv");
        waitForNewline("Job output downloaded.", "Deleting file.");

        deleteFile("output1.csv");
        waitForNewline("File deleted.", "Done.");
    }

    public static void setupClients(TokenCredential creds, AzureProfile profile) {

        analyticsManager = DataLakeAnalyticsManager.authenticate(creds, profile);

        storeManager = DataLakeStoreManager.authenticate(creds, profile);

        createAccounts();

        serviceClient = new DataLakeServiceClientBuilder().endpoint(storeAccount.endpoint()).credential(creds).buildClient();

        fileSystemClient = serviceClient.createFileSystem(fileSystemName);

    }

    public static void waitForNewline(String reason, String nextAction) {
        if (nextAction == null)
            nextAction = "";

        System.out.println(reason + "\r\nPress ENTER to continue...");
        try {
            System.in.read();
        } catch (Exception e) {
        }

        if (!nextAction.isEmpty()) {
            System.out.println(nextAction);
        }
    }

    // Create accounts
    public static void createAccounts() {
        // Create ADLS account
        storeAccount = storeManager.accounts().define(adlsAccountName)
                .withRegion(location)
                .withExistingResourceGroup(resourceGroupName)
                .create();

        analyticsAccount = analyticsManager.accounts().define(adlaAccountName)
                .withRegion(location).withExistingResourceGroup(resourceGroupName)
                .withDefaultDataLakeStoreAccount(adlsAccountName)
                .withDataLakeStoreAccounts(Collections.emptyList())
                .create();
    }

    // Create a file
    public static void createFile(String path, String contents, boolean force) {
        byte[] bytesContents = contents.getBytes();

        DataLakeFileClient fileClient = fileSystemClient.createFile(path, force);
        PathAccessControl accessControl = fileClient.getAccessControl();
        fileClient.setPermissions(PathPermissions.parseOctal("744"), accessControl.getGroup(), accessControl.getOwner());
        // Overwrite the empty file that createFile created above
        fileClient.upload(new ByteArrayInputStream(bytesContents), bytesContents.length, true);
    }

    // Delete a file
    public static void deleteFile(String filePath) {
        fileSystemClient.getFileClient(filePath).delete();
    }

    // Download a file
    private static void downloadFile(String srcPath, String destPath) throws IOException {

        fileClient = fileSystemClient.getFileClient(srcPath);
        OutputStream outputStream = new FileOutputStream(destPath);
        fileClient.read(outputStream);
        outputStream.close();
    }
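
    // submitJobByScript is not implemented in the original snippet. This stub
    // (an assumption added for illustration) keeps the sample compilable and
    // marks where U-SQL job submission belongs; job submission uses the Data
    // Lake Analytics job service rather than the ARM managers above.
    private static UUID submitJobByScript(String script, String jobName, TokenCredential creds) {
        throw new UnsupportedOperationException("Submit the U-SQL job with the Data Lake Analytics job API.");
    }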

}

Provide values for the parameters called out in the code snippet (a sketch showing how to read them from environment variables follows this list):

  • adlsAccountName
  • adlaAccountName
  • resourceGroupName
  • location
  • tenantId
  • subscriptionId
  • clientId
  • clientSecret
  • fileSystemName
  • localFolderPath
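
Hardcoding a client secret in source code is risky. As one alternative (a sketch, not a requirement of the SDK), the values can be read from environment variables; the helper and variable names below are this example's own convention:

// Hypothetical helper: fail fast if a required setting is absent.
private static String requireEnv(String name) {
    String value = System.getenv(name);
    if (value == null || value.isEmpty()) {
        throw new IllegalStateException("Missing environment variable: " + name);
    }
    return value;
}

// In main(), for example:
// tenantId = requireEnv("AZURE_TENANT_ID");
// clientId = requireEnv("AZURE_CLIENT_ID");
// clientSecret = requireEnv("AZURE_CLIENT_SECRET");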

Next steps