POST https://management.azure.com/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/pipelines/examplePipeline/createRun?api-version=2018-06-01&referencePipelineRunId=
{
  "OutputBlobNameList": [
    "exampleoutput.csv"
  ]
}
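A successful request returns 200 OK with the identifier of the pipeline run that was started. The body contains a single runId property; the value shown here is the same placeholder run ID that appears in the generated Go sample below:
{
  "runId": "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"
}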
import com.azure.core.management.serializer.SerializerFactory;
import com.azure.core.util.serializer.SerializerEncoding;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Samples for Pipelines CreateRun.
 */
public final class Main {
    /*
     * x-ms-original-file:
     * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.
     * json
     */
    /**
     * Sample code: Pipelines_CreateRun.
     *
     * @param manager Entry point to DataFactoryManager.
     */
    public static void pipelinesCreateRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
        throws IOException {
        // The four nulls leave referencePipelineRunId, isRecovery, startActivityName, and
        // startFromFailure unset; only the pipeline parameters are supplied.
        manager.pipelines().createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null,
            null, null, null,
            mapOf("OutputBlobNameList", SerializerFactory.createDefaultManagementSerializerAdapter()
                .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)),
            com.azure.core.util.Context.NONE);
    }

    // Use "Map.of" if available
    @SuppressWarnings("unchecked")
    private static <T> Map<String, T> mapOf(Object... inputs) {
        Map<String, T> map = new HashMap<>();
        for (int i = 0; i < inputs.length; i += 2) {
            String key = (String) inputs[i];
            T value = (T) inputs[i + 1];
            map.put(key, value);
        }
        return map;
    }
}
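The Java sample takes an already authenticated DataFactoryManager. A minimal sketch of how such a manager can be built and passed to the Main class above, assuming the AZURE_SUBSCRIPTION_ID and AZURE_TENANT_ID environment variables are set and using DefaultAzureCredential like the other samples on this page (the CreateRunLauncher class name is illustrative):

import com.azure.core.credential.TokenCredential;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.resourcemanager.datafactory.DataFactoryManager;

public final class CreateRunLauncher {
    public static void main(String[] args) throws java.io.IOException {
        // Reads AZURE_TENANT_ID and AZURE_SUBSCRIPTION_ID from the environment.
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        // DefaultAzureCredential also honors AZURE_CLIENT_ID / AZURE_CLIENT_SECRET, as in the Python sample.
        TokenCredential credential = new DefaultAzureCredentialBuilder()
            .authorityHost(profile.getEnvironment().getActiveDirectoryEndpoint())
            .build();
        DataFactoryManager manager = DataFactoryManager.authenticate(credential, profile);
        Main.pipelinesCreateRun(manager);
    }
}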
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-datafactory
# USAGE
python pipelines_create_run.py
Before running the sample, set the values of the client ID, tenant ID, and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more information on how to get these values, see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    client = DataFactoryManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="12345678-1234-1234-1234-12345678abc",
    )

    # Pass the pipeline parameters from the request body shown above.
    response = client.pipelines.create_run(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        pipeline_name="examplePipeline",
        parameters={"OutputBlobNameList": ["exampleoutput.csv"]},
    )
    print(response)
# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json
if __name__ == "__main__":
    main()
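create_run returns a CreateRunResponse model rather than raw JSON. If only the run identifier is needed, adding this line at the end of main() prints just the ID (assuming the CreateRunResponse model shape from azure-mgmt-datafactory):

    print(response.run_id)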
package armdatafactory_test
import (
    "context"
    "log"

    "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
    "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v9"
)

// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/ab04533261eff228f28e08900445d0edef3eb70c/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json
func ExamplePipelinesClient_CreateRun() {
    cred, err := azidentity.NewDefaultAzureCredential(nil)
    if err != nil {
        log.Fatalf("failed to obtain a credential: %v", err)
    }
    ctx := context.Background()
    clientFactory, err := armdatafactory.NewClientFactory("<subscription-id>", cred, nil)
    if err != nil {
        log.Fatalf("failed to create client: %v", err)
    }
    res, err := clientFactory.NewPipelinesClient().CreateRun(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", &armdatafactory.PipelinesClientCreateRunOptions{
        ReferencePipelineRunID: nil,
        IsRecovery:             nil,
        StartActivityName:      nil,
        StartFromFailure:       nil,
        Parameters: map[string]any{
            "OutputBlobNameList": []any{
                "exampleoutput.csv",
            },
        },
    })
    if err != nil {
        log.Fatalf("failed to finish the request: %v", err)
    }
    // You can use the response here; the blank identifier is used only because this example discards it.
    _ = res
    // If the HTTP response code is 200 as defined in the example definition, the response structure
    // looks like the following. Note that all values in the output are placeholder values for demo purposes.
    // res.CreateRunResponse = armdatafactory.CreateRunResponse{
    //     RunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"),
    // }
}
const { DataFactoryManagementClient } = require("@azure/arm-datafactory");
const { DefaultAzureCredential } = require("@azure/identity");
/**
 * This sample demonstrates how to create a run of a pipeline.
 *
 * @summary Creates a run of a pipeline.
 * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json
 */
async function pipelinesCreateRun() {
  const subscriptionId =
    process.env["DATAFACTORY_SUBSCRIPTION_ID"] || "12345678-1234-1234-1234-12345678abc";
  const resourceGroupName = process.env["DATAFACTORY_RESOURCE_GROUP"] || "exampleResourceGroup";
  const factoryName = "exampleFactoryName";
  const pipelineName = "examplePipeline";
  const referencePipelineRunId = undefined;
  // The key matches the pipeline parameter name used in the request body above.
  const parameters = {
    OutputBlobNameList: ["exampleoutput.csv"],
  };
  const options = {
    referencePipelineRunId,
    parameters,
  };
  const credential = new DefaultAzureCredential();
  const client = new DataFactoryManagementClient(credential, subscriptionId);
  const result = await client.pipelines.createRun(
    resourceGroupName,
    factoryName,
    pipelineName,
    options,
  );
  console.log(result);
}
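As written, pipelinesCreateRun is only defined, never invoked. To run the sample directly with Node.js, a small entry point can be appended (a minimal sketch following the usual pattern in these samples):

async function main() {
  await pipelinesCreateRun();
}

main().catch(console.error);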
using Azure;
using Azure.ResourceManager;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Identity;
using Azure.ResourceManager.DataFactory.Models;
using Azure.ResourceManager.DataFactory;
// Generated from example definition: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json
// This example only shows the usage of the "Pipelines_CreateRun" operation; any dependent resources have to be created separately.

// Get your Azure access token. For details on how the Azure SDK obtains the token, see https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line
TokenCredential cred = new DefaultAzureCredential();
// Authenticate your client.
ArmClient client = new ArmClient(cred);

// This example assumes the DataFactoryPipelineResource already exists in Azure.
// For more information on creating a DataFactoryPipelineResource, see the documentation for DataFactoryPipelineResource.
string subscriptionId = "12345678-1234-1234-1234-12345678abc";
string resourceGroupName = "exampleResourceGroup";
string factoryName = "exampleFactoryName";
string pipelineName = "examplePipeline";
ResourceIdentifier dataFactoryPipelineResourceId = DataFactoryPipelineResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, factoryName, pipelineName);
DataFactoryPipelineResource dataFactoryPipeline = client.GetDataFactoryPipelineResource(dataFactoryPipelineResourceId);
// invoke the operation
IDictionary<string, BinaryData> parameterValueSpecification = new Dictionary<string, BinaryData>()
{
    ["OutputBlobNameList"] = BinaryData.FromObjectAsJson(new object[] { "exampleoutput.csv" }),
};
string referencePipelineRunId = null;
PipelineCreateRunResult result = await dataFactoryPipeline.CreateRunAsync(parameterValueSpecification: parameterValueSpecification, referencePipelineRunId: referencePipelineRunId);
Console.WriteLine($"Succeeded: {result}");
az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\"OutputBlobNameList\":[\"exampleoutput.csv\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup"
For more about this command, see the az datafactory pipeline create-run reference.