Unlike standard workflows, which instantiate a previously created workflow template resource, inline workflows use a YAML file or an embedded WorkflowTemplate definition to run a workflow. Inline workflows do not support parameters.
Creating inline workflows is not currently supported in the Google Cloud console. Workflow templates and instantiated workflows can be viewed from the Dataproc Workflows page.
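You can instantiate an inline workflow with the gcloud CLI by describing the template in a YAML file; the same template, expressed as JSON, can also be sent in the body of a workflowTemplates.instantiateInline REST request. The following is a minimal sketch: the file name inline-template.yaml is illustrative, and the fields mirror the teragen/terasort template built in the client-library samples below.

jobs:
- hadoopJob:
    mainJarFileUri: file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar
    args:
    - teragen
    - "1000"
    - hdfs:///gen/
  stepId: teragen
- hadoopJob:
    mainJarFileUri: file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar
    args:
    - terasort
    - hdfs:///gen/
    - hdfs:///sort/
  stepId: terasort
  prerequisiteStepIds:
  - teragen
placement:
  managedCluster:
    clusterName: my-managed-cluster
    config:
      gceClusterConfig:
        # Leave zoneUri empty for Auto Zone Placement
        zoneUri: us-central1-a

gcloud dataproc workflow-templates instantiate-from-file \
    --file=inline-template.yaml \
    --region=us-central1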
import("context""fmt""io"dataproc"cloud.google.com/go/dataproc/apiv1""cloud.google.com/go/dataproc/apiv1/dataprocpb""google.golang.org/api/option")funcinstantiateInlineWorkflowTemplate(wio.Writer,projectID,regionstring)error{// projectID := "your-project-id"// region := "us-central1"ctx:=context.Background()// Create the cluster client.endpoint:=region+"-dataproc.googleapis.com:443"workflowTemplateClient,err:=dataproc.NewWorkflowTemplateClient(ctx,option.WithEndpoint(endpoint))iferr!=nil{returnfmt.Errorf("dataproc.NewWorkflowTemplateClient: %w",err)}deferworkflowTemplateClient.Close()// Create jobs for the workflow.teragenJob:=&dataprocpb.OrderedJob{JobType:&dataprocpb.OrderedJob_HadoopJob{HadoopJob:&dataprocpb.HadoopJob{Driver:&dataprocpb.HadoopJob_MainJarFileUri{MainJarFileUri:"file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",},Args:[]string{"teragen","1000","hdfs:///gen/",},},},StepId:"teragen",}terasortJob:=&dataprocpb.OrderedJob{JobType:&dataprocpb.OrderedJob_HadoopJob{HadoopJob:&dataprocpb.HadoopJob{Driver:&dataprocpb.HadoopJob_MainJarFileUri{MainJarFileUri:"file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",},Args:[]string{"terasort","hdfs:///gen/","hdfs:///sort/",},},},StepId:"terasort",PrerequisiteStepIds:[]string{"teragen",},}// Create the cluster placement.clusterPlacement:=&dataprocpb.WorkflowTemplatePlacement{Placement:&dataprocpb.WorkflowTemplatePlacement_ManagedCluster{ManagedCluster:&dataprocpb.ManagedCluster{ClusterName:"my-managed-cluster",Config:&dataprocpb.ClusterConfig{GceClusterConfig:&dataprocpb.GceClusterConfig{// Leave "ZoneUri" empty for "Auto Zone Placement"// ZoneUri: ""ZoneUri:"us-central1-a",},},},},}// Create the Instantiate Inline Workflow Template Request.req:=&dataprocpb.InstantiateInlineWorkflowTemplateRequest{Parent:fmt.Sprintf("projects/%s/regions/%s",projectID,region),Template:&dataprocpb.WorkflowTemplate{Jobs:[]*dataprocpb.OrderedJob{teragenJob,terasortJob,},Placement:clusterPlacement,},}// Create the cluster.op,err:=workflowTemplateClient.InstantiateInlineWorkflowTemplate(ctx,req)iferr!=nil{returnfmt.Errorf("InstantiateInlineWorkflowTemplate: %w",err)}iferr:=op.Wait(ctx);err!=nil{returnfmt.Errorf("InstantiateInlineWorkflowTemplate.Wait: %w",err)}// Output a success message.fmt.Fprintf(w,"Workflow created successfully.")returnnil}
Java

import com.google.api.gax.longrunning.OperationFuture;
import com.google.cloud.dataproc.v1.ClusterConfig;
import com.google.cloud.dataproc.v1.GceClusterConfig;
import com.google.cloud.dataproc.v1.HadoopJob;
import com.google.cloud.dataproc.v1.ManagedCluster;
import com.google.cloud.dataproc.v1.OrderedJob;
import com.google.cloud.dataproc.v1.RegionName;
import com.google.cloud.dataproc.v1.WorkflowMetadata;
import com.google.cloud.dataproc.v1.WorkflowTemplate;
import com.google.cloud.dataproc.v1.WorkflowTemplatePlacement;
import com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient;
import com.google.cloud.dataproc.v1.WorkflowTemplateServiceSettings;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

public class InstantiateInlineWorkflowTemplate {

  public static void instantiateInlineWorkflowTemplate()
      throws IOException, InterruptedException {
    // TODO(developer): Replace these variables before running the sample.
    String projectId = "your-project-id";
    String region = "your-project-region";
    instantiateInlineWorkflowTemplate(projectId, region);
  }

  public static void instantiateInlineWorkflowTemplate(String projectId, String region)
      throws IOException, InterruptedException {
    String myEndpoint = String.format("%s-dataproc.googleapis.com:443", region);

    // Configure the settings for the workflow template service client.
    WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
        WorkflowTemplateServiceSettings.newBuilder().setEndpoint(myEndpoint).build();

    // Create a workflow template service client with the configured settings. The client only
    // needs to be created once and can be reused for multiple requests. Using a
    // try-with-resources closes the client, but this can also be done manually with the
    // .close() method.
    try (WorkflowTemplateServiceClient workflowTemplateServiceClient =
        WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings)) {

      // Configure the jobs within the workflow.
      HadoopJob teragenHadoopJob =
          HadoopJob.newBuilder()
              .setMainJarFileUri("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar")
              .addArgs("teragen")
              .addArgs("1000")
              .addArgs("hdfs:///gen/")
              .build();
      OrderedJob teragen =
          OrderedJob.newBuilder().setHadoopJob(teragenHadoopJob).setStepId("teragen").build();

      HadoopJob terasortHadoopJob =
          HadoopJob.newBuilder()
              .setMainJarFileUri("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar")
              .addArgs("terasort")
              .addArgs("hdfs:///gen/")
              .addArgs("hdfs:///sort/")
              .build();
      OrderedJob terasort =
          OrderedJob.newBuilder()
              .setHadoopJob(terasortHadoopJob)
              .addPrerequisiteStepIds("teragen")
              .setStepId("terasort")
              .build();

      // Configure the cluster placement for the workflow.
      // Leave "ZoneUri" empty for "Auto Zone Placement".
      // GceClusterConfig gceClusterConfig =
      //     GceClusterConfig.newBuilder().setZoneUri("").build();
      GceClusterConfig gceClusterConfig =
          GceClusterConfig.newBuilder().setZoneUri("us-central1-a").build();
      ClusterConfig clusterConfig =
          ClusterConfig.newBuilder().setGceClusterConfig(gceClusterConfig).build();
      ManagedCluster managedCluster =
          ManagedCluster.newBuilder()
              .setClusterName("my-managed-cluster")
              .setConfig(clusterConfig)
              .build();
      WorkflowTemplatePlacement workflowTemplatePlacement =
          WorkflowTemplatePlacement.newBuilder().setManagedCluster(managedCluster).build();

      // Create the inline workflow template.
      WorkflowTemplate workflowTemplate =
          WorkflowTemplate.newBuilder()
              .addJobs(teragen)
              .addJobs(terasort)
              .setPlacement(workflowTemplatePlacement)
              .build();

      // Submit the instantiated inline workflow template request.
      String parent = RegionName.format(projectId, region);
      OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateAsync =
          workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(
              parent, workflowTemplate);
      instantiateInlineWorkflowTemplateAsync.get();

      // Print out a success message.
      System.out.printf("Workflow ran successfully.");

    } catch (ExecutionException e) {
      System.err.println(String.format("Error running workflow: %s", e.getMessage()));
    }
  }
}
Node.js

const dataproc = require('@google-cloud/dataproc');

// TODO(developer): Uncomment and set the following variables
// projectId = 'YOUR_PROJECT_ID'
// region = 'YOUR_REGION'

// Create a client with the endpoint set to the desired region
const client = new dataproc.v1.WorkflowTemplateServiceClient({
  apiEndpoint: `${region}-dataproc.googleapis.com`,
  projectId: projectId,
});

async function instantiateInlineWorkflowTemplate() {
  // Create the formatted parent.
  const parent = client.regionPath(projectId, region);

  // Create the template
  const template = {
    jobs: [
      {
        hadoopJob: {
          mainJarFileUri:
            'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',
          args: ['teragen', '1000', 'hdfs:///gen/'],
        },
        stepId: 'teragen',
      },
      {
        hadoopJob: {
          mainJarFileUri:
            'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',
          args: ['terasort', 'hdfs:///gen/', 'hdfs:///sort/'],
        },
        stepId: 'terasort',
        prerequisiteStepIds: ['teragen'],
      },
    ],
    placement: {
      managedCluster: {
        clusterName: 'my-managed-cluster',
        config: {
          gceClusterConfig: {
            // Leave 'zoneUri' empty for 'Auto Zone Placement'
            // zoneUri: ''
            zoneUri: 'us-central1-a',
          },
        },
      },
    },
  };

  const request = {
    parent: parent,
    template: template,
  };

  // Submit the request to instantiate the workflow from an inline template.
  const [operation] = await client.instantiateInlineWorkflowTemplate(request);
  await operation.promise();

  // Output a success message
  console.log('Workflow ran successfully.');
}

instantiateInlineWorkflowTemplate();
Python

from google.cloud import dataproc_v1 as dataproc


def instantiate_inline_workflow_template(project_id, region):
    """This sample walks a user through submitting a workflow
    for a Cloud Dataproc using the Python client library.

    Args:
        project_id (string): Project to use for running the workflow.
        region (string): Region where the workflow resources should live.
    """
    # Create a client with the endpoint set to the desired region.
    workflow_template_client = dataproc.WorkflowTemplateServiceClient(
        client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
    )

    parent = f"projects/{project_id}/regions/{region}"

    template = {
        "jobs": [
            {
                "hadoop_job": {
                    "main_jar_file_uri": "file:///usr/lib/hadoop-mapreduce/"
                    "hadoop-mapreduce-examples.jar",
                    "args": ["teragen", "1000", "hdfs:///gen/"],
                },
                "step_id": "teragen",
            },
            {
                "hadoop_job": {
                    "main_jar_file_uri": "file:///usr/lib/hadoop-mapreduce/"
                    "hadoop-mapreduce-examples.jar",
                    "args": ["terasort", "hdfs:///gen/", "hdfs:///sort/"],
                },
                "step_id": "terasort",
                "prerequisite_step_ids": ["teragen"],
            },
        ],
        "placement": {
            "managed_cluster": {
                "cluster_name": "my-managed-cluster",
                "config": {
                    "gce_cluster_config": {
                        # Leave 'zone_uri' empty for 'Auto Zone Placement'
                        # 'zone_uri': ''
                        "zone_uri": "us-central1-a"
                    }
                },
            }
        },
    }

    # Submit the request to instantiate the workflow from an inline template.
    operation = workflow_template_client.instantiate_inline_workflow_template(
        request={"parent": parent, "template": template}
    )
    operation.result()

    # Output a success message.
    print("Workflow ran successfully.")
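A minimal way to invoke the Python sample, assuming it is saved in the same module (the project ID and region are placeholders to replace with your own values):

if __name__ == "__main__":
    # Placeholder values; replace with your project ID and region.
    instantiate_inline_workflow_template("your-project-id", "us-central1")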
[[["Fácil de comprender","easyToUnderstand","thumb-up"],["Resolvió mi problema","solvedMyProblem","thumb-up"],["Otro","otherUp","thumb-up"]],[["Difícil de entender","hardToUnderstand","thumb-down"],["Información o código de muestra incorrectos","incorrectInformationOrSampleCode","thumb-down"],["Faltan la información o los ejemplos que necesito","missingTheInformationSamplesINeed","thumb-down"],["Problema de traducción","translationIssue","thumb-down"],["Otro","otherDown","thumb-down"]],["Última actualización: 2025-03-25 (UTC)"],[[["Inline workflows utilize a YAML file or an embedded WorkflowTemplate definition to run a workflow, as opposed to standard workflows that use a pre-existing template resource."],["Inline workflows do not support the use of parameters, meaning parameters cannot be passed to them."],["You can instantiate an inline workflow using `gcloud` CLI or through REST API requests, which requires a defined JSON request body with the specified jobs and placement."],["Instantiating inline workflows can also be done through code using various client libraries such as Go, Java, Node.js, and Python."],["The Google Cloud console does not currently support the direct creation of inline workflows, but you can view workflow templates and instantiated workflows in the console."]]],[]]