From c4838c0e52a301ef63f3bdae60a072d2f130bea6 Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Fri, 10 May 2024 17:47:02 -0700
Subject: [PATCH] Go: Update generated wrapper functions for TensorFlow ops.

PiperOrigin-RevId: 632649155
---
 tensorflow/go/op/wrappers.go | 38 ++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/tensorflow/go/op/wrappers.go b/tensorflow/go/op/wrappers.go
index c5fd9bf51960c3..ea62d33e92d431 100644
--- a/tensorflow/go/op/wrappers.go
+++ b/tensorflow/go/op/wrappers.go
@@ -19863,6 +19863,22 @@ func GetSessionTensor(scope *Scope, handle tf.Output, dtype tf.DataType) (value
 	return op.Output(0)
 }
 
+// An op that returns the TPU task ID from the TPU topology.
+//
+// This op returns the TPU task ID from the TPU topology.
+//
+// Returns The TPU task ID from the TPU topology.
+func GetTpuTaskId(scope *Scope) (tpu_task_id tf.Output) {
+	if scope.Err() != nil {
+		return
+	}
+	opspec := tf.OpSpec{
+		Type: "GetTpuTaskId",
+	}
+	op := scope.AddOperation(opspec)
+	return op.Output(0)
+}
+
 // Returns the truth value of (x > y) element-wise.
 //
 // *NOTE*: `Greater` supports broadcasting. More about broadcasting
@@ -57782,6 +57798,28 @@ func Unstage(scope *Scope, dtypes []tf.DataType, optional ...UnstageAttr) (values
 	return values
 }
 
+// An op to update the task ID and global core array.
+//
+// This op updates the task ID and global core array.
+//
+// Arguments:
+//
+//	tpu_task_id_to_shard_id: An array of int32 that maps each TPU task ID to a shard ID.
+//
+// Returns the created operation.
+func UpdateTaskIdAndGlobalCoreArray(scope *Scope, tpu_task_id_to_shard_id []tf.Output) (o *tf.Operation) {
+	if scope.Err() != nil {
+		return
+	}
+	opspec := tf.OpSpec{
+		Type: "UpdateTaskIdAndGlobalCoreArray",
+		Input: []tf.Input{
+			tf.OutputList(tpu_task_id_to_shard_id),
+		},
+	}
+	return scope.AddOperation(opspec)
+}
+
 // UpperBoundAttr is an optional argument to UpperBound.
 type UpperBoundAttr func(optionalAttr)
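
Usage note (not part of the patch): a minimal sketch of how the two newly generated wrappers might be driven from the tensorflow/go op package, assuming the standard op.NewScope/op.Const/Scope.Finalize graph-construction API. The int32 task-ID-to-shard-ID mapping values are illustrative placeholders, and the TPU ops themselves only resolve on a TPU-enabled runtime.

package main

import (
	"fmt"

	tf "github.com/tensorflow/tensorflow/tensorflow/go"
	"github.com/tensorflow/tensorflow/tensorflow/go/op"
)

func main() {
	s := op.NewScope()

	// GetTpuTaskId takes no inputs and yields a single scalar output.
	taskID := op.GetTpuTaskId(s)

	// UpdateTaskIdAndGlobalCoreArray consumes a list of int32 tensors that
	// map TPU task IDs to shard IDs and returns only the created operation.
	// The mapping values here are placeholders for illustration.
	mapping := op.Const(s.SubScope("mapping"), []int32{0, 1, 2, 3})
	op.UpdateTaskIdAndGlobalCoreArray(s, []tf.Output{mapping})

	// Finalize the graph; actually executing these ops would additionally
	// require a TPU-enabled session/runtime, which is not shown here.
	graph, err := s.Finalize()
	if err != nil {
		panic(err)
	}
	fmt.Println(graph, taskID)
}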