forked from apache/beam
/
main.go
67 lines (56 loc) · 1.99 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// beam-playground:
// name: pardo-one-to-many
// description: ParDo one-to-many example.
// multifile: false
// context_line: 38
// categories:
// - Quickstart
// complexity: MEDIUM
// tags:
// - hellobeam
package main
import (
"context"
"github.com/apache/beam/sdks/v2/go/pkg/beam"
"github.com/apache/beam/sdks/v2/go/pkg/beam/log"
"github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx"
"github.com/apache/beam/sdks/v2/go/pkg/beam/x/debug"
"strings"
)
// main constructs and runs a two-element pipeline that tokenizes each
// input sentence into individual words and prints them.
func main() {
	ctx := context.Background()
	p, s := beam.NewPipelineWithRoot()

	// Source PCollection holding the sentences to split.
	sentences := beam.Create(s, "Hello Beam", "It is awesome")

	// Tokenize every sentence into words via applyTransform().
	words := applyTransform(s, sentences)
	debug.Print(s, words)

	if err := beamx.Run(ctx, p); err != nil {
		log.Exitf(ctx, "Failed to execute job: %v", err)
	}
}
// applyTransform wires the tokenizeFn DoFn into the pipeline with ParDo,
// producing one output element per emitted word.
func applyTransform(s beam.Scope, input beam.PCollection) beam.PCollection {
	words := beam.ParDo(s, tokenizeFn, input)
	return words
}
// tokenizeFn divides a sentence into individual words and emits each one.
//
// It uses strings.Fields rather than strings.Split(input, " ") so that
// runs of whitespace (and leading/trailing spaces or an empty input) do
// not produce spurious empty-string "words".
func tokenizeFn(input string, emit func(out string)) {
	for _, word := range strings.Fields(input) {
		emit(word)
	}
}