forked from gofireflyio/aiac
/
main.go
98 lines (86 loc) · 3.04 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
package main
import (
"context"
"errors"
"fmt"
"os"
"strings"
"github.com/alecthomas/kong"
"github.com/gofireflyio/aiac/libaiac"
)
// flags defines the command-line interface of the aiac tool, parsed by kong.
type flags struct {
	// APIKey authenticates requests in OpenAI API mode.
	APIKey string `help:"OpenAI API key" optional:"" env:"OPENAI_API_KEY"`
	// OutputFile receives the generated code; "-" means stdout.
	OutputFile string `help:"Output file to push resulting code to, defaults to stdout" default:"-" type:"path" short:"o"`
	// ReadmeFile receives model explanations (ChatGPT mode only).
	ReadmeFile string `help:"Markdown file to push explanations to (available only in ChatGPT mode)" optional:"" type:"path" short:"r"`
	// NOTE: boolean defaults below were written as `default:false`, which is
	// not valid Go struct-tag syntax (values must be quoted); fixed to
	// `default:"false"` so kong and go vet can parse the tags.
	Save  bool `help:"Save AIaC response without retry prompt" default:"false" short:"s"`
	Quiet bool `help:"Print AIaC response to stdout and exit (non-interactive mode)" default:"false" short:"q"`
	// Get is the sole supported command: generate an IaC template from the
	// free-form words given as arguments.
	Get struct {
		What []string `arg:"" help:"Which IaC template to generate"`
	} `cmd:"" help:"Generate IaC code" aliases:"generate"`
	// ChatGPT authentication is experimental
	ChatGPT      bool   `help:"Use ChatGPT mode instead of the OpenAI API (requires --session-token)" default:"false" hidden:""`
	SessionToken string `help:"Session token for ChatGPT mode" optional:"" hidden:"" env:"CHATGPT_SESSION_TOKEN"`
	// Cloudflare tokens and user agent are only consulted in ChatGPT mode.
	CloudflareClearance string `help:"Cloudflare clearance token for ChatGPT mode" optional:"" hidden:"" env:"CLOUDFLARE_CLEARANCE_TOKEN"`
	CloudflareBm        string `help:"Cloudflare bm token for ChatGPT mode" optional:"" hidden:"" env:"CLOUDFLARE_BM_TOKEN"`
	UserAgent           string `help:"Cloudflare tokens user agent ChatGPT mode" optional:"" hidden:"" env:"USER_AGENT"`
}
// main parses the command line, resolves an authentication token for the
// selected backend (OpenAI API or experimental ChatGPT mode), and asks the
// model to generate the requested IaC template.
func main() {
	// With no arguments at all, show usage instead of a cryptic parse error.
	if len(os.Args) < 2 {
		os.Args = append(os.Args, "--help")
	}

	var cli flags
	cmd := kong.Parse(&cli)

	// Only the "get" command (aliased "generate") is supported.
	if cmd.Command() != "get <what>" {
		fmt.Fprintln(os.Stderr, "Unknown command")
		os.Exit(1)
	}

	token := resolveToken(&cli)

	client := libaiac.NewClient(&libaiac.AIACClientInput{
		ChatGPT:             cli.ChatGPT,
		Token:               token,
		CloudflareClearance: cli.CloudflareClearance,
		CloudflareBm:        cli.CloudflareBm,
		UserAgent:           cli.UserAgent,
	})

	// --save suppresses the interactive retry prompt.
	shouldRetry := !cli.Save

	err := client.Ask(
		context.TODO(),
		// NOTE: we are prepending the word "generate" to the prompt; this
		// ensures the language model actually generates code. The word "get",
		// on the other hand, doesn't necessarily result in code being
		// generated.
		fmt.Sprintf("generate %s", strings.Join(cli.Get.What, " ")),
		shouldRetry,
		cli.Quiet,
		cli.OutputFile,
		cli.ReadmeFile,
	)
	if err != nil {
		if errors.Is(err, libaiac.ErrNoCode) {
			fmt.Fprintln(
				os.Stderr,
				"It doesn't look like ChatGPT generated any code, please make "+
					"sure that your prompt properly guides ChatGPT to do so.",
			)
		} else {
			fmt.Fprintf(os.Stderr, "Request failed: %s\n", err)
		}
		os.Exit(1)
	}
}

// resolveToken returns the authentication token appropriate for the selected
// mode, exiting the process with an error message when none is available.
func resolveToken(cli *flags) string {
	if !cli.ChatGPT {
		// OpenAI API mode: an API key is mandatory.
		if cli.APIKey == "" {
			fmt.Fprintln(os.Stderr, "You must provide an OpenAI API key")
			os.Exit(1)
		}
		return cli.APIKey
	}

	// ChatGPT mode: prefer the explicit flag, then the environment.
	// NOTE(review): kong already reads CHATGPT_SESSION_TOKEN via the field's
	// env tag, so this explicit lookup is likely redundant — kept to preserve
	// the original behavior; confirm against the kong version in use.
	if cli.SessionToken != "" {
		return cli.SessionToken
	}
	token, ok := os.LookupEnv("CHATGPT_SESSION_TOKEN")
	if !ok {
		fmt.Fprintln(os.Stderr, "You must provide a ChatGPT session token")
		os.Exit(1)
	}
	return token
}