Store OpenAI API Key in config.json #17

Closed · wants to merge 1 commit
src/config/index.ts: 43 additions & 11 deletions

@@ -1,6 +1,6 @@
 import * as fs from "fs";
 import * as path from "path";
-import * as readline from "readline";
+import * as readlinePromises from "readline/promises";

[GitHub Actions / build (18.x)] Check failure on line 3 in src/config/index.ts: Cannot find module 'readline/promises' or its corresponding type declarations.

 export const DEFAULT_OLLAMA_MODEL = "llama2";
 export const DEFAULT_OPENAI_MODEL = "gpt-3.5-turbo";
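
A note on the build failure above: readline/promises has shipped with Node since v17, so on an 18.x runner this is a type-declaration problem rather than a runtime one. A plausible fix (hypothetical, since package.json is not touched by this diff) is to bump the @types/node devDependency to a release that declares the submodule:

    {
      "devDependencies": {
        "@types/node": "^18.0.0"
      }
    }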
@@ -24,6 +24,44 @@
   }
 }
 
+export const requestApiKey = async (
+  rl: readlinePromises.Interface,
+): Promise<string> => {
+  for (const key of ["LOZ_OPENAI_API_KEY", "OPENAI_API_KEY"]) {
+    const value = process.env[key];
+    if (value) {
+      const useApiKeyFromEnv = await rl.question(
+        `\n${key} found in environment variables. Do you want to use it? (y/n) `,
+      );
+      if (useApiKeyFromEnv.toLowerCase() === "y") {
+        return value;
+      }
+      if (useApiKeyFromEnv.toLowerCase() !== "n") {
+        console.log("Received the wrong answer. Please try again.");
+        return await requestApiKey(rl);
+      }
+    }
+  }
+
+  const apiKey = await rl.question("Enter your OpenAI API key:\n> ");
+  if (!apiKey) {
+    console.log("Received the wrong answer. Please try again.");
+    return await requestApiKey(rl);
+  }
+  return apiKey;
+};
+
+const requestApiName = async (
+  rl: readlinePromises.Interface,
+): Promise<string> => {
+  const res = await rl.question("Choose your LLM service: (ollama, openai) ");
+  if (!["ollama", "openai"].includes(res)) {
+    console.log("Received the wrong answer. Please try again.");
+    return await requestApiName(rl);
+  }
+  return res;
+};
+
 export class Config implements ConfigInterface {
   items: ConfigItemInterface[];
   configFilePath: string;

@@ -101,19 +139,12 @@
   async loadConfig(configPath: string) {
     this.configFilePath = path.join(configPath, "config.json");
     if (!fs.existsSync(this.configFilePath)) {
-      const rl = readline.createInterface({
+      const rl = readlinePromises.createInterface({
         input: process.stdin,
         output: process.stdout,
       });
 
-      const question = (query: string): Promise<string> => {
-        return new Promise((resolve) => {
-          rl.question(query, (answer) => {
-            resolve(answer);
-          });
-        });
-      };
-      const name = await question("Choose your LLM service: (ollama, openai) ");
+      const name = await requestApiName(rl);
 
       this.set("openai.model", DEFAULT_OPENAI_MODEL);
       this.set("ollama.model", DEFAULT_OLLAMA_MODEL);

@@ -124,7 +155,8 @@
       );
     } else if (name === "openai") {
       this.set("model", DEFAULT_OPENAI_MODEL);
-      console.log("set OPENAI_API_KEY in your environment variables");
+      const newApiKey = await requestApiKey(rl);
+      this.set("openai.apikey", newApiKey);
     }
     this.set("mode", "default");
     this.set("api", name);
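
For reference, a sketch of the config.json this branch would write on the openai path, assuming each set(key, value) call persists one flat entry (the serialization code is outside this diff, and the key shown is a placeholder):

    {
      "openai.model": "gpt-3.5-turbo",
      "ollama.model": "llama2",
      "model": "gpt-3.5-turbo",
      "openai.apikey": "sk-placeholder",
      "mode": "default",
      "api": "openai"
    }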
src/index.ts: 29 additions & 28 deletions

@@ -1,16 +1,21 @@
 import * as fs from "fs";
 import * as path from "path";
-import * as readline from "readline";
+import * as readlinePromises from "readline/promises";

[GitHub Actions / build (18.x)] Check failure on line 4 in src/index.ts: Cannot find module 'readline/promises' or its corresponding type declarations.

 import { exec } from "child_process";
 import { OpenAiAPI, OllamaAPI } from "./llm";
 
 import { ChatHistory } from "./history";
-import { Config, DEFAULT_OLLAMA_MODEL, DEFAULT_OPENAI_MODEL } from "./config";
+import {
+  Config,
+  DEFAULT_OLLAMA_MODEL,
+  DEFAULT_OPENAI_MODEL,
+  requestApiKey,
+} from "./config";
 import { Git } from "./git";
 
 const LOZ_DEBUG = process.env.DEBUG === "true" ? true : false;
 
+const readline = require("readline");
+
 require("dotenv").config();
 
 const DEBUG = process.env.LOZ_DEBUG === "true" ? true : false;
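
If bumping the type declarations is not an option, the same failure can be sidestepped by keeping the callback-based readline module and hand-wrapping question in a Promise, much like the helper this PR deletes from src/config/index.ts. A minimal sketch:

    import * as readline from "readline";

    // Promise wrapper over the callback-based readline API; avoids the
    // "readline/promises" submodule that older @types/node releases lack.
    const question = (rl: readline.Interface, query: string): Promise<string> =>
      new Promise((resolve) => rl.question(query, resolve));

    // Usage:
    // const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
    // const apiKey = await question(rl, "Enter your OpenAI API key:\n> ");
    // rl.close();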
@@ -83,33 +88,39 @@
 
     await this.loadingConfigFromJSONFile();
 
-    let api = this.checkAPI();
+    const api = this.checkAPI() || "openai";
 
-    if (api === "openai") {
-      this.checkEnv();
-      this.llmAPI = new OpenAiAPI();
-      this.defaultSettings.model =
-        this.config.get("model")?.value || DEFAULT_OPENAI_MODEL;
-    } else if (this.checkAPI() === "ollama") {
+    if (api === "ollama") {
       const result = await runShellCommand("ollama --version");
       if (DEBUG) console.log(result);
       if (result.indexOf("ollama") === -1) {
         console.log(
-          "Please install ollama with llama2 and codellama first: see https://ollama.ai/download \n"
+          "Please install ollama with llama2 and codellama first: see https://ollama.ai/download \n",
         );
         process.exit(1);
       }
       this.llmAPI = new OllamaAPI();
       this.defaultSettings.model =
         this.config.get("model")?.value || DEFAULT_OLLAMA_MODEL;
-    } else {
-      // default to openai
-      this.checkEnv();
-      this.llmAPI = new OpenAiAPI();
-      this.config.set("api", "openai");
-      this.defaultSettings.model =
-        this.config.get("model")?.value || DEFAULT_OPENAI_MODEL;
+      return true;
     }
 
+    let apiKey = this.config.get("openai.apikey")?.value;
+    if (!apiKey) {
+      const rl = readlinePromises.createInterface({
+        input: process.stdin,
+        output: process.stdout,
+      });
+      apiKey = await requestApiKey(rl);
+      this.config.set("openai.apikey", apiKey);
+      rl.close();
+    }
+    this.llmAPI = new OpenAiAPI(apiKey);
+    this.config.set("api", "openai");
+    this.defaultSettings.model =
+      this.config.get("model")?.value || DEFAULT_OPENAI_MODEL;
 
-    // TODO: show error if api is wrong
 
     return true;
   }

@@ -124,16 +135,6 @@
     return this.config.get("api")?.value;
   }
 
-  checkEnv() {
-    if (process.env.OPENAI_API_KEY === undefined) {
-      console.error("Please set OPENAI_API_KEY in your environment variables");
-      // system end
-      process.exit(1);
-    }
-
-    return true;
-  }
-
   // Save chat history (JSON) to file.
   async saveChatHistory() {
     const date = new Date();
src/llm/index.ts: 2 additions & 2 deletions

@@ -15,9 +15,9 @@ abstract class LLMService {
 }
 
 export class OpenAiAPI extends LLMService {
-  constructor() {
+  constructor(apiKey: string) {
     super();
-    this.api = new OpenAI();
+    this.api = new OpenAI({ apiKey });
   }
   async completion(params: LLMSettings) {
     const gptParams: OpenAI.Chat.ChatCompletionCreateParams = {
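
With this change the key is injected explicitly rather than read implicitly from the environment. A hypothetical wiring sketch (note that in the openai v4 SDK, an undefined apiKey falls back to the OPENAI_API_KEY environment variable and the constructor throws if neither is set, so callers should pass a resolved key; `config` below stands in for a loaded Config instance):

    import { OpenAiAPI } from "./llm";

    // Resolve the key from config first, then construct the client wrapper.
    const apiKey = config.get("openai.apikey")?.value;
    if (!apiKey) throw new Error("openai.apikey missing from config.json");
    const llm = new OpenAiAPI(apiKey);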
test/a.loz.test.ts: 0 additions & 12 deletions

@@ -34,18 +34,6 @@ describe("Test OpenAI API", () => {
   });
 });
 
-describe("Test Loz class", () => {
-  // npm test -- --grep=Loz.checkEnv
-  describe("loz.checkEnv", () => {
-    it("should return true", () => {
-      let loz = new Loz();
-
-      const result = loz.checkEnv();
-      expect(result).to.equal(true);
-    });
-  });
-});
-
 if (process.env.LOZ_LOCAL_TEST === "true") {
   describe("Loz.ollama", () => {
     it("should return true", async () => {
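
The checkEnv test goes away without a replacement. A hypothetical test for the new prompt flow could stub the readline interface instead; a sketch, assuming the suite's existing mocha/chai setup and that requestApiKey is importable from src/config:

    import { expect } from "chai";
    import { requestApiKey } from "../src/config";

    describe("requestApiKey", () => {
      it("returns the key typed at the prompt when no env key is set", async () => {
        delete process.env.LOZ_OPENAI_API_KEY;
        delete process.env.OPENAI_API_KEY;
        // Stub only the method requestApiKey calls; cast since this is not a full Interface.
        const fakeRl = { question: async (_q: string) => "sk-test" } as any;
        expect(await requestApiKey(fakeRl)).to.equal("sk-test");
      });
    });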