Commit
docs: update README
tak-bro committed Jul 25, 2024
1 parent 0c837e3 commit 40d93a8
Showing 13 changed files with 323 additions and 291 deletions.
8 changes: 0 additions & 8 deletions CONTRIBUTING.md
@@ -50,14 +50,6 @@ node ./dist/cli.mjs

## Testing

-Testing requires passing in `OPENAI_KEY` as an environment variable:
-
-```sh
-OPENAI_KEY=<your OPENAI key> pnpm test
-```
-
-You can still run tests that don't require `OPENAI_KEY` but will not test the main functionality:
-
```
pnpm test
```
560 changes: 299 additions & 261 deletions README.md

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion package.json
@@ -27,7 +27,9 @@
"llm",
"chatgpt",
"cohere",
-"groq"
+"groq",
+"aipick",
+"aip"
],
"license": "MIT",
"repository": "tak-bro/aipick",
2 changes: 1 addition & 1 deletion src/services/ai/anthropic.service.ts
@@ -59,7 +59,7 @@ export class AnthropicService extends AIService {
const generatedSystemPrompt = generatePrompt(promptOptions);

const params: Anthropic.MessageCreateParams = {
-max_tokens: this.params.config['max-tokens'],
+max_tokens: this.params.config.maxTokens,
temperature,
system: generatedSystemPrompt,
messages: [
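For context on where the renamed `maxTokens` value ends up: the `params` object built in the hunk above is a message-create payload for the official `@anthropic-ai/sdk`. The sketch below is illustrative only and not taken from this repository; the client setup, model name, and prompts are placeholder assumptions.

```ts
// Illustrative sketch (not aipick source): sending a message-create payload,
// including max_tokens, through the Anthropic SDK. Model name and prompts are
// placeholders; in the service above, max_tokens comes from config.maxTokens.
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

async function main() {
    const message = await anthropic.messages.create({
        model: 'claude-3-haiku-20240307', // placeholder model
        max_tokens: 1024,                 // e.g. the parsed maxTokens value
        temperature: 0.7,
        system: 'You are a helpful assistant.', // placeholder system prompt
        messages: [{ role: 'user', content: 'Hello' }],
    });
    console.log(message.content);
}

main().catch(console.error);
```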
2 changes: 1 addition & 1 deletion src/services/ai/codestral.service.ts
@@ -106,7 +106,7 @@ export class CodestralService extends AIService {
],
temperature: this.params.config.temperature,
top_p: 1,
-max_tokens: this.params.config['max-tokens'],
+max_tokens: this.params.config.maxTokens,
stream: false,
safe_prompt: false,
random_seed: getRandomNumber(10, 1000),
2 changes: 1 addition & 1 deletion src/services/ai/cohere.service.ts
@@ -51,7 +51,7 @@ export class CohereService extends AIService {
systemPromptPath,
};
const generatedSystemPrompt = generatePrompt(promptOptions);
-const maxTokens = this.params.config['max-tokens'];
+const maxTokens = this.params.config.maxTokens;

const prediction = await this.cohere.chat({
chatHistory: [{ role: 'SYSTEM', message: generatedSystemPrompt }],
4 changes: 2 additions & 2 deletions src/services/ai/gemini.service.ts
@@ -40,8 +40,8 @@ export class GeminiService extends AIService {
private async generateResponses(): Promise<AIResponse[]> {
try {
const userMessage = this.params.userMessage;
-const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config;
-const maxTokens = this.params.config['max-tokens'];
+const { systemPrompt, systemPromptPath, logging } = this.params.config;
+const maxTokens = this.params.config.maxTokens;
const promptOptions: PromptOptions = {
...DEFAULT_PROMPT_OPTIONS,
userMessage,
2 changes: 1 addition & 1 deletion src/services/ai/groq.service.ts
@@ -42,7 +42,7 @@ export class GroqService extends AIService {
try {
const userMessage = this.params.userMessage;
const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config;
-const maxTokens = this.params.config['max-tokens'];
+const maxTokens = this.params.config.maxTokens;
const promptOptions: PromptOptions = {
...DEFAULT_PROMPT_OPTIONS,
userMessage,
2 changes: 1 addition & 1 deletion src/services/ai/hugging-face.service.ts
@@ -95,7 +95,7 @@ export class HuggingFaceService extends AIService {
await this.intialize();

const userMessage = this.params.userMessage;
-const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config;
+const { systemPrompt, systemPromptPath, logging } = this.params.config;
const promptOptions: PromptOptions = {
...DEFAULT_PROMPT_OPTIONS,
userMessage,
2 changes: 1 addition & 1 deletion src/services/ai/mistral.service.ts
@@ -154,7 +154,7 @@ export class MistralService extends AIService {
],
temperature: this.params.config.temperature,
top_p: 1,
-max_tokens: this.params.config['max-tokens'],
+max_tokens: this.params.config.maxTokens,
stream: false,
safe_prompt: false,
random_seed: getRandomNumber(10, 1000),
4 changes: 2 additions & 2 deletions src/services/ai/ollama.service.ts
@@ -70,7 +70,7 @@ export class OllamaService extends AIService {
private async generateMessages(): Promise<AIResponse[]> {
try {
const userMessage = this.params.userMessage;
-const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config;
+const { systemPrompt, systemPromptPath, logging } = this.params.config;
const promptOptions: PromptOptions = {
...DEFAULT_PROMPT_OPTIONS,
userMessage,
@@ -97,7 +97,7 @@ export class OllamaService extends AIService {
const response = await new HttpRequestBuilder({
method: 'GET',
baseURL: `${this.host}`,
-timeout: this.params.config.OLLAMA_TIMEOUT,
+timeout: this.params.config.timeout,
}).execute();

return response.data;
2 changes: 1 addition & 1 deletion src/services/ai/openai.service.ts
@@ -77,7 +77,7 @@ export class OpenAIService extends AIService {
this.params.config.key,
this.params.config.model,
this.params.config.timeout,
-this.params.config['max-tokens'],
+this.params.config.maxTokens,
temperature,
generate,
userMessage,
20 changes: 10 additions & 10 deletions src/utils/config.ts
@@ -62,12 +62,12 @@ const generalConfigParsers = {

return parsed;
},
-'max-tokens'(maxTokens?: string) {
+maxTokens(maxTokens?: string) {
if (!maxTokens) {
return 1024;
}

-parseAssert('max-tokens', /^\d+$/.test(maxTokens), 'Must be an integer');
+parseAssert('maxTokens', /^\d+$/.test(maxTokens), 'Must be an integer');
return Number(maxTokens);
},
logging(enable?: string | boolean) {
@@ -112,7 +112,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
OLLAMA: {
@@ -134,7 +134,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
HUGGINGFACE: {
@@ -175,7 +175,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
ANTHROPIC: {
@@ -199,7 +199,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
MISTRAL: {
@@ -232,7 +232,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
CODESTRAL: {
@@ -250,7 +250,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
COHERE: {
@@ -267,7 +267,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
GROQ: {
@@ -284,7 +284,7 @@ const modelConfigParsers: Record<ModelName, Record<string, (value: any) => any>>
systemPromptPath: generalConfigParsers.systemPromptPath,
timeout: generalConfigParsers.timeout,
temperature: generalConfigParsers.temperature,
-'max-tokens': generalConfigParsers['max-tokens'],
+maxTokens: generalConfigParsers.maxTokens,
logging: generalConfigParsers.logging,
},
};
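The rename above touches every entry of the parser tables, so the pattern is worth spelling out: each config key maps to a small parser that validates the raw string value and returns a typed default-filled value. Below is a minimal, self-contained sketch of that pattern; the `maxTokens` parser mirrors the diff, but the `parseAssert` helper body and the `parseConfig` loop are assumptions written for illustration, not aipick's actual loading code.

```ts
// Minimal sketch of the parser-table pattern used in src/utils/config.ts.
// The maxTokens parser mirrors the diff above; parseAssert and parseConfig
// are assumed helpers, included here only so the example runs on its own.
type RawConfig = Record<string, string | undefined>;

const parseAssert = (name: string, condition: boolean, message: string): void => {
    if (!condition) {
        throw new Error(`Invalid config property ${name}: ${message}`);
    }
};

const generalConfigParsers = {
    maxTokens(maxTokens?: string) {
        if (!maxTokens) {
            return 1024; // default when the key is unset
        }
        parseAssert('maxTokens', /^\d+$/.test(maxTokens), 'Must be an integer');
        return Number(maxTokens);
    },
} as const;

// Run every known key through its parser so downstream code reads typed
// values (e.g. config.maxTokens as a number) instead of raw strings.
const parseConfig = (raw: RawConfig) =>
    Object.fromEntries(
        Object.entries(generalConfigParsers).map(([key, parse]) => [key, parse(raw[key])]),
    );

console.log(parseConfig({ maxTokens: '2048' })); // { maxTokens: 2048 }
console.log(parseConfig({}));                    // { maxTokens: 1024 }
```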
