Adds integration testing (#47)
* NOOP engine for integration testing
* CLI tests, integration tests, and GitHub Actions. (A local noop-engine run is sketched below.)
DavidSouther authored Apr 15, 2024
1 parent cd94c9a commit ede520c
Showing 23 changed files with 983 additions and 108 deletions.
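The noop engine added in this commit, together with the AILLY_ENGINE and AILLY_NOOP_RESPONSE environment variables documented in the CLI help further down, makes it possible to exercise the pipeline without calling a real model. Below is a minimal local smoke-test sketch, not part of the diff: the --prompt and --context flags and both environment variables come from the help text, while the cli/index.js entry-point path is an assumption (the contents of integ/integ.sh are not shown here).

// Hypothetical local smoke test; entry-point path is assumed, flags and env vars are documented.
import { execSync } from "node:child_process";

const env = {
  ...process.env,
  AILLY_ENGINE: "noop",                              // constant-text test engine, no LLM call
  AILLY_NOOP_RESPONSE: "canned integration-test reply",
};

// Pipe mode: a single prompt, no folder context, response printed to stdout.
const out = execSync('node ./cli/index.js --context none --prompt "Say hello"', { env });
console.log(out.toString()); // with the noop engine, this should echo AILLY_NOOP_RESPONSE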
26 changes: 26 additions & 0 deletions .github/workflows/linux.yaml
@@ -0,0 +1,26 @@
name: "*Nix Integration Tests"

on:
push:
branches: [main]
pull_request:
branches: [main]
workflow_dispatch:

jobs:
linux-integ:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2

- name: Use Node.js
uses: actions/setup-node@v2
with:
node-version: "20.x"

- name: Install dependencies
run: npm install ; npx tsc -p core

- name: Run integration tests
run: ./integ/integ.sh
29 changes: 29 additions & 0 deletions .github/workflows/npm.yaml
@@ -0,0 +1,29 @@
name: CI

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  workflow_dispatch:

jobs:
  npm-test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Use Node.js
        uses: actions/setup-node@v2
        with:
          node-version: "20.x"

      - name: Install dependencies
        run: npm install && npx tsc -p core && npm link ./core -w cli

      - name: Run CI script in cli workspace
        run: npm run --workspace cli ci

      - name: Run CI script in core workspace
        run: npm run --workspace core ci
27 changes: 27 additions & 0 deletions .github/workflows/windows.yaml
@@ -0,0 +1,27 @@
name: Windows Integration Tests

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  workflow_dispatch:

jobs:
  win-integ:
    runs-on: windows-latest

    steps:
      - uses: actions/checkout@v3

      - name: Use Node.js
        uses: actions/setup-node@v2
        with:
          node-version: "20.x"

      - name: Install dependencies
        run: npm install ; npx tsc -p core

      - name: Run Integration Tests
        run: ./integ/integ.bat
        shell: cmd
47 changes: 20 additions & 27 deletions cli/args.js
@@ -5,40 +5,24 @@ export function makeArgs(argv = process.argv) {
args: argv,
allowPositionals: true,
options: {
root: {
type: "string",
short: "r",
default: process.cwd(),
},
out: {
type: "string",
short: "o",
},
isolated: {
type: "boolean",
short: "i",
default: false,
},
"no-overwrite": {
type: "boolean",
default: false,
},
root: { type: "string", default: process.cwd(), short: "r" },
out: { type: "string", short: "o" },
isolated: { type: "boolean", default: false, short: "i" },
combined: { type: "boolean", default: false },
"no-overwrite": { type: "boolean", default: false },
edit: { type: 'boolean', default: false, short: 'e' },
lines: { type: 'string', default: "", short: 'l' },
engine: { type: "string", default: process.env["AILLY_ENGINE"] },
model: { type: "string", default: process.env["AILLY_MODEL"] },
plugin: {
type: "string",
default: process.env["AILLY_PLUGIN"] ?? "noop",
},
context: { type: "string", default: "content", short: "c" },
"template-view": { type: "string", default: "" },
prompt: { type: "string", default: "", short: "p" },
system: { type: "string", default: "", short: "s" },
plugin: { type: "string", default: process.env["AILLY_PLUGIN"] ?? "noop", },
context: { type: "string", default: process.env["AILLY_CONTEXT"] ?? "content", short: "c" },
"template-view": { type: "string", default: process.env["AILLY_TEMPLATE_VIEW"] },
prompt: { type: "string", default: process.env["AILLY_PROMPT"], short: "p" },
system: { type: "string", default: process.env["AILLY_SYSTEM"], short: "s" },
temperature: { type: "string", default: "" },
"update-db": { type: "boolean", default: false },
"query-db": { type: "string", default: "" },
augment: { type: "boolean", default: false },

summary: { type: "boolean", default: false },
yes: { type: "boolean", default: false, short: "y" },
help: { type: "boolean", short: "h", default: false },
@@ -63,6 +47,7 @@ export function help() {
-s, --system sets an initial system prompt.
-p, --prompt generate a final, single piece of content and print the response to standard out.
-i, --isolated will start in isolated mode, generating each file separately. Can be overridden with 'isolated: false' in .aillyrc files.
--combined will force files to output as combined.
-o, --out specify an output folder to work with responses. Defaults to --root. Will load responses from and write outputs to here, using .ailly file extensions.
-c, --context conversation | folder | none
'conversation' (default) loads files from the root folder and includes them alphabetically, chatbot history style, before the current file when generating.
@@ -75,6 +60,7 @@ export function help() {
--engine will set the default engine. Can be set with AILLY_ENGINE environment variable. Default is bedrock. bedrock calls AWS Bedrock. noop is available for testing. (Probably? Check the code.)
--model will set the model from the engine. Can be set with AILLY_MODEL environment variable. Default depends on the engine; bedrock is anthropic.claude-3-sonnet-20240229-v1:0, OpenAI is gpt-4-0613. (Probably? Check the code.)
--temperature for models that support changing the stochastic temperature. (Usually between 0 and 1, but check the engine and model.)
--plugin can load a custom RAG plugin. Specify a path to import with "file://./path/to/plugin.mjs". plugin.mjs must export a single default function that meets the PluginBuilder interface in core/src/plugin/index.ts
--template-view loads a YAML or JSON file to use as a view for the prompt templates. This view will be merged after global, engine, and plugin views but before system and template views.
@@ -86,6 +72,13 @@ export function help() {
--version will print the cli and core versions
-h, --help will print this message and exit.
Engines:
bedrock - Call LLM models using @aws-sdk/bedrock-runtime. While this can use any model available in bedrock, in practice, because of the difference in prompt APIs, Claude3 is the only currently supported model.
openai - Call ChatGPT models using OpenAI's API.
mistral - Attempt to run Mistral 7B instruct locally, using a Python subshell.
noop - A testing model that returns with constant text (either a nonce with the name of the file, or the contents of the AILLY_NOOP_RESPONSE environment variable).
`);

// -n, --section use LLM + TreeSitter to find line numbers.
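The --plugin help above pins down only part of the contract: plugin.mjs must have a single default export satisfying the PluginBuilder interface in core/src/plugin/index.ts. That interface is not shown in this diff, so in the sketch below the builder's parameters and the returned hook are assumptions; only the default-export requirement and the file:// loading syntax come from the help text.

// plugin.mjs — illustrative skeleton only; the parameter names and the returned
// hook are hypothetical. See core/src/plugin/index.ts for the real PluginBuilder.
export default async function buildPlugin(engine, settings) {
  return {
    // Assumed hook: attach retrieved context to a piece of content before generation.
    async augment(content) {
      // look up supporting material for `content` and attach it here
    },
  };
}

Such a file would then be selected with --plugin file://./path/to/plugin.mjs, or via the AILLY_PLUGIN environment variable shown in args.js above.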
159 changes: 95 additions & 64 deletions cli/fs.js
@@ -1,76 +1,57 @@
import { NodeFileSystemAdapter } from "@davidsouther/jiffies/lib/esm/fs_node.js";
import { DEFAULT_LOGGER, LEVEL, error } from "@davidsouther/jiffies/lib/esm/log.js";
import { dirname, resolve } from "node:path";
import { DEFAULT_LOGGER, LEVEL } from "@davidsouther/jiffies/lib/esm/log.js";
import { assertExists } from "@davidsouther/jiffies/lib/esm/assert.js";
import { dirname, resolve, join } from "node:path";
import { parse } from "yaml";
// import * as yaml from "yaml";
import * as ailly from "@ailly/core";

/** @typedef {ReturnType<import("./args.js").makeArgs>} Args */
/** @typedef {import("@ailly/core").types.Content} Content */
/** @typedef {{start: number, end: number, file: string}|{after: number, file: string}} Edit */
/** @typedef {import("@ailly/core/dist/src/ailly").PipelineSettings} PipelineSettings */
/** @typedef {import("@ailly/core/dist/src/content/content").View} View */
/** @typedef {import("@davidsouther/jiffies/lib/esm/fs").FileSystem} FileSystem */

/**
* @param {ReturnType<import("./args.js").makeArgs>} args
* @param {import("@davidsouther/jiffies/lib/esm/fs").FileSystem} fs
* @param {Args} args
* @returns {Promise<{
* fs: import("@davidsouther/jiffies/lib/esm/fs").FileSystem,
* context: Record<string, ailly.types.Content>,
* content: string[],
* settings: import("@ailly/core/dist/src/ailly").PipelineSettings
* settings: PipelineSettings
* }>}
*/
export async function loadFs(args) {
export async function loadFs(fs, args) {
const root = resolve(args.values.root ?? '.');
const fs = new ailly.Ailly.GitignoreFs(new NodeFileSystemAdapter());
fs.cd(root);

const settings = await ailly.Ailly.makePipelineSettings({
root,
out: resolve(args.values.out ?? root),
context: args.values.context,
isolated: args.values.isolated,
combined: args.values.combined,
engine: args.values.engine,
model: args.values.model,
plugin: args.values.plugin,
templateView: await loadTemplateView(fs, args.values['template-view']),
overwrite: !args.values["no-overwrite"],
});
const positionals = args.positionals.slice(2).map(a => resolve(a));

const positionals = args.positionals.slice(2).map(a => resolve(join(root, a)));
const hasPositionals = positionals.length > 0;
const hasPrompt = Boolean(args.values.prompt)
const hasPrompt = args.values.prompt !== undefined && args.values.prompt !== "";
const isPipe = !hasPositionals && hasPrompt;
DEFAULT_LOGGER.level = getLogLevel(args.values['log-level'], args.values.verbose ?? false, isPipe);

let edit = undefined;
if (args.values.edit) {
if (positionals.length != 1) {
throw new Error("Edit requires exactly 1 path")
}
if (!hasPrompt) {
throw new Error("Edit requires a prompt to know what to change")
}
const line = args.values.lines?.split(':') ?? [];
const hasStart = Boolean(line[0]);
const hasEnd = Boolean(line[1]);
const start = Number(line[0]) - 1;
const end = Number(line[1]) - 1;
switch (true) {
case hasStart && hasEnd:
edit = { start, end, file: '' };
break;
case hasStart:
edit = { start, end: start + 1, file: '' };
break;
case hasEnd:
edit = { start: end - 1, end, file: "" };
break;
}
}

const system = args.values.system ?? "";

let context = await ailly.content.load(
fs,
args.values.system ? [{ content: args.values.system, view: {} }] : [],
system ? [{ content: system, view: {} }] : [],
settings
);

let content = /* @type string[] */[];

let content = /* @type {string[]} */[];
if (!hasPositionals && hasPrompt) {
Object.values(context).forEach(c => { c.meta = c.meta ?? {}; c.meta.skip = true; });
} else {
@@ -80,38 +61,88 @@ export async function loadFs(args) {
);
}

let edit = args.values.edit ? makeEdit(args.values.lines, content, hasPrompt) : undefined;
if (hasPrompt) {
if (edit && content.length == 1) {
edit.file = content[0];
content = [];
}
const noContext = args.values.context == "none";
const folder = args.values.context == 'folder' ?
Object.values(context).find(c => dirname(c.path) == root)?.context.folder : edit ? [edit.file] : undefined;
const cliContent = {
name: 'stdout',
outPath: "/dev/stdout",
path: "/dev/stdout",
prompt: args.values.prompt ?? "",
context: {
view: settings.templateView,
predecessor: noContext ? undefined : content.filter(c => dirname(c) == root).at(-1)?.path,
system: noContext ? [] : [{ content: args.values.system ?? "", view: {} }],
folder,
edit,
}
};
context['/dev/stdout'] = cliContent;
content.push('/dev/stdout');
const prompt = assertExists(args.values.prompt);
const cliContent = makeCLIContent(prompt, settings.context, system, context, root, edit, content, settings.templateView);
context[cliContent.path] = cliContent;
content.push(cliContent.path);
}

return { fs, settings, content, context };
return { settings, content, context };
}

/**
* @param {string|undefined} lines
* @param {string[]} content
* @param {boolean} hasPrompt
* @returns Edit;
*/
export function makeEdit(lines, content, hasPrompt) {
if (!lines) return undefined;
if (content.length != 1) {
throw new Error("Edit requires exactly 1 path");
}
if (!hasPrompt) {
throw new Error("Edit requires a prompt to know what to change");
}
const file = content[0];
const line = lines.split(':') ?? [];
const hasStart = Boolean(line[0]);
const hasEnd = Boolean(line[1]);
const start = Number(line[0]) - 1;
const end = Number(line[1]) - 1;
switch (true) {
case hasStart && hasEnd:
return { start, end, file };
case hasStart:
return { after: start, file };
case hasEnd:
return { after: end - 1, file };
default:
throw new Error("Edit lines have at least one of start or end");
}
}

/**
* @typedef {import("@ailly/core/dist/src/content/content").View} View
* @typedef {import("@davidsouther/jiffies/lib/esm/fs").FileSystem} FileSystem
* Create a "synthetic" Content block with path "/dev/stdout" to serve as the Content root
* for this Ailly call to the LLM.
*
* @param {string} prompt
* @param {'none'|'folder'|'content'} argContext
* @param {string} argSystem
* @param {Record<string, Content>} context
* @param {string} root
* @param {Edit|undefined} edit
* @param {string[]} content
* @param {*} view
* @returns Content
*/
export function makeCLIContent(prompt, argContext, argSystem, context, root, edit, content, view) {
// When argContext is folder, `folder` is all files in context in root.
const folder = argContext == 'folder' ? Object.keys(context).filter(c => dirname(c) == root) : undefined;
// When argContext is `content`, `predecessor` is the last item in the root folder.
const predecessor = argContext == 'content' ? content.filter(c => dirname(c) == root).at(-1) : undefined;
// When argContext is none, system is empty; otherwise, system is argSystem + predecessor's system.
const system = argContext == "none" ? [] : [{ content: argSystem ?? "", view: {} }, ...((predecessor ? context[predecessor].context.system : undefined) ?? [])];
const cliContent = {
name: 'stdout',
outPath: "/dev/stdout",
path: "/dev/stdout",
prompt: prompt ?? "",
context: {
view,
predecessor,
system,
folder,
edit,
}
};
return cliContent;
}

/**
* Read, parse, and validate a template view.
@@ -120,7 +151,7 @@ export async function loadFs(args) {
* @param {string|undefined} path
* @returns {Promise<View>}
*/
async function loadTemplateView(fs, path) {
export async function loadTemplateView(fs, path) {
if (!path) return {};
try {
const file = await fs.readFile(path);
@@ -140,7 +171,7 @@ async function loadTemplateView(fs, path) {
* @param {boolean} isPipe
* @returns {number}
*/
function getLogLevel(level, verbose, isPipe) {
export function getLogLevel(level, verbose, isPipe) {
if (level) {
switch (level) {
case "debug": LEVEL.DEBUG;
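As a quick reference for the new makeEdit above (the --lines parser now extracted from loadFs), here is how one-based CLI line arguments map onto the Edit shape; the results are traced from the code in this diff, and the import path is illustrative.

import { makeEdit } from "./cli/fs.js";

// "start:end" converts one-based CLI lines to zero-based indices.
makeEdit("10:20", ["cli/args.js"], true); // => { start: 9, end: 19, file: "cli/args.js" }

// A bare start line becomes an insertion point after that (zero-based) line.
makeEdit("10", ["cli/args.js"], true);    // => { after: 9, file: "cli/args.js" }

// A bare end (":20") also becomes an insertion point, one line earlier.
makeEdit(":20", ["cli/args.js"], true);   // => { after: 18, file: "cli/args.js" }

// An empty --lines value means no edit; multiple paths or a missing prompt throw.
makeEdit("", ["cli/args.js"], true);      // => undefined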