include dist in build
Some checks failed
ci / test (push) Failing after 5m3s
ci / build (push) Successful in 12m4s

CJ_Clippy 2025-09-25 15:36:48 -08:00
parent b050f0e8f1
commit a7d8333ee1
4 changed files with 26 additions and 108 deletions

View File

@@ -1,4 +1,3 @@
dist
vibeui
venv
src/test

Binary file not shown.

View File

@@ -1,107 +0,0 @@
// import { describe, it, expect, vi } from 'vitest';
// import { runInferenceOnFrame } from '../utils/vibeui';
// import {
//   type InferenceSession,
//   type Tensor,
// } from 'onnxruntime-node'
// type DetectionOutput = {
//   bbox: [number, number, number, number];
//   confidence: number;
//   classIndex: number;
// };
// describe('runInferenceOnFrame', () => {
//   it('parses detections and filters by confidence, rounds classIndex, includes out-of-range classes', async () => {
//     // Mock session
//     const mockSession = {
//       inputNames: ['input'],
//       outputNames: ['output'],
//       run: vi.fn().mockResolvedValue({
//         output: {
//           data: new Float32Array([
//             0.1, 0.2, 0.3, 0.4, 0.5, 10, // valid detection
//             0.2, 0.3, 0.4, 0.5, 0.2, 5, // confidence too low, filtered out
//             0.3, 0.4, 0.5, 0.6, 0.9, 54 // class 54 out of range, but included
//           ]),
//           dims: [3, 6]
//         }
//       }),
//     } as unknown as InferenceSession;
//     // Mock tensor input, content irrelevant here
//     const mockTensor = {} as Tensor;
//     const detections = await runInferenceOnFrame(mockSession, mockTensor);
//     expect(detections).toHaveLength(2);
//     expect(detections[0]).toEqual({
//       bbox: [0.1, 0.2, 0.3, 0.4],
//       confidence: 0.5,
//       classIndex: 10,
//     });
//     expect(detections[1]).toEqual({
//       bbox: [0.3, 0.4, 0.5, 0.6],
//       confidence: 0.9,
//       classIndex: 54,
//     });
//   });
//   it('throws if output missing or data is wrong type', async () => {
//     const badSession = {
//       inputNames: ['input'],
//       outputNames: ['output'],
//       run: vi.fn().mockResolvedValue({
//         output: {
//           data: [1, 2, 3], // not Float32Array
//           dims: [1, 6]
//         }
//       }),
//     } as unknown as InferenceSession;
//     await expect(runInferenceOnFrame(badSession, {} as Tensor)).rejects.toThrow(
//       'Unexpected model output format'
//     );
//   });
// });
import { describe, it, expect } from 'vitest';
import fs from 'fs/promises';
import path, { resolve } from 'path';
import ort from 'onnxruntime-node';
import sharp from 'sharp';
import { preprocessImage, runModelInference } from '../utils/vibeui';
const __dirname = import.meta.dirname;
const FIXTURE_DIR = resolve(__dirname, 'fixtures');
const DIST_DIR = resolve(__dirname, '..', '..', 'dist');
const VIBEUI_DIR = resolve(DIST_DIR, 'vibeui');
const VIDEO = resolve(FIXTURE_DIR, 'sample.mp4');
const MODEL_PATH = resolve(VIBEUI_DIR, 'vibeui.onnx');
const IMAGE_PATH = resolve(FIXTURE_DIR, 'prediction/frames/000001.jpg');
describe.skip('runInferenceOnFrame integration', () => {
  it('runs inference on real image and returns valid detections', async () => {
    // Load ONNX model session
    const session = await ort.InferenceSession.create(MODEL_PATH);
    // Prepare input tensor from JPG image
    const inputTensor = await preprocessImage(IMAGE_PATH);
    // Run inference
    const detections = await runModelInference(session, inputTensor);
    // Check output is not empty and class indices are within range
    expect(detections.length).toBeGreaterThan(0);
    for (const det of detections) {
      expect(det.confidence).toBeGreaterThan(0.3);
      expect(det.classIndex).toBeGreaterThanOrEqual(0);
      expect(det.classIndex).toBeLessThan(19); // since you have 19 classes
      expect(det.bbox).toHaveLength(4);
    }
  });
});
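The deleted integration test above builds its input tensor with preprocessImage(IMAGE_PATH) from ../utils/vibeui, whose implementation is not part of this diff. For reference, here is a minimal sketch of what such a helper could look like, assuming a YOLO-style model that expects a 1x3xHxW float32 tensor normalized to [0, 1]; the name preprocessImageSketch and the 640x640 input size are assumptions, not the project's actual code:

import sharp from 'sharp';
import ort, { type Tensor } from 'onnxruntime-node';

// Sketch only: decode a JPG with sharp, convert interleaved HWC uint8 pixels
// to planar CHW float32, and wrap the result in an onnxruntime Tensor.
// The real utils/vibeui implementation may differ.
async function preprocessImageSketch(imagePath: string, size = 640): Promise<Tensor> {
  const { data, info } = await sharp(imagePath)
    .resize(size, size, { fit: 'fill' })
    .removeAlpha()
    .raw()
    .toBuffer({ resolveWithObject: true });

  const { width, height, channels } = info;
  const chw = new Float32Array(channels * height * width);
  for (let c = 0; c < channels; c++) {
    for (let y = 0; y < height; y++) {
      for (let x = 0; x < width; x++) {
        // Interleaved RGB byte -> planar float in [0, 1]
        chw[c * height * width + y * width + x] =
          data[(y * width + x) * channels + c] / 255;
      }
    }
  }

  return new ort.Tensor('float32', chw, [1, channels, height, width]);
}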

View File

@@ -0,0 +1,26 @@
import { describe, it, expect } from 'vitest';
import fs from 'fs/promises';
import path, { resolve } from 'path';
import ort from 'onnxruntime-node';
import sharp from 'sharp';
import { preprocessImage, inference } from '../utils/vibeui';
const __dirname = import.meta.dirname;
const FIXTURE_DIR = resolve(__dirname, 'fixtures');
const DIST_DIR = resolve(__dirname, '..', '..', 'dist');
const VIBEUI_DIR = resolve(DIST_DIR, 'vibeui');
const VIDEO = resolve(FIXTURE_DIR, 'sample-short.mp4');
describe('inference integration', () => {
  it('runs inference on real video and returns valid detections', { timeout: 60000 }, async () => {
    // Run inference
    const output = await inference(VIDEO);
    // Check the returned output path points into a runs directory
    expect(output).toContain('/runs/');
  });
});
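The new test only asserts that the string returned by inference contains '/runs/'. If that string is a filesystem path to the run's output directory (an assumption; the return type is not visible in this diff), a follow-up check could confirm the directory actually exists. The helper below is hypothetical and not part of this commit:

import fs from 'fs/promises';

// Hypothetical helper: fs.stat rejects if the path does not exist,
// so this throws unless runsPath is a real directory on disk.
async function assertRunsDirExists(runsPath: string): Promise<void> {
  const stat = await fs.stat(runsPath);
  if (!stat.isDirectory()) {
    throw new Error(`expected a directory at ${runsPath}`);
  }
}

// Possible usage inside the test above, after the toContain assertion:
//   await assertRunsDirExists(output);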