// chutes-selection.test.ts
/// <reference types="bun-types" />
import { describe, expect, test } from 'bun:test';
import {
  pickBestCodingChutesModel,
  pickSupportChutesModel,
} from './chutes-selection';
import type { OpenCodeFreeModel } from './types';
  8. function model(input: Partial<OpenCodeFreeModel>): OpenCodeFreeModel {
  9. return {
  10. providerID: 'chutes',
  11. model: input.model ?? 'chutes/unknown',
  12. name: input.name ?? input.model ?? 'unknown',
  13. status: input.status ?? 'active',
  14. contextLimit: input.contextLimit ?? 128000,
  15. outputLimit: input.outputLimit ?? 16000,
  16. reasoning: input.reasoning ?? false,
  17. toolcall: input.toolcall ?? false,
  18. attachment: input.attachment ?? false,
  19. dailyRequestLimit: input.dailyRequestLimit,
  20. };
  21. }
  22. describe('chutes-selection', () => {
  23. test('prefers reasoning model for primary role', () => {
  24. const models = [
  25. model({
  26. model: 'chutes/minimax-m2.1',
  27. reasoning: true,
  28. toolcall: true,
  29. contextLimit: 512000,
  30. outputLimit: 64000,
  31. dailyRequestLimit: 300,
  32. }),
  33. model({
  34. model: 'chutes/gpt-oss-20b-mini',
  35. reasoning: false,
  36. toolcall: true,
  37. dailyRequestLimit: 5000,
  38. }),
  39. ];
  40. expect(pickBestCodingChutesModel(models)?.model).toBe(
  41. 'chutes/minimax-m2.1',
  42. );
  43. });
  44. test('prefers high-cap fast model for support role', () => {
  45. const models = [
  46. model({
  47. model: 'chutes/kimi-k2.5',
  48. reasoning: true,
  49. toolcall: true,
  50. dailyRequestLimit: 300,
  51. }),
  52. model({
  53. model: 'chutes/qwen3-coder-30b-mini',
  54. reasoning: true,
  55. toolcall: true,
  56. dailyRequestLimit: 5000,
  57. }),
  58. ];
  59. expect(pickSupportChutesModel(models, 'chutes/kimi-k2.5')?.model).toBe(
  60. 'chutes/qwen3-coder-30b-mini',
  61. );
  62. });
  63. });