token-monitor/monitor.js
Hannibal Smith 34898b1196
Phase 2: analysis layer (analyze.js), cache guard, log hygiene
- analyze.js: burn rate, weekly reconstruction, cycle stagger, rotation
  rank, underspend alerts, log prune with weekly archive
- logger.js: getCachedRun(maxAgeMinutes) — skip probing if recent data exists
- monitor.js: cache guard at wake — 20-min dedup, zero extra API calls
- test.js: fix type assertion for gemini-api/xai-api providers (+5 passing);
  add 14 new tests for cache guard and analyze.js (162 total, all green)
- docs/analyze.md: usage reference

Co-authored-by: Hannibal Smith <hannibal@trentuna.com>
2026-04-05 04:49:05 +00:00

113 lines
3.5 KiB
JavaScript

#!/usr/bin/env node
/**
* monitor.js — Token Monitor entrypoint
*
* Probes all configured LLM API providers, extracts rate-limit/quota headers,
* and outputs a human-readable summary or JSON.
*
* Usage:
*   node monitor.js                          # human-readable summary + log
*   node monitor.js --json                   # JSON to stdout + log
*   node monitor.js --summary                # human-readable only (no log)
*   node monitor.js --provider team-nadja    # probe a single provider
*   node monitor.js --no-log                 # suppress log file
*/
import { getProviders } from './providers/index.js';
import { probeTeamsProvider } from './providers/anthropic-teams.js';
import { getApiAteamStatus } from './providers/anthropic-api.js';
import { probeShelleyProxy } from './providers/shelley-proxy.js';
import { probeGeminiProvider } from './providers/gemini.js';
import { probeXaiProvider } from './providers/xai.js';
import { generateReport, getSeverity } from './report.js';
import { logRun } from './logger.js';
// CLI flag parsing. All flags are independent booleans except --provider,
// which consumes the following argument as the provider name.
const args = process.argv.slice(2);
const isJson = args.includes('--json');
const isSummaryOnly = args.includes('--summary');
const noLog = args.includes('--no-log');
const filterIdx = args.indexOf('--provider');
// `?? null` so a trailing `--provider` with no value yields the same null
// sentinel as an absent flag, instead of leaking `undefined` downstream.
const filterProvider = filterIdx !== -1 ? args[filterIdx + 1] ?? null : null;
/**
 * Probe a single configured provider and return a normalized result.
 *
 * Dispatches on `p.type` to the matching provider module; unrecognized
 * types produce a skipped placeholder rather than an error. Every result
 * is stamped with the probe round-trip time, and a severity is derived
 * via getSeverity() when the provider did not supply one itself.
 *
 * @param {object} p - provider config ({ type, name, baseUrl, apiKey })
 * @returns {Promise<object>} normalized probe result
 */
async function probeProvider(p) {
  const startedAt = Date.now();
  let result;
  switch (p.type) {
    case 'teams-direct':
      result = await probeTeamsProvider(p.name, p.baseUrl, p.apiKey);
      break;
    case 'shelley-proxy':
      result = await probeShelleyProxy(p.name, p.baseUrl);
      break;
    case 'api-direct':
      // Synchronous status read — no network probe for this type.
      result = getApiAteamStatus();
      break;
    case 'gemini-api':
      result = await probeGeminiProvider(p.name, p.baseUrl, p.apiKey);
      break;
    case 'xai-api':
      result = await probeXaiProvider(p.name, p.baseUrl, p.apiKey);
      break;
    default:
      result = { type: 'unknown', status: 'skipped', severity: 'unknown' };
  }
  result.probe_latency_ms = Date.now() - startedAt;
  if (!result.severity) {
    result.severity = getSeverity(result);
  }
  return result;
}
/**
 * Entry point: serve a recent cached run when possible, otherwise probe
 * the selected providers, print the report (JSON or human-readable), and
 * persist the run to the log.
 */
async function main() {
  // Cache guard — reuse the last logged run if it is under 20 minutes old.
  // Skipped for --summary, --no-log, and --provider, which all imply the
  // caller wants a fresh (or filtered) probe rather than the cached one.
  if (!noLog && !filterProvider && !isSummaryOnly) {
    const { getCachedRun } = await import('./logger.js');
    const cached = getCachedRun(20);
    if (cached) {
      if (isJson) {
        console.log(JSON.stringify(cached, null, 2));
      } else {
        console.log(generateReport(cached));
      }
      return;
    }
  }
  const allProviders = getProviders();
  const providerNames = filterProvider
    ? [filterProvider]
    : Object.keys(allProviders);
  if (filterProvider && !allProviders[filterProvider]) {
    console.error(
      `Unknown provider: ${filterProvider}. Available: ${Object.keys(allProviders).join(', ')}`
    );
    process.exit(1);
  }
  // Probes hit independent services, so run them concurrently instead of
  // serially. Promise.all preserves input order, so the results object
  // keeps the same key order a sequential loop would have produced.
  const probed = await Promise.all(
    providerNames.map(async (name) => [name, await probeProvider(allProviders[name])])
  );
  const results = Object.fromEntries(probed);
  const output = {
    timestamp: new Date().toISOString(),
    providers: results,
  };
  if (isJson) {
    console.log(JSON.stringify(output, null, 2));
  } else {
    console.log(generateReport(output));
  }
  // Log to file unless the caller opted out (--summary / --no-log).
  if (!isSummaryOnly && !noLog) {
    logRun(output);
  }
}
/**
 * Fatal-error handler for the top-level run: report the failure and exit
 * non-zero so cron/CI callers can detect a failed probe.
 */
function reportFatalAndExit(err) {
  console.error('Fatal:', err.message);
  process.exit(1);
}
main().catch(reportFatalAndExit);