Skip to content

Commit 5faaf70

Browse files
committed
chore(ui): add data-source badge and tidy agent endpoints
1 parent dd4a0a7 commit 5faaf70

File tree

3 files changed

+41
-27
lines changed

3 files changed

+41
-27
lines changed

plugin/anglerfishlyy-llmwatch-panel/plugin.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
"name": "LLM Watch Panel",
55
"module": "src/module.tsx",
66
"info": {
7-
"description": "LLM Watch Panel - monitor latency, token usage, costs and errors from LLM requests. Integrates with Prometheus for time-series queries.",
7+
"description": "LLM Watch Panel: visualizes latency, token usage, cost, and errors from LLM requests. Supports demo JSON from the agent and Prometheus time-series queries.",
88
"author": {
99
"name": "Anglerfishlyy"
1010
},

plugin/anglerfishlyy-llmwatch-panel/src/components/LLMWatchPanel.tsx

Lines changed: 39 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -44,44 +44,49 @@ export const LLMWatchPanel: React.FC<PanelProps<LLMWatchOptions>> = ({
4444
}) => {
4545
const theme = useTheme2();
4646

47+
// Choose agent host dynamically: use localhost when developing locally, otherwise use the service name
48+
const AGENT_URL =
49+
(typeof window !== 'undefined' && window.location && window.location.hostname === 'localhost')
50+
? 'http://localhost:8080/metrics/all'
51+
: 'http://agent:8080/metrics/all';
52+
4753
// Fetch live metrics from the agent backend (inside Docker use service name 'agent')
4854
const [metricsState, setMetricsState] = useState<any[]>([]);
4955
const [fetchError, setFetchError] = useState<string | null>(null);
56+
// Track which data source was last used to populate the UI
57+
const [lastDataSource, setLastDataSource] = useState<'agent' | 'prometheus' | null>(null);
5058

5159
useEffect(() => {
5260
let mounted = true;
5361

5462
const fetchMetrics = async () => {
55-
const endpoints = [
56-
'http://agent:8080/metrics/all',
57-
'http://localhost:8080/metrics/all',
58-
];
59-
60-
for (const url of endpoints) {
61-
try {
62-
const resp = await fetch(url);
63-
if (!resp.ok) continue;
64-
const json = await resp.json();
65-
const arr = json.metrics || [];
66-
if (Array.isArray(arr)) {
63+
try {
64+
const resp = await fetch(AGENT_URL);
65+
if (!resp.ok) {
66+
if (mounted) setFetchError(`Agent responded with status ${resp.status}`);
67+
return;
68+
}
69+
const json = await resp.json();
70+
const arr = json.metrics || [];
71+
if (Array.isArray(arr)) {
6772
if (mounted) {
68-
setMetricsState(arr);
69-
setFetchError(null);
70-
try {
71-
// Debug log so Grafana console shows agent path usage
72-
// eslint-disable-next-line no-console
73-
console.log('LLMWatchPanel: fetched metrics from agent endpoint', url, 'count=', arr.length);
74-
} catch (e) {
75-
// ignore logging errors in panel runtime
76-
}
73+
setMetricsState(arr);
74+
setFetchError(null);
75+
setLastDataSource('agent');
76+
try {
77+
// Debug log so Grafana console shows agent path usage
78+
// eslint-disable-next-line no-console
79+
console.log('LLMWatchPanel: fetched metrics from agent endpoint', AGENT_URL, 'count=', arr.length);
80+
} catch (e) {
81+
// ignore logging errors in panel runtime
7782
}
78-
return;
7983
}
80-
} catch (err) {
81-
// try the next endpoint
84+
} else {
85+
if (mounted) setFetchError('Invalid metrics payload');
8286
}
87+
} catch (err) {
88+
if (mounted) setFetchError('Error fetching metrics');
8389
}
84-
if (mounted) setFetchError('Error fetching metrics');
8590
};
8691

8792
fetchMetrics();
@@ -164,6 +169,7 @@ export const LLMWatchPanel: React.FC<PanelProps<LLMWatchOptions>> = ({
164169

165170
if (mounted) {
166171
setPromSeries(series);
172+
setLastDataSource('prometheus');
167173
try {
168174
// Debug log so Grafana console shows Prometheus path usage
169175
// eslint-disable-next-line no-console
@@ -428,6 +434,14 @@ export const LLMWatchPanel: React.FC<PanelProps<LLMWatchOptions>> = ({
428434
overflowX: 'auto',
429435
fontFamily: theme.typography.fontFamily
430436
}}>
437+
{/* Data source badge + Primary Metrics */}
438+
<div style={{ display: 'flex', justifyContent: 'flex-end', marginBottom: theme.spacing(1) }}>
439+
{lastDataSource && (
440+
<div style={{ display: 'flex', alignItems: 'center', gap: theme.spacing(1) }}>
441+
<Badge text={`data: ${lastDataSource}`} color={lastDataSource === 'agent' ? 'blue' : 'purple'} />
442+
</div>
443+
)}
444+
</div>
431445
{/* Primary Metrics */}
432446
<div style={{
433447
display: 'grid',

plugin/anglerfishlyy-llmwatch-panel/src/plugin.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
"id": "anglerfishlyy-llmwatch-panel",
66
"info": {
77
"keywords": ["panel", "llm", "metrics", "cerebras", "grafana"],
8-
"description": "A Grafana panel to monitor latency, tokens, costs, and errors from LLM API calls (Cerebras, Llama, etc).",
8+
"description": "LLM Watch Panel: visualizes latency, token usage, cost, and errors from LLM requests. Supports demo JSON from the agent and Prometheus time-series queries.",
99
"author": {
1010
"name": "Anglerfishlyy"
1111
},

0 commit comments

Comments (0)