feat: improve dashboard
All checks were successful
Auto Tag on Merge to Main / auto-tag (push) Successful in 3s
CI - Lint and Build / lint-backend (push) Successful in 17s
CI - Lint and Build / lint-frontend (push) Successful in 12s
CI - Lint and Build / build-frontend (push) Successful in 16s
CI - Lint and Build / docker-build-test (push) Successful in 36s

This commit is contained in:
Millaguie
2025-11-28 15:03:22 +01:00
parent 436a7e25d4
commit f979ad67a0
4 changed files with 1848 additions and 6 deletions

View File

@@ -344,4 +344,428 @@ router.get('/available-range', async (req, res, next) => {
}
});
// GET /dashboard/lines - Get list of all lines with basic stats.
// Query params:
//   hours (optional, default 24) - lookback window in hours; must be a
//   finite positive number, otherwise 400 is returned.
// Responds with an array of per-(line_code, nucleo) aggregates; lines
// with fewer than 5 observations in the window are excluded.
router.get('/lines', async (req, res, next) => {
  try {
    // Validate the lookback window explicitly: req.query values are
    // strings, and an unchecked NaN would silently become an Invalid
    // Date in the query parameter below.
    const hours = Number(req.query.hours ?? 24);
    if (!Number.isFinite(hours) || hours <= 0) {
      return res.status(400).json({ error: 'Invalid hours parameter' });
    }
    const startTime = new Date(Date.now() - hours * 3600000);
    const result = await db.query(`
      SELECT
        line_code,
        nucleo,
        COUNT(DISTINCT train_id) as unique_trains,
        COUNT(*) as observations,
        AVG(delay_minutes)::FLOAT as avg_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND line_code IS NOT NULL
      GROUP BY line_code, nucleo
      HAVING COUNT(*) >= 5
      ORDER BY unique_trains DESC
    `, [startTime]);
    // Attach the human-readable region name; fall back to the raw code
    // when the nucleo is not in the lookup table.
    const linesWithNucleoName = result.rows.map(row => ({
      ...row,
      nucleo_name: NUCLEO_NAMES[row.nucleo] || row.nucleo,
    }));
    res.json(linesWithNucleoName);
  } catch (error) {
    next(error);
  }
});
// GET /dashboard/regions - Get list of all regions (nucleos) with stats.
// Query params:
//   hours (optional, default 24) - lookback window in hours; must be a
//   finite positive number, otherwise 400 is returned.
// Responds with an array of per-nucleo aggregates ordered by train count.
router.get('/regions', async (req, res, next) => {
  try {
    // Validate the lookback window explicitly: req.query values are
    // strings, and an unchecked NaN would silently become an Invalid
    // Date in the query parameter below.
    const hours = Number(req.query.hours ?? 24);
    if (!Number.isFinite(hours) || hours <= 0) {
      return res.status(400).json({ error: 'Invalid hours parameter' });
    }
    const startTime = new Date(Date.now() - hours * 3600000);
    const result = await db.query(`
      SELECT
        nucleo,
        COUNT(DISTINCT line_code) as line_count,
        COUNT(DISTINCT train_id) as unique_trains,
        COUNT(*) as observations,
        AVG(delay_minutes)::FLOAT as avg_delay,
        MAX(delay_minutes) as max_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND nucleo IS NOT NULL
      GROUP BY nucleo
      ORDER BY unique_trains DESC
    `, [startTime]);
    // Attach the human-readable region name; fall back to the raw code
    // when the nucleo is not in the lookup table.
    const regionsWithName = result.rows.map(row => ({
      ...row,
      nucleo_name: NUCLEO_NAMES[row.nucleo] || row.nucleo,
    }));
    res.json(regionsWithName);
  } catch (error) {
    next(error);
  }
});
// GET /dashboard/line/:lineCode - Get detailed stats for a specific line.
// Path params:
//   lineCode - the line identifier to report on.
// Query params:
//   nucleo (optional) - restrict to a single region.
//   hours  (optional, default 24) - lookback window in hours; must be a
//   finite positive number, otherwise 400 is returned.
// Responds 404 when no observations exist for the line in the window.
router.get('/line/:lineCode', async (req, res, next) => {
  try {
    const { lineCode } = req.params;
    const { nucleo } = req.query;
    // Validate the lookback window explicitly: req.query values are
    // strings, and an unchecked NaN would silently become an Invalid
    // Date in the query parameter below.
    const hours = Number(req.query.hours ?? 24);
    if (!Number.isFinite(hours) || hours <= 0) {
      return res.status(400).json({ error: 'Invalid hours parameter' });
    }
    const startTime = new Date(Date.now() - hours * 3600000);
    // All three queries share the same optional-nucleo filter; build the
    // SQL fragment and the parameter list once so they cannot drift apart.
    const nucleoFilter = nucleo ? 'AND nucleo = $3' : '';
    const params = nucleo ? [startTime, lineCode, nucleo] : [startTime, lineCode];
    // Basic stats for the line
    const statsResult = await db.query(`
      SELECT
        line_code,
        nucleo,
        COUNT(DISTINCT train_id) as unique_trains,
        COUNT(*) as observations,
        AVG(delay_minutes)::FLOAT as avg_delay,
        MAX(delay_minutes) as max_delay,
        MIN(delay_minutes) as min_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct,
        COUNT(CASE WHEN delay_minutes <= 0 THEN 1 END) as on_time,
        COUNT(CASE WHEN delay_minutes > 0 AND delay_minutes <= 5 THEN 1 END) as minor_delay,
        COUNT(CASE WHEN delay_minutes > 5 AND delay_minutes <= 15 THEN 1 END) as moderate_delay,
        COUNT(CASE WHEN delay_minutes > 15 THEN 1 END) as severe_delay
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND line_code = $2
        ${nucleoFilter}
      GROUP BY line_code, nucleo
    `, params);
    if (statsResult.rows.length === 0) {
      return res.status(404).json({ error: 'Line not found or no data available' });
    }
    // Timeline for this line, bucketed by hour
    const timelineResult = await db.query(`
      WITH time_buckets AS (
        SELECT
          date_trunc('hour', recorded_at) as time_bucket,
          train_id,
          delay_minutes
        FROM train_punctuality
        WHERE recorded_at >= $1
          AND line_code = $2
          ${nucleoFilter}
      )
      SELECT
        time_bucket,
        COUNT(DISTINCT train_id) as train_count,
        AVG(delay_minutes)::FLOAT as avg_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct
      FROM time_buckets
      GROUP BY time_bucket
      ORDER BY time_bucket
    `, params);
    // Top stations for this line, by number of distinct trains
    const stationsResult = await db.query(`
      SELECT
        origin_station_code as station_code,
        COUNT(DISTINCT train_id) as train_count,
        AVG(delay_minutes)::FLOAT as avg_delay
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND line_code = $2
        ${nucleoFilter}
        AND origin_station_code IS NOT NULL
      GROUP BY origin_station_code
      ORDER BY train_count DESC
      LIMIT 10
    `, params);
    const stats = statsResult.rows[0];
    // node-postgres returns COUNT()/int8 columns as strings, so parse
    // them back to numbers before serializing the response.
    res.json({
      line_code: lineCode,
      nucleo: stats.nucleo,
      nucleo_name: NUCLEO_NAMES[stats.nucleo] || stats.nucleo,
      stats: {
        unique_trains: parseInt(stats.unique_trains, 10),
        observations: parseInt(stats.observations, 10),
        avg_delay: parseFloat(stats.avg_delay) || 0,
        max_delay: parseInt(stats.max_delay, 10),
        min_delay: parseInt(stats.min_delay, 10),
        punctuality_pct: parseFloat(stats.punctuality_pct) || 0,
        punctuality_breakdown: {
          on_time: parseInt(stats.on_time, 10),
          minor_delay: parseInt(stats.minor_delay, 10),
          moderate_delay: parseInt(stats.moderate_delay, 10),
          severe_delay: parseInt(stats.severe_delay, 10),
        },
      },
      timeline: timelineResult.rows.map(row => ({
        timestamp: row.time_bucket,
        train_count: parseInt(row.train_count, 10),
        avg_delay: parseFloat(row.avg_delay) || 0,
        punctuality_pct: parseFloat(row.punctuality_pct) || 0,
      })),
      top_stations: stationsResult.rows.map(row => ({
        station_code: row.station_code,
        train_count: parseInt(row.train_count, 10),
        avg_delay: parseFloat(row.avg_delay) || 0,
      })),
    });
  } catch (error) {
    next(error);
  }
});
// GET /dashboard/region/:nucleo - Get detailed stats for a specific region.
// Path params:
//   nucleo - the region (nucleo) code to report on.
// Query params:
//   hours (optional, default 24) - lookback window in hours; must be a
//   finite positive number, otherwise 400 is returned.
// Responds 404 when no observations exist for the region in the window.
router.get('/region/:nucleo', async (req, res, next) => {
  try {
    const { nucleo } = req.params;
    // Validate the lookback window explicitly: req.query values are
    // strings, and an unchecked NaN would silently become an Invalid
    // Date in the query parameter below.
    const hours = Number(req.query.hours ?? 24);
    if (!Number.isFinite(hours) || hours <= 0) {
      return res.status(400).json({ error: 'Invalid hours parameter' });
    }
    const startTime = new Date(Date.now() - hours * 3600000);
    // Basic stats for the region
    const statsResult = await db.query(`
      SELECT
        nucleo,
        COUNT(DISTINCT line_code) as line_count,
        COUNT(DISTINCT train_id) as unique_trains,
        COUNT(*) as observations,
        AVG(delay_minutes)::FLOAT as avg_delay,
        MAX(delay_minutes) as max_delay,
        MIN(delay_minutes) as min_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct,
        COUNT(CASE WHEN delay_minutes <= 0 THEN 1 END) as on_time,
        COUNT(CASE WHEN delay_minutes > 0 AND delay_minutes <= 5 THEN 1 END) as minor_delay,
        COUNT(CASE WHEN delay_minutes > 5 AND delay_minutes <= 15 THEN 1 END) as moderate_delay,
        COUNT(CASE WHEN delay_minutes > 15 THEN 1 END) as severe_delay
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND nucleo = $2
      GROUP BY nucleo
    `, [startTime, nucleo]);
    if (statsResult.rows.length === 0) {
      return res.status(404).json({ error: 'Region not found or no data available' });
    }
    // Timeline for this region, bucketed by hour
    const timelineResult = await db.query(`
      WITH time_buckets AS (
        SELECT
          date_trunc('hour', recorded_at) as time_bucket,
          train_id,
          delay_minutes
        FROM train_punctuality
        WHERE recorded_at >= $1
          AND nucleo = $2
      )
      SELECT
        time_bucket,
        COUNT(DISTINCT train_id) as train_count,
        AVG(delay_minutes)::FLOAT as avg_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct
      FROM time_buckets
      GROUP BY time_bucket
      ORDER BY time_bucket
    `, [startTime, nucleo]);
    // Busiest lines in this region (top 15 by distinct trains)
    const linesResult = await db.query(`
      SELECT
        line_code,
        COUNT(DISTINCT train_id) as unique_trains,
        AVG(delay_minutes)::FLOAT as avg_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct
      FROM train_punctuality
      WHERE recorded_at >= $1
        AND nucleo = $2
        AND line_code IS NOT NULL
      GROUP BY line_code
      ORDER BY unique_trains DESC
      LIMIT 15
    `, [startTime, nucleo]);
    const stats = statsResult.rows[0];
    // node-postgres returns COUNT()/int8 columns as strings, so parse
    // them back to numbers before serializing the response.
    res.json({
      nucleo,
      nucleo_name: NUCLEO_NAMES[nucleo] || nucleo,
      stats: {
        line_count: parseInt(stats.line_count, 10),
        unique_trains: parseInt(stats.unique_trains, 10),
        observations: parseInt(stats.observations, 10),
        avg_delay: parseFloat(stats.avg_delay) || 0,
        max_delay: parseInt(stats.max_delay, 10),
        min_delay: parseInt(stats.min_delay, 10),
        punctuality_pct: parseFloat(stats.punctuality_pct) || 0,
        punctuality_breakdown: {
          on_time: parseInt(stats.on_time, 10),
          minor_delay: parseInt(stats.minor_delay, 10),
          moderate_delay: parseInt(stats.moderate_delay, 10),
          severe_delay: parseInt(stats.severe_delay, 10),
        },
      },
      timeline: timelineResult.rows.map(row => ({
        timestamp: row.time_bucket,
        train_count: parseInt(row.train_count, 10),
        avg_delay: parseFloat(row.avg_delay) || 0,
        punctuality_pct: parseFloat(row.punctuality_pct) || 0,
      })),
      lines: linesResult.rows.map(row => ({
        line_code: row.line_code,
        unique_trains: parseInt(row.unique_trains, 10),
        avg_delay: parseFloat(row.avg_delay) || 0,
        punctuality_pct: parseFloat(row.punctuality_pct) || 0,
      })),
    });
  } catch (error) {
    next(error);
  }
});
// GET /dashboard/compare - Compare stats between two date ranges.
// Query params:
//   start1, end1, start2, end2 (required) - ISO date strings bounding
//     the two ranges; invalid dates are rejected with 400.
//   lineCode (optional) - restrict both ranges to one line.
//   nucleo   (optional) - restrict both ranges to one region.
router.get('/compare', async (req, res, next) => {
  try {
    const { start1, end1, start2, end2, lineCode, nucleo } = req.query;
    if (!start1 || !end1 || !start2 || !end2) {
      return res.status(400).json({
        error: 'Required parameters: start1, end1, start2, end2'
      });
    }
    const range1Start = new Date(start1);
    const range1End = new Date(end1);
    const range2Start = new Date(start2);
    const range2End = new Date(end2);
    // Reject unparseable dates up front: calling toISOString() on an
    // Invalid Date below would throw a RangeError and surface as a 500.
    const allDates = [range1Start, range1End, range2Start, range2End];
    if (allDates.some((d) => Number.isNaN(d.getTime()))) {
      return res.status(400).json({
        error: 'Invalid date format: start1, end1, start2, end2 must be valid dates'
      });
    }
    // Build WHERE clause based on filters; both ranges share the same
    // filters, so the extra parameters are appended to both lists.
    let filterClause = '';
    const params1 = [range1Start, range1End];
    const params2 = [range2Start, range2End];
    if (lineCode) {
      filterClause += ` AND line_code = $3`;
      params1.push(lineCode);
      params2.push(lineCode);
    }
    if (nucleo) {
      // The placeholder index depends on whether lineCode already took $3.
      const nucleoParamIndex = lineCode ? 4 : 3;
      filterClause += ` AND nucleo = $${nucleoParamIndex}`;
      params1.push(nucleo);
      params2.push(nucleo);
    }
    const queryText = `
      SELECT
        COUNT(DISTINCT train_id) as unique_trains,
        COUNT(*) as observations,
        AVG(delay_minutes)::FLOAT as avg_delay,
        MAX(delay_minutes) as max_delay,
        ROUND(
          COUNT(CASE WHEN delay_minutes <= 5 THEN 1 END)::NUMERIC /
          NULLIF(COUNT(*), 0) * 100, 1
        ) as punctuality_pct,
        COUNT(CASE WHEN delay_minutes <= 0 THEN 1 END) as on_time,
        COUNT(CASE WHEN delay_minutes > 0 AND delay_minutes <= 5 THEN 1 END) as minor_delay,
        COUNT(CASE WHEN delay_minutes > 5 AND delay_minutes <= 15 THEN 1 END) as moderate_delay,
        COUNT(CASE WHEN delay_minutes > 15 THEN 1 END) as severe_delay
      FROM train_punctuality
      WHERE recorded_at BETWEEN $1 AND $2
        ${filterClause}
    `;
    // The two ranges are independent, so query them in parallel.
    const [range1Result, range2Result] = await Promise.all([
      db.query(queryText, params1),
      db.query(queryText, params2),
    ]);
    const range1Stats = range1Result.rows[0] || {};
    const range2Stats = range2Result.rows[0] || {};
    // Coerce a pg value (string or number) to a number, or null when it
    // is missing/unparseable. Using parseFloat + NaN check (rather than
    // truthiness) keeps a legitimate value of 0 from being dropped.
    const toNumber = (val) => {
      const n = parseFloat(val);
      return Number.isNaN(n) ? null : n;
    };
    // Absolute difference (range1 - range2); null when either side is missing.
    const calculateDiff = (val1, val2) => {
      const a = toNumber(val1);
      const b = toNumber(val2);
      if (a === null || b === null) return null;
      return a - b;
    };
    // Percent change relative to range2, as a 1-decimal string; null when
    // either side is missing or the baseline is zero.
    const calculatePctChange = (val1, val2) => {
      const a = toNumber(val1);
      const b = toNumber(val2);
      if (a === null || b === null || b === 0) return null;
      return ((a - b) / b * 100).toFixed(1);
    };
    res.json({
      range1: {
        start: range1Start.toISOString(),
        end: range1End.toISOString(),
        stats: {
          unique_trains: parseInt(range1Stats.unique_trains, 10) || 0,
          observations: parseInt(range1Stats.observations, 10) || 0,
          avg_delay: parseFloat(range1Stats.avg_delay) || 0,
          max_delay: parseInt(range1Stats.max_delay, 10) || 0,
          punctuality_pct: parseFloat(range1Stats.punctuality_pct) || 0,
          punctuality_breakdown: {
            on_time: parseInt(range1Stats.on_time, 10) || 0,
            minor_delay: parseInt(range1Stats.minor_delay, 10) || 0,
            moderate_delay: parseInt(range1Stats.moderate_delay, 10) || 0,
            severe_delay: parseInt(range1Stats.severe_delay, 10) || 0,
          },
        },
      },
      range2: {
        start: range2Start.toISOString(),
        end: range2End.toISOString(),
        stats: {
          unique_trains: parseInt(range2Stats.unique_trains, 10) || 0,
          observations: parseInt(range2Stats.observations, 10) || 0,
          avg_delay: parseFloat(range2Stats.avg_delay) || 0,
          max_delay: parseInt(range2Stats.max_delay, 10) || 0,
          punctuality_pct: parseFloat(range2Stats.punctuality_pct) || 0,
          punctuality_breakdown: {
            on_time: parseInt(range2Stats.on_time, 10) || 0,
            minor_delay: parseInt(range2Stats.minor_delay, 10) || 0,
            moderate_delay: parseInt(range2Stats.moderate_delay, 10) || 0,
            severe_delay: parseInt(range2Stats.severe_delay, 10) || 0,
          },
        },
      },
      comparison: {
        unique_trains_diff: calculateDiff(range1Stats.unique_trains, range2Stats.unique_trains),
        avg_delay_diff: calculateDiff(range1Stats.avg_delay, range2Stats.avg_delay),
        punctuality_diff: calculateDiff(range1Stats.punctuality_pct, range2Stats.punctuality_pct),
        unique_trains_pct_change: calculatePctChange(range1Stats.unique_trains, range2Stats.unique_trains),
        avg_delay_pct_change: calculatePctChange(range1Stats.avg_delay, range2Stats.avg_delay),
        punctuality_pct_change: calculatePctChange(range1Stats.punctuality_pct, range2Stats.punctuality_pct),
      },
      filters: {
        line_code: lineCode || null,
        nucleo: nucleo || null,
      },
    });
  } catch (error) {
    next(error);
  }
});
export default router;