2020-01-14 23:19:26 +00:00
|
|
|
|
const unzip = require('unzip-stream');
|
|
|
|
|
const csv = require('csv-parse');
|
2020-07-16 22:16:54 +00:00
|
|
|
|
const axios = require('axios');
|
2020-01-14 23:19:26 +00:00
|
|
|
|
|
|
|
|
|
/**
 * Process a CSV stream to extract passings.
 *
 * @private
 * @param csvStream Stream containing CSV data.
 * @param callback See fetchRealtime for a description of the callback.
 */
const processTamPassingStream = (csvStream, callback) =>
{
    // Semicolon-separated values, one passing per row
    const rows = csvStream.pipe(csv({ delimiter: ';' }));

    rows.on('readable', () =>
    {
        for (let record = rows.read(); record; record = rows.read())
        {
            // Ignore invalid lines and the header row
            if (record.length === 0 || record[0] === 'course')
            {
                continue;
            }

            const [
                course,
                stopCode,
                stopId,
                stopName,
                routeShortName,
                tripHeadsign,
                directionId,
                departureTime,
                isTheorical,
                delaySec,
                destArCode,
            ] = record;

            callback(null, {
                course,
                stopCode,
                stopId,
                stopName,
                routeShortName,
                tripHeadsign,
                directionId,
                departureTime,
                isTheorical,
                delaySec,
                destArCode,
            });
        }
    });

    // End of data and failures are both reported through the callback
    rows.on('end', () => callback(null, null));
    rows.on('error', err => callback(err));
};
|
|
|
|
|
|
|
|
|
|
// Download URL for the network-wide realtime passings feed (used by
// fetchRealtime); presumably the Montpellier open-data portal — see caller.
const tamRealtimeEndpoint = 'http://data.montpellier3m.fr/node/10732/download';
|
|
|
|
|
|
|
|
|
|
/**
 * Fetch realtime passings for the current day across the network.
 *
 * @param callback Called for each passing during parsing. First argument will
 * be non-null only if an error occurred. Second argument will contain passings
 * or be null if the end was reached.
 */
const fetchRealtime = (callback) =>
{
    axios.get(tamRealtimeEndpoint, {
        responseType: 'stream'
    })
        .then(res => processTamPassingStream(res.data, callback))
        // Without this, a network/HTTP failure became an unhandled promise
        // rejection and the caller was never notified. Route it through the
        // callback, honoring the documented error contract.
        .catch(err => callback(err));
};

exports.fetchRealtime = fetchRealtime;
|
2020-01-14 23:19:26 +00:00
|
|
|
|
|
|
|
|
|
// Download URL for the theoretical (planned) offer; fetchTheoretical expects
// this to serve a zip archive containing the CSV named below.
const tamTheoreticalEndpoint =
    'http://data.montpellier3m.fr/node/10731/download';
// Name of the CSV entry to extract from the downloaded archive
const tamTheoreticalFileName = 'offre_du_jour.csv';
|
|
|
|
|
|
|
|
|
|
/**
 * Fetch theoretical passings for the current day across the network.
 *
 * @param callback Called for each passing during parsing. First argument will
 * be non-null only if an error occurred. Second argument will contain passings
 * or be null if the end was reached.
 */
const fetchTheoretical = (callback) =>
{
    axios.get(tamTheoreticalEndpoint, {
        responseType: 'stream'
    }).then(res =>
    {
        // The endpoint serves a zip archive; unpack it entry by entry
        const fileStream = res.data.pipe(unzip.Parse());

        fileStream.on('entry', entry =>
        {
            // Only the day's offer CSV is relevant; drain other entries so
            // the archive stream keeps flowing
            if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
            {
                entry.autodrain();
                return;
            }

            processTamPassingStream(entry, callback);
        });

        fileStream.on('error', err => callback(err));
    })
        // Without this, a network/HTTP failure became an unhandled promise
        // rejection and the caller was never notified. Route it through the
        // callback, honoring the documented error contract.
        .catch(err => callback(err));
};

exports.fetchTheoretical = fetchTheoretical;
|