const csv = require("csv-parse");
const axios = require("axios");
const path = require("path");
const fs = require("fs").promises;
const { snakeToCamelCase, unzipFile } = require("../../util");

/**
 * Data available for each passing of a vehicle at a station.
 *
 * @typedef {Object} Passing
 * @property {string} course Identifier of the overall trip of the same vehicle
 * from one end of the route to another (unique for the day).
 * @property {string} stopCode Unused internal stop identifier.
 * @property {string} stopId Unique network identifier for the station at
 * which the vehicle will pass (same id as in GTFS).
 * @property {string} routeShortName Transport line number.
 * @property {string} tripHeadsign Name of the final stop of this trip.
 * @property {string} directionId Route identifier inside the line.
 * @property {string} departureTime Theoretical time at which the
 * vehicle will depart the stop (HH:MM:SS format).
 * @property {string} isTheorical (sic) True if this is only the planned
 * passing time, false if this is real-time information.
 * @property {string} delaySec Number of seconds before the vehicle arrives
 * at the station (only if isTheorical is false).
 * @property {string} destArCode Unique network identifier for the final
 * stop of this trip (only if isTheorical is false).
 */

/**
 * Wrap a passing-fetching function to use a filesystem-based cache.
 *
 * The wrapped generator first yields a timing object (with a `nextUpdate`
 * timestamp), then the individual passings. While `nextUpdate` has not been
 * reached, subsequent calls replay the cached data instead of refetching.
 *
 * @param {function} func Fetching function to wrap.
 * @param {string} cachePath Path to the file to use as a cache (will be
 * overwritten, may be non-existing).
 * @return {function} Wrapped function.
 */
const makeCached = (func, cachePath) => {
    return async function *() {
        try {
            const cacheRaw = await fs.readFile(cachePath, {encoding: "utf8"});
            const cache = JSON.parse(cacheRaw);

            // Replay the cache while it is still fresh
            if (Date.now() < cache.timing.nextUpdate) {
                yield cache.timing;

                for (const passing of cache.passings) {
                    yield passing;
                }

                return;
            }
        } catch (err) {
            // A missing cache file (ENOENT) or a corrupted one (SyntaxError
            // from JSON.parse) is not fatal: fall through and refetch
            if (err.code !== "ENOENT" && !(err instanceof SyntaxError)) {
                throw err;
            }
        }

        // Cache is missing, stale or corrupted: fetch fresh data, forwarding
        // each value to the consumer while recording it for the next call
        const passings = func();
        const newCache = {
            timing: (await passings.next()).value,
            passings: [],
        };
        yield newCache.timing;

        for await (const passing of passings) {
            newCache.passings.push(passing);
            yield passing;
        }

        // Persist the new cache. Create the cache directory if needed, and
        // await the write so that failures surface here instead of becoming
        // unhandled promise rejections
        await fs.mkdir(path.dirname(cachePath), { recursive: true });
        await fs.writeFile(cachePath, JSON.stringify(newCache));
    };
};

const cacheDir = path.join(__dirname, "..", "..", "..", "cache");

const realtimeEndpoint = "http://data.montpellier3m.fr/node/10732/download";
const realtimeCachePath = path.join(cacheDir, "realtime.json");

/**
 * Fetch real time passings of vehicles across the network.
 * @yields {{{lastUpdate: number, nextUpdate: number}|Passing}} First value
 * is an object containing the time of last update and the time of next
 * update of this information. Next values are informations about each vehicle
 * passing.
 */
const fetchRealtime = async function *() {
    const res = await axios.get(realtimeEndpoint, {
        responseType: "stream"
    });

    const lastUpdate = new Date(res.headers["last-modified"]).getTime();
    // Presumably the feed is regenerated about once a minute; schedule the
    // next update 65 s after the reported last modification — TODO confirm
    const nextUpdate = lastUpdate + 65 * 1000;
    yield { lastUpdate, nextUpdate };

    const parser = res.data.pipe(csv({
        delimiter: ";",
        columns: header => header.map(snakeToCamelCase)
    }));

    for await (const passing of parser) {
        yield passing;
    }
};

exports.fetchRealtime = makeCached(fetchRealtime, realtimeCachePath);

const theoreticalEndpoint = "http://data.montpellier3m.fr/node/10731/download";
const theoreticalCachePath = path.join(cacheDir, "theoretical.json");

/**
 * Fetch theoretical passings for the current day across the network.
 * @yields {{{lastUpdate: number, nextUpdate: number}|Passing}} First value
 * is an object containing the time of last update and the time of next
 * update of this information. Next values are informations about each vehicle
 * passing.
 */
const fetchTheoretical = async function *() {
    const res = await axios.get(theoreticalEndpoint, {
        responseType: "stream"
    });

    // The theoretical dataset covers one service day starting at 4 AM local
    // time: before 4 AM, the data still belongs to the previous day
    const lastUpdate = new Date();

    if (lastUpdate.getHours() < 4) {
        lastUpdate.setDate(lastUpdate.getDate() - 1);
    }

    lastUpdate.setHours(4, 0, 0, 0);

    const nextUpdate = new Date(lastUpdate);
    nextUpdate.setDate(nextUpdate.getDate() + 1);

    yield {
        lastUpdate: lastUpdate.getTime(),
        nextUpdate: nextUpdate.getTime()
    };

    const stream = await unzipFile(res.data, "offre_du_jour.csv");
    const parser = stream.pipe(csv({
        delimiter: ";",
        columns: header => header.map(snakeToCamelCase)
    }));

    for await (const passing of parser) {
        yield passing;
    }
};

exports.fetchTheoretical = makeCached(fetchTheoretical, theoreticalCachePath);