Compare commits

No commits in common. "7badd97610f9b022eea1c10389c3b9effd6f05ec" and "dd008d7ee23d0fee38dd6d9b4481b135a7ea7651" have entirely different histories.

5 changed files with 2524 additions and 2944 deletions

View File

@@ -1,6 +1,7 @@
 const unzip = require('unzip-stream');
 const csv = require('csv-parse');
-const axios = require('axios');
+const request = require('request');
+const requestp = require('request-promise-native');


 const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
@@ -11,10 +12,10 @@ const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
  * @param query Query in Overpass QL.
  * @return Results as provided by the endpoint.
  */
-const queryOverpass = query => axios.post(
+const queryOverpass = query => requestp.post(
     overpassEndpoint,
-    'data=' + query
-).then(res => res.data);
+    {form: 'data=' + query}
+);

 exports.queryOverpass = queryOverpass;

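
The two HTTP clients differ in what the resolved value is: axios resolves with a response object whose body lives in res.data, while request-promise-native resolves with the body itself, so the .then(res => res.data) unwrapping step disappears. A minimal sketch of the contrast, assuming a trivial placeholder Overpass QL query that is not part of the diff:

// Sketch only: the query string is a placeholder.
const query = '[out:json];node(1);out;';

// axios: the promise resolves with a response object; the payload is res.data.
axios.post(overpassEndpoint, 'data=' + query)
    .then(res => console.log(res.data));

// request-promise-native: the promise resolves with the body directly;
// {form: ...} sends the payload as application/x-www-form-urlencoded.
requestp.post(overpassEndpoint, {form: 'data=' + query})
    .then(body => console.log(body));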
@@ -76,9 +77,8 @@ const tamRealtimeEndpoint = 'http://data.montpellier3m.fr/node/10732/download';
  */
 const fetchTamRealtime = (callback) =>
 {
-    axios.get(tamRealtimeEndpoint, {
-        responseType: 'stream'
-    }).then(res => processTamPassingStream(res.data, callback));
+    const csvStream = request(tamRealtimeEndpoint);
+    processTamPassingStream(csvStream, callback);
 };

 exports.fetchTamRealtime = fetchTamRealtime;
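
The streaming call changes shape as well: request() returns a readable stream synchronously, whereas axios only exposes the stream once the promise resolves, as res.data with responseType: 'stream'. A sketch of the difference, piping to stdout in place of the project's stream processor:

// Sketch only: process.stdout stands in for processTamPassingStream.
request(tamRealtimeEndpoint).pipe(process.stdout);          // stream available immediately

axios.get(tamRealtimeEndpoint, {responseType: 'stream'})
    .then(res => res.data.pipe(process.stdout));            // stream available after resolution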
@@ -96,25 +96,20 @@ const tamTheoreticalFileName = 'offre_du_jour.csv';
  */
 const fetchTamTheoretical = (callback) =>
 {
-    axios.get(tamTheoreticalEndpoint, {
-        responseType: 'stream'
-    }).then(res =>
-    {
-        const fileStream = res.data.pipe(unzip.Parse());
-
-        fileStream.on('entry', entry =>
-        {
-            if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
-            {
-                entry.autodrain();
-                return;
-            }
-
-            processTamPassingStream(entry, callback);
-        });
-
-        fileStream.on('error', err => callback(err));
-    });
+    const fileStream = request(tamTheoreticalEndpoint).pipe(unzip.Parse());
+
+    fileStream.on('entry', entry =>
+    {
+        if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
+        {
+            entry.autodrain();
+            return;
+        }
+
+        processTamPassingStream(entry, callback);
+    });
+
+    fileStream.on('error', err => callback(err));
 };

 exports.fetchTamTheoretical = fetchTamTheoretical;
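
In both versions, unzip.Parse() turns the zipped download into a stream of 'entry' events, one per archived file; every entry that is not piped somewhere must be autodrain()ed, otherwise the parser stalls on the unconsumed data. A self-contained sketch of that pattern, with placeholder file names:

// Sketch only: 'archive.zip' and 'wanted.csv' are placeholders.
const fs = require('fs');
const unzip = require('unzip-stream');

fs.createReadStream('archive.zip')
    .pipe(unzip.Parse())
    .on('entry', entry =>
    {
        if (entry.type !== 'File' || entry.path !== 'wanted.csv')
        {
            entry.autodrain();      // discard skipped entries so the stream keeps flowing
            return;
        }
        entry.pipe(process.stdout); // consume the one file we care about
    });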

View File

@@ -1,3 +1,5 @@
+const geolib = require('geolib');
+
 const {choosePlural, joinSentence} = require('../util');
 const {queryOverpass, fetchTamTheoretical} = require('./endpoints');

@@ -157,7 +159,7 @@ out body qt;
     const associations = await fetchStopsRefAssociations();

     // List of retrieved objects
-    const elementsList = rawData.elements;
+    const elementsList = JSON.parse(rawData).elements;

     // List of retrieved lines
     const routeMasters = elementsList.filter(elt =>
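
The added JSON.parse is the other half of the client swap: axios deserializes JSON response bodies automatically, so rawData used to arrive as an object, while request hands back the raw body string. As a sketch, the same effect could have been requested up front with request's json option, shown here as a hypothetical alternative rather than what the diff does:

// Sketch only: an alternative that would make the explicit JSON.parse(rawData) unnecessary.
const queryOverpass = query => requestp.post(
    overpassEndpoint,
    {form: 'data=' + query, json: true}  // ask request to parse the response body as JSON
);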
@@ -378,7 +380,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);

             path = path.concat(
                 wayNodes.slice(nextNodeIndex + 1, curNodeIndex + 1)
                     .reverse()
             );
         }
@@ -395,7 +397,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);
         path.indexOf(stops[stopIndex + 1] + 1),
     ).map(id => ({
         lat: elements[id].lat,
-        lon: elements[id].lon
+        lon: elements[id].lon,
     })));
 }

View File

@@ -1,4 +1,4 @@
-const axios = require('axios');
+const request = require('request');
 const csv = require('csv-parse');

 const network = require('./network');
@@ -8,52 +8,47 @@ const sortByFirstKey = (a, b) => a[0] - b[0];
 const fetchRealtime = () => new Promise((res, rej) =>
 {
-    const stream = axios.get(TAM_REALTIME, {
-        responseType: 'stream'
-    }).then(stream =>
-    {
-        const parser = csv({
-            delimiter: ';',
-        });
-        const courses = {};
-        stream.pipe(parser);
-        stream.on('readable', () =>
-        {
-            let row;
-            while (row = stream.read())
-            {
-                if (row.length === 0 || row[0] === 'course')
-                {
-                    // Ignore invalid lines and the header
-                    continue;
-                }
-                const course = row[0];
-                const stopRef = row[2];
-                const lineRef = row[4];
-                const eta = row[9];
-                const destinationRef = row[10];
-                if (!(course in courses))
-                {
-                    courses[course] = {
-                        lineRef,
-                        destinationRef,
-                        stops: [],
-                    };
-                }
-                courses[course].stops.push([parseInt(eta, 10), stopRef]);
-                courses[course].stops.sort(sortByFirstKey);
-            }
-        });
-        stream.on('end', () => res(courses));
-        stream.on('error', err => rej(err));
-    });
+    const parser = csv({
+        delimiter: ';',
+    });
+    const stream = request(TAM_REALTIME).pipe(parser);
+    const courses = {};
+    stream.on('readable', () =>
+    {
+        let row;
+        while (row = stream.read())
+        {
+            if (row.length === 0 || row[0] === 'course')
+            {
+                // Ignore invalid lines and the header
+                continue;
+            }
+            const course = row[0];
+            const stopRef = row[2];
+            const lineRef = row[4];
+            const eta = row[9];
+            const destinationRef = row[10];
+            if (!(course in courses))
+            {
+                courses[course] = {
+                    lineRef,
+                    destinationRef,
+                    stops: [],
+                };
+            }
+            courses[course].stops.push([parseInt(eta, 10), stopRef]);
+            courses[course].stops.sort(sortByFirstKey);
+        }
+    });
+    stream.on('end', () => res(courses));
+    stream.on('error', err => rej(err));
 });

 const updateVehicles = async (lines, vehicles) =>
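
Both versions drain the parser through csv-parse's stream API: each 'readable' event is emptied with read(), which returns one parsed row as an array of column strings, or null once the buffer is empty. A self-contained sketch of that consumption pattern, with invented sample rows:

// Sketch only: the two CSV rows are invented sample data.
const csv = require('csv-parse');

const parser = csv({delimiter: ';'});
parser.on('readable', () =>
{
    let row;
    while ((row = parser.read()) !== null)
    {
        console.log(row);                   // e.g. ['42', 'x', 'S1']
    }
});
parser.on('end', () => console.log('done'));
parser.write('42;x;S1\n');
parser.write('43;y;S2\n');
parser.end();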

package-lock.json (generated, 5327 changed lines)

File diff suppressed because it is too large.

View File

@@ -12,18 +12,19 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "axios": "^0.19.2",
     "color": "^3.1.2",
     "csv-parse": "^4.8.3",
     "express": "^4.17.1",
     "geolib": "^3.2.1",
     "leaflet": "^1.6.0",
     "ol": "^6.1.1",
+    "parcel-bundler": "^1.12.4",
+    "request": "^2.88.0",
+    "request-promise-native": "^1.0.8",
     "unzip-stream": "^0.3.0"
   },
   "devDependencies": {
     "eslint": "^6.8.0",
-    "nodemon": "^2.0.2",
-    "parcel-bundler": "^1.12.4"
+    "nodemon": "^2.0.2"
   }
 }