Migrate request to axios

parent 3429227114
commit 7badd97610
@@ -1,7 +1,6 @@
 const unzip = require('unzip-stream');
 const csv = require('csv-parse');
-const request = require('request');
-const requestp = require('request-promise-native');
+const axios = require('axios');
 
 const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
 
@@ -12,10 +11,10 @@ const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
  * @param query Query in Overpass QL.
  * @return Results as provided by the endpoint.
  */
-const queryOverpass = query => requestp.post(
+const queryOverpass = query => axios.post(
     overpassEndpoint,
-    {form: 'data=' + query}
-);
+    'data=' + query
+).then(res => res.data);
 
 exports.queryOverpass = queryOverpass;
 
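A note on the body change (editor's sketch, not part of the commit): the old {form: ...} option URL-encoded the payload before sending it, while axios posts the given string verbatim (axios 0.x still labels string bodies as application/x-www-form-urlencoded by default). If a query could contain characters that are unsafe in a form body, encoding it explicitly is the closer equivalent; the encodeURIComponent call below is an assumption, not something the commit adds:

// Hedged sketch: explicitly URL-encode the Overpass QL payload.
const queryOverpassEncoded = query => axios.post(
    overpassEndpoint,
    'data=' + encodeURIComponent(query)
).then(res => res.data);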
@@ -77,8 +76,9 @@ const tamRealtimeEndpoint = 'http://data.montpellier3m.fr/node/10732/download';
  */
 const fetchTamRealtime = (callback) =>
 {
-    const csvStream = request(tamRealtimeEndpoint);
-    processTamPassingStream(csvStream, callback);
+    axios.get(tamRealtimeEndpoint, {
+        responseType: 'stream'
+    }).then(res => processTamPassingStream(res.data, callback));
 };
 
 exports.fetchTamRealtime = fetchTamRealtime;
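For reference (editor's sketch, assuming axios 0.19 under Node): with responseType: 'stream', res.data is a Node readable stream. The hunk above drops errors that reject the promise, so a defensive variant would forward them to the callback:

// Hedged sketch: same call with request failures routed to the callback.
axios.get(tamRealtimeEndpoint, {
    responseType: 'stream'
})
    .then(res => processTamPassingStream(res.data, callback))
    .catch(err => callback(err));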
@@ -96,20 +96,25 @@ const tamTheoreticalFileName = 'offre_du_jour.csv';
  */
 const fetchTamTheoretical = (callback) =>
 {
-    const fileStream = request(tamTheoreticalEndpoint).pipe(unzip.Parse());
-
-    fileStream.on('entry', entry =>
-    {
-        if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
-        {
-            entry.autodrain();
-            return;
-        }
-
-        processTamPassingStream(entry, callback);
-    });
-
-    fileStream.on('error', err => callback(err));
+    axios.get(tamTheoreticalEndpoint, {
+        responseType: 'stream'
+    }).then(res =>
+    {
+        const fileStream = res.data.pipe(unzip.Parse());
+
+        fileStream.on('entry', entry =>
+        {
+            if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
+            {
+                entry.autodrain();
+                return;
+            }
+
+            processTamPassingStream(entry, callback);
+        });
+
+        fileStream.on('error', err => callback(err));
+    });
 };
 
 exports.fetchTamTheoretical = fetchTamTheoretical;
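The same pattern in a self-contained form (editor's sketch; fetchCsvEntry and wantedPath are hypothetical names, while unzip.Parse, entry.type, entry.path and entry.autodrain() are the unzip-stream API used in the diff):

const axios = require('axios');
const unzip = require('unzip-stream');

// Hedged sketch: pull one named file out of a zipped HTTP response.
const fetchCsvEntry = (url, wantedPath) => new Promise((resolve, reject) =>
{
    axios.get(url, {responseType: 'stream'}).then(res =>
    {
        res.data.pipe(unzip.Parse())
            .on('entry', entry =>
            {
                if (entry.type === 'File' && entry.path === wantedPath)
                {
                    // The entry itself is a readable stream of the file.
                    resolve(entry);
                }
                else
                {
                    // Discard other zip members to keep the stream flowing.
                    entry.autodrain();
                }
            })
            .on('error', reject);
    }).catch(reject);
});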
@@ -1,5 +1,3 @@
-const geolib = require('geolib');
-
 const {choosePlural, joinSentence} = require('../util');
 const {queryOverpass, fetchTamTheoretical} = require('./endpoints');
 
@@ -159,7 +157,7 @@ out body qt;
     const associations = await fetchStopsRefAssociations();
 
     // List of retrieved objects
-    const elementsList = JSON.parse(rawData).elements;
+    const elementsList = rawData.elements;
 
     // List of retrieved lines
     const routeMasters = elementsList.filter(elt =>
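This change follows from the new queryOverpass (editor's note): axios parses JSON response bodies by default and queryOverpass now resolves with res.data, so the manual JSON.parse becomes redundant. Assuming rawData is the resolved value of queryOverpass, the new contract is simply:

// Hedged sketch: queryOverpass now resolves with an already-parsed object.
queryOverpass('[out:json];node(1);out;')
    .then(data => console.log(data.elements.length));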
@@ -380,7 +378,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);
 
         path = path.concat(
             wayNodes.slice(nextNodeIndex + 1, curNodeIndex + 1)
-                .reverse(),
+                .reverse()
         );
     }
 
@@ -397,7 +395,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);
         path.indexOf(stops[stopIndex + 1] + 1),
     ).map(id => ({
         lat: elements[id].lat,
-        lon: elements[id].lon,
+        lon: elements[id].lon
     })));
 }
 
@@ -1,4 +1,4 @@
-const request = require('request');
+const axios = require('axios');
 const csv = require('csv-parse');
 
 const network = require('./network');
@@ -8,47 +8,52 @@ const sortByFirstKey = (a, b) => a[0] - b[0];
 
 const fetchRealtime = () => new Promise((res, rej) =>
 {
-    const parser = csv({
-        delimiter: ';',
-    });
-
-    const stream = request(TAM_REALTIME).pipe(parser);
-    const courses = {};
-
-    stream.on('readable', () =>
-    {
-        let row;
-
-        while (row = stream.read())
-        {
-            if (row.length === 0 || row[0] === 'course')
-            {
-                // Ignore invalid rows and the header
-                continue;
-            }
-
-            const course = row[0];
-            const stopRef = row[2];
-            const lineRef = row[4];
-            const eta = row[9];
-            const destinationRef = row[10];
-
-            if (!(course in courses))
-            {
-                courses[course] = {
-                    lineRef,
-                    destinationRef,
-                    stops: [],
-                };
-            }
-
-            courses[course].stops.push([parseInt(eta, 10), stopRef]);
-            courses[course].stops.sort(sortByFirstKey);
-        }
-    });
-
-    stream.on('end', () => res(courses));
-    stream.on('error', err => rej(err));
+    const stream = axios.get(TAM_REALTIME, {
+        responseType: 'stream'
+    }).then(stream =>
+    {
+        const parser = csv({
+            delimiter: ';',
+        });
+
+        const courses = {};
+        stream.pipe(parser);
+
+        stream.on('readable', () =>
+        {
+            let row;
+
+            while (row = stream.read())
+            {
+                if (row.length === 0 || row[0] === 'course')
+                {
+                    // Ignore invalid rows and the header
+                    continue;
+                }
+
+                const course = row[0];
+                const stopRef = row[2];
+                const lineRef = row[4];
+                const eta = row[9];
+                const destinationRef = row[10];
+
+                if (!(course in courses))
+                {
+                    courses[course] = {
+                        lineRef,
+                        destinationRef,
+                        stops: [],
+                    };
+                }
+
+                courses[course].stops.push([parseInt(eta, 10), stopRef]);
+                courses[course].stops.sort(sortByFirstKey);
+            }
+        });
 
+        stream.on('end', () => res(courses));
+        stream.on('error', err => rej(err));
+    });
 });
 
 const updateVehicles = async (lines, vehicles) =>
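In the hunk above, rows are read from stream, which inside the .then is the resolved response rather than the csv-parse stream. For comparison, an editor's sketch that reads parsed records from the parser the body is piped into (TAM_REALTIME, the ';' delimiter and the column layout are taken from the diff; fetchRealtimeSketch is a hypothetical name):

const fetchRealtimeSketch = () => new Promise((res, rej) =>
{
    axios.get(TAM_REALTIME, {responseType: 'stream'}).then(response =>
    {
        const parser = csv({delimiter: ';'});
        const courses = {};

        // Pipe the raw CSV body into the parser and read records from it.
        response.data.pipe(parser);

        parser.on('readable', () =>
        {
            let row;
            while ((row = parser.read()) !== null)
            {
                // Skip invalid rows and the header line.
                if (row.length === 0 || row[0] === 'course')
                    continue;

                const course = row[0];
                if (!(course in courses))
                    courses[course] = {lineRef: row[4], destinationRef: row[10], stops: []};

                courses[course].stops.push([parseInt(row[9], 10), row[2]]);
                courses[course].stops.sort(sortByFirstKey);
            }
        });

        parser.on('end', () => res(courses));
        parser.on('error', rej);
    }).catch(rej);
});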
File diff suppressed because it is too large
@@ -12,19 +12,18 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
+    "axios": "^0.19.2",
     "color": "^3.1.2",
     "csv-parse": "^4.8.3",
     "express": "^4.17.1",
     "geolib": "^3.2.1",
     "leaflet": "^1.6.0",
     "ol": "^6.1.1",
-    "parcel-bundler": "^1.12.4",
-    "request": "^2.88.0",
-    "request-promise-native": "^1.0.8",
     "unzip-stream": "^0.3.0"
   },
   "devDependencies": {
     "eslint": "^6.8.0",
-    "nodemon": "^2.0.2"
+    "nodemon": "^2.0.2",
+    "parcel-bundler": "^1.12.4"
   }
 }
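The equivalent dependency change from the command line (typical npm commands, not taken from the commit):

npm uninstall request request-promise-native parcel-bundler
npm install axios@0.19.2
npm install --save-dev parcel-bundler@1.12.4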