Compare commits

2 Commits

dd008d7ee2 ... 7badd97610
| Author | SHA1 | Date |
|---|---|---|
|  | 7badd97610 |  |
|  | 3429227114 |  |
```diff
@@ -1,7 +1,6 @@
 const unzip = require('unzip-stream');
 const csv = require('csv-parse');
-const request = require('request');
-const requestp = require('request-promise-native');
+const axios = require('axios');
 
 const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
 
@@ -12,10 +11,10 @@ const overpassEndpoint = 'https://lz4.overpass-api.de/api/interpreter';
  * @param query Query in Overpass QL.
  * @return Results as provided by the endpoint.
  */
-const queryOverpass = query => requestp.post(
+const queryOverpass = query => axios.post(
     overpassEndpoint,
-    {form: 'data=' + query}
-);
+    'data=' + query
+).then(res => res.data);
 
 exports.queryOverpass = queryOverpass;
 
```
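A note on this hunk: `request-promise-native` resolved with the raw response body, while axios resolves with a response object and parses JSON bodies itself, hence the added `.then(res => res.data)` and the removal of a `JSON.parse` call in a later file. The old `{form: 'data=' + query}` sent a form-encoded body; the new code passes `'data=' + query` as the body directly, which in practice reaches Overpass as an equivalent `data=` body, though neither version percent-encodes the query. A minimal usage sketch (the module path and the query are illustrative, not from this repository):

```js
// Usage sketch: queryOverpass now resolves with an already-parsed object.
const {queryOverpass} = require('./endpoints'); // path assumed

queryOverpass('[out:json]; node["railway"="tram_stop"](43.5,3.8,43.7,4.0); out;')
    .then(data => console.log(data.elements.length + ' elements'))
    .catch(err => console.error(err));
```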
```diff
@@ -77,8 +76,9 @@ const tamRealtimeEndpoint = 'http://data.montpellier3m.fr/node/10732/download';
  */
 const fetchTamRealtime = (callback) =>
 {
-    const csvStream = request(tamRealtimeEndpoint);
-    processTamPassingStream(csvStream, callback);
+    axios.get(tamRealtimeEndpoint, {
+        responseType: 'stream'
+    }).then(res => processTamPassingStream(res.data, callback));
 };
 
 exports.fetchTamRealtime = fetchTamRealtime;
```
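For reference, the pattern introduced here: in Node, when `responseType: 'stream'` is set, axios resolves with a response whose `data` property is a readable stream, so `res.data` plays the role the `request(...)` return value played before. A standalone sketch of just that pattern, using the same endpoint as above:

```js
// Prints the raw realtime CSV to stdout; res.data is a Node readable stream.
const axios = require('axios');

axios.get('http://data.montpellier3m.fr/node/10732/download', {
    responseType: 'stream'
}).then(res => res.data.pipe(process.stdout))
    .catch(err => console.error(err));
```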
```diff
@@ -96,20 +96,25 @@ const tamTheoreticalFileName = 'offre_du_jour.csv';
  */
 const fetchTamTheoretical = (callback) =>
 {
-    const fileStream = request(tamTheoreticalEndpoint).pipe(unzip.Parse());
-
-    fileStream.on('entry', entry =>
+    axios.get(tamTheoreticalEndpoint, {
+        responseType: 'stream'
+    }).then(res =>
     {
-        if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
+        const fileStream = res.data.pipe(unzip.Parse());
+
+        fileStream.on('entry', entry =>
         {
-            entry.autodrain();
-            return;
-        }
+            if (entry.type !== 'File' || entry.path !== tamTheoreticalFileName)
+            {
+                entry.autodrain();
+                return;
+            }
 
-        processTamPassingStream(entry, callback);
+            processTamPassingStream(entry, callback);
+        });
+
+        fileStream.on('error', err => callback(err));
     });
-
-    fileStream.on('error', err => callback(err));
 };
 
 exports.fetchTamTheoretical = fetchTamTheoretical;
```
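One caveat applying to both fetchers above, offered as a review note rather than as part of the commit: a rejected axios promise (DNS failure, non-2xx status) is not handled in either rewritten function, so such failures surface as unhandled rejections instead of reaching `callback`. A hedged sketch of the missing branch, reusing the names from the hunk above:

```js
// Sketch only: forward request-level failures to the callback as well.
const fetchTamTheoretical = (callback) =>
{
    axios.get(tamTheoreticalEndpoint, {responseType: 'stream'})
        .then(res => { /* entry handling as in the diff above */ })
        .catch(err => callback(err));
};
```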
```diff
@@ -1,5 +1,3 @@
-const geolib = require('geolib');
-
 const {choosePlural, joinSentence} = require('../util');
 const {queryOverpass, fetchTamTheoretical} = require('./endpoints');
 
@@ -159,7 +157,7 @@ out body qt;
     const associations = await fetchStopsRefAssociations();
 
     // List of retrieved objects
-    const elementsList = JSON.parse(rawData).elements;
+    const elementsList = rawData.elements;
 
     // List of retrieved lines
     const routeMasters = elementsList.filter(elt =>
@@ -380,7 +378,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);
 
                     path = path.concat(
                         wayNodes.slice(nextNodeIndex + 1, curNodeIndex + 1)
-                                .reverse()
+                            .reverse()
                     );
                 }
 
@@ -397,7 +395,7 @@ ${routeDescription} is one-way and cannot be used in reverse.`);
                     path.indexOf(stops[stopIndex + 1] + 1),
                 ).map(id => ({
                     lat: elements[id].lat,
-                    lon: elements[id].lon,
+                    lon: elements[id].lon
                 })));
             }
 
```
```diff
@@ -1,4 +1,4 @@
-const request = require('request');
+const axios = require('axios');
 const csv = require('csv-parse');
 
 const network = require('./network');
@@ -8,47 +8,52 @@ const sortByFirstKey = (a, b) => a[0] - b[0];
 
 const fetchRealtime = () => new Promise((res, rej) =>
 {
-    const parser = csv({
-        delimiter: ';',
-    });
-
-    const stream = request(TAM_REALTIME).pipe(parser);
-    const courses = {};
-
-    stream.on('readable', () =>
+    const stream = axios.get(TAM_REALTIME, {
+        responseType: 'stream'
+    }).then(stream =>
     {
-        let row;
+        const parser = csv({
+            delimiter: ';',
+        });
 
-        while (row = stream.read())
+        const courses = {};
+        stream.pipe(parser);
+
+        stream.on('readable', () =>
         {
-            if (row.length === 0 || row[0] === 'course')
+            let row;
+
+            while (row = stream.read())
             {
-                // Ignore invalid rows and the header
-                continue;
+                if (row.length === 0 || row[0] === 'course')
+                {
+                    // Ignore invalid rows and the header
+                    continue;
+                }
+
+                const course = row[0];
+                const stopRef = row[2];
+                const lineRef = row[4];
+                const eta = row[9];
+                const destinationRef = row[10];
+
+                if (!(course in courses))
+                {
+                    courses[course] = {
+                        lineRef,
+                        destinationRef,
+                        stops: [],
+                    };
+                }
+
+                courses[course].stops.push([parseInt(eta, 10), stopRef]);
+                courses[course].stops.sort(sortByFirstKey);
             }
+        });
 
-            const course = row[0];
-            const stopRef = row[2];
-            const lineRef = row[4];
-            const eta = row[9];
-            const destinationRef = row[10];
-
-            if (!(course in courses))
-            {
-                courses[course] = {
-                    lineRef,
-                    destinationRef,
-                    stops: [],
-                };
-            }
-
-            courses[course].stops.push([parseInt(eta, 10), stopRef]);
-            courses[course].stops.sort(sortByFirstKey);
-        }
+        stream.on('end', () => res(courses));
+        stream.on('error', err => rej(err));
     });
-
-    stream.on('end', () => res(courses));
-    stream.on('error', err => rej(err));
 });
 
 const updateVehicles = async (lines, vehicles) =>
```
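A reading note on the hunk above: the `.then(stream => ...)` parameter shadows the outer `stream` constant and is the axios response object, not the stream itself (the stream is its `data` property), and parsed rows are emitted by the csv parser rather than by the response. A self-contained sketch of the flow under that reading; the `TAM_REALTIME` URL is assumed to match the realtime endpoint used elsewhere in this diff:

```js
// Hedged sketch, not the committed code: pipe the response stream into the
// parser and read parsed rows from the parser.
const axios = require('axios');
const csv = require('csv-parse');

const TAM_REALTIME = 'http://data.montpellier3m.fr/node/10732/download'; // assumed

axios.get(TAM_REALTIME, {responseType: 'stream'}).then(response =>
{
    const parser = csv({delimiter: ';'});
    response.data.pipe(parser);

    parser.on('readable', () =>
    {
        let row;
        while ((row = parser.read()) !== null)
        {
            console.log(row[0]); // first column: the course identifier
        }
    });

    parser.on('error', err => console.error(err));
});
```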
File diff suppressed because it is too large
```diff
@@ -12,19 +12,18 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
+    "axios": "^0.19.2",
     "color": "^3.1.2",
     "csv-parse": "^4.8.3",
     "express": "^4.17.1",
     "geolib": "^3.2.1",
     "leaflet": "^1.6.0",
     "ol": "^6.1.1",
-    "parcel-bundler": "^1.12.4",
-    "request": "^2.88.0",
-    "request-promise-native": "^1.0.8",
     "unzip-stream": "^0.3.0"
   },
   "devDependencies": {
     "eslint": "^6.8.0",
-    "nodemon": "^2.0.2"
+    "nodemon": "^2.0.2",
+    "parcel-bundler": "^1.12.4"
   }
 }
```