Compare commits: ca8d6f894f ... 6b7333c46b (4 commits)

Author | SHA1 | Date
---|---|---
Mattéo Delabre | 6b7333c46b |
Mattéo Delabre | ef6b4b9741 |
Mattéo Delabre | 3600e0b2bf |
Mattéo Delabre | e44e9d07ba |
@@ -1,3 +1,4 @@
 node_modules
 dist
 .cache
+cache

@@ -0,0 +1,89 @@
+#!/usr/bin/env node
+
+const courses = require('../src/tam/courses');
+const network = require('../src/tam/network.json');
+const {displayTime} = require('../src/util');
+const process = require('process');
+const path = require('path');
+
+/**
+ * Convert stop ID to human-readable stop name.
+ *
+ * If the stop ID is not known, the ID will be kept as-is.
+ */
+const getStopName = stopId =>
+{
+    if (stopId in network.stops)
+    {
+        return network.stops[stopId].properties.name;
+    }
+
+    return stopId;
+};
+
+/** Create a string representing a course for printing. */
+const courseToString = course =>
+{
+    let result = `Course #${course.id}
+Line ${course.line} - Direction ${course.direction} - Bound for ${getStopName(course.finalStopId)}
+
+Next stops:
+`;
+
+    for (let [stopId, time] of course.passings)
+    {
+        result += `${displayTime(new Date(time))} - ${getStopName(stopId)}\n`;
+    }
+
+    return result;
+};
+
+/** Show user help. */
+const doHelp = () =>
+{
+    const name = "./" + path.relative(process.cwd(), process.argv[1]);
+    process.stdout.write(`Usage: ${name} TYPE [COURSE]
+Show TaM courses data.
+
+Set TYPE to 'realtime' to fetch real-time data (limited time scope) or to
+'theoretical' to fetch planned courses for the day.
+
+Set COURSE to a valid course ID to limit the output to a given course.
+`);
+};
+
+/** Print realtime information for a course or all courses. */
+const doPrint = async (kind, courseId) =>
+{
+    const results = await courses.fetch(kind);
+
+    if (courseId)
+    {
+        if (courseId in results)
+        {
+            console.log(courseToString(results[courseId]));
+        }
+        else
+        {
+            console.log('Unknown course');
+        }
+    }
+    else
+    {
+        for (let course of Object.values(results))
+        {
+            console.log(courseToString(course));
+            console.log("======\n");
+        }
+    }
+};
+
+const argv = process.argv.slice(2);
+
+if (argv.length === 0)
+{
+    doHelp();
+    process.exit(1);
+}
+
+doPrint(argv[0], argv[1]);

@@ -1,12 +1,12 @@
 const express = require("express");
-const realtime = require("../tam/realtime");
+const courses = require("../tam/courses");
 
 const app = express();
 const port = 4321;
 
 app.get("/courses", async(req, res) => {
     res.header("Access-Control-Allow-Origin", "*");
-    return res.json(await realtime.fetch());
+    return res.json(await courses.fetch("realtime"));
 });
 
 app.listen(port, () => console.info(`App listening on port ${port}`));

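For reference, a minimal sketch of a client for the `/courses` endpoint changed above. This is not part of the diff: it assumes the server is running locally on its default port 4321, reuses axios (already a project dependency), and reads the fields (`line`, `finalStopId`) of the course objects built by `courses.fetch("realtime")` elsewhere in this compare.

```js
// Illustrative client only; not part of the diff.
const axios = require("axios");

const listCourses = async () => {
    // The endpoint returns a mapping from course IDs to course objects
    // ({id, line, direction, finalStopId, passings}).
    const { data } = await axios.get("http://localhost:4321/courses");

    for (const [id, course] of Object.entries(data)) {
        console.log(`${id}: line ${course.line}, bound for ${course.finalStopId}`);
    }
};

listCourses().catch(console.error);
```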
@@ -84,6 +84,26 @@ setInterval(() => {
 }, 1000);
 
 // Create the network and courses map
-map.create(/* map = */ "map", coursesSimulation, course => {
-    courseId = course;
+map.create(/* map = */ "map", coursesSimulation, courses => {
+    if (courses.length === 0) {
+        // If no course were clicked, show nothing
+        courseId = null;
+    } else {
+        // If several courses were clicked, show the one departing the soonest,
+        // or the first moving one
+        courses.sort((id1, id2) => {
+            const course1 = coursesSimulation.courses[id1];
+            const course2 = coursesSimulation.courses[id2];
+
+            if (course1.state === "moving") {
+                return -1;
+            } else if (course2.state === "moving") {
+                return 1;
+            } else {
+                return course1.departureTime - course2.departureTime;
+            }
+        });
+
+        courseId = courses[0];
+    }
 });

@@ -120,6 +120,7 @@ const create = (target, coursesSimulation, onClick) => {
     map.on("singleclick", ev => {
         const mousePixel = map.getPixelFromCoordinate(ev.coordinate);
         const maxDistance = sizes.courseSize + sizes.courseInnerBorder;
+        const clicked = [];
 
         for (const course of Object.values(coursesSimulation.courses)) {
             const coursePixel = map.getPixelFromCoordinate(course.position);

@@ -128,9 +129,11 @@ const create = (target, coursesSimulation, onClick) => {
             const distance = dx * dx + dy * dy;
 
             if (distance <= maxDistance * maxDistance) {
-                onClick(course.id);
+                clicked.push(course.id);
             }
         }
+
+        onClick(clicked);
     });
 
     return map;

@ -0,0 +1,123 @@
|
||||||
|
const path = require("path");
|
||||||
|
const fs = require("fs").promises;
|
||||||
|
const tam = require("./sources/tam");
|
||||||
|
const network = require("./network.json");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Information about the course of a vehicle.
|
||||||
|
* @typedef {Object} Course
|
||||||
|
* @property {string} id Unique identifier for this course.
|
||||||
|
* @property {string} line Transport line number.
|
||||||
|
* @property {string} finalStop Final stop to which the course is headed.
|
||||||
|
* @property {Array.<Array>} passings Next stations to which
|
||||||
|
* the vehicle will stop, associated to the passing timestamp, ordered by
|
||||||
|
* increasing passing timestamp.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/** Parse time information relative to the current date. */
|
||||||
|
const parseTime = (time, reference) =>
|
||||||
|
{
|
||||||
|
const [hours, minutes, seconds] = time.split(':').map(x => parseInt(x, 10));
|
||||||
|
const result = new Date(reference);
|
||||||
|
|
||||||
|
result.setHours(hours);
|
||||||
|
result.setMinutes(minutes);
|
||||||
|
result.setSeconds(seconds);
|
||||||
|
|
||||||
|
if (reference > result.getTime()) {
|
||||||
|
// Timestamps in the past refer to the next day
|
||||||
|
result.setDate(result.getDate() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch information about courses in the TaM network.
|
||||||
|
*
|
||||||
|
* @param {string} kind Pass 'realtime' to get real-time information,
|
||||||
|
* or 'theoretical' to get planned courses for the day.
|
||||||
|
* @returns {Object.<string,Course>} Mapping from active course IDs to
|
||||||
|
* information about each course.
|
||||||
|
*/
|
||||||
|
const fetch = async (kind = 'realtime') => {
|
||||||
|
const courses = {};
|
||||||
|
const passings = (
|
||||||
|
kind === 'realtime'
|
||||||
|
? tam.fetchRealtime()
|
||||||
|
: tam.fetchTheoretical()
|
||||||
|
);
|
||||||
|
const timing = (await passings.next()).value;
|
||||||
|
|
||||||
|
// Aggregate passings relative to the same course
|
||||||
|
for await (const passing of passings) {
|
||||||
|
const {
|
||||||
|
course: id,
|
||||||
|
routeShortName: line,
|
||||||
|
stopId,
|
||||||
|
destArCode: finalStopId,
|
||||||
|
} = passing;
|
||||||
|
|
||||||
|
const direction = (
|
||||||
|
'direction' in passing
|
||||||
|
? passing.direction
|
||||||
|
: passing.directionId
|
||||||
|
);
|
||||||
|
|
||||||
|
const departureTime = (
|
||||||
|
'delaySec' in passing
|
||||||
|
? timing.lastUpdate + parseInt(passing.delaySec, 10) * 1000
|
||||||
|
: parseTime(passing.departureTime, timing.lastUpdate)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!(id in courses)) {
|
||||||
|
courses[id] = {
|
||||||
|
id,
|
||||||
|
line,
|
||||||
|
direction,
|
||||||
|
finalStopId,
|
||||||
|
passings: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(stopId in courses[id].passings) ||
|
||||||
|
courses[id].passings[stopId] < departureTime) {
|
||||||
|
// Only consider passings with an increased passing time
|
||||||
|
// or for stops not seen before
|
||||||
|
courses[id].passings[stopId] = departureTime;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter courses to only keep those referring to known data
|
||||||
|
for (const courseId of Object.keys(courses)) {
|
||||||
|
const course = courses[courseId];
|
||||||
|
|
||||||
|
if (!(course.line in network.lines)) {
|
||||||
|
delete courses[courseId];
|
||||||
|
} else {
|
||||||
|
for (const stopId of Object.keys(course.passings)) {
|
||||||
|
if (!(stopId in network.stops)) {
|
||||||
|
delete courses[courseId];
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order next passings by increasing passing time
|
||||||
|
for (const course of Object.values(courses)) {
|
||||||
|
course.passings = (
|
||||||
|
Object.entries(course.passings).sort(
|
||||||
|
([, time1], [, time2]) => time1 - time2
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (course.finalStopId === undefined) {
|
||||||
|
course.finalStopId = course.passings[course.passings.length - 1][0];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return courses;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.fetch = fetch;
|
|
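A minimal sketch of how this new module is consumed (the new command-line script added earlier in this compare does essentially the same). This is illustrative only and assumes the consumer sits next to the module in src/tam/ so that the relative require path resolves; after the sorting step at the end of `fetch`, `passings` is an array of `[stopId, time]` pairs ordered by passing time.

```js
// Illustrative consumer only; not part of the diff.
const courses = require("./courses");

const main = async () => {
    // 'realtime' (default) or 'theoretical'
    const results = await courses.fetch("realtime");

    for (const course of Object.values(results)) {
        // course: {id, line, direction, finalStopId, passings}
        if (course.passings.length === 0) {
            continue;
        }

        // passings: [stopId, time] pairs, ordered by increasing time
        const [nextStopId, nextTime] = course.passings[0];
        console.log(course.id, course.line, nextStopId, new Date(nextTime).toISOString());
    }
};

main().catch(console.error);
```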
@ -4,8 +4,7 @@
|
||||||
* Extract static information about the TaM network from OpenStreetMap (OSM):
|
* Extract static information about the TaM network from OpenStreetMap (OSM):
|
||||||
* tram and bus lines, stops and routes.
|
* tram and bus lines, stops and routes.
|
||||||
*
|
*
|
||||||
* Functions in this file also report and offer to correct errors that may
|
* Functions in this file also report inconsistencies in OSM data.
|
||||||
* occur in OSM data.
|
|
||||||
*
|
*
|
||||||
* Because of the static nature of this data, it is cached in a
|
* Because of the static nature of this data, it is cached in a
|
||||||
* version-controlled file `network.json` next to this file. To update it, use
|
* version-controlled file `network.json` next to this file. To update it, use
|
||||||
|
@ -24,7 +23,6 @@ const osm = require("./sources/osm");
|
||||||
* segments and lines.
|
* segments and lines.
|
||||||
*/
|
*/
|
||||||
const fetch = async lineRefs => {
|
const fetch = async lineRefs => {
|
||||||
|
|
||||||
// Retrieve routes, ways and stops from OpenStreetMap
|
// Retrieve routes, ways and stops from OpenStreetMap
|
||||||
const rawData = await osm.runQuery(`[out:json];
|
const rawData = await osm.runQuery(`[out:json];
|
||||||
|
|
||||||
|
|
src/tam/network.json: 3656 lines changed (file diff suppressed because it is too large).

@@ -1,100 +0,0 @@
-const tam = require("./sources/tam");
-const network = require("./network.json");
-
-// Time at which the course data needs to be updated next
-let nextUpdate = null;
-
-// Current information about courses
-let currentCourses = null;
-
-/**
- * Information about the course of a vehicle.
- * @typedef {Object} Course
- * @property {string} id Unique identifier for this course.
- * @property {string} line Transport line number.
- * @property {string} finalStop Final stop to which the course is headed.
- * @property {Array.<Array>} nextPassings Next stations to which
- * the vehicle will stop, associated to the passing timestamp, ordered by
- * increasing passing timestamp.
- */
-
-/**
- * Fetch real-time information about active courses in the TaM network.
- *
- * New data will only be fetched from the TaM server once every minute,
- * otherwise pulling from the in-memory cache.
- * @returns {Object.<string,Course>} Mapping from active course IDs to
- * information about each course.
- */
-const fetch = async() => {
-    if (nextUpdate === null || Date.now() >= nextUpdate) {
-        const courses = {};
-        const passings = tam.fetchRealtime();
-        const timing = (await passings.next()).value;
-
-        nextUpdate = timing.nextUpdate;
-
-        // Aggregate passings relative to the same course
-        for await (const passing of passings) {
-            const {
-                course: id,
-                routeShortName: line,
-                stopId,
-                destArCode: finalStop
-            } = passing;
-
-            const arrivalTime = (
-                timing.lastUpdate +
-                parseInt(passing.delaySec, 10) * 1000
-            );
-
-            if (!(id in courses)) {
-                courses[id] = {
-                    id,
-                    line,
-                    finalStop,
-
-                    // Initially accumulate passings in an object
-                    // to prevent duplicates
-                    nextPassings: { [stopId]: arrivalTime }
-                };
-            } else if (!(stopId in courses[id].nextPassings) ||
-                courses[id].nextPassings[stopId] < arrivalTime) {
-                // Only consider passings with an increased passing time
-                // or for stops not seen before
-                courses[id].nextPassings[stopId] = arrivalTime;
-            }
-        }
-
-        // Filter courses to only keep those referring to known data
-        for (const courseId of Object.keys(courses)) {
-            const course = courses[courseId];
-
-            if (!(course.line in network.lines)) {
-                delete courses[courseId];
-            } else {
-                for (const stopId of Object.keys(course.nextPassings)) {
-                    if (!(stopId in network.stops)) {
-                        delete courses[courseId];
-                        break;
-                    }
-                }
-            }
-        }
-
-        // Order next passings by increasing passing time
-        for (const courseId of Object.keys(courses)) {
-            courses[courseId].nextPassings = (
-                Object.entries(courses[courseId].nextPassings).sort(
-                    ([, time1], [, time2]) => time1 - time2
-                )
-            );
-        }
-
-        currentCourses = courses;
-    }
-
-    return currentCourses;
-};
-
-exports.fetch = fetch;

@@ -59,8 +59,9 @@ class Course {
 
     updateData(data) {
         this.line = data.line;
-        this.finalStop = data.finalStop;
-        this.nextPassings = data.nextPassings;
+        this.direction = data.direction;
+        this.finalStop = data.finalStopId;
+        this.nextPassings = data.passings;
 
         const now = Date.now();

@@ -1,5 +1,7 @@
 const csv = require("csv-parse");
 const axios = require("axios");
+const path = require("path");
+const fs = require("fs").promises;
 const { snakeToCamelCase, unzipFile } = require("../../util");
 
 /**

@@ -17,15 +19,65 @@ const { snakeToCamelCase, unzipFile } = require("../../util");
  * @property {string} directionId Route identifier inside the line.
  * @property {string} departureTime Theoretical time at which the
  * vehicle will depart the stop (HH:MM:SS format).
- * @property {string} isTheorical (sic) Whether the arrival time is only
- * a theoretical information.
+ * @property {string} isTheorical (sic) True if this is only the planned
+ * passing time, false if this is real-time information.
  * @property {string} delaySec Number of seconds before the vehicle arrives
- * at the station.
+ * at the station (only if isTheorical is false).
  * @property {string} destArCode Unique network identifier for the final
- * stop of this trip.
+ * stop of this trip (only if isTheorical is false).
  */
 
+/**
+ * Wrap a passing-fetching function to use a filesystem-based cache.
+ *
+ * @param {function} func Fetching function to wrap.
+ * @param {string} cachePath Path to the file to use as a cache (will be
+ * overwritten, may be non-existing).
+ * @return {function} Wrapped function.
+ */
+const makeCached = (func, cachePath) => {
+    return async function *() {
+        try {
+            const cacheRaw = await fs.readFile(cachePath, {encoding: "utf8"});
+            const cache = JSON.parse(cacheRaw);
+
+            if (Date.now() < cache.timing.nextUpdate) {
+                yield cache.timing;
+
+                for (const passing of cache.passings) {
+                    yield passing;
+                }
+
+                return;
+            }
+        } catch (err) {
+            // Ignore missing cache file
+            if (err.code !== 'ENOENT') {
+                throw err;
+            }
+        }
+
+        const passings = func();
+        const newCache = {
+            timing: (await passings.next()).value,
+            passings: [],
+        };
+
+        yield newCache.timing;
+
+        for await (const passing of passings) {
+            newCache.passings.push(passing);
+            yield passing;
+        }
+
+        fs.writeFile(cachePath, JSON.stringify(newCache));
+    };
+};
+
+const cacheDir = path.join(__dirname, "..", "..", "..", "cache");
+
 const realtimeEndpoint = "http://data.montpellier3m.fr/node/10732/download";
+const realtimeCachePath = path.join(cacheDir, "realtime.json");
 
 /**
  * Fetch real time passings of vehicles across the network.

@@ -54,9 +106,10 @@ const fetchRealtime = async function *() {
     }
 };
 
-exports.fetchRealtime = fetchRealtime;
+exports.fetchRealtime = makeCached(fetchRealtime, realtimeCachePath);
 
 const theoreticalEndpoint = "http://data.montpellier3m.fr/node/10731/download";
+const theoreticalCachePath = path.join(cacheDir, "theoretical.json");
 
 /**
  * Fetch theoretical passings for the current day across the network.

@@ -101,4 +154,4 @@ const fetchTheoretical = async function *() {
     }
 };
 
-exports.fetchTheoretical = fetchTheoretical;
+exports.fetchTheoretical = makeCached(fetchTheoretical, theoreticalCachePath);

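The exported fetchers (now wrapped by `makeCached`) are async generators whose first yielded value is the timing record and whose remaining values are individual passings. A minimal sketch of that consumption pattern, mirroring what src/tam/courses.js does in this compare; it is illustrative only and assumes it lives next to src/tam/sources/tam.js so the relative require path resolves.

```js
// Illustrative consumer only; not part of the diff.
const tam = require("./tam");

const main = async () => {
    const passings = tam.fetchRealtime();

    // First value: timing information ({lastUpdate, nextUpdate, ...});
    // makeCached replays it from cache/realtime.json while still fresh.
    const timing = (await passings.next()).value;
    console.log("last update:", new Date(timing.lastUpdate).toISOString());

    // Remaining values: one record per passing (course, routeShortName,
    // stopId, delaySec, destArCode, ...).
    for await (const passing of passings) {
        console.log(passing.course, passing.stopId, passing.delaySec);
    }
};

main().catch(console.error);
```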
@@ -63,3 +63,11 @@ const unzipFile = (data, fileName) => new Promise((res, rej) => {
 });
 
 exports.unzipFile = unzipFile;
+
+const displayTime = date => [
+    date.getHours(),
+    date.getMinutes(),
+    date.getSeconds()
+].map(number => number.toString().padStart(2, "0")).join(":");
+
+exports.displayTime = displayTime;

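A quick illustration of the new `displayTime` helper: it formats the local hours, minutes, and seconds of a Date, each zero-padded to two digits. The date used below is hypothetical and only serves to show the output format; the require path assumes the caller sits next to src/util.js.

```js
// Illustrative usage only; not part of the diff.
const { displayTime } = require("./util");

// 09:05:03 local time on 1 Jan 2020 (hypothetical example date)
console.log(displayTime(new Date(2020, 0, 1, 9, 5, 3))); // "09:05:03"
```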