Implement iterative graph compression

This commit is contained in:
Mattéo Delabre 2021-05-23 14:59:08 +02:00
parent a51a80a4b3
commit e696761f8e
Signed by: matteo
GPG Key ID: AE3FBD02DC583ABB
2 changed files with 1174 additions and 1691 deletions

View File

@ -340,7 +340,7 @@ different sequence of nodes in two or more lines.`);
* Create a graph for navigating between stops.
* @param {Array.<Object>} elementsList List of nodes retrieved from OSM.
* @param {Object.<string,Object>} elementsById OSM nodes indexed by their ID.
* @return {Object.<string,Object.<string,NavigationEdge>} Resulting graph.
* @return {Object} Resulting graph and reverse arcs.
*/
const createNavigationGraph = (elementsList, elementsById) => {
const navigation = {};
@ -350,7 +350,7 @@ const createNavigationGraph = (elementsList, elementsById) => {
for (const obj of elementsList) {
if (obj.type === "node") {
navigation[obj.id] = {};
navigationReverse[obj.id] = {};
navigationReverse[obj.id] = new Set();
}
}
@ -360,27 +360,27 @@ const createNavigationGraph = (elementsList, elementsById) => {
const oneWay = osm.isOneWay(obj);
for (let i = 0; i + 1 < obj.nodes.length; ++i) {
const from = obj.nodes[i];
let to = obj.nodes[i + 1];
let path = [from.toString(), to.toString()];
const from = obj.nodes[i].toString();
let to = obj.nodes[i + 1].toString();
let path = [from, to];
// Make sure we can't jump between rails at railway crossings
if (i + 2 < obj.nodes.length
&& osm.isRailwayCrossing(elementsById[to])) {
const next = obj.nodes[i + 2];
path = [from.toString(), to.toString(), next.toString()];
const next = obj.nodes[i + 2].toString();
path = [from, to, next];
to = next;
i += 1;
}
navigation[from][to] = path;
navigationReverse[to][from] = true;
navigationReverse[to].add(from);
if (!oneWay) {
const reversePath = [...path];
reversePath.reverse();
navigation[to][from] = reversePath;
navigationReverse[from][to] = true;
navigationReverse[from].add(to);
}
}
}
@ -390,158 +390,99 @@ const createNavigationGraph = (elementsList, elementsById) => {
};
/**
* Remove and relink nodes that connect only two nodes or less.
* @param {Object.<string,Stop>} stops List of stops.
* Identify intermediate nodes of the navigation graph that can be simplified.
* @param {Set.<string>} stopsSet OSM IDs of stop nodes.
* @param {Navigation} navigation Input navigation graph.
* @param {Object.<string,Object.<string,boolean>>} navigationReverse
* Backward edges of the navigation graph.
* @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
* @return {Set.<string>} Set of compressible nodes.
*/
const compressNavigationGraph = (stops, navigation, navigationReverse) => {
const stopsReverse = Object.fromEntries(
Object.entries(stops).map(([id, stop]) => [stop.properties.node, id])
);
/**
 * Identify intermediate nodes of the navigation graph that can be simplified.
 *
 * A node is kept (i.e. NOT compressible) when it is a stop node, a split node
 * (two or more exits plus an entry, all distinct from each other) or a
 * junction node (two or more entries plus an exit, all distinct from each
 * other).  Every other node only relays traffic between two neighbors and can
 * be removed and relinked later.
 *
 * @param {Set.<string>} stopsSet OSM IDs of stop nodes.
 * @param {Navigation} navigation Input navigation graph.
 * @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
 * @return {Set.<string>} Set of compressible nodes.
 */
const findCompressibleNodes = (stopsSet, navigation, navigationReverse) => {
    const compressible = new Set();

    for (const nodeId in navigation) {
        if (stopsSet.has(nodeId)) {
            // Keep stop nodes
            continue;
        }

        const entries = navigationReverse[nodeId];
        const exits = new Set(Object.keys(navigation[nodeId]));

        // Keep split nodes, i.e. nodes with at least two exit nodes
        // and one entry node that are all distinct from each other
        if (entries.size >= 1) {
            if (exits.size >= 3) {
                continue;
            }

            let isSplit = false;

            if (exits.size === 2) {
                for (const entry of entries) {
                    if (!exits.has(entry)) {
                        isSplit = true;
                        break;
                    }
                }
            }

            if (isSplit) {
                continue;
            }
        }

        // Keep junction nodes, i.e. nodes with at least two entry nodes
        // and one exit node that are all distinct from each other
        if (exits.size >= 1) {
            if (entries.size >= 3) {
                continue;
            }

            let isJunction = false;

            if (entries.size === 2) {
                for (const exit of exits) {
                    if (!entries.has(exit)) {
                        isJunction = true;
                        break;
                    }
                }
            }

            if (isJunction) {
                continue;
            }
        }

        // Compress all other nodes
        compressible.add(nodeId);
    }

    return compressible;
};
/**
* Remove nodes that are not used to link up two kept nodes.
* @param {Navigation} navigation Input navigation graph.
* @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
* @param {Set.<string>} compressible Set of nodes that will not be kept.
* @return {boolean} True if some dead-ends were removed.
*/
const removeDeadEnds = (navigation, navigationReverse, compressible) => {
let didRemove = false;
// Find dead-ends starting from kept nodes
for (const beginId in navigation) {
if (compressible.has(beginId)) {
continue;
}
// Start a DFS from each node to be kept
const begin = navigation[beginId];
const stack = [];
const parent = {[beginId]: beginId};
for (const succId in begin) {
if (succId in nodesToCompress) {
if (compressible.has(succId)) {
stack.push(succId);
parent[succId] = beginId;
}
@ -551,44 +492,141 @@ const compressNavigationGraph = (stops, navigation, navigationReverse) => {
const endId = stack.pop();
const end = navigation[endId];
if (!(endId in nodesToCompress)) {
// Found another kept node
// Collect and remove intermediate nodes
const reversePath = [endId];
let trackback = parent[endId];
let oneWay = !(trackback in end);
if (compressible.has(endId)) {
let hasSuccessor = false;
for (const succId in end) {
if (succId !== parent[endId]) {
parent[succId] = endId;
stack.push(succId);
hasSuccessor = true;
}
}
if (!hasSuccessor) {
// Remove the dead-end path
let trackback = endId;
while (trackback !== beginId) {
navigationReverse[trackback].delete(parent[trackback]);
delete navigation[parent[trackback]][trackback];
trackback = parent[trackback];
}
didRemove = true;
}
}
}
}
// Find dead-ends starting from compressible source nodes
for (const beginId in navigation) {
if (!compressible.has(beginId)) {
continue;
}
if (navigationReverse[beginId].size > 0) {
continue;
}
const begin = navigation[beginId];
const stack = [];
const parent = {[beginId]: beginId};
for (const succId in begin) {
stack.push(succId);
parent[succId] = beginId;
}
while (stack.length > 0) {
const endId = stack.pop();
const end = navigation[endId];
if (compressible.has(endId)) {
for (const succId in end) {
if (succId !== parent[endId]) {
parent[succId] = endId;
stack.push(succId);
}
}
} else {
// Remove the dead-end path
let trackback = endId;
while (trackback !== beginId) {
reversePath.push(trackback);
oneWay = oneWay || !(parent[trackback] in navigation[trackback]);
delete navigation[trackback];
delete navigationReverse[trackback];
navigationReverse[trackback].delete(parent[trackback]);
delete navigation[parent[trackback]][trackback];
trackback = parent[trackback];
}
reversePath.push(beginId);
const forwardPath = [...reversePath];
forwardPath.reverse();
didRemove = true;
}
}
}
// Create edges to link both nodes directly
delete begin[forwardPath[1]];
delete navigationReverse[endId][reversePath[1]];
return didRemove;
};
delete end[reversePath[1]];
delete navigationReverse[beginId][forwardPath[1]];
/**
* Compress the given set of nodes.
* @param {Navigation} navigation Input navigation graph.
* @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
* @param {Set.<string>} compressible Set of nodes to compress.
* @return {boolean} True if some nodes were compressed.
*/
const removeCompressibleNodes = (navigation, navigationReverse, compressible) => {
let didCompress = false;
if (!(endId in begin)) {
begin[endId] = forwardPath;
navigationReverse[endId][beginId] = true;
for (const beginId in navigation) {
if (compressible.has(beginId)) {
continue;
}
// Start a DFS from each kept node
const begin = navigation[beginId];
const stack = [];
const parent = {[beginId]: beginId};
for (const succId in begin) {
if (compressible.has(succId)) {
stack.push(succId);
parent[succId] = beginId;
}
}
while (stack.length > 0) {
const endId = stack.pop();
const end = navigation[endId];
if (!compressible.has(endId)) {
// Found another kept node
// Collect and remove intermediate path
let path = [];
let trackback = endId;
do {
const segment = [...navigation[parent[trackback]][trackback]];
segment.reverse();
path = path.concat(segment.slice(0, -1));
navigationReverse[trackback].delete(parent[trackback]);
delete navigation[parent[trackback]][trackback];
trackback = parent[trackback];
} while (trackback !== beginId);
// Make sure not to add loops if we're compressing a cycle
if (endId !== beginId) {
path.push(beginId);
path.reverse();
begin[endId] = path;
navigationReverse[endId].add(beginId);
}
if (!oneWay && !(beginId in end)) {
end[beginId] = reversePath;
navigationReverse[beginId][endId] = true;
}
didCompress = true;
} else {
// Continue the traversal down unused nodes
// Continue the traversal down compressible nodes
let isFirst = true;
for (const succId in end) {
@ -606,6 +644,57 @@ non-junction node ${endId}`);
}
}
}
return didCompress;
};
/**
* Find nodes in the graph that have no exits nor entries and remove them.
* @param {Navigation} navigation Input navigation graph.
* @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
*/
/**
 * Find nodes in the graph that have no exits nor entries and remove them.
 *
 * Mutates both maps in place: an isolated node is dropped from the forward
 * graph and from the reverse-arc index at the same time.
 *
 * @param {Navigation} navigation Input navigation graph.
 * @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
 */
const cleanUpIsolatedNodes = (navigation, navigationReverse) => {
    for (const [nodeId, exits] of Object.entries(navigation)) {
        const hasExits = Object.keys(exits).length > 0;
        const hasEntries = navigationReverse[nodeId].size > 0;

        if (!hasExits && !hasEntries) {
            delete navigation[nodeId];
            delete navigationReverse[nodeId];
        }
    }
};
/**
* Remove and relink nodes that connect only two nodes or less.
* @param {Object.<string,Stop>} stops List of stops.
* @param {Navigation} navigation Input navigation graph.
* @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
*/
/**
 * Remove and relink nodes that connect only two nodes or less.
 *
 * Alternates between pruning dead-end paths and merging pass-through nodes
 * until the graph reaches a fixed point, then drops any node left with no
 * arcs at all.  The graph is modified in place.
 *
 * @param {Object.<string,Stop>} stops List of stops.
 * @param {Navigation} navigation Input navigation graph.
 * @param {Object.<string,Set.<string>>} navigationReverse Reverse arcs.
 */
const compressNavigationGraph = (stops, navigation, navigationReverse) => {
    const stopsSet = new Set(
        Object.values(stops).map(({properties}) => properties.node)
    );

    let compressible = null;
    let didCompress;

    do {
        // Keep pruning until no dead-end remains, re-deriving the
        // compressible set after every pass since pruning changes degrees
        let didRemove;

        do {
            compressible = findCompressibleNodes(
                stopsSet, navigation, navigationReverse
            );
            didRemove = removeDeadEnds(
                navigation, navigationReverse, compressible
            );
        } while (didRemove);

        didCompress = removeCompressibleNodes(
            navigation, navigationReverse, compressible
        );
        cleanUpIsolatedNodes(navigation, navigationReverse);
    } while (didCompress);
};
/**

File diff suppressed because it is too large Load Diff