Mirror of https://github.com/onthegomap/planetiler
openmaptiles profile cleanup and comments
parent
8a3b3e3fbf
commit
aaf534424d
|
@ -5,7 +5,7 @@ import com.graphhopper.reader.ReaderNode;
|
|||
import com.graphhopper.reader.ReaderRelation;
|
||||
import com.graphhopper.reader.ReaderWay;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmInputFile;
|
||||
|
|
|
@ -6,7 +6,10 @@ import com.onthegomap.flatmap.reader.SourceFeature;
|
|||
import com.onthegomap.flatmap.reader.osm.OsmElement;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmRelationInfo;
|
||||
import com.onthegomap.flatmap.util.Wikidata;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
|
||||
|
@ -25,6 +28,9 @@ import java.util.function.Consumer;
|
|||
* an element is encountered in a later source, or the {@link Profile#finish(String, FeatureCollector.Factory, Consumer)}
|
||||
* method is called for a source. All methods may be called concurrently by multiple threads, so implementations must be
|
||||
* careful to ensure access to instance fields is thread-safe.
|
||||
* <p>
|
||||
* For complex profiles, {@link ForwardingProfile} provides a framework for splitting the logic up into several handlers
|
||||
* (i.e. one per layer) and forwarding each element/event to the handlers that care about it.
|
||||
*/
|
||||
public interface Profile {
|
||||
|
||||
|
@ -56,9 +62,7 @@ public interface Profile {
|
|||
*/
|
||||
void processFeature(SourceFeature sourceFeature, FeatureCollector features);
|
||||
|
||||
/**
|
||||
* Free any resources associated with this profile (i.e. shared data structures)
|
||||
*/
|
||||
/** Free any resources associated with this profile (i.e. shared data structures) */
|
||||
default void release() {
|
||||
}
|
||||
|
||||
|
@ -81,7 +85,7 @@ public interface Profile {
|
|||
* @implSpec The default implementation passes through input features unaltered
|
||||
*/
|
||||
default List<VectorTile.Feature> postProcessLayerFeatures(String layer, int zoom,
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
return items;
|
||||
}
|
||||
|
||||
|
@ -151,7 +155,7 @@ public interface Profile {
|
|||
* @param next a consumer to pass finished map features to
|
||||
*/
|
||||
default void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> next) {
|
||||
Consumer<FeatureCollector.Feature> next) {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -176,7 +180,7 @@ public interface Profile {
|
|||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcessLayerFeatures(String layer, int zoom,
|
||||
List<VectorTile.Feature> items) {
|
||||
List<VectorTile.Feature> items) {
|
||||
return items;
|
||||
}
|
||||
|
||||
|
@ -185,4 +189,175 @@ public interface Profile {
|
|||
return "Null";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A profile that delegates handling of input features to individual {@link Handler LayerHandlers} if they implement
|
||||
* {@link OsmRelationPreprocessor}, {@link FeatureProcessor}, {@link FinishHandler}, or {@link FeaturePostProcessor}.
|
||||
*/
|
||||
abstract class ForwardingProfile implements Profile {
|
||||
|
||||
private final List<Handler> handlers = new ArrayList<>();
|
||||
/** Handlers that pre-process OSM relations during pass 1 through the data. */
|
||||
private final List<OsmRelationPreprocessor> osmRelationPreprocessors = new ArrayList<>();
|
||||
/** Handlers that get a callback when each source is finished reading. */
|
||||
private final List<FinishHandler> finishHandlers = new ArrayList<>();
|
||||
/** Map from layer name to its handler if it implements {@link FeaturePostProcessor}. */
|
||||
private final Map<String, List<FeaturePostProcessor>> postProcessors = new HashMap<>();
|
||||
/** Map from source ID to its handler if it implements {@link FeatureProcessor}. */
|
||||
private final Map<String, List<FeatureProcessor>> sourceElementProcessors = new HashMap<>();
|
||||
|
||||
|
||||
protected void registerSourceHandler(String source, FeatureProcessor processor) {
|
||||
sourceElementProcessors.computeIfAbsent(source, name -> new ArrayList<>())
|
||||
.add(processor);
|
||||
}
|
||||
|
||||
protected void registerHandler(Handler handler) {
|
||||
this.handlers.add(handler);
|
||||
if (handler instanceof OsmRelationPreprocessor osmRelationPreprocessor) {
|
||||
osmRelationPreprocessors.add(osmRelationPreprocessor);
|
||||
}
|
||||
if (handler instanceof FinishHandler finishHandler) {
|
||||
finishHandlers.add(finishHandler);
|
||||
}
|
||||
if (handler instanceof FeaturePostProcessor postProcessor) {
|
||||
postProcessors.computeIfAbsent(postProcessor.name(), name -> new ArrayList<>())
|
||||
.add(postProcessor);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OsmRelationInfo> preprocessOsmRelation(OsmElement.Relation relation) {
|
||||
// delegate OSM relation pre-processing to each layer, if it implements FeaturePostProcessor
|
||||
List<OsmRelationInfo> result = null;
|
||||
for (OsmRelationPreprocessor osmRelationPreprocessor : osmRelationPreprocessors) {
|
||||
List<OsmRelationInfo> thisResult = osmRelationPreprocessor
|
||||
.preprocessOsmRelation(relation);
|
||||
if (thisResult != null) {
|
||||
if (result == null) {
|
||||
result = new ArrayList<>(thisResult);
|
||||
} else {
|
||||
result.addAll(thisResult);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processFeature(SourceFeature sourceFeature, FeatureCollector features) {
|
||||
// delegate source feature processing to each handler for that source
|
||||
var handlers = sourceElementProcessors.get(sourceFeature.getSource());
|
||||
if (handlers != null) {
|
||||
for (var handler : handlers) {
|
||||
handler.processFeature(sourceFeature, features);
|
||||
// TODO extract common handling for expression-based filtering from openmaptiles to this
|
||||
// common profile when we have another use-case for it.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean caresAboutSource(String name) {
|
||||
return sourceElementProcessors.containsKey(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcessLayerFeatures(String layer, int zoom, List<VectorTile.Feature> items)
|
||||
throws GeometryException {
|
||||
// delegate feature post-processing to each layer, if it implements FeaturePostProcessor
|
||||
List<FeaturePostProcessor> handlers = postProcessors.get(layer);
|
||||
List<VectorTile.Feature> result = null;
|
||||
if (handlers != null) {
|
||||
for (FeaturePostProcessor handler : handlers) {
|
||||
var thisResult = handler.postProcess(zoom, items);
|
||||
if (thisResult != null) {
|
||||
if (result == null) {
|
||||
result = thisResult;
|
||||
} else {
|
||||
result.addAll(thisResult);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result == null ? items : result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> next) {
|
||||
// delegate finish handling to every layer that implements FinishHandler
|
||||
for (var handler : finishHandlers) {
|
||||
handler.finish(sourceName, featureCollectors, next);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
// release resources used by each handler
|
||||
handlers.forEach(Handler::release);
|
||||
}
|
||||
|
||||
/** Interface for handlers that this profile forwards to should implement. */
|
||||
public interface Handler {
|
||||
|
||||
/** Free any resources associated with this profile (i.e. shared data structures) */
|
||||
default void release() {
|
||||
}
|
||||
}
|
||||
|
||||
public interface HandlerForLayer {
|
||||
|
||||
/** The layer name this handler is for */
|
||||
String name();
|
||||
}
|
||||
|
||||
/** Handlers should implement this interface to get notified when a source finishes processing. */
|
||||
public interface FinishHandler {
|
||||
|
||||
/**
|
||||
* Invoked once for each source after all elements for a source have been processed.
|
||||
*
|
||||
* @see Profile#finish(String, FeatureCollector.Factory, Consumer)
|
||||
*/
|
||||
void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> emit);
|
||||
}
|
||||
|
||||
/** Handlers should implement this interface to pre-process OSM relations during pass 1 through the data. */
|
||||
public interface OsmRelationPreprocessor {
|
||||
|
||||
/**
|
||||
* Returns information extracted from an OSM relation during pass 1 of the input OSM data to make available when
|
||||
* processing elements in that relation during pass 2.
|
||||
*
|
||||
* @see Profile#preprocessOsmRelation(OsmElement.Relation)
|
||||
*/
|
||||
List<OsmRelationInfo> preprocessOsmRelation(OsmElement.Relation relation);
|
||||
}
|
||||
|
||||
/** Handlers should implement this interface to post-process vector tile features before emitting an output tile. */
|
||||
public interface FeaturePostProcessor extends HandlerForLayer {
|
||||
|
||||
/**
|
||||
* Apply any post-processing to features in this output layer of a tile before writing it to the output file.
|
||||
*
|
||||
* @throws GeometryException if the input elements cannot be deserialized, or output elements cannot be
|
||||
* serialized
|
||||
* @see Profile#postProcessLayerFeatures(String, int, List)
|
||||
*/
|
||||
List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) throws GeometryException;
|
||||
}
|
||||
|
||||
/** Handlers should implement this interface to process input features from a given source ID. */
|
||||
public interface FeatureProcessor {
|
||||
|
||||
/**
|
||||
* Process an input element from a source feature.
|
||||
*
|
||||
* @see Profile#processFeature(SourceFeature, FeatureCollector)
|
||||
*/
|
||||
void processFeature(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,401 @@
|
|||
package com.onthegomap.flatmap.expression;
|
||||
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.util.Format;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* A framework for defining and manipulating boolean expressions that match on input element.
|
||||
* <p>
|
||||
* Calling {@code toString()} on any expression will generate code that can be used to recreate an identical copy of the
|
||||
* original expression, assuming that the generated code includes:
|
||||
* <pre>{@code
|
||||
* import static com.onthegomap.flatmap.openmaptiles.expression.Expression.*;
|
||||
* }</pre>
|
||||
*/
|
||||
public interface Expression {
|
||||
|
||||
String LINESTRING_TYPE = "linestring";
|
||||
String POINT_TYPE = "point";
|
||||
String POLYGON_TYPE = "polygon";
|
||||
String RELATION_MEMBER_TYPE = "relation_member";
|
||||
|
||||
Set<String> supportedTypes = Set.of(LINESTRING_TYPE, POINT_TYPE, POLYGON_TYPE, RELATION_MEMBER_TYPE);
|
||||
Expression TRUE = new Expression() {
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TRUE";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
Expression FALSE = new Expression() {
|
||||
public String toString() {
|
||||
return "FALSE";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
static And and(Expression... children) {
|
||||
return and(List.of(children));
|
||||
}
|
||||
|
||||
static And and(List<Expression> children) {
|
||||
return new And(children);
|
||||
}
|
||||
|
||||
static Or or(Expression... children) {
|
||||
return or(List.of(children));
|
||||
}
|
||||
|
||||
static Or or(List<Expression> children) {
|
||||
return new Or(children);
|
||||
}
|
||||
|
||||
static Not not(Expression child) {
|
||||
return new Not(child);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an expression that evaluates to true if the value for {@code field} tag is any of {@code values}.
|
||||
* <p>
|
||||
* {@code values} can contain exact matches, "%text%" to match any value containing "text", or "" to match any value.
|
||||
*/
|
||||
static MatchAny matchAny(String field, String... values) {
|
||||
return matchAny(field, List.of(values));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an expression that evaluates to true if the value for {@code field} tag is any of {@code values}.
|
||||
* <p>
|
||||
* {@code values} can contain exact matches, "%text%" to match any value containing "text", or "" to match any value.
|
||||
*/
|
||||
static MatchAny matchAny(String field, List<String> values) {
|
||||
return new MatchAny(field, values);
|
||||
}
|
||||
|
||||
/** Returns an expression that evaluates to true if the element has any value for tag {@code field}. */
|
||||
static MatchField matchField(String field) {
|
||||
return new MatchField(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an expression that evaluates to true if the geometry of an element matches {@code type}.
|
||||
* <p>
|
||||
* Allowed values:
|
||||
* <ul>
|
||||
* <li>"linestring"</li>
|
||||
* <li>"point"</li>
|
||||
* <li>"polygon"</li>
|
||||
* <li>"relation_member"</li>
|
||||
* </ul>
|
||||
*/
|
||||
static MatchType matchType(String type) {
|
||||
if (!supportedTypes.contains(type)) {
|
||||
throw new IllegalArgumentException("Unsupported type: " + type);
|
||||
}
|
||||
return new MatchType(type);
|
||||
}
|
||||
|
||||
private static String listToString(List<?> items) {
|
||||
return items.stream().map(Object::toString).collect(Collectors.joining(", "));
|
||||
}
|
||||
|
||||
private static Expression simplify(Expression initial) {
|
||||
// iteratively simplify the expression until we reach a fixed point and start seeing
|
||||
// an expression that's already been seen before
|
||||
Expression simplified = initial;
|
||||
Set<Expression> seen = new HashSet<>();
|
||||
seen.add(simplified);
|
||||
while (true) {
|
||||
simplified = simplifyOnce(simplified);
|
||||
if (seen.contains(simplified) || seen.size() > 100) {
|
||||
return simplified;
|
||||
}
|
||||
seen.add(simplified);
|
||||
}
|
||||
}
|
||||
|
||||
private static Expression simplifyOnce(Expression expression) {
|
||||
if (expression instanceof Not not) {
|
||||
if (not.child instanceof Or or) {
|
||||
return and(or.children.stream().<Expression>map(Expression::not).toList());
|
||||
} else if (not.child instanceof And and) {
|
||||
return or(and.children.stream().<Expression>map(Expression::not).toList());
|
||||
} else if (not.child instanceof Not not2) {
|
||||
return not2.child;
|
||||
} else if (not.child == TRUE) {
|
||||
return FALSE;
|
||||
} else if (not.child == FALSE) {
|
||||
return TRUE;
|
||||
}
|
||||
return not;
|
||||
} else if (expression instanceof Or or) {
|
||||
if (or.children.isEmpty()) {
|
||||
return FALSE;
|
||||
}
|
||||
if (or.children.size() == 1) {
|
||||
return simplifyOnce(or.children.get(0));
|
||||
}
|
||||
if (or.children.contains(TRUE)) {
|
||||
return TRUE;
|
||||
}
|
||||
return or(or.children.stream()
|
||||
// hoist children
|
||||
.flatMap(child -> child instanceof Or childOr ? childOr.children.stream() : Stream.of(child))
|
||||
.filter(child -> child != FALSE)
|
||||
.map(Expression::simplifyOnce).toList());
|
||||
} else if (expression instanceof And and) {
|
||||
if (and.children.isEmpty()) {
|
||||
return FALSE;
|
||||
}
|
||||
if (and.children.size() == 1) {
|
||||
return simplifyOnce(and.children.get(0));
|
||||
}
|
||||
if (and.children.contains(FALSE)) {
|
||||
return FALSE;
|
||||
}
|
||||
return and(and.children.stream()
|
||||
// hoist children
|
||||
.flatMap(child -> child instanceof And childAnd ? childAnd.children.stream() : Stream.of(child))
|
||||
.filter(child -> child != TRUE)
|
||||
.map(Expression::simplifyOnce).toList());
|
||||
} else {
|
||||
return expression;
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns an equivalent, simplified copy of this expression but does not modify {@code this}. */
|
||||
default Expression simplify() {
|
||||
return simplify(this);
|
||||
}
|
||||
|
||||
/** Returns a copy of this expression where every nested instance of {@code a} is replaced with {@code b}. */
|
||||
default Expression replace(Expression a, Expression b) {
|
||||
return replace(a::equals, b);
|
||||
}
|
||||
|
||||
/** Returns a copy of this expression where every nested instance matching {@code replace} is replaced with {@code b}. */
|
||||
default Expression replace(Predicate<Expression> replace, Expression b) {
|
||||
if (replace.test(this)) {
|
||||
return b;
|
||||
} else if (this instanceof Not not) {
|
||||
return new Not(not.child.replace(replace, b));
|
||||
} else if (this instanceof Or or) {
|
||||
return new Or(or.children.stream().map(child -> child.replace(replace, b)).toList());
|
||||
} else if (this instanceof And and) {
|
||||
return new And(and.children.stream().map(child -> child.replace(replace, b)).toList());
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns true if this expression or any subexpression matches {@code filter}. */
|
||||
default boolean contains(Predicate<Expression> filter) {
|
||||
if (filter.test(this)) {
|
||||
return true;
|
||||
} else if (this instanceof Not not) {
|
||||
return not.child.contains(filter);
|
||||
} else if (this instanceof Or or) {
|
||||
return or.children.stream().anyMatch(child -> child.contains(filter));
|
||||
} else if (this instanceof And and) {
|
||||
return and.children.stream().anyMatch(child -> child.contains(filter));
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if this expression matches an input element.
|
||||
*
|
||||
* @param input the input element
|
||||
* @param matchKeys list that this method call will add any key to that was responsible for triggering the match
|
||||
* @return true if this expression matches the input element
|
||||
*/
|
||||
boolean evaluate(SourceFeature input, List<String> matchKeys);
|
||||
|
||||
record And(List<Expression> children) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "and(" + listToString(children) + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
for (Expression child : children) {
|
||||
if (!child.evaluate(input, matchKeys)) {
|
||||
matchKeys.clear();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
record Or(List<Expression> children) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "or(" + listToString(children) + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
int size = children.size();
|
||||
// Optimization: this method consumes the most time when matching against input elements, and
|
||||
// iterating through this list by index is slightly faster than an enhanced for loop
|
||||
// noinspection ForLoopReplaceableByForEach - for intellij
|
||||
for (int i = 0; i < size; i++) {
|
||||
Expression child = children.get(i);
|
||||
if (child.evaluate(input, matchKeys)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || obj.getClass() != this.getClass()) {
|
||||
return false;
|
||||
}
|
||||
var that = (Or) obj;
|
||||
return Objects.equals(this.children, that.children);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(children);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
record Not(Expression child) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "not(" + child + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
return !child.evaluate(input, new ArrayList<>());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates to true if the value for {@code field} tag is any of {@code exactMatches} or contains any of {@code
|
||||
* wildcards}.
|
||||
*
|
||||
* @param values all raw string values that were initially provided
|
||||
* @param exactMatches the input {@code values} that should be treated as exact matches
|
||||
* @param wildcards the input {@code values} that should be treated as wildcards
|
||||
* @param matchWhenMissing if {@code values} contained ""
|
||||
*/
|
||||
record MatchAny(
|
||||
String field, List<String> values, Set<String> exactMatches, List<String> wildcards, boolean matchWhenMissing
|
||||
) implements Expression {
|
||||
|
||||
private static final Pattern containsPattern = Pattern.compile("^%(.*)%$");
|
||||
|
||||
MatchAny(String field, List<String> values) {
|
||||
this(field, values,
|
||||
values.stream().filter(v -> !v.contains("%")).collect(Collectors.toSet()),
|
||||
values.stream().filter(v -> v.contains("%")).map(val -> {
|
||||
var matcher = containsPattern.matcher(val);
|
||||
if (!matcher.matches()) {
|
||||
throw new IllegalArgumentException("wildcards must start/end with %: " + val);
|
||||
}
|
||||
return matcher.group(1);
|
||||
}).toList(),
|
||||
values.contains("")
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
Object value = input.getTag(field);
|
||||
if (value == null) {
|
||||
return matchWhenMissing;
|
||||
} else {
|
||||
String str = value.toString();
|
||||
if (exactMatches.contains(str)) {
|
||||
matchKeys.add(field);
|
||||
return true;
|
||||
}
|
||||
for (String target : wildcards) {
|
||||
if (str.contains(target)) {
|
||||
matchKeys.add(field);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchAny(" + Format.quote(field) + ", " + values.stream().map(Format::quote)
|
||||
.collect(Collectors.joining(", ")) + ")";
|
||||
}
|
||||
}
|
||||
|
||||
/** Evaluates to true if an input element contains any value for {@code field} tag. */
|
||||
record MatchField(String field) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchField(" + Format.quote(field) + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
if (input.hasTag(field)) {
|
||||
matchKeys.add(field);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates to true if an input element has geometry type matching {@code type}.
|
||||
*/
|
||||
record MatchType(String type) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchType(" + Format.quote(type) + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean evaluate(SourceFeature input, List<String> matchKeys) {
|
||||
return switch (type) {
|
||||
case LINESTRING_TYPE -> input.canBeLine();
|
||||
case POLYGON_TYPE -> input.canBePolygon();
|
||||
case POINT_TYPE -> input.isPoint();
|
||||
case RELATION_MEMBER_TYPE -> input.hasRelationInfo();
|
||||
default -> false;
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,320 @@
|
|||
package com.onthegomap.flatmap.expression;
|
||||
|
||||
import static com.onthegomap.flatmap.expression.Expression.FALSE;
|
||||
import static com.onthegomap.flatmap.expression.Expression.TRUE;
|
||||
import static com.onthegomap.flatmap.expression.Expression.matchType;
|
||||
import static com.onthegomap.flatmap.geo.GeoUtils.EMPTY_GEOMETRY;
|
||||
|
||||
import com.onthegomap.flatmap.reader.SimpleFeature;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
* A list of {@link Expression Expressions} to evaluate on input elements.
|
||||
* <p>
|
||||
* {@link #index()} returns an optimized {@link Index} that evaluates the minimal set of expressions on the keys present
|
||||
* on the element.
|
||||
* <p>
|
||||
* {@link Index#getMatches(SourceFeature)} returns the data value associated with the expressions that match an input
|
||||
* element.
|
||||
*
|
||||
* @param <T> type of data value associated with each expression
|
||||
*/
|
||||
public record MultiExpression<T>(List<Entry<T>> expressions) {
|
||||
|
||||
public static <T> MultiExpression<T> of(List<Entry<T>> expressions) {
|
||||
return new MultiExpression<>(expressions);
|
||||
}
|
||||
|
||||
public static <T> Entry<T> entry(T result, Expression expression) {
|
||||
return new Entry<>(result, expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates a list of expressions on an input element, storing the matches into {@code result} and using {@code
|
||||
* visited} to avoid evaluating an expression more than once.
|
||||
*/
|
||||
private static <T> void visitExpressions(SourceFeature input, List<Match<T>> result,
|
||||
boolean[] visited, List<EntryWithId<T>> expressions) {
|
||||
if (expressions != null) {
|
||||
for (EntryWithId<T> expressionValue : expressions) {
|
||||
if (!visited[expressionValue.id]) {
|
||||
visited[expressionValue.id] = true;
|
||||
List<String> matchKeys = new ArrayList<>();
|
||||
if (expressionValue.expression().evaluate(input, matchKeys)) {
|
||||
result.add(new Match<>(expressionValue.result, matchKeys));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Calls {@code acceptKey} for every tag that could possibly cause {@code exp} to match an input element. */
|
||||
private static void getRelevantKeys(Expression exp, Consumer<String> acceptKey) {
|
||||
if (exp instanceof Expression.And and) {
|
||||
and.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Or or) {
|
||||
or.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Not) {
|
||||
// ignore anything that's purely used as a filter
|
||||
} else if (exp instanceof Expression.MatchField field) {
|
||||
acceptKey.accept(field.field());
|
||||
} else if (exp instanceof Expression.MatchAny any) {
|
||||
acceptKey.accept(any.field());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls {@code acceptKey} for every tag that, when missing, could possibly cause {@code exp} to match an input
|
||||
* element.
|
||||
*/
|
||||
private static void getRelevantMissingKeys(Expression exp, Consumer<String> acceptKey) {
|
||||
if (exp instanceof Expression.And and) {
|
||||
and.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Or or) {
|
||||
or.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Not) {
|
||||
// ignore anything that's purely used as a filter
|
||||
} else if (exp instanceof Expression.MatchAny any && any.matchWhenMissing()) {
|
||||
acceptKey.accept(any.field());
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns an optimized index for matching {@link #expressions()} against each input element. */
|
||||
public Index<T> index() {
|
||||
if (expressions.isEmpty()) {
|
||||
return new EmptyIndex<>();
|
||||
}
|
||||
boolean caresAboutGeometryType = expressions.stream().anyMatch(entry ->
|
||||
entry.expression.contains(exp -> exp instanceof Expression.MatchType));
|
||||
return caresAboutGeometryType ? new GeometryTypeIndex<>(this) : new KeyIndex<>(this);
|
||||
}
|
||||
|
||||
/** Returns a copy of this multi-expression that replaces every expression using {@code mapper}. */
|
||||
public MultiExpression<T> map(Function<Expression, Expression> mapper) {
|
||||
return new MultiExpression<>(
|
||||
expressions.stream()
|
||||
.map(entry -> entry(entry.result, mapper.apply(entry.expression).simplify()))
|
||||
.filter(entry -> entry.expression != Expression.FALSE)
|
||||
.toList()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this multi-expression that replaces every sub-expression that matches {@code test} with {@code
|
||||
* b}.
|
||||
*/
|
||||
public MultiExpression<T> replace(Predicate<Expression> test, Expression b) {
|
||||
return map(e -> e.replace(test, b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this multi-expression that replaces every sub-expression equal to {@code a} with {@code b}.
|
||||
*/
|
||||
public MultiExpression<T> replace(Expression a, Expression b) {
|
||||
return map(e -> e.replace(a, b));
|
||||
}
|
||||
|
||||
/** Returns a copy of this multi-expression with each expression simplified. */
|
||||
public MultiExpression<T> simplify() {
|
||||
return map(e -> e.simplify());
|
||||
}
|
||||
|
||||
/** Returns a copy of this multi-expression, filtering-out the entry for each data value matching {@code accept}. */
|
||||
public MultiExpression<T> filterResults(Predicate<T> accept) {
|
||||
return new MultiExpression<>(
|
||||
expressions.stream()
|
||||
.filter(entry -> accept.test(entry.result))
|
||||
.toList()
|
||||
);
|
||||
}
|
||||
|
||||
/** Returns a copy of this multi-expression, replacing the data value with {@code fn}. */
|
||||
public <U> MultiExpression<U> mapResults(Function<T, U> fn) {
|
||||
return new MultiExpression<>(
|
||||
expressions.stream()
|
||||
.map(entry -> entry(fn.apply(entry.result), entry.expression))
|
||||
.toList()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* An optimized index for finding which expressions match an input element.
|
||||
*
|
||||
* @param <T> type of data value associated with each expression
|
||||
*/
|
||||
public interface Index<T> {
|
||||
|
||||
List<Match<T>> getMatchesWithTriggers(SourceFeature input);
|
||||
|
||||
/** Returns all data values associated with expressions that match an input element. */
|
||||
default List<T> getMatches(SourceFeature input) {
|
||||
List<Match<T>> matches = getMatchesWithTriggers(input);
|
||||
return matches.stream().map(d -> d.match).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the data value associated with the first expression that match an input element, or {@code defaultValue}
|
||||
* if none match.
|
||||
*/
|
||||
default T getOrElse(SourceFeature input, T defaultValue) {
|
||||
List<T> matches = getMatches(input);
|
||||
return matches.isEmpty() ? defaultValue : matches.get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the data value associated with expressions matching a feature with {@code tags}.
|
||||
*/
|
||||
default T getOrElse(Map<String, Object> tags, T defaultValue) {
|
||||
List<T> matches = getMatches(SimpleFeature.create(EMPTY_GEOMETRY, tags));
|
||||
return matches.isEmpty() ? defaultValue : matches.get(0);
|
||||
}
|
||||
|
||||
/** Returns true if any expression matches that tags from an input element. */
|
||||
default boolean matches(SourceFeature input) {
|
||||
return !getMatchesWithTriggers(input).isEmpty();
|
||||
}
|
||||
|
||||
default boolean isEmpty() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static class EmptyIndex<T> implements Index<T> {
|
||||
|
||||
@Override
|
||||
public List<Match<T>> getMatchesWithTriggers(SourceFeature input) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/** Index that limits the search space of expressions based on keys present on an input element. */
|
||||
private static class KeyIndex<T> implements Index<T> {
|
||||
|
||||
private final int numExpressions;
|
||||
// index from source feature tag key to the expressions that include it so that
|
||||
// we can limit the number of expressions we need to evaluate for each input,
|
||||
// improves matching performance by ~5x
|
||||
private final Map<String, List<EntryWithId<T>>> keyToExpressionsMap;
|
||||
// same as keyToExpressionsMap but as a list (optimized for iteration when # source feature keys > # tags we care about)
|
||||
private final List<Map.Entry<String, List<EntryWithId<T>>>> keyToExpressionsList;
|
||||
// expressions that should match when certain tags are *not* present on an input element
|
||||
private final List<Map.Entry<String, List<EntryWithId<T>>>> missingKeyToExpressionList;
|
||||
|
||||
private KeyIndex(MultiExpression<T> expressions) {
|
||||
AtomicInteger ids = new AtomicInteger();
|
||||
// build the indexes
|
||||
Map<String, Set<EntryWithId<T>>> keyToExpressions = new HashMap<>();
|
||||
Map<String, Set<EntryWithId<T>>> missingKeyToExpressions = new HashMap<>();
|
||||
for (var entry : expressions.expressions) {
|
||||
Expression expression = entry.expression;
|
||||
EntryWithId<T> expressionValue = new EntryWithId<>(entry.result, expression, ids.incrementAndGet());
|
||||
getRelevantKeys(expression,
|
||||
key -> keyToExpressions.computeIfAbsent(key, k -> new HashSet<>()).add(expressionValue));
|
||||
getRelevantMissingKeys(expression,
|
||||
key -> missingKeyToExpressions.computeIfAbsent(key, k -> new HashSet<>()).add(expressionValue));
|
||||
}
|
||||
keyToExpressionsMap = new HashMap<>();
|
||||
keyToExpressions.forEach((key, value) -> keyToExpressionsMap.put(key, value.stream().toList()));
|
||||
keyToExpressionsList = keyToExpressionsMap.entrySet().stream().toList();
|
||||
missingKeyToExpressionList = missingKeyToExpressions.entrySet().stream()
|
||||
.map(entry -> Map.entry(entry.getKey(), entry.getValue().stream().toList())).toList();
|
||||
numExpressions = ids.incrementAndGet();
|
||||
}
|
||||
|
||||
/** Lookup matches in this index for expressions that match a certain type. */
|
||||
@Override
|
||||
public List<Match<T>> getMatchesWithTriggers(SourceFeature input) {
|
||||
List<Match<T>> result = new ArrayList<>();
|
||||
boolean[] visited = new boolean[numExpressions];
|
||||
for (var entry : missingKeyToExpressionList) {
|
||||
if (!input.hasTag(entry.getKey())) {
|
||||
visitExpressions(input, result, visited, entry.getValue());
|
||||
}
|
||||
}
|
||||
Map<String, Object> tags = input.tags();
|
||||
if (tags.size() < keyToExpressionsMap.size()) {
|
||||
for (String inputKey : tags.keySet()) {
|
||||
visitExpressions(input, result, visited, keyToExpressionsMap.get(inputKey));
|
||||
}
|
||||
} else {
|
||||
for (var entry : keyToExpressionsList) {
|
||||
if (tags.containsKey(entry.getKey())) {
|
||||
visitExpressions(input, result, visited, entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/** Index that limits the search space of expressions based on geometry type of an input element. */
|
||||
private static class GeometryTypeIndex<T> implements Index<T> {
|
||||
|
||||
private final KeyIndex<T> pointIndex;
|
||||
private final KeyIndex<T> lineIndex;
|
||||
private final KeyIndex<T> polygonIndex;
|
||||
|
||||
private GeometryTypeIndex(MultiExpression<T> expressions) {
|
||||
// build an index per type then search in each of those indexes based on the geometry type of each input element
|
||||
// this narrows the search space substantially, improving matching performance
|
||||
pointIndex = indexForType(expressions, Expression.POINT_TYPE);
|
||||
lineIndex = indexForType(expressions, Expression.LINESTRING_TYPE);
|
||||
polygonIndex = indexForType(expressions, Expression.POLYGON_TYPE);
|
||||
}
|
||||
|
||||
private KeyIndex<T> indexForType(MultiExpression<T> expressions, String type) {
|
||||
return new KeyIndex<>(expressions
|
||||
.replace(matchType(type), TRUE)
|
||||
.replace(e -> e instanceof Expression.MatchType, FALSE)
|
||||
.simplify());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all data values associated with expressions that match an input element, along with the tag keys that
|
||||
* caused the match.
|
||||
*/
|
||||
public List<Match<T>> getMatchesWithTriggers(SourceFeature input) {
|
||||
List<Match<T>> result;
|
||||
if (input.isPoint()) {
|
||||
result = pointIndex.getMatchesWithTriggers(input);
|
||||
} else if (input.canBeLine()) {
|
||||
result = lineIndex.getMatchesWithTriggers(input);
|
||||
// closed ways can be lines or polygons, unless area=yes or no
|
||||
if (input.canBePolygon()) {
|
||||
result.addAll(polygonIndex.getMatchesWithTriggers(input));
|
||||
}
|
||||
} else if (input.canBePolygon()) {
|
||||
result = polygonIndex.getMatchesWithTriggers(input);
|
||||
} else {
|
||||
result = pointIndex.getMatchesWithTriggers(input);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/** An expression/value pair with unique ID to store whether we evaluated it yet. */
|
||||
private static record EntryWithId<T>(T result, Expression expression, int id) {}
|
||||
|
||||
/**
|
||||
* An {@code expression} to evaluate on input elements and {@code result} value to return when the element matches.
|
||||
*/
|
||||
public static record Entry<T>(T result, Expression expression) {}
|
||||
|
||||
/** The result when an expression matches, along with the input element tag {@code keys} that triggered the match. */
|
||||
public static record Match<T>(T match, List<String> keys) {}
|
||||
}
|
|
@ -38,9 +38,12 @@ public class GeoUtils {
|
|||
public static final PrecisionModel TILE_PRECISON = new PrecisionModel(4096d / 256d);
|
||||
public static final GeometryFactory JTS_FACTORY = new GeometryFactory(PackedCoordinateSequenceFactory.DOUBLE_FACTORY);
|
||||
public static final WKBReader WKB_READER = new WKBReader(JTS_FACTORY);
|
||||
public static final Geometry EMPTY_GEOMETRY = JTS_FACTORY.createGeometryCollection();
|
||||
public static final Point EMPTY_POINT = JTS_FACTORY.createPoint();
|
||||
public static final LineString EMPTY_LINE = JTS_FACTORY.createLineString();
|
||||
public static final Polygon EMPTY_POLYGON = JTS_FACTORY.createPolygon();
|
||||
private static final LineString[] EMPTY_LINE_STRING_ARRAY = new LineString[0];
|
||||
private static final Polygon[] EMPTY_POLYGON_ARRAY = new Polygon[0];
|
||||
private static final Coordinate[] EMPTY_COORD_ARRAY = new Coordinate[0];
|
||||
private static final Point[] EMPTY_POINT_ARRAY = new Point[0];
|
||||
private static final double WORLD_RADIUS_METERS = 6_378_137;
|
||||
public static final double WORLD_CIRCUMFERENCE_METERS = Math.PI * 2 * WORLD_RADIUS_METERS;
|
||||
|
|
|
@ -47,13 +47,13 @@ public abstract class SourceFeature implements WithTags {
|
|||
*
|
||||
* @param tags string key/value pairs associated with this element
|
||||
* @param source source name that profile can use to distinguish between elements from different data sources
|
||||
* @param sourceLayer layer name within {@code source} that profile can use to dinstinguish between different kinds
|
||||
* @param sourceLayer layer name within {@code source} that profile can use to distinguish between different kinds
|
||||
* of elements in a given source.
|
||||
* @param relationInfos relations that this element is contained within
|
||||
* @param id numeric ID of this feature within this source (i.e. an OSM element ID)
|
||||
*/
|
||||
protected SourceFeature(Map<String, Object> tags, String source, String sourceLayer,
|
||||
List<OsmReader.RelationMember<OsmRelationInfo>> relationInfos, long id) {
|
||||
List<OsmReader.RelationMember<OsmRelationInfo>> relationInfos, long id) {
|
||||
this.tags = tags;
|
||||
this.source = source;
|
||||
this.sourceLayer = sourceLayer;
|
||||
|
@ -61,6 +61,29 @@ public abstract class SourceFeature implements WithTags {
|
|||
this.id = id;
|
||||
}
|
||||
|
||||
// slight optimization: replace default implementation with direct access to the tags
|
||||
// map to get slightly improved performance when matching elements against expressions
|
||||
|
||||
@Override
|
||||
public Object getTag(String key) {
|
||||
return tags.get(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasTag(String key) {
|
||||
return tags.containsKey(key);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Object getTag(String key, Object defaultValue) {
|
||||
Object val = tags.get(key);
|
||||
if (val == null) {
|
||||
return defaultValue;
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> tags() {
|
||||
return tags;
|
||||
|
@ -91,25 +114,25 @@ public abstract class SourceFeature implements WithTags {
|
|||
/** Returns and caches {@link Geometry#getCentroid()} of this geometry in world web mercator coordinates. */
|
||||
public final Geometry centroid() throws GeometryException {
|
||||
return centroid != null ? centroid : (centroid =
|
||||
canBePolygon() ? polygon().getCentroid() :
|
||||
canBeLine() ? line().getCentroid() :
|
||||
worldGeometry().getCentroid());
|
||||
canBePolygon() ? polygon().getCentroid() :
|
||||
canBeLine() ? line().getCentroid() :
|
||||
worldGeometry().getCentroid());
|
||||
}
|
||||
|
||||
/** Returns and caches {@link Geometry#getInteriorPoint()} of this geometry in world web mercator coordinates. */
|
||||
public final Geometry pointOnSurface() throws GeometryException {
|
||||
return pointOnSurface != null ? pointOnSurface : (pointOnSurface =
|
||||
canBePolygon() ? polygon().getInteriorPoint() :
|
||||
canBeLine() ? line().getInteriorPoint() :
|
||||
worldGeometry().getInteriorPoint());
|
||||
canBePolygon() ? polygon().getInteriorPoint() :
|
||||
canBeLine() ? line().getInteriorPoint() :
|
||||
worldGeometry().getInteriorPoint());
|
||||
}
|
||||
|
||||
private Geometry computeCentroidIfConvex() throws GeometryException {
|
||||
if (!canBePolygon()) {
|
||||
return centroid();
|
||||
} else if (polygon() instanceof Polygon poly &&
|
||||
poly.getNumInteriorRing() == 0 &&
|
||||
GeoUtils.isConvex(poly.getExteriorRing())) {
|
||||
poly.getNumInteriorRing() == 0 &&
|
||||
GeoUtils.isConvex(poly.getExteriorRing())) {
|
||||
return centroid();
|
||||
} else { // multipolygon, polygon with holes, or concave polygon
|
||||
return pointOnSurface();
|
||||
|
@ -221,7 +244,7 @@ public abstract class SourceFeature implements WithTags {
|
|||
*/
|
||||
public double length() throws GeometryException {
|
||||
return Double.isNaN(length) ? (length =
|
||||
(isPoint() || canBePolygon() || canBeLine()) ? worldGeometry().getLength() : 0) : length;
|
||||
(isPoint() || canBePolygon() || canBeLine()) ? worldGeometry().getLength() : 0) : length;
|
||||
}
|
||||
|
||||
/** Returns true if this feature can be interpreted as a {@link Point} or {@link MultiPoint}. */
|
||||
|
@ -264,7 +287,7 @@ public abstract class SourceFeature implements WithTags {
|
|||
*/
|
||||
// TODO this should be in a specialized OSM subclass, not the generic superclass
|
||||
public <T extends OsmRelationInfo> List<OsmReader.RelationMember<T>> relationInfo(
|
||||
Class<T> relationInfoClass) {
|
||||
Class<T> relationInfoClass) {
|
||||
List<OsmReader.RelationMember<T>> result = null;
|
||||
if (relationInfos != null) {
|
||||
for (OsmReader.RelationMember<?> info : relationInfos) {
|
||||
|
@ -286,4 +309,8 @@ public abstract class SourceFeature implements WithTags {
|
|||
return id;
|
||||
}
|
||||
|
||||
/** Returns true if this element has any OSM relation info. */
|
||||
public boolean hasRelationInfo() {
|
||||
return relationInfos != null && !relationInfos.isEmpty();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@ public interface WithTags {
|
|||
}
|
||||
|
||||
default Object getTag(String key, Object defaultValue) {
|
||||
Object val = tags().get(key);
|
||||
Object val = getTag(key);
|
||||
if (val == null) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
@ -36,13 +36,13 @@ public interface WithTags {
|
|||
return value1.equals(actual) || value2.equals(actual);
|
||||
}
|
||||
|
||||
/** Returns the {@link Object#toString()} value for {@code key} or {code null} if not present. */
|
||||
/** Returns the {@link Object#toString()} value for {@code key} or {@code null} if not present. */
|
||||
default String getString(String key) {
|
||||
Object value = getTag(key);
|
||||
return value == null ? null : value.toString();
|
||||
}
|
||||
|
||||
/** Returns the {@link Object#toString()} value for {@code key} or {code defaultValue} if not present. */
|
||||
/** Returns the {@link Object#toString()} value for {@code key} or {@code defaultValue} if not present. */
|
||||
default String getString(String key, String defaultValue) {
|
||||
Object value = getTag(key, defaultValue);
|
||||
return value == null ? null : value.toString();
|
||||
|
|
|
@ -5,6 +5,7 @@ import java.time.Duration;
|
|||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.TreeMap;
|
||||
import org.apache.commons.text.StringEscapeUtils;
|
||||
|
||||
/**
|
||||
* Utilities for formatting values as strings.
|
||||
|
@ -106,4 +107,12 @@ public class Format {
|
|||
double seconds = duration.toNanos() * 1d / Duration.ofSeconds(1).toNanos();
|
||||
return formatDecimal(seconds < 1 ? seconds : Math.round(seconds)) + "s";
|
||||
}
|
||||
|
||||
/** Returns Java code that can re-create {@code string}: {@code null} if null, or {@code "contents"} if not empty. */
|
||||
public static String quote(String string) {
|
||||
if (string == null) {
|
||||
return "null";
|
||||
}
|
||||
return '"' + StringEscapeUtils.escapeJava(string) + '"';
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
package com.onthegomap.flatmap.expression;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.*;
|
||||
import static com.onthegomap.flatmap.expression.Expression.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.util.Set;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
@ -91,4 +93,12 @@ public class ExpressionTest {
|
|||
or(not(matchCD), matchCD, and(matchCD, matchCD))
|
||||
.replace(e -> Set.of(matchAB, matchBC).contains(e), matchCD));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContains() {
|
||||
assertTrue(matchCD.contains(e -> e.equals(matchCD)));
|
||||
assertTrue(or(not(matchCD)).contains(e -> e.equals(matchCD)));
|
||||
assertFalse(matchCD.contains(e -> e.equals(matchAB)));
|
||||
assertFalse(or(not(matchCD)).contains(e -> e.equals(matchAB)));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,253 @@
|
|||
package com.onthegomap.flatmap.expression;
|
||||
|
||||
import static com.onthegomap.flatmap.TestUtils.newLineString;
|
||||
import static com.onthegomap.flatmap.TestUtils.newPoint;
|
||||
import static com.onthegomap.flatmap.TestUtils.rectangle;
|
||||
import static com.onthegomap.flatmap.expression.Expression.*;
|
||||
import static com.onthegomap.flatmap.expression.MultiExpression.entry;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import com.onthegomap.flatmap.reader.SimpleFeature;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class MultiExpressionTest {
|
||||
|
||||
private static SourceFeature featureWithTags(String... tags) {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
for (int i = 0; i < tags.length; i += 2) {
|
||||
map.put(tags[i], tags[i + 1]);
|
||||
}
|
||||
return SimpleFeature.create(newPoint(0, 0), map);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmpty() {
|
||||
var index = MultiExpression.<String>of(List.of()).index();
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key", "value")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleElement() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", matchAny("key", "value"))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBlankStringTreatedAsNotMatch() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", matchAny("key", "value", ""))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags()));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("otherkey", "othervalue")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key", "no")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleMatchField() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", matchField("key"))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value2", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWildcard() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", matchAny("key", "%value%"))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "1value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "1value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "1value1", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleWildcardsMixedWithExacts() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", matchAny("key", "%value%", "other"))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "1value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "other")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "1value1", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key", "other", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAnd() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key1", "no", "key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key1", "val1", "key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key1", "val1")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOr() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "no", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "no")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key1", "no", "key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNot() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", and(
|
||||
matchAny("key1", "val1"),
|
||||
not(
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key2", "val3")));
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1", "key3", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMatchesMultiple() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)),
|
||||
entry("b", or(
|
||||
matchAny("key2", "val2"),
|
||||
matchAny("key3", "val3")
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(featureWithTags("key1", "val1")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(featureWithTags("key2", "val2")));
|
||||
assertSameElements(List.of("b"), index.getMatches(featureWithTags("key3", "val3")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(featureWithTags("key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(featureWithTags("key1", "val1", "key3", "val3")));
|
||||
assertSameElements(List.of(), index.getMatches(featureWithTags()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTracksMatchingKey() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)),
|
||||
entry("b", or(
|
||||
matchAny("key2", "val2"),
|
||||
matchAny("key3", "val3")
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of(new MultiExpression.Match<>(
|
||||
"a", List.of("key1")
|
||||
)), index.getMatchesWithTriggers(featureWithTags("key1", "val1")));
|
||||
assertSameElements(List.of(new MultiExpression.Match<>(
|
||||
"a", List.of("key2")
|
||||
), new MultiExpression.Match<>(
|
||||
"b", List.of("key2")
|
||||
)), index.getMatchesWithTriggers(featureWithTags("key2", "val2")));
|
||||
assertSameElements(List.of(new MultiExpression.Match<>(
|
||||
"b", List.of("key3")
|
||||
)), index.getMatchesWithTriggers(featureWithTags("key3", "val3")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTracksMatchingKeyFromCorrectPath() {
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("a", or(
|
||||
and(
|
||||
matchAny("key3", "val3"),
|
||||
matchAny("key2", "val2")
|
||||
),
|
||||
and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key3", "val3")
|
||||
)
|
||||
))
|
||||
)).index();
|
||||
assertSameElements(List.of(new MultiExpression.Match<>(
|
||||
"a", List.of("key1", "key3")
|
||||
)), index.getMatchesWithTriggers(featureWithTags("key1", "val1", "key3", "val3")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMatchDifferentTypes() {
|
||||
Expression polygonExpression = and(matchType("polygon"), matchField("field"));
|
||||
Expression linestringExpression = and(matchType("linestring"), matchField("field"));
|
||||
Expression pointExpression = and(matchType("point"), matchField("field"));
|
||||
Map<String, Object> map = Map.of("field", "value");
|
||||
SourceFeature point = SimpleFeature.create(newPoint(0, 0), map);
|
||||
SourceFeature linestring = SimpleFeature.create(newLineString(0, 0, 1, 1), map);
|
||||
SourceFeature polygon = SimpleFeature.create(rectangle(0, 1), map);
|
||||
var index = MultiExpression.of(List.of(
|
||||
entry("polygon", polygonExpression),
|
||||
entry("linestring", linestringExpression),
|
||||
entry("point", pointExpression)
|
||||
)).index();
|
||||
assertTrue(pointExpression.evaluate(point, new ArrayList<>()));
|
||||
assertTrue(linestringExpression.evaluate(linestring, new ArrayList<>()));
|
||||
assertTrue(polygonExpression.evaluate(polygon, new ArrayList<>()));
|
||||
assertEquals("point", index.getOrElse(point, null));
|
||||
assertEquals("linestring", index.getOrElse(linestring, null));
|
||||
assertEquals("polygon", index.getOrElse(polygon, null));
|
||||
}
|
||||
|
||||
private static <T> void assertSameElements(List<T> a, List<T> b) {
|
||||
assertEquals(
|
||||
a.stream().sorted(Comparator.comparing(Object::toString)).toList(),
|
||||
b.stream().sorted(Comparator.comparing(Object::toString)).toList()
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,85 +0,0 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import com.graphhopper.reader.ReaderElementUtils;
|
||||
import com.graphhopper.reader.ReaderNode;
|
||||
import com.graphhopper.reader.ReaderRelation;
|
||||
import com.graphhopper.reader.ReaderWay;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmInputFile;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
|
||||
public class BenchmarkMapping {
|
||||
|
||||
public static void main(String[] args) throws IOException {
|
||||
var profile = new OpenMapTilesProfile(Translations.nullProvider(List.of()), FlatmapConfig.defaults(),
|
||||
Stats.inMemory());
|
||||
var random = new Random(0);
|
||||
var input = new OsmInputFile(Path.of("data", "sources", "north-america_us_massachusetts.pbf"));
|
||||
List<SourceFeature> inputs = new ArrayList<>();
|
||||
input.readTo(readerElem -> {
|
||||
if (random.nextDouble() < 0.25) {
|
||||
if (inputs.size() % 1_000_000 == 0) {
|
||||
System.err.println(inputs.size());
|
||||
}
|
||||
var props = ReaderElementUtils.getTags(readerElem);
|
||||
inputs.add(new SourceFeature(props, "", "", null, readerElem.getId()) {
|
||||
@Override
|
||||
public Geometry latLonGeometry() throws GeometryException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Geometry worldGeometry() throws GeometryException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isPoint() {
|
||||
return readerElem instanceof ReaderNode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canBePolygon() {
|
||||
return readerElem instanceof ReaderWay || readerElem instanceof ReaderRelation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canBeLine() {
|
||||
return readerElem instanceof ReaderWay;
|
||||
}
|
||||
});
|
||||
}
|
||||
}, "reader", 3);
|
||||
|
||||
System.err.println("read " + inputs.size() + " elems");
|
||||
|
||||
long startStart = System.nanoTime();
|
||||
long count = -1;
|
||||
while (true) {
|
||||
count++;
|
||||
long start = System.nanoTime();
|
||||
int i = 0;
|
||||
for (SourceFeature in : inputs) {
|
||||
i += profile.getTableMatches(in).size();
|
||||
}
|
||||
if (count == 0) {
|
||||
startStart = System.nanoTime();
|
||||
System.err.println("finished warmup");
|
||||
} else {
|
||||
System.err.println(
|
||||
"took:" + Duration.ofNanos(System.nanoTime() - start).toMillis() + "ms found:" + i + " avg:" + (Duration
|
||||
.ofNanos(System.nanoTime() - startStart).toMillis() / count) + "ms");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,220 +0,0 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public interface Expression {
|
||||
|
||||
static And and(Expression... children) {
|
||||
return and(List.of(children));
|
||||
}
|
||||
|
||||
static And and(List<Expression> children) {
|
||||
return new And(children);
|
||||
}
|
||||
|
||||
static Or or(Expression... children) {
|
||||
return or(List.of(children));
|
||||
}
|
||||
|
||||
static Or or(List<Expression> children) {
|
||||
return new Or(children);
|
||||
}
|
||||
|
||||
static Not not(Expression child) {
|
||||
return new Not(child);
|
||||
}
|
||||
|
||||
static MatchAny matchAny(String field, String... values) {
|
||||
return matchAny(field, List.of(values));
|
||||
}
|
||||
|
||||
static MatchAny matchAny(String field, List<String> values) {
|
||||
return new MatchAny(field, values);
|
||||
}
|
||||
|
||||
static MatchField matchField(String field) {
|
||||
return new MatchField(field);
|
||||
}
|
||||
|
||||
Set<String> supportedTypes = Set.of("linestring", "point", "polygon", "relation_member");
|
||||
|
||||
static MatchType matchType(String type) {
|
||||
if (!supportedTypes.contains(type)) {
|
||||
throw new IllegalArgumentException("Unsupported type: " + type);
|
||||
}
|
||||
return new MatchType(type);
|
||||
}
|
||||
|
||||
private static String listToString(List<?> items) {
|
||||
return items.stream().map(Object::toString).collect(Collectors.joining(", "));
|
||||
}
|
||||
|
||||
default Expression simplify() {
|
||||
return simplify(this);
|
||||
}
|
||||
|
||||
private static Expression simplifyOnce(Expression expression) {
|
||||
if (expression instanceof Not not) {
|
||||
if (not.child instanceof Or or) {
|
||||
return and(or.children.stream().<Expression>map(Expression::not).toList());
|
||||
} else if (not.child instanceof And and) {
|
||||
return or(and.children.stream().<Expression>map(Expression::not).toList());
|
||||
} else if (not.child instanceof Not not2) {
|
||||
return not2.child;
|
||||
} else if (not.child == TRUE) {
|
||||
return FALSE;
|
||||
} else if (not.child == FALSE) {
|
||||
return TRUE;
|
||||
}
|
||||
return not;
|
||||
} else if (expression instanceof Or or) {
|
||||
if (or.children.isEmpty()) {
|
||||
return FALSE;
|
||||
}
|
||||
if (or.children.size() == 1) {
|
||||
return simplifyOnce(or.children.get(0));
|
||||
}
|
||||
if (or.children.contains(TRUE)) {
|
||||
return TRUE;
|
||||
}
|
||||
return or(or.children.stream()
|
||||
// hoist children
|
||||
.flatMap(child -> child instanceof Or childOr ? childOr.children.stream() : Stream.of(child))
|
||||
.filter(child -> child != FALSE)
|
||||
.map(Expression::simplifyOnce).toList());
|
||||
} else if (expression instanceof And and) {
|
||||
if (and.children.isEmpty()) {
|
||||
return FALSE;
|
||||
}
|
||||
if (and.children.size() == 1) {
|
||||
return simplifyOnce(and.children.get(0));
|
||||
}
|
||||
if (and.children.contains(FALSE)) {
|
||||
return FALSE;
|
||||
}
|
||||
return and(and.children.stream()
|
||||
// hoist children
|
||||
.flatMap(child -> child instanceof And childAnd ? childAnd.children.stream() : Stream.of(child))
|
||||
.filter(child -> child != TRUE)
|
||||
.map(Expression::simplifyOnce).toList());
|
||||
} else {
|
||||
return expression;
|
||||
}
|
||||
}
|
||||
|
||||
private static Expression simplify(Expression initial) {
|
||||
Expression simplified = initial;
|
||||
Set<Expression> seen = new HashSet<>();
|
||||
seen.add(simplified);
|
||||
while (true) {
|
||||
simplified = simplifyOnce(simplified);
|
||||
if (seen.contains(simplified) || seen.size() > 100) {
|
||||
return simplified;
|
||||
}
|
||||
seen.add(simplified);
|
||||
}
|
||||
}
|
||||
|
||||
default Expression replace(Expression a, Expression b) {
|
||||
return replace(a::equals, b);
|
||||
}
|
||||
|
||||
default Expression replace(Predicate<Expression> replace, Expression b) {
|
||||
if (replace.test(this)) {
|
||||
return b;
|
||||
} else if (this instanceof Not not) {
|
||||
return new Not(not.child.replace(replace, b));
|
||||
} else if (this instanceof Or or) {
|
||||
return new Or(or.children.stream().map(child -> child.replace(replace, b)).toList());
|
||||
} else if (this instanceof And and) {
|
||||
return new And(and.children.stream().map(child -> child.replace(replace, b)).toList());
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
record And(List<Expression> children) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "and(" + listToString(children) + ")";
|
||||
}
|
||||
}
|
||||
|
||||
record Or(List<Expression> children) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "or(" + listToString(children) + ")";
|
||||
}
|
||||
}
|
||||
|
||||
record Not(Expression child) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "not(" + child + ")";
|
||||
}
|
||||
}
|
||||
|
||||
Expression TRUE = new Expression() {
|
||||
public String toString() {
|
||||
return "TRUE";
|
||||
}
|
||||
};
|
||||
|
||||
Expression FALSE = new Expression() {
|
||||
public String toString() {
|
||||
return "FALSE";
|
||||
}
|
||||
};
|
||||
|
||||
record MatchAny(
|
||||
String field, List<String> values, Set<String> exactMatches, List<String> wildcards, boolean matchWhenMissing
|
||||
) implements Expression {
|
||||
|
||||
private static final Pattern containsPattern = Pattern.compile("^%(.*)%$");
|
||||
|
||||
MatchAny(String field, List<String> values) {
|
||||
this(field, values,
|
||||
values.stream().filter(v -> !v.contains("%")).collect(Collectors.toSet()),
|
||||
values.stream().filter(v -> v.contains("%")).map(val -> {
|
||||
var matcher = containsPattern.matcher(val);
|
||||
if (!matcher.matches()) {
|
||||
throw new IllegalArgumentException("wildcards must start/end with %: " + val);
|
||||
}
|
||||
return matcher.group(1);
|
||||
}).toList(),
|
||||
values.contains("")
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchAny(" + Generate.quote(field) + ", " + values.stream().map(Generate::quote)
|
||||
.collect(Collectors.joining(", ")) + ")";
|
||||
}
|
||||
}
|
||||
|
||||
record MatchField(String field) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchField(" + Generate.quote(field) + ")";
|
||||
}
|
||||
}
|
||||
|
||||
record MatchType(String type) implements Expression {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "matchType(" + Generate.quote(type) + ")";
|
||||
}
|
||||
}
|
||||
}
|
Plik diff jest za duży
Load Diff
|
@ -1,9 +1,8 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
public interface Layer {
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
|
||||
default void release() {
|
||||
}
|
||||
|
||||
String name();
|
||||
}
|
||||
/** Interface for all vector tile layer implementations that {@link OpenMapTilesProfile} delegates to. */
|
||||
public interface Layer extends
|
||||
Profile.ForwardingProfile.Handler,
|
||||
Profile.ForwardingProfile.HandlerForLayer {}
|
||||
|
|
|
@ -1,235 +0,0 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.BitSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public record MultiExpression<T>(Map<T, Expression> expressions) {
|
||||
|
||||
public static <T> MultiExpression<T> of(Map<T, Expression> expressions) {
|
||||
return new MultiExpression<>(expressions);
|
||||
}
|
||||
|
||||
public MultiExpressionIndex<T> index() {
|
||||
return new MultiExpressionIndex<>(this);
|
||||
}
|
||||
|
||||
public MultiExpression<T> map(Function<Expression, Expression> mapper) {
|
||||
return new MultiExpression<>(
|
||||
expressions.entrySet().stream()
|
||||
.map(entry -> Map.entry(entry.getKey(), mapper.apply(entry.getValue())))
|
||||
.filter(entry -> entry.getValue() != Expression.FALSE)
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
|
||||
);
|
||||
}
|
||||
|
||||
public MultiExpression<T> replace(Predicate<Expression> test, Expression b) {
|
||||
return map(e -> e.replace(test, b));
|
||||
}
|
||||
|
||||
public MultiExpression<T> replace(Expression a, Expression b) {
|
||||
return map(e -> e.replace(a, b));
|
||||
}
|
||||
|
||||
public MultiExpression<T> simplify() {
|
||||
return map(e -> e.simplify());
|
||||
}
|
||||
|
||||
public MultiExpression<T> filterKeys(Predicate<T> accept) {
|
||||
return new MultiExpression<>(
|
||||
expressions.entrySet().stream()
|
||||
.filter(entry -> accept.test(entry.getKey()))
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
|
||||
);
|
||||
}
|
||||
|
||||
public static class MultiExpressionIndex<T> {
|
||||
|
||||
private static final AtomicInteger ids = new AtomicInteger(0);
|
||||
// index from source feature tag key to the expressions that include it so that
|
||||
// we can limit the number of expressions we need to evaluate for each input
|
||||
// and improve matching performance by ~5x
|
||||
private final Map<String, List<ExpressionValue<T>>> keyToExpressionsMap;
|
||||
// same thing as a list (optimized for iteration when # source feature keys > # tags we care about)
|
||||
private final List<Map.Entry<String, List<ExpressionValue<T>>>> keyToExpressionsList;
|
||||
private final List<Map.Entry<String, List<ExpressionValue<T>>>> missingKeyToExpressionList;
|
||||
|
||||
private MultiExpressionIndex(MultiExpression<T> expressions) {
|
||||
Map<String, Set<ExpressionValue<T>>> keyToExpressions = new HashMap<>();
|
||||
Map<String, Set<ExpressionValue<T>>> missingKeyToExpressions = new HashMap<>();
|
||||
for (var entry : expressions.expressions.entrySet()) {
|
||||
T result = entry.getKey();
|
||||
Expression exp = entry.getValue();
|
||||
ExpressionValue<T> expressionValue = new ExpressionValue<>(exp, result);
|
||||
getRelevantKeys(exp, key -> keyToExpressions.computeIfAbsent(key, k -> new HashSet<>()).add(expressionValue));
|
||||
getRelevantMissingKeys(exp,
|
||||
key -> missingKeyToExpressions.computeIfAbsent(key, k -> new HashSet<>()).add(expressionValue));
|
||||
}
|
||||
keyToExpressionsMap = new HashMap<>();
|
||||
keyToExpressions.forEach((key, value) -> keyToExpressionsMap.put(key, value.stream().toList()));
|
||||
keyToExpressionsList = keyToExpressionsMap.entrySet().stream().toList();
|
||||
missingKeyToExpressionList = missingKeyToExpressions.entrySet().stream()
|
||||
.map(entry -> Map.entry(entry.getKey(), entry.getValue().stream().toList())).toList();
|
||||
}
|
||||
|
||||
private static void getRelevantKeys(Expression exp, Consumer<String> acceptKey) {
|
||||
if (exp instanceof Expression.And and) {
|
||||
and.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Or or) {
|
||||
or.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Not) {
|
||||
// ignore anything that's purely used as a filter
|
||||
} else if (exp instanceof Expression.MatchField field) {
|
||||
acceptKey.accept(field.field());
|
||||
} else if (exp instanceof Expression.MatchAny any) {
|
||||
acceptKey.accept(any.field());
|
||||
}
|
||||
}
|
||||
|
||||
private static void getRelevantMissingKeys(Expression exp, Consumer<String> acceptKey) {
|
||||
if (exp instanceof Expression.And and) {
|
||||
and.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Or or) {
|
||||
or.children().forEach(child -> getRelevantKeys(child, acceptKey));
|
||||
} else if (exp instanceof Expression.Not) {
|
||||
// ignore anything that's purely used as a filter
|
||||
} else if (exp instanceof Expression.MatchAny any && any.matchWhenMissing()) {
|
||||
acceptKey.accept(any.field());
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean evaluate(Expression expr, Map<String, Object> input, List<String> matchKeys) {
|
||||
// optimization: since this is evaluated for every input element, use
|
||||
// simple for loops instead of enhanced to avoid overhead of generating the
|
||||
// iterator (~30% speedup)
|
||||
|
||||
if (expr == Expression.FALSE) {
|
||||
return false;
|
||||
} else if (expr == Expression.TRUE) {
|
||||
return true;
|
||||
} else if (expr instanceof Expression.MatchAny match) {
|
||||
Object value = input.get(match.field());
|
||||
if (value == null) {
|
||||
return match.matchWhenMissing();
|
||||
} else {
|
||||
String str = value.toString();
|
||||
if (match.exactMatches().contains(str)) {
|
||||
matchKeys.add(match.field());
|
||||
return true;
|
||||
}
|
||||
List<String> wildcards = match.wildcards();
|
||||
for (int i = 0; i < wildcards.size(); i++) {
|
||||
var target = wildcards.get(i);
|
||||
if (str.contains(target)) {
|
||||
matchKeys.add(match.field());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
} else if (expr instanceof Expression.MatchField match) {
|
||||
matchKeys.add(match.field());
|
||||
return input.containsKey(match.field());
|
||||
} else if (expr instanceof Expression.Or or) {
|
||||
List<Expression> children = or.children();
|
||||
for (int i = 0; i < children.size(); i++) {
|
||||
Expression child = children.get(i);
|
||||
if (evaluate(child, input, matchKeys)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
} else if (expr instanceof Expression.And and) {
|
||||
List<Expression> children = and.children();
|
||||
for (int i = 0; i < children.size(); i++) {
|
||||
Expression child = children.get(i);
|
||||
if (!evaluate(child, input, matchKeys)) {
|
||||
matchKeys.clear();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
} else if (expr instanceof Expression.Not not) {
|
||||
return !evaluate(not.child(), input, new ArrayList<>());
|
||||
} else {
|
||||
throw new IllegalArgumentException("Unrecognized expression: " + expr);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean matches(Map<String, Object> input) {
|
||||
return !getMatchesWithTriggers(input).isEmpty();
|
||||
}
|
||||
|
||||
public static record MatchWithTriggers<T>(T match, List<String> keys) {}
|
||||
|
||||
public List<MatchWithTriggers<T>> getMatchesWithTriggers(Map<String, Object> input) {
|
||||
List<MatchWithTriggers<T>> result = new ArrayList<>();
|
||||
BitSet visited = new BitSet(ids.get());
|
||||
for (int i = 0; i < missingKeyToExpressionList.size(); i++) {
|
||||
var entry = missingKeyToExpressionList.get(i);
|
||||
if (!input.containsKey(entry.getKey())) {
|
||||
visitExpression(input, result, visited, entry.getValue());
|
||||
}
|
||||
}
|
||||
if (input.size() < keyToExpressionsMap.size()) {
|
||||
for (String inputKey : input.keySet()) {
|
||||
visitExpression(input, result, visited, keyToExpressionsMap.get(inputKey));
|
||||
}
|
||||
} else {
|
||||
// optimization: since this is evaluated for every element, generating an iterator
|
||||
// for enhanced for loop becomes a bottleneck so use simple for loop over list instead
|
||||
for (int i = 0; i < keyToExpressionsList.size(); i++) {
|
||||
var entry = keyToExpressionsList.get(i);
|
||||
if (input.containsKey(entry.getKey())) {
|
||||
visitExpression(input, result, visited, entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public List<T> getMatches(Map<String, Object> input) {
|
||||
List<MatchWithTriggers<T>> matches = getMatchesWithTriggers(input);
|
||||
return matches.stream().map(d -> d.match).toList();
|
||||
}
|
||||
|
||||
public T getOrElse(Map<String, Object> input, T defaultValue) {
|
||||
List<T> matches = getMatches(input);
|
||||
return matches.isEmpty() ? defaultValue : matches.get(0);
|
||||
}
|
||||
|
||||
private void visitExpression(Map<String, Object> input, List<MatchWithTriggers<T>> result, BitSet visited,
|
||||
List<ExpressionValue<T>> expressionValues) {
|
||||
if (expressionValues != null) {
|
||||
// optimization: since this is evaluated for every element, generating an iterator
|
||||
// for enhanced for loop becomes a bottleneck so use simple for loop over list instead
|
||||
for (int i = 0; i < expressionValues.size(); i++) {
|
||||
var expressionValue = expressionValues.get(i);
|
||||
if (!visited.get(expressionValue.id)) {
|
||||
visited.set(expressionValue.id);
|
||||
List<String> matchKeys = new ArrayList<>();
|
||||
if (evaluate(expressionValue.exp(), input, matchKeys)) {
|
||||
result.add(new MatchWithTriggers<>(expressionValue.result, matchKeys));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static record ExpressionValue<T>(Expression exp, T result, int id) {
|
||||
|
||||
ExpressionValue(Expression exp, T result) {
|
||||
this(exp, result, ids.getAndIncrement());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -4,23 +4,35 @@ import com.onthegomap.flatmap.FlatmapRunner;
|
|||
import com.onthegomap.flatmap.config.Arguments;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import java.nio.file.Path;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Main entrypoint for generating a map using the OpenMapTiles schema.
|
||||
*/
|
||||
public class OpenMapTilesMain {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(OpenMapTilesMain.class);
|
||||
private static final Path sourcesDir = Path.of("data", "sources");
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
run(Arguments.fromArgsOrConfigFile(args));
|
||||
}
|
||||
|
||||
static void run(Arguments arguments) throws Exception {
|
||||
Path dataDir = Path.of("data");
|
||||
Path sourcesDir = dataDir.resolve("sources");
|
||||
// use --area=... argument, AREA=... env var or area=... in config to set the region of the world to use
|
||||
// will be ignored if osm_path or osm_url are set
|
||||
String area = arguments.getString(
|
||||
"area",
|
||||
"name of the geofabrik extract to download if osm_url/osm_path not specified (i.e. 'monaco' 'rhode island' or 'australia')",
|
||||
"monaco"
|
||||
);
|
||||
|
||||
FlatmapRunner.create(arguments)
|
||||
.setDefaultLanguages(OpenMapTilesSchema.LANGUAGES)
|
||||
.fetchWikidataNameTranslations(sourcesDir.resolve("wikidata_names.json"))
|
||||
// defer creation of the profile because it depends on data from the runner
|
||||
.setProfile(OpenMapTilesProfile::new)
|
||||
// override any of these with arguments: --osm_path=... or --osm_url=...
|
||||
// or OSM_PATH=... OSM_URL=... environmental argument
|
||||
// or osm_path=... osm_url=... in a config file
|
||||
.addShapefileSource("EPSG:3857", OpenMapTilesProfile.LAKE_CENTERLINE_SOURCE,
|
||||
sourcesDir.resolve("lake_centerline.shp.zip"),
|
||||
"https://github.com/lukasmartinelli/osm-lakelines/releases/download/v0.9/lake_centerline.shp.zip")
|
||||
|
@ -29,10 +41,12 @@ public class OpenMapTilesMain {
|
|||
"https://osmdata.openstreetmap.de/download/water-polygons-split-3857.zip")
|
||||
.addNaturalEarthSource(OpenMapTilesProfile.NATURAL_EARTH_SOURCE,
|
||||
sourcesDir.resolve("natural_earth_vector.sqlite.zip"),
|
||||
"https://naturalearth.s3.amazonaws.com/packages/natural_earth_vector.sqlite.zip")
|
||||
// TODO: "https://naciscdn.org/naturalearth/packages/natural_earth_vector.sqlite.zip")
|
||||
.addOsmSource(OpenMapTilesProfile.OSM_SOURCE, sourcesDir.resolve("input.osm.pbf"), "geofabrik:monaco")
|
||||
.setOutput("mbtiles", Path.of("data", "output.mbtiles"))
|
||||
"https://naturalearth.s3.amazonaws.com/packages/natural_earth_vector.sqlite.zip") // TODO: go back to "https://naciscdn.org/naturalearth/packages/natural_earth_vector.sqlite.zip")
|
||||
.addOsmSource(OpenMapTilesProfile.OSM_SOURCE,
|
||||
sourcesDir.resolve(area.replaceAll("[^a-zA-Z]+", "_") + ".osm.pbf"),
|
||||
"geofabrik:" + area)
|
||||
// override with --mbtiles=... argument or MBTILES=... env var or mbtiles=... in a config file
|
||||
.setOutput("mbtiles", dataDir.resolve("output.mbtiles"))
|
||||
.run();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,73 +1,55 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.FALSE;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.TRUE;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.matchType;
|
||||
import static com.onthegomap.flatmap.geo.GeoUtils.EMPTY_LINE;
|
||||
import static com.onthegomap.flatmap.geo.GeoUtils.EMPTY_POINT;
|
||||
import static com.onthegomap.flatmap.geo.GeoUtils.EMPTY_POLYGON;
|
||||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.FlatmapRunner;
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.reader.SimpleFeature;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmElement;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmRelationInfo;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class OpenMapTilesProfile implements Profile {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(OpenMapTilesProfile.class);
|
||||
/**
|
||||
* Delegates the logic for generating a map using OpenMapTiles vector schema to individual implementations in the {@code
|
||||
* layers} package.
|
||||
* <p>
|
||||
* Layer implementations extend these interfaces to subscribe to elements from different sources:
|
||||
* <ul>
|
||||
* <li>{@link LakeCenterlineProcessor}</li>
|
||||
* <li>{@link NaturalEarthProcessor}</li>
|
||||
* <li>{@link OsmWaterPolygonProcessor}</li>
|
||||
* <li>{@link OsmAllProcessor} to process every OSM feature</li>
|
||||
* <li>{@link OsmRelationPreprocessor} to process every OSM relation during first pass through OSM file</li>
|
||||
* <li>A {@link Tables.RowHandler} implementation in {@code Tables.java} to process input features filtered and parsed
|
||||
* according to the imposm3 mappings defined in the OpenMapTiles schema. Each element corresponds to a row in the
|
||||
* table that imposm3 would have generated, with generated methods for accessing the data that would have been in each
|
||||
* column</li>
|
||||
* </ul>
|
||||
* Layers can also subscribe to notifications when we finished processing an input source by implementing
|
||||
* {@link FinishHandler} or post-process features in that layer before rendering the output tile by implementing
|
||||
* {@link FeaturePostProcessor}.
|
||||
*/
|
||||
public class OpenMapTilesProfile extends Profile.ForwardingProfile {
|
||||
|
||||
// IDs used in stats and logs for each input source, as well as argument/config file overrides to source locations
|
||||
public static final String LAKE_CENTERLINE_SOURCE = "lake_centerlines";
|
||||
public static final String WATER_POLYGON_SOURCE = "water_polygons";
|
||||
public static final String NATURAL_EARTH_SOURCE = "natural_earth";
|
||||
public static final String OSM_SOURCE = "osm";
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> osmPointMappings;
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> osmLineMappings;
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> osmPolygonMappings;
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> wikidataOsmPointMappings;
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> wikidataOsmLineMappings;
|
||||
private final MultiExpression.MultiExpressionIndex<Tables.Constructor> wikidataOsmPolygonMappings;
|
||||
private final List<Layer> layers;
|
||||
private final Map<Class<? extends Tables.Row>, List<Tables.RowHandler<Tables.Row>>> osmDispatchMap;
|
||||
private final Map<String, FeaturePostProcessor> postProcessors;
|
||||
private final List<NaturalEarthProcessor> naturalEarthProcessors;
|
||||
private final List<OsmWaterPolygonProcessor> osmWaterProcessors;
|
||||
private final List<LakeCenterlineProcessor> lakeCenterlineProcessors;
|
||||
private final List<OsmAllProcessor> osmAllProcessors;
|
||||
private final List<OsmRelationPreprocessor> osmRelationPreprocessors;
|
||||
private final List<FinishHandler> finishHandlers;
|
||||
private final Map<Class<? extends Tables.Row>, Set<Class<?>>> osmClassHandlerMap;
|
||||
|
||||
private MultiExpression.MultiExpressionIndex<Tables.Constructor> indexForType(String type, boolean requireWikidata) {
|
||||
return Tables.MAPPINGS
|
||||
.filterKeys(constructor -> {
|
||||
// exclude any mapping that generates a class we don't have a handler for
|
||||
var clz = constructor.create(SimpleFeature.empty(), "").getClass();
|
||||
var handlers = osmClassHandlerMap.getOrDefault(clz, Set.of()).stream();
|
||||
if (requireWikidata) {
|
||||
handlers = handlers.filter(handler -> !IgnoreWikidata.class.isAssignableFrom(handler));
|
||||
}
|
||||
return handlers.findAny().isPresent();
|
||||
})
|
||||
.replace(matchType(type), TRUE)
|
||||
.replace(e -> e instanceof Expression.MatchType, FALSE)
|
||||
.simplify()
|
||||
.index();
|
||||
}
|
||||
/** Index to efficiently find the imposm3 "table row" constructor from an OSM element based on its tags. */
|
||||
private final MultiExpression.Index<RowDispatch> osmMappings;
|
||||
/** Index variant that filters out any table only used by layers that implement IgnoreWikidata class. */
|
||||
private final MultiExpression.Index<Boolean> wikidataMappings;
|
||||
|
||||
public OpenMapTilesProfile(FlatmapRunner runner) {
|
||||
this(runner.translations(), runner.config(), runner.stats());
|
||||
|
@ -76,214 +58,91 @@ public class OpenMapTilesProfile implements Profile {
|
|||
public OpenMapTilesProfile(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
List<String> onlyLayers = config.arguments().getList("only_layers", "Include only certain layers", List.of());
|
||||
List<String> excludeLayers = config.arguments().getList("exclude_layers", "Exclude certain layers", List.of());
|
||||
this.layers = OpenMapTilesSchema.createInstances(translations, config, stats)
|
||||
.stream()
|
||||
.filter(l -> (onlyLayers.isEmpty() || onlyLayers.contains(l.name())) && !excludeLayers.contains(l.name()))
|
||||
.toList();
|
||||
osmDispatchMap = new HashMap<>();
|
||||
Tables.generateDispatchMap(layers).forEach((clazz, handlers) -> {
|
||||
osmDispatchMap.put(clazz, handlers.stream().map(handler -> {
|
||||
@SuppressWarnings("unchecked") Tables.RowHandler<Tables.Row> rawHandler = (Tables.RowHandler<Tables.Row>) handler;
|
||||
return rawHandler;
|
||||
}).toList());
|
||||
});
|
||||
osmClassHandlerMap = Tables.generateHandlerClassMap(layers);
|
||||
this.osmPointMappings = indexForType("point", false);
|
||||
this.osmLineMappings = indexForType("linestring", false);
|
||||
this.osmPolygonMappings = indexForType("polygon", false);
|
||||
this.wikidataOsmPointMappings = indexForType("point", true);
|
||||
this.wikidataOsmLineMappings = indexForType("linestring", true);
|
||||
this.wikidataOsmPolygonMappings = indexForType("polygon", true);
|
||||
postProcessors = new HashMap<>();
|
||||
osmAllProcessors = new ArrayList<>();
|
||||
lakeCenterlineProcessors = new ArrayList<>();
|
||||
naturalEarthProcessors = new ArrayList<>();
|
||||
osmWaterProcessors = new ArrayList<>();
|
||||
osmRelationPreprocessors = new ArrayList<>();
|
||||
finishHandlers = new ArrayList<>();
|
||||
for (Layer layer : layers) {
|
||||
if (layer instanceof FeaturePostProcessor postProcessor) {
|
||||
postProcessors.put(layer.name(), postProcessor);
|
||||
}
|
||||
if (layer instanceof OsmAllProcessor processor) {
|
||||
osmAllProcessors.add(processor);
|
||||
}
|
||||
if (layer instanceof OsmWaterPolygonProcessor processor) {
|
||||
osmWaterProcessors.add(processor);
|
||||
}
|
||||
if (layer instanceof LakeCenterlineProcessor processor) {
|
||||
lakeCenterlineProcessors.add(processor);
|
||||
}
|
||||
if (layer instanceof NaturalEarthProcessor processor) {
|
||||
naturalEarthProcessors.add(processor);
|
||||
}
|
||||
if (layer instanceof OsmRelationPreprocessor processor) {
|
||||
osmRelationPreprocessors.add(processor);
|
||||
}
|
||||
if (layer instanceof FinishHandler processor) {
|
||||
finishHandlers.add(processor);
|
||||
|
||||
// register release/finish/feature postprocessor/osm relationship handler methods...
|
||||
List<Handler> layers = new ArrayList<>();
|
||||
for (Layer layer : OpenMapTilesSchema.createInstances(translations, config, stats)) {
|
||||
if ((onlyLayers.isEmpty() || onlyLayers.contains(layer.name())) && !excludeLayers.contains(layer.name())) {
|
||||
layers.add(layer);
|
||||
registerHandler(layer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
layers.forEach(Layer::release);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcessLayerFeatures(String layer, int zoom,
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
FeaturePostProcessor postProcesor = postProcessors.get(layer);
|
||||
List<VectorTile.Feature> result = null;
|
||||
if (postProcesor != null) {
|
||||
result = postProcesor.postProcess(zoom, items);
|
||||
}
|
||||
return result == null ? items : result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OsmRelationInfo> preprocessOsmRelation(OsmElement.Relation relation) {
|
||||
List<OsmRelationInfo> result = null;
|
||||
for (int i = 0; i < osmRelationPreprocessors.size(); i++) {
|
||||
List<OsmRelationInfo> thisResult = osmRelationPreprocessors.get(i)
|
||||
.preprocessOsmRelation(relation);
|
||||
if (thisResult != null) {
|
||||
if (result == null) {
|
||||
result = new ArrayList<>(thisResult);
|
||||
} else {
|
||||
result.addAll(thisResult);
|
||||
}
|
||||
// register per-source input element handlers
|
||||
for (Handler handler : layers) {
|
||||
if (handler instanceof NaturalEarthProcessor processor) {
|
||||
registerSourceHandler(NATURAL_EARTH_SOURCE,
|
||||
(source, features) -> processor.processNaturalEarth(source.getSourceLayer(), source, features));
|
||||
}
|
||||
if (handler instanceof OsmWaterPolygonProcessor processor) {
|
||||
registerSourceHandler(WATER_POLYGON_SOURCE, processor::processOsmWater);
|
||||
}
|
||||
if (handler instanceof LakeCenterlineProcessor processor) {
|
||||
registerSourceHandler(LAKE_CENTERLINE_SOURCE, processor::processLakeCenterline);
|
||||
}
|
||||
if (handler instanceof OsmAllProcessor processor) {
|
||||
registerSourceHandler(OSM_SOURCE, processor::processAllOsm);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processFeature(SourceFeature sourceFeature, FeatureCollector features) {
|
||||
switch (sourceFeature.getSource()) {
|
||||
case OSM_SOURCE -> {
|
||||
for (var match : getTableMatches(sourceFeature)) {
|
||||
var row = match.match().create(sourceFeature, match.keys().get(0));
|
||||
var handlers = osmDispatchMap.get(row.getClass());
|
||||
if (handlers != null) {
|
||||
for (Tables.RowHandler<Tables.Row> handler : handlers) {
|
||||
handler.process(row, features);
|
||||
}
|
||||
// pre-process layers to build efficient indexes for matching OSM elements based on matching expressions
|
||||
// Map from imposm3 table row class to the layers that implement its handler.
|
||||
var handlerMap = Tables.generateDispatchMap(layers);
|
||||
osmMappings = Tables.MAPPINGS
|
||||
.mapResults(constructor -> {
|
||||
var handlers = handlerMap.getOrDefault(constructor.rowClass(), List.of()).stream()
|
||||
.map(r -> {
|
||||
@SuppressWarnings("unchecked") var handler = (Tables.RowHandler<Tables.Row>) r.handler();
|
||||
return handler;
|
||||
})
|
||||
.toList();
|
||||
return new RowDispatch(constructor.create(), handlers);
|
||||
}).simplify().index();
|
||||
wikidataMappings = Tables.MAPPINGS
|
||||
.mapResults(constructor ->
|
||||
handlerMap.getOrDefault(constructor.rowClass(), List.of()).stream()
|
||||
.anyMatch(handler -> !IgnoreWikidata.class.isAssignableFrom(handler.handlerClass()))
|
||||
).filterResults(b -> b).simplify().index();
|
||||
|
||||
// register a handler for all OSM elements that forwards to imposm3 "table row" handler methods
|
||||
// based on efficient pre-processed index
|
||||
if (!osmMappings.isEmpty()) {
|
||||
registerSourceHandler(OSM_SOURCE, (source, features) -> {
|
||||
for (var match : getTableMatches(source)) {
|
||||
RowDispatch rowDispatch = match.match();
|
||||
var row = rowDispatch.constructor.create(source, match.keys().get(0));
|
||||
for (Tables.RowHandler<Tables.Row> handler : rowDispatch.handlers()) {
|
||||
handler.process(row, features);
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < osmAllProcessors.size(); i++) {
|
||||
osmAllProcessors.get(i).processAllOsm(sourceFeature, features);
|
||||
}
|
||||
}
|
||||
case LAKE_CENTERLINE_SOURCE -> {
|
||||
for (LakeCenterlineProcessor lakeCenterlineProcessor : lakeCenterlineProcessors) {
|
||||
lakeCenterlineProcessor.processLakeCenterline(sourceFeature, features);
|
||||
}
|
||||
}
|
||||
case NATURAL_EARTH_SOURCE -> {
|
||||
for (NaturalEarthProcessor naturalEarthProcessor : naturalEarthProcessors) {
|
||||
naturalEarthProcessor.processNaturalEarth(sourceFeature.getSourceLayer(), sourceFeature, features);
|
||||
}
|
||||
}
|
||||
case WATER_POLYGON_SOURCE -> {
|
||||
for (OsmWaterPolygonProcessor osmWaterProcessor : osmWaterProcessors) {
|
||||
osmWaterProcessor.processOsmWater(sourceFeature, features);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public List<MultiExpression.MultiExpressionIndex.MatchWithTriggers<Tables.Constructor>> getTableMatches(
|
||||
SourceFeature sourceFeature) {
|
||||
List<MultiExpression.MultiExpressionIndex.MatchWithTriggers<Tables.Constructor>> result = null;
|
||||
if (sourceFeature.isPoint()) {
|
||||
result = osmPointMappings.getMatchesWithTriggers(sourceFeature.tags());
|
||||
} else {
|
||||
if (sourceFeature.canBeLine()) {
|
||||
result = osmLineMappings.getMatchesWithTriggers(sourceFeature.tags());
|
||||
if (sourceFeature.canBePolygon()) {
|
||||
result.addAll(osmPolygonMappings.getMatchesWithTriggers(sourceFeature.tags()));
|
||||
}
|
||||
} else if (sourceFeature.canBePolygon()) {
|
||||
result = osmPolygonMappings.getMatchesWithTriggers(sourceFeature.tags());
|
||||
}
|
||||
}
|
||||
return result == null ? List.of() : result;
|
||||
/** Returns the imposm3 table row constructors that match an input element's tags. */
|
||||
public List<MultiExpression.Match<RowDispatch>> getTableMatches(SourceFeature input) {
|
||||
return osmMappings.getMatchesWithTriggers(input);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> next) {
|
||||
for (var handler : finishHandlers) {
|
||||
handler.finish(sourceName, featureCollectors, next);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean caresAboutSource(String name) {
|
||||
return switch (name) {
|
||||
case NATURAL_EARTH_SOURCE -> !naturalEarthProcessors.isEmpty();
|
||||
case WATER_POLYGON_SOURCE -> !osmWaterProcessors.isEmpty();
|
||||
case OSM_SOURCE -> !osmAllProcessors.isEmpty() || !osmDispatchMap.isEmpty();
|
||||
case LAKE_CENTERLINE_SOURCE -> !lakeCenterlineProcessors.isEmpty();
|
||||
default -> true;
|
||||
};
|
||||
}
|
||||
|
||||
public interface NaturalEarthProcessor {
|
||||
|
||||
void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
public interface LakeCenterlineProcessor {
|
||||
|
||||
void processLakeCenterline(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
public interface OsmWaterPolygonProcessor {
|
||||
|
||||
void processOsmWater(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
public interface OsmAllProcessor {
|
||||
|
||||
void processAllOsm(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
public interface FinishHandler {
|
||||
|
||||
void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> next);
|
||||
}
|
||||
|
||||
public interface OsmRelationPreprocessor {
|
||||
|
||||
List<OsmRelationInfo> preprocessOsmRelation(OsmElement.Relation relation);
|
||||
}
|
||||
|
||||
public interface FeaturePostProcessor {
|
||||
|
||||
List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items)
|
||||
throws GeometryException;
|
||||
}
|
||||
|
||||
public interface IgnoreWikidata {}
|
||||
|
||||
@Override
|
||||
public boolean caresAboutWikidataTranslation(OsmElement elem) {
|
||||
var tags = elem.tags();
|
||||
if (elem instanceof OsmElement.Node) {
|
||||
return wikidataOsmPointMappings.matches(tags);
|
||||
return wikidataMappings.getOrElse(SimpleFeature.create(EMPTY_POINT, tags), false);
|
||||
} else if (elem instanceof OsmElement.Way) {
|
||||
return wikidataOsmPolygonMappings.matches(tags) || wikidataOsmLineMappings.matches(tags);
|
||||
return wikidataMappings.getOrElse(SimpleFeature.create(EMPTY_POLYGON, tags), false)
|
||||
|| wikidataMappings.getOrElse(SimpleFeature.create(EMPTY_LINE, tags), false);
|
||||
} else if (elem instanceof OsmElement.Relation) {
|
||||
return wikidataOsmPolygonMappings.matches(tags);
|
||||
return wikidataMappings.getOrElse(SimpleFeature.create(EMPTY_POLYGON, tags), false);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Pass-through constants generated from the OpenMapTiles vector schema
|
||||
*/
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return OpenMapTilesSchema.NAME;
|
||||
|
@ -303,4 +162,71 @@ public class OpenMapTilesProfile implements Profile {
|
|||
public String version() {
|
||||
return OpenMapTilesSchema.VERSION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Layers should implement this interface to subscribe to elements from <a href="https://www.naturalearthdata.com/">natural
|
||||
* earth</a>.
|
||||
*/
|
||||
public interface NaturalEarthProcessor {
|
||||
|
||||
/**
|
||||
* Process an element from {@code table} in the<a href="https://www.naturalearthdata.com/">natural earth
|
||||
* source</a>.
|
||||
*
|
||||
* @see Profile#processFeature(SourceFeature, FeatureCollector)
|
||||
*/
|
||||
void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
/**
|
||||
* Layers should implement this interface to subscribe to elements from <a href="https://github.com/lukasmartinelli/osm-lakelines">OSM
|
||||
* lake centerlines source</a>.
|
||||
*/
|
||||
public interface LakeCenterlineProcessor {
|
||||
|
||||
/**
|
||||
* Process an element from the <a href="https://github.com/lukasmartinelli/osm-lakelines">OSM lake centerlines
|
||||
* source</a>
|
||||
*
|
||||
* @see Profile#processFeature(SourceFeature, FeatureCollector)
|
||||
*/
|
||||
void processLakeCenterline(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
/**
|
||||
* Layers should implement this interface to subscribe to elements from <a href="https://osmdata.openstreetmap.de/data/water-polygons.html">OSM
|
||||
* water polygons source</a>.
|
||||
*/
|
||||
public interface OsmWaterPolygonProcessor {
|
||||
|
||||
/**
|
||||
* Process an element from the <a href="https://osmdata.openstreetmap.de/data/water-polygons.html">OSM water
|
||||
* polygons source</a>
|
||||
*
|
||||
* @see Profile#processFeature(SourceFeature, FeatureCollector)
|
||||
*/
|
||||
void processOsmWater(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
/** Layers should implement this interface to subscribe to every OSM element. */
|
||||
public interface OsmAllProcessor {
|
||||
|
||||
/**
|
||||
* Process an OSM element during the second pass through the OSM data file.
|
||||
*
|
||||
* @see Profile#processFeature(SourceFeature, FeatureCollector)
|
||||
*/
|
||||
void processAllOsm(SourceFeature feature, FeatureCollector features);
|
||||
}
|
||||
|
||||
/**
|
||||
* Layers should implement to indicate they do not need wikidata name translations to avoid downloading more
|
||||
* translations than are needed.
|
||||
*/
|
||||
public interface IgnoreWikidata {}
|
||||
|
||||
private static record RowDispatch(
|
||||
Tables.Constructor constructor,
|
||||
List<Tables.RowHandler<Tables.Row>> handlers
|
||||
) {}
|
||||
}
|
||||
|
|
Plik diff jest za duży
Load Diff
Plik diff jest za duży
Load Diff
|
@ -35,24 +35,29 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.Utils;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.Utils;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/aerodrome_label
|
||||
* Defines the logic for generating map elements in the {@code aerodrome_label} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/aerodrome_label">OpenMapTiles
|
||||
* aerodrome_layer sql files</a>.
|
||||
*/
|
||||
public class AerodromeLabel implements OpenMapTilesSchema.AerodromeLabel, Tables.OsmAerodromeLabelPoint.Handler {
|
||||
public class AerodromeLabel implements
|
||||
OpenMapTilesSchema.AerodromeLabel,
|
||||
Tables.OsmAerodromeLabelPoint.Handler {
|
||||
|
||||
private final MultiExpression.MultiExpressionIndex<String> classLookup;
|
||||
private final MultiExpression.Index<String> classLookup;
|
||||
private final Translations translations;
|
||||
|
||||
public AerodromeLabel(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
|
@ -64,11 +69,11 @@ public class AerodromeLabel implements OpenMapTilesSchema.AerodromeLabel, Tables
|
|||
public void process(Tables.OsmAerodromeLabelPoint element, FeatureCollector features) {
|
||||
features.centroid(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setZoomRange(10, 14)
|
||||
.setMinZoom(10)
|
||||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.putAttrs(Utils.elevationTags(element.ele()))
|
||||
.setAttr(Fields.IATA, nullIfEmpty(element.iata()))
|
||||
.setAttr(Fields.ICAO, nullIfEmpty(element.icao()))
|
||||
.setAttr(Fields.CLASS, classLookup.getOrElse(element.source().tags(), FieldValues.CLASS_OTHER));
|
||||
.setAttr(Fields.CLASS, classLookup.getOrElse(element.source(), FieldValues.CLASS_OTHER));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -43,9 +43,14 @@ import com.onthegomap.flatmap.stats.Stats;
|
|||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/aeroway
|
||||
* Defines the logic for generating map elements in the {@code aeroway} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/aeroway">OpenMapTiles
|
||||
* aeroway sql files</a>.
|
||||
*/
|
||||
public class Aeroway implements OpenMapTilesSchema.Aeroway, Tables.OsmAerowayLinestring.Handler,
|
||||
public class Aeroway implements
|
||||
OpenMapTilesSchema.Aeroway,
|
||||
Tables.OsmAerowayLinestring.Handler,
|
||||
Tables.OsmAerowayPolygon.Handler,
|
||||
Tables.OsmAerowayPoint.Handler {
|
||||
|
||||
|
@ -55,7 +60,7 @@ public class Aeroway implements OpenMapTilesSchema.Aeroway, Tables.OsmAerowayLin
|
|||
@Override
|
||||
public void process(Tables.OsmAerowayPolygon element, FeatureCollector features) {
|
||||
features.polygon(LAYER_NAME)
|
||||
.setZoomRange(10, 14)
|
||||
.setMinZoom(10)
|
||||
.setMinPixelSize(2)
|
||||
.setAttr(Fields.CLASS, element.aeroway())
|
||||
.setAttr(Fields.REF, element.ref());
|
||||
|
@ -64,7 +69,7 @@ public class Aeroway implements OpenMapTilesSchema.Aeroway, Tables.OsmAerowayLin
|
|||
@Override
|
||||
public void process(Tables.OsmAerowayLinestring element, FeatureCollector features) {
|
||||
features.line(LAYER_NAME)
|
||||
.setZoomRange(10, 14)
|
||||
.setMinZoom(10)
|
||||
.setAttr(Fields.CLASS, element.aeroway())
|
||||
.setAttr(Fields.REF, element.ref());
|
||||
}
|
||||
|
@ -72,7 +77,7 @@ public class Aeroway implements OpenMapTilesSchema.Aeroway, Tables.OsmAerowayLin
|
|||
@Override
|
||||
public void process(Tables.OsmAerowayPoint element, FeatureCollector features) {
|
||||
features.point(LAYER_NAME)
|
||||
.setZoomRange(14, 14)
|
||||
.setMinZoom(14)
|
||||
.setAttr(Fields.CLASS, element.aeroway())
|
||||
.setAttr(Fields.REF, element.ref());
|
||||
}
|
||||
|
|
|
@ -82,7 +82,11 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/boundary
|
||||
* Defines the logic for generating map elements for country, state, and town boundaries in the {@code boundary} layer
|
||||
* from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/boundary">OpenMapTiles
|
||||
* boundary sql files</a>.
|
||||
*/
|
||||
public class Boundary implements
|
||||
OpenMapTilesSchema.Boundary,
|
||||
|
@ -92,6 +96,24 @@ public class Boundary implements
|
|||
OpenMapTilesProfile.FeaturePostProcessor,
|
||||
OpenMapTilesProfile.FinishHandler {
|
||||
|
||||
/*
|
||||
* Uses natural earth at lower zoom levels and OpenStreetMap at higher zoom levels.
|
||||
*
|
||||
* For OpenStreetMap data at higher zoom levels:
|
||||
* 1) Preprocess relations on the first pass to extract info for relations where
|
||||
* type=boundary and boundary=administrative and store the admin_level for
|
||||
* later.
|
||||
* 2) When processing individual ways, take the minimum (most important) admin
|
||||
* level of every relation they are a part of and use that as the admin level
|
||||
* for the way.
|
||||
* 3) If boundary_country_names argument is true and the way is part of a country
|
||||
* (admin_level=2) boundary, then hold onto it for later
|
||||
* 4) When we finish processing the OSM source, build country polygons from the
|
||||
* saved ways and use that to determine which country is on the left and right
|
||||
* side of each way, then emit the way with ADM0_L and ADM0_R keys set.
|
||||
* 5) Before emitting boundary lines, merge linestrings with the same tags.
|
||||
*/
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Boundary.class);
|
||||
private static final double COUNTRY_TEST_OFFSET = GeoUtils.metersToPixelAtEquator(0, 10) / 256d;
|
||||
private final Stats stats;
|
||||
|
@ -195,6 +217,8 @@ public class Boundary implements
|
|||
String disputedName = null, claimedBy = null;
|
||||
Set<Long> regionIds = new HashSet<>();
|
||||
boolean disputed = false;
|
||||
// aggregate all borders this way is a part of - take the lowest
|
||||
// admin level, and assume it is disputed if any relation is disputed.
|
||||
for (var info : relationInfos) {
|
||||
BoundaryRelation rel = info.relation();
|
||||
disputed |= rel.disputed;
|
||||
|
@ -257,7 +281,7 @@ public class Boundary implements
|
|||
.setAttr(Fields.DISPUTED, disputed ? 1 : 0)
|
||||
.setAttr(Fields.MARITIME, maritime ? 1 : 0)
|
||||
.setMinPixelSizeAtAllZooms(0)
|
||||
.setZoomRange(minzoom, 14)
|
||||
.setMinZoom(minzoom)
|
||||
.setAttr(Fields.CLAIMED_BY, claimedBy)
|
||||
.setAttr(Fields.DISPUTED_NAME, editName(disputedName));
|
||||
}
|
||||
|
@ -267,7 +291,7 @@ public class Boundary implements
|
|||
|
||||
@Override
|
||||
public void finish(String sourceName, FeatureCollector.Factory featureCollectors,
|
||||
Consumer<FeatureCollector.Feature> next) {
|
||||
Consumer<FeatureCollector.Feature> emit) {
|
||||
if (OpenMapTilesProfile.OSM_SOURCE.equals(sourceName)) {
|
||||
var timer = stats.startStage("boundaries");
|
||||
LongObjectMap<PreparedGeometry> countryBoundaries = prepareRegionPolygons();
|
||||
|
@ -293,9 +317,9 @@ public class Boundary implements
|
|||
.setAttr(Fields.ADM0_L, borderingRegions.left == null ? null : regionNames.get(borderingRegions.left))
|
||||
.setAttr(Fields.ADM0_R, borderingRegions.right == null ? null : regionNames.get(borderingRegions.right))
|
||||
.setMinPixelSizeAtAllZooms(0)
|
||||
.setZoomRange(key.minzoom, 14);
|
||||
.setMinZoom(key.minzoom);
|
||||
for (var feature : features) {
|
||||
next.accept(feature);
|
||||
emit.accept(feature);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -305,19 +329,18 @@ public class Boundary implements
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) throws GeometryException {
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
double minLength = config.minFeatureSize(zoom);
|
||||
double tolerance = config.tolerance(zoom);
|
||||
return FeatureMerge.mergeLineStrings(items, attrs -> minLength, tolerance, BUFFER_SIZE);
|
||||
}
|
||||
|
||||
|
||||
/** Returns the left and right country for {@code lineString}. */
|
||||
private BorderingRegions getBorderingRegions(
|
||||
LongObjectMap<PreparedGeometry> countryBoundaries,
|
||||
Set<Long> allRegions,
|
||||
LineString lineString
|
||||
) {
|
||||
Long rightCountry = null, leftCountry = null;
|
||||
Set<Long> validRegions = allRegions.stream()
|
||||
.filter(countryBoundaries::containsKey)
|
||||
.collect(Collectors.toSet());
|
||||
|
@ -345,15 +368,11 @@ public class Boundary implements
|
|||
|
||||
var right = mode(rights);
|
||||
if (right != null) {
|
||||
rightCountry = right.getKey();
|
||||
lefts.removeAll(List.of(rightCountry));
|
||||
lefts.removeAll(List.of(right));
|
||||
}
|
||||
var left = mode(lefts);
|
||||
if (left != null) {
|
||||
leftCountry = left.getKey();
|
||||
}
|
||||
|
||||
if (leftCountry == null && rightCountry == null) {
|
||||
if (left == null && right == null) {
|
||||
Coordinate point = GeoUtils.worldToLatLonCoords(GeoUtils.pointAlongOffset(lineString, 0.5, 0)).getCoordinate();
|
||||
LOGGER.warn("no left or right country for border between OSM country relations: %s around %.5f, %.5f"
|
||||
.formatted(
|
||||
|
@ -363,10 +382,10 @@ public class Boundary implements
|
|||
));
|
||||
}
|
||||
|
||||
return new BorderingRegions(leftCountry, rightCountry);
|
||||
return new BorderingRegions(left, right);
|
||||
}
|
||||
|
||||
|
||||
/** Returns a map from region ID to prepared geometry optimized for {@code contains} queries. */
|
||||
private LongObjectMap<PreparedGeometry> prepareRegionPolygons() {
|
||||
LOGGER.info("Creating polygons for " + regionGeometries.size() + " boundaries");
|
||||
LongObjectMap<PreparedGeometry> countryBoundaries = new GHLongObjectHashMap<>();
|
||||
|
@ -391,10 +410,12 @@ public class Boundary implements
|
|||
return countryBoundaries;
|
||||
}
|
||||
|
||||
private Map.Entry<Long, Long> mode(List<Long> rights) {
|
||||
return rights.stream()
|
||||
/** Returns most frequently-occurring element in {@code list}. */
|
||||
private static Long mode(List<Long> list) {
|
||||
return list.stream()
|
||||
.collect(groupingBy(Function.identity(), counting())).entrySet().stream()
|
||||
.max(Map.Entry.comparingByValue())
|
||||
.map(Map.Entry::getKey)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
|
@ -405,6 +426,10 @@ public class Boundary implements
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Minimal set of information extracted from a boundary relation to be used when processing each way in that
|
||||
* relation.
|
||||
*/
|
||||
private static record BoundaryRelation(
|
||||
long id,
|
||||
int adminLevel,
|
||||
|
@ -426,6 +451,7 @@ public class Boundary implements
|
|||
}
|
||||
}
|
||||
|
||||
/** Information to hold onto from processing a way in a boundary relation to determine the left/right region ID later. */
|
||||
private static record CountryBoundaryComponent(
|
||||
int adminLevel,
|
||||
boolean disputed,
|
||||
|
@ -440,6 +466,5 @@ public class Boundary implements
|
|||
CountryBoundaryComponent groupingKey() {
|
||||
return new CountryBoundaryComponent(adminLevel, disputed, maritime, minzoom, null, regions, claimedBy, name);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.util.MemoryEstimator.CLASS_HEADER_BYTES;
|
||||
import static com.onthegomap.flatmap.util.Parse.parseDoubleOrNull;
|
||||
import static java.util.Map.entry;
|
||||
|
@ -58,14 +58,28 @@ import java.util.Locale;
|
|||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/building
|
||||
* Defines the logic for generating map elements for buildings in the {@code building} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/building">OpenMapTiles
|
||||
* building sql files</a>.
|
||||
*/
|
||||
public class Building implements OpenMapTilesSchema.Building,
|
||||
public class Building implements
|
||||
OpenMapTilesSchema.Building,
|
||||
Tables.OsmBuildingPolygon.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor,
|
||||
OpenMapTilesProfile.OsmRelationPreprocessor {
|
||||
|
||||
private final boolean mergeZ13Buildings;
|
||||
/*
|
||||
* Emit all buildings from OSM data at z14.
|
||||
*
|
||||
* At z13, emit all buildings at process-time, but then at tile render-time,
|
||||
* merge buildings that are overlapping or almost touching into combined
|
||||
* buildings so that entire city blocks show up as a single building polygon.
|
||||
*
|
||||
* THIS IS VERY EXPENSIVE! Merging buildings at z13 adds about 50% to the
|
||||
* total map generation time. To disable it, set building_merge_z13 argument
|
||||
* to false.
|
||||
*/
|
||||
|
||||
private static final Map<String, String> MATERIAL_COLORS = Map.ofEntries(
|
||||
entry("cement_block", "#6a7880"),
|
||||
|
@ -86,6 +100,7 @@ public class Building implements OpenMapTilesSchema.Building,
|
|||
entry("sandstone", "#b4a995"), // same as stone
|
||||
entry("clay", "#9d8b75") // same as mud
|
||||
);
|
||||
private final boolean mergeZ13Buildings;
|
||||
|
||||
public Building(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.mergeZ13Buildings = config.arguments().getBoolean(
|
||||
|
@ -95,14 +110,6 @@ public class Building implements OpenMapTilesSchema.Building,
|
|||
);
|
||||
}
|
||||
|
||||
private static record BuildingRelationInfo(long id) implements OsmRelationInfo {
|
||||
|
||||
@Override
|
||||
public long estimateMemoryUsageBytes() {
|
||||
return CLASS_HEADER_BYTES + MemoryEstimator.estimateSizeLong(id);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OsmRelationInfo> preprocessOsmRelation(OsmElement.Relation relation) {
|
||||
if (relation.hasTag("type", "building")) {
|
||||
|
@ -154,12 +161,13 @@ public class Building implements OpenMapTilesSchema.Building,
|
|||
|
||||
if (renderHeight < 3660 && renderMinHeight < 3660) {
|
||||
var feature = features.polygon(LAYER_NAME).setBufferPixels(BUFFER_SIZE)
|
||||
.setZoomRange(13, 14)
|
||||
.setMinZoom(13)
|
||||
.setMinPixelSize(2)
|
||||
.setAttrWithMinzoom(Fields.RENDER_HEIGHT, renderHeight, 14)
|
||||
.setAttrWithMinzoom(Fields.RENDER_MIN_HEIGHT, renderMinHeight, 14)
|
||||
.setAttrWithMinzoom(Fields.COLOUR, color, 14)
|
||||
.setAttrWithMinzoom(Fields.HIDE_3D, hide3d, 14);
|
||||
.setAttrWithMinzoom(Fields.HIDE_3D, hide3d, 14)
|
||||
.setZorder(renderHeight);
|
||||
if (mergeZ13Buildings) {
|
||||
feature
|
||||
.setMinPixelSize(0.1)
|
||||
|
@ -173,4 +181,12 @@ public class Building implements OpenMapTilesSchema.Building,
|
|||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
return (mergeZ13Buildings && zoom == 13) ? FeatureMerge.mergeNearbyPolygons(items, 4, 4, 0.5, 0.5) : items;
|
||||
}
|
||||
|
||||
private static record BuildingRelationInfo(long id) implements OsmRelationInfo {
|
||||
|
||||
@Override
|
||||
public long estimateMemoryUsageBytes() {
|
||||
return CLASS_HEADER_BYTES + MemoryEstimator.estimateSizeLong(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -43,9 +43,14 @@ import com.onthegomap.flatmap.stats.Stats;
|
|||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/housenumber
|
||||
* Defines the logic for generating map elements in the {@code housenumber} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/housenumber">OpenMapTiles
|
||||
* housenumber sql files</a>.
|
||||
*/
|
||||
public class Housenumber implements OpenMapTilesSchema.Housenumber, Tables.OsmHousenumberPoint.Handler {
|
||||
public class Housenumber implements
|
||||
OpenMapTilesSchema.Housenumber,
|
||||
Tables.OsmHousenumberPoint.Handler {
|
||||
|
||||
public Housenumber(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
}
|
||||
|
@ -55,6 +60,6 @@ public class Housenumber implements OpenMapTilesSchema.Housenumber, Tables.OsmHo
|
|||
features.centroidIfConvex(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.HOUSENUMBER, element.housenumber())
|
||||
.setZoomRange(14, 14);
|
||||
.setMinZoom(14);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,8 +39,8 @@ import com.onthegomap.flatmap.FeatureCollector;
|
|||
import com.onthegomap.flatmap.FeatureMerge;
|
||||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
|
@ -54,7 +54,11 @@ import java.util.Map;
|
|||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/landcover
|
||||
* Defines the logic for generating map elements for natural land cover polygons like ice, sand, and forest in the
|
||||
* {@code landcover} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/landcover">OpenMapTiles
|
||||
* landcover sql files</a>.
|
||||
*/
|
||||
public class Landcover implements
|
||||
OpenMapTilesSchema.Landcover,
|
||||
|
@ -62,17 +66,25 @@ public class Landcover implements
|
|||
Tables.OsmLandcoverPolygon.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor {
|
||||
|
||||
/*
|
||||
* Large ice areas come from natural earth and the rest come from OpenStreetMap at higher zoom
|
||||
* levels. At render-time, postProcess() merges polygons into larger connected area based
|
||||
* on the number of points in the original area. Since postProcess() only has visibility into
|
||||
* features on a single tile, process() needs to pass the number of points the original feature
|
||||
* had through using a temporary "_numpoints" attribute.
|
||||
*/
|
||||
|
||||
public static final ZoomFunction<Number> MIN_PIXEL_SIZE_THRESHOLDS = ZoomFunction.fromMaxZoomThresholds(Map.of(
|
||||
13, 8,
|
||||
10, 4,
|
||||
9, 2
|
||||
));
|
||||
private static final String NUM_POINTS_ATTR = "_numpoints";
|
||||
private static final String TEMP_NUM_POINTS_ATTR = "_numpoints";
|
||||
private static final Set<String> WOOD_OR_FOREST = Set.of(
|
||||
FieldValues.SUBCLASS_WOOD,
|
||||
FieldValues.SUBCLASS_FOREST
|
||||
);
|
||||
private final MultiExpression.MultiExpressionIndex<String> classMapping;
|
||||
private final MultiExpression.Index<String> classMapping;
|
||||
|
||||
public Landcover(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.classMapping = FieldMappings.Class.index();
|
||||
|
@ -114,39 +126,40 @@ public class Landcover implements
|
|||
.setMinPixelSizeOverrides(MIN_PIXEL_SIZE_THRESHOLDS)
|
||||
.setAttr(Fields.CLASS, clazz)
|
||||
.setAttr(Fields.SUBCLASS, subclass)
|
||||
.setNumPointsAttr(NUM_POINTS_ATTR)
|
||||
.setZoomRange(WOOD_OR_FOREST.contains(subclass) ? 9 : 7, 14);
|
||||
.setNumPointsAttr(TEMP_NUM_POINTS_ATTR)
|
||||
.setMinZoom(WOOD_OR_FOREST.contains(subclass) ? 9 : 7);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items)
|
||||
throws GeometryException {
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) throws GeometryException {
|
||||
if (zoom < 7 || zoom > 13) {
|
||||
for (var item : items) {
|
||||
item.attrs().remove(NUM_POINTS_ATTR);
|
||||
item.attrs().remove(TEMP_NUM_POINTS_ATTR);
|
||||
}
|
||||
return items;
|
||||
} else { // z7-13
|
||||
String groupKey = "_group";
|
||||
// merging only merges polygons with the same attributes, so use this temporary key
|
||||
// to separate features into layers that will be merged separately
|
||||
String tempGroupKey = "_group";
|
||||
List<VectorTile.Feature> result = new ArrayList<>();
|
||||
List<VectorTile.Feature> toMerge = new ArrayList<>();
|
||||
for (var item : items) {
|
||||
Map<String, Object> attrs = item.attrs();
|
||||
Object numPointsObj = attrs.remove(NUM_POINTS_ATTR);
|
||||
Object numPointsObj = attrs.remove(TEMP_NUM_POINTS_ATTR);
|
||||
Object subclassObj = attrs.get(Fields.SUBCLASS);
|
||||
if (numPointsObj instanceof Number num && subclassObj instanceof String subclass) {
|
||||
long numPoints = num.longValue();
|
||||
if (zoom >= 10) {
|
||||
if (WOOD_OR_FOREST.contains(subclass) && numPoints < 300) {
|
||||
attrs.put(groupKey, numPoints < 50 ? "<50" : "<300");
|
||||
attrs.put(tempGroupKey, numPoints < 50 ? "<50" : "<300");
|
||||
toMerge.add(item);
|
||||
} else { // don't merge
|
||||
result.add(item);
|
||||
}
|
||||
} else if (zoom == 9) {
|
||||
if (WOOD_OR_FOREST.contains(subclass)) {
|
||||
attrs.put(groupKey, numPoints < 50 ? "<50" : numPoints < 300 ? "<300" : ">300");
|
||||
attrs.put(tempGroupKey, numPoints < 50 ? "<50" : numPoints < 300 ? "<300" : ">300");
|
||||
toMerge.add(item);
|
||||
} else { // don't merge
|
||||
result.add(item);
|
||||
|
@ -160,7 +173,7 @@ public class Landcover implements
|
|||
}
|
||||
var merged = FeatureMerge.mergeOverlappingPolygons(toMerge, 4);
|
||||
for (var item : merged) {
|
||||
item.attrs().remove(groupKey);
|
||||
item.attrs().remove(tempGroupKey);
|
||||
}
|
||||
result.addAll(merged);
|
||||
return result;
|
||||
|
|
|
@ -35,8 +35,8 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
|
@ -52,13 +52,29 @@ import java.util.Map;
|
|||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/landuse
|
||||
* Defines the logic for generating map elements for man-made land use polygons like cemeteries, zoos, and hospitals in
|
||||
* the {@code landuse} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/landuse">OpenMapTiles
|
||||
* landuse sql files</a>.
|
||||
*/
|
||||
public class Landuse implements
|
||||
OpenMapTilesSchema.Landuse,
|
||||
OpenMapTilesProfile.NaturalEarthProcessor,
|
||||
Tables.OsmLandusePolygon.Handler {
|
||||
|
||||
private static final ZoomFunction<Number> MIN_PIXEL_SIZE_THRESHOLDS = ZoomFunction.fromMaxZoomThresholds(Map.of(
|
||||
13, 4,
|
||||
7, 2,
|
||||
6, 1
|
||||
));
|
||||
private static final Set<String> Z6_CLASSES = Set.of(
|
||||
FieldValues.CLASS_RESIDENTIAL,
|
||||
FieldValues.CLASS_SUBURB,
|
||||
FieldValues.CLASS_QUARTER,
|
||||
FieldValues.CLASS_NEIGHBOURHOOD
|
||||
);
|
||||
|
||||
public Landuse(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
}
|
||||
|
||||
|
@ -74,19 +90,6 @@ public class Landuse implements
|
|||
}
|
||||
}
|
||||
|
||||
private static final ZoomFunction<Number> MIN_PIXEL_SIZE_THRESHOLDS = ZoomFunction.fromMaxZoomThresholds(Map.of(
|
||||
13, 4,
|
||||
7, 2,
|
||||
6, 1
|
||||
));
|
||||
|
||||
private static final Set<String> Z6_CLASSES = Set.of(
|
||||
FieldValues.CLASS_RESIDENTIAL,
|
||||
FieldValues.CLASS_SUBURB,
|
||||
FieldValues.CLASS_QUARTER,
|
||||
FieldValues.CLASS_NEIGHBOURHOOD
|
||||
);
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmLandusePolygon element, FeatureCollector features) {
|
||||
String clazz = coalesce(
|
||||
|
@ -101,7 +104,7 @@ public class Landuse implements
|
|||
features.polygon(LAYER_NAME).setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.CLASS, clazz)
|
||||
.setMinPixelSizeOverrides(MIN_PIXEL_SIZE_THRESHOLDS)
|
||||
.setZoomRange(Z6_CLASSES.contains(clazz) ? 6 : 9, 14);
|
||||
.setMinZoom(Z6_CLASSES.contains(clazz) ? 6 : 9);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,8 +35,8 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.elevationTags;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.elevationTags;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.carrotsearch.hppc.LongIntHashMap;
|
||||
import com.carrotsearch.hppc.LongIntMap;
|
||||
|
@ -44,10 +44,10 @@ import com.onthegomap.flatmap.FeatureCollector;
|
|||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Parse;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
@ -56,13 +56,24 @@ import org.locationtech.jts.geom.Geometry;
|
|||
import org.locationtech.jts.geom.Point;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/mountain_peak
|
||||
* Defines the logic for generating map elements for mountain peak label points in the {@code mountain_peak} layer from
|
||||
* source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/mountain_peak">OpenMapTiles
|
||||
* mountain_peak sql files</a>.
|
||||
*/
|
||||
public class MountainPeak implements
|
||||
OpenMapTilesSchema.MountainPeak,
|
||||
Tables.OsmPeakPoint.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor {
|
||||
|
||||
/*
|
||||
* Mountain peaks come from OpenStreetMap data and are ranked by importance (based on if they
|
||||
* have a name or wikipedia page) then by elevation. Uses the "label grid" feature to limit
|
||||
* label density by only taking the top 5 most important mountain peaks within each 100x100px
|
||||
* square.
|
||||
*/
|
||||
|
||||
private final Translations translations;
|
||||
private final Stats stats;
|
||||
|
||||
|
@ -84,7 +95,10 @@ public class MountainPeak implements
|
|||
(nullIfEmpty(element.wikipedia()) != null ? 10_000 : 0) +
|
||||
(nullIfEmpty(element.name()) != null ? 10_000 : 0)
|
||||
)
|
||||
.setZoomRange(7, 14)
|
||||
.setMinZoom(7)
|
||||
// need to use a larger buffer size to allow enough points through to not cut off
|
||||
// any label grid squares which could lead to inconsistent label ranks for a feature
|
||||
// in adjacent tiles. postProcess() will remove anything outside the desired buffer.
|
||||
.setBufferPixels(100)
|
||||
.setPointLabelGridSizeAndLimit(13, 100, 5);
|
||||
}
|
||||
|
@ -97,8 +111,8 @@ public class MountainPeak implements
|
|||
VectorTile.Feature feature = items.get(i);
|
||||
int gridrank = groupCounts.getOrDefault(feature.group(), 1);
|
||||
groupCounts.put(feature.group(), gridrank + 1);
|
||||
// now that we have accurate ranks, remove anything outside the desired buffer
|
||||
if (!insideTileBuffer(feature)) {
|
||||
// remove from the output
|
||||
items.set(i, null);
|
||||
} else if (!feature.attrs().containsKey(Fields.RANK)) {
|
||||
feature.attrs().put(Fields.RANK, gridrank);
|
||||
|
@ -107,7 +121,7 @@ public class MountainPeak implements
|
|||
return items;
|
||||
}
|
||||
|
||||
private boolean insideTileBuffer(double xOrY) {
|
||||
private static boolean insideTileBuffer(double xOrY) {
|
||||
return xOrY >= -BUFFER_SIZE && xOrY <= 256 + BUFFER_SIZE;
|
||||
}
|
||||
|
||||
|
|
|
@ -37,8 +37,8 @@ package com.onthegomap.flatmap.openmaptiles.layers;
|
|||
|
||||
import static com.onthegomap.flatmap.collection.FeatureGroup.Z_ORDER_BITS;
|
||||
import static com.onthegomap.flatmap.collection.FeatureGroup.Z_ORDER_MIN;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.carrotsearch.hppc.LongIntHashMap;
|
||||
import com.carrotsearch.hppc.LongIntMap;
|
||||
|
@ -48,26 +48,38 @@ import com.onthegomap.flatmap.config.FlatmapConfig;
|
|||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.geo.GeometryType;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/park
|
||||
* Defines the logic for generating map elements for designated parks polygons and their label points in the {@code
|
||||
* park} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/park">OpenMapTiles
|
||||
* park sql files</a>.
|
||||
*/
|
||||
public class Park implements
|
||||
OpenMapTilesSchema.Park,
|
||||
Tables.OsmParkPolygon.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Park.class);
|
||||
// constants for packing the minimum zoom ordering of park labels into the z-order field
|
||||
private static final int PARK_NATIONAL_PARK_BOOST = 1 << (Z_ORDER_BITS - 1);
|
||||
private static final int PARK_WIKIPEDIA_BOOST = 1 << (Z_ORDER_BITS - 2);
|
||||
|
||||
// constants for determining the minimum zoom level for a park label based on its area
|
||||
private static final double WORLD_AREA_FOR_70K_SQUARE_METERS =
|
||||
Math.pow(GeoUtils.metersToPixelAtEquator(0, Math.sqrt(70_000)) / 256d, 2);
|
||||
private static final double LOG2 = Math.log(2);
|
||||
private static final int PARK_AREA_RANGE = 1 << (Z_ORDER_BITS - 3);
|
||||
private static final double PARK_LOG_RANGE = Math.log(Math.pow(4, 26)); // 2^14 tiles, 2^12 pixels per tile
|
||||
|
||||
private final Translations translations;
|
||||
private final Stats stats;
|
||||
|
||||
|
@ -76,15 +88,6 @@ public class Park implements
|
|||
this.translations = translations;
|
||||
}
|
||||
|
||||
private static final int PARK_NATIONAL_PARK_BOOST = 1 << (Z_ORDER_BITS - 1);
|
||||
private static final int PARK_WIKIPEDIA_BOOST = 1 << (Z_ORDER_BITS - 2);
|
||||
private static final double WORLD_AREA_FOR_70K_SQUARE_METERS =
|
||||
Math.pow(GeoUtils.metersToPixelAtEquator(0, Math.sqrt(70_000)) / 256d, 2);
|
||||
private static final double LOG2 = Math.log(2);
|
||||
private static final int PARK_AREA_RANGE = 1 << (Z_ORDER_BITS - 3);
|
||||
private static final double PARK_LOG_RANGE = Math.log(Math.pow(4, 26)); // 2^14 tiles, 2^12 pixels per tile
|
||||
private static final double LOG4 = Math.log(4);
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmParkPolygon element, FeatureCollector features) {
|
||||
String protectionTitle = element.protectionTitle();
|
||||
|
@ -96,11 +99,14 @@ public class Park implements
|
|||
nullIfEmpty(element.boundary()),
|
||||
nullIfEmpty(element.leisure())
|
||||
);
|
||||
|
||||
// park shape
|
||||
features.polygon(LAYER_NAME).setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.CLASS, clazz)
|
||||
.setMinPixelSize(2)
|
||||
.setZoomRange(6, 14);
|
||||
.setMinZoom(6);
|
||||
|
||||
// park name label point (if it has one)
|
||||
if (element.name() != null) {
|
||||
try {
|
||||
double area = element.source().area();
|
||||
|
@ -118,7 +124,7 @@ public class Park implements
|
|||
((element.source().hasTag("wikipedia") || element.source().hasTag("wikidata")) ? PARK_WIKIPEDIA_BOOST
|
||||
: 0) +
|
||||
areaBoost
|
||||
).setZoomRange(minzoom, 14);
|
||||
).setMinZoom(minzoom);
|
||||
} catch (GeometryException e) {
|
||||
e.log(stats, "omt_park_area", "Unable to get park area for " + element.source().id());
|
||||
}
|
||||
|
@ -127,6 +133,7 @@ public class Park implements
|
|||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
// infer the "rank" attribute from point ordering within each label grid square
|
||||
LongIntMap counts = new LongIntHashMap();
|
||||
for (int i = items.size() - 1; i >= 0; i--) {
|
||||
var feature = items.get(i);
|
||||
|
|
|
@ -38,9 +38,9 @@ package com.onthegomap.flatmap.openmaptiles.layers;
|
|||
import static com.onthegomap.flatmap.collection.FeatureGroup.Z_ORDER_BITS;
|
||||
import static com.onthegomap.flatmap.collection.FeatureGroup.Z_ORDER_MAX;
|
||||
import static com.onthegomap.flatmap.collection.FeatureGroup.Z_ORDER_MIN;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullOrEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullOrEmpty;
|
||||
|
||||
import com.carrotsearch.hppc.LongIntHashMap;
|
||||
import com.carrotsearch.hppc.LongIntMap;
|
||||
|
@ -51,10 +51,10 @@ import com.onthegomap.flatmap.geo.GeoUtils;
|
|||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.geo.PointIndex;
|
||||
import com.onthegomap.flatmap.geo.PolygonIndex;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Parse;
|
||||
|
@ -72,11 +72,13 @@ import java.util.stream.DoubleStream;
|
|||
import java.util.stream.Stream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.locationtech.jts.geom.Point;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/place
|
||||
* Defines the logic for generating label points for populated places like continents, countries, cities, and towns in
|
||||
* the {@code place} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/place">OpenMapTiles
|
||||
* place sql files</a>.
|
||||
*/
|
||||
public class Place implements
|
||||
OpenMapTilesSchema.Place,
|
||||
|
@ -89,24 +91,72 @@ public class Place implements
|
|||
Tables.OsmCityPoint.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Place.class);
|
||||
/*
|
||||
* Place labels locations and names come from OpenStreetMap, but we also join with natural
|
||||
* earth state/country geographic areas and city point labels to give a hint for what rank
|
||||
* and minimum zoom level to use for those points.
|
||||
*/
|
||||
|
||||
private static final TreeMap<Double, Integer> ISLAND_AREA_RANKS = new TreeMap<>(Map.of(
|
||||
Double.MAX_VALUE, 3,
|
||||
squareMetersToWorldArea(40_000_000), 4,
|
||||
squareMetersToWorldArea(15_000_000), 5,
|
||||
squareMetersToWorldArea(1_000_000), 6
|
||||
));
|
||||
// constants for packing island label precedence into the z-order field
|
||||
private static final int ISLAND_ZORDER_RANGE = Z_ORDER_MAX;
|
||||
private static final double ISLAND_LOG_AREA_RANGE = Math.log(Math.pow(4, 26)); // 2^14 tiles, 2^12 pixels per tile
|
||||
private static final double CITY_JOIN_DISTANCE = GeoUtils.metersToPixelAtEquator(0, 50_000) / 256d;
|
||||
// constants for packing place label precedence into the z-order fiels
|
||||
private static final int Z_ORDER_RANK_BITS = 4;
|
||||
private static final int Z_ORDER_PLACE_BITS = 4;
|
||||
private static final int Z_ORDER_LENGTH_BITS = 5;
|
||||
private static final int Z_ORDER_POPULATION_BITS =
|
||||
Z_ORDER_BITS - (Z_ORDER_RANK_BITS + Z_ORDER_PLACE_BITS + Z_ORDER_LENGTH_BITS);
|
||||
private static final int Z_ORDER_POPULATION_RANGE = (1 << Z_ORDER_POPULATION_BITS) - 1;
|
||||
private static final double LOG_MAX_POPULATION = Math.log(100_000_000d);
|
||||
private static final Set<String> MAJOR_CITY_PLACES = Set.of("city", "town", "village");
|
||||
private static final ZoomFunction<Number> LABEL_GRID_LIMITS = ZoomFunction.fromMaxZoomThresholds(Map.of(
|
||||
8, 4,
|
||||
9, 8,
|
||||
10, 12,
|
||||
12, 14
|
||||
), 0);
|
||||
private final Translations translations;
|
||||
private final Stats stats;
|
||||
|
||||
private static record NaturalEarthRegion(String name, int rank) {
|
||||
|
||||
NaturalEarthRegion(String name, int maxRank, double... ranks) {
|
||||
this(name, (int) Math.ceil(DoubleStream.of(ranks).average().orElse(maxRank)));
|
||||
}
|
||||
}
|
||||
|
||||
private static record NaturalEarthPoint(String name, String wikidata, int scaleRank, Set<String> names) {}
|
||||
|
||||
// spatial indexes for joining natural earth place labels with their corresponding points
|
||||
// from openstreetmap
|
||||
private PolygonIndex<NaturalEarthRegion> countries = PolygonIndex.create();
|
||||
private PolygonIndex<NaturalEarthRegion> states = PolygonIndex.create();
|
||||
private PointIndex<NaturalEarthPoint> cities = PointIndex.create();
|
||||
|
||||
public Place(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.translations = translations;
|
||||
this.stats = stats;
|
||||
}
|
||||
|
||||
/** Returns the portion of the world that {@code squareMeters} covers where 1 is the entire planet. */
|
||||
private static double squareMetersToWorldArea(double squareMeters) {
|
||||
double oneSideMeters = Math.sqrt(squareMeters);
|
||||
double oneSideWorld = GeoUtils.metersToPixelAtEquator(0, oneSideMeters) / 256d;
|
||||
return Math.pow(oneSideWorld, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Packs place precedence ordering ({@code rank asc, place asc, population desc, name.length asc}) into the z-order
|
||||
* field.
|
||||
*/
|
||||
static int getZorder(Integer rank, PlaceType place, long population, String name) {
|
||||
int zorder = rank == null ? 0 : Math.max(1, 15 - rank);
|
||||
zorder = (zorder << Z_ORDER_PLACE_BITS) | (place == null ? 0 : Math.max(1, 15 - place.ordinal()));
|
||||
double logPop = Math.min(LOG_MAX_POPULATION, Math.log(population));
|
||||
zorder = (zorder << Z_ORDER_POPULATION_BITS) | Math.max(0, Math.min(Z_ORDER_POPULATION_RANGE,
|
||||
(int) Math.round(logPop * Z_ORDER_POPULATION_RANGE / LOG_MAX_POPULATION)));
|
||||
zorder = (zorder << Z_ORDER_LENGTH_BITS) | (name == null ? 0 : Math.max(1, 31 - name.length()));
|
||||
|
||||
return zorder + Z_ORDER_MIN;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
countries = null;
|
||||
|
@ -114,13 +164,10 @@ public class Place implements
|
|||
cities = null;
|
||||
}
|
||||
|
||||
public Place(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.translations = translations;
|
||||
this.stats = stats;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features) {
|
||||
// store data from natural earth to help with ranks and min zoom levels when actually
|
||||
// emitting features from openstreetmap data.
|
||||
try {
|
||||
switch (table) {
|
||||
case "ne_10m_admin_0_countries" -> countries.put(feature.worldGeometry(), new NaturalEarthRegion(
|
||||
|
@ -182,6 +229,8 @@ public class Place implements
|
|||
return;
|
||||
}
|
||||
try {
|
||||
// set country rank to 6, unless there is a match in natural earth that indicates it
|
||||
// should be lower
|
||||
int rank = 7;
|
||||
NaturalEarthRegion country = countries.get(element.source().worldGeometry().getCentroid());
|
||||
var names = LanguageUtils.getNames(element.source().tags(), translations);
|
||||
|
@ -200,7 +249,7 @@ public class Place implements
|
|||
.setAttr(Fields.ISO_A2, isoA2)
|
||||
.setAttr(Fields.CLASS, FieldValues.CLASS_COUNTRY)
|
||||
.setAttr(Fields.RANK, rank)
|
||||
.setZoomRange(rank - 1, 14)
|
||||
.setMinZoom(rank - 1)
|
||||
.setZorder(-rank);
|
||||
} catch (GeometryException e) {
|
||||
e.log(stats, "omt_place_country",
|
||||
|
@ -211,7 +260,8 @@ public class Place implements
|
|||
@Override
|
||||
public void process(Tables.OsmStatePoint element, FeatureCollector features) {
|
||||
try {
|
||||
// don't want nearest since we pre-filter the states in the polygon index
|
||||
// want the containing (not nearest) state polygon since we pre-filter the states in the polygon index
|
||||
// use natural earth to filter out any spurious states, and to set the rank field
|
||||
NaturalEarthRegion state = states.getOnlyContaining(element.source().worldGeometry().getCentroid());
|
||||
if (state != null) {
|
||||
var names = LanguageUtils.getNames(element.source().tags(), translations);
|
||||
|
@ -224,7 +274,7 @@ public class Place implements
|
|||
.putAttrs(names)
|
||||
.setAttr(Fields.CLASS, FieldValues.CLASS_STATE)
|
||||
.setAttr(Fields.RANK, rank)
|
||||
.setZoomRange(2, 14)
|
||||
.setMinZoom(2)
|
||||
.setZorder(-rank);
|
||||
}
|
||||
} catch (GeometryException e) {
|
||||
|
@ -233,22 +283,6 @@ public class Place implements
|
|||
}
|
||||
}
|
||||
|
||||
private static double squareMeters(double meters) {
|
||||
double oneSideMeters = Math.sqrt(meters);
|
||||
double oneSideWorld = GeoUtils.metersToPixelAtEquator(0, oneSideMeters) / 256d;
|
||||
return Math.pow(oneSideWorld, 2);
|
||||
}
|
||||
|
||||
private static final TreeMap<Double, Integer> ISLAND_AREA_RANKS = new TreeMap<>(Map.of(
|
||||
Double.MAX_VALUE, 3,
|
||||
squareMeters(40_000_000), 4,
|
||||
squareMeters(15_000_000), 5,
|
||||
squareMeters(1_000_000), 6
|
||||
));
|
||||
|
||||
private static final int ISLAND_ZORDER_RANGE = Z_ORDER_MAX;
|
||||
private static final double ISLAND_LOG_AREA_RANGE = Math.log(Math.pow(4, 26)); // 2^14 tiles, 2^12 pixels per tile
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmIslandPolygon element, FeatureCollector features) {
|
||||
try {
|
||||
|
@ -265,7 +299,7 @@ public class Place implements
|
|||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setAttr(Fields.CLASS, "island")
|
||||
.setAttr(Fields.RANK, rank)
|
||||
.setZoomRange(minzoom, 14)
|
||||
.setMinZoom(minzoom)
|
||||
.setZorder(zOrder);
|
||||
} catch (GeometryException e) {
|
||||
e.log(stats, "omt_place_island_poly",
|
||||
|
@ -279,72 +313,16 @@ public class Place implements
|
|||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setAttr(Fields.CLASS, "island")
|
||||
.setAttr(Fields.RANK, 7)
|
||||
.setZoomRange(12, 14);
|
||||
.setMinZoom(12);
|
||||
}
|
||||
|
||||
private static final Set<String> majorCityPlaces = Set.of("city", "town", "village");
|
||||
private static final double CITY_JOIN_DISTANCE = GeoUtils.metersToPixelAtEquator(0, 50_000) / 256d;
|
||||
|
||||
enum PlaceType {
|
||||
CITY("city"),
|
||||
TOWN("town"),
|
||||
VILLAGE("village"),
|
||||
HAMLET("hamlet"),
|
||||
SUBURB("suburb"),
|
||||
QUARTER("quarter"),
|
||||
NEIGHBORHOOD("neighbourhood"),
|
||||
ISOLATED_DWELLING("isolated_dwelling"),
|
||||
UNKNOWN("unknown");
|
||||
|
||||
private final String name;
|
||||
private static final Map<String, PlaceType> byName = new HashMap<>();
|
||||
|
||||
static {
|
||||
for (PlaceType place : values()) {
|
||||
byName.put(place.name, place);
|
||||
}
|
||||
}
|
||||
|
||||
PlaceType(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public static PlaceType forName(String name) {
|
||||
return byName.getOrDefault(name, UNKNOWN);
|
||||
}
|
||||
}
|
||||
|
||||
private static final int Z_ORDER_RANK_BITS = 4;
|
||||
private static final int Z_ORDER_PLACE_BITS = 4;
|
||||
private static final int Z_ORDER_LENGTH_BITS = 5;
|
||||
private static final int Z_ORDER_POPULATION_BITS = Z_ORDER_BITS -
|
||||
(Z_ORDER_RANK_BITS + Z_ORDER_PLACE_BITS + Z_ORDER_LENGTH_BITS);
|
||||
private static final int Z_ORDER_POPULATION_RANGE = (1 << Z_ORDER_POPULATION_BITS) - 1;
|
||||
private static final double LOG_MAX_POPULATION = Math.log(100_000_000d);
|
||||
|
||||
// order by rank asc, place asc, population desc, name.length asc
|
||||
static int getZorder(Integer rank, PlaceType place, long population, String name) {
|
||||
int zorder = rank == null ? 0 : Math.max(1, 15 - rank);
|
||||
zorder = (zorder << Z_ORDER_PLACE_BITS) | (place == null ? 0 : Math.max(1, 15 - place.ordinal()));
|
||||
double logPop = Math.min(LOG_MAX_POPULATION, Math.log(population));
|
||||
zorder = (zorder << Z_ORDER_POPULATION_BITS) | Math.max(0, Math.min(Z_ORDER_POPULATION_RANGE,
|
||||
(int) Math.round(logPop * Z_ORDER_POPULATION_RANGE / LOG_MAX_POPULATION)));
|
||||
zorder = (zorder << Z_ORDER_LENGTH_BITS) | (name == null ? 0 : Math.max(1, 31 - name.length()));
|
||||
|
||||
return zorder + Z_ORDER_MIN;
|
||||
}
|
||||
|
||||
private static final ZoomFunction<Number> LABEL_GRID_LIMITS = ZoomFunction.fromMaxZoomThresholds(Map.of(
|
||||
8, 4,
|
||||
9, 8,
|
||||
10, 12,
|
||||
12, 14
|
||||
), 0);
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmCityPoint element, FeatureCollector features) {
|
||||
Integer rank = null;
|
||||
if (majorCityPlaces.contains(element.place())) {
|
||||
if (MAJOR_CITY_PLACES.contains(element.place())) {
|
||||
// only for major cities, attempt to find a nearby natural earth label with a similar
|
||||
// name and use that to set a rank from OSM that causes the label to be shown at lower
|
||||
// zoom levels
|
||||
try {
|
||||
Point point = element.source().worldGeometry().getCentroid();
|
||||
List<NaturalEarthPoint> neCities = cities.getWithin(point, CITY_JOIN_DISTANCE);
|
||||
|
@ -382,7 +360,7 @@ public class Place implements
|
|||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setAttr(Fields.CLASS, element.place())
|
||||
.setAttr(Fields.RANK, rank)
|
||||
.setZoomRange(minzoom, 14)
|
||||
.setMinZoom(minzoom)
|
||||
.setZorder(getZorder(rank, placeType, element.population(), element.name()))
|
||||
.setPointLabelGridPixelSize(12, 128);
|
||||
|
||||
|
@ -398,8 +376,8 @@ public class Place implements
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom,
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
// infer the rank field from ordering of the place labels with each label grid square
|
||||
LongIntMap groupCounts = new LongIntHashMap();
|
||||
for (int i = items.size() - 1; i >= 0; i--) {
|
||||
VectorTile.Feature feature = items.get(i);
|
||||
|
@ -411,4 +389,53 @@ public class Place implements
|
|||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
/** Ordering defines the precedence of place classes. */
|
||||
enum PlaceType {
|
||||
CITY("city"),
|
||||
TOWN("town"),
|
||||
VILLAGE("village"),
|
||||
HAMLET("hamlet"),
|
||||
SUBURB("suburb"),
|
||||
QUARTER("quarter"),
|
||||
NEIGHBORHOOD("neighbourhood"),
|
||||
ISOLATED_DWELLING("isolated_dwelling"),
|
||||
UNKNOWN("unknown");
|
||||
|
||||
private static final Map<String, PlaceType> byName = new HashMap<>();
|
||||
|
||||
static {
|
||||
for (PlaceType place : values()) {
|
||||
byName.put(place.name, place);
|
||||
}
|
||||
}
|
||||
|
||||
private final String name;
|
||||
|
||||
PlaceType(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public static PlaceType forName(String name) {
|
||||
return byName.getOrDefault(name, UNKNOWN);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Information extracted from a natural earth geographic region that will be inspected when joining with OpenStreetMap
|
||||
* data.
|
||||
*/
|
||||
private static record NaturalEarthRegion(String name, int rank) {
|
||||
|
||||
NaturalEarthRegion(String name, int maxRank, double... ranks) {
|
||||
this(name, (int) Math.ceil(DoubleStream.of(ranks).average().orElse(maxRank)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Information extracted from a natural earth place label that will be inspected when joining with OpenStreetMap
|
||||
* data.
|
||||
*/
|
||||
private static record NaturalEarthPoint(String name, String wikidata, int scaleRank, Set<String> names) {}
|
||||
}
|
||||
|
||||
|
|
|
@ -35,10 +35,10 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIf;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullOrEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIf;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullOrEmpty;
|
||||
import static java.util.Map.entry;
|
||||
|
||||
import com.carrotsearch.hppc.LongIntHashMap;
|
||||
|
@ -46,12 +46,11 @@ import com.carrotsearch.hppc.LongIntMap;
|
|||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Parse;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
@ -59,28 +58,22 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/poi
|
||||
* Defines the logic for generating map elements for things like shops, parks, and schools in the {@code poi} layer from
|
||||
* source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/poi">OpenMapTiles
|
||||
* poi sql files</a>.
|
||||
*/
|
||||
public class Poi implements OpenMapTilesSchema.Poi,
|
||||
public class Poi implements
|
||||
OpenMapTilesSchema.Poi,
|
||||
Tables.OsmPoiPoint.Handler,
|
||||
Tables.OsmPoiPolygon.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor {
|
||||
|
||||
private final MultiExpression.MultiExpressionIndex<String> classMapping;
|
||||
private final Translations translations;
|
||||
|
||||
public Poi(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.classMapping = FieldMappings.Class.index();
|
||||
this.translations = translations;
|
||||
}
|
||||
|
||||
private String poiClass(String subclass, String mappingKey) {
|
||||
subclass = coalesce(subclass, "");
|
||||
return classMapping.getOrElse(Map.of(
|
||||
"subclass", subclass,
|
||||
"mapping_key", coalesce(mappingKey, "")
|
||||
), subclass);
|
||||
}
|
||||
/*
|
||||
* process() creates the raw POI feature from OSM elements and postProcess()
|
||||
* assigns the feature rank from order in the tile at render-time.
|
||||
*/
|
||||
|
||||
private static final Map<String, Integer> CLASS_RANKS = Map.ofEntries(
|
||||
entry(FieldValues.CLASS_HOSPITAL, 20),
|
||||
|
@ -106,11 +99,26 @@ public class Poi implements OpenMapTilesSchema.Poi,
|
|||
entry(FieldValues.CLASS_CLOTHING_STORE, 700),
|
||||
entry(FieldValues.CLASS_BAR, 800)
|
||||
);
|
||||
private final MultiExpression.Index<String> classMapping;
|
||||
private final Translations translations;
|
||||
|
||||
public Poi(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.classMapping = FieldMappings.Class.index();
|
||||
this.translations = translations;
|
||||
}
|
||||
|
||||
static int poiClassRank(String clazz) {
|
||||
return CLASS_RANKS.getOrDefault(clazz, 1_000);
|
||||
}
|
||||
|
||||
private String poiClass(String subclass, String mappingKey) {
|
||||
subclass = coalesce(subclass, "");
|
||||
return classMapping.getOrElse(Map.of(
|
||||
"subclass", subclass,
|
||||
"mapping_key", coalesce(mappingKey, "")
|
||||
), subclass);
|
||||
}
|
||||
|
||||
private int minzoom(String subclass, String mappingKey) {
|
||||
boolean lowZoom = ("station".equals(subclass) && "railway".equals(mappingKey)) ||
|
||||
"halt".equals(subclass) || "ferry_terminal".equals(subclass);
|
||||
|
@ -128,7 +136,18 @@ public class Poi implements OpenMapTilesSchema.Poi,
|
|||
setupPoiFeature(element, features.centroidIfConvex(LAYER_NAME));
|
||||
}
|
||||
|
||||
private <T extends Tables.WithSubclass & Tables.WithStation & Tables.WithFunicular & Tables.WithSport & Tables.WithInformation & Tables.WithReligion & Tables.WithMappingKey & Tables.WithName & Tables.WithIndoor & Tables.WithLayer & Tables.WithSource>
|
||||
private <T extends
|
||||
Tables.WithSubclass &
|
||||
Tables.WithStation &
|
||||
Tables.WithFunicular &
|
||||
Tables.WithSport &
|
||||
Tables.WithInformation &
|
||||
Tables.WithReligion &
|
||||
Tables.WithMappingKey &
|
||||
Tables.WithName &
|
||||
Tables.WithIndoor &
|
||||
Tables.WithLayer &
|
||||
Tables.WithSource>
|
||||
void setupPoiFeature(T element, FeatureCollector.Feature output) {
|
||||
String rawSubclass = element.subclass();
|
||||
if ("station".equals(rawSubclass) && "subway".equals(element.station())) {
|
||||
|
@ -157,12 +176,12 @@ public class Poi implements OpenMapTilesSchema.Poi,
|
|||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setPointLabelGridPixelSize(14, 64)
|
||||
.setZorder(-rankOrder)
|
||||
.setZoomRange(minzoom(element.subclass(), element.mappingKey()), 14);
|
||||
.setMinZoom(minzoom(element.subclass(), element.mappingKey()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom,
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
// infer the "rank" field from the order of features within each label grid square
|
||||
LongIntMap groupCounts = new LongIntHashMap();
|
||||
for (int i = items.size() - 1; i >= 0; i--) {
|
||||
VectorTile.Feature feature = items.get(i);
|
||||
|
|
|
@ -35,14 +35,14 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.*;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.*;
|
||||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.FeatureMerge;
|
||||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
|
@ -54,11 +54,13 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import org.locationtech.jts.geom.LineString;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/transportation
|
||||
* Defines the logic for generating map elements for roads, shipways, railroads, and paths in the {@code transportation}
|
||||
* layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/transportation">OpenMapTiles
|
||||
* transportation sql files</a>.
|
||||
*/
|
||||
public class Transportation implements
|
||||
OpenMapTilesSchema.Transportation,
|
||||
|
@ -70,9 +72,13 @@ public class Transportation implements
|
|||
OpenMapTilesProfile.FeaturePostProcessor,
|
||||
OpenMapTilesProfile.IgnoreWikidata {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Transportation.class);
|
||||
/*
|
||||
* Generates the shape for roads, trails, ferries, railways with detailed
|
||||
* attributes for rendering, but not any names. The transportation_name
|
||||
* layer includes names, but less detailed attributes.
|
||||
*/
|
||||
|
||||
private static final MultiExpression.MultiExpressionIndex<String> classMapping = FieldMappings.Class.index();
|
||||
private static final MultiExpression.Index<String> classMapping = FieldMappings.Class.index();
|
||||
private static final Set<String> RAILWAY_RAIL_VALUES = Set.of(
|
||||
FieldValues.SUBCLASS_RAIL,
|
||||
FieldValues.SUBCLASS_NARROW_GAUGE,
|
||||
|
@ -102,21 +108,19 @@ public class Transportation implements
|
|||
"paved", "asphalt", "cobblestone", "concrete", "concrete:lanes", "concrete:plates", "metal",
|
||||
"paving_stones", "sett", "unhewn_cobblestone", "wood"
|
||||
);
|
||||
private final Map<String, Integer> MINZOOMS;
|
||||
private static final ZoomFunction.MeterToPixelThresholds MIN_LENGTH = ZoomFunction.meterThresholds()
|
||||
.put(7, 50)
|
||||
.put(6, 100)
|
||||
.put(5, 500)
|
||||
.put(4, 1_000);
|
||||
private static final double PIXEL = 256d / 4096d;
|
||||
private final boolean z13Paths;
|
||||
private final Map<String, Integer> MINZOOMS;
|
||||
private final Stats stats;
|
||||
private final FlatmapConfig config;
|
||||
|
||||
public Transportation(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.config = config;
|
||||
this.stats = stats;
|
||||
this.z13Paths = config.arguments().getBoolean(
|
||||
boolean z13Paths = config.arguments().getBoolean(
|
||||
"transportation_z13_paths",
|
||||
"transportation(_name) layer: show paths on z13",
|
||||
false
|
||||
|
@ -134,11 +138,13 @@ public class Transportation implements
|
|||
);
|
||||
}
|
||||
|
||||
/** Returns a value for {@code surface} tag constrained to a small set of known values from raw OSM data. */
|
||||
private static String surface(String value) {
|
||||
return value == null ? null : SURFACE_PAVED_VALUES.contains(value) ? FieldValues.SURFACE_PAVED :
|
||||
SURFACE_UNPAVED_VALUES.contains(value) ? FieldValues.SURFACE_UNPAVED : null;
|
||||
}
|
||||
|
||||
/** Returns a value for {@code service} tag constrained to a small set of known values from raw OSM data. */
|
||||
private static String service(String value) {
|
||||
return (value == null || !SERVICE_VALUES.contains(value)) ? null : value;
|
||||
}
|
||||
|
@ -209,8 +215,8 @@ public class Transportation implements
|
|||
}
|
||||
|
||||
boolean highwayRamp = highwayIsLink || "steps".equals(highway);
|
||||
Integer rampAboveZ12 = (highwayRamp || element.isRamp()) ? 1 : 0;
|
||||
Integer rampBelowZ12 = highwayRamp ? 1 : 0;
|
||||
int rampAboveZ12 = (highwayRamp || element.isRamp()) ? 1 : 0;
|
||||
int rampBelowZ12 = highwayRamp ? 1 : 0;
|
||||
|
||||
FeatureCollector.Feature feature = features.line(LAYER_NAME).setBufferPixels(BUFFER_SIZE)
|
||||
// main attributes at all zoom levels (used for grouping <= z8)
|
||||
|
@ -228,9 +234,9 @@ public class Transportation implements
|
|||
.setAttrWithMinzoom(Fields.HORSE, nullIfEmpty(element.horse()), 9)
|
||||
.setAttrWithMinzoom(Fields.MTB_SCALE, nullIfEmpty(element.mtbScale()), 9)
|
||||
.setAttrWithMinzoom(Fields.SURFACE, surface(element.surface()), 12)
|
||||
.setMinPixelSize(0)
|
||||
.setMinPixelSize(0) // merge during post-processing, then limit by size
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(minzoom, 14);
|
||||
.setMinZoom(minzoom);
|
||||
|
||||
if (isFootwayOrSteps(highway)) {
|
||||
feature
|
||||
|
@ -240,14 +246,6 @@ public class Transportation implements
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom,
|
||||
List<VectorTile.Feature> items) throws GeometryException {
|
||||
double tolerance = config.tolerance(zoom);
|
||||
double minLength = coalesce(MIN_LENGTH.apply(zoom), config.minFeatureSize(zoom)).doubleValue();
|
||||
return FeatureMerge.mergeLineStrings(items, minLength, tolerance, BUFFER_SIZE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmRailwayLinestring element, FeatureCollector features) {
|
||||
String railway = element.railway();
|
||||
|
@ -275,7 +273,7 @@ public class Transportation implements
|
|||
.setAttrWithMinzoom(Fields.BRUNNEL, brunnel(element.isBridge(), element.isTunnel(), element.isFord()), 10)
|
||||
.setAttrWithMinzoom(Fields.LAYER, nullIf(element.layer(), 0), 9)
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(minzoom, 14);
|
||||
.setMinZoom(minzoom);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -290,7 +288,7 @@ public class Transportation implements
|
|||
.setAttr(Fields.BRUNNEL, brunnel(element.isBridge(), element.isTunnel(), element.isFord()))
|
||||
.setAttr(Fields.LAYER, nullIf(element.layer(), 0))
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(12, 14);
|
||||
.setMinZoom(12);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -304,7 +302,7 @@ public class Transportation implements
|
|||
.setAttr(Fields.BRUNNEL, brunnel(element.isBridge(), element.isTunnel(), element.isFord()))
|
||||
.setAttr(Fields.LAYER, nullIf(element.layer(), 0))
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(11, 14);
|
||||
.setMinZoom(11);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -321,8 +319,15 @@ public class Transportation implements
|
|||
.setAttr(Fields.BRUNNEL, brunnel("bridge".equals(manMade), false, false))
|
||||
.setAttr(Fields.LAYER, nullIf(element.layer(), 0))
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(13, 14);
|
||||
.setMinZoom(13);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
double tolerance = config.tolerance(zoom);
|
||||
double minLength = coalesce(MIN_LENGTH.apply(zoom), config.minFeatureSize(zoom)).doubleValue();
|
||||
return FeatureMerge.mergeLineStrings(items, minLength, tolerance, BUFFER_SIZE);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,13 +35,13 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.brunnel;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIf;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.layers.Transportation.highwayClass;
|
||||
import static com.onthegomap.flatmap.openmaptiles.layers.Transportation.highwaySubclass;
|
||||
import static com.onthegomap.flatmap.openmaptiles.layers.Transportation.isFootwayOrSteps;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.brunnel;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIf;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.util.MemoryEstimator.CLASS_HEADER_BYTES;
|
||||
import static com.onthegomap.flatmap.util.MemoryEstimator.POINTER_BYTES;
|
||||
import static com.onthegomap.flatmap.util.MemoryEstimator.estimateSize;
|
||||
|
@ -52,10 +52,10 @@ import com.onthegomap.flatmap.VectorTile;
|
|||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmElement;
|
||||
import com.onthegomap.flatmap.reader.osm.OsmReader;
|
||||
|
@ -79,7 +79,11 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/transportation_name
|
||||
* Defines the logic for generating map elements for road, shipway, rail, and path names in the {@code
|
||||
* transportation_name} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/transportation_name">OpenMapTiles
|
||||
* transportation_name sql files</a>.
|
||||
*/
|
||||
public class TransportationName implements
|
||||
OpenMapTilesSchema.TransportationName,
|
||||
|
@ -89,13 +93,28 @@ public class TransportationName implements
|
|||
OpenMapTilesProfile.OsmRelationPreprocessor,
|
||||
OpenMapTilesProfile.IgnoreWikidata {
|
||||
|
||||
/*
|
||||
* Generate road names from OSM data. Route network and ref are copied
|
||||
* from relations that roads are a part of - except in Great Britain which
|
||||
* uses a naming convention instead of relations.
|
||||
*
|
||||
* The goal is to make name linestrings as long as possible to give clients
|
||||
* the best chance of showing road names at different zoom levels, so do not
|
||||
* limit linestrings by length at process time and merge them at tile
|
||||
* render-time.
|
||||
*
|
||||
* Any 3-way nodes and intersections break line merging so set the
|
||||
* transportation_name_limit_merge argument to true to add temporary
|
||||
* "is link" and "relation" keys to prevent opposite directions of a
|
||||
* divided highway or on/off ramps from getting merged for main highways.
|
||||
*/
|
||||
|
||||
// extra temp key used to group on/off-ramps separately from main highways
|
||||
private static final String LINK_TEMP_KEY = "__islink";
|
||||
private static final String RELATION_ID_TEMP_KEY = "__relid";
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(TransportationName.class);
|
||||
private static final Pattern GREAT_BRITAIN_REF_NETWORK_PATTERN = Pattern.compile("^[AM][0-9AM()]+");
|
||||
private final Map<String, Integer> MINZOOMS;
|
||||
private static final ZoomFunction.MeterToPixelThresholds MIN_LENGTH = ZoomFunction.meterThresholds()
|
||||
.put(6, 20_000)
|
||||
.put(7, 20_000)
|
||||
|
@ -103,15 +122,19 @@ public class TransportationName implements
|
|||
.put(9, 8_000)
|
||||
.put(10, 8_000)
|
||||
.put(11, 8_000);
|
||||
private static final double PIXEL = 256d / 4096d;
|
||||
private static final Comparator<RouteRelation> RELATION_ORDERING = Comparator
|
||||
.<RouteRelation>comparingInt(r -> r.network.ordinal())
|
||||
// TODO also compare network string?
|
||||
.thenComparingInt(r -> r.ref.length())
|
||||
.thenComparing(RouteRelation::ref);
|
||||
private final Map<String, Integer> MINZOOMS;
|
||||
private final boolean brunnel;
|
||||
private final boolean sizeForShield;
|
||||
private final boolean limitMerge;
|
||||
private final boolean z13Paths;
|
||||
private final Stats stats;
|
||||
private final FlatmapConfig config;
|
||||
private PreparedGeometry greatBritain = null;
|
||||
private final AtomicBoolean loggedNoGb = new AtomicBoolean(false);
|
||||
private PreparedGeometry greatBritain = null;
|
||||
|
||||
public TransportationName(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.config = config;
|
||||
|
@ -131,7 +154,7 @@ public class TransportationName implements
|
|||
"transportation_name layer: limit merge so we don't combine different relations to help merge long highways",
|
||||
false
|
||||
);
|
||||
this.z13Paths = config.arguments().getBoolean(
|
||||
boolean z13Paths = config.arguments().getBoolean(
|
||||
"transportation_z13_paths",
|
||||
"transportation(_name) layer: show paths on z13",
|
||||
false
|
||||
|
@ -168,7 +191,6 @@ public class TransportationName implements
|
|||
if (relation.hasTag("route", "road")) {
|
||||
RouteNetwork networkType = null;
|
||||
String network = relation.getString("network");
|
||||
String name = relation.getString("name");
|
||||
String ref = relation.getString("ref");
|
||||
|
||||
if ("US:I".equals(network)) {
|
||||
|
@ -219,7 +241,7 @@ public class TransportationName implements
|
|||
FeatureCollector.Feature feature = features.line(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setBufferPixelOverrides(MIN_LENGTH)
|
||||
// TODO abbreviate road names
|
||||
// TODO abbreviate road names - can't port osml10n because it is AGPL
|
||||
.putAttrs(LanguageUtils.getNamesWithoutTranslations(element.source().tags()))
|
||||
.setAttr(Fields.REF, ref)
|
||||
.setAttr(Fields.REF_LENGTH, ref != null ? ref.length() : null)
|
||||
|
@ -229,7 +251,7 @@ public class TransportationName implements
|
|||
.setAttr(Fields.SUBCLASS, highwaySubclass(highwayClass, null, highway))
|
||||
.setMinPixelSize(0)
|
||||
.setZorder(element.zOrder())
|
||||
.setZoomRange(minzoom, 14);
|
||||
.setMinZoom(minzoom);
|
||||
|
||||
if (brunnel) {
|
||||
feature.setAttr(Fields.BRUNNEL, brunnel(element.isBridge(), element.isTunnel(), element.isFord()));
|
||||
|
@ -261,6 +283,8 @@ public class TransportationName implements
|
|||
.min(RELATION_ORDERING)
|
||||
.orElse(null);
|
||||
if (relation == null && ref != null) {
|
||||
// GB doesn't use regular relations like everywhere else, so if we are
|
||||
// in GB then use a naming convention instead.
|
||||
Matcher refMatcher = GREAT_BRITAIN_REF_NETWORK_PATTERN.matcher(ref);
|
||||
if (refMatcher.find()) {
|
||||
if (greatBritain == null) {
|
||||
|
@ -286,7 +310,7 @@ public class TransportationName implements
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) throws GeometryException {
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
double tolerance = config.tolerance(zoom);
|
||||
double minLength = coalesce(MIN_LENGTH.apply(zoom), 0).doubleValue();
|
||||
// TODO tolerances:
|
||||
|
@ -300,6 +324,7 @@ public class TransportationName implements
|
|||
this::getMinLengthForName;
|
||||
var result = FeatureMerge.mergeLineStrings(items, lengthLimitCalculator, tolerance, BUFFER_SIZE);
|
||||
if (limitMerge) {
|
||||
// remove temp keys that were just used to improve line merging
|
||||
for (var feature : result) {
|
||||
feature.attrs().remove(LINK_TEMP_KEY);
|
||||
feature.attrs().remove(RELATION_ID_TEMP_KEY);
|
||||
|
@ -308,6 +333,7 @@ public class TransportationName implements
|
|||
return result;
|
||||
}
|
||||
|
||||
/** Returns the minimum pixel length that a name will fit into. */
|
||||
private double getMinLengthForName(Map<String, Object> attrs) {
|
||||
Object ref = attrs.get(Fields.REF);
|
||||
Object name = coalesce(attrs.get(Fields.NAME), ref);
|
||||
|
@ -331,6 +357,7 @@ public class TransportationName implements
|
|||
}
|
||||
}
|
||||
|
||||
/** Information extracted from route relations to use when processing ways in that relation. */
|
||||
private static record RouteRelation(
|
||||
String ref,
|
||||
RouteNetwork network,
|
||||
|
@ -345,10 +372,4 @@ public class TransportationName implements
|
|||
MemoryEstimator.estimateSizeLong(id);
|
||||
}
|
||||
}
|
||||
|
||||
private static final Comparator<RouteRelation> RELATION_ORDERING = Comparator
|
||||
.<RouteRelation>comparingInt(r -> r.network.ordinal())
|
||||
// TODO also compare network string?
|
||||
.thenComparingInt(r -> r.ref.length())
|
||||
.thenComparing(RouteRelation::ref);
|
||||
}
|
||||
|
|
|
@ -37,51 +37,51 @@ package com.onthegomap.flatmap.openmaptiles.layers;
|
|||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.openmaptiles.MultiExpression;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.Utils;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.Utils;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/water
|
||||
* Defines the logic for generating map elements for oceans and lakes in the {@code water} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/water">OpenMapTiles
|
||||
* water sql files</a>.
|
||||
*/
|
||||
public class Water implements OpenMapTilesSchema.Water, Tables.OsmWaterPolygon.Handler,
|
||||
OpenMapTilesProfile.NaturalEarthProcessor, OpenMapTilesProfile.OsmWaterPolygonProcessor {
|
||||
public class Water implements
|
||||
OpenMapTilesSchema.Water,
|
||||
Tables.OsmWaterPolygon.Handler,
|
||||
OpenMapTilesProfile.NaturalEarthProcessor,
|
||||
OpenMapTilesProfile.OsmWaterPolygonProcessor {
|
||||
|
||||
private final MultiExpression.MultiExpressionIndex<String> classMapping;
|
||||
/*
|
||||
* At low zoom levels, use natural earth for oceans and major lakes, and at high zoom levels
|
||||
* use OpenStreetMap data. OpenStreetMap data contains smaller bodies of water, but not
|
||||
* large ocean polygons. For oceans, use https://osmdata.openstreetmap.de/data/water-polygons.html
|
||||
* which infers ocean polygons by preprocessing all coastline elements.
|
||||
*/
|
||||
|
||||
private final MultiExpression.Index<String> classMapping;
|
||||
|
||||
public Water(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.classMapping = FieldMappings.Class.index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmWaterPolygon element, FeatureCollector features) {
|
||||
if (!"bay".equals(element.natural())) {
|
||||
features.polygon(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setMinPixelSizeBelowZoom(11, 2)
|
||||
.setZoomRange(6, 14)
|
||||
.setAttr(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0)
|
||||
.setAttrWithMinzoom(Fields.BRUNNEL, Utils.brunnel(element.isBridge(), element.isTunnel()), 12)
|
||||
.setAttr(Fields.CLASS, classMapping.getOrElse(element.source().tags(), FieldValues.CLASS_RIVER));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features) {
|
||||
record WaterInfo(int minZoom, int maxZoom, String clazz) {}
|
||||
WaterInfo info = switch (table) {
|
||||
case "ne_10m_ocean" -> new WaterInfo(5, 5, FieldValues.CLASS_OCEAN);
|
||||
case "ne_50m_ocean" -> new WaterInfo(2, 4, FieldValues.CLASS_OCEAN);
|
||||
case "ne_110m_ocean" -> new WaterInfo(0, 1, FieldValues.CLASS_OCEAN);
|
||||
case "ne_50m_ocean" -> new WaterInfo(2, 4, FieldValues.CLASS_OCEAN);
|
||||
case "ne_10m_ocean" -> new WaterInfo(5, 5, FieldValues.CLASS_OCEAN);
|
||||
|
||||
case "ne_10m_lakes" -> new WaterInfo(4, 5, FieldValues.CLASS_LAKE);
|
||||
case "ne_50m_lakes" -> new WaterInfo(2, 3, FieldValues.CLASS_LAKE);
|
||||
case "ne_110m_lakes" -> new WaterInfo(0, 1, FieldValues.CLASS_LAKE);
|
||||
case "ne_50m_lakes" -> new WaterInfo(2, 3, FieldValues.CLASS_LAKE);
|
||||
case "ne_10m_lakes" -> new WaterInfo(4, 5, FieldValues.CLASS_LAKE);
|
||||
default -> null;
|
||||
};
|
||||
if (info != null) {
|
||||
|
@ -97,6 +97,19 @@ public class Water implements OpenMapTilesSchema.Water, Tables.OsmWaterPolygon.H
|
|||
features.polygon(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.CLASS, FieldValues.CLASS_OCEAN)
|
||||
.setZoomRange(6, 14);
|
||||
.setMinZoom(6);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmWaterPolygon element, FeatureCollector features) {
|
||||
if (!"bay".equals(element.natural())) {
|
||||
features.polygon(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setMinPixelSizeBelowZoom(11, 2)
|
||||
.setMinZoom(6)
|
||||
.setAttr(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0)
|
||||
.setAttrWithMinzoom(Fields.BRUNNEL, Utils.brunnel(element.isBridge(), element.isTunnel()), 12)
|
||||
.setAttr(Fields.CLASS, classMapping.getOrElse(element.source(), FieldValues.CLASS_RIVER));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.carrotsearch.hppc.LongObjectMap;
|
||||
import com.graphhopper.coll.GHLongObjectHashMap;
|
||||
|
@ -43,10 +43,10 @@ import com.onthegomap.flatmap.FeatureCollector;
|
|||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Parse;
|
||||
|
@ -58,16 +58,30 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/water_name
|
||||
* Defines the logic for generating map elements for ocean and lake names in the {@code water_name} layer from source
|
||||
* features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/water_name">OpenMapTiles
|
||||
* water_name sql files</a>.
|
||||
*/
|
||||
public class WaterName implements OpenMapTilesSchema.WaterName,
|
||||
public class WaterName implements
|
||||
OpenMapTilesSchema.WaterName,
|
||||
Tables.OsmMarinePoint.Handler,
|
||||
Tables.OsmWaterPolygon.Handler,
|
||||
OpenMapTilesProfile.NaturalEarthProcessor,
|
||||
OpenMapTilesProfile.LakeCenterlineProcessor {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(WaterName.class);
|
||||
/*
|
||||
* Labels for lakes and oceans come primarily from OpenStreetMap data, but we also join
|
||||
* with the lake centerlines source to get linestring geometries for prominent lakes.
|
||||
* We also join with natural earth to make certain important lake/ocean labels visible
|
||||
* at lower zoom levels.
|
||||
*/
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(WaterName.class);
|
||||
private static final double WORLD_AREA_FOR_70K_SQUARE_METERS =
|
||||
Math.pow(GeoUtils.metersToPixelAtEquator(0, Math.sqrt(70_000)) / 256d, 2);
|
||||
private static final double LOG2 = Math.log(2);
|
||||
private final Translations translations;
|
||||
// need to synchronize updates from multiple threads
|
||||
private final LongObjectMap<Geometry> lakeCenterlines = new GHLongObjectHashMap<>();
|
||||
|
@ -75,32 +89,20 @@ public class WaterName implements OpenMapTilesSchema.WaterName,
|
|||
private final ConcurrentSkipListMap<String, Integer> importantMarinePoints = new ConcurrentSkipListMap<>();
|
||||
private final Stats stats;
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
lakeCenterlines.release();
|
||||
importantMarinePoints.clear();
|
||||
}
|
||||
|
||||
public WaterName(Translations translations, FlatmapConfig config, Stats stats) {
|
||||
this.translations = translations;
|
||||
this.stats = stats;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processNaturalEarth(String table, SourceFeature feature,
|
||||
FeatureCollector features) {
|
||||
if ("ne_10m_geography_marine_polys".equals(table)) {
|
||||
String name = feature.getString("name");
|
||||
Integer scalerank = Parse.parseIntOrNull(feature.getTag("scalerank"));
|
||||
if (name != null && scalerank != null) {
|
||||
name = name.replaceAll("\\s+", " ").trim().toLowerCase();
|
||||
importantMarinePoints.put(name, scalerank);
|
||||
}
|
||||
}
|
||||
public void release() {
|
||||
lakeCenterlines.release();
|
||||
importantMarinePoints.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processLakeCenterline(SourceFeature feature, FeatureCollector features) {
|
||||
// TODO pull lake centerline computation into flatmap?
|
||||
long osmId = Math.abs(feature.getLong("OSM_ID"));
|
||||
if (osmId == 0L) {
|
||||
LOGGER.warn("Bad lake centerline. Tags: " + feature.tags());
|
||||
|
@ -116,15 +118,28 @@ public class WaterName implements OpenMapTilesSchema.WaterName,
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features) {
|
||||
// use natural earth named polygons just as a source of name to zoom-level mappings for later
|
||||
if ("ne_10m_geography_marine_polys".equals(table)) {
|
||||
String name = feature.getString("name");
|
||||
Integer scalerank = Parse.parseIntOrNull(feature.getTag("scalerank"));
|
||||
if (name != null && scalerank != null) {
|
||||
name = name.replaceAll("\\s+", " ").trim().toLowerCase();
|
||||
importantMarinePoints.put(name, scalerank);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmMarinePoint element, FeatureCollector features) {
|
||||
if (!element.name().isBlank()) {
|
||||
String place = element.place();
|
||||
var source = element.source();
|
||||
// use name from OSM, but min zoom from natural earth if it exists
|
||||
// use name from OSM, but get min zoom from natural earth based on fuzzy name match...
|
||||
Integer rank = Parse.parseIntOrNull(source.getTag("rank"));
|
||||
Integer nerank;
|
||||
String name = element.name().toLowerCase();
|
||||
Integer nerank;
|
||||
if ((nerank = importantMarinePoints.get(name)) != null) {
|
||||
rank = nerank;
|
||||
} else if ((nerank = importantMarinePoints.get(source.getString("name:en", "").toLowerCase())) != null) {
|
||||
|
@ -143,14 +158,10 @@ public class WaterName implements OpenMapTilesSchema.WaterName,
|
|||
.putAttrs(LanguageUtils.getNames(source.tags(), translations))
|
||||
.setAttr(Fields.CLASS, place)
|
||||
.setAttr(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0)
|
||||
.setZoomRange(minZoom, 14);
|
||||
.setMinZoom(minZoom);
|
||||
}
|
||||
}
|
||||
|
||||
private static final double WORLD_AREA_FOR_70K_SQUARE_METERS =
|
||||
Math.pow(GeoUtils.metersToPixelAtEquator(0, Math.sqrt(70_000)) / 256d, 2);
|
||||
private static final double LOG2 = Math.log(2);
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmWaterPolygon element, FeatureCollector features) {
|
||||
if (nullIfEmpty(element.name()) != null) {
|
||||
|
@ -159,9 +170,11 @@ public class WaterName implements OpenMapTilesSchema.WaterName,
|
|||
FeatureCollector.Feature feature;
|
||||
int minzoom = 9;
|
||||
if (centerlineGeometry != null) {
|
||||
// prefer lake centerline if it exists
|
||||
feature = features.geometry(LAYER_NAME, centerlineGeometry)
|
||||
.setMinPixelSizeBelowZoom(13, 6 * element.name().length());
|
||||
} else {
|
||||
// otherwise just use a label point inside the lake
|
||||
feature = features.pointOnSurface(LAYER_NAME);
|
||||
Geometry geometry = element.source().worldGeometry();
|
||||
double area = geometry.getArea();
|
||||
|
@ -173,7 +186,7 @@ public class WaterName implements OpenMapTilesSchema.WaterName,
|
|||
.setBufferPixels(BUFFER_SIZE)
|
||||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setAttr(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0)
|
||||
.setZoomRange(minzoom, 14);
|
||||
.setMinZoom(minzoom);
|
||||
} catch (GeometryException e) {
|
||||
e.log(stats, "omt_water_polygon", "Unable to get geometry for water polygon " + element.source().id());
|
||||
}
|
||||
|
|
|
@ -35,18 +35,17 @@ See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for deta
|
|||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles.layers;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.onthegomap.flatmap.FeatureCollector;
|
||||
import com.onthegomap.flatmap.FeatureMerge;
|
||||
import com.onthegomap.flatmap.VectorTile;
|
||||
import com.onthegomap.flatmap.config.FlatmapConfig;
|
||||
import com.onthegomap.flatmap.geo.GeometryException;
|
||||
import com.onthegomap.flatmap.openmaptiles.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.openmaptiles.Utils;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.OpenMapTilesSchema;
|
||||
import com.onthegomap.flatmap.openmaptiles.generated.Tables;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.LanguageUtils;
|
||||
import com.onthegomap.flatmap.openmaptiles.util.Utils;
|
||||
import com.onthegomap.flatmap.reader.SourceFeature;
|
||||
import com.onthegomap.flatmap.stats.Stats;
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
|
@ -55,10 +54,27 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This class is ported to Java from https://github.com/openmaptiles/openmaptiles/tree/master/layers/waterway
|
||||
* Defines the logic for generating river map elements in the {@code waterway} layer from source features.
|
||||
* <p>
|
||||
* This class is ported to Java from <a href="https://github.com/openmaptiles/openmaptiles/tree/master/layers/waterway">OpenMapTiles
|
||||
* waterway sql files</a>.
|
||||
*/
|
||||
public class Waterway implements OpenMapTilesSchema.Waterway, Tables.OsmWaterwayLinestring.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor, OpenMapTilesProfile.NaturalEarthProcessor {
|
||||
public class Waterway implements
|
||||
OpenMapTilesSchema.Waterway,
|
||||
Tables.OsmWaterwayLinestring.Handler,
|
||||
OpenMapTilesProfile.FeaturePostProcessor,
|
||||
OpenMapTilesProfile.NaturalEarthProcessor {
|
||||
|
||||
/*
|
||||
* Uses Natural Earth at lower zoom-levels and OpenStreetMap at higher zoom levels.
|
||||
*
|
||||
* For OpenStreetMap, attempts to merge disconnected linestrings with the same name
|
||||
* at lower zoom levels so that clients can more easily render the name. We also
|
||||
* limit their length at merge-time which only has visibilty into that feature in a
|
||||
* single tile, so at render-time we need to allow through features far enough outside
|
||||
* the tile boundary enough to not accidentally filter out a long river only because a
|
||||
* short segment of it goes through this tile.
|
||||
*/
|
||||
|
||||
private final Translations translations;
|
||||
private final FlatmapConfig config;
|
||||
|
@ -68,7 +84,7 @@ public class Waterway implements OpenMapTilesSchema.Waterway, Tables.OsmWaterway
|
|||
this.translations = translations;
|
||||
}
|
||||
|
||||
private static final Map<String, Integer> minzooms = Map.of(
|
||||
private static final Map<String, Integer> CLASS_MINZOOM = Map.of(
|
||||
"river", 12,
|
||||
"canal", 12,
|
||||
|
||||
|
@ -77,37 +93,19 @@ public class Waterway implements OpenMapTilesSchema.Waterway, Tables.OsmWaterway
|
|||
"ditch", 13
|
||||
);
|
||||
|
||||
private static final ZoomFunction.MeterToPixelThresholds minPixelSizeThresholds = ZoomFunction.meterThresholds()
|
||||
private static final ZoomFunction.MeterToPixelThresholds MIN_PIXEL_LENGTHS = ZoomFunction.meterThresholds()
|
||||
.put(9, 8_000)
|
||||
.put(10, 4_000)
|
||||
.put(11, 1_000);
|
||||
|
||||
@Override
|
||||
public void process(Tables.OsmWaterwayLinestring element, FeatureCollector features) {
|
||||
String waterway = element.waterway();
|
||||
String name = nullIfEmpty(element.name());
|
||||
boolean important = "river".equals(waterway) && name != null;
|
||||
int minzoom = important ? 9 : minzooms.getOrDefault(element.waterway(), 14);
|
||||
features.line(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.CLASS, element.waterway())
|
||||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setZoomRange(minzoom, 14)
|
||||
// details only at higher zoom levels
|
||||
.setAttrWithMinzoom(Fields.BRUNNEL, Utils.brunnel(element.isBridge(), element.isTunnel()), 12)
|
||||
.setAttrWithMinzoom(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0, 12)
|
||||
// at lower zoom levels, we'll merge linestrings and limit length/clip afterwards
|
||||
.setBufferPixelOverrides(minPixelSizeThresholds).setMinPixelSizeBelowZoom(11, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processNaturalEarth(String table, SourceFeature feature, FeatureCollector features) {
|
||||
if (feature.hasTag("featurecla", "River")) {
|
||||
record ZoomRange(int min, int max) {}
|
||||
ZoomRange zoom = switch (table) {
|
||||
case "ne_10m_rivers_lake_centerlines" -> new ZoomRange(6, 8);
|
||||
case "ne_50m_rivers_lake_centerlines" -> new ZoomRange(4, 5);
|
||||
case "ne_110m_rivers_lake_centerlines" -> new ZoomRange(3, 3);
|
||||
case "ne_50m_rivers_lake_centerlines" -> new ZoomRange(4, 5);
|
||||
case "ne_10m_rivers_lake_centerlines" -> new ZoomRange(6, 8);
|
||||
default -> null;
|
||||
};
|
||||
if (zoom != null) {
|
||||
|
@ -120,12 +118,29 @@ public class Waterway implements OpenMapTilesSchema.Waterway, Tables.OsmWaterway
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items)
|
||||
throws GeometryException {
|
||||
public void process(Tables.OsmWaterwayLinestring element, FeatureCollector features) {
|
||||
String waterway = element.waterway();
|
||||
String name = nullIfEmpty(element.name());
|
||||
boolean important = "river".equals(waterway) && name != null;
|
||||
int minzoom = important ? 9 : CLASS_MINZOOM.getOrDefault(element.waterway(), 14);
|
||||
features.line(LAYER_NAME)
|
||||
.setBufferPixels(BUFFER_SIZE)
|
||||
.setAttr(Fields.CLASS, element.waterway())
|
||||
.putAttrs(LanguageUtils.getNames(element.source().tags(), translations))
|
||||
.setMinZoom(minzoom)
|
||||
// details only at higher zoom levels so that named rivers can be merged more aggressively
|
||||
.setAttrWithMinzoom(Fields.BRUNNEL, Utils.brunnel(element.isBridge(), element.isTunnel()), 12)
|
||||
.setAttrWithMinzoom(Fields.INTERMITTENT, element.isIntermittent() ? 1 : 0, 12)
|
||||
// at lower zoom levels, we'll merge linestrings and limit length/clip afterwards
|
||||
.setBufferPixelOverrides(MIN_PIXEL_LENGTHS).setMinPixelSizeBelowZoom(11, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTile.Feature> postProcess(int zoom, List<VectorTile.Feature> items) {
|
||||
if (zoom >= 9 && zoom <= 11) {
|
||||
return FeatureMerge.mergeLineStrings(
|
||||
items,
|
||||
minPixelSizeThresholds.apply(zoom).doubleValue(),
|
||||
MIN_PIXEL_LENGTHS.apply(zoom).doubleValue(),
|
||||
config.tolerance(zoom),
|
||||
BUFFER_SIZE
|
||||
);
|
||||
|
|
|
@ -33,10 +33,10 @@ Design license: CC-BY 4.0
|
|||
|
||||
See https://github.com/openmaptiles/openmaptiles/blob/master/LICENSE.md for details on usage
|
||||
*/
|
||||
package com.onthegomap.flatmap.openmaptiles;
|
||||
package com.onthegomap.flatmap.openmaptiles.util;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Utils.nullIfEmpty;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.coalesce;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.Utils.nullIfEmpty;
|
||||
|
||||
import com.onthegomap.flatmap.util.Translations;
|
||||
import java.util.HashMap;
|
||||
|
@ -46,11 +46,22 @@ import java.util.regex.Pattern;
|
|||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* This class is ported from https://github.com/openmaptiles/openmaptiles-tools/blob/master/sql/zzz_language.sql
|
||||
* Utilities to extract common name fields (name, name_en, name_de, name:latin, name:nonlatin, name_int) that the
|
||||
* OpenMapTiles schema uses across any map element with a name.
|
||||
* <p>
|
||||
* Ported from <a href="https://github.com/openmaptiles/openmaptiles-tools/blob/master/sql/zzz_language.sql">openmaptiles-tools</a>.
|
||||
*/
|
||||
public class LanguageUtils {
|
||||
|
||||
private static void putIfNotNull(Map<String, Object> dest, String key, Object value) {
|
||||
private static final Pattern NONLATIN = Pattern
|
||||
.compile("[^\\x{0000}-\\x{024f}\\x{1E00}-\\x{1EFF}\\x{0300}-\\x{036f}\\x{0259}]");
|
||||
private static final Pattern LETTER = Pattern.compile("[A-Za-zÀ-ÖØ-öø-ÿĀ-ɏ]+");
|
||||
private static final Pattern EMPTY_PARENS = Pattern.compile("(\\([ -.]*\\)|\\[[ -.]*])");
|
||||
private static final Pattern LEADING_TRAILING_JUNK = Pattern.compile("(^\\s*([./-]\\s*)*|(\\s+[./-])*\\s*$)");
|
||||
private static final Pattern WHITESPACE = Pattern.compile("\\s+");
|
||||
private static final Set<String> EN_DE_NAME_KEYS = Set.of("name:en", "name:de");
|
||||
|
||||
private static void putIfNotEmpty(Map<String, Object> dest, String key, Object value) {
|
||||
if (value != null && !value.equals("")) {
|
||||
dest.put(key, value);
|
||||
}
|
||||
|
@ -60,29 +71,24 @@ public class LanguageUtils {
|
|||
return nullIfEmpty(obj == null ? null : obj.toString());
|
||||
}
|
||||
|
||||
private static final Pattern NONLATIN = Pattern
|
||||
.compile("[^\\x{0000}-\\x{024f}\\x{1E00}-\\x{1EFF}\\x{0300}-\\x{036f}\\x{0259}]");
|
||||
|
||||
static boolean isLatin(String string) {
|
||||
static boolean containsOnlyLatinCharacters(String string) {
|
||||
return string != null && !NONLATIN.matcher(string).find();
|
||||
}
|
||||
|
||||
private static String transliterate(Map<String, Object> tags) {
|
||||
private static String transliteratedName(Map<String, Object> tags) {
|
||||
return Translations.transliterate(string(tags.get("name")));
|
||||
}
|
||||
|
||||
private static final Pattern LETTER = Pattern.compile("[A-Za-zÀ-ÖØ-öø-ÿĀ-ɏ]+");
|
||||
private static final Pattern EMPTY_PARENS = Pattern.compile("(\\([ -.]*\\)|\\[[ -.]*])");
|
||||
private static final Pattern LEADING_TRAILING_JUNK = Pattern.compile("(^\\s*([./-]\\s*)*|(\\s+[./-])*\\s*$)");
|
||||
private static final Pattern WHITESPACE = Pattern.compile("\\s+");
|
||||
|
||||
static String removeNonLatin(String name) {
|
||||
static String removeLatinCharacters(String name) {
|
||||
if (name == null) {
|
||||
return null;
|
||||
}
|
||||
var matcher = LETTER.matcher(name);
|
||||
if (matcher.find()) {
|
||||
String result = matcher.replaceAll("");
|
||||
// if the name was "<nonlatin text> (<latin description)"
|
||||
// or "<nonlatin text> - <latin description>"
|
||||
// then remove any of those extra characters now
|
||||
result = EMPTY_PARENS.matcher(result).replaceAll("");
|
||||
result = LEADING_TRAILING_JUNK.matcher(result).replaceAll("");
|
||||
return WHITESPACE.matcher(result).replaceAll(" ");
|
||||
|
@ -90,10 +96,27 @@ public class LanguageUtils {
|
|||
return name.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map with default name attributes (name, name_en, name_de, name:latin, name:nonlatin, name_int) that every
|
||||
* element should have, derived from name, int_name, name:en, and name:de tags on the input element.
|
||||
*
|
||||
* <ul>
|
||||
* <li>name is the original name value from the element</li>
|
||||
* <li>name_en is the original name:en value from the element, or name if missing</li>
|
||||
* <li>name_de is the original name:de value from the element, or name/ name_en if missing</li>
|
||||
* <li>name:latin is the first of name, int_name, or any name: attribute that contains only latin characters</li>
|
||||
* <li>name:nonlatin is any nonlatin part of name if present</li>
|
||||
* <li>name_int is the first of int_name name:en name:latin name</li>
|
||||
* </ul>
|
||||
*/
|
||||
public static Map<String, Object> getNamesWithoutTranslations(Map<String, Object> tags) {
|
||||
return getNames(tags, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map with default name attributes that {@link #getNamesWithoutTranslations(Map)} adds, but also
|
||||
* translations for every language that {@code translations} is configured to handle.
|
||||
*/
|
||||
public static Map<String, Object> getNames(Map<String, Object> tags, Translations translations) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
|
@ -102,24 +125,25 @@ public class LanguageUtils {
|
|||
String nameEn = string(tags.get("name:en"));
|
||||
String nameDe = string(tags.get("name:de"));
|
||||
|
||||
boolean isLatin = isLatin(name);
|
||||
String latin = isLatin ? name : Stream.concat(Stream.of(nameEn, intName, nameDe), getAllNames(tags))
|
||||
.filter(LanguageUtils::isLatin)
|
||||
.findFirst().orElse(null);
|
||||
boolean isLatin = containsOnlyLatinCharacters(name);
|
||||
String latin = isLatin ? name
|
||||
: Stream.concat(Stream.of(nameEn, intName, nameDe), getAllNameTranslationsBesidesEnglishAndGerman(tags))
|
||||
.filter(LanguageUtils::containsOnlyLatinCharacters)
|
||||
.findFirst().orElse(null);
|
||||
if (latin == null && translations != null && translations.getShouldTransliterate()) {
|
||||
latin = transliterate(tags);
|
||||
latin = transliteratedName(tags);
|
||||
}
|
||||
String nonLatin = isLatin ? null : removeNonLatin(name);
|
||||
String nonLatin = isLatin ? null : removeLatinCharacters(name);
|
||||
if (coalesce(nonLatin, "").equals(latin)) {
|
||||
nonLatin = null;
|
||||
}
|
||||
|
||||
putIfNotNull(result, "name", name);
|
||||
putIfNotNull(result, "name_en", coalesce(nameEn, name));
|
||||
putIfNotNull(result, "name_de", coalesce(nameDe, name, nameEn));
|
||||
putIfNotNull(result, "name:latin", latin);
|
||||
putIfNotNull(result, "name:nonlatin", nonLatin);
|
||||
putIfNotNull(result, "name_int", coalesce(
|
||||
putIfNotEmpty(result, "name", name);
|
||||
putIfNotEmpty(result, "name_en", coalesce(nameEn, name));
|
||||
putIfNotEmpty(result, "name_de", coalesce(nameDe, name, nameEn));
|
||||
putIfNotEmpty(result, "name:latin", latin);
|
||||
putIfNotEmpty(result, "name:nonlatin", nonLatin);
|
||||
putIfNotEmpty(result, "name_int", coalesce(
|
||||
intName,
|
||||
nameEn,
|
||||
latin,
|
||||
|
@ -133,9 +157,7 @@ public class LanguageUtils {
|
|||
return result;
|
||||
}
|
||||
|
||||
private static final Set<String> EN_DE_NAME_KEYS = Set.of("name:en", "name:de");
|
||||
|
||||
private static Stream<String> getAllNames(Map<String, Object> tags) {
|
||||
private static Stream<String> getAllNameTranslationsBesidesEnglishAndGerman(Map<String, Object> tags) {
|
||||
return tags.entrySet().stream()
|
||||
.filter(e -> {
|
||||
String key = e.getKey();
|
|
@ -1,10 +1,11 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
package com.onthegomap.flatmap.openmaptiles.util;
|
||||
|
||||
import com.onthegomap.flatmap.util.Parse;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
|
||||
* Common utilities for working with data and the OpenMapTiles schema in {@code layers} implementations.
|
||||
*/
|
||||
public class Utils {
|
||||
|
||||
public static <T> T coalesce(T a, T b) {
|
||||
|
@ -27,26 +28,22 @@ public class Utils {
|
|||
return a != null ? a : b != null ? b : c != null ? c : d != null ? d : e != null ? e : f;
|
||||
}
|
||||
|
||||
public static <T> T coalesceLazy(T a, Supplier<T> b) {
|
||||
return a != null ? a : b.get();
|
||||
}
|
||||
|
||||
public static <T, U> T coalesceLazy(T a, Function<U, T> b, U arg) {
|
||||
return a != null ? a : b.apply(arg);
|
||||
}
|
||||
|
||||
/** Returns {@code a} or {@code nullValue} if {@code a} is null. */
|
||||
public static <T> T nullIf(T a, T nullValue) {
|
||||
return nullValue.equals(a) ? null : a;
|
||||
}
|
||||
|
||||
/** Returns {@code a}, or null if {@code a} is "". */
|
||||
public static String nullIfEmpty(String a) {
|
||||
return (a == null || a.isEmpty()) ? null : a;
|
||||
}
|
||||
|
||||
/** Returns true if {@code a} is null, or its {@link Object#toString()} value is "". */
|
||||
public static boolean nullOrEmpty(Object a) {
|
||||
return a == null || a.toString().isEmpty();
|
||||
}
|
||||
|
||||
/** Returns a map with {@code ele} (meters) and {ele_ft} attributes from an elevation in meters. */
|
||||
public static Map<String, Object> elevationTags(int meters) {
|
||||
return Map.of(
|
||||
"ele", meters,
|
||||
|
@ -54,16 +51,23 @@ public class Utils {
|
|||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map with {@code ele} (meters) and {ele_ft} attributes from an elevation string in meters, if {@code
|
||||
* meters} can be parsed as a valid number.
|
||||
*/
|
||||
public static Map<String, Object> elevationTags(String meters) {
|
||||
Integer ele = Parse.parseIntSubstring(meters);
|
||||
return ele == null ? Map.of() : elevationTags(ele);
|
||||
}
|
||||
|
||||
/** Returns "bridge" or "tunnel" string used for "brunnel" attribute by OpenMapTiles schema. */
|
||||
public static String brunnel(boolean isBridge, boolean isTunnel) {
|
||||
return brunnel(isBridge, isTunnel, false);
|
||||
}
|
||||
|
||||
/** Returns "bridge" or "tunnel" or "ford" string used for "brunnel" attribute by OpenMapTiles schema. */
|
||||
public static String brunnel(boolean isBridge, boolean isTunnel, boolean isFord) {
|
||||
return isBridge ? "bridge" : isTunnel ? "tunnel" : isFord ? "ford" : null;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,11 +1,13 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.*;
|
||||
import static com.onthegomap.flatmap.expression.Expression.*;
|
||||
import static com.onthegomap.flatmap.openmaptiles.Generate.parseYaml;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.DynamicTest.dynamicTest;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.onthegomap.flatmap.expression.Expression;
|
||||
import com.onthegomap.flatmap.expression.MultiExpression;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -25,11 +27,11 @@ public class GenerateTest {
|
|||
- value2
|
||||
- '%value3%'
|
||||
"""));
|
||||
assertEquals(MultiExpression.of(Map.of(
|
||||
"output", or(
|
||||
assertEquals(MultiExpression.of(List.of(
|
||||
MultiExpression.entry("output", or(
|
||||
matchAny("key", "value"),
|
||||
matchAny("key2", "value2", "%value3%")
|
||||
)
|
||||
))
|
||||
)), parsed);
|
||||
}
|
||||
|
||||
|
@ -41,11 +43,11 @@ public class GenerateTest {
|
|||
key1: val1
|
||||
key2: val2
|
||||
"""));
|
||||
assertEquals(MultiExpression.of(Map.of(
|
||||
"output", and(
|
||||
assertEquals(MultiExpression.of(List.of(
|
||||
MultiExpression.entry("output", and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
))
|
||||
)), parsed);
|
||||
}
|
||||
|
||||
|
@ -58,14 +60,14 @@ public class GenerateTest {
|
|||
key1: val1
|
||||
key2: val2
|
||||
"""));
|
||||
assertEquals(MultiExpression.of(Map.of(
|
||||
"output", or(
|
||||
assertEquals(MultiExpression.of(List.of(
|
||||
MultiExpression.entry("output", or(
|
||||
matchAny("key0", "val0"),
|
||||
and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
)
|
||||
))
|
||||
)), parsed);
|
||||
}
|
||||
|
||||
|
@ -79,14 +81,14 @@ public class GenerateTest {
|
|||
key2: val2
|
||||
key3: val3
|
||||
"""));
|
||||
assertEquals(MultiExpression.of(Map.of(
|
||||
"output", and(
|
||||
assertEquals(MultiExpression.of(List.of(
|
||||
MultiExpression.entry("output", and(
|
||||
matchAny("key1", "val1"),
|
||||
or(
|
||||
matchAny("key2", "val2"),
|
||||
matchAny("key3", "val3")
|
||||
)
|
||||
)
|
||||
))
|
||||
)), parsed);
|
||||
}
|
||||
|
||||
|
@ -97,11 +99,11 @@ public class GenerateTest {
|
|||
key1: val1
|
||||
key2:
|
||||
"""));
|
||||
assertEquals(MultiExpression.of(Map.of(
|
||||
"output", or(
|
||||
assertEquals(MultiExpression.of(List.of(
|
||||
MultiExpression.entry("output", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchField("key2")
|
||||
)
|
||||
))
|
||||
)), parsed);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,214 +0,0 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
|
||||
import static com.onthegomap.flatmap.openmaptiles.Expression.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class MultiExpressionTest {
|
||||
|
||||
@Test
|
||||
public void testEmpty() {
|
||||
var index = MultiExpression.<String>of(Map.of()).index();
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key", "value")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleElement() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", matchAny("key", "value")
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBlankStringTreatedAsNotMatch() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", matchAny("key", "value", "")
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of()));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("otherkey", "othervalue")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key", "no")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleMatchField() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", matchField("key")
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value2", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWildcard() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", matchAny("key", "%value%")
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "1value")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "1value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "1value1", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleWildcardsMixedWithExacts() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", matchAny("key", "%value%", "other")
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "1value1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "other")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "1value1", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key", "other", "otherkey", "othervalue")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value", "key3", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "value")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAnd() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key1", "no", "key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key1", "val1", "key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key1", "val1")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOr() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "no", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "no")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key2", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key1", "no", "key2", "no")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNot() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", and(
|
||||
matchAny("key1", "val1"),
|
||||
not(
|
||||
matchAny("key2", "val2")
|
||||
)
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of("key1", "val1", "key2", "val2")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key2", "val3")));
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1", "key3", "val2")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMatchesMultiple() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
),
|
||||
"b", or(
|
||||
matchAny("key2", "val2"),
|
||||
matchAny("key3", "val3")
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of("a"), index.getMatches(Map.of("key1", "val1")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(Map.of("key2", "val2")));
|
||||
assertSameElements(List.of("b"), index.getMatches(Map.of("key3", "val3")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(Map.of("key2", "val2", "key3", "val3")));
|
||||
assertSameElements(List.of("a", "b"), index.getMatches(Map.of("key1", "val1", "key3", "val3")));
|
||||
assertSameElements(List.of(), index.getMatches(Map.of()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTracksMatchingKey() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", or(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key2", "val2")
|
||||
),
|
||||
"b", or(
|
||||
matchAny("key2", "val2"),
|
||||
matchAny("key3", "val3")
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of(new MultiExpression.MultiExpressionIndex.MatchWithTriggers<>(
|
||||
"a", List.of("key1")
|
||||
)), index.getMatchesWithTriggers(Map.of("key1", "val1")));
|
||||
assertSameElements(List.of(new MultiExpression.MultiExpressionIndex.MatchWithTriggers<>(
|
||||
"a", List.of("key2")
|
||||
), new MultiExpression.MultiExpressionIndex.MatchWithTriggers<>(
|
||||
"b", List.of("key2")
|
||||
)), index.getMatchesWithTriggers(Map.of("key2", "val2")));
|
||||
assertSameElements(List.of(new MultiExpression.MultiExpressionIndex.MatchWithTriggers<>(
|
||||
"b", List.of("key3")
|
||||
)), index.getMatchesWithTriggers(Map.of("key3", "val3")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTracksMatchingKeyFromCorrectPath() {
|
||||
var index = MultiExpression.of(Map.of(
|
||||
"a", or(
|
||||
and(
|
||||
matchAny("key3", "val3"),
|
||||
matchAny("key2", "val2")
|
||||
),
|
||||
and(
|
||||
matchAny("key1", "val1"),
|
||||
matchAny("key3", "val3")
|
||||
)
|
||||
)
|
||||
)).index();
|
||||
assertSameElements(List.of(new MultiExpression.MultiExpressionIndex.MatchWithTriggers<>(
|
||||
"a", List.of("key1", "key3")
|
||||
)), index.getMatchesWithTriggers(Map.of("key1", "val1", "key3", "val3")));
|
||||
}
|
||||
|
||||
private static <T> void assertSameElements(List<T> a, List<T> b) {
|
||||
assertEquals(
|
||||
a.stream().sorted(Comparator.comparing(Object::toString)).toList(),
|
||||
b.stream().sorted(Comparator.comparing(Object::toString)).toList()
|
||||
);
|
||||
}
|
||||
}
|
|
@ -270,7 +270,7 @@ public class TransportationTest extends AbstractLayerTest {
|
|||
))));
|
||||
}
|
||||
|
||||
OsmElement.Relation relUS = new OsmElement.Relation(1);
|
||||
final OsmElement.Relation relUS = new OsmElement.Relation(1);
|
||||
|
||||
{
|
||||
relUS.setTag("type", "route");
|
||||
|
@ -279,7 +279,7 @@ public class TransportationTest extends AbstractLayerTest {
|
|||
relUS.setTag("ref", "3");
|
||||
}
|
||||
|
||||
OsmElement.Relation relMA = new OsmElement.Relation(2);
|
||||
final OsmElement.Relation relMA = new OsmElement.Relation(2);
|
||||
|
||||
{
|
||||
relMA.setTag("type", "route");
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package com.onthegomap.flatmap.openmaptiles;
|
||||
package com.onthegomap.flatmap.openmaptiles.util;
|
||||
|
||||
import static com.onthegomap.flatmap.TestUtils.assertSubmap;
|
||||
import static com.onthegomap.flatmap.openmaptiles.LanguageUtils.isLatin;
|
||||
import static com.onthegomap.flatmap.openmaptiles.util.LanguageUtils.containsOnlyLatinCharacters;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
|
@ -69,7 +69,7 @@ public class LanguageUtilsTest {
|
|||
})
|
||||
public void testIsLatin(String in, boolean isLatin) {
|
||||
if (!isLatin) {
|
||||
assertFalse(isLatin(in));
|
||||
assertFalse(containsOnlyLatinCharacters(in));
|
||||
} else {
|
||||
assertEquals(in, LanguageUtils.getNames(Map.of(
|
||||
"name", in
|
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
set -o nounset
|
||||
|
||||
JAR="flatmap-openmaptiles/target/flatmap-openmaptiles-0.1-SNAPSHOT-fatjar.jar"
|
||||
|
||||
AREA="${1:-monaco}"
|
||||
shift
|
||||
|
||||
echo "Building..."
|
||||
mvn -DskipTests=true --projects flatmap-openmaptiles -am package
|
||||
|
||||
echo "Running..."
|
||||
java -jar "$JAR" --force=true --area="${AREA}" $*
|
|
@ -1,21 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
set -o nounset
|
||||
|
||||
JAR="flatmap-openmaptiles/target/flatmap-openmaptiles-0.1-SNAPSHOT-fatjar.jar"
|
||||
|
||||
echo "Downloading data..."
|
||||
AREA="${1:-north-america_us_massachusetts}"
|
||||
./scripts/download-osm.sh "${AREA}"
|
||||
./scripts/download-other-sources.sh
|
||||
|
||||
if [ ! -f "$JAR" ]; then
|
||||
echo "Building..."
|
||||
mvn -DskipTests=true --projects flatmap-openmaptiles -am clean package
|
||||
fi
|
||||
|
||||
echo "Running..."
|
||||
java -cp "$JAR" com.onthegomap.flatmap.openmaptiles.OpenMapTilesMain \
|
||||
-force=true -input="./data/sources/${AREA}.pbf"
|
|
@ -0,0 +1,15 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
set -o nounset
|
||||
|
||||
JAR="flatmap-openmaptiles/target/flatmap-openmaptiles-0.1-SNAPSHOT-fatjar.jar"
|
||||
TAG="${1:-"v3.12.2"}"
|
||||
echo "tag=${TAG}"
|
||||
|
||||
echo "Building..."
|
||||
mvn -DskipTests=true --projects flatmap-openmaptiles -am package
|
||||
|
||||
echo "Running..."
|
||||
java -cp "$JAR" com.onthegomap.flatmap.openmaptiles.Generate -tag="${TAG}"
|
Ładowanie…
Reference in New Issue