Mirror of https://github.com/onthegomap/planetiler
some refactoring
parent
10db8f1fe1
commit
d7b0414d15
|
@ -2,12 +2,13 @@ package com.onthegomap.flatmap;
|
|||
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -22,18 +23,27 @@ public class Arguments {
|
|||
return System.getProperty(key, defaultValue).trim();
|
||||
}
|
||||
|
||||
public double[] bounds(String arg, String description, OsmInputFile osmInputFile) {
|
||||
private String getArg(String key) {
|
||||
String value = System.getProperty(key);
|
||||
return value == null ? null : value.trim();
|
||||
}
|
||||
|
||||
public Envelope bounds(String arg, String description, BoundsProvider defaultBounds) {
|
||||
String input = System.getProperty(arg, null);
|
||||
double[] result;
|
||||
Envelope result;
|
||||
if (input == null) {
|
||||
// get from osm.pbf
|
||||
result = osmInputFile.getBounds();
|
||||
// get from input file
|
||||
result = defaultBounds.getBounds();
|
||||
} else if ("world".equalsIgnoreCase(input)) {
|
||||
result = GeoUtils.WORLD_LAT_LON_BOUNDS;
|
||||
} else {
|
||||
result = Stream.of(input.split("[\\s,]+")).mapToDouble(Double::parseDouble).toArray();
|
||||
double[] bounds = Stream.of(input.split("[\\s,]+")).mapToDouble(Double::parseDouble).toArray();
|
||||
if (bounds.length != 4) {
|
||||
throw new IllegalArgumentException("bounds must have 4 coordinates, got: " + input);
|
||||
}
|
||||
result = new Envelope(bounds[0], bounds[2], bounds[1], bounds[3]);
|
||||
}
|
||||
LOGGER.info(description + ": " + Arrays.toString(result));
|
||||
LOGGER.info(description + ": " + result);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -43,19 +53,19 @@ public class Arguments {
|
|||
return value;
|
||||
}
|
||||
|
||||
public File file(String arg, String description, String defaultValue) {
|
||||
String value = getArg(arg, defaultValue);
|
||||
File file = new File(value);
|
||||
public Path file(String arg, String description, Path defaultValue) {
|
||||
String value = getArg(arg);
|
||||
Path file = value == null ? defaultValue : Path.of(value);
|
||||
LOGGER.info(description + ": " + value);
|
||||
return file;
|
||||
}
|
||||
|
||||
public File inputFile(String arg, String description, String defaultValue) {
|
||||
File file = file(arg, description, defaultValue);
|
||||
if (!file.exists()) {
|
||||
throw new IllegalArgumentException(file + " does not exist");
|
||||
public Path inputFile(String arg, String description, Path defaultValue) {
|
||||
Path path = file(arg, description, defaultValue);
|
||||
if (!Files.exists(path)) {
|
||||
throw new IllegalArgumentException(path + " does not exist");
|
||||
}
|
||||
return file;
|
||||
return path;
|
||||
}
|
||||
|
||||
public boolean get(String arg, String description, boolean defaultValue) {
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
public interface BoundsProvider {
|
||||
|
||||
BoundsProvider WORLD = () -> GeoUtils.WORLD_LAT_LON_BOUNDS;
|
||||
|
||||
Envelope getBounds();
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
import java.time.Duration;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
public record CommonParams(
|
||||
Envelope bounds,
|
||||
int threads,
|
||||
Duration logInterval,
|
||||
int minzoom,
|
||||
int maxzoom,
|
||||
boolean deferIndexCreation,
|
||||
boolean optimizeDb
|
||||
) {
|
||||
|
||||
public CommonParams {
|
||||
if (minzoom > maxzoom) {
|
||||
throw new IllegalArgumentException("Minzoom cannot be greater than maxzoom");
|
||||
}
|
||||
if (minzoom < 0) {
|
||||
throw new IllegalArgumentException("Minzoom must be >= 0, was " + minzoom);
|
||||
}
|
||||
if (maxzoom >= 14) {
|
||||
throw new IllegalArgumentException("Max zoom must be <= 14, was " + maxzoom);
|
||||
}
|
||||
}
|
||||
|
||||
public static CommonParams from(Arguments arguments) {
|
||||
return from(arguments, BoundsProvider.WORLD);
|
||||
}
|
||||
|
||||
public static CommonParams from(Arguments arguments, BoundsProvider defaultBounds) {
|
||||
return new CommonParams(
|
||||
arguments.bounds("bounds", "bounds", defaultBounds),
|
||||
arguments.threads(),
|
||||
arguments.duration("loginterval", "time between logs", "10s"),
|
||||
arguments.integer("minzoom", "minimum zoom level", 0),
|
||||
arguments.integer("maxzoom", "maximum zoom level (limit 14)", 14),
|
||||
arguments.get("defer_mbtiles_index_creation", "add index to mbtiles file after finished writing", false),
|
||||
arguments.get("optimize_db", "optimize mbtiles after writing", false)
|
||||
);
|
||||
}
|
||||
|
||||
}
|
|
@ -16,9 +16,9 @@ import org.slf4j.LoggerFactory;
|
|||
public class FeatureRenderer {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(FeatureRenderer.class);
|
||||
private final FlatMapConfig config;
|
||||
private final CommonParams config;
|
||||
|
||||
public FeatureRenderer(FlatMapConfig config) {
|
||||
public FeatureRenderer(CommonParams config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import java.time.Duration;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
public record FlatMapConfig(
|
||||
Profile profile,
|
||||
Envelope envelope,
|
||||
int threads,
|
||||
Stats stats,
|
||||
Duration logInterval
|
||||
) {
|
||||
|
||||
}
|
|
@ -0,0 +1,245 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.geo.TileCoord;
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.DoubleStream;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Mbtiles implements Closeable {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Mbtiles.class);
|
||||
|
||||
private static final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
private final Connection connection;
|
||||
|
||||
private Mbtiles(Connection connection) {
|
||||
this.connection = connection;
|
||||
}
|
||||
|
||||
public static Mbtiles newInMemoryDatabase() {
|
||||
try {
|
||||
return new Mbtiles(DriverManager.getConnection("jdbc:sqlite::memory:"));
|
||||
} catch (SQLException throwables) {
|
||||
throw new IllegalStateException("Unable to create in-memory database", throwables);
|
||||
}
|
||||
}
|
||||
|
||||
public static Mbtiles newFileDatabase(Path path) {
|
||||
try {
|
||||
return new Mbtiles(DriverManager.getConnection("jdbc:sqlite:" + path.toAbsolutePath()));
|
||||
} catch (SQLException throwables) {
|
||||
throw new IllegalArgumentException("Unable to open " + path, throwables);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
try {
|
||||
connection.close();
|
||||
} catch (SQLException throwables) {
|
||||
throw new IOException(throwables);
|
||||
}
|
||||
}
|
||||
|
||||
public void addIndex() {
|
||||
}
|
||||
|
||||
public void setupSchema() {
|
||||
}
|
||||
|
||||
public void tuneForWrites() {
|
||||
}
|
||||
|
||||
public void vacuumAnalyze() {
|
||||
}
|
||||
|
||||
public BatchedTileWriter newBatchedTileWriter() {
|
||||
return new BatchedTileWriter();
|
||||
}
|
||||
|
||||
public class BatchedTileWriter implements AutoCloseable {
|
||||
|
||||
public void write(TileCoord tile, byte[] data) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public static record MetadataRow(String name, String value) {
|
||||
|
||||
}
|
||||
|
||||
public static record MetadataJson(List<VectorLayer> vectorLayers) {
|
||||
|
||||
public static MetadataJson fromJson(String json) {
|
||||
try {
|
||||
return objectMapper.readValue(json, MetadataJson.class);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalStateException("Invalid metadata json: " + json, e);
|
||||
}
|
||||
}
|
||||
|
||||
public String toJson() {
|
||||
try {
|
||||
return objectMapper.writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalArgumentException("Unable to encode as string: " + this, e);
|
||||
}
|
||||
}
|
||||
|
||||
public enum FieldType {
|
||||
NUMBER("Number"),
|
||||
BOOLEAN("Boolean"),
|
||||
STRING("String");
|
||||
|
||||
private final String name;
|
||||
|
||||
FieldType(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
|
||||
public static record VectorLayer(
|
||||
String id,
|
||||
Map<String, FieldType> fields,
|
||||
Optional<String> description,
|
||||
OptionalInt minzoom,
|
||||
OptionalInt maxzoom
|
||||
) {
|
||||
|
||||
public VectorLayer(String id, Map<String, FieldType> fields) {
|
||||
this(id, fields, Optional.empty(), OptionalInt.empty(), OptionalInt.empty());
|
||||
}
|
||||
|
||||
public VectorLayer copyWithDescription(String newDescription) {
|
||||
return new VectorLayer(id, fields, Optional.of(newDescription), minzoom, maxzoom);
|
||||
}
|
||||
|
||||
public VectorLayer copyWithMinzoom(int newMinzoom) {
|
||||
return new VectorLayer(id, fields, description, OptionalInt.of(newMinzoom), maxzoom);
|
||||
}
|
||||
|
||||
public VectorLayer copyWithMaxzoom(int newMaxzoom) {
|
||||
return new VectorLayer(id, fields, description, minzoom, OptionalInt.of(newMaxzoom));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Metadata metadata() {
|
||||
return new Metadata();
|
||||
}
|
||||
|
||||
public class Metadata {
|
||||
|
||||
private static String join(double... items) {
|
||||
return DoubleStream.of(items).mapToObj(Double::toString).collect(Collectors.joining(","));
|
||||
}
|
||||
|
||||
public Metadata setMetadata(String name, Object value) {
|
||||
if (value != null) {
|
||||
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public Metadata setName(String value) {
|
||||
return setMetadata("name", value);
|
||||
}
|
||||
|
||||
public Metadata setFormat(String format) {
|
||||
return setMetadata("format", format);
|
||||
}
|
||||
|
||||
public Metadata setBounds(double left, double bottom, double right, double top) {
|
||||
return setMetadata("bounds", join(left, bottom, right, top));
|
||||
}
|
||||
|
||||
public Metadata setBounds(Envelope envelope) {
|
||||
return setBounds(envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY());
|
||||
}
|
||||
|
||||
public Metadata setCenter(double longitude, double latitude, double zoom) {
|
||||
return setMetadata("center", join(longitude, latitude, zoom));
|
||||
}
|
||||
|
||||
public Metadata setBoundsAndCenter(Envelope envelope) {
|
||||
return setBounds(envelope).setCenter(envelope);
|
||||
}
|
||||
|
||||
public Metadata setCenter(Envelope envelope) {
|
||||
Coordinate center = envelope.centre();
|
||||
double zoom = GeoUtils.getZoomFromLonLatBounds(envelope);
|
||||
return setCenter(center.x, center.y, zoom);
|
||||
}
|
||||
|
||||
public Metadata setMinzoom(int value) {
|
||||
return setMetadata("minzoom", value);
|
||||
}
|
||||
|
||||
public Metadata setMaxzoom(int maxZoom) {
|
||||
return setMetadata("minzoom", maxZoom);
|
||||
}
|
||||
|
||||
public Metadata setAttribution(String value) {
|
||||
return setMetadata("attribution", value);
|
||||
}
|
||||
|
||||
public Metadata setDescription(String value) {
|
||||
return setMetadata("description", value);
|
||||
}
|
||||
|
||||
public Metadata setType(String value) {
|
||||
return setMetadata("type", value);
|
||||
}
|
||||
|
||||
public Metadata setTypeIsOverlay() {
|
||||
return setType("overlay");
|
||||
}
|
||||
|
||||
public Metadata setTypeIsBaselayer() {
|
||||
return setType("baselayer");
|
||||
}
|
||||
|
||||
public Metadata setVersion(String value) {
|
||||
return setMetadata("version", value);
|
||||
}
|
||||
|
||||
public Metadata setJson(String value) {
|
||||
return setMetadata("json", value);
|
||||
}
|
||||
|
||||
public Metadata setJson(MetadataJson value) {
|
||||
return setJson(value.toJson());
|
||||
}
|
||||
|
||||
public Map<String, String> getAll() {
|
||||
return Map.of();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -6,8 +6,9 @@ import com.onthegomap.flatmap.monitoring.ProgressLoggers;
|
|||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
@ -22,9 +23,15 @@ public class MbtilesWriter {
|
|||
private final AtomicLong featuresProcessed = new AtomicLong(0);
|
||||
private final AtomicLong memoizedTiles = new AtomicLong(0);
|
||||
private final AtomicLong tiles = new AtomicLong(0);
|
||||
private final Path path;
|
||||
private final CommonParams config;
|
||||
private final Profile profile;
|
||||
private final Stats stats;
|
||||
|
||||
private MbtilesWriter(Stats stats) {
|
||||
private MbtilesWriter(Path path, CommonParams config, Profile profile, Stats stats) {
|
||||
this.path = path;
|
||||
this.config = config;
|
||||
this.profile = profile;
|
||||
this.stats = stats;
|
||||
}
|
||||
|
||||
|
@ -32,10 +39,14 @@ public class MbtilesWriter {
|
|||
|
||||
}
|
||||
|
||||
public static void writeOutput(long featureCount, FeatureGroup features, File output, FlatMapConfig config) {
|
||||
Stats stats = config.stats();
|
||||
output.delete();
|
||||
MbtilesWriter writer = new MbtilesWriter(config.stats());
|
||||
public static void writeOutput(long featureCount, FeatureGroup features, Path output, Profile profile,
|
||||
CommonParams config, Stats stats) {
|
||||
try {
|
||||
Files.deleteIfExists(output);
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException("Unable to delete " + output);
|
||||
}
|
||||
MbtilesWriter writer = new MbtilesWriter(output, config, profile, stats);
|
||||
|
||||
var topology = Topology.start("mbtiles", stats)
|
||||
.readFrom("reader", features)
|
||||
|
@ -81,8 +92,42 @@ public class MbtilesWriter {
|
|||
}
|
||||
}
|
||||
|
||||
private void tileWriter(Supplier<RenderedTile> prev) throws Exception {
|
||||
private void tileWriter(Supplier<RenderedTile> tiles) throws Exception {
|
||||
try (Mbtiles db = Mbtiles.newFileDatabase(path)) {
|
||||
db.setupSchema();
|
||||
db.tuneForWrites();
|
||||
if (!config.deferIndexCreation()) {
|
||||
db.addIndex();
|
||||
} else {
|
||||
LOGGER.info("Deferring index creation until after tiles are written.");
|
||||
}
|
||||
|
||||
db.metadata()
|
||||
.setName(profile.name())
|
||||
.setFormat("pbf")
|
||||
.setDescription(profile.description())
|
||||
.setAttribution(profile.attribution())
|
||||
.setVersion(profile.version())
|
||||
.setTypeIsBaselayer()
|
||||
.setBoundsAndCenter(config.bounds())
|
||||
.setMinzoom(config.minzoom())
|
||||
.setMaxzoom(config.maxzoom())
|
||||
.setJson(stats.getTileStats());
|
||||
|
||||
try (var batchedWriter = db.newBatchedTileWriter()) {
|
||||
RenderedTile tile;
|
||||
while ((tile = tiles.get()) != null) {
|
||||
batchedWriter.write(tile.tile(), tile.contents);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.deferIndexCreation()) {
|
||||
db.addIndex();
|
||||
}
|
||||
if (config.optimizeDb()) {
|
||||
db.vacuumAnalyze();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] gzipCompress(byte[] uncompressedData) throws IOException {
|
||||
|
|
|
@ -7,14 +7,12 @@ import com.onthegomap.flatmap.profiles.OpenMapTilesProfile;
|
|||
import com.onthegomap.flatmap.reader.NaturalEarthReader;
|
||||
import com.onthegomap.flatmap.reader.OpenStreetMapReader;
|
||||
import com.onthegomap.flatmap.reader.ShapefileReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -27,26 +25,24 @@ public class OpenMapTilesMain {
|
|||
var stats = arguments.getStats();
|
||||
stats.startTimer("import");
|
||||
LOGGER.info("Arguments:");
|
||||
Path sourcesDir = Path.of("data", "sources");
|
||||
OsmInputFile osmInputFile = new OsmInputFile(
|
||||
arguments.inputFile("input", "OSM input file", "./data/sources/north-america_us_massachusetts.pbf"));
|
||||
File centerlines = arguments
|
||||
.inputFile("centerline", "lake centerlines input", "./data/sources/lake_centerline.shp.zip");
|
||||
File naturalEarth = arguments.inputFile("natural_earth", "natural earth input",
|
||||
"./data/sources/natural_earth_vector.sqlite.zip");
|
||||
File waterPolygons = arguments.inputFile("water_polygons", "water polygons input",
|
||||
"./data/sources/water-polygons-split-3857.zip");
|
||||
double[] bounds = arguments.bounds("bounds", "bounds", osmInputFile);
|
||||
Envelope envelope = new Envelope(bounds[0], bounds[2], bounds[1], bounds[3]);
|
||||
int threads = arguments.threads();
|
||||
Duration logInterval = arguments.duration("loginterval", "time between logs", "10s");
|
||||
Path tmpDir = arguments.file("tmpdir", "temp directory", "./data/tmp").toPath();
|
||||
arguments.inputFile("input", "OSM input file", sourcesDir.resolve("north-america_us_massachusetts.pbf")));
|
||||
Path centerlines = arguments
|
||||
.inputFile("centerline", "lake centerlines input", sourcesDir.resolve("lake_centerline.shp.zip"));
|
||||
Path naturalEarth = arguments
|
||||
.inputFile("natural_earth", "natural earth input", sourcesDir.resolve("natural_earth_vector.sqlite.zip"));
|
||||
Path waterPolygons = arguments
|
||||
.inputFile("water_polygons", "water polygons input", sourcesDir.resolve("water-polygons-split-3857.zip"));
|
||||
Path tmpDir = arguments.file("tmpdir", "temp directory", Path.of("data", "tmp"));
|
||||
boolean fetchWikidata = arguments.get("fetch_wikidata", "fetch wikidata translations", false);
|
||||
boolean useWikidata = arguments.get("use_wikidata", "use wikidata translations", true);
|
||||
File wikidataNamesFile = arguments.file("wikidata_cache", "wikidata cache file",
|
||||
"./data/sources/wikidata_names.json");
|
||||
File output = arguments.file("output", "mbtiles output file", "./massachusetts.mbtiles");
|
||||
Path wikidataNamesFile = arguments.file("wikidata_cache", "wikidata cache file",
|
||||
Path.of("data", "sources", "wikidata_names.json"));
|
||||
Path output = arguments.file("output", "mbtiles output file", Path.of("massachusetts.mbtiles"));
|
||||
List<String> languages = arguments.get("name_languages", "languages to use",
|
||||
"en,ru,ar,zh,ja,ko,fr,de,fi,pl,es,be,br,he".split(","));
|
||||
CommonParams config = CommonParams.from(arguments, osmInputFile);
|
||||
|
||||
LOGGER.info("Building OpenMapTiles profile into " + output + " in these phases:");
|
||||
if (fetchWikidata) {
|
||||
|
@ -64,42 +60,42 @@ public class OpenMapTilesMain {
|
|||
var profile = new OpenMapTilesProfile();
|
||||
|
||||
FileUtils.forceMkdir(tmpDir.toFile());
|
||||
File nodeDb = tmpDir.resolve("node.db").toFile();
|
||||
Path nodeDb = tmpDir.resolve("node.db");
|
||||
LongLongMap nodeLocations = new LongLongMap.MapdbSortedTable(nodeDb);
|
||||
FeatureSort featureDb = FeatureSort.newExternalMergeSort(tmpDir.resolve("feature.db"), threads, stats);
|
||||
FeatureSort featureDb = FeatureSort.newExternalMergeSort(tmpDir.resolve("feature.db"), config.threads(), stats);
|
||||
FeatureGroup featureMap = new FeatureGroup(featureDb, profile);
|
||||
FlatMapConfig config = new FlatMapConfig(profile, envelope, threads, stats, logInterval);
|
||||
FeatureRenderer renderer = new FeatureRenderer(config);
|
||||
|
||||
if (fetchWikidata) {
|
||||
stats.time("wikidata", () -> Wikidata.fetch(osmInputFile, wikidataNamesFile, config));
|
||||
stats.time("wikidata", () -> Wikidata.fetch(osmInputFile, wikidataNamesFile, config, profile, stats));
|
||||
}
|
||||
if (useWikidata) {
|
||||
translations.addTranslationProvider(Wikidata.load(wikidataNamesFile));
|
||||
}
|
||||
|
||||
stats.time("lake_centerlines", () ->
|
||||
ShapefileReader.process("EPSG:3857", "lake_centerlines", centerlines, renderer, featureMap, config));
|
||||
ShapefileReader
|
||||
.process("EPSG:3857", "lake_centerlines", centerlines, renderer, featureMap, config, profile, stats));
|
||||
stats.time("water_polygons", () ->
|
||||
ShapefileReader.process("water_polygons", waterPolygons, renderer, featureMap, config));
|
||||
ShapefileReader.process("water_polygons", waterPolygons, renderer, featureMap, config, profile, stats));
|
||||
stats.time("natural_earth", () ->
|
||||
new NaturalEarthReader(naturalEarth, tmpDir.resolve("natearth.sqlite").toFile(), stats)
|
||||
new NaturalEarthReader(naturalEarth, tmpDir.resolve("natearth.sqlite"), profile, stats)
|
||||
.process("natural_earth", renderer, featureMap, config)
|
||||
);
|
||||
|
||||
AtomicLong featureCount = new AtomicLong(0);
|
||||
try (var osmReader = new OpenStreetMapReader(osmInputFile, nodeLocations, stats)) {
|
||||
try (var osmReader = new OpenStreetMapReader(osmInputFile, nodeLocations, profile, stats)) {
|
||||
stats.time("osm_pass1", () -> osmReader.pass1(config));
|
||||
stats.time("osm_pass2",
|
||||
() -> featureCount.set(osmReader.pass2(renderer, featureMap, Math.max(threads / 4, 1), threads - 1, config)));
|
||||
stats.time("osm_pass2", () -> featureCount.set(osmReader.pass2(renderer, featureMap, config)));
|
||||
}
|
||||
|
||||
LOGGER.info("Deleting node.db to make room for mbtiles");
|
||||
profile.release();
|
||||
nodeDb.delete();
|
||||
Files.delete(nodeDb);
|
||||
|
||||
stats.time("sort", featureDb::sort);
|
||||
stats.time("mbtiles", () -> MbtilesWriter.writeOutput(featureCount.get(), featureMap, output, config));
|
||||
stats
|
||||
.time("mbtiles", () -> MbtilesWriter.writeOutput(featureCount.get(), featureMap, output, profile, config, stats));
|
||||
|
||||
stats.stopTimer("import");
|
||||
|
||||
|
|
|
@ -8,34 +8,36 @@ import com.graphhopper.reader.osm.pbf.Sink;
|
|||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.zip.DataFormatException;
|
||||
import java.util.zip.Inflater;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.openstreetmap.osmosis.osmbinary.Fileformat.Blob;
|
||||
import org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader;
|
||||
import org.openstreetmap.osmosis.osmbinary.Osmformat.HeaderBBox;
|
||||
import org.openstreetmap.osmosis.osmbinary.Osmformat.HeaderBlock;
|
||||
import org.openstreetmap.osmosis.osmbinary.file.FileFormatException;
|
||||
|
||||
public class OsmInputFile {
|
||||
public class OsmInputFile implements BoundsProvider {
|
||||
|
||||
private final File file;
|
||||
private final Path path;
|
||||
|
||||
public OsmInputFile(File file) {
|
||||
this.file = file;
|
||||
public OsmInputFile(Path path) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public double[] getBounds() {
|
||||
try (var input = new FileInputStream(file)) {
|
||||
@Override
|
||||
public Envelope getBounds() {
|
||||
try (var input = Files.newInputStream(path)) {
|
||||
var dataInput = new DataInputStream(input);
|
||||
int headerSize = dataInput.readInt();
|
||||
if (headerSize > 65536) {
|
||||
throw new FileFormatException("Unexpectedly long header 65536 bytes. Possibly corrupt file " + file);
|
||||
throw new FileFormatException("Unexpectedly long header 65536 bytes. Possibly corrupt file " + path);
|
||||
}
|
||||
byte[] buf = dataInput.readNBytes(headerSize);
|
||||
BlobHeader header = BlobHeader.parseFrom(buf);
|
||||
|
@ -57,12 +59,12 @@ public class OsmInputFile {
|
|||
}
|
||||
HeaderBlock headerblock = HeaderBlock.parseFrom(data);
|
||||
HeaderBBox bbox = headerblock.getBbox();
|
||||
return new double[]{
|
||||
return new Envelope(
|
||||
bbox.getLeft() / 1e9,
|
||||
bbox.getBottom() / 1e9,
|
||||
bbox.getRight() / 1e9,
|
||||
bbox.getBottom() / 1e9,
|
||||
bbox.getTop() / 1e9
|
||||
};
|
||||
);
|
||||
} catch (IOException | DataFormatException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
@ -70,7 +72,7 @@ public class OsmInputFile {
|
|||
|
||||
public void readTo(Consumer<ReaderElement> next, int threads) throws IOException {
|
||||
ExecutorService executorService = Executors.newFixedThreadPool(threads);
|
||||
try (var stream = new BufferedInputStream(new FileInputStream(file), 50_000)) {
|
||||
try (var stream = new BufferedInputStream(Files.newInputStream(path), 50_000)) {
|
||||
PbfStreamSplitter streamSplitter = new PbfStreamSplitter(new DataInputStream(stream));
|
||||
var sink = new ReaderElementSink(next);
|
||||
PbfDecoder pbfDecoder = new PbfDecoder(streamSplitter, executorService, threads + 1, sink);
|
||||
|
|
|
@ -15,6 +15,20 @@ public interface Profile {
|
|||
List<VectorTileEncoder.Feature> postProcessLayerFeatures(String layer, int zoom,
|
||||
List<VectorTileEncoder.Feature> items);
|
||||
|
||||
String name();
|
||||
|
||||
default String description() {
|
||||
return null;
|
||||
}
|
||||
|
||||
default String attribution() {
|
||||
return null;
|
||||
}
|
||||
|
||||
default String version() {
|
||||
return null;
|
||||
}
|
||||
|
||||
class NullProfile implements Profile {
|
||||
|
||||
@Override
|
||||
|
@ -33,10 +47,14 @@ public interface Profile {
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VectorTileEncoder.Feature> postProcessLayerFeatures(String layer,
|
||||
int zoom,
|
||||
public List<VectorTileEncoder.Feature> postProcessLayerFeatures(String layer, int zoom,
|
||||
List<VectorTileEncoder.Feature> items) {
|
||||
return items;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "null";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,17 +14,11 @@ import com.graphhopper.reader.ReaderElement;
|
|||
import com.graphhopper.util.StopWatch;
|
||||
import com.onthegomap.flatmap.monitoring.ProgressLoggers;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.profiles.OpenMapTilesProfile;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.io.Writer;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
|
@ -32,6 +26,8 @@ import java.net.http.HttpClient;
|
|||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse.BodyHandlers;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -46,7 +42,6 @@ import java.util.stream.Collectors;
|
|||
import java.util.zip.GZIPInputStream;
|
||||
import java.util.zip.Inflater;
|
||||
import java.util.zip.InflaterInputStream;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -121,9 +116,8 @@ public class Wikidata {
|
|||
return resultMap;
|
||||
}
|
||||
|
||||
public static void fetch(OsmInputFile infile, File outfile, FlatMapConfig config) {
|
||||
public static void fetch(OsmInputFile infile, Path outfile, CommonParams config, Profile profile, Stats stats) {
|
||||
int threads = config.threads();
|
||||
Stats stats = config.stats();
|
||||
stats.startTimer("wikidata");
|
||||
int readerThreads = Math.max(1, Math.min(4, threads * 3 / 4));
|
||||
int processThreads = Math.max(1, Math.min(4, threads / 4));
|
||||
|
@ -131,7 +125,7 @@ public class Wikidata {
|
|||
.info("[wikidata] Starting with " + readerThreads + " reader threads and " + processThreads + " process threads");
|
||||
|
||||
WikidataTranslations oldMappings = load(outfile);
|
||||
try (Writer writer = new BufferedWriter(new FileWriter(outfile))) {
|
||||
try (Writer writer = Files.newBufferedWriter(outfile)) {
|
||||
HttpClient client = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(30)).build();
|
||||
Wikidata fetcher = new Wikidata(writer, Client.wrap(client), 5_000);
|
||||
fetcher.loadExisting(oldMappings);
|
||||
|
@ -168,30 +162,28 @@ public class Wikidata {
|
|||
stats.stopTimer("wikidata");
|
||||
}
|
||||
|
||||
public static WikidataTranslations load(File file) {
|
||||
public static WikidataTranslations load(Path path) {
|
||||
StopWatch watch = new StopWatch().start();
|
||||
try (FileReader fis = new FileReader(file)) {
|
||||
try (BufferedReader fis = Files.newBufferedReader(path)) {
|
||||
WikidataTranslations result = load(fis);
|
||||
LOGGER.info(
|
||||
"[wikidata] loaded from " + result.getAll().size() + " mappings from " + file.getAbsolutePath() + " in " + watch
|
||||
"[wikidata] loaded from " + result.getAll().size() + " mappings from " + path.toAbsolutePath() + " in " + watch
|
||||
.stop());
|
||||
return result;
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("[wikidata] error loading " + file.getAbsolutePath() + ": " + e);
|
||||
LOGGER.info("[wikidata] error loading " + path.toAbsolutePath() + ": " + e);
|
||||
return new WikidataTranslations();
|
||||
}
|
||||
}
|
||||
|
||||
public static WikidataTranslations load(Reader reader) throws IOException {
|
||||
public static WikidataTranslations load(BufferedReader reader) throws IOException {
|
||||
WikidataTranslations mappings = new WikidataTranslations();
|
||||
try (BufferedReader br = new BufferedReader(reader)) {
|
||||
String line;
|
||||
while ((line = br.readLine()) != null) {
|
||||
JsonNode node = objectMapper.readTree(line);
|
||||
long id = Long.parseLong(node.get(0).asText());
|
||||
ObjectNode theseMappings = (ObjectNode) node.get(1);
|
||||
theseMappings.fields().forEachRemaining(entry -> mappings.put(id, entry.getKey(), entry.getValue().asText()));
|
||||
}
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
JsonNode node = objectMapper.readTree(line);
|
||||
long id = Long.parseLong(node.get(0).asText());
|
||||
ObjectNode theseMappings = (ObjectNode) node.get(1);
|
||||
theseMappings.fields().forEachRemaining(entry -> mappings.put(id, entry.getKey(), entry.getValue().asText()));
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
|
@ -218,23 +210,6 @@ public class Wikidata {
|
|||
return result;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
LOGGER.info("Arguments:");
|
||||
Arguments arguments = new Arguments(args);
|
||||
var stats = arguments.getStats();
|
||||
int threads = arguments.threads();
|
||||
OsmInputFile osmInputFile = new OsmInputFile(
|
||||
arguments.inputFile("input", "OSM input file", "./data/sources/north-america_us_massachusetts.pbf"));
|
||||
File output = arguments.file("output", "wikidata cache file", "./data/sources/wikidata_names.json");
|
||||
Duration logInterval = arguments.duration("loginterval", "time between logs", "10s");
|
||||
double[] bounds = arguments.bounds("bounds", "bounds", osmInputFile);
|
||||
Envelope envelope = new Envelope(bounds[0], bounds[2], bounds[1], bounds[3]);
|
||||
Profile profile = new OpenMapTilesProfile();
|
||||
FlatMapConfig config = new FlatMapConfig(profile, envelope, threads, stats, logInterval);
|
||||
|
||||
fetch(osmInputFile, output, config);
|
||||
}
|
||||
|
||||
private void filter(Supplier<ReaderElement> prev, Consumer<Long> next) {
|
||||
TrackUsageMapping qidTracker = new TrackUsageMapping();
|
||||
ReaderElement elem;
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
package com.onthegomap.flatmap.collections;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
|
||||
public interface LongLongMap extends Closeable {
|
||||
|
||||
|
@ -10,11 +10,11 @@ public interface LongLongMap extends Closeable {
|
|||
|
||||
long get(long key);
|
||||
|
||||
File filePath();
|
||||
Path filePath();
|
||||
|
||||
class MapdbSortedTable implements LongLongMap {
|
||||
|
||||
public MapdbSortedTable(File nodeDb) {
|
||||
public MapdbSortedTable(Path nodeDb) {
|
||||
|
||||
}
|
||||
|
||||
|
@ -29,7 +29,7 @@ public interface LongLongMap extends Closeable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public File filePath() {
|
||||
public Path filePath() {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package com.onthegomap.flatmap.geo;
|
||||
|
||||
import org.locationtech.jts.geom.CoordinateSequence;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.locationtech.jts.geom.GeometryFactory;
|
||||
import org.locationtech.jts.geom.impl.PackedCoordinateSequence;
|
||||
|
@ -16,25 +17,24 @@ public class GeoUtils {
|
|||
private static final double RADIANS_TO_DEGREES = 180 / Math.PI;
|
||||
private static final double MAX_LAT = getWorldLat(-0.1);
|
||||
private static final double MIN_LAT = getWorldLat(1.1);
|
||||
public static double[] WORLD_BOUNDS = new double[]{0, 0, 1, 1};
|
||||
public static double[] WORLD_LAT_LON_BOUNDS = toLatLonBoundsBounds(WORLD_BOUNDS);
|
||||
public static Envelope WORLD_BOUNDS = new Envelope(0, 1, 0, 1);
|
||||
public static Envelope WORLD_LAT_LON_BOUNDS = toLatLonBoundsBounds(WORLD_BOUNDS);
|
||||
|
||||
public static double[] toLatLonBoundsBounds(double[] worldBounds) {
|
||||
return new double[]{
|
||||
getWorldLon(worldBounds[0]),
|
||||
getWorldLat(worldBounds[1]),
|
||||
getWorldLon(worldBounds[2]),
|
||||
getWorldLat(worldBounds[3])
|
||||
};
|
||||
public static Envelope toLatLonBoundsBounds(Envelope worldBounds) {
|
||||
return new Envelope(
|
||||
getWorldLon(worldBounds.getMinX()),
|
||||
getWorldLon(worldBounds.getMaxX()),
|
||||
getWorldLat(worldBounds.getMinY()),
|
||||
getWorldLat(worldBounds.getMaxY()));
|
||||
}
|
||||
|
||||
public static double[] toWorldBounds(double[] lonLatBounds) {
|
||||
return new double[]{
|
||||
getWorldX(lonLatBounds[0]),
|
||||
getWorldY(lonLatBounds[1]),
|
||||
getWorldX(lonLatBounds[2]),
|
||||
getWorldY(lonLatBounds[3])
|
||||
};
|
||||
public static Envelope toWorldBounds(Envelope lonLatBounds) {
|
||||
return new Envelope(
|
||||
getWorldX(lonLatBounds.getMinX()),
|
||||
getWorldX(lonLatBounds.getMaxX()),
|
||||
getWorldY(lonLatBounds.getMinY()),
|
||||
getWorldY(lonLatBounds.getMaxY())
|
||||
);
|
||||
}
|
||||
|
||||
public static double getWorldLon(double x) {
|
||||
|
@ -97,4 +97,14 @@ public class GeoUtils {
|
|||
public static double decodeWorldX(long encoded) {
|
||||
return ((double) (encoded >> 32)) / QUANTIZED_WORLD_SIZE;
|
||||
}
|
||||
|
||||
public static double getZoomFromLonLatBounds(Envelope envelope) {
|
||||
Envelope worldBounds = GeoUtils.toWorldBounds(envelope);
|
||||
return getZoomFromWorldBounds(worldBounds);
|
||||
}
|
||||
|
||||
public static double getZoomFromWorldBounds(Envelope worldBounds) {
|
||||
double maxEdge = Math.max(worldBounds.getWidth(), worldBounds.getHeight());
|
||||
return Math.max(0, -Math.log(maxEdge) / Math.log(2));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,9 @@ import com.onthegomap.flatmap.Format;
|
|||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import com.onthegomap.flatmap.worker.WorkQueue;
|
||||
import com.onthegomap.flatmap.worker.Worker;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -107,17 +109,30 @@ public class ProgressLoggers {
|
|||
return this;
|
||||
}
|
||||
|
||||
public ProgressLoggers addFileSize(File file) {
|
||||
return addFileSize(file::length);
|
||||
public ProgressLoggers addFileSize(Path file) {
|
||||
loggers.add(string(() -> {
|
||||
String bytes;
|
||||
try {
|
||||
bytes = formatBytes(Files.size(file), false);
|
||||
} catch (IOException e) {
|
||||
bytes = "-";
|
||||
}
|
||||
return " " + padRight(bytes, 5);
|
||||
}));
|
||||
return this;
|
||||
}
|
||||
|
||||
private Object string(Supplier<String> supplier) {
|
||||
return new Object() {
|
||||
@Override
|
||||
public String toString() {
|
||||
return supplier.toString();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public ProgressLoggers addFileSize(LongSupplier longSupplier) {
|
||||
loggers.add(new Object() {
|
||||
@Override
|
||||
public String toString() {
|
||||
return " " + padRight(formatBytes(longSupplier.getAsLong(), false), 5);
|
||||
}
|
||||
});
|
||||
loggers.add(string(() -> " " + padRight(formatBytes(longSupplier.getAsLong(), false), 5)));
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package com.onthegomap.flatmap.monitoring;
|
||||
|
||||
import com.graphhopper.util.StopWatch;
|
||||
import com.onthegomap.flatmap.Mbtiles;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -20,6 +21,8 @@ public interface Stats {
|
|||
|
||||
void gauge(String name, int value);
|
||||
|
||||
Mbtiles.MetadataJson getTileStats();
|
||||
|
||||
class InMemory implements Stats {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(InMemory.class);
|
||||
|
@ -57,5 +60,10 @@ public interface Stats {
|
|||
public void gauge(String name, int value) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mbtiles.MetadataJson getTileStats() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,6 +24,28 @@ public class OpenMapTilesProfile implements Profile {
|
|||
return items;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "OpenMapTiles";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "A tileset showcasing all layers in OpenMapTiles. https://openmaptiles.org";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String attribution() {
|
||||
return """
|
||||
<a href="https://www.openmaptiles.org/" target="_blank">© OpenMapTiles</a> <a href="https://www.openstreetmap.org/copyright" target="_blank">© OpenStreetMap contributors</a>
|
||||
""".trim();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String version() {
|
||||
return "3.12.1";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OpenStreetMapReader.RelationInfo> preprocessOsmRelation(ReaderRelation relation) {
|
||||
return null;
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package com.onthegomap.flatmap.reader;
|
||||
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.SourceFeature;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
|
@ -28,12 +29,12 @@ public class NaturalEarthReader extends Reader {
|
|||
private final Connection conn;
|
||||
private Path extracted;
|
||||
|
||||
public NaturalEarthReader(File input, Stats stats) {
|
||||
this(input, null, stats);
|
||||
public NaturalEarthReader(Path input, Profile profile, Stats stats) {
|
||||
this(input, null, profile, stats);
|
||||
}
|
||||
|
||||
public NaturalEarthReader(File input, File tmpDir, Stats stats) {
|
||||
super(stats);
|
||||
public NaturalEarthReader(Path input, Path tmpDir, Profile profile, Stats stats) {
|
||||
super(profile, stats);
|
||||
try {
|
||||
conn = open(input, tmpDir);
|
||||
} catch (IOException | SQLException e) {
|
||||
|
@ -41,23 +42,24 @@ public class NaturalEarthReader extends Reader {
|
|||
}
|
||||
}
|
||||
|
||||
private Connection open(File file, File tmpLocation) throws IOException, SQLException {
|
||||
String path = "jdbc:sqlite:" + file.getAbsolutePath();
|
||||
if (file.getName().endsWith(".zip")) {
|
||||
File toOpen = tmpLocation == null ? File.createTempFile("sqlite", "natearth") : tmpLocation;
|
||||
extracted = toOpen.toPath();
|
||||
toOpen.deleteOnExit();
|
||||
private Connection open(Path path, Path tmpLocation) throws IOException, SQLException {
|
||||
String uri = "jdbc:sqlite:" + path.toAbsolutePath();
|
||||
if (path.toString().toLowerCase().endsWith(".zip")) {
|
||||
Path toOpen = tmpLocation == null ? Files.createTempFile("sqlite", "natearth") : tmpLocation;
|
||||
extracted = toOpen;
|
||||
File file = extracted.toFile();
|
||||
file.deleteOnExit();
|
||||
try (ZipFile zipFile = new ZipFile(file)) {
|
||||
var zipEntry = zipFile.stream()
|
||||
.filter(entry -> entry.getName().endsWith(".sqlite"))
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new IllegalArgumentException("No .sqlite file found inside " + file.getName()));
|
||||
LOGGER.info("unzipping " + file.getAbsolutePath() + " to " + extracted);
|
||||
LOGGER.info("unzipping " + path.toAbsolutePath() + " to " + extracted);
|
||||
Files.copy(zipFile.getInputStream(zipEntry), extracted, StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
path = "jdbc:sqlite:" + toOpen.getAbsolutePath();
|
||||
uri = "jdbc:sqlite:" + toOpen.toAbsolutePath();
|
||||
}
|
||||
return DriverManager.getConnection(path);
|
||||
return DriverManager.getConnection(uri);
|
||||
}
|
||||
|
||||
private List<String> tableNames() {
|
||||
|
|
|
@ -8,8 +8,8 @@ import com.graphhopper.reader.ReaderElementUtils;
|
|||
import com.graphhopper.reader.ReaderNode;
|
||||
import com.graphhopper.reader.ReaderRelation;
|
||||
import com.graphhopper.reader.ReaderWay;
|
||||
import com.onthegomap.flatmap.CommonParams;
|
||||
import com.onthegomap.flatmap.FeatureRenderer;
|
||||
import com.onthegomap.flatmap.FlatMapConfig;
|
||||
import com.onthegomap.flatmap.OsmInputFile;
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.RenderableFeature;
|
||||
|
@ -39,6 +39,7 @@ public class OpenStreetMapReader implements Closeable {
|
|||
private final AtomicLong TOTAL_NODES = new AtomicLong(0);
|
||||
private final AtomicLong TOTAL_WAYS = new AtomicLong(0);
|
||||
private final AtomicLong TOTAL_RELATIONS = new AtomicLong(0);
|
||||
private final Profile profile;
|
||||
|
||||
// need a few large objects to process ways in relations, should be small enough to keep in memory
|
||||
// for routes (750k rels 40m ways) and boundaries (650k rels, 8m ways)
|
||||
|
@ -54,14 +55,14 @@ public class OpenStreetMapReader implements Closeable {
|
|||
// ~7GB
|
||||
private LongLongMultimap multipolygonWayGeometries = new LongLongMultimap.ManyOrderedBinarySearchMultimap();
|
||||
|
||||
public OpenStreetMapReader(OsmInputFile osmInputFile, LongLongMap nodeDb, Stats stats) {
|
||||
public OpenStreetMapReader(OsmInputFile osmInputFile, LongLongMap nodeDb, Profile profile, Stats stats) {
|
||||
this.osmInputFile = osmInputFile;
|
||||
this.nodeDb = nodeDb;
|
||||
this.stats = stats;
|
||||
this.profile = profile;
|
||||
}
|
||||
|
||||
public void pass1(FlatMapConfig config) {
|
||||
Profile profile = config.profile();
|
||||
public void pass1(CommonParams config) {
|
||||
var topology = Topology.start("osm_pass1", stats)
|
||||
.fromGenerator("pbf", osmInputFile.read(config.threads() - 1))
|
||||
.addBuffer("reader_queue", 50_000, 10_000)
|
||||
|
@ -107,9 +108,9 @@ public class OpenStreetMapReader implements Closeable {
|
|||
topology.awaitAndLog(loggers, config.logInterval());
|
||||
}
|
||||
|
||||
public long pass2(FeatureRenderer renderer, FeatureGroup writer, int readerThreads, int processThreads,
|
||||
FlatMapConfig config) {
|
||||
Profile profile = config.profile();
|
||||
public long pass2(FeatureRenderer renderer, FeatureGroup writer, CommonParams config) {
|
||||
int readerThreads = Math.max(config.threads() / 4, 1);
|
||||
int processThreads = config.threads() - 1;
|
||||
AtomicLong nodesProcessed = new AtomicLong(0);
|
||||
AtomicLong waysProcessed = new AtomicLong(0);
|
||||
AtomicLong relsProcessed = new AtomicLong(0);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package com.onthegomap.flatmap.reader;
|
||||
|
||||
import com.onthegomap.flatmap.CommonParams;
|
||||
import com.onthegomap.flatmap.FeatureRenderer;
|
||||
import com.onthegomap.flatmap.FlatMapConfig;
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.RenderableFeature;
|
||||
import com.onthegomap.flatmap.RenderableFeatures;
|
||||
|
@ -21,16 +21,17 @@ public abstract class Reader implements Closeable {
|
|||
|
||||
protected final Stats stats;
|
||||
private final Logger LOGGER = LoggerFactory.getLogger(getClass());
|
||||
private final Profile profile;
|
||||
|
||||
public Reader(Stats stats) {
|
||||
public Reader(Profile profile, Stats stats) {
|
||||
this.stats = stats;
|
||||
this.profile = profile;
|
||||
}
|
||||
|
||||
public final void process(String name, FeatureRenderer renderer, FeatureGroup writer, FlatMapConfig config) {
|
||||
public final void process(String name, FeatureRenderer renderer, FeatureGroup writer, CommonParams config) {
|
||||
long featureCount = getCount();
|
||||
int threads = config.threads();
|
||||
Envelope env = config.envelope();
|
||||
Profile profile = config.profile();
|
||||
Envelope env = config.bounds();
|
||||
AtomicLong featuresRead = new AtomicLong(0);
|
||||
AtomicLong featuresWritten = new AtomicLong(0);
|
||||
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
package com.onthegomap.flatmap.reader;
|
||||
|
||||
import com.onthegomap.flatmap.CommonParams;
|
||||
import com.onthegomap.flatmap.FeatureRenderer;
|
||||
import com.onthegomap.flatmap.FlatMapConfig;
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.SourceFeature;
|
||||
import com.onthegomap.flatmap.collections.FeatureGroup;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.nio.file.Path;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
import org.geotools.data.FeatureSource;
|
||||
|
@ -32,20 +33,20 @@ public class ShapefileReader extends Reader implements Closeable {
|
|||
private final ShapefileDataStore dataStore;
|
||||
private MathTransform transform;
|
||||
|
||||
public static void process(String sourceProjection, String name, File input, FeatureRenderer renderer,
|
||||
FeatureGroup writer, FlatMapConfig config) {
|
||||
try (var reader = new ShapefileReader(sourceProjection, input, config.stats())) {
|
||||
public static void process(String sourceProjection, String name, Path input, FeatureRenderer renderer,
|
||||
FeatureGroup writer, CommonParams config, Profile profile, Stats stats) {
|
||||
try (var reader = new ShapefileReader(sourceProjection, input, profile, stats)) {
|
||||
reader.process(name, renderer, writer, config);
|
||||
}
|
||||
}
|
||||
|
||||
public static void process(String name, File input, FeatureRenderer renderer,
|
||||
FeatureGroup writer, FlatMapConfig config) {
|
||||
process(null, name, input, renderer, writer, config);
|
||||
public static void process(String name, Path input, FeatureRenderer renderer,
|
||||
FeatureGroup writer, CommonParams config, Profile profile, Stats stats) {
|
||||
process(null, name, input, renderer, writer, config, profile, stats);
|
||||
}
|
||||
|
||||
public ShapefileReader(String sourceProjection, File input, Stats stats) {
|
||||
super(stats);
|
||||
public ShapefileReader(String sourceProjection, Path input, Profile profile, Stats stats) {
|
||||
super(profile, stats);
|
||||
dataStore = decode(input);
|
||||
try {
|
||||
String typeName = dataStore.getTypeNames()[0];
|
||||
|
@ -69,25 +70,24 @@ public class ShapefileReader extends Reader implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
private ShapefileDataStore decode(File file) {
|
||||
private ShapefileDataStore decode(Path path) {
|
||||
try {
|
||||
final String name = file.getName();
|
||||
|
||||
URI uri;
|
||||
|
||||
if (name.endsWith(".zip")) {
|
||||
try (ZipFile zip = new ZipFile(file)) {
|
||||
if (path.toString().toLowerCase().endsWith(".zip")) {
|
||||
try (ZipFile zip = new ZipFile(path.toFile())) {
|
||||
String shapeFileInZip = zip.stream()
|
||||
.map(ZipEntry::getName)
|
||||
.filter(z -> z.endsWith(".shp"))
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new IllegalArgumentException("No .shp file found inside " + name));
|
||||
uri = URI.create("jar:file:" + file.toPath().toAbsolutePath() + "!/" + shapeFileInZip);
|
||||
.orElseThrow(() -> new IllegalArgumentException("No .shp file found inside " + path));
|
||||
uri = URI.create("jar:file:" + path.toAbsolutePath() + "!/" + shapeFileInZip);
|
||||
}
|
||||
} else if (name.endsWith(".shp")) {
|
||||
uri = file.toURI();
|
||||
} else if (path.toString().toLowerCase().endsWith(".shp")) {
|
||||
uri = path.toUri();
|
||||
} else {
|
||||
throw new IllegalArgumentException("Invalid shapefile input: " + file + " must be zip or shp");
|
||||
throw new IllegalArgumentException("Invalid shapefile input: " + path + " must be zip or shp");
|
||||
}
|
||||
return new ShapefileDataStore(uri.toURL());
|
||||
} catch (IOException e) {
|
||||
|
@ -95,8 +95,8 @@ public class ShapefileReader extends Reader implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
public ShapefileReader(File input, Stats stats) {
|
||||
this(null, input, stats);
|
||||
public ShapefileReader(Path input, Profile profile, Stats stats) {
|
||||
this(null, input, profile, stats);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
public class MbtilesTest {
|
||||
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
public class MbtilesWriterTest {
|
||||
|
||||
}
|
|
@ -1,23 +1,23 @@
|
|||
package com.onthegomap.flatmap;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
import com.graphhopper.reader.ReaderElement;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.Timeout;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
public class OsmInputFileTest {
|
||||
|
||||
private OsmInputFile file = new OsmInputFile(new File("src/test/resources/monaco-latest.osm.pbf"));
|
||||
private OsmInputFile file = new OsmInputFile(Path.of("src", "test", "resources", "monaco-latest.osm.pbf"));
|
||||
|
||||
@Test
|
||||
public void testGetBounds() {
|
||||
assertArrayEquals(new double[]{7.409205, 43.72335, 7.448637, 43.75169}, file.getBounds());
|
||||
assertEquals(new Envelope(7.409205, 7.448637, 43.72335, 43.75169), file.getBounds());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -6,6 +6,7 @@ import static org.junit.jupiter.api.DynamicTest.dynamicTest;
|
|||
|
||||
import com.graphhopper.reader.ReaderElement;
|
||||
import com.graphhopper.reader.ReaderNode;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.StringReader;
|
||||
|
@ -110,7 +111,7 @@ public class WikidataTest {
|
|||
""", body);
|
||||
}),
|
||||
dynamicTest("can load serialized data", () -> {
|
||||
var translations = Wikidata.load(new StringReader(writer.toString()));
|
||||
var translations = Wikidata.load(new BufferedReader(new StringReader(writer.toString())));
|
||||
assertEquals(Map.of("name:en", "en name", "name:es", "es name"), translations.get(1));
|
||||
assertEquals(Map.of("name:es", "es name2"), translations.get(2));
|
||||
}),
|
||||
|
@ -118,7 +119,7 @@ public class WikidataTest {
|
|||
StringWriter writer2 = new StringWriter();
|
||||
Wikidata.Client client2 = Mockito.mock(Wikidata.Client.class, Mockito.RETURNS_SMART_NULLS);
|
||||
Wikidata fixture2 = new Wikidata(writer2, client2, 2);
|
||||
fixture2.loadExisting(Wikidata.load(new StringReader(writer.toString())));
|
||||
fixture2.loadExisting(Wikidata.load(new BufferedReader(new StringReader(writer.toString()))));
|
||||
fixture2.fetch(1L);
|
||||
fixture2.fetch(2L);
|
||||
fixture2.fetch(1L);
|
||||
|
|
|
@ -4,10 +4,11 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
|||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
@ -22,9 +23,9 @@ public class NaturalEarthReaderTest {
|
|||
@ParameterizedTest
|
||||
@ValueSource(strings = {"natural_earth_vector.sqlite", "natural_earth_vector.sqlite.zip"})
|
||||
@Timeout(30)
|
||||
public void testReadNaturalEarth(String filename, @TempDir File tempDir) {
|
||||
var file = new File("src/test/resources/" + filename);
|
||||
try (var reader = new NaturalEarthReader(file, tempDir, new Stats.InMemory())) {
|
||||
public void testReadNaturalEarth(String filename, @TempDir Path tempDir) {
|
||||
var path = Path.of("src", "test", "resources", filename);
|
||||
try (var reader = new NaturalEarthReader(path, tempDir, new Profile.NullProfile(), new Stats.InMemory())) {
|
||||
for (int i = 1; i <= 2; i++) {
|
||||
assertEquals(19, reader.getCount(), "iter " + i);
|
||||
|
||||
|
|
|
@ -3,10 +3,11 @@ package com.onthegomap.flatmap.reader;
|
|||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import com.onthegomap.flatmap.Profile;
|
||||
import com.onthegomap.flatmap.geo.GeoUtils;
|
||||
import com.onthegomap.flatmap.monitoring.Stats;
|
||||
import com.onthegomap.flatmap.worker.Topology;
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
|
@ -16,8 +17,11 @@ import org.locationtech.jts.geom.Geometry;
|
|||
|
||||
public class ShapefileReaderTest {
|
||||
|
||||
private ShapefileReader reader = new ShapefileReader(new File("src/test/resources/shapefile.zip"),
|
||||
new Stats.InMemory());
|
||||
private ShapefileReader reader = new ShapefileReader(
|
||||
Path.of("src", "test", "resources", "shapefile.zip"),
|
||||
new Profile.NullProfile(),
|
||||
new Stats.InMemory()
|
||||
);
|
||||
|
||||
@AfterEach
|
||||
public void close() {
|
||||
|
|
Ładowanie…
Reference in New Issue