|
| 1 | +import { createWriteStream } from 'node:fs' |
| 2 | +import { readFile, readdir, rm, stat } from 'node:fs/promises' |
| 3 | +import { join, relative } from 'node:path' |
| 4 | +import { pipeline } from 'node:stream/promises' |
| 5 | +import { createGzip } from 'node:zlib' |
| 6 | +import sqlite3 from 'sqlite3' |
| 7 | +// @ts-expect-error no types shipped with tar-stream |
| 8 | +import tarStream from 'tar-stream' |
| 9 | + |
// Minimal promise-based facade over a sqlite3 database handle, exposing only
// the two operations the fixture builders below need.
interface RunnableDb {
  // Execute one SQL statement; `params` are bound positionally ('?' placeholders).
  run: (sql: string, params?: unknown[]) => Promise<void>
  // Close the underlying database handle, flushing pending work.
  close: () => Promise<void>
}
| 14 | + |
| 15 | +const runDb = (db: sqlite3.Database, sql: string, params: unknown[] = []): Promise<void> => |
| 16 | + new Promise((resolve, reject) => { |
| 17 | + db.run(sql, params, (err) => err ? reject(err) : resolve()) |
| 18 | + }) |
| 19 | + |
| 20 | +const closeDb = (db: sqlite3.Database): Promise<void> => |
| 21 | + new Promise((resolve, reject) => { |
| 22 | + db.close((err) => err ? reject(err) : resolve()) |
| 23 | + }) |
| 24 | + |
| 25 | +const openDb = (path: string): Promise<RunnableDb> => new Promise((resolve, reject) => { |
| 26 | + const db = new sqlite3.Database(path, (err) => { |
| 27 | + if (err) return reject(err) |
| 28 | + resolve({ |
| 29 | + run: (sql, params) => runDb(db, sql, params), |
| 30 | + close: () => closeDb(db) |
| 31 | + }) |
| 32 | + }) |
| 33 | +}) |
| 34 | + |
| 35 | +export const buildMbtiles = async (destPath: string, tilesetId: string): Promise<void> => { |
| 36 | + await rm(destPath, { force: true }) |
| 37 | + const db = await openDb(destPath) |
| 38 | + try { |
| 39 | + await db.run('CREATE TABLE metadata (name TEXT, value TEXT)') |
| 40 | + await db.run('CREATE TABLE tiles (zoom_level INTEGER, tile_column INTEGER, tile_row INTEGER, tile_data BLOB, PRIMARY KEY (zoom_level, tile_column, tile_row))') |
| 41 | + |
| 42 | + const metadata: Record<string, string> = { |
| 43 | + name: tilesetId, |
| 44 | + type: 'overlay', |
| 45 | + version: '1.0.0', |
| 46 | + description: `Test fixture ${tilesetId}`, |
| 47 | + format: 'pbf', |
| 48 | + minzoom: '0', |
| 49 | + maxzoom: '2', |
| 50 | + bounds: '-180.0,-85.0511,180.0,85.0511', |
| 51 | + center: '0,0,0', |
| 52 | + json: JSON.stringify({ |
| 53 | + vector_layers: [ |
| 54 | + { id: 'placeholder', description: '', minzoom: 0, maxzoom: 2, fields: {} } |
| 55 | + ] |
| 56 | + }) |
| 57 | + } |
| 58 | + for (const [k, v] of Object.entries(metadata)) { |
| 59 | + await db.run('INSERT INTO metadata (name, value) VALUES (?, ?)', [k, v]) |
| 60 | + } |
| 61 | + } finally { |
| 62 | + await db.close() |
| 63 | + } |
| 64 | +} |
| 65 | + |
| 66 | +export const packStyleTarball = async (sourceDir: string, destPath: string): Promise<void> => { |
| 67 | + const pack = tarStream.pack() |
| 68 | + |
| 69 | + const walk = async (dir: string): Promise<string[]> => { |
| 70 | + const entries = await readdir(dir, { withFileTypes: true }) |
| 71 | + const files: string[] = [] |
| 72 | + for (const entry of entries) { |
| 73 | + const full = join(dir, entry.name) |
| 74 | + if (entry.isDirectory()) files.push(...await walk(full)) |
| 75 | + else if (entry.isFile()) files.push(full) |
| 76 | + } |
| 77 | + return files |
| 78 | + } |
| 79 | + |
| 80 | + const files = await walk(sourceDir) |
| 81 | + // Kick off the write in parallel with pack entries so backpressure works. |
| 82 | + const writePromise = pipeline(pack, createGzip(), createWriteStream(destPath)) |
| 83 | + |
| 84 | + for (const absPath of files) { |
| 85 | + const rel = relative(sourceDir, absPath).split('\\').join('/') |
| 86 | + const name = `package/${rel}` |
| 87 | + const content = await readFile(absPath) |
| 88 | + const st = await stat(absPath) |
| 89 | + await new Promise<void>((resolve, reject) => { |
| 90 | + pack.entry({ name, size: content.length, mode: st.mode }, content, (err: Error | null) => { |
| 91 | + if (err) reject(err) |
| 92 | + else resolve() |
| 93 | + }) |
| 94 | + }) |
| 95 | + } |
| 96 | + pack.finalize() |
| 97 | + await writePromise |
| 98 | +} |
0 commit comments