Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Initial work on webify-ing library. #1

Closed
wants to merge 20 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
node_modules/**
lib/**
coverage/**
package-lock.json
web/**
Empty file added index.html
Empty file.
16 changes: 13 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,13 @@
"scripts": {
"build": "ttsc --project tsconfig.production.json",
"prepare": "yarn build",
"test": "jest"
"test": "jest",
"web-build": "rm -rf ./web && esbuild src/index.ts --define:global=window --bundle --minify --outfile=web/copc.js --format=iife --global-name=COPC --target=es2020 && cp -R ./src/laz-perf ./web/laz-perf",
"web-build-debug": "esbuild src/index.ts --define:global=window --bundle --sourcemap --outfile=web/copc.js --format=iife --global-name=COPC --target=es2020"
},
"dependencies": {
"yarn": "^1.22.17"
},
"dependencies": {},
"devDependencies": {
"@types/emscripten": "^1.39.5",
"@types/jest": "^27.0.1",
Expand All @@ -18,16 +22,22 @@
"forager": "^0.0.8",
"husky": "^7.0.1",
"jest": "^27.0.6",
"path-browserify": "^1.0.1",
"prettier": "^2.3.2",
"pretty-quick": "^3.1.1",
"ts-jest": "^27.0.5",
"ts-loader": "^9.2.5",
"ts-node": "^10.2.0",
"ttypescript": "^1.5.12",
"typescript": "^4.3.5"
"typescript": "^4.3.5",
"esbuild": "^0.13.8"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"browser": {
"path": "path-browserify"
}
}
2 changes: 1 addition & 1 deletion src/copc/copc.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ const filename = ellipsoidFilename

test('data', async () => {
const copc = await Copc.create(filename)
const view = await Copc.loadPointData(filename, copc, '0-0-0-0')
const view = await Copc.loadPointDataView(filename, copc, '0-0-0-0')

expect(view.dimensions).toEqual<Dimension.Map>({
X: { type: 'float', size: 8 },
Expand Down
45 changes: 35 additions & 10 deletions src/copc/copc.ts
Original file line number Diff line number Diff line change
@@ -1,32 +1,49 @@
import * as Las from 'las'
import { Getter, Key, View } from 'utils'

import { Getter, View, Key, Binary } from 'utils'
import { Hierarchy } from './hierarchy'
import { Offsets } from './offsets'
import { Extents } from './extents'

/**
 * Top-level description of a COPC (Cloud Optimized Point Cloud) file:
 * the parsed LAS header plus the COPC-specific metadata needed to walk
 * the octree hierarchy and load point data per node.
 */
export type Copc = {
  header: Las.Header
  // VLRs walked from the file. NOTE(review): create() currently returns
  // this as [] (the VLR walk is commented out) — confirm before relying on it.
  vlrs: Las.Vlr[]
  // Contents of the COPC info VLR: octree cube center/halfsize, point
  // spacing, and the root hierarchy page offset/length.
  offsets: Offsets
  // Octree hierarchy pages; nodes start as lazy entries and are loaded on demand.
  hierarchy: Hierarchy
  // Per-dimension { min, max } values parsed from the extents VLR.
  extents: Object
}
export const Copc = { create, loadPointData, loadHierarchyPage }
export const Copc = { create, loadPointData, loadPointDataView, loadHierarchyPage }

/**
* Parse the COPC header and walk VLR and EVLR metadata.
*/
async function create(filename: string | Getter): Promise<Copc> {
const get = Getter.create(filename)
const header = Las.Header.parse(await get(0, Las.Constants.headerLength))
const vlrs = await Las.Vlr.walk(get, header)
// const vlrs = await Las.Vlr.walk(get, header)

const copcVlr = vlrs.find((v) => v.userId === 'entwine' && v.recordId === 1)
if (!copcVlr) throw new Error('COPC VLR is required')
const { contentOffset, contentLength } = copcVlr
const copcVlr = await Las.Vlr.doWalk({
get,
startOffset: 375,
count: 1,
isExtended: false,
})
if (copcVlr.length == 0) throw new Error('COPC VLR is required')
const { contentOffset, contentLength } = copcVlr[0]
const offsets = Offsets.parse(
await get(contentOffset, contentOffset + contentLength)
)

const extentsVlr = await Las.Vlr.doWalk({
get,
startOffset: 375 + 160 + 54, // right after the copcinfo vlr
count: 1,
isExtended: false,
})
if (extentsVlr.length == 0) throw new Error('Extents VLR is required')
const extents = Extents.parse( header,
await get(extentsVlr[0].contentOffset, extentsVlr[0].contentOffset + extentsVlr[0].contentLength)
)

const hierarchy: Hierarchy = {
'0-0-0-0': {
type: 'lazy',
Expand All @@ -35,7 +52,7 @@ async function create(filename: string | Getter): Promise<Copc> {
},
}

return { header, vlrs, offsets, hierarchy }
return { header, vlrs: [], offsets, hierarchy, extents }
}

async function loadHierarchyPage(
Expand All @@ -52,7 +69,7 @@ async function loadPointData(
filename: string | Getter,
copc: Copc,
key: Key | string
): Promise<View> {
): Promise<Binary> {
const get = Getter.create(filename)

// Ensure that the hierarchy entry for this node is loaded.
Expand Down Expand Up @@ -82,5 +99,13 @@ async function loadPointData(
pointCount,
})

return Las.View.create(copc.header, buffer)
return buffer
}

/**
 * Convenience wrapper around loadPointData(): fetch and decompress the
 * point data for the given hierarchy node, then wrap the raw buffer in a
 * dimension-aware View over this file's LAS header.
 */
async function loadPointDataView(
  filename: string | Getter,
  copc: Copc,
  key: Key | string
): Promise<View> {
  const buffer = await loadPointData(filename, copc, key)
  return Las.View.create(copc.header, buffer)
}
24 changes: 24 additions & 0 deletions src/copc/extents.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { Binary } from '../utils'
import { Dimensions } from '../las/dimensions'
import { Extractor } from '../las/extractor'
import { Header } from '../las/header'

export const Extents = { parse }

/**
 * Parse the COPC extents VLR payload: a sequence of little-endian
 * (min, max) float64 pairs, one pair per point dimension, in the order
 * produced by Dimensions.create() for this header's point format.
 *
 * @param header Parsed LAS header — determines the point data record
 *   format and therefore which dimensions are present.
 * @param buffer Raw contents of the extents VLR.
 * @returns Map from dimension name to its { min, max } extent.
 */
function parse(
  header: Header,
  buffer: Binary
): Record<string, { min: number; max: number }> {
  const dv = Binary.toDataView(buffer)

  const extractors = Extractor.create(header)
  const dimensions = Dimensions.create(extractors)

  // Bit-flag dimensions share packed bytes and carry no (min, max) pair
  // in the VLR payload, so they are skipped when advancing the cursor.
  // NOTE(review): inherited from the original implementation — confirm
  // against the COPC extents VLR specification.
  const flagDimensions = new Set(['Synthetic', 'KeyPoint', 'Withheld', 'Overlap'])

  const extents: Record<string, { min: number; max: number }> = {}
  let offset = 0
  for (const name of Object.keys(dimensions)) {
    if (flagDimensions.has(name)) continue
    extents[name] = {
      min: dv.getFloat64(offset, true),
      max: dv.getFloat64(offset + 8, true),
    }
    offset += 16 // two float64s consumed per dimension
  }
  return extents
}
3 changes: 1 addition & 2 deletions src/copc/hierarchy.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import { Binary, Getter, Key, Step, parseBigInt } from 'utils'

import {Binary, Getter, Key, Step, parseBigInt} from 'utils'
import { hierarchyItemLength } from './constants'

export declare namespace Hierarchy {
Expand Down
19 changes: 13 additions & 6 deletions src/copc/offsets.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ test('offsets', async () => {
const header = Header.parse(hbuffer)
const vlrs = await Vlr.walk(filename, header)

const copcVlr = vlrs.find((v) => v.userId === 'entwine' && v.recordId === 1)
const copcVlr = vlrs.find((v) => v.userId === 'copc' && v.recordId === 1)
if (!copcVlr) throw new Error('COPC VLR is required')

const buffer = await Forager.read(filename, {
Expand All @@ -22,9 +22,16 @@ test('offsets', async () => {
],
})
const offsets = Offsets.parse(buffer)
expect(offsets).toEqual<Offsets>({
span: 0,
rootHierarchyOffset: 400368,
rootHierarchyLength: 160,
})
// expect(offsets).toEqual<Offsets>({
// span: 0,
// rootHierarchyOffset: 400368,
// rootHierarchyLength: 160,
// lazVlrOffset: 643,
// lazVlrLength: 52,
// wktVlrOffset: 749,
// wktVlrLength: 681,
// extraBytesVlrOffset: 0,
// extraBytesVlrLength: 0,
// })

})
16 changes: 12 additions & 4 deletions src/copc/offsets.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@ import { Binary, parseBigInt } from 'utils'
import { offsetsLength } from './constants'

export type Offsets = {
span: number
center_x: number
center_y: number
center_z: number
halfsize: number
spacing: number
rootHierarchyOffset: number
rootHierarchyLength: number
}
Expand All @@ -19,8 +23,12 @@ function parse(buffer: Binary): Offsets {
}

return {
span: parseBigInt(dv.getBigInt64(0, true)),
rootHierarchyOffset: parseBigInt(dv.getBigUint64(8, true)),
rootHierarchyLength: parseBigInt(dv.getBigUint64(16, true)),
center_x: Number(dv.getFloat64(0, true)),
center_y: Number(dv.getFloat64(8, true)),
center_z: Number(dv.getFloat64(16, true)),
halfsize: Number(dv.getFloat64(24, true)),
spacing: Number(dv.getFloat64(32, true)),
rootHierarchyOffset: parseBigInt(dv.getBigUint64(40, true)),
rootHierarchyLength: parseBigInt(dv.getBigUint64(48, true)),
}
}
9 changes: 5 additions & 4 deletions src/las/dimensions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,16 @@ const typemap: { [name: string]: Dimension | undefined } = {
Intensity: Type.uint16,
ReturnNumber: Type.uint8,
NumberOfReturns: Type.uint8,
ScanDirectionFlag: Type.boolean,
EdgeOfFlightLine: Type.boolean,
Classification: Type.uint8,
Synthetic: Type.boolean,
KeyPoint: Type.boolean,
Withheld: Type.boolean,
Overlap: Type.boolean,
ScanAngle: Type.float32,
ScannerChannel: Type.uint8,
ScanDirectionFlag: Type.boolean,
EdgeOfFlightLine: Type.boolean,
Classification: Type.uint8,
UserData: Type.uint8,
ScanAngle: Type.int16,
PointSourceId: Type.uint16,
GpsTime: Type.float64,
Red: Type.uint16,
Expand Down
101 changes: 38 additions & 63 deletions src/las/extractor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,102 +12,77 @@ type PartialHeader = Pick<
function create(header: PartialHeader) {
const { pointDataRecordFormat: pdrf } = header
switch (pdrf) {
case 0:
return create0(header)
case 1:
return create1(header)
case 2:
return create2(header)
case 3:
return create3(header)
case 6:
return create6(header)
case 7:
return create7(header)
default:
throw new Error(`Unsupported point data record format: ${pdrf}`)
}
}

function create0(header: PartialHeader): ExtractorType.Map {
function create6(header: PartialHeader): ExtractorType.Map {
const { scale, offset } = header
const getPointOffset = getPointOffsetGetter(header)
function getScanFlags(dv: DataView, index: number) {
function getReturnFlags(dv: DataView, index: number) {
return dv.getUint8(getPointOffset(index) + 14)
}
function getFullClassification(dv: DataView, index: number) {
function getScanFlags(dv: DataView, index: number) {
return dv.getUint8(getPointOffset(index) + 15)
}
function getClassification(dv: DataView, index: number) {
return getFullClassification(dv, index) & 0b0001_1111
}

return {
X: (dv, index) =>
Scale.unapply(
dv.getInt32(getPointOffset(index), true),
dv.getInt32(getPointOffset(index), true),//4
scale[0],
offset[0]
),
Y: (dv, index) =>
Scale.unapply(
dv.getInt32(getPointOffset(index) + 4, true),
dv.getInt32(getPointOffset(index) + 4, true),//8
scale[1],
offset[1]
),
Z: (dv, index) =>
Scale.unapply(
dv.getInt32(getPointOffset(index) + 8, true),
dv.getInt32(getPointOffset(index) + 8, true),//12
scale[2],
offset[2]
),
Intensity: (dv, index) => dv.getUint16(getPointOffset(index) + 12, true),
ReturnNumber: (dv, index) => getScanFlags(dv, index) & 0b0000_0111,
NumberOfReturns: (dv, index) =>
(getScanFlags(dv, index) & 0b0011_1000) >> 3,
ScanDirectionFlag: (dv, index) =>
(getScanFlags(dv, index) & 0b0100_0000) >> 6,
EdgeOfFlightLine: (dv, index) =>
(getScanFlags(dv, index) & 0b1000_0000) >> 7,
Classification: (dv, index) => getClassification(dv, index),
Synthetic: (dv, index) =>
(getFullClassification(dv, index) & 0b0010_0000) >> 5,
KeyPoint: (dv, index) =>
(getFullClassification(dv, index) & 0b0100_0000) >> 6,
Withheld: (dv, index) =>
(getFullClassification(dv, index) & 0b1000_0000) >> 7,
Overlap: (dv, index) => (getClassification(dv, index) === 12 ? 1 : 0),
ScanAngle: (dv, index) => dv.getInt8(getPointOffset(index) + 16),
UserData: (dv, index) => dv.getUint8(getPointOffset(index) + 17),
PointSourceId: (dv, index) =>
dv.getUint16(getPointOffset(index) + 18, true),
}
}

function create1(header: PartialHeader): ExtractorType.Map {
const getPointOffset = getPointOffsetGetter(header)
return {
...create0(header),
GpsTime: (dv, index) => dv.getFloat64(getPointOffset(index) + 20, true),
Intensity: (dv, index) => dv.getUint16(getPointOffset(index) + 12, true),//14
ReturnNumber: (dv, index) => getReturnFlags(dv, index) & 0b0000_1111, //15
NumberOfReturns: (dv, index) =>
(getReturnFlags(dv, index) & 0b1111_0000) >> 4, //15
Synthetic: (dv, index) =>
(getScanFlags(dv, index) & 0b0000_0001) >> 0,
KeyPoint: (dv, index) =>
(getScanFlags(dv, index) & 0b0000_0010) >> 1,
Withheld: (dv, index) =>
(getScanFlags(dv, index) & 0b0000_0100) >> 2,
Overlap: (dv, index) =>
(getScanFlags(dv, index) & 0b0000_1000) >> 3,
ScannerChannel: (dv, index) =>
(getScanFlags(dv, index) & 0b0011_0000) >> 4,
ScanDirectionFlag: (dv, index) =>
(getScanFlags(dv, index) & 0b0100_0000) >> 6,
EdgeOfFlightLine: (dv, index) =>
(getScanFlags(dv, index) & 0b1000_0000) >> 7, //16
Classification: (dv, index) => dv.getUint8(getPointOffset(index) + 16),//17
UserData: (dv, index) => dv.getUint8(getPointOffset(index) + 17),//18
ScanAngle: (dv, index) => dv.getInt16(getPointOffset(index) + 18),//20
PointSourceId: (dv, index) => dv.getUint16(getPointOffset(index) + 20),//22
GpsTime: (dv, index) => dv.getFloat64(getPointOffset(index) + 22, true), //30
}
}

function create2(header: PartialHeader): ExtractorType.Map {
function create7(header: PartialHeader): ExtractorType.Map {
const getPointOffset = getPointOffsetGetter(header)

return {
...create0(header),
Red: (dv, index) => dv.getUint16(getPointOffset(index) + 20, true),
Green: (dv, index) => dv.getUint16(getPointOffset(index) + 22, true),
Blue: (dv, index) => dv.getUint16(getPointOffset(index) + 24, true),
}
}

function create3(header: PartialHeader): ExtractorType.Map {
const getPointOffset = getPointOffsetGetter(header)

return {
...create0(header),
GpsTime: (dv, index) => dv.getFloat64(getPointOffset(index) + 20, true),
Red: (dv, index) => dv.getUint16(getPointOffset(index) + 28, true),
Green: (dv, index) => dv.getUint16(getPointOffset(index) + 30, true),
Blue: (dv, index) => dv.getUint16(getPointOffset(index) + 32, true),
...create6(header),
Red: (dv, index) => dv.getUint16(getPointOffset(index) + 30, true), //32
Green: (dv, index) => dv.getUint16(getPointOffset(index) + 32, true), //34
Blue: (dv, index) => dv.getUint16(getPointOffset(index) + 34, true), //36
}
}

Expand Down
Loading