Commit

Fix unit tests

mpulkki-mapbox committed Dec 9, 2019
1 parent b935e3f commit 96aa968
Showing 4 changed files with 59 additions and 58 deletions.
12 changes: 6 additions & 6 deletions test/unit/source/raster_dem_tile_source.test.js
@@ -98,11 +98,11 @@ test('RasterTileSource', (t) => {
         source.loadTile(tile, () => {});
 
         t.deepEqual(Object.keys(tile.neighboringTiles), [
-            new OverscaledTileID(10, 0, 10, 4, 5).key,
-            new OverscaledTileID(10, 0, 10, 6, 5).key,
             new OverscaledTileID(10, 0, 10, 4, 4).key,
             new OverscaledTileID(10, 0, 10, 5, 4).key,
             new OverscaledTileID(10, 0, 10, 6, 4).key,
+            new OverscaledTileID(10, 0, 10, 4, 5).key,
+            new OverscaledTileID(10, 0, 10, 6, 5).key,
             new OverscaledTileID(10, 0, 10, 4, 6).key,
             new OverscaledTileID(10, 0, 10, 5, 6).key,
             new OverscaledTileID(10, 0, 10, 6, 6).key
@@ -134,13 +134,13 @@ test('RasterTileSource', (t) => {
         source.loadTile(tile, () => {});
 
         t.deepEqual(Object.keys(tile.neighboringTiles), [
-            new OverscaledTileID(5, 0, 5, 30, 4).key,
-            new OverscaledTileID(5, 0, 5, 31, 4).key,
-            new OverscaledTileID(5, 0, 5, 30, 5).key,
             new OverscaledTileID(5, 0, 5, 30, 6).key,
             new OverscaledTileID(5, 0, 5, 31, 6).key,
-            new OverscaledTileID(5, 1, 5, 0, 4).key,
+            new OverscaledTileID(5, 0, 5, 30, 5).key,
             new OverscaledTileID(5, 1, 5, 0, 5).key,
+            new OverscaledTileID(5, 0, 5, 30, 4).key,
+            new OverscaledTileID(5, 0, 5, 31, 4).key,
+            new OverscaledTileID(5, 1, 5, 0, 4).key,
             new OverscaledTileID(5, 1, 5, 0, 6).key
         ]);
         t.end();
81 changes: 41 additions & 40 deletions test/unit/source/source_cache.test.js
@@ -779,20 +779,20 @@ test('SourceCache#update', (t) => {
             if (e.sourceDataType === 'metadata') {
                 sourceCache.update(transform);
                 t.deepEqual(sourceCache.getRenderableIds(), [
-                    new OverscaledTileID(16, 0, 16, 8192, 8192).key,
-                    new OverscaledTileID(16, 0, 16, 8191, 8192).key,
-                    new OverscaledTileID(16, 0, 16, 8192, 8191).key,
-                    new OverscaledTileID(16, 0, 16, 8191, 8191).key
+                    new OverscaledTileID(16, 0, 14, 8192, 8192).key,
+                    new OverscaledTileID(16, 0, 14, 8191, 8192).key,
+                    new OverscaledTileID(16, 0, 14, 8192, 8191).key,
+                    new OverscaledTileID(16, 0, 14, 8191, 8191).key
                 ]);
 
                 transform.zoom = 15;
                 sourceCache.update(transform);
 
                 t.deepEqual(sourceCache.getRenderableIds(), [
-                    new OverscaledTileID(16, 0, 16, 8192, 8192).key,
-                    new OverscaledTileID(16, 0, 16, 8191, 8192).key,
-                    new OverscaledTileID(16, 0, 16, 8192, 8191).key,
-                    new OverscaledTileID(16, 0, 16, 8191, 8191).key
+                    new OverscaledTileID(16, 0, 14, 8192, 8192).key,
+                    new OverscaledTileID(16, 0, 14, 8191, 8192).key,
+                    new OverscaledTileID(16, 0, 14, 8192, 8191).key,
+                    new OverscaledTileID(16, 0, 14, 8191, 8191).key
                 ]);
                 t.end();
             }
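
Note on the canonical-zoom corrections above (and in the tilesIn hunks further down): judging from the old expected key values elsewhere in this commit (1, 33, 97, 225, 2, 34), the previous numeric key appears to have encoded only the wrap, the overscaled zoom, and the canonical x/y, so two OverscaledTileIDs differing only in canonical z produced the same key and the old expectations passed even though they named the wrong canonical zoom. A hypothetical reconstruction, inferred from those removed values rather than taken from the repository:

    // Hypothetical old numeric key, reverse-engineered from the removed
    // expectations in this commit (e.g. 33, 97, 225, 34); not the actual source.
    function oldKey(wrap, overscaledZ, canonicalX, canonicalY) {
        // Fold wraps 0, -1, 1, -2, ... into 0, 1, 2, 3, ...
        wrap *= 2;
        if (wrap < 0) wrap = wrap * -1 - 1;
        const dim = 1 << overscaledZ;
        return ((dim * dim * wrap + dim * canonicalY + canonicalX) * 32) + overscaledZ;
    }

    console.log(oldKey(0, 1, 1, 1));   // 97  -> matches a removed tile_id expectation
    console.log(oldKey(-1, 1, 1, 1));  // 225 -> matches a removed tile_id expectation
    // Canonical z never enters this key, so IDs like (16, 0, 16, 8192, 8192) and
    // (16, 0, 14, 8192, 8192) collided under the old scheme.

The new string key (see the sketch after the tile_id.test.js hunks) does distinguish the canonical zoom, which is presumably why these expectations had to be corrected to the true canonical coordinates (z 14 for the overscaled raster tiles at zoom 16, z 1 for the 1024px tiles).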
@@ -877,14 +877,14 @@ test('SourceCache#_updateRetainedTiles', (t) => {
         }
 
         const retained = sourceCache._updateRetainedTiles([idealTile], 3);
-        t.deepEqual(Object.keys(retained), [
+        t.deepEqual(Object.keys(retained).sort(), [
             // parents are requested because ideal ideal tile is not completely covered by
             // loaded child tiles
             new OverscaledTileID(0, 0, 0, 0, 0),
-            new OverscaledTileID(1, 0, 1, 0, 0),
             new OverscaledTileID(2, 0, 2, 0, 1),
+            new OverscaledTileID(1, 0, 1, 0, 0),
             idealTile
-        ].concat(loadedChildren).map(t => String(t.key)));
+        ].concat(loadedChildren).map(t => t.key).sort());
 
         t.end();
     });
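
The added .sort() and the reshuffled expected orderings in this file track a JavaScript property-ordering detail: Object.keys lists integer-like keys first, in ascending numeric order, and all other string keys afterwards in insertion order. Keys such as '000', '011', or '022' carry leading zeros and therefore fall into the second group, so the enumeration order changes once tile keys become strings; sorting both sides of the assertion keeps the test independent of insertion order. A minimal illustration (plain JavaScript, not repository code):

    const byNumber = {};
    byNumber[65] = 'tile';
    byNumber[1] = 'tile';
    // Integer-like keys are enumerated in ascending numeric order.
    console.log(Object.keys(byNumber));  // ['1', '65']

    const byString = {};
    byString['065'] = 'tile';  // leading zero: not an integer index
    byString['011'] = 'tile';
    // Non-integer string keys are enumerated in insertion order instead.
    console.log(Object.keys(byString));  // ['065', '011']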
@@ -912,12 +912,12 @@ test('SourceCache#_updateRetainedTiles', (t) => {
         // retained tiles include all ideal tiles and any parents that were loaded to cover
         // non-existant tiles
         t.deepEqual(retained, {
-            // parent
-            '0': new OverscaledTileID(0, 0, 0, 0, 0),
-            // 1/0/1
-            '65': new OverscaledTileID(1, 0, 1, 0, 1),
+            // 1/0/1
+            '211': new OverscaledTileID(1, 0, 1, 0, 1),
             // 1/1/1
-            '97': new OverscaledTileID(1, 0, 1, 1, 1)
+            '311': new OverscaledTileID(1, 0, 1, 1, 1),
+            // parent
+            '000': new OverscaledTileID(0, 0, 0, 0, 0)
         });
         addTileSpy.restore();
         getTileSpy.restore();
@@ -970,10 +970,11 @@ test('SourceCache#_updateRetainedTiles', (t) => {
             }
         });
         const idealTile = new OverscaledTileID(1, 0, 1, 0, 1);
+        const parentTile = new OverscaledTileID(0, 0, 0, 0, 0);
         sourceCache._tiles[idealTile.key] = new Tile(idealTile);
         sourceCache._tiles[idealTile.key].state = 'loading';
-        sourceCache._tiles['0'] = new Tile(new OverscaledTileID(0, 0, 0, 0, 0));
-        sourceCache._tiles['0'].state = 'loaded';
+        sourceCache._tiles[parentTile.key] = new Tile(parentTile);
+        sourceCache._tiles[parentTile.key].state = 'loaded';
 
         const addTileSpy = t.spy(sourceCache, '_addTile');
         const getTileSpy = t.spy(sourceCache, 'getTile');
@@ -987,9 +988,9 @@ test('SourceCache#_updateRetainedTiles', (t) => {
 
         t.deepEqual(retained, {
             // parent of ideal tile 0/0/0
-            '0' : new OverscaledTileID(0, 0, 0, 0, 0),
+            '000' : new OverscaledTileID(0, 0, 0, 0, 0),
             // ideal tile id 1/0/1
-            '65' : new OverscaledTileID(1, 0, 1, 0, 1)
+            '211' : new OverscaledTileID(1, 0, 1, 0, 1)
         }, 'retain ideal and parent tile when ideal tiles aren\'t loaded');
 
         addTileSpy.resetHistory();
@@ -1002,7 +1003,7 @@ test('SourceCache#_updateRetainedTiles', (t) => {
         t.ok(getTileSpy.notCalled);
         t.deepEqual(retainedLoaded, {
             // only ideal tile retained
-            '65' : new OverscaledTileID(1, 0, 1, 0, 1)
+            '211' : new OverscaledTileID(1, 0, 1, 0, 1)
         }, 'only retain ideal tiles when they\'re all loaded');
 
         addTileSpy.restore();
@@ -1059,24 +1060,24 @@ test('SourceCache#_updateRetainedTiles', (t) => {
         t.deepEqual(retained, {
             // parent of ideal tile (0, 0, 0) (only partially covered by loaded child
             // tiles, so we still need to load the parent)
-            '0' : new OverscaledTileID(0, 0, 0, 0, 0),
+            '000' : new OverscaledTileID(0, 0, 0, 0, 0),
             // ideal tile id (1, 0, 0)
-            '1' : new OverscaledTileID(1, 0, 1, 0, 0),
+            '011' : new OverscaledTileID(1, 0, 1, 0, 0),
             // loaded child tile (2, 0, 0)
-            '2': new OverscaledTileID(2, 0, 2, 0, 0)
+            '022': new OverscaledTileID(2, 0, 2, 0, 0)
         }, 'retains children and parent when ideal tile is partially covered by a loaded child tile');
 
         getTileSpy.restore();
         // remove child tile and check that it only uses parent tile
-        delete sourceCache._tiles['2'];
+        delete sourceCache._tiles['022'];
         retained = sourceCache._updateRetainedTiles([idealTile], 1);
 
         t.deepEqual(retained, {
             // parent of ideal tile (0, 0, 0) (only partially covered by loaded child
             // tiles, so we still need to load the parent)
-            '0' : new OverscaledTileID(0, 0, 0, 0, 0),
+            '000' : new OverscaledTileID(0, 0, 0, 0, 0),
             // ideal tile id (1, 0, 0)
-            '1' : new OverscaledTileID(1, 0, 1, 0, 0)
+            '011' : new OverscaledTileID(1, 0, 1, 0, 0)
         }, 'only retains parent tile if no child tiles are loaded');
 
         t.end();
@@ -1104,7 +1105,7 @@ test('SourceCache#_updateRetainedTiles', (t) => {
 
         t.deepEqual(retained, {
             // ideal tile id (2, 0, 0)
-            '2' : new OverscaledTileID(2, 0, 2, 0, 0)
+            '022' : new OverscaledTileID(2, 0, 2, 0, 0)
         }, 'doesn\'t retain parent tiles below minzoom');
 
         getTileSpy.restore();
@@ -1134,7 +1135,7 @@ test('SourceCache#_updateRetainedTiles', (t) => {
 
         t.deepEqual(retained, {
             // ideal tile id (2, 0, 0)
-            '2' : new OverscaledTileID(2, 0, 2, 0, 0)
+            '022' : new OverscaledTileID(2, 0, 2, 0, 0)
         }, 'doesn\'t retain child tiles above maxzoom');
 
         getTileSpy.restore();
@@ -1203,10 +1204,10 @@ test('SourceCache#_updateRetainedTiles', (t) => {
         const retained = sourceCache._updateRetainedTiles(idealTiles, 8);
 
         t.deepEqual(Object.keys(retained), [
-            new OverscaledTileID(7, 0, 7, 0, 0).key,
-            new OverscaledTileID(8, 0, 7, 0, 0).key,
             new OverscaledTileID(7, 0, 7, 1, 0).key,
-            new OverscaledTileID(8, 0, 7, 1, 0).key
+            new OverscaledTileID(8, 0, 7, 1, 0).key,
+            new OverscaledTileID(8, 0, 7, 0, 0).key,
+            new OverscaledTileID(7, 0, 7, 0, 0).key
         ]);
 
         t.end();
@@ -1302,12 +1303,12 @@ test('SourceCache#tilesIn', (t) => {
             tiles.sort((a, b) => { return a.tile.tileID.canonical.x - b.tile.tileID.canonical.x; });
             tiles.forEach((result) => { delete result.tile.uid; });
 
-            t.equal(tiles[0].tile.tileID.key, 1);
+            t.equal(tiles[0].tile.tileID.key, "011");
             t.equal(tiles[0].tile.tileSize, 512);
             t.equal(tiles[0].scale, 1);
             t.deepEqual(round(tiles[0].queryGeometry), [{x: 4096, y: 4050}, {x:12288, y: 8146}]);
 
-            t.equal(tiles[1].tile.tileID.key, 33);
+            t.equal(tiles[1].tile.tileID.key, "111");
             t.equal(tiles[1].tile.tileSize, 512);
             t.equal(tiles[1].scale, 1);
             t.deepEqual(round(tiles[1].queryGeometry), [{x: -4096, y: 4050}, {x: 4096, y: 8146}]);
@@ -1340,10 +1341,10 @@ test('SourceCache#tilesIn', (t) => {
             sourceCache.update(transform);
 
             t.deepEqual(sourceCache.getIds(), [
-                new OverscaledTileID(2, 0, 2, 1, 1).key,
-                new OverscaledTileID(2, 0, 2, 0, 1).key,
-                new OverscaledTileID(2, 0, 2, 1, 0).key,
-                new OverscaledTileID(2, 0, 2, 0, 0).key
+                new OverscaledTileID(2, 0, 1, 1, 1).key,
+                new OverscaledTileID(2, 0, 1, 0, 1).key,
+                new OverscaledTileID(2, 0, 1, 1, 0).key,
+                new OverscaledTileID(2, 0, 1, 0, 0).key
             ]);
 
             const tiles = sourceCache.tilesIn([
@@ -1354,12 +1355,12 @@ test('SourceCache#tilesIn', (t) => {
            tiles.sort((a, b) => { return a.tile.tileID.canonical.x - b.tile.tileID.canonical.x; });
            tiles.forEach((result) => { delete result.tile.uid; });
 
-            t.equal(tiles[0].tile.tileID.key, 2);
+            t.equal(tiles[0].tile.tileID.key, "012");
             t.equal(tiles[0].tile.tileSize, 1024);
             t.equal(tiles[0].scale, 1);
             t.deepEqual(round(tiles[0].queryGeometry), [{x: 4096, y: 4050}, {x:12288, y: 8146}]);
 
-            t.equal(tiles[1].tile.tileID.key, 34);
+            t.equal(tiles[1].tile.tileID.key, "112");
             t.equal(tiles[1].tile.tileSize, 1024);
             t.equal(tiles[1].scale, 1);
             t.deepEqual(round(tiles[1].queryGeometry), [{x: -4096, y: 4050}, {x: 4096, y: 8146}]);
18 changes: 9 additions & 9 deletions test/unit/source/tile_id.test.js
@@ -23,10 +23,10 @@ test('CanonicalTileID', (t) => {
     });
 
     t.test('.key', (t) => {
-        t.deepEqual(new CanonicalTileID(0, 0, 0).key, 0);
-        t.deepEqual(new CanonicalTileID(1, 0, 0).key, 1);
-        t.deepEqual(new CanonicalTileID(1, 1, 0).key, 33);
-        t.deepEqual(new CanonicalTileID(1, 1, 1).key, 97);
+        t.deepEqual(new CanonicalTileID(0, 0, 0).key, "000");
+        t.deepEqual(new CanonicalTileID(1, 0, 0).key, "011");
+        t.deepEqual(new CanonicalTileID(1, 1, 0).key, "111");
+        t.deepEqual(new CanonicalTileID(1, 1, 1).key, "311");
         t.end();
     });
 
@@ -77,11 +77,11 @@ test('OverscaledTileID', (t) => {
     });
 
     t.test('.key', (t) => {
-        t.deepEqual(new OverscaledTileID(0, 0, 0, 0, 0).key, 0);
-        t.deepEqual(new OverscaledTileID(1, 0, 1, 0, 0).key, 1);
-        t.deepEqual(new OverscaledTileID(1, 0, 1, 1, 0).key, 33);
-        t.deepEqual(new OverscaledTileID(1, 0, 1, 1, 1).key, 97);
-        t.deepEqual(new OverscaledTileID(1, -1, 1, 1, 1).key, 225);
+        t.deepEqual(new OverscaledTileID(0, 0, 0, 0, 0).key, "000");
+        t.deepEqual(new OverscaledTileID(1, 0, 1, 0, 0).key, "011");
+        t.deepEqual(new OverscaledTileID(1, 0, 1, 1, 0).key, "111");
+        t.deepEqual(new OverscaledTileID(1, 0, 1, 1, 1).key, "311");
+        t.deepEqual(new OverscaledTileID(1, -1, 1, 1, 1).key, "711");
         t.end();
     });
 
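The new expected values in tile_id.test.js suggest that a tile key is now a short string derived from the wrap, the canonical x/y, the canonical zoom, and the overscaled zoom. The sketch below reproduces every expected value shown above, but it is inferred from those values only; the function name and the base-36 encoding are assumptions, not the repository's actual implementation:

    // Hypothetical string key matching the expectations above, e.g.
    // (1, -1, 1, 1, 1) -> "711" and the overscaled 1024px tile -> "012".
    function calculateKey(wrap, overscaledZ, z, x, y) {
        // Fold wraps 0, -1, 1, -2, ... into 0, 1, 2, 3, ...
        wrap *= 2;
        if (wrap < 0) wrap = wrap * -1 - 1;
        const dim = 1 << z;
        // Row-major index within the wrapped canonical grid, then the
        // canonical and overscaled zoom levels, all base-36 encoded.
        return (dim * dim * wrap + dim * y + x).toString(36) + z.toString(36) + overscaledZ.toString(36);
    }

    console.log(calculateKey(0, 0, 0, 0, 0));   // "000"
    console.log(calculateKey(0, 1, 1, 1, 1));   // "311"
    console.log(calculateKey(-1, 1, 1, 1, 1));  // "711"
    console.log(calculateKey(0, 2, 1, 0, 0));   // "012"
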
6 changes: 3 additions & 3 deletions test/unit/ui/map.test.js
@@ -322,11 +322,11 @@ test('Map', (t) => {
         const map = createMap(t, {style});
         t.equal(map.areTilesLoaded(), true, 'returns true if there are no sources on the map');
         map.on('load', () => {
-
+            const fakeTileId = new OverscaledTileID(0, 0, 0, 0, 0);
             map.addSource('geojson', createStyleSource());
-            map.style.sourceCaches.geojson._tiles.fakeTile = new Tile(new OverscaledTileID(0, 0, 0, 0, 0));
+            map.style.sourceCaches.geojson._tiles[fakeTileId.key] = new Tile(fakeTileId);
             t.equal(map.areTilesLoaded(), false, 'returns false if tiles are loading');
-            map.style.sourceCaches.geojson._tiles.fakeTile.state = 'loaded';
+            map.style.sourceCaches.geojson._tiles[fakeTileId.key].state = 'loaded';
             t.equal(map.areTilesLoaded(), true, 'returns true if tiles are loaded');
             t.end();
         });
