add geotiffJS library full git

JS 2023-10-29 22:27:19 +01:00
parent 2e27417fc5
commit aef672986a
62 changed files with 21999 additions and 0 deletions

geotiffGesture/geotiffJS/.babelrc
@@ -0,0 +1,4 @@
{
"presets": ["@babel/preset-env"],
"plugins": ["@babel/plugin-transform-runtime"]
}

geotiffGesture/geotiffJS/.eslintrc.cjs
@@ -0,0 +1,54 @@
module.exports = {
extends: 'airbnb-base',
env: {
mocha: true,
browser: true,
worker: true,
node: true,
},
ignorePatterns: [
'test/lib/**/*',
],
parserOptions: {
ecmaVersion: 'latest',
sourceType: 'module',
},
rules: {
'no-underscore-dangle': 0,
'class-methods-use-this': 0,
'no-plusplus': 0,
'no-loop-func': 0,
'no-mixed-operators': [
'error', {
allowSamePrecedence: true,
},
],
'no-param-reassign': [
'error', {
props: false,
},
],
'no-prototype-builtins': 0,
'no-restricted-syntax': [
'error',
'LabeledStatement',
'WithStatement',
],
'no-console': 0,
'no-bitwise': 0,
'max-classes-per-file': 0,
'max-len': ['error', { code: 130 }],
'import/prefer-default-export': 0,
'import/extensions': ['error', 'always'],
'prefer-default-export': 0,
'func-names': 0,
'arrow-body-style': 0,
'function-paren-newline': 0,
'object-curly-newline': 0,
'no-await-in-loop': 0,
'prefer-destructuring': ['error', { object: true, array: false }],
curly: ['error', 'all'],
'brace-style': ['error', '1tbs', { allowSingleLine: false }],
'no-else-return': 0,
},
};

geotiffGesture/geotiffJS/.github/workflows/ci.yml
@@ -0,0 +1,31 @@
name: Node.js CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
ci:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: |
    docker pull osgeo/gdal:ubuntu-small-latest
    docker run -i --rm -v `pwd`/test/data:/data osgeo/gdal:ubuntu-small-latest bash -c "apt-get update && apt-get -y install imagemagick libtiff-tools wget && cd /data && ./setup_data.sh"
- name: Use Node.js 20.x
uses: actions/setup-node@v1
with:
node-version: 20.x
- run: npm ci
- run: npm run build
- run: npm test
- name: action-slack
uses: 8398a7/action-slack@v3.8.0
with:
status: ${{ job.status }}
fields: repo,message,commit,author,action,eventName,ref,workflow,job,took
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
if: ${{ github.event_name != 'pull_request' }}

@@ -0,0 +1,32 @@
name: Documentation
on:
push:
branches: [ master ]
jobs:
docs:
name: Build and publish Documentation
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Use Node.js 20.x
uses: actions/setup-node@v1
with:
node-version: 20.x # this workflow defines no matrix, so pin the version used in CI
- run: npm ci
- run: npm run docs
- name: Deploy pages
uses: JamesIves/github-pages-deploy-action@4.0.0
with:
branch: gh-pages
folder: docs/
- name: action-slack
uses: 8398a7/action-slack@v3.8.0
with:
status: ${{ job.status }}
fields: repo,message,commit,author,action,eventName,ref,workflow,job,took
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
if: always()

@@ -0,0 +1,38 @@
name: Release
on:
push:
tags:
- v*
jobs:
release:
name: Create Release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- run: npm install
- run: npm run docs
- uses: JS-DevTools/npm-publish@v1
with:
token: ${{ secrets.NPM_TOKEN }}
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ github.ref }}
body: ""
draft: true
prerelease: false
- name: action-slack
uses: 8398a7/action-slack@v3.8.0
with:
status: ${{ job.status }}
fields: repo,message,commit,author,action,eventName,ref,workflow,job,took
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
if: always()

geotiffGesture/geotiffJS/.gitignore
@@ -0,0 +1,8 @@
node_modules
.cache
dist
dist-node
dist-browser
dist-module
docs
.DS_Store

geotiffGesture/geotiffJS/.jsdoc.json
@@ -0,0 +1,6 @@
{
"plugins": ["plugins/markdown", "jsdoc-plugin-typescript", "jsdoc-plugin-intersection"],
"typescript": {
"moduleRoot": "src"
}
}

geotiffGesture/geotiffJS/.npmignore
@@ -0,0 +1,4 @@
docs
test
src
scripts

geotiffGesture/geotiffJS/COMMUNITY.md
@@ -0,0 +1,8 @@
# Community Packages
Here is a list of community packages that use or extend the functionality of the core geotiff library.
- [geotiff-stats](https://github.com/geotiff/geotiff-stats): JavaScript package for computing basic statistics (e.g., min and max) for geotiffs, especially in a low-memory environment.
- [geotiff-palette](https://github.com/GeoTIFF/geotiff-palette): JavaScript package for getting the palette (aka Color Map) for a geotiff.
- [geotiff-geokeys-to-proj4](https://github.com/matafokka/geotiff-geokeys-to-proj4): JavaScript package for converting GeoTIFF's geokeys to Proj4 string, so images could be reprojected and consumed correctly.
- [geotiff-precise-bbox](https://github.com/geotiff/geotiff-precise-bbox): JavaScript package for getting the most precise bounding box for a GeoTIFF image. It avoids floating-point arithmetic errors by using [preciso](https://github.com/danieljdufour/preciso) and storing numbers as strings.
- [geotiff-tile](https://github.com/GeoTIFF/geotiff-tile): JavaScript package for generating a map tile from a geotiff for nearly any standard extent or projection.

geotiffGesture/geotiffJS/LICENSE
@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 EOX IT Services GmbH
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

geotiffGesture/geotiffJS/README.md
@@ -0,0 +1,538 @@
# geotiff.js
[![Node.js CI](https://github.com/geotiffjs/geotiff.js/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/geotiffjs/geotiff.js/actions/workflows/ci.yml) [![npm version](https://badge.fury.io/js/geotiff.svg)](https://badge.fury.io/js/geotiff) [![Gitter chat](https://badges.gitter.im/geotiffjs/geotiff.js.png)](https://gitter.im/geotiffjs/Lobby)
Read (geospatial) metadata and raw array data from a wide variety of
(Geo)TIFF file types.
## Features
Currently available functionality:
* Parsing TIFFs from various sources:
* remote (via `fetch` or XHR)
* from a local `ArrayBuffer`
* from the filesystem (in browsers using the `FileReader` API, and in Node.js using the filesystem functions)
* Parsing the headers of all possible TIFF files
* Rudimentary extraction of geospatial metadata
* Reading raster data from:
* stripped images
* tiled images
* band interleaved images
* pixel interleaved images
* Supported data-types:
* (U)Int8/16/32
* UInt1-31 (with some drawbacks)
* Float16/32/64
* Enabled compressions:
* no compression
* Packbits
* LZW
* Deflate (with floating point or horizontal predictor support)
* JPEG
* LERC (with additional Deflate compression support)
* Automatic selection of overview level to read from
* Subsetting via an image window or bounding box and selected bands
* Reading of samples into separate arrays or a single pixel-interleaved array
* Configurable tile/strip cache
* Configurable Pool of workers to increase decoding efficiency
* Utility functions for geospatial parameters (Bounding Box, Origin, Resolution)
* Limited [bigTIFF](http://bigtiff.org/#FILE_FORMAT) support
* Automated testing via PhantomJS
Further documentation can be found [here](https://geotiffjs.github.io/geotiff.js/).
## Example Usage
geotiff.js gives you access to all GeoTIFF metadata, but does not offer any one specific higher-level API (such as GDAL) for things like transforms or data extraction. However, you can write your own higher-level API on top of this library, tailored to your specific dataset needs.
As an example, here is how you would resolve GPS coordinates to elevation in a GeoTIFF that encodes WGS-84-compliant geo data:
```js
import { fromUrl, fromArrayBuffer, fromBlob } from "geotiff";
const lerp = (a, b, t) => (1 - t) * a + t * b;
function transform(a, b, M, roundToInt = false) {
const round = (v) => (roundToInt ? v | 0 : v);
return [
round(M[0] + M[1] * a + M[2] * b),
round(M[3] + M[4] * a + M[5] * b),
];
}
// Load our data tile from url, arraybuffer, or blob, so we can work with it:
const tiff = await fromArrayBuffer(...);
const image = await tiff.getImage(); // by default, the first image is read.
// Construct the WGS-84 forward and inverse affine matrices:
const { ModelPixelScale: s, ModelTiepoint: t } = image.fileDirectory;
let [sx, sy, sz] = s;
let [px, py, k, gx, gy, gz] = t;
sy = -sy; // WGS-84 tiles have a "flipped" y component
const pixelToGPS = [gx, sx, 0, gy, 0, sy];
console.log(`pixel to GPS transform matrix:`, pixelToGPS);
const gpsToPixel = [-gx / sx, 1 / sx, 0, -gy / sy, 0, 1 / sy];
console.log(`GPS to pixel transform matrix:`, gpsToPixel);
// Convert a GPS coordinate to a pixel coordinate in our tile:
const [gx1, gy1, gx2, gy2] = image.getBoundingBox();
const lat = lerp(gy1, gy2, Math.random());
const long = lerp(gx1, gx2, Math.random());
console.log(`Looking up GPS coordinate (${lat.toFixed(6)},${long.toFixed(6)})`);
const [x, y] = transform(long, lat, gpsToPixel, true);
console.log(`Corresponding tile pixel coordinate: [${x}][${y}]`);
// And as each pixel in the tile covers a geographic area, not a single
// GPS coordinate, get the area that this pixel covers:
const gpsBBox = [transform(x, y, pixelToGPS), transform(x + 1, y + 1, pixelToGPS)];
console.log(`Pixel covers the following GPS area:`, gpsBBox);
// Finally, retrieve the elevation associated with this pixel's geographic area:
const rasters = await image.readRasters();
const { width, [0]: raster } = rasters;
const elevation = raster[x + y * width];
console.log(`The elevation at (${lat.toFixed(6)},${long.toFixed(6)}) is ${elevation}m`);
```
## Advanced Example Usage
For more advanced examples of `geotiff` in larger codebases, please have a look at the following projects:
* [Slice view using Cesium.js (TAMP project)](http://www.youtube.com/watch?v=E6kFLtKgeJ8)
[![3D slice view](http://img.youtube.com/vi/E6kFLtKgeJ8/0.jpg)](http://www.youtube.com/watch?v=E6kFLtKgeJ8)
* [Contour generation using d3-contour](https://bl.ocks.org/mbostock/83c0be21dba7602ee14982b020b12f51)
[![contour](https://user-images.githubusercontent.com/482265/112866402-0b219880-90ba-11eb-9dda-5f1d9ed9bafc.jpg)](https://bl.ocks.org/mbostock/83c0be21dba7602ee14982b020b12f51)
## Setup
To set up the repository, perform the following steps:
```bash
# clone repo
git clone https://github.com/constantinius/geotiff.js.git
cd geotiff.js/
# install development dependencies
npm install
```
## Testing and Building
In order to run the tests, you first have to set up the test data. This requires
the [GDAL](http://gdal.org/) and [ImageMagick](http://imagemagick.org/) tools.
Installation of these tools varies with the operating system; the following
listing shows the installation on Ubuntu (using the ubuntugis-unstable
repository):
```bash
sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
sudo apt-get update
sudo apt-get install -y gdal-bin imagemagick
```
To install GDAL and ImageMagick on macOS, please use [Homebrew](https://brew.sh/). The setup script also needs `wget` on macOS:
```bash
brew install wget gdal imagemagick
```
Once GDAL and ImageMagick are installed, the test data setup script can be run:
```bash
cd test/data
sh setup_data.sh
cd -
```
To test the library (using PhantomJS, Karma, Mocha, and Chai), run:
```bash
npm test
```
For in-browser testing, run:
```bash
npm run dev
```
and navigate to `http://localhost:8090/test/`.
To build the library, run:
```bash
npm run build
```
The output is written to `dist-browser/main.js` and `dist-node/main.js`.
## Install
You can install geotiff.js using npm:
```
npm install geotiff
```
or you can use the prebuilt version with a CDN:
```html
<script src="https://cdn.jsdelivr.net/npm/geotiff"></script>
```
## Usage
geotiff.js works with `require`, `import`, and the global variable `GeoTIFF`:
```javascript
const GeoTIFF = require('geotiff');
const { fromUrl, fromUrls, fromArrayBuffer, fromBlob } = GeoTIFF;
// or
import GeoTIFF, { fromUrl, fromUrls, fromArrayBuffer, fromBlob } from 'geotiff';
```
or:
```html
<script src="https://cdn.jsdelivr.net/npm/geotiff"></script>
<script>
console.log(GeoTIFF);
</script>
```
To parse a GeoTIFF, a data source is first required. To help with development,
there are shortcuts available. The following creates a source that reads from a
remote GeoTIFF referenced by a URL:
```javascript
fromUrl(someUrl)
.then(tiff => { /* ... */});
// or when using async/await
(async function() {
const tiff = await fromUrl(someUrl);
// ...
})()
```
Note: the interactions with geotiff.js objects are oftentimes asynchronous. For
the sake of brevity, we will only show the async/await syntax and not the
`Promise`-based one in the following examples.
Accessing remote images is just one way to open TIFF images with geotiff.js. Other
options are reading from a local `ArrayBuffer`:
```javascript
// using local ArrayBuffer
const response = await fetch(someUrl);
const arrayBuffer = await response.arrayBuffer();
const tiff = await fromArrayBuffer(arrayBuffer);
```
or a `Blob`/`File`:
```html
<input type="file" id="file">
<script>
const input = document.getElementById('file');
input.onchange = async function() {
const tiff = await fromBlob(input.files[0]);
}
</script>
```
Now that we have opened the TIFF file, we can inspect it. A TIFF is structured
as a small header and a list of one or more images (Image File Directories, or
IFDs, in TIFF nomenclature). To get one image by index, the `getImage()` function
must be used. This is again an asynchronous operation, as the IFDs are loaded
lazily:
```javascript
const image = await tiff.getImage(); // by default, the first image is read.
```
Now that we have obtained a `GeoTIFFImage` object, we can inspect its metadata
(size, tiling, number of samples, geographical information, etc.). All
the metadata is parsed when the IFD is first read, so access to it
is synchronous:
```javascript
const width = image.getWidth();
const height = image.getHeight();
const tileWidth = image.getTileWidth();
const tileHeight = image.getTileHeight();
const samplesPerPixel = image.getSamplesPerPixel();
// when we are actually dealing with geo-data the following methods return
// meaningful results:
const origin = image.getOrigin();
const resolution = image.getResolution();
const bbox = image.getBoundingBox();
```
The actual raster data is not fetched and parsed automatically. This is because
it is usually far larger than the metadata, and decoding the pixels can be time
consuming due to the necessary decompression.
To read a whole image into one big array of arrays, the following method call can be used:
```javascript
const data = await image.readRasters();
```
For convenience, the result always has `width` and `height` attributes:
```javascript
const data = await image.readRasters();
const { width, height } = data;
```
By default, the raster is split into a separate array for each component. For an RGB image,
for example, we'd get three arrays: one each for red, green, and blue.
```javascript
const [red, green, blue] = await image.readRasters();
```
If we instead want all the bands interleaved in one big array, we have to pass the
`interleave: true` option:
```javascript
// data is one flat array: [r0, g0, b0, r1, g1, b1, ...]
const data = await image.readRasters({ interleave: true });
```
If we are only interested in a specific region of the image, the `window` option can be
used to limit reading to that bounding box. Note: the bounding box is in 'image coordinates',
not geographical ones:
```javascript
const left = 50;
const top = 10;
const right = 150;
const bottom = 60;
const data = await image.readRasters({ window: [left, top, right, bottom] });
```
This image window can extend beyond the image bounds. In that case, it might be useful to supply
a `fillValue: value` option (which can also be an array, with one value per sample).
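For example, a minimal sketch (the window and fill value here are arbitrary):
```javascript
// Pixels of the window that fall outside the image are filled with 0.
const data = await image.readRasters({
  window: [-10, -10, 100, 100],
  fillValue: 0,
});
```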
It is also possible to read only specific samples for each pixel. For example, we can
read just the red component from an RGB image:
```javascript
const [red] = await image.readRasters({ samples: [0] });
```
When you want your output in a specific size, you can use the `width` and `height` options.
These default to the size of your supplied `window`, or to the image size if no
`window` was supplied.
As the data then needs to be resampled, a `resampleMethod` can be specified. This defaults to
the nearest-neighbour method, but the `'bilinear'` method is also supported:
```javascript
const data = await image.readRasters({ width: 40, height: 40, resampleMethod: 'bilinear' });
```
### Using decoder pools to improve parsing performance
Decoding compressed images can be a time consuming process. To mitigate this,
geotiff.js provides the `Pool` mechanism, which uses Web Workers to split the
work across multiple 'threads'.
```javascript
const pool = new GeoTIFF.Pool();
const data = await image.readRasters({ pool });
```
It is possible to provide a pool size (i.e., the number of workers); by default, the number
of available processors is used.
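For example, a sketch with an explicit worker count (the value `4` is arbitrary):
```javascript
// Use four workers instead of the auto-detected processor count.
const pool = new GeoTIFF.Pool(4);
const data = await image.readRasters({ pool });
```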
### Dealing with visual data
The TIFF specification provides various ways to encode visual data. In the
specification this is called photometric interpretation. The simplest case we
already dealt with is RGB. Others are grayscale, paletted images, CMYK,
YCbCr, and CIE L\*a\*b\*.
geotiff.js provides a method to automatically convert these images to RGB:
`readRGB()`. This method is very similar to `readRasters`, with the
distinction that the `interleave` option is always `true` and the
`samples` are chosen automatically.
```javascript
const rgb = await image.readRGB({
// options...
});
```
### Automatic image selection (experimental)
When dealing with images that have internal (or even external, see the next section)
overviews, `GeoTIFF` objects provide a separate `readRasters` method. This method
works very similarly to the method of the same name on `GeoTIFFImage` objects.
By default, it uses the largest image available (highest resolution), but when either
`width`, `height`, `resX`, or `resY` is specified, then the best-fitting image will
be used for reading.
Additionally, it allows a `bbox` parameter instead of `window`. This works
similarly, but uses geographic coordinates instead of pixel ones.
```javascript
const data = await tiff.readRasters({
bbox: [10.34, 57.28, 13.34, 60.23],
resX: 0.1,
resY: 0.1
});
```
### External overviews
Especially for certain kinds of high resolution imagery, it is not uncommon to separate
the highest resolution from the lower resolution overviews (usually using the `.ovr`
extension). With geotiff.js it is possible to use files in this setup just as you
would use single-file images, by taking advantage of the `MultiGeoTIFF` objects. They
behave exactly the same as the aforementioned `GeoTIFF` objects: you can select
images by index or read data using `readRasters`. To open such a file pair, use the
`fromUrls` factory function:
```javascript
const multiTiff = await fromUrls(
'LC08_L1TP_189027_20170403_20170414_01_T1_B3.TIF',
['LC08_L1TP_189027_20170403_20170414_01_T1_B3.TIF.ovr']
);
```
### AbortController Support
geotiff.js supports the use of [`AbortController`s](https://developer.mozilla.org/en-US/docs/Web/API/AbortController). Calls to `readRasters`, `readRGB`, and `getTileOrStrip` will throw an `Error` named `AbortError`, similar to the browser's `fetch` behavior.
```javascript
const tiff = await fromUrl(source);
const abortController = new AbortController();
const { signal } = abortController;
abortController.abort();
try {
  const data = await tiff.readRasters({ signal });
} catch (err) {
  if (err.name === 'AbortError') {
    // do stuff
  }
}
```
### Writing GeoTIFFs (Beta Version)
You can create a binary representation of a GeoTIFF using `writeArrayBuffer`.
This function returns an ArrayBuffer which you can then save as a .tif file.
:warning: `writeArrayBuffer` currently writes the values uncompressed.
```javascript
import GeoTIFF, { writeArrayBuffer } from 'geotiff';
const values = [1, 2, 3, 4, 5, 6, 7, 8, 9];
const metadata = {
height: 3,
width: 3
};
const arrayBuffer = await writeArrayBuffer(values, metadata);
```
You can also customize the metadata using names found in the [TIFF Spec](https://www.loc.gov/preservation/digital/formats/content/tiff_tags.shtml) and [GeoTIFF spec](https://cdn.earthdata.nasa.gov/conduit/upload/6852/geotiff-1.8.1-1995-10-31.pdf).
```javascript
import { writeArrayBuffer } from 'geotiff';
const values = [1, 2, 3, 4, 5, 6, 7, 8, 9];
const metadata = {
GeographicTypeGeoKey: 4326,
height: 3,
ModelPixelScale: [0.031355, 0.031355, 0],
ModelTiepoint: [0, 0, 0, 11.331755000000001, 46.268645, 0],
width: 3
};
const arrayBuffer = await writeArrayBuffer(values, metadata);
```
## What to do with the data?
There is a nice HTML5/WebGL-based rendering library called
[plotty](https://github.com/santilland/plotty) that allows for some really nice
on-the-fly rendering of the data contained in a GeoTIFF.
```html
<canvas id="plot"></canvas>
<script>
// ...
(async function() {
const tiff = await fromUrl(url);
const image = await tiff.getImage();
const data = await image.readRasters();
const canvas = document.getElementById("plot");
const plot = new plotty.plot({
canvas,
data: data[0],
width: image.getWidth(),
height: image.getHeight(),
domain: [0, 256],
colorScale: "viridis"
});
plot.render();
})();
</script>
```
There is also a library called [geotiff-geokeys-to-proj4](https://github.com/matafokka/geotiff-geokeys-to-proj4) that allows for reprojecting pixel coordinates and, therefore, consuming the geospatial data contained in a GeoTIFF.
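A minimal sketch of how the two libraries might be combined, assuming that package's `toProj4` helper and [proj4js](http://proj4js.org/) (treat the exact API as an assumption and check the package's own documentation):
```javascript
import proj4 from 'proj4';
import geokeysToProj4 from 'geotiff-geokeys-to-proj4'; // assumed import shape

const image = await tiff.getImage();
// Convert the image's GeoTIFF geo-keys into a proj4 definition string.
const projObj = geokeysToProj4.toProj4(image.getGeoKeys());
const projection = proj4(projObj.proj4, 'WGS84');
// Reproject the image origin into longitude/latitude.
const [originX, originY] = image.getOrigin();
const [longitude, latitude] = projection.forward([originX, originY]);
```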
## BigTIFF support
geotiff.js has limited support for files in the BigTIFF format. The limitations
originate in the capabilities of current JavaScript implementations regarding
64 bit integer parsing and structures: there are no functions to read 64 bit
integers from a stream and no corresponding typed arrays, while BigTIFF relies
on 64 bit offsets and also allows tag values of those types. In order to still
provide reasonable support, the following is implemented:
* 64 bit integers are read as two 32 bit integers and then combined (see the
sketch after this list). As numbers in JavaScript are typically implemented
as 64 bit floats, there might be inaccuracies for *very* large values.
* For 64 bit integer arrays, the default `Array` type is used. This might
cause problems for some compression algorithms if those arrays are used for
pixel values.
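As a sketch of the first point (the function name and signature here are illustrative, not the library's internal API):
```javascript
// Combine two 32 bit halves into a single JavaScript number. Values above
// Number.MAX_SAFE_INTEGER (2 ** 53 - 1) lose precision.
function readUint64(dataView, offset, littleEndian) {
  const left = dataView.getUint32(offset, littleEndian);
  const right = dataView.getUint32(offset + 4, littleEndian);
  return littleEndian
    ? left + (2 ** 32) * right // low half first
    : (2 ** 32) * left + right; // high half first
}
```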
## n-bit Support
geotiff.js has some n-bit support, meaning it can read unsigned integer data
whose elements use a bit depth that is not a multiple of 8. This only works
with band interleaved images (see
[this related issue](https://github.com/geotiffjs/geotiff.js/issues/202)).
## Planned stuff:
* Better support of geospatial parameters:
* WKT representation
## Known Issues
The open issues can be found on [GitHub](https://github.com/geotiffjs/geotiff.js/issues).
## Contribution
If you have an idea, found a bug, or have a remark, please open a ticket; we will
look into it ASAP.
Pull requests are welcome as well!
## Community Packages
A list of community packages can be found in [COMMUNITY.md](COMMUNITY.md).
## Acknowledgements
This library was inspired by
[GeotiffParser](https://github.com/xlhomme/GeotiffParser.js). It provided a
great starting point, but lacked the capability to read the raw raster data,
which is the aim of geotiff.js.

geotiffGesture/geotiffJS/package-lock.json
File diff suppressed because it is too large.

geotiffGesture/geotiffJS/package.json
@@ -0,0 +1,163 @@
{
"name": "geotiff",
"version": "2.1.0",
"description": "GeoTIFF image decoding in JavaScript",
"repository": "https://github.com/geotiffjs/geotiff.js",
"keywords": [
"TIFF",
"GeoTIFF",
"image",
"raster"
],
"type": "module",
"main": "dist-node/geotiff.js",
"module": "dist-module/geotiff.js",
"jsdelivr": "dist-browser/geotiff.js",
"unpkg": "dist-browser/geotiff.js",
"exports": {
".": {
"import": "./dist-module/geotiff.js",
"require": "./dist-node/geotiff.js",
"browser": "./dist-browser/geotiff.js"
}
},
"typesVersions": {
"*": {
"globals": [
"dist-module/globals.d.ts"
],
"rgb": [
"dist-module/rgb.d.ts"
],
"BaseDecoder": [
"dist-module/compression/BaseDecoder.d.ts"
],
"getDecoder": [
"dist-module/compression/index.d.ts"
],
"addDecoder": [
"dist-module/compression/index.d.ts"
],
"setLogger": [
"dist-module/logging.d.ts"
],
"GeoTIFF": [
"dist-module/geotiff.d.ts"
],
"MultiGeoTIFF": [
"dist-module/geotiff.d.ts"
],
"fromUrl": [
"dist-module/geotiff.d.ts"
],
"fromArrayBuffer": [
"dist-module/geotiff.d.ts"
],
"fromFile": [
"dist-module/geotiff.d.ts"
],
"fromBlob": [
"dist-module/geotiff.d.ts"
],
"fromUrls": [
"dist-module/geotiff.d.ts"
],
"writeArrayBuffer": [
"dist-module/geotiff.d.ts"
],
"Pool": [
"dist-module/pool.d.ts"
],
"GeoTIFFImage": [
"dist-module/geotiffimage.d.ts"
]
}
},
"files": [
"dist-module",
"dist-node",
"dist-browser"
],
"engines": {
"node": ">=10.19"
},
"dependencies": {
"@petamoriken/float16": "^3.4.7",
"lerc": "^3.0.0",
"pako": "^2.0.4",
"parse-headers": "^2.0.2",
"quick-lru": "^6.1.1",
"web-worker": "^1.2.0",
"xml-utils": "^1.0.2",
"zstddec": "^0.1.0"
},
"devDependencies": {
"@babel/core": "^7.8.7",
"@babel/plugin-transform-runtime": "^7.16.10",
"@babel/preset-env": "^7.10.2",
"@babel/register": "^7.8.6",
"@rollup/plugin-babel": "^5.3.0",
"@rollup/plugin-commonjs": "^21.0.1",
"@rollup/plugin-node-resolve": "^13.1.3",
"chai": "^4.2.0",
"chokidar-cli": "^3.0.0",
"detect-node": "^2.0.4",
"eslint": "^7.32.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.24.2",
"express": "^4.17.1",
"finalhandler": "^1.1.2",
"fs-extra": "^7.0.1",
"jsdoc": "^3.6.4",
"jsdoc-plugin-intersection": "^1.0.4",
"jsdoc-plugin-typescript": "^2.0.6",
"jshint-stylish": "^2.2.1",
"mocha": "^7.1.0",
"node-abort-controller": "^1.1.0",
"npm-run-all": "^4.1.5",
"rimraf": "^3.0.2",
"rollup": "^2.63.0",
"rollup-plugin-terser": "^7.0.2",
"send-ranges": "^4.0.0",
"serve": "^13.0.2",
"serve-static": "^1.14.1",
"shx": "^0.3.3",
"typescript": "^4.5.5"
},
"scripts": {
"prebuild": "npm run build:clean && npm run build:module",
"build": "run-p build:browser build:node build:types",
"build:clean": "rimraf dist-node/ dist-browser/ dist-module/",
"build:node": "tsc --project tsconfig.build.json && shx echo \"{\\\"type\\\":\\\"commonjs\\\"}\" > dist-node/package.json",
"build:browser": "rollup -c rollup.config.js",
"build:module": "shx mkdir -p dist-module && shx cp -rf src/* dist-module/ && node scripts/serialize-workers.cjs",
"build:types": "tsc --outdir dist-module/",
"watch:browser": "chokidar \"dist-module/*.js\" -c \"npm run build:browser\"",
"watch:module": "chokidar \"src/*.js\" -c \"npm run build:module\"",
"predev": "npm run build",
"dev": "run-p watch:module watch:browser dev:serve",
"dev:serve": "serve --listen 8090",
"docs": "rm -rf docs/; jsdoc -c .jsdoc.json -r src README.md -d docs",
"prelint": "npm run build:module",
"lint": "eslint src test scripts/*.cjs .eslintrc.cjs",
"lint:fix": "npm run lint -- --fix",
"prepare": "npm run build",
"pretest": "npm run lint",
"test": "mocha --full-trace test/geotiff.spec.js"
},
"author": "Fabian Schindler",
"browser": {
"fs": false,
"http": false,
"https": false,
"url": false
},
"sideEffects": false,
"contributors": [
{
"name": "Fabian Schindler",
"email": "fabian.schindler@eox.at"
}
],
"license": "MIT"
}

geotiffGesture/geotiffJS/rollup.config.js
@@ -0,0 +1,34 @@
import resolve from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import { babel } from '@rollup/plugin-babel';
import { terser } from 'rollup-plugin-terser';
export default {
input: 'dist-module/geotiff.js',
output: {
file: 'dist-browser/geotiff.js',
format: 'umd',
name: 'GeoTIFF',
exports: 'named',
sourcemap: true,
inlineDynamicImports: true,
},
plugins: [
resolve({ browser: true }),
commonjs(),
babel({
babelHelpers: 'runtime',
presets: [
[
'@babel/preset-env',
{
modules: false,
targets: 'last 2 versions, not dead',
},
],
],
plugins: ['@babel/plugin-transform-runtime'],
}),
terser(),
],
};

geotiffGesture/geotiffJS/scripts/serialize-workers.cjs
@@ -0,0 +1,104 @@
/* eslint-disable import/no-commonjs */
/* eslint-disable import/no-extraneous-dependencies */
const path = require('path');
const { babel } = require('@rollup/plugin-babel');
const resolve = require('@rollup/plugin-node-resolve').nodeResolve;
const common = require('@rollup/plugin-commonjs');
const rollup = require('rollup');
const { terser } = require('rollup-plugin-terser');
const fse = require('fs-extra');
async function build(input, { minify = true } = {}) {
const plugins = [
{
name: 'remove export let create',
transform(code, id) {
if (id !== input) {
return null;
}
return code.replace('export let create;', '');
},
},
common(),
resolve(),
babel({
babelHelpers: 'runtime',
presets: [
[
'@babel/preset-env',
{
modules: false,
targets: 'last 2 versions, not dead',
},
],
],
plugins: ['@babel/plugin-transform-runtime'],
}),
];
if (minify) {
plugins.push(terser());
}
plugins.push({
name: 'serialize worker and export create function',
renderChunk(code) {
return `
import Worker from 'web-worker';
export function create() {
const source = ${JSON.stringify(code)};
return new Worker(typeof Buffer !== 'undefined'
? 'data:application/javascript;base64,' + Buffer.from(source, 'binary').toString('base64')
: URL.createObjectURL(new Blob([source], {type: 'application/javascript'})));
}
`;
},
});
const bundle = await rollup.rollup({
input,
plugins,
inlineDynamicImports: true,
});
const { output } = await bundle.generate({ format: 'es' });
if (output.length !== 1) {
throw new Error(`Unexpected output length: ${output.length}`);
}
const chunk = output[0];
if (chunk.isAsset) {
throw new Error('Expected a chunk, got an asset');
}
return chunk;
}
exports.build = build;
/**
* Creates modules with inlined versions of the worker sources. These modules
* export a `create` function for creating a worker.
*/
async function main() {
const inputDir = path.join(__dirname, '../src/worker');
const outputDir = path.join(__dirname, '../dist-module/worker');
await fse.ensureDir(outputDir);
const entries = await fse.readdir(inputDir);
for (const entry of entries) {
if (entry.endsWith('.js')) {
const chunk = await build(path.join(inputDir, entry));
await fse.writeFile(path.join(outputDir, entry), chunk.code);
}
}
}
if (require.main === module) {
main().catch((err) => {
process.stderr.write(`${err.stack}\n`);
process.exit(1);
});
}

geotiffGesture/geotiffJS/src/compression/basedecoder.js
@@ -0,0 +1,20 @@
import { applyPredictor } from '../predictor.js';
export default class BaseDecoder {
async decode(fileDirectory, buffer) {
const decoded = await this.decodeBlock(buffer);
const predictor = fileDirectory.Predictor || 1;
if (predictor !== 1) {
const isTiled = !fileDirectory.StripOffsets;
const tileWidth = isTiled ? fileDirectory.TileWidth : fileDirectory.ImageWidth;
const tileHeight = isTiled ? fileDirectory.TileLength : (
fileDirectory.RowsPerStrip || fileDirectory.ImageLength
);
return applyPredictor(
decoded, predictor, tileWidth, tileHeight, fileDirectory.BitsPerSample,
fileDirectory.PlanarConfiguration,
);
}
return decoded;
}
}

geotiffGesture/geotiffJS/src/compression/deflate.js
@@ -0,0 +1,8 @@
import { inflate } from 'pako';
import BaseDecoder from './basedecoder.js';
export default class DeflateDecoder extends BaseDecoder {
decodeBlock(buffer) {
return inflate(new Uint8Array(buffer)).buffer;
}
}

geotiffGesture/geotiffJS/src/compression/index.js
@@ -0,0 +1,35 @@
const registry = new Map();
export function addDecoder(cases, importFn) {
if (!Array.isArray(cases)) {
cases = [cases]; // eslint-disable-line no-param-reassign
}
cases.forEach((c) => registry.set(c, importFn));
}
export async function getDecoder(fileDirectory) {
const importFn = registry.get(fileDirectory.Compression);
if (!importFn) {
throw new Error(`Unknown compression method identifier: ${fileDirectory.Compression}`);
}
const Decoder = await importFn();
return new Decoder(fileDirectory);
}
// Add default decoders to registry (end-user may override with other implementations)
addDecoder([undefined, 1], () => import('./raw.js').then((m) => m.default));
addDecoder(5, () => import('./lzw.js').then((m) => m.default));
addDecoder(6, () => {
throw new Error('old style JPEG compression is not supported.');
});
addDecoder(7, () => import('./jpeg.js').then((m) => m.default));
addDecoder([8, 32946], () => import('./deflate.js').then((m) => m.default));
addDecoder(32773, () => import('./packbits.js').then((m) => m.default));
addDecoder(34887, () => import('./lerc.js')
.then(async (m) => {
await m.zstd.init();
return m;
})
.then((m) => m.default),
);
addDecoder(50001, () => import('./webimage.js').then((m) => m.default));
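// Example (hypothetical tag and module path): end users may register their own
// decoder for a custom compression identifier. The import function must resolve
// to a class extending BaseDecoder, e.g.:
//   addDecoder(50000, () => import('./mydecoder.js').then((m) => m.default));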

geotiffGesture/geotiffJS/src/compression/jpeg.js
@@ -0,0 +1,897 @@
import BaseDecoder from './basedecoder.js';
/* -*- tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */
/*
Copyright 2011 notmasteryet
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// - The JPEG specification can be found in the ITU CCITT Recommendation T.81
// (www.w3.org/Graphics/JPEG/itu-t81.pdf)
// - The JFIF specification can be found in the JPEG File Interchange Format
// (www.w3.org/Graphics/JPEG/jfif3.pdf)
// - The Adobe Application-Specific JPEG markers in the Supporting the DCT Filters
// in PostScript Level 2, Technical Note #5116
// (partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf)
const dctZigZag = new Int32Array([
0,
1, 8,
16, 9, 2,
3, 10, 17, 24,
32, 25, 18, 11, 4,
5, 12, 19, 26, 33, 40,
48, 41, 34, 27, 20, 13, 6,
7, 14, 21, 28, 35, 42, 49, 56,
57, 50, 43, 36, 29, 22, 15,
23, 30, 37, 44, 51, 58,
59, 52, 45, 38, 31,
39, 46, 53, 60,
61, 54, 47,
55, 62,
63,
]);
const dctCos1 = 4017; // cos(pi/16)
const dctSin1 = 799; // sin(pi/16)
const dctCos3 = 3406; // cos(3*pi/16)
const dctSin3 = 2276; // sin(3*pi/16)
const dctCos6 = 1567; // cos(6*pi/16)
const dctSin6 = 3784; // sin(6*pi/16)
const dctSqrt2 = 5793; // sqrt(2)
const dctSqrt1d2 = 2896; // sqrt(2) / 2
function buildHuffmanTable(codeLengths, values) {
let k = 0;
const code = [];
let length = 16;
while (length > 0 && !codeLengths[length - 1]) {
--length;
}
code.push({ children: [], index: 0 });
let p = code[0];
let q;
for (let i = 0; i < length; i++) {
for (let j = 0; j < codeLengths[i]; j++) {
p = code.pop();
p.children[p.index] = values[k];
while (p.index > 0) {
p = code.pop();
}
p.index++;
code.push(p);
while (code.length <= i) {
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
k++;
}
if (i + 1 < length) {
// p here points to last code
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
}
return code[0].children;
}
function decodeScan(data, initialOffset,
frame, components, resetInterval,
spectralStart, spectralEnd,
successivePrev, successive) {
const { mcusPerLine, progressive } = frame;
const startOffset = initialOffset;
let offset = initialOffset;
let bitsData = 0;
let bitsCount = 0;
function readBit() {
if (bitsCount > 0) {
bitsCount--;
return (bitsData >> bitsCount) & 1;
}
bitsData = data[offset++];
if (bitsData === 0xFF) {
const nextByte = data[offset++];
if (nextByte) {
throw new Error(`unexpected marker: ${((bitsData << 8) | nextByte).toString(16)}`);
}
// unstuff 0
}
bitsCount = 7;
return bitsData >>> 7;
}
function decodeHuffman(tree) {
let node = tree;
let bit;
while ((bit = readBit()) !== null) { // eslint-disable-line no-cond-assign
node = node[bit];
if (typeof node === 'number') {
return node;
}
if (typeof node !== 'object') {
throw new Error('invalid huffman sequence');
}
}
return null;
}
function receive(initialLength) {
let length = initialLength;
let n = 0;
while (length > 0) {
const bit = readBit();
if (bit === null) {
return undefined;
}
n = (n << 1) | bit;
--length;
}
return n;
}
function receiveAndExtend(length) {
const n = receive(length);
if (n >= 1 << (length - 1)) {
return n;
}
return n + (-1 << length) + 1;
}
function decodeBaseline(component, zz) {
const t = decodeHuffman(component.huffmanTableDC);
const diff = t === 0 ? 0 : receiveAndExtend(t);
component.pred += diff;
zz[0] = component.pred;
let k = 1;
while (k < 64) {
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
const r = rs >> 4;
if (s === 0) {
if (r < 15) {
break;
}
k += 16;
} else {
k += r;
const z = dctZigZag[k];
zz[z] = receiveAndExtend(s);
k++;
}
}
}
function decodeDCFirst(component, zz) {
const t = decodeHuffman(component.huffmanTableDC);
const diff = t === 0 ? 0 : (receiveAndExtend(t) << successive);
component.pred += diff;
zz[0] = component.pred;
}
function decodeDCSuccessive(component, zz) {
zz[0] |= readBit() << successive;
}
let eobrun = 0;
function decodeACFirst(component, zz) {
if (eobrun > 0) {
eobrun--;
return;
}
let k = spectralStart;
const e = spectralEnd;
while (k <= e) {
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
const r = rs >> 4;
if (s === 0) {
if (r < 15) {
eobrun = receive(r) + (1 << r) - 1;
break;
}
k += 16;
} else {
k += r;
const z = dctZigZag[k];
zz[z] = receiveAndExtend(s) * (1 << successive);
k++;
}
}
}
let successiveACState = 0;
let successiveACNextValue;
function decodeACSuccessive(component, zz) {
let k = spectralStart;
const e = spectralEnd;
let r = 0;
while (k <= e) {
const z = dctZigZag[k];
const direction = zz[z] < 0 ? -1 : 1;
switch (successiveACState) {
case 0: { // initial state
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
r = rs >> 4;
if (s === 0) {
if (r < 15) {
eobrun = receive(r) + (1 << r);
successiveACState = 4;
} else {
r = 16;
successiveACState = 1;
}
} else {
if (s !== 1) {
throw new Error('invalid ACn encoding');
}
successiveACNextValue = receiveAndExtend(s);
successiveACState = r ? 2 : 3;
}
continue; // eslint-disable-line no-continue
}
case 1: // skipping r zero items
case 2:
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
} else {
r--;
if (r === 0) {
successiveACState = successiveACState === 2 ? 3 : 0;
}
}
break;
case 3: // set value for a zero item
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
} else {
zz[z] = successiveACNextValue << successive;
successiveACState = 0;
}
break;
case 4: // eob
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
}
break;
default:
break;
}
k++;
}
if (successiveACState === 4) {
eobrun--;
if (eobrun === 0) {
successiveACState = 0;
}
}
}
function decodeMcu(component, decodeFunction, mcu, row, col) {
const mcuRow = (mcu / mcusPerLine) | 0;
const mcuCol = mcu % mcusPerLine;
const blockRow = (mcuRow * component.v) + row;
const blockCol = (mcuCol * component.h) + col;
decodeFunction(component, component.blocks[blockRow][blockCol]);
}
function decodeBlock(component, decodeFunction, mcu) {
const blockRow = (mcu / component.blocksPerLine) | 0;
const blockCol = mcu % component.blocksPerLine;
decodeFunction(component, component.blocks[blockRow][blockCol]);
}
const componentsLength = components.length;
let component;
let i;
let j;
let k;
let n;
let decodeFn;
if (progressive) {
if (spectralStart === 0) {
decodeFn = successivePrev === 0 ? decodeDCFirst : decodeDCSuccessive;
} else {
decodeFn = successivePrev === 0 ? decodeACFirst : decodeACSuccessive;
}
} else {
decodeFn = decodeBaseline;
}
let mcu = 0;
let marker;
let mcuExpected;
if (componentsLength === 1) {
mcuExpected = components[0].blocksPerLine * components[0].blocksPerColumn;
} else {
mcuExpected = mcusPerLine * frame.mcusPerColumn;
}
const usedResetInterval = resetInterval || mcuExpected;
while (mcu < mcuExpected) {
// reset interval stuff
for (i = 0; i < componentsLength; i++) {
components[i].pred = 0;
}
eobrun = 0;
if (componentsLength === 1) {
component = components[0];
for (n = 0; n < usedResetInterval; n++) {
decodeBlock(component, decodeFn, mcu);
mcu++;
}
} else {
for (n = 0; n < usedResetInterval; n++) {
for (i = 0; i < componentsLength; i++) {
component = components[i];
const { h, v } = component;
for (j = 0; j < v; j++) {
for (k = 0; k < h; k++) {
decodeMcu(component, decodeFn, mcu, j, k);
}
}
}
mcu++;
// If we've reached our expected MCU's, stop decoding
if (mcu === mcuExpected) {
break;
}
}
}
// find marker
bitsCount = 0;
marker = (data[offset] << 8) | data[offset + 1];
if (marker < 0xFF00) {
throw new Error('marker was not found');
}
if (marker >= 0xFFD0 && marker <= 0xFFD7) { // RSTx
offset += 2;
} else {
break;
}
}
return offset - startOffset;
}
function buildComponentData(frame, component) {
const lines = [];
const { blocksPerLine, blocksPerColumn } = component;
const samplesPerLine = blocksPerLine << 3;
const R = new Int32Array(64);
const r = new Uint8Array(64);
// A port of poppler's IDCT method which in turn is taken from:
// Christoph Loeffler, Adriaan Ligtenberg, George S. Moschytz,
// "Practical Fast 1-D DCT Algorithms with 11 Multiplications",
// IEEE Intl. Conf. on Acoustics, Speech & Signal Processing, 1989,
// 988-991.
function quantizeAndInverse(zz, dataOut, dataIn) {
const qt = component.quantizationTable;
let v0;
let v1;
let v2;
let v3;
let v4;
let v5;
let v6;
let v7;
let t;
const p = dataIn;
let i;
// dequant
for (i = 0; i < 64; i++) {
p[i] = zz[i] * qt[i];
}
// inverse DCT on rows
for (i = 0; i < 8; ++i) {
const row = 8 * i;
// check for all-zero AC coefficients
if (p[1 + row] === 0 && p[2 + row] === 0 && p[3 + row] === 0
&& p[4 + row] === 0 && p[5 + row] === 0 && p[6 + row] === 0
&& p[7 + row] === 0) {
t = ((dctSqrt2 * p[0 + row]) + 512) >> 10;
p[0 + row] = t;
p[1 + row] = t;
p[2 + row] = t;
p[3 + row] = t;
p[4 + row] = t;
p[5 + row] = t;
p[6 + row] = t;
p[7 + row] = t;
continue; // eslint-disable-line no-continue
}
// stage 4
v0 = ((dctSqrt2 * p[0 + row]) + 128) >> 8;
v1 = ((dctSqrt2 * p[4 + row]) + 128) >> 8;
v2 = p[2 + row];
v3 = p[6 + row];
v4 = ((dctSqrt1d2 * (p[1 + row] - p[7 + row])) + 128) >> 8;
v7 = ((dctSqrt1d2 * (p[1 + row] + p[7 + row])) + 128) >> 8;
v5 = p[3 + row] << 4;
v6 = p[5 + row] << 4;
// stage 3
t = (v0 - v1 + 1) >> 1;
v0 = (v0 + v1 + 1) >> 1;
v1 = t;
t = ((v2 * dctSin6) + (v3 * dctCos6) + 128) >> 8;
v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 128) >> 8;
v3 = t;
t = (v4 - v6 + 1) >> 1;
v4 = (v4 + v6 + 1) >> 1;
v6 = t;
t = (v7 + v5 + 1) >> 1;
v5 = (v7 - v5 + 1) >> 1;
v7 = t;
// stage 2
t = (v0 - v3 + 1) >> 1;
v0 = (v0 + v3 + 1) >> 1;
v3 = t;
t = (v1 - v2 + 1) >> 1;
v1 = (v1 + v2 + 1) >> 1;
v2 = t;
t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;
v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;
v7 = t;
t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;
v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;
v6 = t;
// stage 1
p[0 + row] = v0 + v7;
p[7 + row] = v0 - v7;
p[1 + row] = v1 + v6;
p[6 + row] = v1 - v6;
p[2 + row] = v2 + v5;
p[5 + row] = v2 - v5;
p[3 + row] = v3 + v4;
p[4 + row] = v3 - v4;
}
// inverse DCT on columns
for (i = 0; i < 8; ++i) {
const col = i;
// check for all-zero AC coefficients
if (p[(1 * 8) + col] === 0 && p[(2 * 8) + col] === 0 && p[(3 * 8) + col] === 0
&& p[(4 * 8) + col] === 0 && p[(5 * 8) + col] === 0 && p[(6 * 8) + col] === 0
&& p[(7 * 8) + col] === 0) {
t = ((dctSqrt2 * dataIn[i + 0]) + 8192) >> 14;
p[(0 * 8) + col] = t;
p[(1 * 8) + col] = t;
p[(2 * 8) + col] = t;
p[(3 * 8) + col] = t;
p[(4 * 8) + col] = t;
p[(5 * 8) + col] = t;
p[(6 * 8) + col] = t;
p[(7 * 8) + col] = t;
continue; // eslint-disable-line no-continue
}
// stage 4
v0 = ((dctSqrt2 * p[(0 * 8) + col]) + 2048) >> 12;
v1 = ((dctSqrt2 * p[(4 * 8) + col]) + 2048) >> 12;
v2 = p[(2 * 8) + col];
v3 = p[(6 * 8) + col];
v4 = ((dctSqrt1d2 * (p[(1 * 8) + col] - p[(7 * 8) + col])) + 2048) >> 12;
v7 = ((dctSqrt1d2 * (p[(1 * 8) + col] + p[(7 * 8) + col])) + 2048) >> 12;
v5 = p[(3 * 8) + col];
v6 = p[(5 * 8) + col];
// stage 3
t = (v0 - v1 + 1) >> 1;
v0 = (v0 + v1 + 1) >> 1;
v1 = t;
t = ((v2 * dctSin6) + (v3 * dctCos6) + 2048) >> 12;
v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 2048) >> 12;
v3 = t;
t = (v4 - v6 + 1) >> 1;
v4 = (v4 + v6 + 1) >> 1;
v6 = t;
t = (v7 + v5 + 1) >> 1;
v5 = (v7 - v5 + 1) >> 1;
v7 = t;
// stage 2
t = (v0 - v3 + 1) >> 1;
v0 = (v0 + v3 + 1) >> 1;
v3 = t;
t = (v1 - v2 + 1) >> 1;
v1 = (v1 + v2 + 1) >> 1;
v2 = t;
t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;
v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;
v7 = t;
t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;
v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;
v6 = t;
// stage 1
p[(0 * 8) + col] = v0 + v7;
p[(7 * 8) + col] = v0 - v7;
p[(1 * 8) + col] = v1 + v6;
p[(6 * 8) + col] = v1 - v6;
p[(2 * 8) + col] = v2 + v5;
p[(5 * 8) + col] = v2 - v5;
p[(3 * 8) + col] = v3 + v4;
p[(4 * 8) + col] = v3 - v4;
}
// convert to 8-bit integers
for (i = 0; i < 64; ++i) {
const sample = 128 + ((p[i] + 8) >> 4);
if (sample < 0) {
dataOut[i] = 0;
} else if (sample > 0XFF) {
dataOut[i] = 0xFF;
} else {
dataOut[i] = sample;
}
}
}
for (let blockRow = 0; blockRow < blocksPerColumn; blockRow++) {
const scanLine = blockRow << 3;
for (let i = 0; i < 8; i++) {
lines.push(new Uint8Array(samplesPerLine));
}
for (let blockCol = 0; blockCol < blocksPerLine; blockCol++) {
quantizeAndInverse(component.blocks[blockRow][blockCol], r, R);
let offset = 0;
const sample = blockCol << 3;
for (let j = 0; j < 8; j++) {
const line = lines[scanLine + j];
for (let i = 0; i < 8; i++) {
line[sample + i] = r[offset++];
}
}
}
}
return lines;
}
class JpegStreamReader {
constructor() {
this.jfif = null;
this.adobe = null;
this.quantizationTables = [];
this.huffmanTablesAC = [];
this.huffmanTablesDC = [];
this.resetFrames();
}
resetFrames() {
this.frames = [];
}
parse(data) {
let offset = 0;
// const { length } = data;
function readUint16() {
const value = (data[offset] << 8) | data[offset + 1];
offset += 2;
return value;
}
function readDataBlock() {
const length = readUint16();
const array = data.subarray(offset, offset + length - 2);
offset += array.length;
return array;
}
function prepareComponents(frame) {
let maxH = 0;
let maxV = 0;
let component;
let componentId;
for (componentId in frame.components) {
if (frame.components.hasOwnProperty(componentId)) {
component = frame.components[componentId];
if (maxH < component.h) {
maxH = component.h;
}
if (maxV < component.v) {
maxV = component.v;
}
}
}
const mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / maxH);
const mcusPerColumn = Math.ceil(frame.scanLines / 8 / maxV);
for (componentId in frame.components) {
if (frame.components.hasOwnProperty(componentId)) {
component = frame.components[componentId];
const blocksPerLine = Math.ceil(Math.ceil(frame.samplesPerLine / 8) * component.h / maxH);
const blocksPerColumn = Math.ceil(Math.ceil(frame.scanLines / 8) * component.v / maxV);
const blocksPerLineForMcu = mcusPerLine * component.h;
const blocksPerColumnForMcu = mcusPerColumn * component.v;
const blocks = [];
for (let i = 0; i < blocksPerColumnForMcu; i++) {
const row = [];
for (let j = 0; j < blocksPerLineForMcu; j++) {
row.push(new Int32Array(64));
}
blocks.push(row);
}
component.blocksPerLine = blocksPerLine;
component.blocksPerColumn = blocksPerColumn;
component.blocks = blocks;
}
}
frame.maxH = maxH;
frame.maxV = maxV;
frame.mcusPerLine = mcusPerLine;
frame.mcusPerColumn = mcusPerColumn;
}
let fileMarker = readUint16();
if (fileMarker !== 0xFFD8) { // SOI (Start of Image)
throw new Error('SOI not found');
}
fileMarker = readUint16();
while (fileMarker !== 0xFFD9) { // EOI (End of image)
switch (fileMarker) {
case 0xFF00: break;
case 0xFFE0: // APP0 (Application Specific)
case 0xFFE1: // APP1
case 0xFFE2: // APP2
case 0xFFE3: // APP3
case 0xFFE4: // APP4
case 0xFFE5: // APP5
case 0xFFE6: // APP6
case 0xFFE7: // APP7
case 0xFFE8: // APP8
case 0xFFE9: // APP9
case 0xFFEA: // APP10
case 0xFFEB: // APP11
case 0xFFEC: // APP12
case 0xFFED: // APP13
case 0xFFEE: // APP14
case 0xFFEF: // APP15
case 0xFFFE: { // COM (Comment)
const appData = readDataBlock();
if (fileMarker === 0xFFE0) {
if (appData[0] === 0x4A && appData[1] === 0x46 && appData[2] === 0x49
&& appData[3] === 0x46 && appData[4] === 0) { // 'JFIF\x00'
this.jfif = {
version: { major: appData[5], minor: appData[6] },
densityUnits: appData[7],
xDensity: (appData[8] << 8) | appData[9],
yDensity: (appData[10] << 8) | appData[11],
thumbWidth: appData[12],
thumbHeight: appData[13],
thumbData: appData.subarray(14, 14 + (3 * appData[12] * appData[13])),
};
}
}
// TODO APP1 - Exif
if (fileMarker === 0xFFEE) {
if (appData[0] === 0x41 && appData[1] === 0x64 && appData[2] === 0x6F
&& appData[3] === 0x62 && appData[4] === 0x65 && appData[5] === 0) { // 'Adobe\x00'
this.adobe = {
version: appData[6],
flags0: (appData[7] << 8) | appData[8],
flags1: (appData[9] << 8) | appData[10],
transformCode: appData[11],
};
}
}
break;
}
case 0xFFDB: { // DQT (Define Quantization Tables)
const quantizationTablesLength = readUint16();
const quantizationTablesEnd = quantizationTablesLength + offset - 2;
while (offset < quantizationTablesEnd) {
const quantizationTableSpec = data[offset++];
const tableData = new Int32Array(64);
if ((quantizationTableSpec >> 4) === 0) { // 8 bit values
for (let j = 0; j < 64; j++) {
const z = dctZigZag[j];
tableData[z] = data[offset++];
}
} else if ((quantizationTableSpec >> 4) === 1) { // 16 bit
for (let j = 0; j < 64; j++) {
const z = dctZigZag[j];
tableData[z] = readUint16();
}
} else {
throw new Error('DQT: invalid table spec');
}
this.quantizationTables[quantizationTableSpec & 15] = tableData;
}
break;
}
case 0xFFC0: // SOF0 (Start of Frame, Baseline DCT)
case 0xFFC1: // SOF1 (Start of Frame, Extended DCT)
case 0xFFC2: { // SOF2 (Start of Frame, Progressive DCT)
readUint16(); // skip data length
const frame = {
extended: (fileMarker === 0xFFC1),
progressive: (fileMarker === 0xFFC2),
precision: data[offset++],
scanLines: readUint16(),
samplesPerLine: readUint16(),
components: {},
componentsOrder: [],
};
const componentsCount = data[offset++];
let componentId;
// let maxH = 0;
// let maxV = 0;
for (let i = 0; i < componentsCount; i++) {
componentId = data[offset];
const h = data[offset + 1] >> 4;
const v = data[offset + 1] & 15;
const qId = data[offset + 2];
frame.componentsOrder.push(componentId);
frame.components[componentId] = {
h,
v,
quantizationIdx: qId,
};
offset += 3;
}
prepareComponents(frame);
this.frames.push(frame);
break;
}
case 0xFFC4: { // DHT (Define Huffman Tables)
const huffmanLength = readUint16();
for (let i = 2; i < huffmanLength;) {
const huffmanTableSpec = data[offset++];
const codeLengths = new Uint8Array(16);
let codeLengthSum = 0;
for (let j = 0; j < 16; j++, offset++) {
codeLengths[j] = data[offset];
codeLengthSum += codeLengths[j];
}
const huffmanValues = new Uint8Array(codeLengthSum);
for (let j = 0; j < codeLengthSum; j++, offset++) {
huffmanValues[j] = data[offset];
}
i += 17 + codeLengthSum;
if ((huffmanTableSpec >> 4) === 0) {
this.huffmanTablesDC[huffmanTableSpec & 15] = buildHuffmanTable(
codeLengths, huffmanValues,
);
} else {
this.huffmanTablesAC[huffmanTableSpec & 15] = buildHuffmanTable(
codeLengths, huffmanValues,
);
}
}
break;
}
case 0xFFDD: // DRI (Define Restart Interval)
readUint16(); // skip data length
this.resetInterval = readUint16();
break;
case 0xFFDA: { // SOS (Start of Scan)
readUint16(); // skip length
const selectorsCount = data[offset++];
const components = [];
const frame = this.frames[0];
for (let i = 0; i < selectorsCount; i++) {
const component = frame.components[data[offset++]];
const tableSpec = data[offset++];
component.huffmanTableDC = this.huffmanTablesDC[tableSpec >> 4];
component.huffmanTableAC = this.huffmanTablesAC[tableSpec & 15];
components.push(component);
}
const spectralStart = data[offset++];
const spectralEnd = data[offset++];
const successiveApproximation = data[offset++];
const processed = decodeScan(data, offset,
frame, components, this.resetInterval,
spectralStart, spectralEnd,
successiveApproximation >> 4, successiveApproximation & 15);
offset += processed;
break;
}
case 0xFFFF: // Fill bytes
if (data[offset] !== 0xFF) { // Avoid skipping a valid marker.
offset--;
}
break;
default:
if (data[offset - 3] === 0xFF
&& data[offset - 2] >= 0xC0 && data[offset - 2] <= 0xFE) {
// could be incorrect encoding -- last 0xFF byte of the previous
// block was eaten by the encoder
offset -= 3;
break;
}
throw new Error(`unknown JPEG marker ${fileMarker.toString(16)}`);
}
fileMarker = readUint16();
}
}
getResult() {
const { frames } = this;
if (this.frames.length === 0) {
throw new Error('no frames were decoded');
} else if (this.frames.length > 1) {
console.warn('more than one frame is not supported');
}
// set each frame's components quantization table
for (let i = 0; i < this.frames.length; i++) {
const cp = this.frames[i].components;
for (const j of Object.keys(cp)) {
cp[j].quantizationTable = this.quantizationTables[cp[j].quantizationIdx];
delete cp[j].quantizationIdx;
}
}
const frame = frames[0];
const { components, componentsOrder } = frame;
const outComponents = [];
const width = frame.samplesPerLine;
const height = frame.scanLines;
for (let i = 0; i < componentsOrder.length; i++) {
const component = components[componentsOrder[i]];
outComponents.push({
lines: buildComponentData(frame, component),
scaleX: component.h / frame.maxH,
scaleY: component.v / frame.maxV,
});
}
const out = new Uint8Array(width * height * outComponents.length);
let oi = 0;
for (let y = 0; y < height; ++y) {
for (let x = 0; x < width; ++x) {
for (let i = 0; i < outComponents.length; ++i) {
const component = outComponents[i];
out[oi] = component.lines[0 | y * component.scaleY][0 | x * component.scaleX];
++oi;
}
}
}
return out;
}
}
export default class JpegDecoder extends BaseDecoder {
constructor(fileDirectory) {
super();
this.reader = new JpegStreamReader();
if (fileDirectory.JPEGTables) {
this.reader.parse(fileDirectory.JPEGTables);
}
}
decodeBlock(buffer) {
this.reader.resetFrames();
this.reader.parse(new Uint8Array(buffer));
return this.reader.getResult().buffer;
}
}

geotiffGesture/geotiffJS/src/compression/lerc.js
@@ -0,0 +1,37 @@
import { inflate } from 'pako';
import Lerc from 'lerc';
import { ZSTDDecoder } from 'zstddec';
import BaseDecoder from './basedecoder.js';
import { LercParameters, LercAddCompression } from '../globals.js';
export const zstd = new ZSTDDecoder();
export default class LercDecoder extends BaseDecoder {
constructor(fileDirectory) {
super();
this.planarConfiguration = typeof fileDirectory.PlanarConfiguration !== 'undefined' ? fileDirectory.PlanarConfiguration : 1;
this.samplesPerPixel = typeof fileDirectory.SamplesPerPixel !== 'undefined' ? fileDirectory.SamplesPerPixel : 1;
this.addCompression = fileDirectory.LercParameters[LercParameters.AddCompression];
}
decodeBlock(buffer) {
switch (this.addCompression) {
case LercAddCompression.None:
break;
case LercAddCompression.Deflate:
buffer = inflate(new Uint8Array(buffer)).buffer; // eslint-disable-line no-param-reassign, prefer-destructuring
break;
case LercAddCompression.Zstandard:
buffer = zstd.decode(new Uint8Array(buffer)).buffer; // eslint-disable-line no-param-reassign, prefer-destructuring
break;
default:
throw new Error(`Unsupported LERC additional compression method identifier: ${this.addCompression}`);
}
const lercResult = Lerc.decode(buffer, { returnPixelInterleavedDims: this.planarConfiguration === 1 });
const lercData = lercResult.pixels[0];
return lercData.buffer;
}
}

geotiffGesture/geotiffJS/src/compression/lzw.js
@@ -0,0 +1,131 @@
import BaseDecoder from './basedecoder.js';
const MIN_BITS = 9;
const CLEAR_CODE = 256; // clear code
const EOI_CODE = 257; // end of information
const MAX_BYTELENGTH = 12;
// Extract `length` bits from `array`, starting at absolute bit offset `position`.
function getByte(array, position, length) {
const d = position % 8;
const a = Math.floor(position / 8);
const de = 8 - d;
const ef = (position + length) - ((a + 1) * 8);
let fg = (8 * (a + 2)) - (position + length);
const dg = ((a + 2) * 8) - position;
fg = Math.max(0, fg);
if (a >= array.length) {
console.warn('ran off the end of the buffer before finding EOI_CODE (end on input code)');
return EOI_CODE;
}
let chunk1 = array[a] & ((2 ** (8 - d)) - 1);
chunk1 <<= (length - de);
let chunks = chunk1;
if (a + 1 < array.length) {
let chunk2 = array[a + 1] >>> fg;
chunk2 <<= Math.max(0, (length - dg));
chunks += chunk2;
}
if (ef > 8 && a + 2 < array.length) {
const hi = ((a + 3) * 8) - (position + length);
const chunk3 = array[a + 2] >>> hi;
chunks += chunk3;
}
return chunks;
}
function appendReversed(dest, source) {
for (let i = source.length - 1; i >= 0; i--) {
dest.push(source[i]);
}
return dest;
}
function decompress(input) {
const dictionaryIndex = new Uint16Array(4093);
const dictionaryChar = new Uint8Array(4093);
for (let i = 0; i <= 257; i++) {
dictionaryIndex[i] = 4096;
dictionaryChar[i] = i;
}
let dictionaryLength = 258;
let byteLength = MIN_BITS;
let position = 0;
function initDictionary() {
dictionaryLength = 258;
byteLength = MIN_BITS;
}
function getNext(array) {
const byte = getByte(array, position, byteLength);
position += byteLength;
return byte;
}
function addToDictionary(i, c) {
dictionaryChar[dictionaryLength] = c;
dictionaryIndex[dictionaryLength] = i;
dictionaryLength++;
return dictionaryLength - 1;
}
function getDictionaryReversed(n) {
const rev = [];
for (let i = n; i !== 4096; i = dictionaryIndex[i]) {
rev.push(dictionaryChar[i]);
}
return rev;
}
const result = [];
initDictionary();
const array = new Uint8Array(input);
let code = getNext(array);
let oldCode;
while (code !== EOI_CODE) {
if (code === CLEAR_CODE) {
initDictionary();
code = getNext(array);
while (code === CLEAR_CODE) {
code = getNext(array);
}
if (code === EOI_CODE) {
break;
} else if (code > CLEAR_CODE) {
throw new Error(`corrupted code at scanline ${code}`);
} else {
const val = getDictionaryReversed(code);
appendReversed(result, val);
oldCode = code;
}
} else if (code < dictionaryLength) {
const val = getDictionaryReversed(code);
appendReversed(result, val);
addToDictionary(oldCode, val[val.length - 1]);
oldCode = code;
} else {
const oldVal = getDictionaryReversed(oldCode);
if (!oldVal) {
throw new Error(`Bogus entry. Not in dictionary, ${oldCode} / ${dictionaryLength}, position: ${position}`);
}
appendReversed(result, oldVal);
result.push(oldVal[oldVal.length - 1]);
addToDictionary(oldCode, oldVal[oldVal.length - 1]);
oldCode = code;
}
if (dictionaryLength + 1 >= (2 ** byteLength)) {
if (byteLength === MAX_BYTELENGTH) {
oldCode = undefined;
} else {
byteLength++;
}
}
code = getNext(array);
}
return new Uint8Array(result);
}
export default class LZWDecoder extends BaseDecoder {
decodeBlock(buffer) {
    return decompress(buffer).buffer;
}
}
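// Worked example: codes are packed MSB-first across byte boundaries. Reading
// the first 9-bit code from the bytes [0x80, 0x80]:
//   getByte([0x80, 0x80], 0, 9)
//     chunk1 = (0x80 & 0xff) << 1 = 256
//     chunk2 =  0x80 >>> 7        =   1
//     => 257, i.e. EOI_CODE: an immediately terminated stream.
// The code width then grows by one bit whenever the dictionary fills up to
// 2 ** byteLength - 1 entries, capped at MAX_BYTELENGTH (12) bits.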

View file

@ -0,0 +1,26 @@
import BaseDecoder from './basedecoder.js';
export default class PackbitsDecoder extends BaseDecoder {
decodeBlock(buffer) {
const dataView = new DataView(buffer);
const out = [];
for (let i = 0; i < buffer.byteLength; ++i) {
let header = dataView.getInt8(i);
if (header < 0) {
const next = dataView.getUint8(i + 1);
header = -header;
for (let j = 0; j <= header; ++j) {
out.push(next);
}
i += 1;
} else {
for (let j = 0; j <= header; ++j) {
out.push(dataView.getUint8(i + j + 1));
}
i += header + 1;
}
}
return new Uint8Array(out).buffer;
}
}
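// Worked example (PackBits): a header byte n >= 0 means "copy the next n + 1
// bytes literally"; n < 0 means "repeat the next byte -n + 1 times". For the
// input [0xFE, 0xAA, 0x02, 0x01, 0x02, 0x03]:
//   0xFE (-2) -> repeat 0xAA three times   => AA AA AA
//   0x02 (+2) -> copy the next three bytes => 01 02 03
// so the decoded output is [0xAA, 0xAA, 0xAA, 0x01, 0x02, 0x03].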

View file

@ -0,0 +1,7 @@
import BaseDecoder from './basedecoder.js';
export default class RawDecoder extends BaseDecoder {
decodeBlock(buffer) {
return buffer;
}
}

View file

@ -0,0 +1,40 @@
import BaseDecoder from './basedecoder.js';
/**
* class WebImageDecoder
*
 * This decoder uses the browser's image decoding facilities to read image
* formats like WebP when supported.
*/
export default class WebImageDecoder extends BaseDecoder {
constructor() {
super();
if (typeof createImageBitmap === 'undefined') {
throw new Error('Cannot decode WebImage as `createImageBitmap` is not available');
} else if (typeof document === 'undefined' && typeof OffscreenCanvas === 'undefined') {
      throw new Error('Cannot decode WebImage as neither `document` nor `OffscreenCanvas` is available');
}
}
async decode(fileDirectory, buffer) {
const blob = new Blob([buffer]);
const imageBitmap = await createImageBitmap(blob);
let canvas;
if (typeof document !== 'undefined') {
canvas = document.createElement('canvas');
canvas.width = imageBitmap.width;
canvas.height = imageBitmap.height;
} else {
canvas = new OffscreenCanvas(imageBitmap.width, imageBitmap.height);
}
const ctx = canvas.getContext('2d');
ctx.drawImage(imageBitmap, 0, 0);
// TODO: check how many samples per pixel we have, and return RGB/RGBA accordingly
// it seems like GDAL always encodes via RGBA which does not require a translation
return ctx.getImageData(0, 0, imageBitmap.width, imageBitmap.height).data.buffer;
}
}
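// Usage sketch (illustrative; `fileDirectory` and `webpTile` are hypothetical
// inputs): unlike the other decoders, this one overrides `decode` directly,
// since the browser returns finished RGBA pixels rather than raw blocks.
//
//   const decoder = new WebImageDecoder();
//   const rgba = await decoder.decode(fileDirectory, webpTile); // RGBA ArrayBuffer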

View file

@ -0,0 +1,140 @@
export default class DataSlice {
constructor(arrayBuffer, sliceOffset, littleEndian, bigTiff) {
this._dataView = new DataView(arrayBuffer);
this._sliceOffset = sliceOffset;
this._littleEndian = littleEndian;
this._bigTiff = bigTiff;
}
get sliceOffset() {
return this._sliceOffset;
}
get sliceTop() {
return this._sliceOffset + this.buffer.byteLength;
}
get littleEndian() {
return this._littleEndian;
}
get bigTiff() {
return this._bigTiff;
}
get buffer() {
return this._dataView.buffer;
}
covers(offset, length) {
return this.sliceOffset <= offset && this.sliceTop >= offset + length;
}
readUint8(offset) {
return this._dataView.getUint8(
offset - this._sliceOffset, this._littleEndian,
);
}
readInt8(offset) {
return this._dataView.getInt8(
offset - this._sliceOffset, this._littleEndian,
);
}
readUint16(offset) {
return this._dataView.getUint16(
offset - this._sliceOffset, this._littleEndian,
);
}
readInt16(offset) {
return this._dataView.getInt16(
offset - this._sliceOffset, this._littleEndian,
);
}
readUint32(offset) {
return this._dataView.getUint32(
offset - this._sliceOffset, this._littleEndian,
);
}
readInt32(offset) {
return this._dataView.getInt32(
offset - this._sliceOffset, this._littleEndian,
);
}
readFloat32(offset) {
return this._dataView.getFloat32(
offset - this._sliceOffset, this._littleEndian,
);
}
readFloat64(offset) {
return this._dataView.getFloat64(
offset - this._sliceOffset, this._littleEndian,
);
}
readUint64(offset) {
const left = this.readUint32(offset);
const right = this.readUint32(offset + 4);
let combined;
if (this._littleEndian) {
combined = left + ((2 ** 32) * right);
if (!Number.isSafeInteger(combined)) {
throw new Error(
`${combined} exceeds MAX_SAFE_INTEGER. `
+ 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues',
);
}
return combined;
}
combined = ((2 ** 32) * left) + right;
if (!Number.isSafeInteger(combined)) {
throw new Error(
`${combined} exceeds MAX_SAFE_INTEGER. `
+ 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues',
);
}
return combined;
}
// adapted from https://stackoverflow.com/a/55338384/8060591
readInt64(offset) {
let value = 0;
const isNegative = (this._dataView.getUint8(offset + (this._littleEndian ? 7 : 0)) & 0x80)
> 0;
let carrying = true;
for (let i = 0; i < 8; i++) {
let byte = this._dataView.getUint8(
offset + (this._littleEndian ? i : 7 - i),
);
if (isNegative) {
if (carrying) {
if (byte !== 0x00) {
byte = ~(byte - 1) & 0xff;
carrying = false;
}
} else {
byte = ~byte & 0xff;
}
}
value += byte * (256 ** i);
}
if (isNegative) {
value = -value;
}
return value;
}
readOffset(offset) {
if (this._bigTiff) {
return this.readUint64(offset);
}
return this.readUint32(offset);
}
}
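// Usage sketch: all read* methods take absolute file offsets and subtract the
// slice's own offset internally, so `covers` should be consulted first when an
// offset may fall outside the fetched range. (`header` is a hypothetical
// ArrayBuffer fetched starting at byte 1000 of the file.)
//
//   const slice = new DataSlice(header, 1000, true, false);
//   if (slice.covers(1004, 2)) {
//     const value = slice.readUint16(1004); // reads bytes 4..5 of `header`
//   }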

View file

@ -0,0 +1,97 @@
import { getFloat16 } from '@petamoriken/float16';
export default class DataView64 {
constructor(arrayBuffer) {
this._dataView = new DataView(arrayBuffer);
}
get buffer() {
return this._dataView.buffer;
}
getUint64(offset, littleEndian) {
const left = this.getUint32(offset, littleEndian);
const right = this.getUint32(offset + 4, littleEndian);
let combined;
if (littleEndian) {
combined = left + ((2 ** 32) * right);
if (!Number.isSafeInteger(combined)) {
throw new Error(
`${combined} exceeds MAX_SAFE_INTEGER. `
+ 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues',
);
}
return combined;
}
combined = ((2 ** 32) * left) + right;
if (!Number.isSafeInteger(combined)) {
throw new Error(
`${combined} exceeds MAX_SAFE_INTEGER. `
+ 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues',
);
}
return combined;
}
// adapted from https://stackoverflow.com/a/55338384/8060591
getInt64(offset, littleEndian) {
let value = 0;
const isNegative = (this._dataView.getUint8(offset + (littleEndian ? 7 : 0)) & 0x80) > 0;
let carrying = true;
for (let i = 0; i < 8; i++) {
let byte = this._dataView.getUint8(offset + (littleEndian ? i : 7 - i));
if (isNegative) {
if (carrying) {
if (byte !== 0x00) {
byte = ~(byte - 1) & 0xff;
carrying = false;
}
} else {
byte = ~byte & 0xff;
}
}
value += byte * (256 ** i);
}
if (isNegative) {
value = -value;
}
return value;
}
getUint8(offset, littleEndian) {
return this._dataView.getUint8(offset, littleEndian);
}
getInt8(offset, littleEndian) {
return this._dataView.getInt8(offset, littleEndian);
}
getUint16(offset, littleEndian) {
return this._dataView.getUint16(offset, littleEndian);
}
getInt16(offset, littleEndian) {
return this._dataView.getInt16(offset, littleEndian);
}
getUint32(offset, littleEndian) {
return this._dataView.getUint32(offset, littleEndian);
}
getInt32(offset, littleEndian) {
return this._dataView.getInt32(offset, littleEndian);
}
getFloat16(offset, littleEndian) {
return getFloat16(this._dataView, offset, littleEndian);
}
getFloat32(offset, littleEndian) {
return this._dataView.getFloat32(offset, littleEndian);
}
getFloat64(offset, littleEndian) {
return this._dataView.getFloat64(offset, littleEndian);
}
}

View file

@ -0,0 +1,774 @@
/** @module geotiff */
import GeoTIFFImage from './geotiffimage.js';
import DataView64 from './dataview64.js';
import DataSlice from './dataslice.js';
import Pool from './pool.js';
import { makeRemoteSource, makeCustomSource } from './source/remote.js';
import { makeBufferSource } from './source/arraybuffer.js';
import { makeFileReaderSource } from './source/filereader.js';
import { makeFileSource } from './source/file.js';
import { BaseClient, BaseResponse } from './source/client/base.js';
import { fieldTypes, fieldTagNames, arrayFields, geoKeyNames } from './globals.js';
import { writeGeotiff } from './geotiffwriter.js';
import * as globals from './globals.js';
import * as rgb from './rgb.js';
import { getDecoder, addDecoder } from './compression/index.js';
import { setLogger } from './logging.js';
export { globals };
export { rgb };
export { default as BaseDecoder } from './compression/basedecoder.js';
export { getDecoder, addDecoder };
export { setLogger };
/**
* @typedef {Uint8Array | Int8Array | Uint16Array | Int16Array | Uint32Array | Int32Array | Float32Array | Float64Array}
* TypedArray
*/
/**
* @typedef {{ height:number, width: number }} Dimensions
*/
/**
* The autogenerated docs are a little confusing here. The effective type is:
*
* `TypedArray & { height: number; width: number}`
* @typedef {TypedArray & Dimensions} TypedArrayWithDimensions
*/
/**
* The autogenerated docs are a little confusing here. The effective type is:
*
* `TypedArray[] & { height: number; width: number}`
* @typedef {TypedArray[] & Dimensions} TypedArrayArrayWithDimensions
*/
/**
* The autogenerated docs are a little confusing here. The effective type is:
*
* `(TypedArray | TypedArray[]) & { height: number; width: number}`
* @typedef {TypedArrayWithDimensions | TypedArrayArrayWithDimensions} ReadRasterResult
*/
function getFieldTypeLength(fieldType) {
switch (fieldType) {
case fieldTypes.BYTE: case fieldTypes.ASCII: case fieldTypes.SBYTE: case fieldTypes.UNDEFINED:
return 1;
case fieldTypes.SHORT: case fieldTypes.SSHORT:
return 2;
case fieldTypes.LONG: case fieldTypes.SLONG: case fieldTypes.FLOAT: case fieldTypes.IFD:
return 4;
case fieldTypes.RATIONAL: case fieldTypes.SRATIONAL: case fieldTypes.DOUBLE:
case fieldTypes.LONG8: case fieldTypes.SLONG8: case fieldTypes.IFD8:
return 8;
default:
throw new RangeError(`Invalid field type: ${fieldType}`);
}
}
function parseGeoKeyDirectory(fileDirectory) {
const rawGeoKeyDirectory = fileDirectory.GeoKeyDirectory;
if (!rawGeoKeyDirectory) {
return null;
}
const geoKeyDirectory = {};
for (let i = 4; i <= rawGeoKeyDirectory[3] * 4; i += 4) {
const key = geoKeyNames[rawGeoKeyDirectory[i]];
const location = (rawGeoKeyDirectory[i + 1])
? (fieldTagNames[rawGeoKeyDirectory[i + 1]]) : null;
const count = rawGeoKeyDirectory[i + 2];
const offset = rawGeoKeyDirectory[i + 3];
let value = null;
if (!location) {
value = offset;
} else {
value = fileDirectory[location];
if (typeof value === 'undefined' || value === null) {
throw new Error(`Could not get value of geoKey '${key}'.`);
} else if (typeof value === 'string') {
value = value.substring(offset, offset + count - 1);
} else if (value.subarray) {
value = value.subarray(offset, offset + count);
if (count === 1) {
value = value[0];
}
}
}
geoKeyDirectory[key] = value;
}
return geoKeyDirectory;
}
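// Worked example: the GeoKeyDirectory tag is a flat array of unsigned shorts,
// a 4-value header followed by 4-value entries [keyID, tagLocation, count,
// valueOffset]. A tagLocation of 0 stores the value inline:
//   [1, 1, 0, 1,      // directory version 1, key revision 1.0, 1 key
//    1024, 0, 1, 2]   // GTModelTypeGeoKey = 2
// parses to { GTModelTypeGeoKey: 2 }; a non-zero tagLocation instead points
// into another tag (e.g. GeoAsciiParams) via `offset` and `count`.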
function getValues(dataSlice, fieldType, count, offset) {
let values = null;
let readMethod = null;
const fieldTypeLength = getFieldTypeLength(fieldType);
switch (fieldType) {
case fieldTypes.BYTE: case fieldTypes.ASCII: case fieldTypes.UNDEFINED:
values = new Uint8Array(count); readMethod = dataSlice.readUint8;
break;
case fieldTypes.SBYTE:
values = new Int8Array(count); readMethod = dataSlice.readInt8;
break;
case fieldTypes.SHORT:
values = new Uint16Array(count); readMethod = dataSlice.readUint16;
break;
case fieldTypes.SSHORT:
values = new Int16Array(count); readMethod = dataSlice.readInt16;
break;
case fieldTypes.LONG: case fieldTypes.IFD:
values = new Uint32Array(count); readMethod = dataSlice.readUint32;
break;
case fieldTypes.SLONG:
values = new Int32Array(count); readMethod = dataSlice.readInt32;
break;
case fieldTypes.LONG8: case fieldTypes.IFD8:
values = new Array(count); readMethod = dataSlice.readUint64;
break;
case fieldTypes.SLONG8:
values = new Array(count); readMethod = dataSlice.readInt64;
break;
case fieldTypes.RATIONAL:
values = new Uint32Array(count * 2); readMethod = dataSlice.readUint32;
break;
case fieldTypes.SRATIONAL:
values = new Int32Array(count * 2); readMethod = dataSlice.readInt32;
break;
case fieldTypes.FLOAT:
values = new Float32Array(count); readMethod = dataSlice.readFloat32;
break;
case fieldTypes.DOUBLE:
values = new Float64Array(count); readMethod = dataSlice.readFloat64;
break;
default:
throw new RangeError(`Invalid field type: ${fieldType}`);
}
// normal fields
if (!(fieldType === fieldTypes.RATIONAL || fieldType === fieldTypes.SRATIONAL)) {
for (let i = 0; i < count; ++i) {
values[i] = readMethod.call(
dataSlice, offset + (i * fieldTypeLength),
);
}
} else { // RATIONAL or SRATIONAL
for (let i = 0; i < count; i += 2) {
values[i] = readMethod.call(
dataSlice, offset + (i * fieldTypeLength),
);
values[i + 1] = readMethod.call(
dataSlice, offset + ((i * fieldTypeLength) + 4),
);
}
}
if (fieldType === fieldTypes.ASCII) {
return new TextDecoder('utf-8').decode(values);
}
return values;
}
/**
* Data class to store the parsed file directory, geo key directory and
* offset to the next IFD
*/
class ImageFileDirectory {
constructor(fileDirectory, geoKeyDirectory, nextIFDByteOffset) {
this.fileDirectory = fileDirectory;
this.geoKeyDirectory = geoKeyDirectory;
this.nextIFDByteOffset = nextIFDByteOffset;
}
}
/**
 * Error class for cases when an IFD index that does not exist in the file
 * was requested.
*/
class GeoTIFFImageIndexError extends Error {
constructor(index) {
super(`No image at index ${index}`);
this.index = index;
}
}
class GeoTIFFBase {
/**
* (experimental) Reads raster data from the best fitting image. This function uses
* the image with the lowest resolution that is still a higher resolution than the
* requested resolution.
* When specified, the `bbox` option is translated to the `window` option and the
* `resX` and `resY` to `width` and `height` respectively.
* Then, the [readRasters]{@link GeoTIFFImage#readRasters} method of the selected
* image is called and the result returned.
* @see GeoTIFFImage.readRasters
* @param {import('./geotiffimage').ReadRasterOptions} [options={}] optional parameters
* @returns {Promise<ReadRasterResult>} the decoded array(s), with `height` and `width`, as a promise
*/
async readRasters(options = {}) {
const { window: imageWindow, width, height } = options;
let { resX, resY, bbox } = options;
const firstImage = await this.getImage();
let usedImage = firstImage;
const imageCount = await this.getImageCount();
const imgBBox = firstImage.getBoundingBox();
if (imageWindow && bbox) {
throw new Error('Both "bbox" and "window" passed.');
}
// if width/height is passed, transform it to resolution
if (width || height) {
// if we have an image window (pixel coordinates), transform it to a BBox
// using the origin/resolution of the first image.
if (imageWindow) {
const [oX, oY] = firstImage.getOrigin();
const [rX, rY] = firstImage.getResolution();
bbox = [
oX + (imageWindow[0] * rX),
oY + (imageWindow[1] * rY),
oX + (imageWindow[2] * rX),
oY + (imageWindow[3] * rY),
];
}
// if we have a bbox (or calculated one)
const usedBBox = bbox || imgBBox;
if (width) {
if (resX) {
throw new Error('Both width and resX passed');
}
resX = (usedBBox[2] - usedBBox[0]) / width;
}
if (height) {
if (resY) {
          throw new Error('Both height and resY passed');
}
resY = (usedBBox[3] - usedBBox[1]) / height;
}
}
// if resolution is set or calculated, try to get the image with the worst acceptable resolution
if (resX || resY) {
const allImages = [];
for (let i = 0; i < imageCount; ++i) {
const image = await this.getImage(i);
const { SubfileType: subfileType, NewSubfileType: newSubfileType } = image.fileDirectory;
if (i === 0 || subfileType === 2 || newSubfileType & 1) {
allImages.push(image);
}
}
allImages.sort((a, b) => a.getWidth() - b.getWidth());
for (let i = 0; i < allImages.length; ++i) {
const image = allImages[i];
const imgResX = (imgBBox[2] - imgBBox[0]) / image.getWidth();
const imgResY = (imgBBox[3] - imgBBox[1]) / image.getHeight();
usedImage = image;
if ((resX && resX > imgResX) || (resY && resY > imgResY)) {
break;
}
}
}
let wnd = imageWindow;
if (bbox) {
const [oX, oY] = firstImage.getOrigin();
const [imageResX, imageResY] = usedImage.getResolution(firstImage);
wnd = [
Math.round((bbox[0] - oX) / imageResX),
Math.round((bbox[1] - oY) / imageResY),
Math.round((bbox[2] - oX) / imageResX),
Math.round((bbox[3] - oY) / imageResY),
];
wnd = [
Math.min(wnd[0], wnd[2]),
Math.min(wnd[1], wnd[3]),
Math.max(wnd[0], wnd[2]),
Math.max(wnd[1], wnd[3]),
];
}
return usedImage.readRasters({ ...options, window: wnd });
}
}
/**
* @typedef {Object} GeoTIFFOptions
* @property {boolean} [cache=false] whether or not decoded tiles shall be cached.
*/
/**
* The abstraction for a whole GeoTIFF file.
* @augments GeoTIFFBase
*/
class GeoTIFF extends GeoTIFFBase {
/**
* @constructor
* @param {*} source The datasource to read from.
* @param {boolean} littleEndian Whether the image uses little endian.
* @param {boolean} bigTiff Whether the image uses bigTIFF conventions.
* @param {number} firstIFDOffset The numeric byte-offset from the start of the image
* to the first IFD.
* @param {GeoTIFFOptions} [options] further options.
*/
constructor(source, littleEndian, bigTiff, firstIFDOffset, options = {}) {
super();
this.source = source;
this.littleEndian = littleEndian;
this.bigTiff = bigTiff;
this.firstIFDOffset = firstIFDOffset;
this.cache = options.cache || false;
this.ifdRequests = [];
this.ghostValues = null;
}
async getSlice(offset, size) {
const fallbackSize = this.bigTiff ? 4048 : 1024;
return new DataSlice(
(await this.source.fetch([{
offset,
length: typeof size !== 'undefined' ? size : fallbackSize,
}]))[0],
offset,
this.littleEndian,
this.bigTiff,
);
}
/**
* Instructs to parse an image file directory at the given file offset.
* As there is no way to ensure that a location is indeed the start of an IFD,
   * this function must be called with caution (e.g. only using the IFD offsets from
* the headers or other IFDs).
* @param {number} offset the offset to parse the IFD at
* @returns {Promise<ImageFileDirectory>} the parsed IFD
*/
async parseFileDirectoryAt(offset) {
const entrySize = this.bigTiff ? 20 : 12;
const offsetSize = this.bigTiff ? 8 : 2;
let dataSlice = await this.getSlice(offset);
const numDirEntries = this.bigTiff
? dataSlice.readUint64(offset)
: dataSlice.readUint16(offset);
// if the slice does not cover the whole IFD, request a bigger slice, where the
// whole IFD fits: num of entries + n x tag length + offset to next IFD
const byteSize = (numDirEntries * entrySize) + (this.bigTiff ? 16 : 6);
if (!dataSlice.covers(offset, byteSize)) {
dataSlice = await this.getSlice(offset, byteSize);
}
const fileDirectory = {};
// loop over the IFD and create a file directory object
let i = offset + (this.bigTiff ? 8 : 2);
for (let entryCount = 0; entryCount < numDirEntries; i += entrySize, ++entryCount) {
const fieldTag = dataSlice.readUint16(i);
const fieldType = dataSlice.readUint16(i + 2);
const typeCount = this.bigTiff
? dataSlice.readUint64(i + 4)
: dataSlice.readUint32(i + 4);
let fieldValues;
let value;
const fieldTypeLength = getFieldTypeLength(fieldType);
const valueOffset = i + (this.bigTiff ? 12 : 8);
// check whether the value is directly encoded in the tag or refers to a
// different external byte range
if (fieldTypeLength * typeCount <= (this.bigTiff ? 8 : 4)) {
fieldValues = getValues(dataSlice, fieldType, typeCount, valueOffset);
} else {
// resolve the reference to the actual byte range
const actualOffset = dataSlice.readOffset(valueOffset);
const length = getFieldTypeLength(fieldType) * typeCount;
        // check whether we actually cover the referenced byte range; if not,
// request a new slice of bytes to read from it
if (dataSlice.covers(actualOffset, length)) {
fieldValues = getValues(dataSlice, fieldType, typeCount, actualOffset);
} else {
const fieldDataSlice = await this.getSlice(actualOffset, length);
fieldValues = getValues(fieldDataSlice, fieldType, typeCount, actualOffset);
}
}
// unpack single values from the array
if (typeCount === 1 && arrayFields.indexOf(fieldTag) === -1
&& !(fieldType === fieldTypes.RATIONAL || fieldType === fieldTypes.SRATIONAL)) {
value = fieldValues[0];
} else {
value = fieldValues;
}
      // write the tag's value to the file directory
fileDirectory[fieldTagNames[fieldTag]] = value;
}
const geoKeyDirectory = parseGeoKeyDirectory(fileDirectory);
const nextIFDByteOffset = dataSlice.readOffset(
offset + offsetSize + (entrySize * numDirEntries),
);
return new ImageFileDirectory(
fileDirectory,
geoKeyDirectory,
nextIFDByteOffset,
);
}
async requestIFD(index) {
// see if we already have that IFD index requested.
if (this.ifdRequests[index]) {
// attach to an already requested IFD
return this.ifdRequests[index];
} else if (index === 0) {
// special case for index 0
this.ifdRequests[index] = this.parseFileDirectoryAt(this.firstIFDOffset);
return this.ifdRequests[index];
} else if (!this.ifdRequests[index - 1]) {
// if the previous IFD was not yet loaded, load that one first
// this is the recursive call.
try {
this.ifdRequests[index - 1] = this.requestIFD(index - 1);
} catch (e) {
// if the previous one already was an index error, rethrow
// with the current index
if (e instanceof GeoTIFFImageIndexError) {
throw new GeoTIFFImageIndexError(index);
}
// rethrow anything else
throw e;
}
}
// if the previous IFD was loaded, we can finally fetch the one we are interested in.
// we need to wrap this in an IIFE, otherwise this.ifdRequests[index] would be delayed
this.ifdRequests[index] = (async () => {
const previousIfd = await this.ifdRequests[index - 1];
if (previousIfd.nextIFDByteOffset === 0) {
throw new GeoTIFFImageIndexError(index);
}
return this.parseFileDirectoryAt(previousIfd.nextIFDByteOffset);
})();
return this.ifdRequests[index];
}
/**
* Get the n-th internal subfile of an image. By default, the first is returned.
*
* @param {number} [index=0] the index of the image to return.
* @returns {Promise<GeoTIFFImage>} the image at the given index
*/
async getImage(index = 0) {
const ifd = await this.requestIFD(index);
return new GeoTIFFImage(
ifd.fileDirectory, ifd.geoKeyDirectory,
this.dataView, this.littleEndian, this.cache, this.source,
);
}
/**
* Returns the count of the internal subfiles.
*
* @returns {Promise<number>} the number of internal subfile images
*/
async getImageCount() {
let index = 0;
// loop until we run out of IFDs
let hasNext = true;
while (hasNext) {
try {
await this.requestIFD(index);
++index;
} catch (e) {
if (e instanceof GeoTIFFImageIndexError) {
hasNext = false;
} else {
throw e;
}
}
}
return index;
}
/**
* Get the values of the COG ghost area as a parsed map.
* See https://gdal.org/drivers/raster/cog.html#header-ghost-area for reference
* @returns {Promise<Object>} the parsed ghost area or null, if no such area was found
*/
async getGhostValues() {
const offset = this.bigTiff ? 16 : 8;
if (this.ghostValues) {
return this.ghostValues;
}
const detectionString = 'GDAL_STRUCTURAL_METADATA_SIZE=';
const heuristicAreaSize = detectionString.length + 100;
let slice = await this.getSlice(offset, heuristicAreaSize);
if (detectionString === getValues(slice, fieldTypes.ASCII, detectionString.length, offset)) {
const valuesString = getValues(slice, fieldTypes.ASCII, heuristicAreaSize, offset);
const firstLine = valuesString.split('\n')[0];
const metadataSize = Number(firstLine.split('=')[1].split(' ')[0]) + firstLine.length;
if (metadataSize > heuristicAreaSize) {
slice = await this.getSlice(offset, metadataSize);
}
const fullString = getValues(slice, fieldTypes.ASCII, metadataSize, offset);
this.ghostValues = {};
fullString
.split('\n')
.filter((line) => line.length > 0)
.map((line) => line.split('='))
.forEach(([key, value]) => {
this.ghostValues[key] = value;
});
}
return this.ghostValues;
}
/**
* Parse a (Geo)TIFF file from the given source.
*
* @param {*} source The source of data to parse from.
* @param {GeoTIFFOptions} [options] Additional options.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
*/
static async fromSource(source, options, signal) {
const headerData = (await source.fetch([{ offset: 0, length: 1024 }], signal))[0];
const dataView = new DataView64(headerData);
const BOM = dataView.getUint16(0, 0);
let littleEndian;
if (BOM === 0x4949) {
littleEndian = true;
} else if (BOM === 0x4D4D) {
littleEndian = false;
} else {
throw new TypeError('Invalid byte order value.');
}
const magicNumber = dataView.getUint16(2, littleEndian);
let bigTiff;
if (magicNumber === 42) {
bigTiff = false;
} else if (magicNumber === 43) {
bigTiff = true;
const offsetByteSize = dataView.getUint16(4, littleEndian);
if (offsetByteSize !== 8) {
throw new Error('Unsupported offset byte-size.');
}
} else {
throw new TypeError('Invalid magic number.');
}
const firstIFDOffset = bigTiff
? dataView.getUint64(8, littleEndian)
: dataView.getUint32(4, littleEndian);
return new GeoTIFF(source, littleEndian, bigTiff, firstIFDOffset, options);
}
/**
* Closes the underlying file buffer
   * N.B. After the GeoTIFF has been completely processed, it needs
   * to be closed, but only if it has been constructed from a file.
*/
close() {
if (typeof this.source.close === 'function') {
return this.source.close();
}
return false;
}
}
export { GeoTIFF };
export default GeoTIFF;
/**
* Wrapper for GeoTIFF files that have external overviews.
* @augments GeoTIFFBase
*/
class MultiGeoTIFF extends GeoTIFFBase {
/**
* Construct a new MultiGeoTIFF from a main and several overview files.
* @param {GeoTIFF} mainFile The main GeoTIFF file.
* @param {GeoTIFF[]} overviewFiles An array of overview files.
*/
constructor(mainFile, overviewFiles) {
super();
this.mainFile = mainFile;
this.overviewFiles = overviewFiles;
this.imageFiles = [mainFile].concat(overviewFiles);
this.fileDirectoriesPerFile = null;
this.fileDirectoriesPerFileParsing = null;
this.imageCount = null;
}
async parseFileDirectoriesPerFile() {
const requests = [this.mainFile.parseFileDirectoryAt(this.mainFile.firstIFDOffset)]
.concat(this.overviewFiles.map((file) => file.parseFileDirectoryAt(file.firstIFDOffset)));
this.fileDirectoriesPerFile = await Promise.all(requests);
return this.fileDirectoriesPerFile;
}
/**
* Get the n-th internal subfile of an image. By default, the first is returned.
*
* @param {number} [index=0] the index of the image to return.
* @returns {Promise<GeoTIFFImage>} the image at the given index
*/
async getImage(index = 0) {
await this.getImageCount();
await this.parseFileDirectoriesPerFile();
let visited = 0;
let relativeIndex = 0;
for (let i = 0; i < this.imageFiles.length; i++) {
const imageFile = this.imageFiles[i];
for (let ii = 0; ii < this.imageCounts[i]; ii++) {
if (index === visited) {
const ifd = await imageFile.requestIFD(relativeIndex);
return new GeoTIFFImage(
ifd.fileDirectory, ifd.geoKeyDirectory,
imageFile.dataView, imageFile.littleEndian, imageFile.cache, imageFile.source,
);
}
visited++;
relativeIndex++;
}
relativeIndex = 0;
}
throw new RangeError('Invalid image index');
}
/**
* Returns the count of the internal subfiles.
*
* @returns {Promise<number>} the number of internal subfile images
*/
async getImageCount() {
if (this.imageCount !== null) {
return this.imageCount;
}
const requests = [this.mainFile.getImageCount()]
.concat(this.overviewFiles.map((file) => file.getImageCount()));
this.imageCounts = await Promise.all(requests);
this.imageCount = this.imageCounts.reduce((count, ifds) => count + ifds, 0);
return this.imageCount;
}
}
export { MultiGeoTIFF };
/**
* Creates a new GeoTIFF from a remote URL.
* @param {string} url The URL to access the image from
* @param {object} [options] Additional options to pass to the source.
* See {@link makeRemoteSource} for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export async function fromUrl(url, options = {}, signal) {
  return GeoTIFF.fromSource(makeRemoteSource(url, options), options, signal);
}
/**
* Creates a new GeoTIFF from a custom {@link BaseClient}.
* @param {BaseClient} client The client.
* @param {object} [options] Additional options to pass to the source.
* See {@link makeRemoteSource} for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export async function fromCustomClient(client, options = {}, signal) {
  return GeoTIFF.fromSource(makeCustomSource(client, options), options, signal);
}
/**
* Construct a new GeoTIFF from an
* [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.
* @param {ArrayBuffer} arrayBuffer The data to read the file from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export async function fromArrayBuffer(arrayBuffer, signal) {
  return GeoTIFF.fromSource(makeBufferSource(arrayBuffer), {}, signal);
}
/**
* Construct a GeoTIFF from a local file path. This uses the node
* [filesystem API]{@link https://nodejs.org/api/fs.html} and is
* not available on browsers.
*
 * N.B. After the GeoTIFF has been completely processed, it needs
 * to be closed, but only if it has been constructed from a file.
* @param {string} path The file path to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export async function fromFile(path, signal) {
  return GeoTIFF.fromSource(makeFileSource(path), {}, signal);
}
/**
* Construct a GeoTIFF from an HTML
* [Blob]{@link https://developer.mozilla.org/en-US/docs/Web/API/Blob} or
* [File]{@link https://developer.mozilla.org/en-US/docs/Web/API/File}
* object.
* @param {Blob|File} blob The Blob or File object to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export async function fromBlob(blob, signal) {
  return GeoTIFF.fromSource(makeFileReaderSource(blob), {}, signal);
}
/**
* Construct a MultiGeoTIFF from the given URLs.
* @param {string} mainUrl The URL for the main file.
* @param {string[]} overviewUrls An array of URLs for the overview images.
* @param {Object} [options] Additional options to pass to the source.
* See [makeRemoteSource]{@link module:source.makeRemoteSource}
* for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<MultiGeoTIFF>} The resulting MultiGeoTIFF file.
*/
export async function fromUrls(mainUrl, overviewUrls = [], options = {}, signal) {
  const mainFile = await GeoTIFF.fromSource(makeRemoteSource(mainUrl, options), options, signal);
  const overviewFiles = await Promise.all(
    overviewUrls.map((url) => GeoTIFF.fromSource(makeRemoteSource(url, options), options, signal)),
);
return new MultiGeoTIFF(mainFile, overviewFiles);
}
/**
* Main creating function for GeoTIFF files.
 * @param {Array} values array of pixel values
 * @param {Object} metadata the metadata describing the image
 * @returns {ArrayBuffer} the encoded GeoTIFF file
*/
export function writeArrayBuffer(values, metadata) {
return writeGeotiff(values, metadata);
}
export { Pool };
export { GeoTIFFImage };
export { BaseClient, BaseResponse };
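// Usage sketch (illustrative; the URL is hypothetical): the typical read path
// chains fromUrl (or fromArrayBuffer/fromBlob/fromFile) -> getImage ->
// readRasters.
//
//   const tiff = await fromUrl('https://example.com/data.tif');
//   const image = await tiff.getImage();   // first IFD / full-resolution image
//   const [band] = await image.readRasters({
//     window: [0, 0, 256, 256],            // left, top, right, bottom (pixels)
//     samples: [0],                        // first band only
//   });
//   // band is a typed array holding 256 * 256 values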

View file

@ -0,0 +1,945 @@
/** @module geotiffimage */
import { getFloat16 } from '@petamoriken/float16';
import getAttribute from 'xml-utils/get-attribute.js';
import findTagsByName from 'xml-utils/find-tags-by-name.js';
import { photometricInterpretations, ExtraSamplesValues } from './globals.js';
import { fromWhiteIsZero, fromBlackIsZero, fromPalette, fromCMYK, fromYCbCr, fromCIELab } from './rgb.js';
import { getDecoder } from './compression/index.js';
import { resample, resampleInterleaved } from './resample.js';
/**
* @typedef {Object} ReadRasterOptions
* @property {Array<number>} [window=whole window] the subset to read data from in pixels.
* @property {Array<number>} [bbox=whole image] the subset to read data from in
* geographical coordinates.
* @property {Array<number>} [samples=all samples] the selection of samples to read from. Default is all samples.
* @property {boolean} [interleave=false] whether the data shall be read
* in one single array or separate
* arrays.
* @property {Pool} [pool=null] The optional decoder pool to use.
 * @property {number} [width] The desired width of the output. When the width is not the
 *                             same as the image's, resampling will be performed.
 * @property {number} [height] The desired height of the output. When the height is not the
 *                              same as the image's, resampling will be performed.
* @property {string} [resampleMethod='nearest'] The desired resampling method.
* @property {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @property {number|number[]} [fillValue] The value to use for parts of the image
* outside of the images extent. When multiple
* samples are requested, an array of fill values
* can be passed.
*/
/** @typedef {import("./geotiff.js").TypedArray} TypedArray */
/** @typedef {import("./geotiff.js").ReadRasterResult} ReadRasterResult */
function sum(array, start, end) {
let s = 0;
for (let i = start; i < end; ++i) {
s += array[i];
}
return s;
}
function arrayForType(format, bitsPerSample, size) {
switch (format) {
case 1: // unsigned integer data
if (bitsPerSample <= 8) {
return new Uint8Array(size);
} else if (bitsPerSample <= 16) {
return new Uint16Array(size);
} else if (bitsPerSample <= 32) {
return new Uint32Array(size);
}
break;
case 2: // twos complement signed integer data
if (bitsPerSample === 8) {
return new Int8Array(size);
} else if (bitsPerSample === 16) {
return new Int16Array(size);
} else if (bitsPerSample === 32) {
return new Int32Array(size);
}
break;
case 3: // floating point data
switch (bitsPerSample) {
case 16:
case 32:
return new Float32Array(size);
case 64:
return new Float64Array(size);
default:
break;
}
break;
default:
break;
}
throw Error('Unsupported data format/bitsPerSample');
}
function needsNormalization(format, bitsPerSample) {
if ((format === 1 || format === 2) && bitsPerSample <= 32 && bitsPerSample % 8 === 0) {
return false;
} else if (format === 3 && (bitsPerSample === 16 || bitsPerSample === 32 || bitsPerSample === 64)) {
return false;
}
return true;
}
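// Example: sample widths that already align to whole 8/16/32-bit words map
// directly onto typed arrays; bit-packed widths must be widened first:
//   needsNormalization(1, 16); // false - a Uint16Array holds it as-is
//   needsNormalization(1, 12); // true  - 12-bit packed data must be unpacked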
function normalizeArray(inBuffer, format, planarConfiguration, samplesPerPixel, bitsPerSample, tileWidth, tileHeight) {
// const inByteArray = new Uint8Array(inBuffer);
const view = new DataView(inBuffer);
const outSize = planarConfiguration === 2
? tileHeight * tileWidth
: tileHeight * tileWidth * samplesPerPixel;
const samplesToTransfer = planarConfiguration === 2
? 1 : samplesPerPixel;
const outArray = arrayForType(format, bitsPerSample, outSize);
// let pixel = 0;
const bitMask = parseInt('1'.repeat(bitsPerSample), 2);
if (format === 1) { // unsigned integer
// translation of https://github.com/OSGeo/gdal/blob/master/gdal/frmts/gtiff/geotiff.cpp#L7337
let pixelBitSkip;
// let sampleBitOffset = 0;
if (planarConfiguration === 1) {
pixelBitSkip = samplesPerPixel * bitsPerSample;
// sampleBitOffset = (samplesPerPixel - 1) * bitsPerSample;
} else {
pixelBitSkip = bitsPerSample;
}
// Bits per line rounds up to next byte boundary.
let bitsPerLine = tileWidth * pixelBitSkip;
if ((bitsPerLine & 7) !== 0) {
bitsPerLine = (bitsPerLine + 7) & (~7);
}
for (let y = 0; y < tileHeight; ++y) {
const lineBitOffset = y * bitsPerLine;
for (let x = 0; x < tileWidth; ++x) {
const pixelBitOffset = lineBitOffset + (x * samplesToTransfer * bitsPerSample);
for (let i = 0; i < samplesToTransfer; ++i) {
const bitOffset = pixelBitOffset + (i * bitsPerSample);
const outIndex = (((y * tileWidth) + x) * samplesToTransfer) + i;
const byteOffset = Math.floor(bitOffset / 8);
const innerBitOffset = bitOffset % 8;
if (innerBitOffset + bitsPerSample <= 8) {
outArray[outIndex] = (view.getUint8(byteOffset) >> (8 - bitsPerSample) - innerBitOffset) & bitMask;
} else if (innerBitOffset + bitsPerSample <= 16) {
outArray[outIndex] = (view.getUint16(byteOffset) >> (16 - bitsPerSample) - innerBitOffset) & bitMask;
} else if (innerBitOffset + bitsPerSample <= 24) {
const raw = (view.getUint16(byteOffset) << 8) | (view.getUint8(byteOffset + 2));
outArray[outIndex] = (raw >> (24 - bitsPerSample) - innerBitOffset) & bitMask;
} else {
outArray[outIndex] = (view.getUint32(byteOffset) >> (32 - bitsPerSample) - innerBitOffset) & bitMask;
}
// let outWord = 0;
// for (let bit = 0; bit < bitsPerSample; ++bit) {
// if (inByteArray[bitOffset >> 3]
// & (0x80 >> (bitOffset & 7))) {
// outWord |= (1 << (bitsPerSample - 1 - bit));
// }
// ++bitOffset;
// }
// outArray[outIndex] = outWord;
// outArray[pixel] = outWord;
// pixel += 1;
}
// bitOffset = bitOffset + pixelBitSkip - bitsPerSample;
}
}
} else if (format === 3) { // floating point
// Float16 is handled elsewhere
// normalize 16/24 bit floats to 32 bit floats in the array
// console.time();
// if (bitsPerSample === 16) {
// for (let byte = 0, outIndex = 0; byte < inBuffer.byteLength; byte += 2, ++outIndex) {
// outArray[outIndex] = getFloat16(view, byte);
// }
// }
// console.timeEnd()
}
return outArray.buffer;
}
/**
* GeoTIFF sub-file image.
*/
class GeoTIFFImage {
/**
* @constructor
* @param {Object} fileDirectory The parsed file directory
* @param {Object} geoKeys The parsed geo-keys
* @param {DataView} dataView The DataView for the underlying file.
* @param {Boolean} littleEndian Whether the file is encoded in little or big endian
* @param {Boolean} cache Whether or not decoded tiles shall be cached
* @param {import('./source/basesource').BaseSource} source The datasource to read from
*/
constructor(fileDirectory, geoKeys, dataView, littleEndian, cache, source) {
this.fileDirectory = fileDirectory;
this.geoKeys = geoKeys;
this.dataView = dataView;
this.littleEndian = littleEndian;
this.tiles = cache ? {} : null;
this.isTiled = !fileDirectory.StripOffsets;
const planarConfiguration = fileDirectory.PlanarConfiguration;
this.planarConfiguration = (typeof planarConfiguration === 'undefined') ? 1 : planarConfiguration;
if (this.planarConfiguration !== 1 && this.planarConfiguration !== 2) {
throw new Error('Invalid planar configuration.');
}
this.source = source;
}
/**
* Returns the associated parsed file directory.
* @returns {Object} the parsed file directory
*/
getFileDirectory() {
return this.fileDirectory;
}
/**
* Returns the associated parsed geo keys.
* @returns {Object} the parsed geo keys
*/
getGeoKeys() {
return this.geoKeys;
}
/**
* Returns the width of the image.
* @returns {Number} the width of the image
*/
getWidth() {
return this.fileDirectory.ImageWidth;
}
/**
* Returns the height of the image.
* @returns {Number} the height of the image
*/
getHeight() {
return this.fileDirectory.ImageLength;
}
/**
* Returns the number of samples per pixel.
* @returns {Number} the number of samples per pixel
*/
getSamplesPerPixel() {
return typeof this.fileDirectory.SamplesPerPixel !== 'undefined'
? this.fileDirectory.SamplesPerPixel : 1;
}
/**
* Returns the width of each tile.
* @returns {Number} the width of each tile
*/
getTileWidth() {
return this.isTiled ? this.fileDirectory.TileWidth : this.getWidth();
}
/**
* Returns the height of each tile.
* @returns {Number} the height of each tile
*/
getTileHeight() {
if (this.isTiled) {
return this.fileDirectory.TileLength;
}
if (typeof this.fileDirectory.RowsPerStrip !== 'undefined') {
return Math.min(this.fileDirectory.RowsPerStrip, this.getHeight());
}
return this.getHeight();
}
getBlockWidth() {
return this.getTileWidth();
}
getBlockHeight(y) {
if (this.isTiled || (y + 1) * this.getTileHeight() <= this.getHeight()) {
return this.getTileHeight();
} else {
return this.getHeight() - (y * this.getTileHeight());
}
}
/**
* Calculates the number of bytes for each pixel across all samples. Only full
   * bytes are supported; an exception is thrown when this is not the case.
* @returns {Number} the bytes per pixel
*/
getBytesPerPixel() {
let bytes = 0;
for (let i = 0; i < this.fileDirectory.BitsPerSample.length; ++i) {
bytes += this.getSampleByteSize(i);
}
return bytes;
}
getSampleByteSize(i) {
if (i >= this.fileDirectory.BitsPerSample.length) {
throw new RangeError(`Sample index ${i} is out of range.`);
}
return Math.ceil(this.fileDirectory.BitsPerSample[i] / 8);
}
getReaderForSample(sampleIndex) {
const format = this.fileDirectory.SampleFormat
? this.fileDirectory.SampleFormat[sampleIndex] : 1;
const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];
switch (format) {
case 1: // unsigned integer data
if (bitsPerSample <= 8) {
return DataView.prototype.getUint8;
} else if (bitsPerSample <= 16) {
return DataView.prototype.getUint16;
} else if (bitsPerSample <= 32) {
return DataView.prototype.getUint32;
}
break;
case 2: // twos complement signed integer data
if (bitsPerSample <= 8) {
return DataView.prototype.getInt8;
} else if (bitsPerSample <= 16) {
return DataView.prototype.getInt16;
} else if (bitsPerSample <= 32) {
return DataView.prototype.getInt32;
}
break;
case 3:
switch (bitsPerSample) {
case 16:
return function (offset, littleEndian) {
return getFloat16(this, offset, littleEndian);
};
case 32:
return DataView.prototype.getFloat32;
case 64:
return DataView.prototype.getFloat64;
default:
break;
}
break;
default:
break;
}
throw Error('Unsupported data format/bitsPerSample');
}
getSampleFormat(sampleIndex = 0) {
return this.fileDirectory.SampleFormat
? this.fileDirectory.SampleFormat[sampleIndex] : 1;
}
getBitsPerSample(sampleIndex = 0) {
return this.fileDirectory.BitsPerSample[sampleIndex];
}
getArrayForSample(sampleIndex, size) {
const format = this.getSampleFormat(sampleIndex);
const bitsPerSample = this.getBitsPerSample(sampleIndex);
return arrayForType(format, bitsPerSample, size);
}
/**
* Returns the decoded strip or tile.
* @param {Number} x the strip or tile x-offset
* @param {Number} y the tile y-offset (0 for stripped images)
* @param {Number} sample the sample to get for separated samples
* @param {import("./geotiff").Pool|import("./geotiff").BaseDecoder} poolOrDecoder the decoder or decoder pool
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise.<ArrayBuffer>}
*/
async getTileOrStrip(x, y, sample, poolOrDecoder, signal) {
const numTilesPerRow = Math.ceil(this.getWidth() / this.getTileWidth());
const numTilesPerCol = Math.ceil(this.getHeight() / this.getTileHeight());
let index;
const { tiles } = this;
if (this.planarConfiguration === 1) {
index = (y * numTilesPerRow) + x;
} else if (this.planarConfiguration === 2) {
index = (sample * numTilesPerRow * numTilesPerCol) + (y * numTilesPerRow) + x;
}
let offset;
let byteCount;
if (this.isTiled) {
offset = this.fileDirectory.TileOffsets[index];
byteCount = this.fileDirectory.TileByteCounts[index];
} else {
offset = this.fileDirectory.StripOffsets[index];
byteCount = this.fileDirectory.StripByteCounts[index];
}
const slice = (await this.source.fetch([{ offset, length: byteCount }], signal))[0];
let request;
if (tiles === null || !tiles[index]) {
// resolve each request by potentially applying array normalization
request = (async () => {
let data = await poolOrDecoder.decode(this.fileDirectory, slice);
const sampleFormat = this.getSampleFormat();
const bitsPerSample = this.getBitsPerSample();
if (needsNormalization(sampleFormat, bitsPerSample)) {
data = normalizeArray(
data,
sampleFormat,
this.planarConfiguration,
this.getSamplesPerPixel(),
bitsPerSample,
this.getTileWidth(),
this.getBlockHeight(y),
);
}
return data;
})();
// set the cache
if (tiles !== null) {
tiles[index] = request;
}
} else {
// get from the cache
request = tiles[index];
}
// cache the tile request
return { x, y, sample, data: await request };
}
/**
* Internal read function.
* @private
* @param {Array} imageWindow The image window in pixel coordinates
* @param {Array} samples The selected samples (0-based indices)
* @param {TypedArray|TypedArray[]} valueArrays The array(s) to write into
* @param {Boolean} interleave Whether or not to write in an interleaved manner
* @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
* @param {number} width the width of window to be read into
* @param {number} height the height of window to be read into
* @param {number} resampleMethod the resampling method to be used when interpolating
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<ReadRasterResult>}
*/
async _readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width,
height, resampleMethod, signal) {
const tileWidth = this.getTileWidth();
const tileHeight = this.getTileHeight();
const imageWidth = this.getWidth();
const imageHeight = this.getHeight();
const minXTile = Math.max(Math.floor(imageWindow[0] / tileWidth), 0);
const maxXTile = Math.min(
Math.ceil(imageWindow[2] / tileWidth),
Math.ceil(imageWidth / tileWidth),
);
const minYTile = Math.max(Math.floor(imageWindow[1] / tileHeight), 0);
const maxYTile = Math.min(
Math.ceil(imageWindow[3] / tileHeight),
Math.ceil(imageHeight / tileHeight),
);
const windowWidth = imageWindow[2] - imageWindow[0];
let bytesPerPixel = this.getBytesPerPixel();
const srcSampleOffsets = [];
const sampleReaders = [];
for (let i = 0; i < samples.length; ++i) {
if (this.planarConfiguration === 1) {
srcSampleOffsets.push(sum(this.fileDirectory.BitsPerSample, 0, samples[i]) / 8);
} else {
srcSampleOffsets.push(0);
}
sampleReaders.push(this.getReaderForSample(samples[i]));
}
const promises = [];
const { littleEndian } = this;
for (let yTile = minYTile; yTile < maxYTile; ++yTile) {
for (let xTile = minXTile; xTile < maxXTile; ++xTile) {
let getPromise;
if (this.planarConfiguration === 1) {
getPromise = this.getTileOrStrip(xTile, yTile, 0, poolOrDecoder, signal);
}
for (let sampleIndex = 0; sampleIndex < samples.length; ++sampleIndex) {
const si = sampleIndex;
const sample = samples[sampleIndex];
if (this.planarConfiguration === 2) {
bytesPerPixel = this.getSampleByteSize(sample);
getPromise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder, signal);
}
const promise = getPromise.then((tile) => {
const buffer = tile.data;
const dataView = new DataView(buffer);
const blockHeight = this.getBlockHeight(tile.y);
const firstLine = tile.y * tileHeight;
const firstCol = tile.x * tileWidth;
const lastLine = firstLine + blockHeight;
const lastCol = (tile.x + 1) * tileWidth;
const reader = sampleReaders[si];
const ymax = Math.min(blockHeight, blockHeight - (lastLine - imageWindow[3]), imageHeight - firstLine);
const xmax = Math.min(tileWidth, tileWidth - (lastCol - imageWindow[2]), imageWidth - firstCol);
for (let y = Math.max(0, imageWindow[1] - firstLine); y < ymax; ++y) {
for (let x = Math.max(0, imageWindow[0] - firstCol); x < xmax; ++x) {
const pixelOffset = ((y * tileWidth) + x) * bytesPerPixel;
const value = reader.call(
dataView, pixelOffset + srcSampleOffsets[si], littleEndian,
);
let windowCoordinate;
if (interleave) {
windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth * samples.length)
+ ((x + firstCol - imageWindow[0]) * samples.length)
+ si;
valueArrays[windowCoordinate] = value;
} else {
windowCoordinate = (
(y + firstLine - imageWindow[1]) * windowWidth
) + x + firstCol - imageWindow[0];
valueArrays[si][windowCoordinate] = value;
}
}
}
});
promises.push(promise);
}
}
}
await Promise.all(promises);
if ((width && (imageWindow[2] - imageWindow[0]) !== width)
|| (height && (imageWindow[3] - imageWindow[1]) !== height)) {
let resampled;
if (interleave) {
resampled = resampleInterleaved(
valueArrays,
imageWindow[2] - imageWindow[0],
imageWindow[3] - imageWindow[1],
width, height,
samples.length,
resampleMethod,
);
} else {
resampled = resample(
valueArrays,
imageWindow[2] - imageWindow[0],
imageWindow[3] - imageWindow[1],
width, height,
resampleMethod,
);
}
resampled.width = width;
resampled.height = height;
return resampled;
}
valueArrays.width = width || imageWindow[2] - imageWindow[0];
valueArrays.height = height || imageWindow[3] - imageWindow[1];
return valueArrays;
}
/**
* Reads raster data from the image. This function reads all selected samples
* into separate arrays of the correct type for that sample or into a single
* combined array when `interleave` is set. When provided, only a subset
* of the raster is read for each sample.
*
* @param {ReadRasterOptions} [options={}] optional parameters
* @returns {Promise<ReadRasterResult>} the decoded arrays as a promise
*/
async readRasters({
window: wnd, samples = [], interleave, pool = null,
width, height, resampleMethod, fillValue, signal,
} = {}) {
const imageWindow = wnd || [0, 0, this.getWidth(), this.getHeight()];
// check parameters
if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
throw new Error('Invalid subsets');
}
const imageWindowWidth = imageWindow[2] - imageWindow[0];
const imageWindowHeight = imageWindow[3] - imageWindow[1];
const numPixels = imageWindowWidth * imageWindowHeight;
const samplesPerPixel = this.getSamplesPerPixel();
if (!samples || !samples.length) {
for (let i = 0; i < samplesPerPixel; ++i) {
samples.push(i);
}
} else {
for (let i = 0; i < samples.length; ++i) {
if (samples[i] >= samplesPerPixel) {
return Promise.reject(new RangeError(`Invalid sample index '${samples[i]}'.`));
}
}
}
let valueArrays;
if (interleave) {
const format = this.fileDirectory.SampleFormat
? Math.max.apply(null, this.fileDirectory.SampleFormat) : 1;
const bitsPerSample = Math.max.apply(null, this.fileDirectory.BitsPerSample);
valueArrays = arrayForType(format, bitsPerSample, numPixels * samples.length);
if (fillValue) {
valueArrays.fill(fillValue);
}
} else {
valueArrays = [];
for (let i = 0; i < samples.length; ++i) {
const valueArray = this.getArrayForSample(samples[i], numPixels);
if (Array.isArray(fillValue) && i < fillValue.length) {
valueArray.fill(fillValue[i]);
} else if (fillValue && !Array.isArray(fillValue)) {
valueArray.fill(fillValue);
}
valueArrays.push(valueArray);
}
}
const poolOrDecoder = pool || await getDecoder(this.fileDirectory);
const result = await this._readRaster(
imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod, signal,
);
return result;
}
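  // Usage sketch (illustrative; `image` is a GeoTIFFImage instance): a window
  // may extend past the image bounds when a fill value is given for the
  // out-of-bounds area; without `interleave`, one typed array per requested
  // sample is returned:
  //
  //   const data = await image.readRasters({
  //     window: [-10, -10, 100, 100],
  //     fillValue: 0,
  //   });
  //   // data is an array of typed arrays with `width`/`height` attached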
/**
* Reads raster data from the image as RGB. The result is always an
* interleaved typed array.
* Colorspaces other than RGB will be transformed to RGB, color maps expanded.
* When no other method is applicable, the first sample is used to produce a
* grayscale image.
* When provided, only a subset of the raster is read for each sample.
*
* @param {Object} [options] optional parameters
* @param {Array<number>} [options.window] the subset to read data from in pixels.
* @param {boolean} [options.interleave=true] whether the data shall be read
* in one single array or separate
* arrays.
* @param {import("./geotiff").Pool} [options.pool=null] The optional decoder pool to use.
   * @param {number} [options.width] The desired width of the output. When the width is not the
   * same as the image's, resampling will be performed.
   * @param {number} [options.height] The desired height of the output. When the height is not the
   * same as the image's, resampling will be performed.
* @param {string} [options.resampleMethod='nearest'] The desired resampling method.
* @param {boolean} [options.enableAlpha=false] Enable reading alpha channel if present.
* @param {AbortSignal} [options.signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<ReadRasterResult>} the RGB array as a Promise
*/
async readRGB({ window, interleave = true, pool = null, width, height,
resampleMethod, enableAlpha = false, signal } = {}) {
const imageWindow = window || [0, 0, this.getWidth(), this.getHeight()];
// check parameters
if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
throw new Error('Invalid subsets');
}
const pi = this.fileDirectory.PhotometricInterpretation;
if (pi === photometricInterpretations.RGB) {
let s = [0, 1, 2];
if ((!(this.fileDirectory.ExtraSamples === ExtraSamplesValues.Unspecified)) && enableAlpha) {
s = [];
for (let i = 0; i < this.fileDirectory.BitsPerSample.length; i += 1) {
s.push(i);
}
}
return this.readRasters({
window,
interleave,
samples: s,
pool,
width,
height,
resampleMethod,
signal,
});
}
let samples;
switch (pi) {
case photometricInterpretations.WhiteIsZero:
case photometricInterpretations.BlackIsZero:
case photometricInterpretations.Palette:
samples = [0];
break;
case photometricInterpretations.CMYK:
samples = [0, 1, 2, 3];
break;
case photometricInterpretations.YCbCr:
case photometricInterpretations.CIELab:
samples = [0, 1, 2];
break;
default:
throw new Error('Invalid or unsupported photometric interpretation.');
}
const subOptions = {
window: imageWindow,
interleave: true,
samples,
pool,
width,
height,
resampleMethod,
signal,
};
const { fileDirectory } = this;
const raster = await this.readRasters(subOptions);
const max = 2 ** this.fileDirectory.BitsPerSample[0];
let data;
switch (pi) {
case photometricInterpretations.WhiteIsZero:
data = fromWhiteIsZero(raster, max);
break;
case photometricInterpretations.BlackIsZero:
data = fromBlackIsZero(raster, max);
break;
case photometricInterpretations.Palette:
data = fromPalette(raster, fileDirectory.ColorMap);
break;
case photometricInterpretations.CMYK:
data = fromCMYK(raster);
break;
case photometricInterpretations.YCbCr:
data = fromYCbCr(raster);
break;
case photometricInterpretations.CIELab:
data = fromCIELab(raster);
break;
default:
throw new Error('Unsupported photometric interpretation.');
}
// if non-interleaved data is requested, we must split the channels
// into their respective arrays
if (!interleave) {
const red = new Uint8Array(data.length / 3);
const green = new Uint8Array(data.length / 3);
const blue = new Uint8Array(data.length / 3);
for (let i = 0, j = 0; i < data.length; i += 3, ++j) {
red[j] = data[i];
green[j] = data[i + 1];
blue[j] = data[i + 2];
}
data = [red, green, blue];
}
data.width = raster.width;
data.height = raster.height;
return data;
}
/**
* Returns an array of tiepoints.
* @returns {Object[]}
*/
getTiePoints() {
if (!this.fileDirectory.ModelTiepoint) {
return [];
}
const tiePoints = [];
for (let i = 0; i < this.fileDirectory.ModelTiepoint.length; i += 6) {
tiePoints.push({
i: this.fileDirectory.ModelTiepoint[i],
j: this.fileDirectory.ModelTiepoint[i + 1],
k: this.fileDirectory.ModelTiepoint[i + 2],
x: this.fileDirectory.ModelTiepoint[i + 3],
y: this.fileDirectory.ModelTiepoint[i + 4],
z: this.fileDirectory.ModelTiepoint[i + 5],
});
}
return tiePoints;
}
/**
* Returns the parsed GDAL metadata items.
*
* If `sample` is null (the default), dataset-level metadata is returned.
* Otherwise, only metadata specific to the provided sample is returned.
*
* @param {number} [sample=null] The sample index.
* @returns {Object}
*/
getGDALMetadata(sample = null) {
const metadata = {};
if (!this.fileDirectory.GDAL_METADATA) {
return null;
}
const string = this.fileDirectory.GDAL_METADATA;
let items = findTagsByName(string, 'Item');
if (sample === null) {
items = items.filter((item) => getAttribute(item, 'sample') === undefined);
} else {
items = items.filter((item) => Number(getAttribute(item, 'sample')) === sample);
}
for (let i = 0; i < items.length; ++i) {
const item = items[i];
metadata[getAttribute(item, 'name')] = item.inner;
}
return metadata;
}
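/**
* Sketch (illustrative, assuming a `GeoTIFFImage` instance `image` whose file
* carries a GDAL_METADATA tag):
* ```js
* const datasetMeta = image.getGDALMetadata(); // items without a sample attribute
* const bandMeta = image.getGDALMetadata(0); // items with sample="0"
* ```
*/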
/**
* Returns the GDAL nodata value
* @returns {number|null}
*/
getGDALNoData() {
if (!this.fileDirectory.GDAL_NODATA) {
return null;
}
const string = this.fileDirectory.GDAL_NODATA;
return Number(string.substring(0, string.length - 1));
}
/**
* Returns the image origin as an XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @returns {Array<number>} The origin as a vector
*/
getOrigin() {
const tiePoints = this.fileDirectory.ModelTiepoint;
const modelTransformation = this.fileDirectory.ModelTransformation;
if (tiePoints && tiePoints.length === 6) {
return [
tiePoints[3],
tiePoints[4],
tiePoints[5],
];
}
if (modelTransformation) {
return [
modelTransformation[3],
modelTransformation[7],
modelTransformation[11],
];
}
throw new Error('The image does not have an affine transformation.');
}
/**
* Returns the image resolution as an XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from
* in cases when the current image does not have the
* required tags on its own.
* @returns {Array<number>} The resolution as a vector
*/
getResolution(referenceImage = null) {
const modelPixelScale = this.fileDirectory.ModelPixelScale;
const modelTransformation = this.fileDirectory.ModelTransformation;
if (modelPixelScale) {
return [
modelPixelScale[0],
-modelPixelScale[1],
modelPixelScale[2],
];
}
if (modelTransformation) {
return [
modelTransformation[0],
-modelTransformation[5],
modelTransformation[10],
];
}
if (referenceImage) {
const [refResX, refResY, refResZ] = referenceImage.getResolution();
return [
refResX * referenceImage.getWidth() / this.getWidth(),
refResY * referenceImage.getHeight() / this.getHeight(),
refResZ * referenceImage.getWidth() / this.getWidth(),
];
}
throw new Error('The image does not have an affine transformation.');
}
/**
* Returns whether the pixels of the image depict an area (as opposed to a point).
* @returns {Boolean} Whether the pixels depict an area
*/
pixelIsArea() {
return this.geoKeys.GTRasterTypeGeoKey === 1;
}
/**
* Returns the image bounding box as an array of 4 values: min-x, min-y,
* max-x and max-y. When the image has no affine transformation, then an
* exception is thrown.
* @returns {Array<number>} The bounding box
*/
getBoundingBox() {
const height = this.getHeight();
const width = this.getWidth();
if (this.fileDirectory.ModelTransformation) {
// eslint-disable-next-line no-unused-vars
const [a, b, c, d, e, f, g, h] = this.fileDirectory.ModelTransformation;
const corners = [
[0, 0],
[0, height],
[width, 0],
[width, height],
];
const projected = corners.map(([I, J]) => [
d + (a * I) + (b * J),
h + (e * I) + (f * J),
]);
const xs = projected.map((pt) => pt[0]);
const ys = projected.map((pt) => pt[1]);
return [
Math.min(...xs),
Math.min(...ys),
Math.max(...xs),
Math.max(...ys),
];
} else {
const origin = this.getOrigin();
const resolution = this.getResolution();
const x1 = origin[0];
const y1 = origin[1];
const x2 = x1 + (resolution[0] * this.getWidth());
const y2 = y1 + (resolution[1] * this.getHeight());
return [
Math.min(x1, x2),
Math.min(y1, y2),
Math.max(x1, x2),
Math.max(y1, y2),
];
}
}
}
export default GeoTIFFImage;
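/**
* Geo-referencing sketch (illustrative): for an image with ModelTiepoint and
* ModelPixelScale tags, the bounding box follows from origin and resolution:
* ```js
* const [originX, originY] = image.getOrigin();
* const [resX, resY] = image.getResolution();
* const bbox = image.getBoundingBox();
* // bbox[0] === Math.min(originX, originX + (resX * image.getWidth())), and
* // analogously for the y-axis (note resY is negative for north-up images)
* ```
*/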

View file

@ -0,0 +1,457 @@
/*
Some parts of this file are based on UTIF.js,
which was released under the MIT License.
You can view that here:
https://github.com/photopea/UTIF.js/blob/master/LICENSE
*/
import { fieldTagNames, fieldTagTypes, fieldTypeNames, geoKeyNames } from './globals.js';
import { assign, endsWith, forEach, invert, times } from './utils.js';
const tagName2Code = invert(fieldTagNames);
const geoKeyName2Code = invert(geoKeyNames);
const name2code = {};
assign(name2code, tagName2Code);
assign(name2code, geoKeyName2Code);
const typeName2byte = invert(fieldTypeNames);
// config variables
const numBytesInIfd = 1000;
const _binBE = {
nextZero: (data, o) => {
let oincr = o;
while (data[oincr] !== 0) {
oincr++;
}
return oincr;
},
readUshort: (buff, p) => {
return (buff[p] << 8) | buff[p + 1];
},
readShort: (buff, p) => {
const a = _binBE.ui8;
a[0] = buff[p + 1];
a[1] = buff[p + 0];
return _binBE.i16[0];
},
readInt: (buff, p) => {
const a = _binBE.ui8;
a[0] = buff[p + 3];
a[1] = buff[p + 2];
a[2] = buff[p + 1];
a[3] = buff[p + 0];
return _binBE.i32[0];
},
readUint: (buff, p) => {
const a = _binBE.ui8;
a[0] = buff[p + 3];
a[1] = buff[p + 2];
a[2] = buff[p + 1];
a[3] = buff[p + 0];
return _binBE.ui32[0];
},
readASCII: (buff, p, l) => {
return l.map((i) => String.fromCharCode(buff[p + i])).join('');
},
readFloat: (buff, p) => {
const a = _binBE.ui8;
times(4, (i) => {
a[i] = buff[p + 3 - i];
});
return _binBE.fl32[0];
},
readDouble: (buff, p) => {
const a = _binBE.ui8;
times(8, (i) => {
a[i] = buff[p + 7 - i];
});
return _binBE.fl64[0];
},
writeUshort: (buff, p, n) => {
buff[p] = (n >> 8) & 255;
buff[p + 1] = n & 255;
},
writeUint: (buff, p, n) => {
buff[p] = (n >> 24) & 255;
buff[p + 1] = (n >> 16) & 255;
buff[p + 2] = (n >> 8) & 255;
buff[p + 3] = (n >> 0) & 255;
},
writeASCII: (buff, p, s) => {
times(s.length, (i) => {
buff[p + i] = s.charCodeAt(i);
});
},
ui8: new Uint8Array(8),
};
_binBE.fl64 = new Float64Array(_binBE.ui8.buffer);
_binBE.writeDouble = (buff, p, n) => {
_binBE.fl64[0] = n;
times(8, (i) => {
buff[p + i] = _binBE.ui8[7 - i];
});
};
const _writeIFD = (bin, data, _offset, ifd) => {
let offset = _offset;
const keys = Object.keys(ifd).filter((key) => {
return key !== undefined && key !== null && key !== 'undefined';
});
bin.writeUshort(data, offset, keys.length);
offset += 2;
let eoff = offset + (12 * keys.length) + 4;
for (const key of keys) {
let tag = null;
if (typeof key === 'number') {
tag = key;
} else if (typeof key === 'string') {
tag = parseInt(key, 10);
}
const typeName = fieldTagTypes[tag];
const typeNum = typeName2byte[typeName];
if (typeName == null || typeName === undefined || typeof typeName === 'undefined') {
throw new Error(`unknown type of tag: ${tag}`);
}
let val = ifd[key];
if (val === undefined) {
throw new Error(`failed to get value for key ${key}`);
}
// ASCIIZ format with trailing 0 character
// http://www.fileformat.info/format/tiff/corion.htm
// https://stackoverflow.com/questions/7783044/whats-the-difference-between-asciiz-vs-ascii
if (typeName === 'ASCII' && typeof val === 'string' && endsWith(val, '\u0000') === false) {
val += '\u0000';
}
const num = val.length;
bin.writeUshort(data, offset, tag);
offset += 2;
bin.writeUshort(data, offset, typeNum);
offset += 2;
bin.writeUint(data, offset, num);
offset += 4;
let dlen = [-1, 1, 1, 2, 4, 8, 0, 0, 0, 0, 0, 0, 8][typeNum] * num;
let toff = offset;
if (dlen > 4) {
bin.writeUint(data, offset, eoff);
toff = eoff;
}
if (typeName === 'ASCII') {
bin.writeASCII(data, toff, val);
} else if (typeName === 'SHORT') {
times(num, (i) => {
bin.writeUshort(data, toff + (2 * i), val[i]);
});
} else if (typeName === 'LONG') {
times(num, (i) => {
bin.writeUint(data, toff + (4 * i), val[i]);
});
} else if (typeName === 'RATIONAL') {
times(num, (i) => {
bin.writeUint(data, toff + (8 * i), Math.round(val[i] * 10000));
bin.writeUint(data, toff + (8 * i) + 4, 10000);
});
} else if (typeName === 'DOUBLE') {
times(num, (i) => {
bin.writeDouble(data, toff + (8 * i), val[i]);
});
}
if (dlen > 4) {
dlen += (dlen & 1);
eoff += dlen;
}
offset += 4;
}
return [offset, eoff];
};
const encodeIfds = (ifds) => {
const data = new Uint8Array(numBytesInIfd);
let offset = 4;
const bin = _binBE;
// set big-endian byte-order
// https://en.wikipedia.org/wiki/TIFF#Byte_order
data[0] = 77;
data[1] = 77;
// set format-version number
// https://en.wikipedia.org/wiki/TIFF#Byte_order
data[3] = 42;
let ifdo = 8;
bin.writeUint(data, offset, ifdo);
offset += 4;
ifds.forEach((ifd, i) => {
const noffs = _writeIFD(bin, data, ifdo, ifd);
ifdo = noffs[1];
if (i < ifds.length - 1) {
bin.writeUint(data, noffs[0], ifdo);
}
});
if (data.slice) {
return data.slice(0, ifdo).buffer;
}
// fallback for older environments without Uint8Array#slice
const result = new Uint8Array(ifdo);
for (let i = 0; i < ifdo; i++) {
result[i] = data[i];
}
return result.buffer;
};
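/**
* Sketch (illustrative, module-internal): `encodeIfds` emits a big-endian TIFF
* header ('MM', magic number 42) followed by the encoded IFDs:
* ```js
* const buffer = encodeIfds([{ 256: [1], 257: [1] }]); // ImageWidth/ImageLength = 1
* const view = new Uint8Array(buffer);
* // view[0] === 77 && view[1] === 77 ('MM') and view[3] === 42
* ```
*/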
const encodeImage = (values, width, height, metadata) => {
if (height === undefined || height === null) {
throw new Error(`you passed into encodeImage a height of ${height}`);
}
if (width === undefined || width === null) {
throw new Error(`you passed into encodeImage a width of ${width}`);
}
const ifd = {
256: [width], // ImageWidth
257: [height], // ImageLength
273: [numBytesInIfd], // strips offset
278: [height], // RowsPerStrip
305: 'geotiff.js', // no array for ASCII(Z)
};
if (metadata) {
for (const i in metadata) {
if (metadata.hasOwnProperty(i)) {
ifd[i] = metadata[i];
}
}
}
const prfx = new Uint8Array(encodeIfds([ifd]));
const img = new Uint8Array(values);
const samplesPerPixel = ifd[277];
const data = new Uint8Array(numBytesInIfd + (width * height * samplesPerPixel));
times(prfx.length, (i) => {
data[i] = prfx[i];
});
forEach(img, (value, i) => {
data[numBytesInIfd + i] = value;
});
return data.buffer;
};
const convertToTids = (input) => {
const result = {};
for (const key in input) {
if (key !== 'StripOffsets') {
if (!name2code[key]) {
console.error(key, 'not in name2code:', Object.keys(name2code));
}
result[name2code[key]] = input[key];
}
}
return result;
};
const toArray = (input) => {
if (Array.isArray(input)) {
return input;
}
return [input];
};
const metadataDefaults = [
['Compression', 1], // no compression
['PlanarConfiguration', 1],
['ExtraSamples', 0],
];
export function writeGeotiff(data, metadata) {
const isFlattened = typeof data[0] === 'number';
let height;
let numBands;
let width;
let flattenedValues;
if (isFlattened) {
height = metadata.height || metadata.ImageLength;
width = metadata.width || metadata.ImageWidth;
numBands = data.length / (height * width);
flattenedValues = data;
} else {
numBands = data.length;
height = data[0].length;
width = data[0][0].length;
flattenedValues = [];
times(height, (rowIndex) => {
times(width, (columnIndex) => {
times(numBands, (bandIndex) => {
flattenedValues.push(data[bandIndex][rowIndex][columnIndex]);
});
});
});
}
metadata.ImageLength = height;
delete metadata.height;
metadata.ImageWidth = width;
delete metadata.width;
// consult https://www.loc.gov/preservation/digital/formats/content/tiff_tags.shtml
if (!metadata.BitsPerSample) {
metadata.BitsPerSample = times(numBands, () => 8);
}
metadataDefaults.forEach((tag) => {
const key = tag[0];
if (!metadata[key]) {
const value = tag[1];
metadata[key] = value;
}
});
// The color space of the image data.
// 1=black is zero and 2=RGB.
if (!metadata.PhotometricInterpretation) {
metadata.PhotometricInterpretation = metadata.BitsPerSample.length === 3 ? 2 : 1;
}
// The number of components per pixel.
if (!metadata.SamplesPerPixel) {
metadata.SamplesPerPixel = [numBands];
}
if (!metadata.StripByteCounts) {
// we are only writing one strip
metadata.StripByteCounts = [numBands * height * width];
}
if (!metadata.ModelPixelScale) {
// assumes raster takes up exactly the whole globe
metadata.ModelPixelScale = [360 / width, 180 / height, 0];
}
if (!metadata.SampleFormat) {
metadata.SampleFormat = times(numBands, () => 1);
}
// if no projection information was passed in, assume the popular EPSG:4326 geographic projection
if (!metadata.hasOwnProperty('GeographicTypeGeoKey') && !metadata.hasOwnProperty('ProjectedCSTypeGeoKey')) {
metadata.GeographicTypeGeoKey = 4326;
metadata.ModelTiepoint = [0, 0, 0, -180, 90, 0]; // raster fits whole globe
metadata.GeogCitationGeoKey = 'WGS 84';
metadata.GTModelTypeGeoKey = 2;
}
const geoKeys = Object.keys(metadata)
.filter((key) => endsWith(key, 'GeoKey'))
.sort((a, b) => name2code[a] - name2code[b]);
if (!metadata.GeoAsciiParams) {
let geoAsciiParams = '';
geoKeys.forEach((name) => {
const code = Number(name2code[name]);
const tagType = fieldTagTypes[code];
if (tagType === 'ASCII') {
geoAsciiParams += `${metadata[name].toString()}\u0000`;
}
});
if (geoAsciiParams.length > 0) {
metadata.GeoAsciiParams = geoAsciiParams;
}
}
if (!metadata.GeoKeyDirectory) {
const NumberOfKeys = geoKeys.length;
const GeoKeyDirectory = [1, 1, 0, NumberOfKeys];
geoKeys.forEach((geoKey) => {
const KeyID = Number(name2code[geoKey]);
GeoKeyDirectory.push(KeyID);
let Count;
let TIFFTagLocation;
let valueOffset;
if (fieldTagTypes[KeyID] === 'SHORT') {
Count = 1;
TIFFTagLocation = 0;
valueOffset = metadata[geoKey];
} else if (geoKey === 'GeogCitationGeoKey') {
Count = metadata.GeoAsciiParams.length;
TIFFTagLocation = Number(name2code.GeoAsciiParams);
valueOffset = 0;
} else {
console.log(`[geotiff.js] couldn't get TIFFTagLocation for ${geoKey}`);
}
GeoKeyDirectory.push(TIFFTagLocation);
GeoKeyDirectory.push(Count);
GeoKeyDirectory.push(valueOffset);
});
metadata.GeoKeyDirectory = GeoKeyDirectory;
}
// delete GeoKeys from metadata, because they are stored in the GeoKeyDirectory tag
for (const geoKey of geoKeys) {
if (metadata.hasOwnProperty(geoKey)) {
delete metadata[geoKey];
}
}
[
'Compression',
'ExtraSamples',
'GeographicTypeGeoKey',
'GTModelTypeGeoKey',
'GTRasterTypeGeoKey',
'ImageLength', // synonym of ImageHeight
'ImageWidth',
'Orientation',
'PhotometricInterpretation',
'ProjectedCSTypeGeoKey',
'PlanarConfiguration',
'ResolutionUnit',
'SamplesPerPixel',
'XPosition',
'YPosition',
'RowsPerStrip',
].forEach((name) => {
if (metadata[name]) {
metadata[name] = toArray(metadata[name]);
}
});
const encodedMetadata = convertToTids(metadata);
const outputImage = encodeImage(flattenedValues, width, height, encodedMetadata);
return outputImage;
}
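/**
* Usage sketch (illustrative values): writing a single-band 2×2 raster. With no
* projection keys given, the EPSG:4326 whole-globe defaults above are applied:
* ```js
* const data = [[[0, 10], [20, 30]]]; // [band][row][column]
* const buffer = writeGeotiff(data, {});
* // buffer is an ArrayBuffer holding a complete single-strip GeoTIFF
* ```
*/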

View file

@ -0,0 +1,296 @@
export const fieldTagNames = {
// TIFF Baseline
0x013B: 'Artist',
0x0102: 'BitsPerSample',
0x0109: 'CellLength',
0x0108: 'CellWidth',
0x0140: 'ColorMap',
0x0103: 'Compression',
0x8298: 'Copyright',
0x0132: 'DateTime',
0x0152: 'ExtraSamples',
0x010A: 'FillOrder',
0x0121: 'FreeByteCounts',
0x0120: 'FreeOffsets',
0x0123: 'GrayResponseCurve',
0x0122: 'GrayResponseUnit',
0x013C: 'HostComputer',
0x010E: 'ImageDescription',
0x0101: 'ImageLength',
0x0100: 'ImageWidth',
0x010F: 'Make',
0x0119: 'MaxSampleValue',
0x0118: 'MinSampleValue',
0x0110: 'Model',
0x00FE: 'NewSubfileType',
0x0112: 'Orientation',
0x0106: 'PhotometricInterpretation',
0x011C: 'PlanarConfiguration',
0x0128: 'ResolutionUnit',
0x0116: 'RowsPerStrip',
0x0115: 'SamplesPerPixel',
0x0131: 'Software',
0x0117: 'StripByteCounts',
0x0111: 'StripOffsets',
0x00FF: 'SubfileType',
0x0107: 'Threshholding',
0x011A: 'XResolution',
0x011B: 'YResolution',
// TIFF Extended
0x0146: 'BadFaxLines',
0x0147: 'CleanFaxData',
0x0157: 'ClipPath',
0x0148: 'ConsecutiveBadFaxLines',
0x01B1: 'Decode',
0x01B2: 'DefaultImageColor',
0x010D: 'DocumentName',
0x0150: 'DotRange',
0x0141: 'HalftoneHints',
0x015A: 'Indexed',
0x015B: 'JPEGTables',
0x011D: 'PageName',
0x0129: 'PageNumber',
0x013D: 'Predictor',
0x013F: 'PrimaryChromaticities',
0x0214: 'ReferenceBlackWhite',
0x0153: 'SampleFormat',
0x0154: 'SMinSampleValue',
0x0155: 'SMaxSampleValue',
0x022F: 'StripRowCounts',
0x014A: 'SubIFDs',
0x0124: 'T4Options',
0x0125: 'T6Options',
0x0145: 'TileByteCounts',
0x0143: 'TileLength',
0x0144: 'TileOffsets',
0x0142: 'TileWidth',
0x012D: 'TransferFunction',
0x013E: 'WhitePoint',
0x0158: 'XClipPathUnits',
0x011E: 'XPosition',
0x0211: 'YCbCrCoefficients',
0x0213: 'YCbCrPositioning',
0x0212: 'YCbCrSubSampling',
0x0159: 'YClipPathUnits',
0x011F: 'YPosition',
// EXIF
0x9202: 'ApertureValue',
0xA001: 'ColorSpace',
0x9004: 'DateTimeDigitized',
0x9003: 'DateTimeOriginal',
0x8769: 'Exif IFD',
0x9000: 'ExifVersion',
0x829A: 'ExposureTime',
0xA300: 'FileSource',
0x9209: 'Flash',
0xA000: 'FlashpixVersion',
0x829D: 'FNumber',
0xA420: 'ImageUniqueID',
0x9208: 'LightSource',
0x927C: 'MakerNote',
0x9201: 'ShutterSpeedValue',
0x9286: 'UserComment',
// IPTC
0x83BB: 'IPTC',
// ICC
0x8773: 'ICC Profile',
// XMP
0x02BC: 'XMP',
// GDAL
0xA480: 'GDAL_METADATA',
0xA481: 'GDAL_NODATA',
// Photoshop
0x8649: 'Photoshop',
// GeoTiff
0x830E: 'ModelPixelScale',
0x8482: 'ModelTiepoint',
0x85D8: 'ModelTransformation',
0x87AF: 'GeoKeyDirectory',
0x87B0: 'GeoDoubleParams',
0x87B1: 'GeoAsciiParams',
// LERC
0xC5F2: 'LercParameters',
};
export const fieldTags = {};
for (const key in fieldTagNames) {
if (fieldTagNames.hasOwnProperty(key)) {
fieldTags[fieldTagNames[key]] = parseInt(key, 10);
}
}
export const fieldTagTypes = {
256: 'SHORT',
257: 'SHORT',
258: 'SHORT',
259: 'SHORT',
262: 'SHORT',
273: 'LONG',
274: 'SHORT',
277: 'SHORT',
278: 'LONG',
279: 'LONG',
282: 'RATIONAL',
283: 'RATIONAL',
284: 'SHORT',
286: 'SHORT',
287: 'RATIONAL',
296: 'SHORT',
297: 'SHORT',
305: 'ASCII',
306: 'ASCII',
338: 'SHORT',
339: 'SHORT',
513: 'LONG',
514: 'LONG',
1024: 'SHORT',
1025: 'SHORT',
2048: 'SHORT',
2049: 'ASCII',
3072: 'SHORT',
3073: 'ASCII',
33550: 'DOUBLE',
33922: 'DOUBLE',
34264: 'DOUBLE',
34665: 'LONG',
34735: 'SHORT',
34736: 'DOUBLE',
34737: 'ASCII',
42113: 'ASCII',
};
export const arrayFields = [
fieldTags.BitsPerSample,
fieldTags.ExtraSamples,
fieldTags.SampleFormat,
fieldTags.StripByteCounts,
fieldTags.StripOffsets,
fieldTags.StripRowCounts,
fieldTags.TileByteCounts,
fieldTags.TileOffsets,
fieldTags.SubIFDs,
];
export const fieldTypeNames = {
0x0001: 'BYTE',
0x0002: 'ASCII',
0x0003: 'SHORT',
0x0004: 'LONG',
0x0005: 'RATIONAL',
0x0006: 'SBYTE',
0x0007: 'UNDEFINED',
0x0008: 'SSHORT',
0x0009: 'SLONG',
0x000A: 'SRATIONAL',
0x000B: 'FLOAT',
0x000C: 'DOUBLE',
// IFD offset, suggested by https://owl.phy.queensu.ca/~phil/exiftool/standards.html
0x000D: 'IFD',
// introduced by BigTIFF
0x0010: 'LONG8',
0x0011: 'SLONG8',
0x0012: 'IFD8',
};
export const fieldTypes = {};
for (const key in fieldTypeNames) {
if (fieldTypeNames.hasOwnProperty(key)) {
fieldTypes[fieldTypeNames[key]] = parseInt(key, 10);
}
}
export const photometricInterpretations = {
WhiteIsZero: 0,
BlackIsZero: 1,
RGB: 2,
Palette: 3,
TransparencyMask: 4,
CMYK: 5,
YCbCr: 6,
CIELab: 8,
ICCLab: 9,
};
export const ExtraSamplesValues = {
Unspecified: 0,
Assocalpha: 1,
Unassalpha: 2,
};
export const LercParameters = {
Version: 0,
AddCompression: 1,
};
export const LercAddCompression = {
None: 0,
Deflate: 1,
Zstandard: 2,
};
export const geoKeyNames = {
1024: 'GTModelTypeGeoKey',
1025: 'GTRasterTypeGeoKey',
1026: 'GTCitationGeoKey',
2048: 'GeographicTypeGeoKey',
2049: 'GeogCitationGeoKey',
2050: 'GeogGeodeticDatumGeoKey',
2051: 'GeogPrimeMeridianGeoKey',
2052: 'GeogLinearUnitsGeoKey',
2053: 'GeogLinearUnitSizeGeoKey',
2054: 'GeogAngularUnitsGeoKey',
2055: 'GeogAngularUnitSizeGeoKey',
2056: 'GeogEllipsoidGeoKey',
2057: 'GeogSemiMajorAxisGeoKey',
2058: 'GeogSemiMinorAxisGeoKey',
2059: 'GeogInvFlatteningGeoKey',
2060: 'GeogAzimuthUnitsGeoKey',
2061: 'GeogPrimeMeridianLongGeoKey',
2062: 'GeogTOWGS84GeoKey',
3072: 'ProjectedCSTypeGeoKey',
3073: 'PCSCitationGeoKey',
3074: 'ProjectionGeoKey',
3075: 'ProjCoordTransGeoKey',
3076: 'ProjLinearUnitsGeoKey',
3077: 'ProjLinearUnitSizeGeoKey',
3078: 'ProjStdParallel1GeoKey',
3079: 'ProjStdParallel2GeoKey',
3080: 'ProjNatOriginLongGeoKey',
3081: 'ProjNatOriginLatGeoKey',
3082: 'ProjFalseEastingGeoKey',
3083: 'ProjFalseNorthingGeoKey',
3084: 'ProjFalseOriginLongGeoKey',
3085: 'ProjFalseOriginLatGeoKey',
3086: 'ProjFalseOriginEastingGeoKey',
3087: 'ProjFalseOriginNorthingGeoKey',
3088: 'ProjCenterLongGeoKey',
3089: 'ProjCenterLatGeoKey',
3090: 'ProjCenterEastingGeoKey',
3091: 'ProjCenterNorthingGeoKey',
3092: 'ProjScaleAtNatOriginGeoKey',
3093: 'ProjScaleAtCenterGeoKey',
3094: 'ProjAzimuthAngleGeoKey',
3095: 'ProjStraightVertPoleLongGeoKey',
3096: 'ProjRectifiedGridAngleGeoKey',
4096: 'VerticalCSTypeGeoKey',
4097: 'VerticalCitationGeoKey',
4098: 'VerticalDatumGeoKey',
4099: 'VerticalUnitsGeoKey',
};
export const geoKeys = {};
for (const key in geoKeyNames) {
if (geoKeyNames.hasOwnProperty(key)) {
geoKeys[geoKeyNames[key]] = parseInt(key, 10);
}
}

View file

@ -0,0 +1,56 @@
/**
* A no-op logger
*/
class DummyLogger {
log() {}
debug() {}
info() {}
warn() {}
error() {}
time() {}
timeEnd() {}
}
let LOGGER = new DummyLogger();
/**
* Set the logger used by the library.
* @param {object} [logger] the new logger, e.g. `console`; call without arguments to restore the no-op logger
*/
export function setLogger(logger = new DummyLogger()) {
LOGGER = logger;
}
export function debug(...args) {
return LOGGER.debug(...args);
}
export function log(...args) {
return LOGGER.log(...args);
}
export function info(...args) {
return LOGGER.info(...args);
}
export function warn(...args) {
return LOGGER.warn(...args);
}
export function error(...args) {
return LOGGER.error(...args);
}
export function time(...args) {
return LOGGER.time(...args);
}
export function timeEnd(...args) {
return LOGGER.timeEnd(...args);
}
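/**
* Sketch (illustrative): routing the library's internal logging to the console
* and silencing it again:
* ```js
* setLogger(console); // debug/info/warn/... now reach the console
* setLogger(); // restore the no-op DummyLogger
* ```
*/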

View file

@ -0,0 +1,101 @@
import { getDecoder } from './compression/index.js';
const defaultPoolSize = typeof navigator !== 'undefined' ? (navigator.hardwareConcurrency || 2) : 2;
/**
* @module pool
*/
/**
* Pool for workers to decode chunks of the images.
*/
class Pool {
/**
* @constructor
* @param {Number} [size] The size of the pool. Defaults to the number of CPUs
* available. When this parameter is `null` or 0, then the
* decoding will be done in the main thread.
* @param {function(): Worker} [createWorker] A function that creates the decoder worker.
* Defaults to a worker with all decoders that ship with geotiff.js. The `createWorker()`
* function is expected to return a `Worker` compatible with Web Workers. For code that
* runs in Node, [web-worker](https://www.npmjs.com/package/web-worker) is a good choice.
*
* A worker that uses a custom lzw decoder would look like this `my-custom-worker.js` file:
* ```js
* import { addDecoder, getDecoder } from 'geotiff';
* addDecoder(5, () => import ('./my-custom-lzw').then((m) => m.default));
* self.addEventListener('message', async (e) => {
* const { id, fileDirectory, buffer } = e.data;
* const decoder = await getDecoder(fileDirectory);
* const decoded = await decoder.decode(fileDirectory, buffer);
* self.postMessage({ decoded, id }, [decoded]);
* });
* ```
* The way the above code is built into a worker by the `createWorker()` function
* depends on the used bundler. For most bundlers, something like this will work:
* ```js
* function createWorker() {
* return new Worker(new URL('./my-custom-worker.js', import.meta.url));
* }
* ```
*/
constructor(size = defaultPoolSize, createWorker) {
this.workers = null;
this._awaitingDecoder = null;
this.size = size;
this.messageId = 0;
if (size) {
this._awaitingDecoder = createWorker ? Promise.resolve(createWorker) : new Promise((resolve) => {
import('./worker/decoder.js').then((module) => {
resolve(module.create);
});
});
this._awaitingDecoder.then((create) => {
this._awaitingDecoder = null;
this.workers = [];
for (let i = 0; i < size; i++) {
this.workers.push({ worker: create(), idle: true });
}
});
}
}
/**
* Decode the given block of bytes with the compression method set in the file directory.
* @param {object} fileDirectory the image file directory, carrying the compression parameters.
* @param {ArrayBuffer} buffer the array buffer of bytes to decode.
* @returns {Promise<ArrayBuffer>} the decoded result as a `Promise`
*/
async decode(fileDirectory, buffer) {
if (this._awaitingDecoder) {
await this._awaitingDecoder;
}
return this.size === 0
? getDecoder(fileDirectory).then((decoder) => decoder.decode(fileDirectory, buffer))
: new Promise((resolve) => {
const worker = this.workers.find((candidate) => candidate.idle)
|| this.workers[Math.floor(Math.random() * this.size)];
worker.idle = false;
const id = this.messageId++;
const onMessage = (e) => {
if (e.data.id === id) {
worker.idle = true;
resolve(e.data.decoded);
worker.worker.removeEventListener('message', onMessage);
}
};
worker.worker.addEventListener('message', onMessage);
worker.worker.postMessage({ fileDirectory, buffer, id }, [buffer]);
});
}
destroy() {
if (this.workers) {
this.workers.forEach((worker) => {
worker.worker.terminate();
});
this.workers = null;
}
}
}
export default Pool;
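/**
* Usage sketch (illustrative, assuming an environment where the default decoder
* worker can be created): decode rasters in worker threads via a pool:
* ```js
* const pool = new Pool();
* const rasters = await image.readRasters({ pool });
* pool.destroy(); // terminate the workers when done
* ```
*/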

View file

@ -0,0 +1,88 @@
function decodeRowAcc(row, stride) {
let length = row.length - stride;
let offset = 0;
do {
for (let i = stride; i > 0; i--) {
row[offset + stride] += row[offset];
offset++;
}
length -= stride;
} while (length > 0);
}
function decodeRowFloatingPoint(row, stride, bytesPerSample) {
let index = 0;
let count = row.length;
const wc = count / bytesPerSample;
while (count > stride) {
for (let i = stride; i > 0; --i) {
row[index + stride] += row[index];
++index;
}
count -= stride;
}
const copy = row.slice();
for (let i = 0; i < wc; ++i) {
for (let b = 0; b < bytesPerSample; ++b) {
row[(bytesPerSample * i) + b] = copy[((bytesPerSample - b - 1) * wc) + i];
}
}
}
export function applyPredictor(block, predictor, width, height, bitsPerSample,
planarConfiguration) {
if (!predictor || predictor === 1) {
return block;
}
for (let i = 0; i < bitsPerSample.length; ++i) {
if (bitsPerSample[i] % 8 !== 0) {
throw new Error('When decoding with predictor, only multiple of 8 bits are supported.');
}
if (bitsPerSample[i] !== bitsPerSample[0]) {
throw new Error('When decoding with predictor, all samples must have the same size.');
}
}
const bytesPerSample = bitsPerSample[0] / 8;
const stride = planarConfiguration === 2 ? 1 : bitsPerSample.length;
for (let i = 0; i < height; ++i) {
// Last strip will be truncated if height % stripHeight != 0
if (i * stride * width * bytesPerSample >= block.byteLength) {
break;
}
let row;
if (predictor === 2) { // horizontal prediction
switch (bitsPerSample[0]) {
case 8:
row = new Uint8Array(
block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,
);
break;
case 16:
row = new Uint16Array(
block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 2,
);
break;
case 32:
row = new Uint32Array(
block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 4,
);
break;
default:
throw new Error(`Predictor 2 not allowed with ${bitsPerSample[0]} bits per sample.`);
}
decodeRowAcc(row, stride);
} else if (predictor === 3) { // horizontal floating point
row = new Uint8Array(
block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,
);
decodeRowFloatingPoint(row, stride, bytesPerSample);
}
}
return block;
}
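/**
* Worked sketch (illustrative): with horizontal prediction (predictor 2) each
* sample stores the difference to its left neighbour, so decoding accumulates
* along the row:
* ```js
* const block = Uint8Array.from([10, 1, 1, 1]).buffer; // one 4px row, 1 band, 8 bit
* applyPredictor(block, 2, 4, 1, [8], 1);
* // new Uint8Array(block) now reads [10, 11, 12, 13]
* ```
*/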

View file

@ -0,0 +1,211 @@
/**
* @module resample
*/
function copyNewSize(array, width, height, samplesPerPixel = 1) {
return new (Object.getPrototypeOf(array).constructor)(width * height * samplesPerPixel);
}
/**
* Resample the input arrays using nearest neighbor value selection.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
export function resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight) {
const relX = inWidth / outWidth;
const relY = inHeight / outHeight;
return valueArrays.map((array) => {
const newArray = copyNewSize(array, outWidth, outHeight);
for (let y = 0; y < outHeight; ++y) {
const cy = Math.min(Math.round(relY * y), inHeight - 1);
for (let x = 0; x < outWidth; ++x) {
const cx = Math.min(Math.round(relX * x), inWidth - 1);
const value = array[(cy * inWidth) + cx];
newArray[(y * outWidth) + x] = value;
}
}
return newArray;
});
}
// simple linear interpolation, code from:
// https://en.wikipedia.org/wiki/Linear_interpolation#Programming_language_support
function lerp(v0, v1, t) {
return ((1 - t) * v0) + (t * v1);
}
/**
* Resample the input arrays using bilinear interpolation.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
export function resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight) {
const relX = inWidth / outWidth;
const relY = inHeight / outHeight;
return valueArrays.map((array) => {
const newArray = copyNewSize(array, outWidth, outHeight);
for (let y = 0; y < outHeight; ++y) {
const rawY = relY * y;
const yl = Math.floor(rawY);
const yh = Math.min(Math.ceil(rawY), (inHeight - 1));
for (let x = 0; x < outWidth; ++x) {
const rawX = relX * x;
const tx = rawX % 1;
const xl = Math.floor(rawX);
const xh = Math.min(Math.ceil(rawX), (inWidth - 1));
const ll = array[(yl * inWidth) + xl];
const hl = array[(yl * inWidth) + xh];
const lh = array[(yh * inWidth) + xl];
const hh = array[(yh * inWidth) + xh];
const value = lerp(
lerp(ll, hl, tx),
lerp(lh, hh, tx),
rawY % 1,
);
newArray[(y * outWidth) + x] = value;
}
}
return newArray;
});
}
/**
* Resample the input arrays using the selected resampling method.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {string} [method = 'nearest'] The desired resampling method
* @returns {TypedArray[]} The resampled rasters
*/
export function resample(valueArrays, inWidth, inHeight, outWidth, outHeight, method = 'nearest') {
switch (method.toLowerCase()) {
case 'nearest':
return resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight);
case 'bilinear':
case 'linear':
return resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight);
default:
throw new Error(`Unsupported resampling method: '${method}'`);
}
}
/**
* Resample the pixel interleaved input array using nearest neighbor value selection.
* @param {TypedArray} valueArray The input array to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @returns {TypedArray} The resampled raster
*/
export function resampleNearestInterleaved(
valueArray, inWidth, inHeight, outWidth, outHeight, samples) {
const relX = inWidth / outWidth;
const relY = inHeight / outHeight;
const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);
for (let y = 0; y < outHeight; ++y) {
const cy = Math.min(Math.round(relY * y), inHeight - 1);
for (let x = 0; x < outWidth; ++x) {
const cx = Math.min(Math.round(relX * x), inWidth - 1);
for (let i = 0; i < samples; ++i) {
const value = valueArray[(cy * inWidth * samples) + (cx * samples) + i];
newArray[(y * outWidth * samples) + (x * samples) + i] = value;
}
}
}
return newArray;
}
/**
* Resample the pixel interleaved input array using bilinear interpolation.
* @param {TypedArray} valueArray The input array to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @returns {TypedArray} The resampled raster
*/
export function resampleBilinearInterleaved(
valueArray, inWidth, inHeight, outWidth, outHeight, samples) {
const relX = inWidth / outWidth;
const relY = inHeight / outHeight;
const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);
for (let y = 0; y < outHeight; ++y) {
const rawY = relY * y;
const yl = Math.floor(rawY);
const yh = Math.min(Math.ceil(rawY), (inHeight - 1));
for (let x = 0; x < outWidth; ++x) {
const rawX = relX * x;
const tx = rawX % 1;
const xl = Math.floor(rawX);
const xh = Math.min(Math.ceil(rawX), (inWidth - 1));
for (let i = 0; i < samples; ++i) {
const ll = valueArray[(yl * inWidth * samples) + (xl * samples) + i];
const hl = valueArray[(yl * inWidth * samples) + (xh * samples) + i];
const lh = valueArray[(yh * inWidth * samples) + (xl * samples) + i];
const hh = valueArray[(yh * inWidth * samples) + (xh * samples) + i];
const value = lerp(
lerp(ll, hl, tx),
lerp(lh, hh, tx),
rawY % 1,
);
newArray[(y * outWidth * samples) + (x * samples) + i] = value;
}
}
}
return newArray;
}
/**
* Resample the pixel interleaved input array using the selected resampling method.
* @param {TypedArray} valueArray The input array to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @param {string} [method = 'nearest'] The desired resampling method
* @returns {TypedArray} The resampled raster
*/
export function resampleInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples, method = 'nearest') {
switch (method.toLowerCase()) {
case 'nearest':
return resampleNearestInterleaved(
valueArray, inWidth, inHeight, outWidth, outHeight, samples,
);
case 'bilinear':
case 'linear':
return resampleBilinearInterleaved(
valueArray, inWidth, inHeight, outWidth, outHeight, samples,
);
default:
throw new Error(`Unsupported resampling method: '${method}'`);
}
}
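/**
* Sketch (illustrative): upsampling a single 2×2 band to 4×4 with both methods:
* ```js
* const band = Uint8Array.from([0, 10, 20, 30]); // row-major 2×2
* const [nearest] = resample([band], 2, 2, 4, 4, 'nearest');
* const [bilinear] = resample([band], 2, 2, 4, 4, 'bilinear');
* // both results are Uint8Arrays of length 16
* ```
*/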

View file

@ -0,0 +1,111 @@
export function fromWhiteIsZero(raster, max) {
const { width, height } = raster;
const rgbRaster = new Uint8Array(width * height * 3);
let value;
for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
value = 256 - (raster[i] / max * 256);
rgbRaster[j] = value;
rgbRaster[j + 1] = value;
rgbRaster[j + 2] = value;
}
return rgbRaster;
}
export function fromBlackIsZero(raster, max) {
const { width, height } = raster;
const rgbRaster = new Uint8Array(width * height * 3);
let value;
for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
value = raster[i] / max * 256;
rgbRaster[j] = value;
rgbRaster[j + 1] = value;
rgbRaster[j + 2] = value;
}
return rgbRaster;
}
export function fromPalette(raster, colorMap) {
const { width, height } = raster;
const rgbRaster = new Uint8Array(width * height * 3);
const greenOffset = colorMap.length / 3;
const blueOffset = colorMap.length / 3 * 2;
for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
const mapIndex = raster[i];
rgbRaster[j] = colorMap[mapIndex] / 65536 * 256;
rgbRaster[j + 1] = colorMap[mapIndex + greenOffset] / 65536 * 256;
rgbRaster[j + 2] = colorMap[mapIndex + blueOffset] / 65536 * 256;
}
return rgbRaster;
}
export function fromCMYK(cmykRaster) {
const { width, height } = cmykRaster;
const rgbRaster = new Uint8Array(width * height * 3);
for (let i = 0, j = 0; i < cmykRaster.length; i += 4, j += 3) {
const c = cmykRaster[i];
const m = cmykRaster[i + 1];
const y = cmykRaster[i + 2];
const k = cmykRaster[i + 3];
rgbRaster[j] = 255 * ((255 - c) / 256) * ((255 - k) / 256);
rgbRaster[j + 1] = 255 * ((255 - m) / 256) * ((255 - k) / 256);
rgbRaster[j + 2] = 255 * ((255 - y) / 256) * ((255 - k) / 256);
}
return rgbRaster;
}
export function fromYCbCr(yCbCrRaster) {
const { width, height } = yCbCrRaster;
const rgbRaster = new Uint8ClampedArray(width * height * 3);
for (let i = 0, j = 0; i < yCbCrRaster.length; i += 3, j += 3) {
const y = yCbCrRaster[i];
const cb = yCbCrRaster[i + 1];
const cr = yCbCrRaster[i + 2];
rgbRaster[j] = (y + (1.40200 * (cr - 0x80)));
rgbRaster[j + 1] = (y - (0.34414 * (cb - 0x80)) - (0.71414 * (cr - 0x80)));
rgbRaster[j + 2] = (y + (1.77200 * (cb - 0x80)));
}
return rgbRaster;
}
const Xn = 0.95047;
const Yn = 1.00000;
const Zn = 1.08883;
// from https://github.com/antimatter15/rgb-lab/blob/master/color.js
export function fromCIELab(cieLabRaster) {
const { width, height } = cieLabRaster;
const rgbRaster = new Uint8Array(width * height * 3);
for (let i = 0, j = 0; i < cieLabRaster.length; i += 3, j += 3) {
const L = cieLabRaster[i + 0];
const a_ = cieLabRaster[i + 1] << 24 >> 24; // conversion from uint8 to int8
const b_ = cieLabRaster[i + 2] << 24 >> 24; // same
let y = (L + 16) / 116;
let x = (a_ / 500) + y;
let z = y - (b_ / 200);
let r;
let g;
let b;
x = Xn * ((x * x * x > 0.008856) ? x * x * x : (x - (16 / 116)) / 7.787);
y = Yn * ((y * y * y > 0.008856) ? y * y * y : (y - (16 / 116)) / 7.787);
z = Zn * ((z * z * z > 0.008856) ? z * z * z : (z - (16 / 116)) / 7.787);
r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);
g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);
b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);
r = (r > 0.0031308) ? ((1.055 * (r ** (1 / 2.4))) - 0.055) : 12.92 * r;
g = (g > 0.0031308) ? ((1.055 * (g ** (1 / 2.4))) - 0.055) : 12.92 * g;
b = (b > 0.0031308) ? ((1.055 * (b ** (1 / 2.4))) - 0.055) : 12.92 * b;
rgbRaster[j] = Math.max(0, Math.min(1, r)) * 255;
rgbRaster[j + 1] = Math.max(0, Math.min(1, g)) * 255;
rgbRaster[j + 2] = Math.max(0, Math.min(1, b)) * 255;
}
return rgbRaster;
}
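/**
* Sketch (illustrative): the converters expect rasters that carry `width` and
* `height` properties, as produced by `readRasters({ interleave: true })`:
* ```js
* const raster = Uint8Array.from([0, 128, 255]);
* raster.width = 3;
* raster.height = 1;
* const rgb = fromBlackIsZero(raster, 256); // max = 2 ** 8 for 8-bit data
* // rgb is a Uint8Array(9): three grey pixels with values 0, 128, 255
* ```
*/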

View file

@ -0,0 +1,20 @@
import { BaseSource } from './basesource.js';
import { AbortError } from '../utils.js';
class ArrayBufferSource extends BaseSource {
constructor(arrayBuffer) {
super();
this.arrayBuffer = arrayBuffer;
}
fetchSlice(slice, signal) {
if (signal && signal.aborted) {
throw new AbortError('Request aborted');
}
return this.arrayBuffer.slice(slice.offset, slice.offset + slice.length);
}
}
export function makeBufferSource(arrayBuffer) {
return new ArrayBufferSource(arrayBuffer);
}

View file

@ -0,0 +1,38 @@
/**
* @typedef Slice
* @property {number} offset
* @property {number} length
*/
export class BaseSource {
/**
*
* @param {Slice[]} slices
* @returns {Promise<ArrayBuffer[]>}
*/
async fetch(slices, signal = undefined) {
return Promise.all(
slices.map((slice) => this.fetchSlice(slice, signal)),
);
}
/**
*
* @param {Slice} slice
* @returns {Promise<ArrayBuffer>}
*/
async fetchSlice(slice) {
throw new Error(`fetching of slice ${slice} is not possible: not implemented`);
}
/**
* Returns the filesize if already determined and null otherwise
*/
get fileSize() {
return null;
}
async close() {
// no-op by default
}
}

View file

@ -0,0 +1,296 @@
import QuickLRU from 'quick-lru';
import { BaseSource } from './basesource.js';
import { AbortError, AggregateError, wait, zip } from '../utils.js';
class Block {
/**
*
* @param {number} offset
* @param {number} length
* @param {ArrayBuffer} [data]
*/
constructor(offset, length, data = null) {
this.offset = offset;
this.length = length;
this.data = data;
}
/**
* @returns {number} the offset of the first byte after this block (offset + length)
*/
get top() {
return this.offset + this.length;
}
}
class BlockGroup {
/**
*
* @param {number} offset
* @param {number} length
* @param {number[]} blockIds
*/
constructor(offset, length, blockIds) {
this.offset = offset;
this.length = length;
this.blockIds = blockIds;
}
}
export class BlockedSource extends BaseSource {
/**
*
* @param {BaseSource} source The underlying source that shall be blocked and cached
* @param {object} options
* @param {number} [options.blockSize]
* @param {number} [options.cacheSize]
*/
constructor(source, { blockSize = 65536, cacheSize = 100 } = {}) {
super();
this.source = source;
this.blockSize = blockSize;
this.blockCache = new QuickLRU({
maxSize: cacheSize,
onEviction: (blockId, block) => {
this.evictedBlocks.set(blockId, block);
},
});
/** @type {Map<number, Block>} */
this.evictedBlocks = new Map();
// mapping blockId -> Block instance
this.blockRequests = new Map();
// set of blockIds missing for the current requests
this.blockIdsToFetch = new Set();
this.abortedBlockIds = new Set();
}
get fileSize() {
return this.source.fileSize;
}
/**
*
* @param {import("./basesource").Slice[]} slices
*/
async fetch(slices, signal) {
const blockRequests = [];
const missingBlockIds = [];
const allBlockIds = [];
this.evictedBlocks.clear();
for (const { offset, length } of slices) {
let top = offset + length;
const { fileSize } = this;
if (fileSize !== null) {
top = Math.min(top, fileSize);
}
const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
for (let current = firstBlockOffset; current < top; current += this.blockSize) {
const blockId = Math.floor(current / this.blockSize);
if (!this.blockCache.has(blockId) && !this.blockRequests.has(blockId)) {
this.blockIdsToFetch.add(blockId);
missingBlockIds.push(blockId);
}
if (this.blockRequests.has(blockId)) {
blockRequests.push(this.blockRequests.get(blockId));
}
allBlockIds.push(blockId);
}
}
// allow additional block requests to accumulate
await wait();
this.fetchBlocks(signal);
// Gather all of the new block requests that this fetch call contributed.
const missingRequests = [];
for (const blockId of missingBlockIds) {
// The requested missing block could already be in the cache
// instead of having its request still be outstanding.
if (this.blockRequests.has(blockId)) {
missingRequests.push(this.blockRequests.get(blockId));
}
}
// Actually await all pending requests that are needed for this `fetch`.
await Promise.allSettled(blockRequests);
await Promise.allSettled(missingRequests);
// Perform retries if a block was interrupted by a previous signal
const abortedBlockRequests = [];
const abortedBlockIds = allBlockIds
.filter((id) => this.abortedBlockIds.has(id) || !this.blockCache.has(id));
abortedBlockIds.forEach((id) => this.blockIdsToFetch.add(id));
// start the retry of some blocks if required
if (abortedBlockIds.length > 0 && signal && !signal.aborted) {
this.fetchBlocks(null);
for (const blockId of abortedBlockIds) {
const block = this.blockRequests.get(blockId);
if (!block) {
throw new Error(`Block ${blockId} is not in the block requests`);
}
abortedBlockRequests.push(block);
}
await Promise.allSettled(abortedBlockRequests);
}
// throw an abort error
if (signal && signal.aborted) {
throw new AbortError('Request was aborted');
}
const blocks = allBlockIds.map((id) => this.blockCache.get(id) || this.evictedBlocks.get(id));
const failedBlocks = blocks.filter((i) => !i);
if (failedBlocks.length) {
throw new AggregateError(failedBlocks, 'Request failed');
}
// create a final Map, with all required blocks for this request to satisfy
const requiredBlocks = new Map(zip(allBlockIds, blocks));
// TODO: satisfy each slice
return this.readSliceData(slices, requiredBlocks);
}
/**
*
* @param {AbortSignal} signal
*/
fetchBlocks(signal) {
// check if we still need to
if (this.blockIdsToFetch.size > 0) {
const groups = this.groupBlocks(this.blockIdsToFetch);
// start requesting slices of data
const groupRequests = this.source.fetch(groups, signal);
for (let groupIndex = 0; groupIndex < groups.length; ++groupIndex) {
const group = groups[groupIndex];
for (const blockId of group.blockIds) {
// make an async IIFE for each block
this.blockRequests.set(blockId, (async () => {
try {
const response = (await groupRequests)[groupIndex];
const blockOffset = blockId * this.blockSize;
const o = blockOffset - response.offset;
const t = Math.min(o + this.blockSize, response.data.byteLength);
const data = response.data.slice(o, t);
const block = new Block(
blockOffset,
data.byteLength,
data,
);
this.blockCache.set(blockId, block);
this.abortedBlockIds.delete(blockId);
} catch (err) {
if (err.name === 'AbortError') {
// store the signal here, we need it to determine later if an
// error was caused by this signal
err.signal = signal;
this.blockCache.delete(blockId);
this.abortedBlockIds.add(blockId);
} else {
throw err;
}
} finally {
this.blockRequests.delete(blockId);
}
})());
}
}
this.blockIdsToFetch.clear();
}
}
/**
*
* @param {Set} blockIds
* @returns {BlockGroup[]}
*/
groupBlocks(blockIds) {
const sortedBlockIds = Array.from(blockIds).sort((a, b) => a - b);
if (sortedBlockIds.length === 0) {
return [];
}
let current = [];
let lastBlockId = null;
const groups = [];
for (const blockId of sortedBlockIds) {
if (lastBlockId === null || lastBlockId + 1 === blockId) {
current.push(blockId);
lastBlockId = blockId;
} else {
groups.push(new BlockGroup(
current[0] * this.blockSize,
current.length * this.blockSize,
current,
));
current = [blockId];
lastBlockId = blockId;
}
}
groups.push(new BlockGroup(
current[0] * this.blockSize,
current.length * this.blockSize,
current,
));
return groups;
}
/**
*
* @param {import("./basesource").Slice[]} slices
* @param {Map} blocks
*/
readSliceData(slices, blocks) {
return slices.map((slice) => {
let top = slice.offset + slice.length;
if (this.fileSize !== null) {
top = Math.min(this.fileSize, top);
}
const blockIdLow = Math.floor(slice.offset / this.blockSize);
const blockIdHigh = Math.floor(top / this.blockSize);
const sliceData = new ArrayBuffer(slice.length);
const sliceView = new Uint8Array(sliceData);
for (let blockId = blockIdLow; blockId <= blockIdHigh; ++blockId) {
const block = blocks.get(blockId);
const delta = block.offset - slice.offset;
const topDelta = block.top - top;
let blockInnerOffset = 0;
let rangeInnerOffset = 0;
let usedBlockLength;
if (delta < 0) {
blockInnerOffset = -delta;
} else if (delta > 0) {
rangeInnerOffset = delta;
}
if (topDelta < 0) {
usedBlockLength = block.length - blockInnerOffset;
} else {
usedBlockLength = top - block.offset - blockInnerOffset;
}
const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);
sliceView.set(blockView, rangeInnerOffset);
}
return sliceData;
});
}
}
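/**
* Usage sketch (illustrative; the import path and file name are assumptions):
* wrap any BaseSource in a BlockedSource to add fixed-size block caching:
* ```js
* import { makeFileSource } from './filesource.js';
* const source = new BlockedSource(makeFileSource('./data.tif'), { blockSize: 65536, cacheSize: 100 });
* const [buffer] = await source.fetch([{ offset: 0, length: 1024 }]);
* ```
*/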

View file

@ -0,0 +1,45 @@
export class BaseResponse {
/**
* Returns whether the response has an OK (2xx) status code
*/
get ok() {
return this.status >= 200 && this.status <= 299;
}
/**
* Returns the status code of the response
*/
get status() {
throw new Error('not implemented');
}
/**
* Returns the value of the specified header
* @param {string} headerName the header name
* @returns {string} the header value
*/
getHeader(headerName) { // eslint-disable-line no-unused-vars
throw new Error('not implemented');
}
/**
* @returns {ArrayBuffer} the response data of the request
*/
async getData() {
throw new Error('not implemented');
}
}
export class BaseClient {
constructor(url) {
this.url = url;
}
/**
* Send a request with the options
* @param {object} [options]
*/
async request({ headers, credentials, signal } = {}) { // eslint-disable-line no-unused-vars
throw new Error('request is not implemented');
}
}

View file

@ -0,0 +1,41 @@
import { BaseClient, BaseResponse } from './base.js';
class FetchResponse extends BaseResponse {
/**
* BaseResponse facade for fetch API Response
* @param {Response} response
*/
constructor(response) {
super();
this.response = response;
}
get status() {
return this.response.status;
}
getHeader(name) {
return this.response.headers.get(name);
}
async getData() {
const data = this.response.arrayBuffer
? await this.response.arrayBuffer()
: (await this.response.buffer()).buffer;
return data;
}
}
export class FetchClient extends BaseClient {
constructor(url, credentials) {
super(url);
this.credentials = credentials;
}
async request({ headers, credentials, signal } = {}) {
const response = await fetch(this.url, {
headers, credentials, signal,
});
return new FetchResponse(response);
}
}

View file

@ -0,0 +1,81 @@
import http from 'http';
import https from 'https';
import urlMod from 'url';
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
class HttpResponse extends BaseResponse {
/**
* BaseResponse facade for node HTTP/HTTPS API Response
* @param {http.ServerResponse} response
*/
constructor(response, dataPromise) {
super();
this.response = response;
this.dataPromise = dataPromise;
}
get status() {
return this.response.statusCode;
}
getHeader(name) {
return this.response.headers[name];
}
async getData() {
const data = await this.dataPromise;
return data;
}
}
export class HttpClient extends BaseClient {
constructor(url) {
super(url);
this.parsedUrl = urlMod.parse(this.url);
this.httpApi = (this.parsedUrl.protocol === 'http:' ? http : https);
}
constructRequest(headers, signal) {
return new Promise((resolve, reject) => {
const request = this.httpApi.get(
{
...this.parsedUrl,
headers,
},
(response) => {
const dataPromise = new Promise((resolveData) => {
const chunks = [];
// collect chunks
response.on('data', (chunk) => {
chunks.push(chunk);
});
// concatenate all chunks and resolve the promise with the resulting buffer
response.on('end', () => {
const data = Buffer.concat(chunks).buffer;
resolveData(data);
});
response.on('error', reject);
});
resolve(new HttpResponse(response, dataPromise));
},
);
request.on('error', reject);
if (signal) {
if (signal.aborted) {
request.destroy(new AbortError('Request aborted'));
}
signal.addEventListener('abort', () => request.destroy(new AbortError('Request aborted')));
}
});
}
async request({ headers, signal } = {}) {
const response = await this.constructRequest(headers, signal);
return response;
}
}

View file

@ -0,0 +1,61 @@
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
class XHRResponse extends BaseResponse {
/**
* BaseResponse facade for XMLHttpRequest
* @param {XMLHttpRequest} xhr
* @param {ArrayBuffer} data
*/
constructor(xhr, data) {
super();
this.xhr = xhr;
this.data = data;
}
get status() {
return this.xhr.status;
}
getHeader(name) {
return this.xhr.getResponseHeader(name);
}
async getData() {
return this.data;
}
}
export class XHRClient extends BaseClient {
constructRequest(headers, signal) {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.open('GET', this.url);
xhr.responseType = 'arraybuffer';
for (const [key, value] of Object.entries(headers)) {
xhr.setRequestHeader(key, value);
}
// hook signals
xhr.onload = () => {
const data = xhr.response;
resolve(new XHRResponse(xhr, data));
};
xhr.onerror = reject;
xhr.onabort = () => reject(new AbortError('Request aborted'));
xhr.send();
if (signal) {
if (signal.aborted) {
xhr.abort();
}
signal.addEventListener('abort', () => xhr.abort());
}
});
}
async request({ headers, signal } = {}) {
const response = await this.constructRequest(headers, signal);
return response;
}
}

View file

@ -0,0 +1,68 @@
import fs from 'fs';
import { BaseSource } from './basesource.js';
function closeAsync(fd) {
return new Promise((resolve, reject) => {
fs.close(fd, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function openAsync(path, flags, mode = undefined) {
return new Promise((resolve, reject) => {
fs.open(path, flags, mode, (err, fd) => {
if (err) {
reject(err);
} else {
resolve(fd);
}
});
});
}
function readAsync(...args) {
return new Promise((resolve, reject) => {
fs.read(...args, (err, bytesRead, buffer) => {
if (err) {
reject(err);
} else {
resolve({ bytesRead, buffer });
}
});
});
}
class FileSource extends BaseSource {
constructor(path) {
super();
this.path = path;
this.openRequest = openAsync(path, 'r');
}
async fetchSlice(slice) {
// TODO: use `signal`
const fd = await this.openRequest;
const { buffer } = await readAsync(
fd,
Buffer.alloc(slice.length),
0,
slice.length,
slice.offset,
);
return buffer.buffer;
}
async close() {
const fd = await this.openRequest;
await closeAsync(fd);
}
}
export function makeFileSource(path) {
return new FileSource(path);
}

View file

@ -0,0 +1,32 @@
import { BaseSource } from './basesource.js';
class FileReaderSource extends BaseSource {
constructor(file) {
super();
this.file = file;
}
async fetchSlice(slice, signal) {
return new Promise((resolve, reject) => {
const blob = this.file.slice(slice.offset, slice.offset + slice.length);
const reader = new FileReader();
reader.onload = (event) => resolve(event.target.result);
reader.onerror = reject;
reader.onabort = reject;
reader.readAsArrayBuffer(blob);
if (signal) {
signal.addEventListener('abort', () => reader.abort());
}
});
}
}
/**
* Create a new source from a given file/blob.
* @param {Blob} file The file or blob to read from.
* @returns The constructed source
*/
export function makeFileReaderSource(file) {
return new FileReaderSource(file);
}

View file

@ -0,0 +1,145 @@
const CRLFCRLF = '\r\n\r\n';
/*
* Shim for 'Object.fromEntries'
*/
function itemsToObject(items) {
if (typeof Object.fromEntries !== 'undefined') {
return Object.fromEntries(items);
}
const obj = {};
for (const [key, value] of items) {
obj[key.toLowerCase()] = value;
}
return obj;
}
/**
* Parse HTTP headers from a given string.
* @param {String} text the text to parse the headers from
* @returns {Object} the parsed headers with lowercase keys
*/
function parseHeaders(text) {
const items = text
.split('\r\n')
.map((line) => {
const kv = line.split(':').map((str) => str.trim());
kv[0] = kv[0].toLowerCase();
return kv;
});
return itemsToObject(items);
}
/**
* Parse a 'Content-Type' header value to the content-type and parameters
* @param {String} rawContentType the raw string to parse from
* @returns {Object} the parsed content type with the fields: type and params
*/
export function parseContentType(rawContentType) {
const [type, ...rawParams] = rawContentType.split(';').map((s) => s.trim());
const paramsItems = rawParams.map((param) => param.split('='));
return { type, params: itemsToObject(paramsItems) };
}
/**
* Parse a 'Content-Range' header value to its start, end, and total parts
* @param {String} rawContentRange the raw string to parse from
* @returns {Object} the parsed parts
*/
export function parseContentRange(rawContentRange) {
let start;
let end;
let total;
if (rawContentRange) {
[, start, end, total] = rawContentRange.match(/bytes (\d+)-(\d+)\/(\d+)/);
start = parseInt(start, 10);
end = parseInt(end, 10);
total = parseInt(total, 10);
}
return { start, end, total };
}
/**
* Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
* Each item in the list has the following properties:
* - headers: the HTTP headers
* - data: the sliced ArrayBuffer for that specific part
* - offset: the offset of the byterange within its originating file
* - length: the length of the byterange
* @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
* @param {String} boundary the boundary string used to split the sections
* @returns {Object[]} the parsed byteranges
*/
export function parseByteRanges(responseArrayBuffer, boundary) {
let offset = null;
const decoder = new TextDecoder('ascii');
const out = [];
const startBoundary = `--${boundary}`;
const endBoundary = `${startBoundary}--`;
// search for the initial boundary, may be offset by some bytes
// TODO: more efficient to check for `--` in bytes directly
for (let i = 0; i < 10; ++i) {
const text = decoder.decode(
new Uint8Array(responseArrayBuffer, i, startBoundary.length),
);
if (text === startBoundary) {
offset = i;
}
}
if (offset === null) {
throw new Error('Could not find initial boundary');
}
while (offset < responseArrayBuffer.byteLength) {
const text = decoder.decode(
new Uint8Array(responseArrayBuffer, offset,
Math.min(startBoundary.length + 1024, responseArrayBuffer.byteLength - offset),
),
);
// break if we arrived at the end
if (text.length === 0 || text.startsWith(endBoundary)) {
break;
}
// assert that we are actually dealing with a byterange and are at the correct offset
if (!text.startsWith(startBoundary)) {
throw new Error('Part does not start with boundary');
}
// get a substring from where we read the headers
const innerText = text.substr(startBoundary.length + 2);
if (innerText.length === 0) {
break;
}
// find the double linebreak that denotes the end of the headers
const endOfHeaders = innerText.indexOf(CRLFCRLF);
// parse the headers to get the content range size
const headers = parseHeaders(innerText.substr(0, endOfHeaders));
const { start, end, total } = parseContentRange(headers['content-range']);
// calculate the length of the slice and the next offset
const startOfData = offset + startBoundary.length + endOfHeaders + CRLFCRLF.length;
const length = parseInt(end, 10) + 1 - parseInt(start, 10);
out.push({
headers,
data: responseArrayBuffer.slice(startOfData, startOfData + length),
offset: start,
length,
fileSize: total,
});
offset = startOfData + length + 4;
}
return out;
}
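/**
* Sketch (illustrative): parsing the range-related headers:
* ```js
* parseContentRange('bytes 0-99/1000'); // { start: 0, end: 99, total: 1000 }
* parseContentType('multipart/byteranges; boundary=abc');
* // { type: 'multipart/byteranges', params: { boundary: 'abc' } }
* ```
*/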

View file

@ -0,0 +1,196 @@
import { parseByteRanges, parseContentRange, parseContentType } from './httputils.js';
import { BaseSource } from './basesource.js';
import { BlockedSource } from './blockedsource.js';
import { FetchClient } from './client/fetch.js';
import { XHRClient } from './client/xhr.js';
import { HttpClient } from './client/http.js';
class RemoteSource extends BaseSource {
/**
*
* @param {BaseClient} client the client used to perform the requests
* @param {object} headers additional headers to send with each request
* @param {number} maxRanges the maximum number of ranges per multi-range request (0 disables them)
* @param {boolean} allowFullFile whether a full-file response is acceptable
*/
constructor(client, headers, maxRanges, allowFullFile) {
super();
this.client = client;
this.headers = headers;
this.maxRanges = maxRanges;
this.allowFullFile = allowFullFile;
this._fileSize = null;
}
/**
* @param {Slice[]} slices the slices to fetch
* @param {AbortSignal} [signal] optional signal to abort the requests
*/
async fetch(slices, signal) {
// if we allow multi-ranges, split the incoming request into that many sub-requests
// and join them afterwards
if (this.maxRanges >= slices.length) {
return this.fetchSlices(slices, signal);
} else if (this.maxRanges > 0 && slices.length > 1) {
// split into multiple multi-range requests and join the results afterwards
const subSlicesRequests = [];
for (let i = 0; i < slices.length; i += this.maxRanges) {
subSlicesRequests.push(
this.fetchSlices(slices.slice(i, i + this.maxRanges), signal),
);
}
return (await Promise.all(subSlicesRequests)).flat();
}
// otherwise make a single request for each slice
return Promise.all(
slices.map((slice) => this.fetchSlice(slice, signal)),
);
}
async fetchSlices(slices, signal) {
const response = await this.client.request({
headers: {
...this.headers,
// HTTP byte ranges are inclusive, so the last byte is offset + length - 1
Range: `bytes=${slices
.map(({ offset, length }) => `${offset}-${offset + length - 1}`)
.join(',')
}`,
},
signal,
});
if (!response.ok) {
throw new Error('Error fetching data.');
} else if (response.status === 206) {
const { type, params } = parseContentType(response.getHeader('content-type'));
if (type === 'multipart/byteranges') {
const byteRanges = parseByteRanges(await response.getData(), params.boundary);
this._fileSize = byteRanges[0].fileSize || null;
return byteRanges;
}
const data = await response.getData();
const { start, end, total } = parseContentRange(response.getHeader('content-range'));
this._fileSize = total || null;
const first = [{
data,
offset: start,
// content-range positions are inclusive, hence the + 1
length: end + 1 - start,
}];
if (slices.length > 1) {
// we requested more than one slice, but the server returned only the first.
// some HTTP servers don't support multi-range requests and only honor
// the first range, so fetch the remaining slices individually in parallel
const others = await Promise.all(slices.slice(1).map((slice) => this.fetchSlice(slice, signal)));
return first.concat(others);
}
return first;
} else {
if (!this.allowFullFile) {
throw new Error('Server responded with full file');
}
const data = await response.getData();
this._fileSize = data.byteLength;
return [{
data,
offset: 0,
length: data.byteLength,
}];
}
}
async fetchSlice(slice, signal) {
const { offset, length } = slice;
const response = await this.client.request({
headers: {
...this.headers,
// HTTP byte ranges are inclusive
Range: `bytes=${offset}-${offset + length - 1}`,
},
signal,
});
// check the response was okay and if the server actually understands range requests
if (!response.ok) {
throw new Error('Error fetching data.');
} else if (response.status === 206) {
const data = await response.getData();
const { total } = parseContentRange(response.getHeader('content-range'));
this._fileSize = total || null;
return {
data,
offset,
length,
};
} else {
if (!this.allowFullFile) {
throw new Error('Server responded with full file');
}
const data = await response.getData();
this._fileSize = data.byteLength;
return {
data,
offset: 0,
length: data.byteLength,
};
}
}
get fileSize() {
return this._fileSize;
}
}
function maybeWrapInBlockedSource(source, { blockSize, cacheSize }) {
if (blockSize === null) {
return source;
}
return new BlockedSource(source, { blockSize, cacheSize });
}
export function makeFetchSource(url, { headers = {}, credentials, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new FetchClient(url, credentials);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeXHRSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new XHRClient(url);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeHttpSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new HttpClient(url);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeCustomSource(client, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
/**
* Creates a remote source for the given URL, using `fetch` when available,
* falling back to XMLHttpRequest and finally to the Node.js http client.
* @param {string} url the URL of the remote file
* @param {object} options client and block options
*/
export function makeRemoteSource(url, { forceXHR = false, ...clientOptions } = {}) {
if (typeof fetch === 'function' && !forceXHR) {
return makeFetchSource(url, clientOptions);
}
if (typeof XMLHttpRequest !== 'undefined') {
return makeXHRSource(url, clientOptions);
}
return makeHttpSource(url, clientOptions);
}
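// Usage sketch (URL and offsets are illustrative; by default the source is
// wrapped in a BlockedSource, so reads are cached in blocks):
//   const source = makeRemoteSource('https://example.com/data.tiff', { maxRanges: 5 });
//   const ranges = await source.fetch([{ offset: 0, length: 1024 }]);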

View file

@ -0,0 +1,158 @@
export function assign(target, source) {
for (const key in source) {
if (source.hasOwnProperty(key)) {
target[key] = source[key];
}
}
}
export function chunk(iterable, length) {
const results = [];
const lengthOfIterable = iterable.length;
for (let i = 0; i < lengthOfIterable; i += length) {
const chunked = [];
for (let ci = i; ci < Math.min(i + length, lengthOfIterable); ci++) {
chunked.push(iterable[ci]);
}
results.push(chunked);
}
return results;
}
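// e.g. chunk([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]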
export function endsWith(string, expectedEnding) {
if (string.length < expectedEnding.length) {
return false;
}
const actualEnding = string.substr(string.length - expectedEnding.length);
return actualEnding === expectedEnding;
}
export function forEach(iterable, func) {
const { length } = iterable;
for (let i = 0; i < length; i++) {
func(iterable[i], i);
}
}
export function invert(oldObj) {
const newObj = {};
for (const key in oldObj) {
if (oldObj.hasOwnProperty(key)) {
const value = oldObj[key];
newObj[value] = key;
}
}
return newObj;
}
export function range(n) {
const results = [];
for (let i = 0; i < n; i++) {
results.push(i);
}
return results;
}
export function times(numTimes, func) {
const results = [];
for (let i = 0; i < numTimes; i++) {
results.push(func(i));
}
return results;
}
export function toArray(iterable) {
const results = [];
const { length } = iterable;
for (let i = 0; i < length; i++) {
results.push(iterable[i]);
}
return results;
}
export function toArrayRecursively(input) {
if (input.length) {
return toArray(input).map(toArrayRecursively);
}
return input;
}
// copied from https://github.com/academia-de-codigo/parse-content-range-header/blob/master/index.js
export function parseContentRange(headerValue) {
if (!headerValue) {
return null;
}
if (typeof headerValue !== 'string') {
throw new Error('invalid argument');
}
const parseInt = (number) => Number.parseInt(number, 10);
// Check for presence of unit
let matches = headerValue.match(/^(\w*) /);
const unit = matches && matches[1];
// check for start-end/size header format
matches = headerValue.match(/(\d+)-(\d+)\/(\d+|\*)/);
if (matches) {
return {
unit,
first: parseInt(matches[1]),
last: parseInt(matches[2]),
length: matches[3] === '*' ? null : parseInt(matches[3]),
};
}
// check for size header format
matches = headerValue.match(/(\d+|\*)/);
if (matches) {
return {
unit,
first: null,
last: null,
length: matches[1] === '*' ? null : parseInt(matches[1]),
};
}
return null;
}
/*
* Promisified wrapper around 'setTimeout' to allow 'await'
*/
export async function wait(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
export function zip(a, b) {
const A = Array.isArray(a) ? a : Array.from(a);
const B = Array.isArray(b) ? b : Array.from(b);
return A.map((k, i) => [k, B[i]]);
}
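// e.g. zip([1, 2, 3], ['a', 'b', 'c']) -> [[1, 'a'], [2, 'b'], [3, 'c']]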
// Based on https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error
export class AbortError extends Error {
constructor(params) {
// Pass remaining arguments (including vendor specific ones) to parent constructor
super(params);
// Maintains proper stack trace for where our error was thrown (only available on V8)
if (Error.captureStackTrace) {
Error.captureStackTrace(this, AbortError);
}
this.name = 'AbortError';
}
}
export class CustomAggregateError extends Error {
constructor(errors, message) {
super(message);
this.errors = errors;
this.message = message;
this.name = 'AggregateError';
}
}
export const AggregateError = CustomAggregateError;

View file

@ -0,0 +1,14 @@
/* global globalThis */
/* eslint-disable import/no-mutable-exports */
import { getDecoder } from '../compression/index.js';
const worker = globalThis;
worker.addEventListener('message', async (e) => {
const { id, fileDirectory, buffer } = e.data;
const decoder = await getDecoder(fileDirectory);
const decoded = await decoder.decode(fileDirectory, buffer);
worker.postMessage({ decoded, id }, [decoded]);
});
export let create;
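// note: `create` is intentionally left unassigned here; it is presumably
// wired up by the consuming bundler so the module keeps a stable export shape.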

View file

@ -0,0 +1 @@
data

View file

@ -0,0 +1,3 @@
FROM nginx
COPY nginx.conf /etc/nginx/conf.d/default.conf

View file

@ -0,0 +1,4 @@
*zip*
*tif*
*html
*xml

View file

@ -0,0 +1,91 @@
set -e
wget https://github.com/EOxServer/autotest/raw/f8d9f4bde6686abbda09c711d4bf5239f5378aa9/autotest/data/meris/MER_FRS_1P_reduced/ENVISAT-MER_FRS_1PNPDE20060816_090929_000001972050_00222_23322_0058_uint16_reduced_compressed.tif -O initial.tiff
wget https://github.com/EOxServer/autotest/raw/f8d9f4bde6686abbda09c711d4bf5239f5378aa9/autotest/data/meris/mosaic_MER_FRS_1P_RGB_reduced/mosaic_ENVISAT-MER_FRS_1PNPDE20060816_090929_000001972050_00222_23322_0058_RGB_reduced.tif -O rgb.tiff
wget https://raw.githubusercontent.com/hubmapconsortium/portal-containers/master/containers/ome-tiff-offsets/test-input/multi-channel.ome.tif -O multi-channel.ome.tif
gdal_translate -of GTiff initial.tiff stripped.tiff
gdal_translate -of GTiff -co TILED=YES -co BLOCKXSIZE=32 -co BLOCKYSIZE=32 stripped.tiff tiled.tiff
gdal_translate -of GTiff -ot Int32 stripped.tiff int32.tiff
gdal_translate -of GTiff -ot UInt32 stripped.tiff uint32.tiff
gdal_translate -of GTiff -ot Float32 stripped.tiff float32.tiff
gdal_translate -of GTiff -ot Float64 stripped.tiff float64.tiff
gdal_translate -of GTiff -co COMPRESS=LZW stripped.tiff lzw.tiff
gdal_translate -of GTiff -co COMPRESS=DEFLATE stripped.tiff deflate.tiff
gdal_translate -of GTiff -co COMPRESS=DEFLATE -co PREDICTOR=2 stripped.tiff deflate_predictor.tiff
gdal_translate -of GTiff -co COMPRESS=DEFLATE -co PREDICTOR=2 -co BLOCKYSIZE=128 stripped.tiff deflate_predictor_big_strips.tiff
gdal_translate -of GTiff -co TILED=YES -co BLOCKXSIZE=32 -co BLOCKYSIZE=32 -co COMPRESS=DEFLATE -co PREDICTOR=2 stripped.tiff deflate_predictor_tiled.tiff
gdal_translate -of GTiff -co COMPRESS=PACKBITS stripped.tiff packbits.tiff
gdal_translate -of GTiff -co INTERLEAVE=BAND stripped.tiff interleave.tiff
gdal_translate -of GTiff -co TILED=YES -co BLOCKXSIZE=32 -co BLOCKYSIZE=32 -co INTERLEAVE=BAND stripped.tiff tiledplanar.tiff
gdal_translate -of GTiff -co COMPRESS=LZW -co TILED=YES -co BLOCKXSIZE=32 -co BLOCKYSIZE=32 -co INTERLEAVE=BAND stripped.tiff tiledplanarlzw.tiff
gdal_translate -of GTiff -co COMPRESS=LZW -ot Float64 stripped.tiff float64lzw.tiff
gdal_translate -of GTiff -co COMPRESS=LZW -co PREDICTOR=2 stripped.tiff lzw_predictor.tiff
gdal_translate -of GTiff -outsize 10% 10% stripped.tiff small.tiff
gdal_translate -of GTiff -co BIGTIFF=YES stripped.tiff bigtiff.tiff
gdal_translate -of GTiff -co COMPRESS=LERC -co MAX_Z_ERROR=1000 stripped.tiff lerc.tiff
gdal_translate -of GTiff -co COMPRESS=LERC -co MAX_Z_ERROR=1000 -co INTERLEAVE=BAND stripped.tiff lerc_interleave.tiff
gdal_translate -of GTiff -co COMPRESS=LERC_DEFLATE -co MAX_Z_ERROR=1000 stripped.tiff lerc_deflate.tiff
gdal_translate -of GTiff -co COMPRESS=LERC_ZSTD -co MAX_Z_ERROR=1000 stripped.tiff lerc_zstd.tiff
gdal_translate -of GTiff -ot Float32 -co COMPRESS=LERC -co MAX_Z_ERROR=1000 stripped.tiff float32lerc.tiff
gdal_translate -of GTiff -ot Float32 -co COMPRESS=LERC -co MAX_Z_ERROR=1000 -co INTERLEAVE=BAND stripped.tiff float32lerc_interleave.tiff
gdal_translate -of GTiff -ot Float32 -co COMPRESS=LERC_DEFLATE -co MAX_Z_ERROR=1000 stripped.tiff float32lerc_deflate.tiff
gdal_translate -of GTiff -ot Float32 -co COMPRESS=LERC_ZSTD -co MAX_Z_ERROR=1000 stripped.tiff float32lerc_zstd.tiff
gdal_translate -of COG initial.tiff cog.tiff
# overviews
cp stripped.tiff overviews.tiff
gdaladdo overviews.tiff 2 4 8 16
cp stripped.tiff overviews_external.tiff
gdaladdo -ro overviews_external.tiff 2 4 8 16
# bigtiff
wget http://www.awaresystems.be/imaging/tiff/bigtiff/BigTIFFSamples.zip
unzip -o BigTIFFSamples.zip -d .
rm BigTIFFSamples.zip
# color images
rgb2pct.py rgb.tiff rgb_paletted.tiff
# convert rgb.tiff -colorspace YCbCr ycbcr.tif
# rgb2ycbcr rgb.tiff ycbcr.tif -h 1 -v 1
gdal_translate -co PHOTOMETRIC=YCBCR -co COMPRESS=JPEG -co JPEG_QUALITY=100 rgb.tiff ycbcr.tif
convert rgb.tiff -colorspace CMYK cmyk.tif
convert rgb.tiff -colorspace Lab cielab.tif
gdal_translate -of GTiff -co COMPRESS=JPEG rgb.tiff jpeg.tiff
gdal_translate -of GTiff -co COMPRESS=JPEG -co PHOTOMETRIC=YCBCR rgb.tiff jpeg_ycbcr.tiff
# modeltransformation tag
#wget https://s3.amazonaws.com/wdt-external/no_pixelscale_or_tiepoints.tiff
# RGBA example
wget https://s3.eu-central-1.amazonaws.com/waterview.geotiff/RGBA.tiff
# special LZW file
wget https://github.com/geotiffjs/geotiff.js/files/4186628/nasa_raster.tiff.zip
unzip -o nasa_raster.tiff.zip -d .
# additional test for LZW: EOI_CODE after CLEAR_CODE
wget https://github.com/geotiffjs/geotiff.js/files/2378479/lzw.zip
mkdir -p lzw_clear_eoi
unzip -o lzw.zip -d lzw_clear_eoi
# n-bit support
for i in 10 11 12 13 14 15; do
gdal_translate -of GTiff -co NBITS=$i -ot UInt16 initial.tiff n_bit_${i}.tiff || true
gdal_translate -of GTiff -co NBITS=$i -co TILED=YES -ot UInt16 initial.tiff n_bit_tiled_${i}.tiff || true
gdal_translate -of GTiff -co NBITS=$i -ot UInt16 -co INTERLEAVE=BAND initial.tiff n_bit_interleave_${i}.tiff || true
done
gdal_translate -of GTiff -co NBITS=16 -ot Float32 initial.tiff float_n_bit_16.tiff || true
gdal_translate -of GTiff -co NBITS=16 -ot Float32 -co TILED=YES initial.tiff float_n_bit_tiled_16.tiff || true
gdal_translate -of GTiff -co NBITS=16 -ot Float32 -co INTERLEAVE=BAND initial.tiff float_n_bit_interleave_16.tiff || true
# GDAL_METADATA support
wget https://github.com/GeoTIFF/test-data/archive/6ec42abc044a6884037c148d67a87a5d28228ce5.zip -O geotiff-test-data.zip
unzip -j -o geotiff-test-data.zip "test-data-*/files/*" -d .
rm geotiff-test-data.zip
# add top-level metadata to a tiff for testing purposes
gdal_edit.py -mo DATUM=WGS84 wind_direction.tif
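# optional sanity check of the generated fixtures (tiffinfo ships with libtiff-tools):
# tiffinfo tiled.tiff | head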

View file

@ -0,0 +1,261 @@
/* global GeoTIFF:false, plotty:false */
const { Pool, fromUrl } = GeoTIFF;
const imageWindow = [0, 0, 500, 500];
const tiffs = [
'stripped.tiff',
'tiled.tiff',
'interleave.tiff',
'tiledplanar.tiff',
'float32.tiff',
'uint32.tiff',
'int32.tiff',
'float64.tiff',
'lzw.tiff',
'tiledplanarlzw.tiff',
'float64lzw.tiff',
'lzw_predictor.tiff',
'deflate.tiff',
'deflate_predictor.tiff',
'deflate_predictor_tiled.tiff',
'lerc.tiff',
'lerc_interleave.tiff',
'lerc_deflate.tiff',
'float32lerc.tiff',
'float32lerc_interleave.tiff',
'float32lerc_deflate.tiff',
// "n_bit_tiled_10.tiff",
// "n_bit_11.tiff",
// "n_bit_12.tiff",
// "n_bit_13.tiff",
// "n_bit_14.tiff",
// "n_bit_15.tiff",
// "n_bit_interleave_10.tiff",
// "n_bit_interleave_12.tiff",
// "n_bit_interleave_14.tiff",
// "n_bit_interleave_15.tiff",
// "float_n_bit_16.tiff",
// "float_n_bit_tiled_16.tiff",
// "float_n_bit_interleave_16.tiff",
];
const rgbtiffs = [
'stripped.tiff',
'rgb.tiff',
'BigTIFF.tif',
'rgb_paletted.tiff',
'cmyk.tif',
'ycbcr.tif',
'cielab.tif',
'5ae862e00b093000130affda.tif',
'jpeg.tiff',
'jpeg_ycbcr.tiff',
];
const pool = new Pool();
const bandsSelect = document.getElementById('bands');
for (let i = 0; i < 15; ++i) {
const option = document.createElement('option');
option.value = i;
option.text = i + 1;
bandsSelect.appendChild(option);
}
async function render(image, sample, canvas, width, height) {
try {
const data = await image.readRasters({
samples: [sample],
window: imageWindow,
fillValue: 0,
pool,
});
const plot = new plotty.plot(canvas, data[0], width, height, [10, 65000], 'viridis', false); // eslint-disable-line new-cap
plot.render();
} catch (exc) {
// pass
}
}
async function renderRGB(image, canvas, width, height) {
try {
const rgb = await image.readRGB({
window: imageWindow,
pool,
});
const ctx = canvas.getContext('2d');
const imageData = ctx.createImageData(width, height);
const { data } = imageData;
let o = 0;
for (let i = 0; i < rgb.length; i += 3) {
data[o] = rgb[i];
data[o + 1] = rgb[i + 1];
data[o + 2] = rgb[i + 2];
data[o + 3] = 255;
o += 4;
}
ctx.putImageData(imageData, 0, 0);
} catch (exc) {
// pass
}
}
tiffs.forEach(async (filename) => {
const div = document.createElement('div');
div.style.float = 'left';
const header = document.createElement('p');
header.innerHTML = filename;
const canvas = document.createElement('canvas');
canvas.id = filename;
canvas.width = imageWindow[2] - imageWindow[0];
canvas.height = imageWindow[3] - imageWindow[1];
div.appendChild(header);
div.appendChild(canvas);
document.getElementById('canvases').appendChild(div);
const tiff = await fromUrl(`http://localhost:8090/test/data/${filename}`, {
allowFullFile: true,
cache: true,
});
const image = await tiff.getImage();
await render(image, 0, canvas, canvas.width, canvas.height);
bandsSelect.addEventListener('change', () => {
render(image, parseInt(bandsSelect.options[bandsSelect.selectedIndex].value, 10), canvas, canvas.width, canvas.height);
});
});
rgbtiffs.forEach(async (filename) => {
const div = document.createElement('div');
div.style.float = 'left';
const header = document.createElement('p');
header.innerHTML = filename;
const canvas = document.createElement('canvas');
canvas.id = filename;
canvas.width = imageWindow[2] - imageWindow[0];
canvas.height = imageWindow[3] - imageWindow[1];
div.appendChild(header);
div.appendChild(canvas);
document.getElementById('canvases').appendChild(div);
const tiff = await fromUrl(`http://localhost:8090/test/data/${filename}`, {
allowFullFile: true,
cache: true,
});
const image = await tiff.getImage();
await renderRGB(image, canvas, canvas.width, canvas.height);
});
// tiffs.forEach(function (filename) {
// const xhr = new XMLHttpRequest();
// xhr.open('GET', 'data/' + filename, true);
// xhr.responseType = 'arraybuffer';
// const div = document.createElement("div");
// div.style.float = "left";
// const header = document.createElement("p");
// header.innerHTML = filename;
// const canvas = document.createElement("canvas");
// canvas.id = filename;
// canvas.width = 500;
// canvas.height = 500;
// div.appendChild(header);
// div.appendChild(canvas);
// document.getElementById("canvases").appendChild(div);
// xhr.onload = function (e) {
// console.time("readRasters " + filename);
// fromArrayBuffer(this.response)
// .then(parser => parser.getImage())
// .then((image) => {
// // console.log(image);
// // console.log(image.getTiePoints());
// // var imageWindow = null;
// let width = image.getWidth();
// let height = image.getHeight();
// if (imageWindow) {
// width = imageWindow[2] - imageWindow[0];
// height = imageWindow[3] - imageWindow[1];
// }
// let plot;
// bandsSelect.addEventListener("change", function (e) {
// image.readRasters({ samples: [parseInt(bandsSelect.options[bandsSelect.selectedIndex].value)], poolSize: 8 })
// .then(function (rasters) {
// const canvas = document.getElementById(filename);
// plot = new plotty.plot(canvas, rasters[0], width, height, [10, 65000], "viridis", false);
// plot.render();
// });
// });
// image.readRasters({
// samples: [0],
// window: imageWindow,
// fillValue: 0,
// pool,
// })
// .then(function (rasters) {
// console.timeEnd("readRasters " + filename);
// const canvas = document.getElementById(filename);
// plot = new plotty.plot(canvas, rasters[0], width, height, [10, 65000], "viridis", false);
// plot.render();
// });
// });
// };
// xhr.send();
// });
// rgbtiffs.forEach(function (filename) {
// const xhr = new XMLHttpRequest();
// xhr.open('GET', 'data/' + filename, true);
// xhr.responseType = 'arraybuffer';
// const div = document.createElement("div");
// div.style.float = "left";
// const header = document.createElement("p");
// header.innerHTML = filename;
// const canvas = document.createElement("canvas");
// canvas.id = filename;
// div.appendChild(header);
// div.appendChild(canvas);
// document.getElementById("canvases").appendChild(div);
// xhr.onload = function (e) {
// fromArrayBuffer(this.response)
// .then(parser => parser.getImage())
// .then((image) => {
// console.time("readRGB " + filename);
// image.readRGB({ pool }).then(function (raster) {
// console.timeEnd("readRGB " + filename);
// canvas.width = image.getWidth();
// canvas.height = image.getHeight();
// const ctx = canvas.getContext("2d");
// const imageData = ctx.createImageData(image.getWidth(), image.getHeight());
// const data = imageData.data;
// let o = 0;
// for (var i = 0; i < raster.length; i += 3) {
// data[o] = raster[i];
// data[o + 1] = raster[i + 1];
// data[o + 2] = raster[i + 2];
// data[o + 3] = 255;
// o += 4;
// }
// ctx.putImageData(imageData, 0, 0);
// });
// });
// };
// xhr.send();
// });

File diff suppressed because it is too large

View file

@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<div id="canvases"></div>
<select id="bands"></select>
<script src="../dist-browser/geotiff.js"></script>
<script src="./dev.js"></script>
<script src="lib/plotty.min.js"></script>
</body>
</html>

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,87 @@
server {
listen 80;
server_name localhost;
#charset koi8-r;
#access_log /var/log/nginx/host.access.log main;
proxy_force_ranges on;
max_ranges 10;
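# with max_ranges 10, clients may request up to ten byteranges at once, e.g.
# (illustrative): curl -H 'Range: bytes=0-99,200-299' http://localhost/stripped.tiff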
location / {
max_ranges 10;
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
#
# Om nom nom cookies
#
add_header 'Access-Control-Allow-Credentials' 'true';
add_header 'Access-Control-Allow-Methods' '*';
#
# Custom headers and headers various browsers *should* be OK with but aren't
#
add_header 'Access-Control-Allow-Headers' '*';
add_header 'Access-Control-Expose-Headers' '*';
#
# Tell client that this pre-flight info is valid for 20 days
#
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
if ($request_method = 'POST') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Credentials' 'true';
add_header 'Access-Control-Allow-Methods' '*';
add_header 'Access-Control-Allow-Headers' '*';
add_header 'Access-Control-Expose-Headers' '*';
}
if ($request_method = 'GET') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Credentials' 'true';
add_header 'Access-Control-Allow-Methods' '*';
add_header 'Access-Control-Allow-Headers' '*';
add_header 'Access-Control-Expose-Headers' '*';
}
root /usr/share/nginx/html;
index index.html index.htm;
}
#error_page 404 /404.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
# proxy the PHP scripts to Apache listening on 127.0.0.1:80
#
#location ~ \.php$ {
# proxy_pass http://127.0.0.1;
#}
# pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
#
#location ~ \.php$ {
# root html;
# fastcgi_pass 127.0.0.1:9000;
# fastcgi_index index.php;
# fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
# include fastcgi_params;
#}
# deny access to .htaccess files, if Apache's document root
# concurs with nginx's one
#
#location ~ /\.ht {
# deny all;
#}
}

View file

@ -0,0 +1,38 @@
/* eslint-disable global-require, no-unused-expressions */
import isNode from 'detect-node';
import { expect } from 'chai';
import { makeFetchSource } from '../src/source/remote.js';
const port = 9999;
let server = null;
before(() => {
if (isNode) {
const express = require('express');
const app = express();
app.use(express.static('.'));
server = app.listen(port);
}
});
after(() => {
if (server) {
server.close();
}
});
describe('makeFetchSource', () => {
it('shall fetch the first n bytes', async () => {
const blockSize = 512;
const source = makeFetchSource(`http://localhost:${port}/test/data/stripped.tiff`, { blockSize });
expect(source).to.be.ok;
const response = await source.fetch(0, 10);
expect(response.byteLength).to.equal(10);
const firstBlock = source.blocks.get(0);
expect(firstBlock).to.be.ok;
expect(firstBlock.offset).to.equal(0);
expect(firstBlock.length).to.equal(blockSize);
expect(firstBlock.data.byteLength).to.equal(blockSize);
});
});
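// (run via `npm test`; requires the fixtures generated by test/data/setup_data.sh)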

View file

@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"outDir": "./dist-node",
"lib": ["ES2020"],
"allowJs": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"importHelpers": false,
"skipLibCheck": true,
"strict": false,
"strictNullChecks": true,
"moduleResolution": "node",
"esModuleInterop": true,
"inlineSources": false,
"downlevelIteration": true
},
"include": [
"./dist-module/**/*.js"
]
}

View file

@ -0,0 +1,21 @@
{
"compilerOptions": {
"target": "ES5",
"lib": ["ES2020", "DOM"],
"allowJs": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"emitDeclarationOnly": true,
"importHelpers": false,
"strict": false,
"strictNullChecks": true,
"moduleResolution": "node",
"esModuleInterop": false,
"inlineSources": false,
"downlevelIteration": true
},
"include": [
"./src/**/*.js"
]
}