add geotiffJS library full git

Author: JS
Date: 2023-10-29 22:27:19 +01:00
parent 2e27417fc5
commit aef672986a
62 changed files with 21999 additions and 0 deletions

@@ -0,0 +1,20 @@
import { BaseSource } from './basesource.js';
import { AbortError } from '../utils.js';
class ArrayBufferSource extends BaseSource {
constructor(arrayBuffer) {
super();
this.arrayBuffer = arrayBuffer;
}
fetchSlice(slice, signal) {
if (signal && signal.aborted) {
throw new AbortError('Request aborted');
}
return this.arrayBuffer.slice(slice.offset, slice.offset + slice.length);
}
}
export function makeBufferSource(arrayBuffer) {
return new ArrayBufferSource(arrayBuffer);
}
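
A minimal usage sketch (the module path is an assumption, since the commit view hides file names; the byte values are invented):

import { makeBufferSource } from './arraybuffer.js'; // path assumed

const buffer = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]).buffer;
const source = makeBufferSource(buffer);
// read bytes 2..5 through the common BaseSource.fetch() interface
const [slice] = await source.fetch([{ offset: 2, length: 4 }]);
console.log(new Uint8Array(slice)); // Uint8Array [2, 3, 4, 5]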

@@ -0,0 +1,38 @@
/**
* @typedef Slice
* @property {number} offset
* @property {number} length
*/
export class BaseSource {
/**
* Requests the given slices of data in parallel and resolves with their
* data in the same order.
* @param {Slice[]} slices
* @param {AbortSignal} [signal]
* @returns {Promise<ArrayBuffer[]>}
*/
async fetch(slices, signal = undefined) {
return Promise.all(
slices.map((slice) => this.fetchSlice(slice, signal)),
);
}
/**
* Fetch a single slice of data. Subclasses must implement this method.
* @param {Slice} slice
* @param {AbortSignal} [signal]
* @returns {Promise<ArrayBuffer>}
*/
async fetchSlice(slice, signal) { // eslint-disable-line no-unused-vars
throw new Error(`fetching of slice ${JSON.stringify(slice)} not implemented`);
}
/**
* Returns the file size if already known, and null otherwise
*/
get fileSize() {
return null;
}
async close() {
// no-op by default
}
}
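
Every concrete backend only has to override fetchSlice (and, where meaningful, fileSize and close). A minimal sketch of a hypothetical custom source that fulfils every slice with zero-filled bytes:

import { BaseSource } from './basesource.js';

// Hypothetical source: serves zeros instead of reading anything.
class ZeroSource extends BaseSource {
  async fetchSlice(slice) {
    return new ArrayBuffer(slice.length); // ArrayBuffers are zero-initialized
  }
}

const [data] = await new ZeroSource().fetch([{ offset: 0, length: 16 }]);
// data is a 16-byte ArrayBuffer of zeros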

@@ -0,0 +1,296 @@
import QuickLRU from 'quick-lru';
import { BaseSource } from './basesource.js';
import { AbortError, AggregateError, wait, zip } from '../utils.js';
class Block {
/**
*
* @param {number} offset
* @param {number} length
* @param {ArrayBuffer} [data]
*/
constructor(offset, length, data = null) {
this.offset = offset;
this.length = length;
this.data = data;
}
/**
* @returns {number} the offset of the first byte after this block
*/
get top() {
return this.offset + this.length;
}
}
class BlockGroup {
/**
*
* @param {number} offset
* @param {number} length
* @param {number[]} blockIds
*/
constructor(offset, length, blockIds) {
this.offset = offset;
this.length = length;
this.blockIds = blockIds;
}
}
export class BlockedSource extends BaseSource {
/**
*
* @param {BaseSource} source The underlying source that shall be blocked and cached
* @param {object} options
* @param {number} [options.blockSize]
* @param {number} [options.cacheSize]
*/
constructor(source, { blockSize = 65536, cacheSize = 100 } = {}) {
super();
this.source = source;
this.blockSize = blockSize;
this.blockCache = new QuickLRU({
maxSize: cacheSize,
onEviction: (blockId, block) => {
this.evictedBlocks.set(blockId, block);
},
});
/** @type {Map<number, Block>} */
this.evictedBlocks = new Map();
// mapping blockId -> promise of the block's outstanding request
this.blockRequests = new Map();
// set of blockIds missing for the current requests
this.blockIdsToFetch = new Set();
this.abortedBlockIds = new Set();
}
get fileSize() {
return this.source.fileSize;
}
/**
*
* @param {import("./basesource").Slice[]} slices
*/
async fetch(slices, signal) {
const blockRequests = [];
const missingBlockIds = [];
const allBlockIds = [];
this.evictedBlocks.clear();
for (const { offset, length } of slices) {
let top = offset + length;
const { fileSize } = this;
if (fileSize !== null) {
top = Math.min(top, fileSize);
}
const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
for (let current = firstBlockOffset; current < top; current += this.blockSize) {
const blockId = Math.floor(current / this.blockSize);
if (!this.blockCache.has(blockId) && !this.blockRequests.has(blockId)) {
this.blockIdsToFetch.add(blockId);
missingBlockIds.push(blockId);
}
if (this.blockRequests.has(blockId)) {
blockRequests.push(this.blockRequests.get(blockId));
}
allBlockIds.push(blockId);
}
}
// allow additional block requests to accumulate
await wait();
this.fetchBlocks(signal);
// gather the requests for the blocks that this fetch call newly triggered
const missingRequests = [];
for (const blockId of missingBlockIds) {
// The requested missing block could already be in the cache
// instead of having its request still be outstanding.
if (this.blockRequests.has(blockId)) {
missingRequests.push(this.blockRequests.get(blockId));
}
}
// Actually await all pending requests that are needed for this `fetch`.
await Promise.allSettled(blockRequests);
await Promise.allSettled(missingRequests);
// Perform retries if a block was interrupted by a previous signal
const abortedBlockRequests = [];
const abortedBlockIds = allBlockIds
.filter((id) => this.abortedBlockIds.has(id) || !this.blockCache.has(id));
abortedBlockIds.forEach((id) => this.blockIdsToFetch.add(id));
// start the retry of some blocks if required
if (abortedBlockIds.length > 0 && signal && !signal.aborted) {
this.fetchBlocks(null);
for (const blockId of abortedBlockIds) {
const block = this.blockRequests.get(blockId);
if (!block) {
throw new Error(`Block ${blockId} is not in the block requests`);
}
abortedBlockRequests.push(block);
}
await Promise.allSettled(abortedBlockRequests);
}
// throw an abort error
if (signal && signal.aborted) {
throw new AbortError('Request was aborted');
}
const blocks = allBlockIds.map((id) => this.blockCache.get(id) || this.evictedBlocks.get(id));
const failedBlocks = blocks.filter((i) => !i);
if (failedBlocks.length) {
throw new AggregateError(failedBlocks, 'Request failed');
}
// create a final Map with all blocks required to satisfy this request
const requiredBlocks = new Map(zip(allBlockIds, blocks));
// TODO: satisfy each slice
return this.readSliceData(slices, requiredBlocks);
}
/**
*
* @param {AbortSignal} signal
*/
fetchBlocks(signal) {
// check if there is anything left to fetch
if (this.blockIdsToFetch.size > 0) {
const groups = this.groupBlocks(this.blockIdsToFetch);
// start requesting slices of data
const groupRequests = this.source.fetch(groups, signal);
for (let groupIndex = 0; groupIndex < groups.length; ++groupIndex) {
const group = groups[groupIndex];
for (const blockId of group.blockIds) {
// make an async IIFE for each block
this.blockRequests.set(blockId, (async () => {
try {
const response = (await groupRequests)[groupIndex];
const blockOffset = blockId * this.blockSize;
// cut this block's bytes out of the enclosing group response
const start = blockOffset - response.offset;
const end = Math.min(start + this.blockSize, response.data.byteLength);
const data = response.data.slice(start, end);
const block = new Block(
blockOffset,
data.byteLength,
data,
);
this.blockCache.set(blockId, block);
this.abortedBlockIds.delete(blockId);
} catch (err) {
if (err.name === 'AbortError') {
// store the signal here, we need it to determine later if an
// error was caused by this signal
err.signal = signal;
this.blockCache.delete(blockId);
this.abortedBlockIds.add(blockId);
} else {
throw err;
}
} finally {
this.blockRequests.delete(blockId);
}
})());
}
}
this.blockIdsToFetch.clear();
}
}
/**
*
* @param {Set<number>} blockIds
* @returns {BlockGroup[]}
*/
groupBlocks(blockIds) {
const sortedBlockIds = Array.from(blockIds).sort((a, b) => a - b);
if (sortedBlockIds.length === 0) {
return [];
}
let current = [];
let lastBlockId = null;
const groups = [];
for (const blockId of sortedBlockIds) {
if (lastBlockId === null || lastBlockId + 1 === blockId) {
current.push(blockId);
lastBlockId = blockId;
} else {
groups.push(new BlockGroup(
current[0] * this.blockSize,
current.length * this.blockSize,
current,
));
current = [blockId];
lastBlockId = blockId;
}
}
groups.push(new BlockGroup(
current[0] * this.blockSize,
current.length * this.blockSize,
current,
));
return groups;
}
/**
*
* @param {import("./basesource").Slice[]} slices
* @param {Map<number, Block>} blocks
*/
readSliceData(slices, blocks) {
return slices.map((slice) => {
let top = slice.offset + slice.length;
if (this.fileSize !== null) {
top = Math.min(this.fileSize, top);
}
const blockIdLow = Math.floor(slice.offset / this.blockSize);
const blockIdHigh = Math.floor(top / this.blockSize);
const sliceData = new ArrayBuffer(slice.length);
const sliceView = new Uint8Array(sliceData);
for (let blockId = blockIdLow; blockId <= blockIdHigh; ++blockId) {
const block = blocks.get(blockId);
const delta = block.offset - slice.offset;
const topDelta = block.top - top;
let blockInnerOffset = 0;
let rangeInnerOffset = 0;
let usedBlockLength;
if (delta < 0) {
blockInnerOffset = -delta;
} else if (delta > 0) {
rangeInnerOffset = delta;
}
if (topDelta < 0) {
usedBlockLength = block.length - blockInnerOffset;
} else {
usedBlockLength = top - block.offset - blockInnerOffset;
}
const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);
sliceView.set(blockView, rangeInnerOffset);
}
return sliceData;
});
}
}
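
BlockedSource is a caching decorator: slices are aligned to fixed-size blocks, adjacent missing blocks are grouped into single range requests, and a quick-lru cache keeps recent blocks. Note that it reads `offset` and `data` from each response of the wrapped source, i.e. the object shape RemoteSource returns below, not a bare ArrayBuffer. A sketch with a hypothetical zero-serving backend:

import { BaseSource } from './basesource.js';
import { BlockedSource } from './blockedsource.js';

// Hypothetical backend returning the { offset, length, data } shape.
class ZeroRangeSource extends BaseSource {
  async fetchSlice({ offset, length }) {
    return { offset, length, data: new ArrayBuffer(length) };
  }
}

const cached = new BlockedSource(new ZeroRangeSource(), { blockSize: 65536 });
const [a, b] = await cached.fetch([
  { offset: 0, length: 100 },   // miss: block 0 is fetched from the backend
  { offset: 200, length: 100 }, // hit: same 64 KiB block, no second request
]);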

@@ -0,0 +1,45 @@
export class BaseResponse {
/**
* Returns whether the response has an OK-ish (2xx) status code
*/
get ok() {
return this.status >= 200 && this.status <= 299;
}
/**
* Returns the status code of the response
*/
get status() {
throw new Error('not implemented');
}
/**
* Returns the value of the specified header
* @param {string} headerName the header name
* @returns {string} the header value
*/
getHeader(headerName) { // eslint-disable-line no-unused-vars
throw new Error('not implemented');
}
/**
* @returns {Promise<ArrayBuffer>} the response data of the request
*/
async getData() {
throw new Error('not implemented');
}
}
export class BaseClient {
constructor(url) {
this.url = url;
}
/**
* Send a GET request with the given options
* @param {object} [options]
* @param {object} [options.headers] HTTP headers to send
* @param {string} [options.credentials] credentials mode, used by the fetch client
* @param {AbortSignal} [options.signal] signal to abort the request
*/
async request({ headers, credentials, signal } = {}) { // eslint-disable-line no-unused-vars
throw new Error('request is not implemented');
}
}
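
These two classes decouple RemoteSource from the actual transport: a new transport implements request() and wraps its answer in a BaseResponse facade. A sketch with an invented in-memory client (all names hypothetical); it can be plugged in via makeCustomSource, defined later in this commit:

import { BaseClient, BaseResponse } from './base.js';

// Hypothetical response that always serves the same 206 partial content.
class CannedResponse extends BaseResponse {
  constructor(data) {
    super();
    this.data = data;
  }
  get status() {
    return 206; // partial content
  }
  getHeader(name) {
    if (name === 'content-range') {
      return `bytes 0-${this.data.byteLength - 1}/${this.data.byteLength}`;
    }
    return null;
  }
  async getData() {
    return this.data;
  }
}

// Hypothetical client that never touches the network.
class CannedClient extends BaseClient {
  async request({ headers, signal } = {}) { // eslint-disable-line no-unused-vars
    return new CannedResponse(new ArrayBuffer(1024));
  }
}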

@@ -0,0 +1,41 @@
import { BaseClient, BaseResponse } from './base.js';
class FetchResponse extends BaseResponse {
/**
* BaseResponse facade for fetch API Response
* @param {Response} response
*/
constructor(response) {
super();
this.response = response;
}
get status() {
return this.response.status;
}
getHeader(name) {
return this.response.headers.get(name);
}
async getData() {
const data = this.response.arrayBuffer
? await this.response.arrayBuffer()
: (await this.response.buffer()).buffer;
return data;
}
}
export class FetchClient extends BaseClient {
constructor(url, credentials) {
super(url);
this.credentials = credentials;
}
async request({ headers, credentials, signal } = {}) {
const response = await fetch(this.url, {
headers, credentials, signal,
});
return new FetchResponse(response);
}
}

@@ -0,0 +1,81 @@
import http from 'http';
import https from 'https';
import urlMod from 'url';
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
class HttpResponse extends BaseResponse {
/**
* BaseResponse facade for node HTTP/HTTPS API Response
* @param {http.IncomingMessage} response
*/
constructor(response, dataPromise) {
super();
this.response = response;
this.dataPromise = dataPromise;
}
get status() {
return this.response.statusCode;
}
getHeader(name) {
return this.response.headers[name];
}
async getData() {
const data = await this.dataPromise;
return data;
}
}
export class HttpClient extends BaseClient {
constructor(url) {
super(url);
this.parsedUrl = urlMod.parse(this.url);
this.httpApi = (this.parsedUrl.protocol === 'http:' ? http : https);
}
constructRequest(headers, signal) {
return new Promise((resolve, reject) => {
const request = this.httpApi.get(
{
...this.parsedUrl,
headers,
},
(response) => {
const dataPromise = new Promise((resolveData, rejectData) => {
const chunks = [];
// collect chunks
response.on('data', (chunk) => {
chunks.push(chunk);
});
// concatenate all chunks and resolve the promise with the resulting buffer
response.on('end', () => {
const data = Buffer.concat(chunks).buffer;
resolveData(data);
});
// reject the data promise itself; the outer promise has already been
// resolved by the time a stream error can occur
response.on('error', rejectData);
});
resolve(new HttpResponse(response, dataPromise));
},
);
request.on('error', reject);
if (signal) {
if (signal.aborted) {
request.destroy(new AbortError('Request aborted'));
}
signal.addEventListener('abort', () => request.destroy(new AbortError('Request aborted')));
}
});
}
async request({ headers, signal } = {}) {
const response = await this.constructRequest(headers, signal);
return response;
}
}

@@ -0,0 +1,61 @@
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
class XHRResponse extends BaseResponse {
/**
* BaseResponse facade for XMLHttpRequest
* @param {XMLHttpRequest} xhr
* @param {ArrayBuffer} data
*/
constructor(xhr, data) {
super();
this.xhr = xhr;
this.data = data;
}
get status() {
return this.xhr.status;
}
getHeader(name) {
return this.xhr.getResponseHeader(name);
}
async getData() {
return this.data;
}
}
export class XHRClient extends BaseClient {
constructRequest(headers, signal) {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.open('GET', this.url);
xhr.responseType = 'arraybuffer';
for (const [key, value] of Object.entries(headers)) {
xhr.setRequestHeader(key, value);
}
// hook signals
xhr.onload = () => {
const data = xhr.response;
resolve(new XHRResponse(xhr, data));
};
xhr.onerror = reject;
xhr.onabort = () => reject(new AbortError('Request aborted'));
xhr.send();
if (signal) {
if (signal.aborted) {
xhr.abort();
}
signal.addEventListener('abort', () => xhr.abort());
}
});
}
async request({ headers = {}, signal } = {}) { // default headers so Object.entries never throws
const response = await this.constructRequest(headers, signal);
return response;
}
}

@@ -0,0 +1,68 @@
import fs from 'fs';
import { BaseSource } from './basesource.js';
function closeAsync(fd) {
return new Promise((resolve, reject) => {
fs.close(fd, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function openAsync(path, flags, mode = undefined) {
return new Promise((resolve, reject) => {
fs.open(path, flags, mode, (err, fd) => {
if (err) {
reject(err);
} else {
resolve(fd);
}
});
});
}
function readAsync(...args) {
return new Promise((resolve, reject) => {
fs.read(...args, (err, bytesRead, buffer) => {
if (err) {
reject(err);
} else {
resolve({ bytesRead, buffer });
}
});
});
}
class FileSource extends BaseSource {
constructor(path) {
super();
this.path = path;
this.openRequest = openAsync(path, 'r');
}
async fetchSlice(slice) {
// TODO: use `signal`
const fd = await this.openRequest;
const { buffer } = await readAsync(
fd,
Buffer.alloc(slice.length),
0,
slice.length,
slice.offset,
);
return buffer.buffer;
}
async close() {
const fd = await this.openRequest;
await closeAsync(fd);
}
}
export function makeFileSource(path) {
return new FileSource(path);
}
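
A Node usage sketch (the module path and the file name are assumptions, since the commit view hides file names):

import { makeFileSource } from './file.js'; // path assumed

const source = makeFileSource('./example.tif'); // hypothetical local file
const [head] = await source.fetch([{ offset: 0, length: 8 }]);
console.log(new Uint8Array(head)); // the file's first 8 bytes
await source.close(); // releases the file descriptor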

@@ -0,0 +1,32 @@
import { BaseSource } from './basesource.js';
class FileReaderSource extends BaseSource {
constructor(file) {
super();
this.file = file;
}
async fetchSlice(slice, signal) {
return new Promise((resolve, reject) => {
const blob = this.file.slice(slice.offset, slice.offset + slice.length);
const reader = new FileReader();
reader.onload = (event) => resolve(event.target.result);
reader.onerror = reject;
reader.onabort = reject;
reader.readAsArrayBuffer(blob);
if (signal) {
signal.addEventListener('abort', () => reader.abort());
}
});
}
}
/**
* Create a new source from a given file/blob.
* @param {Blob} file The file or blob to read from.
* @returns The constructed source
*/
export function makeFileReaderSource(file) {
return new FileReaderSource(file);
}
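
A browser usage sketch (module path assumed; the input element is hypothetical):

import { makeFileReaderSource } from './filereader.js'; // path assumed

document.querySelector('input[type=file]').addEventListener('change', async (event) => {
  const source = makeFileReaderSource(event.target.files[0]);
  const [head] = await source.fetch([{ offset: 0, length: 16 }]);
  console.log(new Uint8Array(head)); // first 16 bytes of the selected file
});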

@@ -0,0 +1,145 @@
const CRLFCRLF = '\r\n\r\n';
/*
* Shim for 'Object.fromEntries'
*/
function itemsToObject(items) {
if (typeof Object.fromEntries !== 'undefined') {
return Object.fromEntries(items);
}
const obj = {};
for (const [key, value] of items) {
obj[key] = value; // mirror Object.fromEntries: keys are left untouched here
}
return obj;
}
/**
* Parse HTTP headers from a given string.
* @param {String} text the text to parse the headers from
* @returns {Object} the parsed headers with lowercase keys
*/
function parseHeaders(text) {
const items = text
.split('\r\n')
.map((line) => {
const kv = line.split(':').map((str) => str.trim());
kv[0] = kv[0].toLowerCase();
return kv;
});
return itemsToObject(items);
}
/**
* Parse a 'Content-Type' header value to the content-type and parameters
* @param {String} rawContentType the raw string to parse from
* @returns {Object} the parsed content type with the fields: type and params
*/
export function parseContentType(rawContentType) {
const [type, ...rawParams] = rawContentType.split(';').map((s) => s.trim());
const paramsItems = rawParams.map((param) => param.split('='));
return { type, params: itemsToObject(paramsItems) };
}
/**
* Parse a 'Content-Range' header value to its start, end, and total parts
* @param {String} rawContentRange the raw string to parse from
* @returns {Object} the parsed parts
*/
export function parseContentRange(rawContentRange) {
let start;
let end;
let total;
if (rawContentRange) {
const match = rawContentRange.match(/bytes (\d+)-(\d+)\/(\d+)/);
// guard against malformed headers: match is null when the pattern fails
if (match) {
[, start, end, total] = match;
start = parseInt(start, 10);
end = parseInt(end, 10);
total = parseInt(total, 10);
}
}
return { start, end, total };
}
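
For example (header values invented):

parseContentRange('bytes 0-1023/4096');
// => { start: 0, end: 1023, total: 4096 }
parseContentRange(undefined);
// => { start: undefined, end: undefined, total: undefined }
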
/**
* Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
* Each item in the list has the following properties:
* - headers: the HTTP headers
* - data: the sliced ArrayBuffer for that specific part
* - offset: the offset of the byterange within its originating file
* - length: the length of the byterange
* @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
* @param {String} boundary the boundary string used to split the sections
* @returns {Object[]} the parsed byteranges
*/
export function parseByteRanges(responseArrayBuffer, boundary) {
let offset = null;
const decoder = new TextDecoder('ascii');
const out = [];
const startBoundary = `--${boundary}`;
const endBoundary = `${startBoundary}--`;
// search for the initial boundary, may be offset by some bytes
// TODO: more efficient to check for `--` in bytes directly
for (let i = 0; i < 10; ++i) {
const text = decoder.decode(
new Uint8Array(responseArrayBuffer, i, startBoundary.length),
);
if (text === startBoundary) {
offset = i;
}
}
if (offset === null) {
throw new Error('Could not find initial boundary');
}
while (offset < responseArrayBuffer.byteLength) {
const text = decoder.decode(
new Uint8Array(responseArrayBuffer, offset,
Math.min(startBoundary.length + 1024, responseArrayBuffer.byteLength - offset),
),
);
// break if we arrived at the end
if (text.length === 0 || text.startsWith(endBoundary)) {
break;
}
// assert that we are actually dealing with a byterange and are at the correct offset
if (!text.startsWith(startBoundary)) {
throw new Error('Part does not start with boundary');
}
// get a substring from where we read the headers
const innerText = text.substr(startBoundary.length + 2);
if (innerText.length === 0) {
break;
}
// find the double linebreak that denotes the end of the headers
const endOfHeaders = innerText.indexOf(CRLFCRLF);
// parse the headers to get the content range size
const headers = parseHeaders(innerText.substr(0, endOfHeaders));
const { start, end, total } = parseContentRange(headers['content-range']);
// calculate the start of the data: skip the boundary line and its CRLF,
// then the headers and the double CRLF that terminates them
const startOfData = offset + startBoundary.length + 2 + endOfHeaders + CRLFCRLF.length;
// content ranges are inclusive, so the length is end + 1 - start
const length = end + 1 - start;
out.push({
headers,
data: responseArrayBuffer.slice(startOfData, startOfData + length),
offset: start,
length,
fileSize: total,
});
// skip the CRLF that separates this part's data from the next boundary
offset = startOfData + length + 2;
}
return out;
}
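
A self-contained round-trip sketch, with an invented boundary and a single 4-byte part of a hypothetical 16-byte file:

const boundary = '3d6b6a416f9b5'; // hypothetical boundary string
const body = `--${boundary}\r\n`
  + 'Content-Type: application/octet-stream\r\n'
  + 'Content-Range: bytes 0-3/16\r\n'
  + '\r\n'
  + 'ABCD\r\n'
  + `--${boundary}--`;
const [part] = parseByteRanges(new TextEncoder().encode(body).buffer, boundary);
// part.offset === 0, part.length === 4, part.fileSize === 16
// new TextDecoder().decode(part.data) === 'ABCD'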

@@ -0,0 +1,196 @@
import { parseByteRanges, parseContentRange, parseContentType } from './httputils.js';
import { BaseSource } from './basesource.js';
import { BlockedSource } from './blockedsource.js';
import { FetchClient } from './client/fetch.js';
import { XHRClient } from './client/xhr.js';
import { HttpClient } from './client/http.js';
class RemoteSource extends BaseSource {
/**
*
* @param {BaseClient} client
* @param {object} headers
* @param {number} maxRanges
* @param {boolean} allowFullFile
*/
constructor(client, headers, maxRanges, allowFullFile) {
super();
this.client = client;
this.headers = headers;
this.maxRanges = maxRanges;
this.allowFullFile = allowFullFile;
this._fileSize = null;
}
/**
*
* @param {import("./basesource").Slice[]} slices
* @param {AbortSignal} [signal]
*/
async fetch(slices, signal) {
// if we allow multi-ranges, split the incoming request into that many sub-requests
// and join them afterwards
if (this.maxRanges >= slices.length) {
return this.fetchSlices(slices, signal);
} else if (this.maxRanges > 0 && slices.length > 1) {
// TODO: split into multiple multi-range requests
// const subSlicesRequests = [];
// for (let i = 0; i < slices.length; i += this.maxRanges) {
// subSlicesRequests.push(
// this.fetchSlices(slices.slice(i, i + this.maxRanges), signal),
// );
// }
// return (await Promise.all(subSlicesRequests)).flat();
}
// otherwise make a single request for each slice
return Promise.all(
slices.map((slice) => this.fetchSlice(slice, signal)),
);
}
async fetchSlices(slices, signal) {
const response = await this.client.request({
headers: {
...this.headers,
Range: `bytes=${slices
.map(({ offset, length }) => `${offset}-${offset + length - 1}`)
.join(',')
}`,
},
signal,
});
if (!response.ok) {
throw new Error('Error fetching data.');
} else if (response.status === 206) {
const { type, params } = parseContentType(response.getHeader('content-type'));
if (type === 'multipart/byteranges') {
const byteRanges = parseByteRanges(await response.getData(), params.boundary);
this._fileSize = byteRanges[0].fileSize || null;
return byteRanges;
}
const data = await response.getData();
const { start, end, total } = parseContentRange(response.getHeader('content-range'));
this._fileSize = total || null;
const first = [{
data,
offset: start,
length: end + 1 - start,
}];
if (slices.length > 1) {
// we requested more than one slice, but got only the first;
// unfortunately, some HTTP servers don't support multi-range
// requests and return just the first range.
// fetch the remaining slices individually
const others = await Promise.all(
slices.slice(1).map((slice) => this.fetchSlice(slice, signal)),
);
return first.concat(others);
}
return first;
} else {
if (!this.allowFullFile) {
throw new Error('Server responded with full file');
}
const data = await response.getData();
this._fileSize = data.byteLength;
return [{
data,
offset: 0,
length: data.byteLength,
}];
}
}
async fetchSlice(slice, signal) {
const { offset, length } = slice;
const response = await this.client.request({
headers: {
...this.headers,
Range: `bytes=${offset}-${offset + length - 1}`,
},
signal,
});
// check that the response is OK and that the server actually understands range requests
if (!response.ok) {
throw new Error('Error fetching data.');
} else if (response.status === 206) {
const data = await response.getData();
const { total } = parseContentRange(response.getHeader('content-range'));
this._fileSize = total || null;
return {
data,
offset,
length,
};
} else {
if (!this.allowFullFile) {
throw new Error('Server responded with full file');
}
const data = await response.getData();
this._fileSize = data.byteLength;
return {
data,
offset: 0,
length: data.byteLength,
};
}
}
get fileSize() {
return this._fileSize;
}
}
function maybeWrapInBlockedSource(source, { blockSize, cacheSize }) {
if (blockSize === null) {
return source;
}
return new BlockedSource(source, { blockSize, cacheSize });
}
export function makeFetchSource(url, { headers = {}, credentials, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new FetchClient(url, credentials);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeXHRSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new XHRClient(url);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeHttpSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const client = new HttpClient(url);
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
export function makeCustomSource(client, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
const source = new RemoteSource(client, headers, maxRanges, allowFullFile);
return maybeWrapInBlockedSource(source, blockOptions);
}
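
Using the hypothetical CannedClient sketched after BaseClient above (maxRanges: 0 keeps RemoteSource on the single-range fetchSlice path, which is all that client implements):

const source = makeCustomSource(new CannedClient('memory://canned'), { maxRanges: 0 });
const [bytes] = await source.fetch([{ offset: 0, length: 64 }]);
// bytes is a 64-byte ArrayBuffer served through the BlockedSource cache
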
/**
* Creates a source for a remote file, picking the most appropriate client:
* fetch when available (unless forceXHR is set), XMLHttpRequest otherwise,
* and node's http/https API as the final fallback.
* @param {string} url
* @param {object} options
*/
export function makeRemoteSource(url, { forceXHR = false, ...clientOptions } = {}) {
if (typeof fetch === 'function' && !forceXHR) {
return makeFetchSource(url, clientOptions);
}
if (typeof XMLHttpRequest !== 'undefined') {
return makeXHRSource(url, clientOptions);
}
return makeHttpSource(url, clientOptions);
}
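
Finally, an end-to-end sketch (URL invented; module path assumed): makeRemoteSource picks the fetch client in browsers and modern Node, and reads go through the BlockedSource cache unless blockSize is set to null.

import { makeRemoteSource } from './remote.js'; // path assumed

const source = makeRemoteSource('https://example.com/data.tif', { // hypothetical URL
  maxRanges: 0,     // one range request per slice
  blockSize: 65536, // cache granularity; pass null to disable caching
});
const [header, tile] = await source.fetch([
  { offset: 0, length: 1024 },
  { offset: 4096, length: 2048 },
]);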