diff --git a/.gitignore b/.gitignore
index 45ec153..7cece78 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,10 @@
-# Dependency directories
+# Dependencies
node_modules/
# VS Code
-.vscode/
\ No newline at end of file
+.vscode/
+
+# Track everything under modules (including vendored node_modules)
+!modules/
+!modules/*/
+!modules/*/node_modules
\ No newline at end of file
diff --git a/modules/gar.js b/modules/gar.js
new file mode 100644
index 0000000..88a7bce
--- /dev/null
+++ b/modules/gar.js
@@ -0,0 +1,451 @@
+/*
+ Integration service between the Parus 8 software suite and the WEB API
+ Add-on module: integration with GAR (the Russian State Address Register)
+*/
+
+//------------------------------
+// External libraries
+//------------------------------
+
+const fs = require("fs"); //File system access
+const { pipeline } = require("stream"); //Stream utilities
+const { promisify } = require("util"); //Utility helpers
+const xml2js = require("xml2js"); //XML <-> JSON conversion
+const confServ = require("../config"); //Application server settings
+const conf = require("./gar_config"); //Settings of the "GAR integration" extension
+const StreamZip = require("./gar_utils/node_modules/node-stream-zip"); //ZIP archive handling
+const fetch = require("./gar_utils/node_modules/node-fetch"); //HTTP requests
+const { WorkersPool } = require("./gar_utils/workers_pool"); //Worker pool
+const { logInf, makeTaskMessage, logWrn, logErr, stringToDate, dateToISOString } = require("./gar_utils/utils"); //Helper functions
+
+//--------------------------
+// Global identifiers
+//--------------------------
+
+//Module name for logging
+const MODULE = `GAR`;
+//Worker pool options
+const workersPoolOptions = {
+    workerPath: "./modules/gar_utils/import.js",
+    limit: conf.common.nThreads,
+    timeout: 0,
+    drainTimeout: 60000
+};
+//Worker pool
+let WP = null;
+//Parse queue
+let PARSE_QUEUE = [];
+//Archive entries selected for processing
+let ENTRIES = [];
+//Total number of entries in the archive
+let ENTRIES_COUNT = 0;
+//Number of files to process
+let FILES_COUNT = 0;
+//Total size of the files to process
+let TOTAL_SIZE = 0;
+//Size of successfully processed files
+let PROCESSED_SIZE = 0;
+//Number of successfully processed files
+let PROCESSED_COUNT = 0;
+//Size of files processed with errors
+let ERROR_SIZE = 0;
+//Number of files processed with errors
+let ERROR_COUNT = 0;
+//Start time
+let START_TIME = null;
+//End time
+let END_TIME = null;
+//Execution log
+let LOAD_LOG = null;
+//Flag: archive fully unpacked
+let ZIP_UNPACKED = false;
+
+//------------
+// Module body
+//------------
+
+//Print overall statistics
+const printCommonStats = () => {
+    logWrn(`Всего элементов: ${ENTRIES_COUNT}, файлов для обработки: ${FILES_COUNT}`, MODULE, LOAD_LOG);
+    logWrn(`Объем файлов для обработки: ${TOTAL_SIZE} байт`, MODULE, LOAD_LOG);
+};
+
+//Print import statistics
+const printImportStats = () => {
+    logWrn(`Количество необработанных файлов: ${FILES_COUNT - ERROR_COUNT - PROCESSED_COUNT}`, MODULE, LOAD_LOG);
+    logWrn(`Объем необработанных файлов: ${TOTAL_SIZE - ERROR_SIZE - PROCESSED_SIZE} байт`, MODULE, LOAD_LOG);
+    logWrn(`Количество файлов, обработанных с ошибками: ${ERROR_COUNT}`, MODULE, LOAD_LOG);
+    logWrn(`Объем файлов, обработанных с ошибками: ${ERROR_SIZE} байт`, MODULE, LOAD_LOG);
+    logWrn(`Количество успешно обработанных файлов: ${PROCESSED_COUNT}`, MODULE, LOAD_LOG);
+    logWrn(`Объем успешно обработанных файлов: ${PROCESSED_SIZE} байт`, MODULE, LOAD_LOG);
+    logWrn(`Начало: ${START_TIME}`, MODULE, LOAD_LOG);
+    logWrn(`Окончание: ${END_TIME}`, MODULE, LOAD_LOG);
+    logWrn(`Длительность: ${(END_TIME.getTime() - START_TIME.getTime()) / 1000} секунд`, MODULE, LOAD_LOG);
+};
+
+//Remove a temporary file
+const removeTempFile = fileFullName => {
+    logInf(`Удаляю временный файл "${fileFullName}"...`, MODULE, LOAD_LOG);
+    fs.rm(fileFullName, { maxRetries: 5, retryDelay: 1000 }, err => {
+        if (err) logErr(`Ошибка удаления временного файла "${fileFullName}": ${err.message}`, MODULE, LOAD_LOG);
+        else logInf(`Удалено "${fileFullName}".`, MODULE, LOAD_LOG);
+    });
+};
+
+//Check whether an archive entry should be loaded
+const needLoad = ({ processedCount, entry, processLimit, processFilter }) =>
+    (processLimit === 0 || processedCount <= processLimit) &&
+    !entry.isDirectory &&
+    entry.name.toLowerCase().endsWith("xml") &&
+    (processFilter === null || (processFilter != null && entry.name.match(processFilter)));
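+//Example (hypothetical values): with processLimit = 0 (unlimited) and
+//processFilter = /AS_ADDR_OBJ/i, an entry named "01/AS_ADDR_OBJ_20230101.XML"
+//passes, while a directory entry or a non-XML file is rejected.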
+
+//Process the parse queue
+const processParseQueue = async () => {
+    //If the queue still has unprocessed items
+    if (PARSE_QUEUE.length > 0) {
+        //Take the next queue item
+        const { entry, fileFullName, fileName, garVersionInfo } = PARSE_QUEUE.shift();
+        //If the worker pool is running
+        if (WP.started) {
+            //Dispatch the task
+            try {
+                await WP.sendTask(makeTaskMessage({ payload: { garVersionInfo, fileFullName, fileName } }), (e, p) => {
+                    //Remove the temporary file
+                    removeTempFile(fileFullName);
+                    //On error
+                    if (e) {
+                        //Accumulate the size of files processed with errors
+                        ERROR_SIZE += entry.size;
+                        //Error count
+                        ERROR_COUNT++;
+                        //Error message
+                        let msg = `При обработке "${entry.name}": ${e.message}`;
+                        logErr(msg, MODULE, LOAD_LOG);
+                    } else {
+                        //Accumulate the size of successfully processed files
+                        PROCESSED_SIZE += entry.size;
+                        //Count of successfully processed files
+                        PROCESSED_COUNT++;
+                        logWrn(`Обработано успешно "${entry.name}".`, MODULE, LOAD_LOG);
+                    }
+                    logWrn(
+                        `Всего обработано: ${PROCESSED_SIZE + ERROR_SIZE} байт, ${Math.round(((PROCESSED_SIZE + ERROR_SIZE) / TOTAL_SIZE) * 100)}%`,
+                        MODULE,
+                        LOAD_LOG
+                    );
+                });
+            } catch (e) {
+                //Log the failure and remove the temporary file
+                logErr(`При размещении задачи для "${entry.name}": ${e.message}`, MODULE, LOAD_LOG);
+                removeTempFile(fileFullName);
+            }
+        } else {
+            //The worker pool is stopped (a forced shutdown command may have arrived)
+            logErr(`При размещении задачи для "${entry.name}": пул уже остановлен. Прекращаю работу.`, MODULE, LOAD_LOG);
+            removeTempFile(fileFullName);
+        }
+    }
+    if (PARSE_QUEUE.length > 0 || !ZIP_UNPACKED) setTimeout(processParseQueue, 0);
+};
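+//processParseQueue re-schedules itself via setTimeout(..., 0) until the queue
+//is drained and the archive is fully unpacked, so unpacking (which fills the
+//queue) and task dispatch interleave on the event loop.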
+
+//Convert an object to XML
+const toXML = obj => {
+    const builder = new xml2js.Builder();
+    return builder.buildObject(obj);
+};
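+//With the default xml2js Builder, an object with a single top-level key becomes
+//the root element; e.g. toXML({ GAR_VERSION: { IDENT: 1 } }) yields roughly:
+//<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+//<GAR_VERSION>
+//  <IDENT>1</IDENT>
+//</GAR_VERSION>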
+
+//Handler invoked after GAR update info has been received
+const afterLoad = async prms => {
+    if (!conf.common.sDownloadsDir) throw new Error(`Не указан путь для размещения загруженных файлов.`);
+    if (!conf.common.sTmpDir) throw new Error(`Не указан путь для размещения временных файлов.`);
+    if (!conf.common.sLogDir) throw new Error(`Не указан путь для размещения файлов протоколирования.`);
+    //Information about the data to load
+    const LOAD_INFO = {
+        REGIONS: prms.options.sRegions,
+        GARDATELAST: stringToDate(prms.options.dGarDateLast),
+        HOUSESLOADED: Number(prms.options.nHousesLoaded),
+        STEADSLOADED: Number(prms.options.nSteadsLoaded)
+    };
+    //If regions and the date of the last loaded GAR version are specified
+    if (LOAD_INFO.REGIONS && LOAD_INFO.GARDATELAST) {
+        //Identifiers of the performed load processes
+        let loadIdents = [];
+        //Logging identifier
+        const logIdent = Date.now();
+        //Open the execution log
+        LOAD_LOG = fs.createWriteStream(`${conf.common.sLogDir}/gar_load_${logIdent}.log`);
+        LOAD_LOG.on("error", e => {});
+        LOAD_LOG.on("close", () => {});
+        logInf("Протокол выполнения загрузки ГАР открыт.", MODULE, LOAD_LOG);
+        //GAR version info received from the service
+        const requestRespJson = JSON.parse(prms.queue.blResp.toString());
+        //Process the received GAR version info
+        logInf(`Обрабатываю полученную информацию о версиях ГАР...`, MODULE, LOAD_LOG);
+        //GAR versions to load
+        let garVersions = [];
+        //Regions
+        const regions = LOAD_INFO.REGIONS.split(";");
+        //Last loaded GAR version
+        const garDateLast = LOAD_INFO.GARDATELAST;
+        //Flag: houses are loaded
+        const housesLoaded = LOAD_INFO.HOUSESLOADED ? LOAD_INFO.HOUSESLOADED : 0;
+        //Flag: land plots (steads) are loaded
+        const steadsLoaded = LOAD_INFO.STEADSLOADED ? LOAD_INFO.STEADSLOADED : 0;
+        //If the last loaded GAR version is not specified
+        if (!garDateLast) throw new Error(`Не указана последняя загруженная версия ГАР, обновление недоступно.`);
+        //Iterate over the response elements
+        for (let respElement of requestRespJson) {
+            //GAR version date
+            const garVersionDate = stringToDate(respElement.Date);
+            //URL of the delta update data
+            const garXmlDeltaUrl = respElement.GarXMLDeltaURL;
+            //If both the date and the update URL are present
+            if (garVersionDate && garXmlDeltaUrl) {
+                //If the version was released after the last loaded one
+                if (garDateLast < garVersionDate) {
+                    //Remember this GAR version
+                    garVersions.push({
+                        versionDate: dateToISOString(garVersionDate),
+                        xmlDeltaUrl: garXmlDeltaUrl
+                    });
+                }
+            } else {
+                throw new Error(`Не удалось корректно определить информацию о версиях ГАР.`);
+            }
+        }
+        logInf(`Полученная информация о версиях ГАР обработана.`, MODULE, LOAD_LOG);
+        //If no GAR versions to load were found
+        if (!garVersions || garVersions.length == 0)
+            throw new Error(
+                `Не удалось определить необходимые для загрузки версии ГАР, вышедшие после ${garDateLast.toISOString().substring(0, 10)}.`
+            );
+        //Process the GAR versions
+        logInf(`Обрабатываю версии ГАР...`, MODULE, LOAD_LOG);
+        //Sort GAR versions ascending by date
+        garVersions.sort((a, b) => (a.versionDate > b.versionDate ? 1 : a.versionDate < b.versionDate ? -1 : 0));
+        //Worker pool
+        WP = new WorkersPool(workersPoolOptions);
+        //Start background workers
+        logInf(`Стартую обработчики...`, MODULE, LOAD_LOG);
+        await WP.start({
+            dbBuferSize: conf.dbConnect.nBufferSize,
+            fileChunkSize: conf.common.nFileChunkSize,
+            loadLog: LOAD_LOG,
+            dbConn: {
+                sUser: confServ.dbConnect.sUser,
+                sPassword: confServ.dbConnect.sPassword,
+                sConnectString: confServ.dbConnect.sConnectString,
+                sSchema: confServ.dbConnect.sSchema
+            }
+        });
+        logInf(`Обработчики запущены.`, MODULE, LOAD_LOG);
+        //GAR version being processed (the oldest pending version)
+        let garVersion = garVersions[0];
+        //Process the GAR version
+        logInf(`Обрабатываю версию ГАР "${garVersion.versionDate}"...`, MODULE, LOAD_LOG);
+        //Flag: the file needs to be downloaded
+        let downloadFlag = true;
+        //Full download path (temporary variable)
+        let fileFullNameTmp = `${conf.common.sDownloadsDir}/${garVersion.versionDate}.zip`;
+        //If the file was downloaded earlier
+        if (fs.existsSync(fileFullNameTmp)) {
+            logInf(`Файл "${fileFullNameTmp}" уже существует.`, MODULE, LOAD_LOG);
+            //If reusing the existing file is allowed
+            if (conf.common.bDownloadsUseExists) downloadFlag = false;
+            else fileFullNameTmp = `${conf.common.sDownloadsDir}/${garVersion.versionDate}_${logIdent}.zip`;
+        }
+        //Full download path
+        const fileFullName = fileFullNameTmp;
+        //If the file needs to be downloaded
+        if (downloadFlag) {
+            //Download the file
+            try {
+                logInf(`Загружаю файл по ссылке "${garVersion.xmlDeltaUrl}" в каталог "${conf.common.sDownloadsDir}"...`, MODULE, LOAD_LOG);
+                const streamPipeline = promisify(pipeline);
+                const fileData = await fetch(garVersion.xmlDeltaUrl, { redirect: "follow", follow: 20 });
+                if (!fileData.ok) throw new Error(`Не удалось загрузить файл по ссылке "${garVersion.xmlDeltaUrl}": ${fileData.statusText}.`);
+                await streamPipeline(fileData.body, fs.createWriteStream(fileFullName));
+                logInf(`Файл "${fileFullName}" загружен.`, MODULE, LOAD_LOG);
+            } catch (e) {
+                const errorMessage = `Ошибка загрузки файла по ссылке "${garVersion.xmlDeltaUrl}": ${e.message}.`;
+                logErr(errorMessage, MODULE, LOAD_LOG);
+                throw new Error(errorMessage);
+            }
+        }
+        //Reset state
+        ENTRIES = [];
+        TOTAL_SIZE = 0;
+        FILES_COUNT = 0;
+        PARSE_QUEUE = [];
+        ENTRIES_COUNT = 0;
+        PROCESSED_SIZE = 0;
+        PROCESSED_COUNT = 0;
+        ERROR_SIZE = 0;
+        ERROR_COUNT = 0;
+        START_TIME = null;
+        END_TIME = null;
+        ZIP_UNPACKED = false;
+        //Inspect the archive
+        logInf(`Читаю архив...`, MODULE, LOAD_LOG);
+        const zip = new StreamZip.async({ file: fileFullName });
+        const entries = await zip.entries();
+        //Iterate over the archive entries
+        for (const entry of Object.values(entries)) {
+            //Archive entry count
+            ENTRIES_COUNT++;
+            //Path of the file inside the archive
+            const path = entry.name.split("/");
+            //If the path depth is suitable (root level or one region directory deep)
+            if ([1, 2].includes(path.length)) {
+                //Region
+                const region = path.length == 2 ? path[0] : "";
+                //If the region (when present) is among the requested regions and the file matches the load conditions
+                if (
+                    (!region || !regions || regions.includes(region)) &&
+                    needLoad({
+                        processedCount: FILES_COUNT,
+                        entry,
+                        processLimit: conf.common.nLoadFilesLimit,
+                        processFilter: conf.common.sLoadFilesMask
+                    }) &&
+                    (housesLoaded == 1 || !path[path.length - 1].startsWith(`AS_HOUSES`)) &&
+                    (steadsLoaded == 1 || !path[path.length - 1].startsWith(`AS_STEADS`))
+                ) {
+                    //Count of files matching the load conditions
+                    FILES_COUNT++;
+                    //Total size of files matching the load conditions
+                    TOTAL_SIZE += entry.size;
+                    //Remember the entry
+                    ENTRIES.push(entry);
+                }
+            }
+        }
+        //Sort entries ascending by file size
+        ENTRIES.sort((a, b) => (a.size > b.size ? 1 : a.size < b.size ? -1 : 0));
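+        //Smallest files first: the pool reports progress early and stays evenly
+        //loaded while the larger files are still being unpacked.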
+ printCommonStats();
+ logInf(`Архив прочитан.`, MODULE, LOAD_LOG);
+        //Start processing the parse queue
+        setTimeout(processParseQueue, 0);
+        //Archive processing start time
+        START_TIME = new Date();
+        //Process identifier
+        const ident = Date.now();
+        //Directory for the archive's temporary files
+        const garVersionDir = `${conf.common.sTmpDir}/${garVersion.versionDate}`;
+        //If the temporary files directory does not exist
+        if (!fs.existsSync(garVersionDir)) {
+            //Create it
+            try {
+                fs.mkdirSync(garVersionDir);
+            } catch (e) {
+                throw new Error(`Не удалось создать директорию "${garVersionDir}": ${e.message}`);
+            }
+        }
+        //Iterate over the selected entries
+        for (const entry of ENTRIES) {
+            //Path of the archive entry
+            const path = entry.name.split("/");
+            //File name
+            const unzipFileName = path[path.length - 1];
+            //Region
+            const region = path.length == 2 ? path[0] : "";
+            //Full path of the unpacked file
+            const unzipFileFullName = `${garVersionDir}/${region ? `${region}/` : ""}${unzipFileName}`;
+            //If a region is specified and its directory does not yet exist, create it
+            if (region && !fs.existsSync(`${garVersionDir}/${region}`)) {
+                try {
+                    fs.mkdirSync(`${garVersionDir}/${region}`);
+                } catch (e) {
+                    throw new Error(`Не удалось создать директорию "${garVersionDir}/${region}": ${e.message}`);
+                }
+            }
+            //If the file has not been unpacked yet
+            if (!fs.existsSync(unzipFileFullName)) {
+                //Unpack the file
+                logInf(`Распаковываю "${entry.name}" (${entry.size} байт) в "${unzipFileFullName}"...`, MODULE, LOAD_LOG);
+                await zip.extract(entry.name, unzipFileFullName);
+                logInf(`Распаковано "${entry.name}" в "${unzipFileFullName}".`, MODULE, LOAD_LOG);
+            } else {
+                logInf(`Файл "${entry.name}" уже распакован в директорию "${garVersionDir}".`, MODULE, LOAD_LOG);
+            }
+            //Hand it over to a background worker
+            PARSE_QUEUE.push({
+                entry,
+                fileName: unzipFileName,
+                fileFullName: unzipFileFullName,
+                garVersionInfo: {
+                    ident,
+                    region,
+                    versionDate: garVersion.versionDate
+                }
+            });
+        }
+        //Close the archive
+        logInf("Закрываю архив...", MODULE, LOAD_LOG);
+        await zip.close();
+        logInf("Архив закрыт.", MODULE, LOAD_LOG);
+        //Mark the archive as fully unpacked
+        ZIP_UNPACKED = true;
+        //Wait until all background work completes
+        logInf("Жду завершения фоновой обработки...", MODULE, LOAD_LOG);
+        while (PARSE_QUEUE.length > 0 || WP.available != conf.common.nThreads) await new Promise(resolve => setTimeout(resolve, 1000));
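+        //The loop polls once per second until the queue is empty and every
+        //worker is idle again (WP.available back at the configured thread count).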
+ logInf("Фоновая обработка завершена.", MODULE, LOAD_LOG);
+        //Clean up the temporary files directory
+        logInf(`Очищаю директорию "${garVersionDir}" для размещения временных файлов...`, MODULE, LOAD_LOG);
+        fs.rmSync(garVersionDir, { recursive: true });
+        logInf(`Каталог "${garVersionDir}" для размещения временных файлов очищен.`, MODULE, LOAD_LOG);
+        //If downloaded files must be deleted
+        if (conf.common.bDownloadsDelete) {
+            logInf(`Удаляю загруженный файл "${fileFullName}"...`, MODULE, LOAD_LOG);
+            fs.unlinkSync(fileFullName);
+            logInf(`Загруженный файл "${fileFullName}" удален.`, MODULE, LOAD_LOG);
+        }
+        //Load end time
+        END_TIME = new Date();
+        printCommonStats();
+        printImportStats();
+        //If processing succeeded
+        if (ERROR_COUNT == 0) {
+            //Remember the processed version
+            loadIdents.push({
+                GAR_VERSION: {
+                    IDENT: ident,
+                    VERSION_DATE: garVersion.versionDate,
+                    REGIONS: LOAD_INFO.REGIONS,
+                    HOUSES_LOADED: housesLoaded,
+                    STEADS_LOADED: steadsLoaded
+                }
+            });
+            logInf(`Версия ГАР "${garVersion.versionDate}" обработана.`, MODULE, LOAD_LOG);
+        } else {
+            loadIdents = null;
+            logErr(`Версия ГАР "${garVersion.versionDate}" обработана с ошибками.`, MODULE, LOAD_LOG);
+        }
+        //Shut down the worker pool
+        logInf("Останавливаю обработчики...", MODULE, LOAD_LOG);
+        await WP.stop(LOAD_LOG);
+        WP = null;
+        logInf("Обработчики остановлены.", MODULE, LOAD_LOG);
+        logInf(`Версии ГАР обработаны.`, MODULE, LOAD_LOG);
+        //Close the execution log
+        logInf("Закрываю протокол выполнения загрузки ГАР...", MODULE, LOAD_LOG);
+        if (LOAD_LOG) LOAD_LOG.destroy();
+        if (!loadIdents) throw new Error(`Не удалось загрузить данные обновления ГАР.`);
+        //Return the result
+        return { blResp: Buffer.from(toXML(loadIdents[0])) };
+    } else {
+        throw new Error(`Не указан регион и/или дата для загрузки обновлений ГАР.`);
+    }
+};
+
+//-----------------
+// Module interface
+//-----------------
+
+exports.afterLoad = afterLoad;
diff --git a/modules/gar_config.js b/modules/gar_config.js
new file mode 100644
index 0000000..77483e5
--- /dev/null
+++ b/modules/gar_config.js
@@ -0,0 +1,45 @@
+/*
+ Integration service between the Parus 8 software suite and the WEB API
+ Add-on module: integration with GAR (the Russian State Address Register) - settings
+*/
+
+//--------------------------
+// Global identifiers
+//--------------------------
+
+//Common settings
+let common = {
+    //Number of import threads
+    nThreads: 11,
+    //Directory for downloaded data
+    sDownloadsDir: "./gar_downloads",
+    //Delete downloaded files
+    bDownloadsDelete: true,
+    //Reuse already existing files (if any)
+    bDownloadsUseExists: true,
+    //Directory for temporary data
+    sTmpDir: "./gar_tmp",
+    //Directory for log files
+    sLogDir: "./gar_logs",
+    //Buffer size for the file read stream (bytes)
+    nFileChunkSize: 256 * 1024,
+    //Number of files to process (0 - unlimited)
+    nLoadFilesLimit: 0,
+    //Mask of files to process (null - all; regular expressions are supported, e.g.: /AS_ROOM_TYPES_(\d{8})_(.*)/i /(.*)\/AS_STEADS_(\d{8})_(.*)/i /01\/(.*)/i)
+    sLoadFilesMask: null
+};
+
+//Database connection settings
+let dbConnect = {
+    //Buffer size for flushing to the DB (record count)
+    nBufferSize: 30000
+};
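+
+//Note: nBufferSize trades memory for round-trips - each import worker
+//accumulates up to this many parsed records before flushing them to the
+//database in a single call.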
+
+//-----------------
+// Module interface
+//-----------------
+
+module.exports = {
+ common,
+ dbConnect
+};
diff --git a/modules/gar_utils/import.js b/modules/gar_utils/import.js
new file mode 100644
index 0000000..97ca921
--- /dev/null
+++ b/modules/gar_utils/import.js
@@ -0,0 +1,225 @@
+/*
+ Integration service between the Parus 8 software suite and the WEB API
+ Add-on module: integration with GAR (the Russian State Address Register) - data import worker
+*/
+
+//------------------------------
+// External libraries
+//------------------------------
+
+const { workerData, parentPort } = require("worker_threads"); //Worker threads
+const fs = require("fs"); //File system access
+const oracledb = require("oracledb"); //Oracle database access
+const { WRK_MSG_TYPE, logInf, logErr, makeTaskOKResult, makeTaskErrResult, makeStopMessage } = require("./utils"); //Helper functions
+const { PARSERS, findModelByFileName } = require("./parsers"); //Models and parsers
+const sax = require("./node_modules/sax"); //Event-driven XML parser
+
+//--------------------------
+// Global identifiers
+//--------------------------
+
+//Module name for logging
+const MODULE = `GAR_INPUT_PROCESSOR_${workerData.number}`;
+//Flag: connected to the DB
+let CONNECTED = false;
+//Flag: the DB connection is in use
+let CONNECTION_IN_USE = false;
+//DB connection
+let CONNECTION = null;
+//Stop flag
+let STOP_FLAG = false;
+//Execution log
+let LOAD_LOG = null;
+
+//------------
+// Module body
+//------------
+
+//Connect to the DB
+const connectDb = async ({ user, password, connectString, schema }) => {
+    CONNECTION = await oracledb.getConnection({ user, password, connectString });
+    await CONNECTION.execute(`ALTER SESSION SET CURRENT_SCHEMA=${schema} RECYCLEBIN=OFF`);
+    CONNECTED = true;
+};
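+//CURRENT_SCHEMA lets the session resolve the import procedures without a schema
+//prefix; RECYCLEBIN=OFF stops Oracle from keeping recycle-bin copies of objects
+//dropped during the bulk load.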
+
+//Disconnect from the DB
+const disconnectDb = async () => {
+    while (CONNECTION_IN_USE) {
+        await new Promise(resolve => setTimeout(resolve, 0));
+    }
+    if (CONNECTION) {
+        await CONNECTION.close();
+        CONNECTION = null;
+    }
+    CONNECTED = false;
+};
+
+//Flush a buffer to the DB
+const saveBufferToDb = async (buffer, parser, insertProcedureName, ident, region) => {
+    if (!STOP_FLAG) {
+        CONNECTION_IN_USE = true;
+        try {
+            await parser.save(CONNECTION, ident, buffer, insertProcedureName, region);
+        } finally {
+            CONNECTION_IN_USE = false;
+        }
+    }
+};
+
+//Stream-parse a file
+const parseFile = ({ fileFullName, dbBuferSize, fileChunkSize, parser, insertProcedureName, ident, region }) => {
+    return new Promise((resolve, reject) => {
+        //Create the file read stream
+        const fsStream = fs.createReadStream(fileFullName, { highWaterMark: fileChunkSize });
+        //Create the SAX parser stream
+        const saxStream = sax.createStream(false);
+        //Buffer to flush to the DB
+        let buffer = [];
+        //Number of parsed elements
+        let cntItems = 0;
+        //Parser error
+        let parserErr = null;
+        //Last processed element
+        let lastItem = null;
+        //Parser event: error
+        saxStream.on("error", e => {
+            parserErr = e.message;
+        });
+        //Parser event: new element
+        saxStream.on("opentag", node => {
+            if (node.name == parser.element) {
+                cntItems++;
+                lastItem = node;
+                buffer.push(node);
+            }
+        });
+        //File event: a chunk was read
+        fsStream.on("data", chunk => {
+            if (!STOP_FLAG) {
+                saxStream.write(chunk);
+                if (buffer.length >= dbBuferSize) {
+                    fsStream.pause();
+                }
+                if (parserErr) fsStream.destroy();
+            } else fsStream.destroy();
+        });
+        //File event: reading paused
+        fsStream.on("pause", async () => {
+            if (buffer.length >= dbBuferSize) {
+                try {
+                    await saveBufferToDb(buffer, parser, insertProcedureName, ident, region);
+                } catch (e) {
+                    reject(e);
+                }
+                buffer = [];
+            }
+            if (!STOP_FLAG) fsStream.resume();
+            else fsStream.destroy();
+        });
+        //File event: read error
+        fsStream.on("error", error => reject(error));
+        //File event: stream closed
+        fsStream.on("close", async error => {
+            saxStream._parser.close();
+            if (!STOP_FLAG) {
+                if (buffer.length > 0) {
+                    try {
+                        await saveBufferToDb(buffer, parser, insertProcedureName, ident, region);
+                    } catch (e) {
+                        reject(e);
+                    }
+                    buffer = [];
+                }
+                if (parserErr)
+                    reject(
+                        Error(
+                            `Ошибка разбора данных: "${parserErr}". Разобрано элементов - ${cntItems}, последний разобранный: "${JSON.stringify(
+                                lastItem
+                            )}"`
+                        )
+                    );
+                else if (error) reject(error);
+                else resolve();
+            } else {
+                reject(Error("Обработчик остановлен принудительно"));
+            }
+        });
+    });
+};
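+//Backpressure: "data" feeds chunks to the SAX parser and pauses the file stream
+//once the buffer reaches dbBuferSize; the "pause" handler flushes the buffer to
+//the DB and resumes reading, keeping memory bounded to one buffer per worker.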
+
+//Handle a task message
+const processTask = async ({ garVersionInfo, fileFullName, fileName }) => {
+    const model = findModelByFileName(fileName);
+    if (model) {
+        await parseFile({
+            fileFullName,
+            dbBuferSize: workerData.dbBuferSize,
+            fileChunkSize: workerData.fileChunkSize,
+            parser: PARSERS[model.parser],
+            insertProcedureName: model.insertProcedureName,
+            ident: garVersionInfo.ident,
+            region: garVersionInfo.region
+        });
+    }
+    return true;
+};
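+//Files with no matching model are acknowledged as successful without parsing,
+//so unknown archive members do not fail the import.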
+
+//Subscribe to messages from the parent thread
+parentPort.on("message", async msg => {
+    //Open the execution log
+    if (!LOAD_LOG && workerData.loadLog) {
+        LOAD_LOG = fs.createWriteStream(JSON.parse(workerData.loadLog).path, { flags: "a" });
+        LOAD_LOG.on("error", e => {});
+        LOAD_LOG.on("close", () => {});
+    }
+    logInf(`Обработчик #${workerData.number} получил новое сообщение: ${JSON.stringify(msg)}`, MODULE, LOAD_LOG);
+    if (msg.type === WRK_MSG_TYPE.TASK) {
+        try {
+            //Connect to the DB on first use
+            const dbConn = workerData.dbConn;
+            if (!CONNECTED)
+                await connectDb({
+                    user: dbConn.sUser,
+                    password: dbConn.sPassword,
+                    connectString: dbConn.sConnectString,
+                    schema: dbConn.sSchema
+                });
+            let resp = await processTask({ ...msg.payload });
+            parentPort.postMessage(makeTaskOKResult(resp));
+        } catch (e) {
+            parentPort.postMessage(makeTaskErrResult(e));
+        }
+    } else {
+        if (msg.type === WRK_MSG_TYPE.STOP) {
+            //Set the stop flag
+            STOP_FLAG = true;
+            //Disconnect from the DB
+            try {
+                if (CONNECTED) await disconnectDb();
+            } catch (e) {
+                logErr(`При остановке обработчика: ${e.message}`, MODULE, LOAD_LOG);
+            }
+            //Release logging resources
+            if (LOAD_LOG) LOAD_LOG.destroy();
+            parentPort.postMessage(makeStopMessage());
+        } else {
+            parentPort.postMessage(makeTaskErrResult(Error(`Обработчик #${workerData.number} получил сообщение неподдерживаемого типа`)));
+        }
+    }
+});
+
+//Catch unexpected errors
+process.on("uncaughtException", e => {
+    logErr(`Неожиданная ошибка: ${e.message}`, MODULE, LOAD_LOG);
+    //Release logging resources
+    if (LOAD_LOG) LOAD_LOG.destroy();
+});
+
+//Catch unhandled rejections
+process.on("unhandledRejection", e => {
+    logErr(`Неожиданное прерывание: ${e.message}`, MODULE, LOAD_LOG);
+    //Release logging resources
+    if (LOAD_LOG) LOAD_LOG.destroy();
+});
diff --git a/modules/gar_utils/node_modules/node-fetch/LICENSE.md b/modules/gar_utils/node_modules/node-fetch/LICENSE.md
new file mode 100644
index 0000000..660ffec
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/LICENSE.md
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 David Frank
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/modules/gar_utils/node_modules/node-fetch/README.md b/modules/gar_utils/node_modules/node-fetch/README.md
new file mode 100644
index 0000000..4f87a59
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/README.md
@@ -0,0 +1,633 @@
+node-fetch
+==========
+
+[![npm version][npm-image]][npm-url]
+[![build status][travis-image]][travis-url]
+[![coverage status][codecov-image]][codecov-url]
+[![install size][install-size-image]][install-size-url]
+[![Discord][discord-image]][discord-url]
+
+A light-weight module that brings `window.fetch` to Node.js
+
+(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
+
+[![Backers][opencollective-image]][opencollective-url]
+
+
+
+- [Motivation](#motivation)
+- [Features](#features)
+- [Difference from client-side fetch](#difference-from-client-side-fetch)
+- [Installation](#installation)
+- [Loading and configuring the module](#loading-and-configuring-the-module)
+- [Common Usage](#common-usage)
+ - [Plain text or HTML](#plain-text-or-html)
+ - [JSON](#json)
+ - [Simple Post](#simple-post)
+ - [Post with JSON](#post-with-json)
+ - [Post with form parameters](#post-with-form-parameters)
+ - [Handling exceptions](#handling-exceptions)
+ - [Handling client and server errors](#handling-client-and-server-errors)
+- [Advanced Usage](#advanced-usage)
+ - [Streams](#streams)
+ - [Buffer](#buffer)
+ - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data)
+ - [Extract Set-Cookie Header](#extract-set-cookie-header)
+ - [Post data using a file stream](#post-data-using-a-file-stream)
+ - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart)
+ - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal)
+- [API](#api)
+ - [fetch(url[, options])](#fetchurl-options)
+ - [Options](#options)
+ - [Class: Request](#class-request)
+ - [Class: Response](#class-response)
+ - [Class: Headers](#class-headers)
+ - [Interface: Body](#interface-body)
+ - [Class: FetchError](#class-fetcherror)
+- [License](#license)
+- [Acknowledgement](#acknowledgement)
+
+
+
+## Motivation
+
+Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime.
+
+See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
+
+## Features
+
+- Stay consistent with `window.fetch` API.
+- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences.
+- Use native promise but allow substituting it with [insert your favorite promise library].
+- Use native Node streams for body on both request and response.
+- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
+- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
+
+## Difference from client-side fetch
+
+- See [Known Differences](LIMITS.md) for details.
+- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
+- Pull requests are welcomed too!
+
+## Installation
+
+Current stable release (`2.x`)
+
+```sh
+$ npm install node-fetch
+```
+
+## Loading and configuring the module
+We suggest you load the module via `require` until the stabilization of ES modules in node:
+```js
+const fetch = require('node-fetch');
+```
+
+If you are using a Promise library other than native, set it through `fetch.Promise`:
+```js
+const Bluebird = require('bluebird');
+
+fetch.Promise = Bluebird;
+```
+
+## Common Usage
+
+NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
+
+#### Plain text or HTML
+```js
+fetch('https://github.com/')
+ .then(res => res.text())
+ .then(body => console.log(body));
+```
+
+#### JSON
+
+```js
+
+fetch('https://api.github.com/users/github')
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Simple Post
+```js
+fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
+ .then(res => res.json()) // expecting a json response
+ .then(json => console.log(json));
+```
+
+#### Post with JSON
+
+```js
+const body = { a: 1 };
+
+fetch('https://httpbin.org/post', {
+ method: 'post',
+ body: JSON.stringify(body),
+ headers: { 'Content-Type': 'application/json' },
+ })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Post with form parameters
+`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
+
+NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such:
+
+```js
+const { URLSearchParams } = require('url');
+
+const params = new URLSearchParams();
+params.append('a', 1);
+
+fetch('https://httpbin.org/post', { method: 'POST', body: params })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Handling exceptions
+NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information.
+
+Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details.
+
+```js
+fetch('https://domain.invalid/')
+ .catch(err => console.error(err));
+```
+
+#### Handling client and server errors
+It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses:
+
+```js
+function checkStatus(res) {
+ if (res.ok) { // res.status >= 200 && res.status < 300
+ return res;
+ } else {
+ throw MyCustomError(res.statusText);
+ }
+}
+
+fetch('https://httpbin.org/status/400')
+ .then(checkStatus)
+ .then(res => console.log('will not get here...'))
+```
+
+## Advanced Usage
+
+#### Streams
+The "Node.js way" is to use streams when possible:
+
+```js
+fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
+ .then(res => {
+ const dest = fs.createWriteStream('./octocat.png');
+ res.body.pipe(dest);
+ });
+```
+
+In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch
+errors -- the longer a response runs, the more likely it is to encounter an error.
+
+```js
+const fetch = require('node-fetch');
+const response = await fetch('https://httpbin.org/stream/3');
+try {
+ for await (const chunk of response.body) {
+ console.dir(JSON.parse(chunk.toString()));
+ }
+} catch (err) {
+ console.error(err.stack);
+}
+```
+
+In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams
+did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors
+directly from the stream and wait on the response to fully close.
+
+```js
+const fetch = require('node-fetch');
+const read = async body => {
+ let error;
+ body.on('error', err => {
+ error = err;
+ });
+ for await (const chunk of body) {
+ console.dir(JSON.parse(chunk.toString()));
+ }
+ return new Promise((resolve, reject) => {
+ body.on('close', () => {
+ error ? reject(error) : resolve();
+ });
+ });
+};
+try {
+ const response = await fetch('https://httpbin.org/stream/3');
+ await read(response.body);
+} catch (err) {
+ console.error(err.stack);
+}
+```
+
+#### Buffer
+If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API)
+
+```js
+const fileType = require('file-type');
+
+fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
+ .then(res => res.buffer())
+ .then(buffer => fileType(buffer))
+ .then(type => { /* ... */ });
+```
+
+#### Accessing Headers and other Meta data
+```js
+fetch('https://github.com/')
+ .then(res => {
+ console.log(res.ok);
+ console.log(res.status);
+ console.log(res.statusText);
+ console.log(res.headers.raw());
+ console.log(res.headers.get('content-type'));
+ });
+```
+
+#### Extract Set-Cookie Header
+
+Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API.
+
+```js
+fetch(url).then(res => {
+ // returns an array of values, instead of a string of comma-separated values
+ console.log(res.headers.raw()['set-cookie']);
+});
+```
+
+#### Post data using a file stream
+
+```js
+const { createReadStream } = require('fs');
+
+const stream = createReadStream('input.txt');
+
+fetch('https://httpbin.org/post', { method: 'POST', body: stream })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Post with form-data (detect multipart)
+
+```js
+const FormData = require('form-data');
+
+const form = new FormData();
+form.append('a', 1);
+
+fetch('https://httpbin.org/post', { method: 'POST', body: form })
+ .then(res => res.json())
+ .then(json => console.log(json));
+
+// OR, using custom headers
+// NOTE: getHeaders() is non-standard API
+
+const form = new FormData();
+form.append('a', 1);
+
+const options = {
+ method: 'POST',
+ body: form,
+ headers: form.getHeaders()
+}
+
+fetch('https://httpbin.org/post', options)
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Request cancellation with AbortSignal
+
+> NOTE: You may cancel streamed requests only on Node >= v8.0.0
+
+You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
+
+An example of timing out a request after 150ms could be achieved as the following:
+
+```js
+import AbortController from 'abort-controller';
+
+const controller = new AbortController();
+const timeout = setTimeout(
+ () => { controller.abort(); },
+ 150,
+);
+
+fetch(url, { signal: controller.signal })
+ .then(res => res.json())
+ .then(
+ data => {
+ useData(data)
+ },
+ err => {
+ if (err.name === 'AbortError') {
+ // request was aborted
+ }
+ },
+ )
+ .finally(() => {
+ clearTimeout(timeout);
+ });
+```
+
+See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
+
+
+## API
+
+### fetch(url[, options])
+
+- `url` A string representing the URL for fetching
+- `options` [Options](#fetch-options) for the HTTP(S) request
+- Returns: Promise<[Response](#class-response)>
+
+Perform an HTTP(S) fetch.
+
+`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`.
+
+
+### Options
+
+The default values are shown after each option key.
+
+```js
+{
+ // These properties are part of the Fetch Standard
+ method: 'GET',
+ headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
+ body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
+ redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
+ signal: null, // pass an instance of AbortSignal to optionally abort requests
+
+ // The following properties are node-fetch extensions
+ follow: 20, // maximum redirect count. 0 to not follow redirect
+ timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
+ compress: true, // support gzip/deflate content encoding. false to disable
+ size: 0, // maximum response body size in bytes. 0 to disable
+ agent: null // http(s).Agent instance or function that returns an instance (see below)
+}
+```
+
+##### Default Headers
+
+If no values are set, the following request headers will be sent automatically:
+
+Header | Value
+------------------- | --------------------------------------------------------
+`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
+`Accept` | `*/*`
+`Connection` | `close` _(when no `options.agent` is present)_
+`Content-Length` | _(automatically calculated, if possible)_
+`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
+`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
+
+Note: when `body` is a `Stream`, `Content-Length` is not set automatically.
+
+##### Custom Agent
+
+The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following:
+
+- Support self-signed certificate
+- Use only IPv4 or IPv6
+- Custom DNS Lookup
+
+See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information.
+
+In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol.
+
+```js
+const httpAgent = new http.Agent({
+ keepAlive: true
+});
+const httpsAgent = new https.Agent({
+ keepAlive: true
+});
+
+const options = {
+ agent: function (_parsedURL) {
+ if (_parsedURL.protocol == 'http:') {
+ return httpAgent;
+ } else {
+ return httpsAgent;
+ }
+ }
+}
+```
+
+
+### Class: Request
+
+An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
+
+Due to the nature of Node.js, the following properties are not implemented at this moment:
+
+- `type`
+- `destination`
+- `referrer`
+- `referrerPolicy`
+- `mode`
+- `credentials`
+- `cache`
+- `integrity`
+- `keepalive`
+
+The following node-fetch extension properties are provided:
+
+- `follow`
+- `compress`
+- `counter`
+- `agent`
+
+See [options](#fetch-options) for exact meaning of these extensions.
+
+#### new Request(input[, options])
+
+*(spec-compliant)*
+
+- `input` A string representing a URL, or another `Request` (which will be cloned)
+- `options` [Options][#fetch-options] for the HTTP(S) request
+
+Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
+
+In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object.
+
+
+### Class: Response
+
+An HTTP(S) response. This class implements the [Body](#iface-body) interface.
+
+The following properties are not implemented in node-fetch at this moment:
+
+- `Response.error()`
+- `Response.redirect()`
+- `type`
+- `trailer`
+
+#### new Response([body[, options]])
+
+*(spec-compliant)*
+
+- `body` A `String` or [`Readable` stream][node-readable]
+- `options` A [`ResponseInit`][response-init] options dictionary
+
+Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
+
+Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
+
+#### response.ok
+
+*(spec-compliant)*
+
+Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
+
+#### response.redirected
+
+*(spec-compliant)*
+
+Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
+
+
+### Class: Headers
+
+This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
+
+#### new Headers([init])
+
+*(spec-compliant)*
+
+- `init` Optional argument to pre-fill the `Headers` object
+
+Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, a key-value map object or any iterable object.
+
+```js
+// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
+
+const meta = {
+ 'Content-Type': 'text/xml',
+ 'Breaking-Bad': '<3'
+};
+const headers = new Headers(meta);
+
+// The above is equivalent to
+const meta = [
+ [ 'Content-Type', 'text/xml' ],
+ [ 'Breaking-Bad', '<3' ]
+];
+const headers = new Headers(meta);
+
+// You can in fact use any iterable objects, like a Map or even another Headers
+const meta = new Map();
+meta.set('Content-Type', 'text/xml');
+meta.set('Breaking-Bad', '<3');
+const headers = new Headers(meta);
+const copyOfHeaders = new Headers(headers);
+```
+
+
+### Interface: Body
+
+`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
+
+The following methods are not yet implemented in node-fetch at this moment:
+
+- `formData()`
+
+#### body.body
+
+*(deviation from spec)*
+
+* Node.js [`Readable` stream][node-readable]
+
+Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
+
+#### body.bodyUsed
+
+*(spec-compliant)*
+
+* `Boolean`
+
+A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again.
+
+#### body.arrayBuffer()
+#### body.blob()
+#### body.json()
+#### body.text()
+
+*(spec-compliant)*
+
+* Returns: Promise
+
+Consume the body and return a promise that will resolve to one of these formats.
+
+#### body.buffer()
+
+*(node-fetch extension)*
+
+* Returns: Promise<Buffer>
+
+Consume the body and return a promise that will resolve to a Buffer.
+
+#### body.textConverted()
+
+*(node-fetch extension)*
+
+* Returns: Promise<String>
+
+Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible.
+
+(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
+
+
+### Class: FetchError
+
+*(node-fetch extension)*
+
+An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
+
+
+### Class: AbortError
+
+*(node-fetch extension)*
+
+An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info.
+
+## Acknowledgement
+
+Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
+
+`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
+
+## License
+
+MIT
+
+[npm-image]: https://flat.badgen.net/npm/v/node-fetch
+[npm-url]: https://www.npmjs.com/package/node-fetch
+[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
+[travis-url]: https://travis-ci.org/bitinn/node-fetch
+[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master
+[codecov-url]: https://codecov.io/gh/bitinn/node-fetch
+[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
+[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
+[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square
+[discord-url]: https://discord.gg/Zxbndcm
+[opencollective-image]: https://opencollective.com/node-fetch/backers.svg
+[opencollective-url]: https://opencollective.com/node-fetch
+[whatwg-fetch]: https://fetch.spec.whatwg.org/
+[response-init]: https://fetch.spec.whatwg.org/#responseinit
+[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
+[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
+[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
+[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
+[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md
diff --git a/modules/gar_utils/node_modules/node-fetch/browser.js b/modules/gar_utils/node_modules/node-fetch/browser.js
new file mode 100644
index 0000000..ee86265
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/browser.js
@@ -0,0 +1,25 @@
+"use strict";
+
+// ref: https://github.com/tc39/proposal-global
+var getGlobal = function () {
+ // the only reliable means to get the global object is
+ // `Function('return this')()`
+ // However, this causes CSP violations in Chrome apps.
+ if (typeof self !== 'undefined') { return self; }
+ if (typeof window !== 'undefined') { return window; }
+ if (typeof global !== 'undefined') { return global; }
+ throw new Error('unable to locate global object');
+}
+
+var globalObject = getGlobal();
+
+module.exports = exports = globalObject.fetch;
+
+// Needed for TypeScript and Webpack.
+if (globalObject.fetch) {
+ exports.default = globalObject.fetch.bind(globalObject);
+}
+
+exports.Headers = globalObject.Headers;
+exports.Request = globalObject.Request;
+exports.Response = globalObject.Response;
diff --git a/modules/gar_utils/node_modules/node-fetch/lib/index.es.js b/modules/gar_utils/node_modules/node-fetch/lib/index.es.js
new file mode 100644
index 0000000..79d717b
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/lib/index.es.js
@@ -0,0 +1,1778 @@
+process.emitWarning("The .es.js file is deprecated. Use .mjs instead.");
+
+import Stream from 'stream';
+import http from 'http';
+import Url from 'url';
+import whatwgUrl from 'whatwg-url';
+import https from 'https';
+import zlib from 'zlib';
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
+    if (!res) {
+      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
+      if (res) {
+        res.pop(); // drop last quote
+      }
+    }
+
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop());
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop();
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030';
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(buffer, 'UTF-8', charset).toString();
+}
+
+/**
+ * Detect a URLSearchParams object
+ * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
+ *
+ * @param Object obj Object to detect by type or brand
+ * @return String
+ */
+function isURLSearchParams(obj) {
+  // Duck-typing as a necessary condition.
+  if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
+    return false;
+  }
+
+  // Brand-checking and more duck-typing as optional condition.
+  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
+}
+
+/**
+ * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
+ * @param {*} obj
+ * @return {boolean}
+ */
+function isBlob(obj) {
+  return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
+}
+
+/**
+ * Clone body given Res/Req instance
+ *
+ * @param Mixed instance Response or Request instance
+ * @return Mixed
+ */
+function clone(instance) {
+  let p1, p2;
+  let body = instance.body;
+
+  // don't allow cloning a used body
+  if (instance.bodyUsed) {
+    throw new Error('cannot clone body after it is used');
+  }
+
+  // check that body is a stream and not form-data object
+  // note: we can't clone the form-data object without having it as a dependency
+  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
+    // tee instance body
+    p1 = new PassThrough();
+    p2 = new PassThrough();
+    body.pipe(p1);
+    body.pipe(p2);
+    // set instance body to teed body and return the other teed body
+    instance[INTERNALS].body = p1;
+    body = p2;
+  }
+
+  return body;
+}
+
+/**
+ * Performs the operation "extract a `Content-Type` value from |object|" as
+ * specified in the specification:
+ * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+ *
+ * This function assumes that instance.body is present.
+ *
+ * @param Mixed instance Any options.body input
+ */
+function extractContentType(body) {
+  if (body === null) {
+    // body is null
+    return null;
+  } else if (typeof body === 'string') {
+    // body is string
+    return 'text/plain;charset=UTF-8';
+  } else if (isURLSearchParams(body)) {
+    // body is a URLSearchParams
+    return 'application/x-www-form-urlencoded;charset=UTF-8';
+  } else if (isBlob(body)) {
+    // body is blob
+    return body.type || null;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return null;
+  } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+    // body is ArrayBuffer
+    return null;
+  } else if (ArrayBuffer.isView(body)) {
+    // body is ArrayBufferView
+    return null;
+  } else if (typeof body.getBoundary === 'function') {
+    // detect form data input from form-data module
+    return `multipart/form-data;boundary=${body.getBoundary()}`;
+  } else if (body instanceof Stream) {
+    // body is stream - can't really do much about this
+    return null;
+  } else {
+    // Body constructor defaults other things to string
+    return 'text/plain;charset=UTF-8';
+  }
+}
+
+/**
+ * The Fetch Standard treats this as if "total bytes" is a property on the body.
+ * For us, we have to explicitly get it with a function.
+ *
+ * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
+ *
+ * @param Body instance Instance of Body
+ * @return Number? Number of bytes, or null if not possible
+ */
+function getTotalBytes(instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    return 0;
+  } else if (isBlob(body)) {
+    return body.size;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return body.length;
+  } else if (body && typeof body.getLengthSync === 'function') {
+    // detect form data input from form-data module
+    if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
+    body.hasKnownLength && body.hasKnownLength()) {
+      // 2.x
+      return body.getLengthSync();
+    }
+    return null;
+  } else {
+    // body is stream
+    return null;
+  }
+}
+
+/**
+ * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
+ *
+ * @param Body instance Instance of Body
+ * @return Void
+ */
+function writeToStream(dest, instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    dest.end();
+  } else if (isBlob(body)) {
+    body.stream().pipe(dest);
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    dest.write(body);
+    dest.end();
+  } else {
+    // body is stream
+    body.pipe(dest);
+  }
+}
+
+// expose Promise
+Body.Promise = global.Promise;
+
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+  name = `${name}`;
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`);
+  }
+}
+
+function validateValue(value) {
+  value = `${value}`;
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`);
+  }
+}
+
+/**
+ * Find the key in the map object given a header name.
+ *
+ * Returns undefined if not found.
+ *
+ * @param String name Header name
+ * @return String|Undefined
+ */
+function find(map, name) {
+  name = name.toLowerCase();
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key;
+    }
+  }
+  return undefined;
+}
+
+const MAP = Symbol('map');
+
+class Headers {
+  /**
+   * Headers class
+   *
+   * @param Object headers Response headers
+   * @return Void
+   */
+  constructor() {
+    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+      // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+   * @param Object thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
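+    // getHeaders() is re-read on every pass so the callback observes any
+    // headers it adds or deletes while iterating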
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
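+// inherit from the built-in %IteratorPrototype% (reached through an array
+// iterator) so the headers iterator behaves like native iterators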
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+   * Convenience property representing whether the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+/**
+ * Wrapper around `new URL` to handle arbitrary URLs
+ *
+ * @param {string} urlStr
+ * @return {void}
+ */
+function parseURL(urlStr) {
+ /*
+ Check whether the URL is absolute or not
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
+ urlStr = new URL(urlStr).toString();
+ }
+
+ // Fallback to old implementation for arbitrary URLs
+ return parse_url(urlStr);
+}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parseURL(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parseURL(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
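+    // take the body from init when provided, otherwise tee it off the input
+    // Request via clone(), which refuses to reuse an already-consumed body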
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
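+/**
+ * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of
+ * the parent domain.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */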
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+        // let the user extract the erroneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
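+          // a zlib (RFC 1950) stream starts with a CMF byte whose low nibble
+          // is 8 (CM = deflate); anything else is assumed to be raw deflate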
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
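+
+/**
+ * Work around chunked transfer responses that close without a terminating
+ * zero-length chunk: if the response closes cleanly while a 'data' listener
+ * is still attached, report a synthetic ERR_STREAM_PREMATURE_CLOSE error
+ * through the supplied callback.
+ */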
+function fixResponseChunkedTransferBadEnding(request, errorCallback) {
+ let socket;
+
+ request.on('socket', function (s) {
+ socket = s;
+ });
+
+ request.on('response', function (response) {
+ const headers = response.headers;
+
+ if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
+ response.once('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = socket.listenerCount('data') > 0;
+
+ if (hasDataListener && !hadError) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ errorCallback(err);
+ }
+ });
+ }
+ });
+}
+
+function destroyStream(stream, err) {
+ if (stream.destroy) {
+ stream.destroy(err);
+ } else {
+ // node < 8
+ stream.emit('error', err);
+ stream.end();
+ }
+}
+
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+export default fetch;
+export { Headers, Request, Response, FetchError };
diff --git a/modules/gar_utils/node_modules/node-fetch/lib/index.js b/modules/gar_utils/node_modules/node-fetch/lib/index.js
new file mode 100644
index 0000000..337d6e5
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/lib/index.js
@@ -0,0 +1,1787 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var Stream = _interopDefault(require('stream'));
+var http = _interopDefault(require('http'));
+var Url = _interopDefault(require('url'));
+var whatwgUrl = _interopDefault(require('whatwg-url'));
+var https = _interopDefault(require('https'));
+var zlib = _interopDefault(require('zlib'));
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
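+// Minimal Blob implementation backed by a single concatenated Buffer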
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
+    if (!res) {
+      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
+      if (res) {
+        res.pop(); // drop last quote
+      }
+    }
+
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop());
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop();
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030';
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(buffer, 'UTF-8', charset).toString();
+}
+
+/**
+ * Detect a URLSearchParams object
+ * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
+ *
+ * @param Object obj Object to detect by type or brand
+ * @return String
+ */
+function isURLSearchParams(obj) {
+  // Duck-typing as a necessary condition.
+  if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
+    return false;
+  }
+
+  // Brand-checking and more duck-typing as optional condition.
+  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
+}
+
+/**
+ * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
+ * @param {*} obj
+ * @return {boolean}
+ */
+function isBlob(obj) {
+  return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
+}
+
+/**
+ * Clone body given Res/Req instance
+ *
+ * @param Mixed instance Response or Request instance
+ * @return Mixed
+ */
+function clone(instance) {
+  let p1, p2;
+  let body = instance.body;
+
+  // don't allow cloning a used body
+  if (instance.bodyUsed) {
+    throw new Error('cannot clone body after it is used');
+  }
+
+  // check that body is a stream and not form-data object
+  // note: we can't clone the form-data object without having it as a dependency
+  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
+    // tee instance body
+    p1 = new PassThrough();
+    p2 = new PassThrough();
+    body.pipe(p1);
+    body.pipe(p2);
+    // set instance body to teed body and return the other teed body
+    instance[INTERNALS].body = p1;
+    body = p2;
+  }
+
+  return body;
+}
+
+/**
+ * Performs the operation "extract a `Content-Type` value from |object|" as
+ * specified in the specification:
+ * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+ *
+ * This function assumes that instance.body is present.
+ *
+ * @param Mixed instance Any options.body input
+ */
+function extractContentType(body) {
+  if (body === null) {
+    // body is null
+    return null;
+  } else if (typeof body === 'string') {
+    // body is string
+    return 'text/plain;charset=UTF-8';
+  } else if (isURLSearchParams(body)) {
+    // body is a URLSearchParams
+    return 'application/x-www-form-urlencoded;charset=UTF-8';
+  } else if (isBlob(body)) {
+    // body is blob
+    return body.type || null;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return null;
+  } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+    // body is ArrayBuffer
+    return null;
+  } else if (ArrayBuffer.isView(body)) {
+    // body is ArrayBufferView
+    return null;
+  } else if (typeof body.getBoundary === 'function') {
+    // detect form data input from form-data module
+    return `multipart/form-data;boundary=${body.getBoundary()}`;
+  } else if (body instanceof Stream) {
+    // body is stream - can't really do much about this
+    return null;
+  } else {
+    // Body constructor defaults other things to string
+    return 'text/plain;charset=UTF-8';
+  }
+}
+
+/**
+ * The Fetch Standard treats this as if "total bytes" is a property on the body.
+ * For us, we have to explicitly get it with a function.
+ *
+ * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
+ *
+ * @param Body instance Instance of Body
+ * @return Number? Number of bytes, or null if not possible
+ */
+function getTotalBytes(instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    return 0;
+  } else if (isBlob(body)) {
+    return body.size;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return body.length;
+  } else if (body && typeof body.getLengthSync === 'function') {
+    // detect form data input from form-data module
+    if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
+    body.hasKnownLength && body.hasKnownLength()) {
+      // 2.x
+      return body.getLengthSync();
+    }
+    return null;
+  } else {
+    // body is stream
+    return null;
+  }
+}
+
+/**
+ * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
+ *
+ * @param Body instance Instance of Body
+ * @return Void
+ */
+function writeToStream(dest, instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    dest.end();
+  } else if (isBlob(body)) {
+    body.stream().pipe(dest);
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    dest.write(body);
+    dest.end();
+  } else {
+    // body is stream
+    body.pipe(dest);
+  }
+}
+
+// expose Promise
+Body.Promise = global.Promise;
+
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+  name = `${name}`;
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`);
+  }
+}
+
+function validateValue(value) {
+  value = `${value}`;
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`);
+  }
+}
+
+/**
+ * Find the key in the map object given a header name.
+ *
+ * Returns undefined if not found.
+ *
+ * @param String name Header name
+ * @return String|Undefined
+ */
+function find(map, name) {
+  name = name.toLowerCase();
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key;
+    }
+  }
+  return undefined;
+}
+
+const MAP = Symbol('map');
+
+class Headers {
+  /**
+   * Headers class
+   *
+   * @param Object headers Response headers
+   * @return Void
+   */
+  constructor() {
+    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+      // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+   * @param Object thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
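+    // getHeaders() is re-read on every pass so the callback observes any
+    // headers it adds or deletes while iterating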
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
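+// inherit from the built-in %IteratorPrototype% (reached through an array
+// iterator) so the headers iterator behaves like native iterators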
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+   * Convenience property representing whether the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+/**
+ * Wrapper around `new URL` to handle arbitrary URLs
+ *
+ * @param {string} urlStr
+ * @return {void}
+ */
+function parseURL(urlStr) {
+ /*
+ Check whether the URL is absolute or not
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
+ urlStr = new URL(urlStr).toString();
+ }
+
+ // Fallback to old implementation for arbitrary URLs
+ return parse_url(urlStr);
+}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parseURL(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parseURL(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
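+    // take the body from init when provided, otherwise tee it off the input
+    // Request via clone(), which refuses to reuse an already-consumed body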
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
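+  // true when original's host equals destination's host, or ends with '.' + destination's host
+  // (i.e. original is a subdomain of destination); used below to decide whether
+  // credentials-bearing headers survive a redirect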
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+        // let the user extract the erroneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+            // istanbul ignore next: Node.js servers prevent invalid response headers, so we can't test this through a normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
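+        // a zlib-wrapped deflate stream starts with a CMF byte whose low nibble is 8 (CM = 8, RFC 1950);
+        // raw deflate output does not, so pick the matching decompressor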
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
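+// Work around servers that close a chunked response without sending the terminating zero-length chunk:
+// if the socket closes while a 'data' listener is still attached and no error occurred,
+// report a premature close to the caller.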
+function fixResponseChunkedTransferBadEnding(request, errorCallback) {
+ let socket;
+
+ request.on('socket', function (s) {
+ socket = s;
+ });
+
+ request.on('response', function (response) {
+ const headers = response.headers;
+
+ if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
+ response.once('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = socket.listenerCount('data') > 0;
+
+ if (hasDataListener && !hadError) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ errorCallback(err);
+ }
+ });
+ }
+ });
+}
+
+function destroyStream(stream, err) {
+ if (stream.destroy) {
+ stream.destroy(err);
+ } else {
+ // node < 8
+ stream.emit('error', err);
+ stream.end();
+ }
+}
+
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
diff --git a/modules/gar_utils/node_modules/node-fetch/lib/index.mjs b/modules/gar_utils/node_modules/node-fetch/lib/index.mjs
new file mode 100644
index 0000000..ace669f
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/lib/index.mjs
@@ -0,0 +1,1776 @@
+import Stream from 'stream';
+import http from 'http';
+import Url from 'url';
+import whatwgUrl from 'whatwg-url';
+import https from 'https';
+import zlib from 'zlib';
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
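+  // normalize the supported body types to Buffer or Stream; the bare ';' branches below
+  // (Blob, Buffer, Stream) intentionally leave the body as-is and are an artifact of the generated build output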
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
+
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop());
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop();
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030';
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(buffer, 'UTF-8', charset).toString();
+}
+
+/**
+ * Detect a URLSearchParams object
+ * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
+ *
+ * @param Object obj Object to detect by type or brand
+ * @return String
+ */
+function isURLSearchParams(obj) {
+  // Duck-typing as a necessary condition.
+  if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
+    return false;
+  }
+
+  // Brand-checking and more duck-typing as optional condition.
+  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
+}
+
+/**
+ * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
+ * @param {*} obj
+ * @return {boolean}
+ */
+function isBlob(obj) {
+  return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
+}
+
+/**
+ * Clone body given Res/Req instance
+ *
+ * @param Mixed instance Response or Request instance
+ * @return Mixed
+ */
+function clone(instance) {
+  let p1, p2;
+  let body = instance.body;
+
+  // don't allow cloning a used body
+  if (instance.bodyUsed) {
+    throw new Error('cannot clone body after it is used');
+  }
+
+  // check that body is a stream and not form-data object
+  // note: we can't clone the form-data object without having it as a dependency
+  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
+    // tee instance body
+    p1 = new PassThrough();
+    p2 = new PassThrough();
+    body.pipe(p1);
+    body.pipe(p2);
+    // set instance body to teed body and return the other teed body
+    instance[INTERNALS].body = p1;
+    body = p2;
+  }
+
+  return body;
+}
+
+/**
+ * Performs the operation "extract a `Content-Type` value from |object|" as
+ * specified in the specification:
+ * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+ *
+ * This function assumes that instance.body is present.
+ *
+ * @param Mixed instance Any options.body input
+ */
+function extractContentType(body) {
+  if (body === null) {
+    // body is null
+    return null;
+  } else if (typeof body === 'string') {
+    // body is string
+    return 'text/plain;charset=UTF-8';
+  } else if (isURLSearchParams(body)) {
+    // body is a URLSearchParams
+    return 'application/x-www-form-urlencoded;charset=UTF-8';
+  } else if (isBlob(body)) {
+    // body is blob
+    return body.type || null;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return null;
+  } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+    // body is ArrayBuffer
+    return null;
+  } else if (ArrayBuffer.isView(body)) {
+    // body is ArrayBufferView
+    return null;
+  } else if (typeof body.getBoundary === 'function') {
+    // detect form data input from form-data module
+    return `multipart/form-data;boundary=${body.getBoundary()}`;
+  } else if (body instanceof Stream) {
+    // body is stream - can't really do much about this
+    return null;
+  } else {
+    // Body constructor defaults other things to string
+    return 'text/plain;charset=UTF-8';
+  }
+}
+
+/**
+ * The Fetch Standard treats this as if "total bytes" is a property on the body.
+ * For us, we have to explicitly get it with a function.
+ *
+ * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
+ *
+ * @param Body instance Instance of Body
+ * @return Number? Number of bytes, or null if not possible
+ */
+function getTotalBytes(instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    return 0;
+  } else if (isBlob(body)) {
+    return body.size;
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    return body.length;
+  } else if (body && typeof body.getLengthSync === 'function') {
+    // detect form data input from form-data module
+    if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
+    body.hasKnownLength && body.hasKnownLength()) {
+      // 2.x
+      return body.getLengthSync();
+    }
+    return null;
+  } else {
+    // body is stream
+    return null;
+  }
+}
+
+/**
+ * Write a Body to a Node.js WritableStream (e.g. http.request) object.
+ *
+ * @param Body instance Instance of Body
+ * @return Void
+ */
+function writeToStream(dest, instance) {
+  const body = instance.body;
+
+  if (body === null) {
+    // body is null
+    dest.end();
+  } else if (isBlob(body)) {
+    body.stream().pipe(dest);
+  } else if (Buffer.isBuffer(body)) {
+    // body is buffer
+    dest.write(body);
+    dest.end();
+  } else {
+    // body is stream
+    body.pipe(dest);
+  }
+}
+
+// expose Promise
+Body.Promise = global.Promise;
+
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+  name = `${name}`;
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`);
+  }
+}
+
+function validateValue(value) {
+  value = `${value}`;
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`);
+  }
+}
+
+/**
+ * Find the key in the map object given a header name.
+ *
+ * Returns undefined if not found.
+ *
+ * @param String name Header name
+ * @return String|Undefined
+ */
+function find(map, name) {
+  name = name.toLowerCase();
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key;
+    }
+  }
+  return undefined;
+}
+
+const MAP = Symbol('map');
+class Headers {
+  /**
+   * Headers class
+   *
+   * @param Object headers Response headers
+   * @return Void
+   */
+  constructor() {
+    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+      // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
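+      // re-read the header list on every pass so the callback may safely mutate headers during iteration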
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
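+  // returns headers sorted by raw key; 'kind' selects lower-cased names, comma-joined values, or [name, value] pairs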
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
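+// the iterator prototype above inherits from %IteratorPrototype% (reached via an array iterator),
+// so headers iterators behave like built-in iterators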
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+/**
+ * Wrapper around `new URL` to handle arbitrary URLs
+ *
+ * @param {string} urlStr
+ * @return {void}
+ */
+function parseURL(urlStr) {
+ /*
+ Check whether the URL is absolute or not
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
+ urlStr = new URL(urlStr).toString();
+ }
+
+ // Fallback to old implementation for arbitrary URLs
+ return parse_url(urlStr);
+}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parseURL(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parseURL(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
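+  // true when original's host equals destination's host, or ends with '.' + destination's host
+  // (i.e. original is a subdomain of destination); used below to decide whether
+  // credentials-bearing headers survive a redirect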
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+        // let the user extract the erroneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+            // istanbul ignore next: Node.js servers prevent invalid response headers, so we can't test this through a normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
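+        // a zlib-wrapped deflate stream starts with a CMF byte whose low nibble is 8 (CM = 8, RFC 1950);
+        // raw deflate output does not, so pick the matching decompressor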
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
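+// Work around servers that close a chunked response without sending the terminating zero-length chunk:
+// if the socket closes while a 'data' listener is still attached and no error occurred,
+// report a premature close to the caller.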
+function fixResponseChunkedTransferBadEnding(request, errorCallback) {
+ let socket;
+
+ request.on('socket', function (s) {
+ socket = s;
+ });
+
+ request.on('response', function (response) {
+ const headers = response.headers;
+
+ if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
+ response.once('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = socket.listenerCount('data') > 0;
+
+ if (hasDataListener && !hadError) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ errorCallback(err);
+ }
+ });
+ }
+ });
+}
+
+function destroyStream(stream, err) {
+ if (stream.destroy) {
+ stream.destroy(err);
+ } else {
+ // node < 8
+ stream.emit('error', err);
+ stream.end();
+ }
+}
+
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+export default fetch;
+export { Headers, Request, Response, FetchError };
diff --git a/modules/gar_utils/node_modules/node-fetch/package.json b/modules/gar_utils/node_modules/node-fetch/package.json
new file mode 100644
index 0000000..c97967d
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-fetch/package.json
@@ -0,0 +1,117 @@
+{
+ "_from": "node-fetch@2.6.9",
+ "_id": "node-fetch@2.6.9",
+ "_inBundle": false,
+ "_integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==",
+ "_location": "/node-fetch",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "node-fetch@2.6.9",
+ "name": "node-fetch",
+ "escapedName": "node-fetch",
+ "rawSpec": "2.6.9",
+ "saveSpec": null,
+ "fetchSpec": "2.6.9"
+ },
+ "_requiredBy": [
+ "#USER",
+ "/"
+ ],
+ "_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz",
+ "_shasum": "7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6",
+ "_spec": "node-fetch@2.6.9",
+ "_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1",
+ "author": {
+ "name": "David Frank"
+ },
+ "browser": "./browser.js",
+ "bugs": {
+ "url": "https://github.com/bitinn/node-fetch/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "whatwg-url": "^5.0.0"
+ },
+ "deprecated": false,
+ "description": "A light-weight module that brings window.fetch to node.js",
+ "devDependencies": {
+ "@ungap/url-search-params": "^0.1.2",
+ "abort-controller": "^1.1.0",
+ "abortcontroller-polyfill": "^1.3.0",
+ "babel-core": "^6.26.3",
+ "babel-plugin-istanbul": "^4.1.6",
+ "babel-plugin-transform-async-generator-functions": "^6.24.1",
+ "babel-polyfill": "^6.26.0",
+ "babel-preset-env": "1.4.0",
+ "babel-register": "^6.16.3",
+ "chai": "^3.5.0",
+ "chai-as-promised": "^7.1.1",
+ "chai-iterator": "^1.1.1",
+ "chai-string": "~1.3.0",
+ "codecov": "3.3.0",
+ "cross-env": "^5.2.0",
+ "form-data": "^2.3.3",
+ "is-builtin-module": "^1.0.0",
+ "mocha": "^5.0.0",
+ "nyc": "11.9.0",
+ "parted": "^0.1.1",
+ "promise": "^8.0.3",
+ "resumer": "0.0.0",
+ "rollup": "^0.63.4",
+ "rollup-plugin-babel": "^3.0.7",
+ "string-to-arraybuffer": "^1.0.2",
+ "teeny-request": "3.7.0"
+ },
+ "engines": {
+ "node": "4.x || >=6.0.0"
+ },
+ "files": [
+ "lib/index.js",
+ "lib/index.mjs",
+ "lib/index.es.js",
+ "browser.js"
+ ],
+ "homepage": "https://github.com/bitinn/node-fetch",
+ "keywords": [
+ "fetch",
+ "http",
+ "promise"
+ ],
+ "license": "MIT",
+ "main": "lib/index.js",
+ "module": "lib/index.mjs",
+ "name": "node-fetch",
+ "peerDependencies": {
+ "encoding": "^0.1.0"
+ },
+ "peerDependenciesMeta": {
+ "encoding": {
+ "optional": true
+ }
+ },
+ "release": {
+ "branches": [
+ "+([0-9]).x",
+ "main",
+ "next",
+ {
+ "name": "beta",
+ "prerelease": true
+ }
+ ]
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/bitinn/node-fetch.git"
+ },
+ "scripts": {
+ "build": "cross-env BABEL_ENV=rollup rollup -c",
+ "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json",
+ "prepare": "npm run build",
+ "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
+ "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js"
+ },
+ "version": "2.6.9"
+}
diff --git a/modules/gar_utils/node_modules/node-stream-zip/LICENSE b/modules/gar_utils/node_modules/node-stream-zip/LICENSE
new file mode 100644
index 0000000..37ac867
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-stream-zip/LICENSE
@@ -0,0 +1,44 @@
+Copyright (c) 2021 Antelle https://github.com/antelle
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+== dependency license: adm-zip ==
+
+Copyright (c) 2012 Another-D-Mention Software and other contributors,
+http://www.another-d-mention.ro/
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/modules/gar_utils/node_modules/node-stream-zip/README.md b/modules/gar_utils/node_modules/node-stream-zip/README.md
new file mode 100644
index 0000000..98b5a56
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-stream-zip/README.md
@@ -0,0 +1,224 @@
+# node-stream-zip
+
+node.js library for reading and extraction of ZIP archives.
+Features:
+
+- it never loads entire archive into memory, everything is read by chunks
+- large archives support
+- all operations are non-blocking, no sync i/o
+- fast initialization
+- no dependencies, no binary addons
+- decompression with built-in zlib module
+- deflate, sfx, macosx/windows built-in archives
+- ZIP64 support
+
+## Installation
+
+```sh
+npm i node-stream-zip
+```
+
+## Usage
+
+There are two APIs provided:
+1. [promise-based / async](#async-api)
+2. [callbacks](#callback-api)
+
+It's recommended to use the new promise-based API; however, the legacy callback API
+may be more flexible for certain operations.
+
+### Async API
+
+Open a zip file
+```javascript
+const StreamZip = require('node-stream-zip');
+const zip = new StreamZip.async({ file: 'archive.zip' });
+```
+
+Stream one entry to stdout
+```javascript
+const stm = await zip.stream('path/inside/zip.txt');
+stm.pipe(process.stdout);
+stm.on('end', () => zip.close());
+```
+
+Read a file as buffer
+```javascript
+const data = await zip.entryData('path/inside/zip.txt');
+await zip.close();
+```
+
+Extract one file to disk
+```javascript
+await zip.extract('path/inside/zip.txt', './extracted.txt');
+await zip.close();
+```
+
+List entries
+```javascript
+const entriesCount = await zip.entriesCount;
+console.log(`Entries read: ${entriesCount}`);
+
+const entries = await zip.entries();
+for (const entry of Object.values(entries)) {
+ const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
+ console.log(`Entry ${entry.name}: ${desc}`);
+}
+
+// Do not forget to close the file once you're done
+await zip.close();
+```
+
+Extract a folder from archive to disk
+```javascript
+fs.mkdirSync('extracted');
+await zip.extract('path/inside/zip/', './extracted');
+await zip.close();
+```
+
+Extract everything
+```javascript
+fs.mkdirSync('extracted');
+const count = await zip.extract(null, './extracted');
+console.log(`Extracted ${count} entries`);
+await zip.close();
+```
+
+When extracting a folder, you can listen to `extract` event
+```javascript
+zip.on('extract', (entry, file) => {
+ console.log(`Extracted ${entry.name} to ${file}`);
+});
+```
+
+`entry` event is generated for every entry during loading
+```javascript
+zip.on('entry', entry => {
+ // you can already stream this entry,
+ // without waiting until all entry descriptions are read (suitable for very large archives)
+ console.log(`Read entry ${entry.name}`);
+});
+```
+
+### Callback API
+
+Open a zip file
+```javascript
+const StreamZip = require('node-stream-zip');
+const zip = new StreamZip({ file: 'archive.zip' });
+
+// Handle errors
+zip.on('error', err => { /*...*/ });
+```
+
+List entries
+```javascript
+zip.on('ready', () => {
+ console.log('Entries read: ' + zip.entriesCount);
+ for (const entry of Object.values(zip.entries())) {
+ const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
+ console.log(`Entry ${entry.name}: ${desc}`);
+ }
+ // Do not forget to close the file once you're done
+ zip.close();
+});
+```
+
+Stream one entry to stdout
+```javascript
+zip.on('ready', () => {
+ zip.stream('path/inside/zip.txt', (err, stm) => {
+ stm.pipe(process.stdout);
+ stm.on('end', () => zip.close());
+ });
+});
+```
+
+Extract one file to disk
+```javascript
+zip.on('ready', () => {
+ zip.extract('path/inside/zip.txt', './extracted.txt', err => {
+ console.log(err ? 'Extract error' : 'Extracted');
+ zip.close();
+ });
+});
+```
+
+Extract a folder from archive to disk
+```javascript
+zip.on('ready', () => {
+ fs.mkdirSync('extracted');
+ zip.extract('path/inside/zip/', './extracted', err => {
+ console.log(err ? 'Extract error' : 'Extracted');
+ zip.close();
+ });
+});
+```
+
+Extract everything
+```javascript
+zip.on('ready', () => {
+ fs.mkdirSync('extracted');
+ zip.extract(null, './extracted', (err, count) => {
+ console.log(err ? 'Extract error' : `Extracted ${count} entries`);
+ zip.close();
+ });
+});
+```
+
+Read a file as buffer in sync way
+```javascript
+zip.on('ready', () => {
+ const data = zip.entryDataSync('path/inside/zip.txt');
+ zip.close();
+});
+```
+
+When extracting a folder, you can listen to `extract` event
+```javascript
+zip.on('extract', (entry, file) => {
+ console.log(`Extracted ${entry.name} to ${file}`);
+});
+```
+
+`entry` event is generated for every entry during loading
+```javascript
+zip.on('entry', entry => {
+ // you can already stream this entry,
+ // without waiting until all entry descriptions are read (suitable for very large archives)
+ console.log(`Read entry ${entry.name}`);
+});
+```
+
+## Options
+
+You can pass these options to the constructor
+- `storeEntries: true` - you will be able to work with entries inside zip archive, otherwise the only way to access them is `entry` event
+- `skipEntryNameValidation: true` - by default, entry name is checked for malicious characters, like `../` or `c:\123`, pass this flag to disable validation errors
+
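+As a quick, non-authoritative sketch, both options might be combined like this
+(`archive.zip` is an assumed file name):
+
+```javascript
+const StreamZip = require('node-stream-zip');
+
+const zip = new StreamZip({
+    file: 'archive.zip',
+    // storeEntries: false keeps memory flat for huge archives;
+    // entries are then only visible through the 'entry' event
+    storeEntries: false,
+    // keep the malicious-name check enabled (the default)
+    skipEntryNameValidation: false
+});
+
+zip.on('entry', (entry) => console.log(entry.name));
+zip.on('ready', () => zip.close());
+zip.on('error', (err) => console.error(err));
+```
+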
+## Methods
+
+- `zip.entries()` - get all entries description
+- `zip.entry(name)` - get entry description by name
+- `zip.stream(entry, function(err, stm) { })` - get entry data reader stream
+- `zip.entryDataSync(entry)` - get entry data in sync way
+- `zip.close()` - cleanup after all entries have been read, streamed, extracted, and you don't need the archive
+
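+A minimal sketch of the callback-API methods above (`readme.txt` is an assumed
+entry name):
+
+```javascript
+const StreamZip = require('node-stream-zip');
+
+const zip = new StreamZip({ file: 'archive.zip' });
+zip.on('ready', () => {
+    // entry() returns undefined when the name is not in the archive
+    const entry = zip.entry('readme.txt');
+    if (entry) {
+        // synchronous read of one entry's decompressed bytes
+        console.log(zip.entryDataSync(entry).toString('utf8'));
+    }
+    zip.close();
+});
+zip.on('error', (err) => console.error(err));
+```
+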
+## Building
+
+The project doesn't require building. To run unit tests with [nodeunit](https://github.com/caolan/nodeunit):
+```sh
+npm test
+```
+
+## Known issues
+
+- [utf8](https://github.com/rubyzip/rubyzip/wiki/Files-with-non-ascii-filenames) file names
+
+## Out of scope
+
+- AES encrypted files: the library will throw an error if you try to open them
+
+## Contributors
+
+ZIP parsing code has been partially forked from [cthackers/adm-zip](https://github.com/cthackers/adm-zip) (MIT license).
diff --git a/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.d.ts b/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.d.ts
new file mode 100644
index 0000000..f076c72
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.d.ts
@@ -0,0 +1,199 @@
+/// <reference types="node" />
+
+declare namespace StreamZip {
+ interface StreamZipOptions {
+ /**
+ * File to read
+ * @default undefined
+ */
+ file?: string;
+
+ /**
+ * Alternatively, you can pass fd here
+ * @default undefined
+ */
+ fd?: number;
+
+ /**
+ * You will be able to work with entries inside zip archive,
+ * otherwise the only way to access them is entry event
+ * @default true
+ */
+ storeEntries?: boolean;
+
+ /**
+ * By default, entry name is checked for malicious characters, like ../ or c:\123,
+ * pass this flag to disable validation error
+ * @default false
+ */
+ skipEntryNameValidation?: boolean;
+
+ /**
+ * Filesystem read chunk size
+ * @default automatic based on file size
+ */
+ chunkSize?: number;
+
+ /**
+ * Encoding used to decode file names
+ * @default UTF8
+ */
+ nameEncoding?: string;
+ }
+
+ interface ZipEntry {
+ /**
+ * file name
+ */
+ name: string;
+
+ /**
+ * true if it's a directory entry
+ */
+ isDirectory: boolean;
+
+ /**
+ * true if it's a file entry, see also isDirectory
+ */
+ isFile: boolean;
+
+ /**
+ * file comment
+ */
+ comment: string;
+
+ /**
+ * if the file is encrypted
+ */
+ encrypted: boolean;
+
+ /**
+ * version made by
+ */
+ verMade: number;
+
+ /**
+ * version needed to extract
+ */
+ version: number;
+
+ /**
+ * encrypt, decrypt flags
+ */
+ flags: number;
+
+ /**
+ * compression method
+ */
+ method: number;
+
+ /**
+ * modification time
+ */
+ time: number;
+
+ /**
+ * uncompressed file crc-32 value
+ */
+ crc: number;
+
+ /**
+ * compressed size
+ */
+ compressedSize: number;
+
+ /**
+ * uncompressed size
+ */
+ size: number;
+
+ /**
+ * volume number start
+ */
+ diskStart: number;
+
+ /**
+ * internal file attributes
+ */
+ inattr: number;
+
+ /**
+ * external file attributes
+ */
+ attr: number;
+
+ /**
+ * LOC header offset
+ */
+ offset: number;
+ }
+
+ class StreamZipAsync {
+ constructor(config: StreamZipOptions);
+
+        entriesCount: Promise<number>;
+        comment: Promise<string>;
+
+        entry(name: string): Promise<ZipEntry | undefined>;
+        entries(): Promise<{ [name: string]: ZipEntry }>;
+        entryData(entry: string | ZipEntry): Promise<Buffer>;
+        stream(entry: string | ZipEntry): Promise<NodeJS.ReadableStream>;
+        extract(entry: string | ZipEntry | null, outPath: string): Promise<number | undefined>;
+
+        on(event: 'entry', handler: (entry: ZipEntry) => void): void;
+        on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
+
+        close(): Promise<void>;
+ }
+}
+
+type StreamZipOptions = StreamZip.StreamZipOptions;
+type ZipEntry = StreamZip.ZipEntry;
+
+declare class StreamZip {
+ constructor(config: StreamZipOptions);
+
+ /**
+ * number of entries in the archive
+ */
+ entriesCount: number;
+
+ /**
+ * archive comment
+ */
+ comment: string;
+
+ on(event: 'error', handler: (error: any) => void): void;
+ on(event: 'entry', handler: (entry: ZipEntry) => void): void;
+ on(event: 'ready', handler: () => void): void;
+ on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
+
+ entry(name: string): ZipEntry | undefined;
+
+ entries(): { [name: string]: ZipEntry };
+
+ stream(
+ entry: string | ZipEntry,
+ callback: (err: any | null, stream?: NodeJS.ReadableStream) => void
+ ): void;
+
+ entryDataSync(entry: string | ZipEntry): Buffer;
+
+ openEntry(
+ entry: string | ZipEntry,
+ callback: (err: any | null, entry?: ZipEntry) => void,
+ sync: boolean
+ ): void;
+
+ extract(
+ entry: string | ZipEntry | null,
+ outPath: string,
+ callback: (err?: any, res?: number) => void
+ ): void;
+
+ close(callback?: (err?: any) => void): void;
+
+ static async: typeof StreamZip.StreamZipAsync;
+}
+
+export = StreamZip;
diff --git a/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.js b/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.js
new file mode 100644
index 0000000..d95bbef
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-stream-zip/node_stream_zip.js
@@ -0,0 +1,1210 @@
+/**
+ * @license node-stream-zip | (c) 2020 Antelle | https://github.com/antelle/node-stream-zip/blob/master/LICENSE
+ * Portions copyright https://github.com/cthackers/adm-zip | https://raw.githubusercontent.com/cthackers/adm-zip/master/LICENSE
+ */
+
+let fs = require('fs');
+const util = require('util');
+const path = require('path');
+const events = require('events');
+const zlib = require('zlib');
+const stream = require('stream');
+
+const consts = {
+ /* The local file header */
+ LOCHDR: 30, // LOC header size
+ LOCSIG: 0x04034b50, // "PK\003\004"
+ LOCVER: 4, // version needed to extract
+ LOCFLG: 6, // general purpose bit flag
+ LOCHOW: 8, // compression method
+ LOCTIM: 10, // modification time (2 bytes time, 2 bytes date)
+ LOCCRC: 14, // uncompressed file crc-32 value
+ LOCSIZ: 18, // compressed size
+ LOCLEN: 22, // uncompressed size
+ LOCNAM: 26, // filename length
+ LOCEXT: 28, // extra field length
+
+ /* The Data descriptor */
+ EXTSIG: 0x08074b50, // "PK\007\008"
+ EXTHDR: 16, // EXT header size
+ EXTCRC: 4, // uncompressed file crc-32 value
+ EXTSIZ: 8, // compressed size
+ EXTLEN: 12, // uncompressed size
+
+ /* The central directory file header */
+ CENHDR: 46, // CEN header size
+ CENSIG: 0x02014b50, // "PK\001\002"
+ CENVEM: 4, // version made by
+ CENVER: 6, // version needed to extract
+ CENFLG: 8, // encrypt, decrypt flags
+ CENHOW: 10, // compression method
+ CENTIM: 12, // modification time (2 bytes time, 2 bytes date)
+ CENCRC: 16, // uncompressed file crc-32 value
+ CENSIZ: 20, // compressed size
+ CENLEN: 24, // uncompressed size
+ CENNAM: 28, // filename length
+ CENEXT: 30, // extra field length
+ CENCOM: 32, // file comment length
+ CENDSK: 34, // volume number start
+ CENATT: 36, // internal file attributes
+ CENATX: 38, // external file attributes (host system dependent)
+ CENOFF: 42, // LOC header offset
+
+ /* The entries in the end of central directory */
+ ENDHDR: 22, // END header size
+ ENDSIG: 0x06054b50, // "PK\005\006"
+ ENDSIGFIRST: 0x50,
+ ENDSUB: 8, // number of entries on this disk
+ ENDTOT: 10, // total number of entries
+ ENDSIZ: 12, // central directory size in bytes
+ ENDOFF: 16, // offset of first CEN header
+ ENDCOM: 20, // zip file comment length
+ MAXFILECOMMENT: 0xffff,
+
+ /* The entries in the end of ZIP64 central directory locator */
+ ENDL64HDR: 20, // ZIP64 end of central directory locator header size
+ ENDL64SIG: 0x07064b50, // ZIP64 end of central directory locator signature
+ ENDL64SIGFIRST: 0x50,
+ ENDL64OFS: 8, // ZIP64 end of central directory offset
+
+ /* The entries in the end of ZIP64 central directory */
+ END64HDR: 56, // ZIP64 end of central directory header size
+ END64SIG: 0x06064b50, // ZIP64 end of central directory signature
+ END64SIGFIRST: 0x50,
+ END64SUB: 24, // number of entries on this disk
+ END64TOT: 32, // total number of entries
+ END64SIZ: 40,
+ END64OFF: 48,
+
+ /* Compression methods */
+ STORED: 0, // no compression
+ SHRUNK: 1, // shrunk
+ REDUCED1: 2, // reduced with compression factor 1
+ REDUCED2: 3, // reduced with compression factor 2
+ REDUCED3: 4, // reduced with compression factor 3
+ REDUCED4: 5, // reduced with compression factor 4
+ IMPLODED: 6, // imploded
+ // 7 reserved
+ DEFLATED: 8, // deflated
+ ENHANCED_DEFLATED: 9, // deflate64
+ PKWARE: 10, // PKWare DCL imploded
+ // 11 reserved
+ BZIP2: 12, // compressed using BZIP2
+ // 13 reserved
+ LZMA: 14, // LZMA
+ // 15-17 reserved
+ IBM_TERSE: 18, // compressed using IBM TERSE
+ IBM_LZ77: 19, //IBM LZ77 z
+
+ /* General purpose bit flag */
+ FLG_ENC: 0, // encrypted file
+ FLG_COMP1: 1, // compression option
+ FLG_COMP2: 2, // compression option
+ FLG_DESC: 4, // data descriptor
+ FLG_ENH: 8, // enhanced deflation
+ FLG_STR: 16, // strong encryption
+ FLG_LNG: 1024, // language encoding
+ FLG_MSK: 4096, // mask header values
+ FLG_ENTRY_ENC: 1,
+
+ /* 4.5 Extensible data fields */
+ EF_ID: 0,
+ EF_SIZE: 2,
+
+ /* Header IDs */
+ ID_ZIP64: 0x0001,
+ ID_AVINFO: 0x0007,
+ ID_PFS: 0x0008,
+ ID_OS2: 0x0009,
+ ID_NTFS: 0x000a,
+ ID_OPENVMS: 0x000c,
+ ID_UNIX: 0x000d,
+ ID_FORK: 0x000e,
+ ID_PATCH: 0x000f,
+ ID_X509_PKCS7: 0x0014,
+ ID_X509_CERTID_F: 0x0015,
+ ID_X509_CERTID_C: 0x0016,
+ ID_STRONGENC: 0x0017,
+ ID_RECORD_MGT: 0x0018,
+ ID_X509_PKCS7_RL: 0x0019,
+ ID_IBM1: 0x0065,
+ ID_IBM2: 0x0066,
+ ID_POSZIP: 0x4690,
+
+ EF_ZIP64_OR_32: 0xffffffff,
+ EF_ZIP64_OR_16: 0xffff,
+};
+
+const StreamZip = function (config) {
+ let fd, fileSize, chunkSize, op, centralDirectory, closed;
+ const ready = false,
+ that = this,
+ entries = config.storeEntries !== false ? {} : null,
+ fileName = config.file,
+ textDecoder = config.nameEncoding ? new TextDecoder(config.nameEncoding) : null;
+
+ open();
+
+ function open() {
+ if (config.fd) {
+ fd = config.fd;
+ readFile();
+ } else {
+ fs.open(fileName, 'r', (err, f) => {
+ if (err) {
+ return that.emit('error', err);
+ }
+ fd = f;
+ readFile();
+ });
+ }
+ }
+
+ function readFile() {
+ fs.fstat(fd, (err, stat) => {
+ if (err) {
+ return that.emit('error', err);
+ }
+ fileSize = stat.size;
+ chunkSize = config.chunkSize || Math.round(fileSize / 1000);
+ chunkSize = Math.max(
+ Math.min(chunkSize, Math.min(128 * 1024, fileSize)),
+ Math.min(1024, fileSize)
+ );
+ readCentralDirectory();
+ });
+ }
+
+ function readUntilFoundCallback(err, bytesRead) {
+ if (err || !bytesRead) {
+ return that.emit('error', err || new Error('Archive read error'));
+ }
+ let pos = op.lastPos;
+ let bufferPosition = pos - op.win.position;
+ const buffer = op.win.buffer;
+ const minPos = op.minPos;
+ while (--pos >= minPos && --bufferPosition >= 0) {
+ if (buffer.length - bufferPosition >= 4 && buffer[bufferPosition] === op.firstByte) {
+ // quick check first signature byte
+ if (buffer.readUInt32LE(bufferPosition) === op.sig) {
+ op.lastBufferPosition = bufferPosition;
+ op.lastBytesRead = bytesRead;
+ op.complete();
+ return;
+ }
+ }
+ }
+ if (pos === minPos) {
+ return that.emit('error', new Error('Bad archive'));
+ }
+ op.lastPos = pos + 1;
+ op.chunkSize *= 2;
+ if (pos <= minPos) {
+ return that.emit('error', new Error('Bad archive'));
+ }
+ const expandLength = Math.min(op.chunkSize, pos - minPos);
+ op.win.expandLeft(expandLength, readUntilFoundCallback);
+ }
+
+ function readCentralDirectory() {
+ const totalReadLength = Math.min(consts.ENDHDR + consts.MAXFILECOMMENT, fileSize);
+ op = {
+ win: new FileWindowBuffer(fd),
+ totalReadLength,
+ minPos: fileSize - totalReadLength,
+ lastPos: fileSize,
+ chunkSize: Math.min(1024, chunkSize),
+ firstByte: consts.ENDSIGFIRST,
+ sig: consts.ENDSIG,
+ complete: readCentralDirectoryComplete,
+ };
+ op.win.read(fileSize - op.chunkSize, op.chunkSize, readUntilFoundCallback);
+ }
+
+ function readCentralDirectoryComplete() {
+ const buffer = op.win.buffer;
+ const pos = op.lastBufferPosition;
+ try {
+ centralDirectory = new CentralDirectoryHeader();
+ centralDirectory.read(buffer.slice(pos, pos + consts.ENDHDR));
+ centralDirectory.headerOffset = op.win.position + pos;
+ if (centralDirectory.commentLength) {
+ that.comment = buffer
+ .slice(
+ pos + consts.ENDHDR,
+ pos + consts.ENDHDR + centralDirectory.commentLength
+ )
+ .toString();
+ } else {
+ that.comment = null;
+ }
+ that.entriesCount = centralDirectory.volumeEntries;
+ that.centralDirectory = centralDirectory;
+ if (
+ (centralDirectory.volumeEntries === consts.EF_ZIP64_OR_16 &&
+ centralDirectory.totalEntries === consts.EF_ZIP64_OR_16) ||
+ centralDirectory.size === consts.EF_ZIP64_OR_32 ||
+ centralDirectory.offset === consts.EF_ZIP64_OR_32
+ ) {
+ readZip64CentralDirectoryLocator();
+ } else {
+ op = {};
+ readEntries();
+ }
+ } catch (err) {
+ that.emit('error', err);
+ }
+ }
+
+ function readZip64CentralDirectoryLocator() {
+ const length = consts.ENDL64HDR;
+ if (op.lastBufferPosition > length) {
+ op.lastBufferPosition -= length;
+ readZip64CentralDirectoryLocatorComplete();
+ } else {
+ op = {
+ win: op.win,
+ totalReadLength: length,
+ minPos: op.win.position - length,
+ lastPos: op.win.position,
+ chunkSize: op.chunkSize,
+ firstByte: consts.ENDL64SIGFIRST,
+ sig: consts.ENDL64SIG,
+ complete: readZip64CentralDirectoryLocatorComplete,
+ };
+ op.win.read(op.lastPos - op.chunkSize, op.chunkSize, readUntilFoundCallback);
+ }
+ }
+
+ function readZip64CentralDirectoryLocatorComplete() {
+ const buffer = op.win.buffer;
+ const locHeader = new CentralDirectoryLoc64Header();
+ locHeader.read(
+ buffer.slice(op.lastBufferPosition, op.lastBufferPosition + consts.ENDL64HDR)
+ );
+ const readLength = fileSize - locHeader.headerOffset;
+ op = {
+ win: op.win,
+ totalReadLength: readLength,
+ minPos: locHeader.headerOffset,
+ lastPos: op.lastPos,
+ chunkSize: op.chunkSize,
+ firstByte: consts.END64SIGFIRST,
+ sig: consts.END64SIG,
+ complete: readZip64CentralDirectoryComplete,
+ };
+ op.win.read(fileSize - op.chunkSize, op.chunkSize, readUntilFoundCallback);
+ }
+
+ function readZip64CentralDirectoryComplete() {
+ const buffer = op.win.buffer;
+ const zip64cd = new CentralDirectoryZip64Header();
+ zip64cd.read(buffer.slice(op.lastBufferPosition, op.lastBufferPosition + consts.END64HDR));
+ that.centralDirectory.volumeEntries = zip64cd.volumeEntries;
+ that.centralDirectory.totalEntries = zip64cd.totalEntries;
+ that.centralDirectory.size = zip64cd.size;
+ that.centralDirectory.offset = zip64cd.offset;
+ that.entriesCount = zip64cd.volumeEntries;
+ op = {};
+ readEntries();
+ }
+
+ function readEntries() {
+ op = {
+ win: new FileWindowBuffer(fd),
+ pos: centralDirectory.offset,
+ chunkSize,
+ entriesLeft: centralDirectory.volumeEntries,
+ };
+ op.win.read(op.pos, Math.min(chunkSize, fileSize - op.pos), readEntriesCallback);
+ }
+
+ function readEntriesCallback(err, bytesRead) {
+ if (err || !bytesRead) {
+ return that.emit('error', err || new Error('Entries read error'));
+ }
+ let bufferPos = op.pos - op.win.position;
+ let entry = op.entry;
+ const buffer = op.win.buffer;
+ const bufferLength = buffer.length;
+ try {
+ while (op.entriesLeft > 0) {
+ if (!entry) {
+ entry = new ZipEntry();
+ entry.readHeader(buffer, bufferPos);
+ entry.headerOffset = op.win.position + bufferPos;
+ op.entry = entry;
+ op.pos += consts.CENHDR;
+ bufferPos += consts.CENHDR;
+ }
+ const entryHeaderSize = entry.fnameLen + entry.extraLen + entry.comLen;
+ const advanceBytes = entryHeaderSize + (op.entriesLeft > 1 ? consts.CENHDR : 0);
+ if (bufferLength - bufferPos < advanceBytes) {
+ op.win.moveRight(chunkSize, readEntriesCallback, bufferPos);
+ op.move = true;
+ return;
+ }
+ entry.read(buffer, bufferPos, textDecoder);
+ if (!config.skipEntryNameValidation) {
+ entry.validateName();
+ }
+ if (entries) {
+ entries[entry.name] = entry;
+ }
+ that.emit('entry', entry);
+ op.entry = entry = null;
+ op.entriesLeft--;
+ op.pos += entryHeaderSize;
+ bufferPos += entryHeaderSize;
+ }
+ that.emit('ready');
+ } catch (err) {
+ that.emit('error', err);
+ }
+ }
+
+ function checkEntriesExist() {
+ if (!entries) {
+ throw new Error('storeEntries disabled');
+ }
+ }
+
+ Object.defineProperty(this, 'ready', {
+ get() {
+ return ready;
+ },
+ });
+
+ this.entry = function (name) {
+ checkEntriesExist();
+ return entries[name];
+ };
+
+ this.entries = function () {
+ checkEntriesExist();
+ return entries;
+ };
+
+ this.stream = function (entry, callback) {
+ return this.openEntry(
+ entry,
+ (err, entry) => {
+ if (err) {
+ return callback(err);
+ }
+ const offset = dataOffset(entry);
+ let entryStream = new EntryDataReaderStream(fd, offset, entry.compressedSize);
+ if (entry.method === consts.STORED) {
+ // nothing to do
+ } else if (entry.method === consts.DEFLATED) {
+ entryStream = entryStream.pipe(zlib.createInflateRaw());
+ } else {
+ return callback(new Error('Unknown compression method: ' + entry.method));
+ }
+ if (canVerifyCrc(entry)) {
+ entryStream = entryStream.pipe(
+ new EntryVerifyStream(entryStream, entry.crc, entry.size)
+ );
+ }
+ callback(null, entryStream);
+ },
+ false
+ );
+ };
+
+ this.entryDataSync = function (entry) {
+ let err = null;
+ this.openEntry(
+ entry,
+ (e, en) => {
+ err = e;
+ entry = en;
+ },
+ true
+ );
+ if (err) {
+ throw err;
+ }
+ let data = Buffer.alloc(entry.compressedSize);
+ new FsRead(fd, data, 0, entry.compressedSize, dataOffset(entry), (e) => {
+ err = e;
+ }).read(true);
+ if (err) {
+ throw err;
+ }
+ if (entry.method === consts.STORED) {
+ // nothing to do
+ } else if (entry.method === consts.DEFLATED || entry.method === consts.ENHANCED_DEFLATED) {
+ data = zlib.inflateRawSync(data);
+ } else {
+ throw new Error('Unknown compression method: ' + entry.method);
+ }
+ if (data.length !== entry.size) {
+ throw new Error('Invalid size');
+ }
+ if (canVerifyCrc(entry)) {
+ const verify = new CrcVerify(entry.crc, entry.size);
+ verify.data(data);
+ }
+ return data;
+ };
+
+ this.openEntry = function (entry, callback, sync) {
+ if (typeof entry === 'string') {
+ checkEntriesExist();
+ entry = entries[entry];
+ if (!entry) {
+ return callback(new Error('Entry not found'));
+ }
+ }
+ if (!entry.isFile) {
+ return callback(new Error('Entry is not file'));
+ }
+ if (!fd) {
+ return callback(new Error('Archive closed'));
+ }
+ const buffer = Buffer.alloc(consts.LOCHDR);
+ new FsRead(fd, buffer, 0, buffer.length, entry.offset, (err) => {
+ if (err) {
+ return callback(err);
+ }
+ let readEx;
+ try {
+ entry.readDataHeader(buffer);
+ if (entry.encrypted) {
+ readEx = new Error('Entry encrypted');
+ }
+ } catch (ex) {
+ readEx = ex;
+ }
+ callback(readEx, entry);
+ }).read(sync);
+ };
+
+ function dataOffset(entry) {
+ return entry.offset + consts.LOCHDR + entry.fnameLen + entry.extraLen;
+ }
+
+ function canVerifyCrc(entry) {
+ // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
+ return (entry.flags & 0x8) !== 0x8;
+ }
+
+ function extract(entry, outPath, callback) {
+ that.stream(entry, (err, stm) => {
+ if (err) {
+ callback(err);
+ } else {
+ let fsStm, errThrown;
+ stm.on('error', (err) => {
+ errThrown = err;
+ if (fsStm) {
+ stm.unpipe(fsStm);
+ fsStm.close(() => {
+ callback(err);
+ });
+ }
+ });
+ fs.open(outPath, 'w', (err, fdFile) => {
+ if (err) {
+ return callback(err);
+ }
+ if (errThrown) {
+                        fs.close(fdFile, () => {
+ callback(errThrown);
+ });
+ return;
+ }
+ fsStm = fs.createWriteStream(outPath, { fd: fdFile });
+ fsStm.on('finish', () => {
+ that.emit('extract', entry, outPath);
+ if (!errThrown) {
+ callback();
+ }
+ });
+ stm.pipe(fsStm);
+ });
+ }
+ });
+ }
+
+ function createDirectories(baseDir, dirs, callback) {
+ if (!dirs.length) {
+ return callback();
+ }
+ let dir = dirs.shift();
+ dir = path.join(baseDir, path.join(...dir));
+ fs.mkdir(dir, { recursive: true }, (err) => {
+ if (err && err.code !== 'EEXIST') {
+ return callback(err);
+ }
+ createDirectories(baseDir, dirs, callback);
+ });
+ }
+
+ function extractFiles(baseDir, baseRelPath, files, callback, extractedCount) {
+ if (!files.length) {
+ return callback(null, extractedCount);
+ }
+ const file = files.shift();
+ const targetPath = path.join(baseDir, file.name.replace(baseRelPath, ''));
+ extract(file, targetPath, (err) => {
+ if (err) {
+ return callback(err, extractedCount);
+ }
+ extractFiles(baseDir, baseRelPath, files, callback, extractedCount + 1);
+ });
+ }
+
+ this.extract = function (entry, outPath, callback) {
+ let entryName = entry || '';
+ if (typeof entry === 'string') {
+ entry = this.entry(entry);
+ if (entry) {
+ entryName = entry.name;
+ } else {
+ if (entryName.length && entryName[entryName.length - 1] !== '/') {
+ entryName += '/';
+ }
+ }
+ }
+ if (!entry || entry.isDirectory) {
+ const files = [],
+ dirs = [],
+ allDirs = {};
+ for (const e in entries) {
+ if (
+ Object.prototype.hasOwnProperty.call(entries, e) &&
+ e.lastIndexOf(entryName, 0) === 0
+ ) {
+ let relPath = e.replace(entryName, '');
+ const childEntry = entries[e];
+ if (childEntry.isFile) {
+ files.push(childEntry);
+ relPath = path.dirname(relPath);
+ }
+ if (relPath && !allDirs[relPath] && relPath !== '.') {
+ allDirs[relPath] = true;
+ let parts = relPath.split('/').filter((f) => {
+ return f;
+ });
+ if (parts.length) {
+ dirs.push(parts);
+ }
+ while (parts.length > 1) {
+ parts = parts.slice(0, parts.length - 1);
+ const partsPath = parts.join('/');
+ if (allDirs[partsPath] || partsPath === '.') {
+ break;
+ }
+ allDirs[partsPath] = true;
+ dirs.push(parts);
+ }
+ }
+ }
+ }
+ dirs.sort((x, y) => {
+ return x.length - y.length;
+ });
+ if (dirs.length) {
+ createDirectories(outPath, dirs, (err) => {
+ if (err) {
+ callback(err);
+ } else {
+ extractFiles(outPath, entryName, files, callback, 0);
+ }
+ });
+ } else {
+ extractFiles(outPath, entryName, files, callback, 0);
+ }
+ } else {
+ fs.stat(outPath, (err, stat) => {
+ if (stat && stat.isDirectory()) {
+ extract(entry, path.join(outPath, path.basename(entry.name)), callback);
+ } else {
+ extract(entry, outPath, callback);
+ }
+ });
+ }
+ };
+
+ this.close = function (callback) {
+ if (closed || !fd) {
+ closed = true;
+ if (callback) {
+ callback();
+ }
+ } else {
+ closed = true;
+ fs.close(fd, (err) => {
+ fd = null;
+ if (callback) {
+ callback(err);
+ }
+ });
+ }
+ };
+
+ const originalEmit = events.EventEmitter.prototype.emit;
+ this.emit = function (...args) {
+ if (!closed) {
+ return originalEmit.call(this, ...args);
+ }
+ };
+};
+
+StreamZip.setFs = function (customFs) {
+ fs = customFs;
+};
+
+StreamZip.debugLog = (...args) => {
+ if (StreamZip.debug) {
+ // eslint-disable-next-line no-console
+ console.log(...args);
+ }
+};
+
+util.inherits(StreamZip, events.EventEmitter);
+
+const propZip = Symbol('zip');
+
+StreamZip.async = class StreamZipAsync extends events.EventEmitter {
+ constructor(config) {
+ super();
+
+ const zip = new StreamZip(config);
+
+ zip.on('entry', (entry) => this.emit('entry', entry));
+ zip.on('extract', (entry, outPath) => this.emit('extract', entry, outPath));
+
+ this[propZip] = new Promise((resolve, reject) => {
+ zip.on('ready', () => {
+ zip.removeListener('error', reject);
+ resolve(zip);
+ });
+ zip.on('error', reject);
+ });
+ }
+
+ get entriesCount() {
+ return this[propZip].then((zip) => zip.entriesCount);
+ }
+
+ get comment() {
+ return this[propZip].then((zip) => zip.comment);
+ }
+
+ async entry(name) {
+ const zip = await this[propZip];
+ return zip.entry(name);
+ }
+
+ async entries() {
+ const zip = await this[propZip];
+ return zip.entries();
+ }
+
+ async stream(entry) {
+ const zip = await this[propZip];
+ return new Promise((resolve, reject) => {
+ zip.stream(entry, (err, stm) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(stm);
+ }
+ });
+ });
+ }
+
+ async entryData(entry) {
+ const stm = await this.stream(entry);
+ return new Promise((resolve, reject) => {
+ const data = [];
+ stm.on('data', (chunk) => data.push(chunk));
+ stm.on('end', () => {
+ resolve(Buffer.concat(data));
+ });
+ stm.on('error', (err) => {
+ stm.removeAllListeners('end');
+ reject(err);
+ });
+ });
+ }
+
+ async extract(entry, outPath) {
+ const zip = await this[propZip];
+ return new Promise((resolve, reject) => {
+ zip.extract(entry, outPath, (err, res) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(res);
+ }
+ });
+ });
+ }
+
+ async close() {
+ const zip = await this[propZip];
+ return new Promise((resolve, reject) => {
+ zip.close((err) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve();
+ }
+ });
+ });
+ }
+};
+
+class CentralDirectoryHeader {
+ read(data) {
+ if (data.length !== consts.ENDHDR || data.readUInt32LE(0) !== consts.ENDSIG) {
+ throw new Error('Invalid central directory');
+ }
+ // number of entries on this volume
+ this.volumeEntries = data.readUInt16LE(consts.ENDSUB);
+ // total number of entries
+ this.totalEntries = data.readUInt16LE(consts.ENDTOT);
+ // central directory size in bytes
+ this.size = data.readUInt32LE(consts.ENDSIZ);
+ // offset of first CEN header
+ this.offset = data.readUInt32LE(consts.ENDOFF);
+ // zip file comment length
+ this.commentLength = data.readUInt16LE(consts.ENDCOM);
+ }
+}
+
+class CentralDirectoryLoc64Header {
+ read(data) {
+ if (data.length !== consts.ENDL64HDR || data.readUInt32LE(0) !== consts.ENDL64SIG) {
+ throw new Error('Invalid zip64 central directory locator');
+ }
+ // ZIP64 EOCD header offset
+ this.headerOffset = readUInt64LE(data, consts.ENDSUB);
+ }
+}
+
+class CentralDirectoryZip64Header {
+ read(data) {
+ if (data.length !== consts.END64HDR || data.readUInt32LE(0) !== consts.END64SIG) {
+ throw new Error('Invalid central directory');
+ }
+ // number of entries on this volume
+ this.volumeEntries = readUInt64LE(data, consts.END64SUB);
+ // total number of entries
+ this.totalEntries = readUInt64LE(data, consts.END64TOT);
+ // central directory size in bytes
+ this.size = readUInt64LE(data, consts.END64SIZ);
+ // offset of first CEN header
+ this.offset = readUInt64LE(data, consts.END64OFF);
+ }
+}
+
+class ZipEntry {
+ readHeader(data, offset) {
+ // data should be 46 bytes and start with "PK 01 02"
+ if (data.length < offset + consts.CENHDR || data.readUInt32LE(offset) !== consts.CENSIG) {
+ throw new Error('Invalid entry header');
+ }
+ // version made by
+ this.verMade = data.readUInt16LE(offset + consts.CENVEM);
+ // version needed to extract
+ this.version = data.readUInt16LE(offset + consts.CENVER);
+ // encrypt, decrypt flags
+ this.flags = data.readUInt16LE(offset + consts.CENFLG);
+ // compression method
+ this.method = data.readUInt16LE(offset + consts.CENHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ const timebytes = data.readUInt16LE(offset + consts.CENTIM);
+ const datebytes = data.readUInt16LE(offset + consts.CENTIM + 2);
+ this.time = parseZipTime(timebytes, datebytes);
+
+ // uncompressed file crc-32 value
+ this.crc = data.readUInt32LE(offset + consts.CENCRC);
+ // compressed size
+ this.compressedSize = data.readUInt32LE(offset + consts.CENSIZ);
+ // uncompressed size
+ this.size = data.readUInt32LE(offset + consts.CENLEN);
+ // filename length
+ this.fnameLen = data.readUInt16LE(offset + consts.CENNAM);
+ // extra field length
+ this.extraLen = data.readUInt16LE(offset + consts.CENEXT);
+ // file comment length
+ this.comLen = data.readUInt16LE(offset + consts.CENCOM);
+ // volume number start
+ this.diskStart = data.readUInt16LE(offset + consts.CENDSK);
+ // internal file attributes
+ this.inattr = data.readUInt16LE(offset + consts.CENATT);
+ // external file attributes
+ this.attr = data.readUInt32LE(offset + consts.CENATX);
+ // LOC header offset
+ this.offset = data.readUInt32LE(offset + consts.CENOFF);
+ }
+
+ readDataHeader(data) {
+ // 30 bytes and should start with "PK\003\004"
+ if (data.readUInt32LE(0) !== consts.LOCSIG) {
+ throw new Error('Invalid local header');
+ }
+ // version needed to extract
+ this.version = data.readUInt16LE(consts.LOCVER);
+ // general purpose bit flag
+ this.flags = data.readUInt16LE(consts.LOCFLG);
+ // compression method
+ this.method = data.readUInt16LE(consts.LOCHOW);
+ // modification time (2 bytes time ; 2 bytes date)
+ const timebytes = data.readUInt16LE(consts.LOCTIM);
+ const datebytes = data.readUInt16LE(consts.LOCTIM + 2);
+ this.time = parseZipTime(timebytes, datebytes);
+
+ // uncompressed file crc-32 value
+ this.crc = data.readUInt32LE(consts.LOCCRC) || this.crc;
+ // compressed size
+ const compressedSize = data.readUInt32LE(consts.LOCSIZ);
+ if (compressedSize && compressedSize !== consts.EF_ZIP64_OR_32) {
+ this.compressedSize = compressedSize;
+ }
+ // uncompressed size
+ const size = data.readUInt32LE(consts.LOCLEN);
+ if (size && size !== consts.EF_ZIP64_OR_32) {
+ this.size = size;
+ }
+ // filename length
+ this.fnameLen = data.readUInt16LE(consts.LOCNAM);
+ // extra field length
+ this.extraLen = data.readUInt16LE(consts.LOCEXT);
+ }
+
+ read(data, offset, textDecoder) {
+ const nameData = data.slice(offset, (offset += this.fnameLen));
+ this.name = textDecoder
+ ? textDecoder.decode(new Uint8Array(nameData))
+ : nameData.toString('utf8');
+ const lastChar = data[offset - 1];
+ this.isDirectory = lastChar === 47 || lastChar === 92;
+
+ if (this.extraLen) {
+ this.readExtra(data, offset);
+ offset += this.extraLen;
+ }
+ this.comment = this.comLen ? data.slice(offset, offset + this.comLen).toString() : null;
+ }
+
+ validateName() {
+ if (/\\|^\w+:|^\/|(^|\/)\.\.(\/|$)/.test(this.name)) {
+ throw new Error('Malicious entry: ' + this.name);
+ }
+ }
+
+ readExtra(data, offset) {
+ let signature, size;
+ const maxPos = offset + this.extraLen;
+ while (offset < maxPos) {
+ signature = data.readUInt16LE(offset);
+ offset += 2;
+ size = data.readUInt16LE(offset);
+ offset += 2;
+ if (consts.ID_ZIP64 === signature) {
+ this.parseZip64Extra(data, offset, size);
+ }
+ offset += size;
+ }
+ }
+
+ parseZip64Extra(data, offset, length) {
+ if (length >= 8 && this.size === consts.EF_ZIP64_OR_32) {
+ this.size = readUInt64LE(data, offset);
+ offset += 8;
+ length -= 8;
+ }
+ if (length >= 8 && this.compressedSize === consts.EF_ZIP64_OR_32) {
+ this.compressedSize = readUInt64LE(data, offset);
+ offset += 8;
+ length -= 8;
+ }
+ if (length >= 8 && this.offset === consts.EF_ZIP64_OR_32) {
+ this.offset = readUInt64LE(data, offset);
+ offset += 8;
+ length -= 8;
+ }
+ if (length >= 4 && this.diskStart === consts.EF_ZIP64_OR_16) {
+ this.diskStart = data.readUInt32LE(offset);
+ // offset += 4; length -= 4;
+ }
+ }
+
+ get encrypted() {
+ return (this.flags & consts.FLG_ENTRY_ENC) === consts.FLG_ENTRY_ENC;
+ }
+
+ get isFile() {
+ return !this.isDirectory;
+ }
+}
+
+class FsRead {
+ constructor(fd, buffer, offset, length, position, callback) {
+ this.fd = fd;
+ this.buffer = buffer;
+ this.offset = offset;
+ this.length = length;
+ this.position = position;
+ this.callback = callback;
+ this.bytesRead = 0;
+ this.waiting = false;
+ }
+
+ read(sync) {
+ StreamZip.debugLog('read', this.position, this.bytesRead, this.length, this.offset);
+ this.waiting = true;
+ let err;
+ if (sync) {
+ let bytesRead = 0;
+ try {
+ bytesRead = fs.readSync(
+ this.fd,
+ this.buffer,
+ this.offset + this.bytesRead,
+ this.length - this.bytesRead,
+ this.position + this.bytesRead
+ );
+ } catch (e) {
+ err = e;
+ }
+            this.readCallback(sync, err, err ? null : bytesRead);
+ } else {
+ fs.read(
+ this.fd,
+ this.buffer,
+ this.offset + this.bytesRead,
+ this.length - this.bytesRead,
+ this.position + this.bytesRead,
+ this.readCallback.bind(this, sync)
+ );
+ }
+ }
+
+ readCallback(sync, err, bytesRead) {
+ if (typeof bytesRead === 'number') {
+ this.bytesRead += bytesRead;
+ }
+ if (err || !bytesRead || this.bytesRead === this.length) {
+ this.waiting = false;
+ return this.callback(err, this.bytesRead);
+ } else {
+ this.read(sync);
+ }
+ }
+}
+
+class FileWindowBuffer {
+ constructor(fd) {
+ this.position = 0;
+ this.buffer = Buffer.alloc(0);
+ this.fd = fd;
+ this.fsOp = null;
+ }
+
+ checkOp() {
+ if (this.fsOp && this.fsOp.waiting) {
+ throw new Error('Operation in progress');
+ }
+ }
+
+ read(pos, length, callback) {
+ this.checkOp();
+ if (this.buffer.length < length) {
+ this.buffer = Buffer.alloc(length);
+ }
+ this.position = pos;
+ this.fsOp = new FsRead(this.fd, this.buffer, 0, length, this.position, callback).read();
+ }
+
+ expandLeft(length, callback) {
+ this.checkOp();
+ this.buffer = Buffer.concat([Buffer.alloc(length), this.buffer]);
+ this.position -= length;
+ if (this.position < 0) {
+ this.position = 0;
+ }
+ this.fsOp = new FsRead(this.fd, this.buffer, 0, length, this.position, callback).read();
+ }
+
+ expandRight(length, callback) {
+ this.checkOp();
+ const offset = this.buffer.length;
+ this.buffer = Buffer.concat([this.buffer, Buffer.alloc(length)]);
+ this.fsOp = new FsRead(
+ this.fd,
+ this.buffer,
+ offset,
+ length,
+ this.position + offset,
+ callback
+ ).read();
+ }
+
+ moveRight(length, callback, shift) {
+ this.checkOp();
+ if (shift) {
+ this.buffer.copy(this.buffer, 0, shift);
+ } else {
+ shift = 0;
+ }
+ this.position += shift;
+ this.fsOp = new FsRead(
+ this.fd,
+ this.buffer,
+ this.buffer.length - shift,
+ shift,
+ this.position + this.buffer.length - shift,
+ callback
+ ).read();
+ }
+}
+
+class EntryDataReaderStream extends stream.Readable {
+ constructor(fd, offset, length) {
+ super();
+ this.fd = fd;
+ this.offset = offset;
+ this.length = length;
+ this.pos = 0;
+ this.readCallback = this.readCallback.bind(this);
+ }
+
+ _read(n) {
+ const buffer = Buffer.alloc(Math.min(n, this.length - this.pos));
+ if (buffer.length) {
+ fs.read(this.fd, buffer, 0, buffer.length, this.offset + this.pos, this.readCallback);
+ } else {
+ this.push(null);
+ }
+ }
+
+ readCallback(err, bytesRead, buffer) {
+ this.pos += bytesRead;
+ if (err) {
+ this.emit('error', err);
+ this.push(null);
+ } else if (!bytesRead) {
+ this.push(null);
+ } else {
+ if (bytesRead !== buffer.length) {
+ buffer = buffer.slice(0, bytesRead);
+ }
+ this.push(buffer);
+ }
+ }
+}
+
+class EntryVerifyStream extends stream.Transform {
+ constructor(baseStm, crc, size) {
+ super();
+ this.verify = new CrcVerify(crc, size);
+ baseStm.on('error', (e) => {
+ this.emit('error', e);
+ });
+ }
+
+ _transform(data, encoding, callback) {
+ let err;
+ try {
+ this.verify.data(data);
+ } catch (e) {
+ err = e;
+ }
+ callback(err, data);
+ }
+}
+
+class CrcVerify {
+ constructor(crc, size) {
+ this.crc = crc;
+ this.size = size;
+ this.state = {
+ crc: ~0,
+ size: 0,
+ };
+ }
+
+ data(data) {
+ const crcTable = CrcVerify.getCrcTable();
+ let crc = this.state.crc;
+ let off = 0;
+ let len = data.length;
+ while (--len >= 0) {
+ crc = crcTable[(crc ^ data[off++]) & 0xff] ^ (crc >>> 8);
+ }
+ this.state.crc = crc;
+ this.state.size += data.length;
+ if (this.state.size >= this.size) {
+ const buf = Buffer.alloc(4);
+ buf.writeInt32LE(~this.state.crc & 0xffffffff, 0);
+ crc = buf.readUInt32LE(0);
+ if (crc !== this.crc) {
+ throw new Error('Invalid CRC');
+ }
+ if (this.state.size !== this.size) {
+ throw new Error('Invalid size');
+ }
+ }
+ }
+
+ static getCrcTable() {
+ let crcTable = CrcVerify.crcTable;
+ if (!crcTable) {
+ CrcVerify.crcTable = crcTable = [];
+ const b = Buffer.alloc(4);
+ for (let n = 0; n < 256; n++) {
+ let c = n;
+ for (let k = 8; --k >= 0; ) {
+ if ((c & 1) !== 0) {
+ c = 0xedb88320 ^ (c >>> 1);
+ } else {
+ c = c >>> 1;
+ }
+ }
+ if (c < 0) {
+ b.writeInt32LE(c, 0);
+ c = b.readUInt32LE(0);
+ }
+ crcTable[n] = c;
+ }
+ }
+ return crcTable;
+ }
+}
+
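+// MS-DOS timestamp layout: time = 5 bits hours, 6 bits minutes, 5 bits seconds/2;
+// date = 7 bits years since 1980, 4 bits month, 5 bits day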
+function parseZipTime(timebytes, datebytes) {
+ const timebits = toBits(timebytes, 16);
+ const datebits = toBits(datebytes, 16);
+
+ const mt = {
+ h: parseInt(timebits.slice(0, 5).join(''), 2),
+ m: parseInt(timebits.slice(5, 11).join(''), 2),
+ s: parseInt(timebits.slice(11, 16).join(''), 2) * 2,
+ Y: parseInt(datebits.slice(0, 7).join(''), 2) + 1980,
+ M: parseInt(datebits.slice(7, 11).join(''), 2),
+ D: parseInt(datebits.slice(11, 16).join(''), 2),
+ };
+ const dt_str = [mt.Y, mt.M, mt.D].join('-') + ' ' + [mt.h, mt.m, mt.s].join(':') + ' GMT+0';
+ return new Date(dt_str).getTime();
+}
+
+function toBits(dec, size) {
+ let b = (dec >>> 0).toString(2);
+ while (b.length < size) {
+ b = '0' + b;
+ }
+ return b.split('');
+}
+
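+// Reads a little-endian uint64 as a plain JS number by combining two 32-bit
+// reads; exact for values below 2^53 (ZIP64 sizes/offsets in practice)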
+function readUInt64LE(buffer, offset) {
+ return buffer.readUInt32LE(offset + 4) * 0x0000000100000000 + buffer.readUInt32LE(offset);
+}
+
+module.exports = StreamZip;
diff --git a/modules/gar_utils/node_modules/node-stream-zip/package.json b/modules/gar_utils/node_modules/node-stream-zip/package.json
new file mode 100644
index 0000000..70d389b
--- /dev/null
+++ b/modules/gar_utils/node_modules/node-stream-zip/package.json
@@ -0,0 +1,77 @@
+{
+ "_from": "node-stream-zip",
+ "_id": "node-stream-zip@1.15.0",
+ "_inBundle": false,
+ "_integrity": "sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw==",
+ "_location": "/node-stream-zip",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "tag",
+ "registry": true,
+ "raw": "node-stream-zip",
+ "name": "node-stream-zip",
+ "escapedName": "node-stream-zip",
+ "rawSpec": "",
+ "saveSpec": null,
+ "fetchSpec": "latest"
+ },
+ "_requiredBy": [
+ "#USER",
+ "/"
+ ],
+ "_resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.15.0.tgz",
+ "_shasum": "158adb88ed8004c6c49a396b50a6a5de3bca33ea",
+ "_spec": "node-stream-zip",
+ "_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1",
+ "author": {
+ "name": "Antelle",
+ "email": "antelle.net@gmail.com",
+ "url": "https://github.com/antelle"
+ },
+ "bugs": {
+ "url": "https://github.com/antelle/node-stream-zip/issues",
+ "email": "antelle.net@gmail.com"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "node.js library for reading and extraction of ZIP archives",
+ "devDependencies": {
+ "@types/node": "^14.14.6",
+ "eslint": "^7.19.0",
+ "nodeunit": "^0.11.3",
+ "prettier": "^2.2.1"
+ },
+ "engines": {
+ "node": ">=0.12.0"
+ },
+ "files": [
+ "LICENSE",
+ "node_stream_zip.js",
+ "node_stream_zip.d.ts"
+ ],
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/antelle"
+ },
+ "homepage": "https://github.com/antelle/node-stream-zip",
+ "keywords": [
+ "zip",
+ "archive",
+ "unzip",
+ "stream"
+ ],
+ "license": "MIT",
+ "main": "node_stream_zip.js",
+ "name": "node-stream-zip",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/antelle/node-stream-zip.git"
+ },
+ "scripts": {
+ "check-types": "tsc node_stream_zip.d.ts",
+ "lint": "eslint node_stream_zip.js test/tests.js",
+ "test": "nodeunit test/tests.js"
+ },
+ "types": "node_stream_zip.d.ts",
+ "version": "1.15.0"
+}
diff --git a/modules/gar_utils/node_modules/sax/LICENSE b/modules/gar_utils/node_modules/sax/LICENSE
new file mode 100644
index 0000000..6e8e4c1
--- /dev/null
+++ b/modules/gar_utils/node_modules/sax/LICENSE
@@ -0,0 +1,41 @@
+The ISC License
+
+Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+====
+
+`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
+License, as follows:
+
+Copyright (c) 2010-2022 Mathias Bynens
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/modules/gar_utils/node_modules/sax/README.md b/modules/gar_utils/node_modules/sax/README.md
new file mode 100644
index 0000000..afcd3f3
--- /dev/null
+++ b/modules/gar_utils/node_modules/sax/README.md
@@ -0,0 +1,225 @@
+# sax js
+
+A sax-style parser for XML and HTML.
+
+Designed with [node](http://nodejs.org/) in mind, but should work fine in
+the browser or other CommonJS implementations.
+
+## What This Is
+
+* A very simple tool to parse through an XML string.
+* A stepping stone to a streaming HTML parser.
+* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML
+ docs.
+
+## What This Is (probably) Not
+
+* An HTML Parser - That's a fine goal, but this isn't it. It's just
+ XML.
+* A DOM Builder - You can use it to build an object model out of XML,
+ but it doesn't do that out of the box.
+* XSLT - No DOM = no querying.
+* 100% Compliant with (some other SAX implementation) - Most SAX
+ implementations are in Java and do a lot more than this does.
+* An XML Validator - It does a little validation when in strict mode, but
+ not much.
+* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic
+ masochism.
+* A DTD-aware Thing - Fetching DTDs is a much bigger job.
+
+## Regarding `<!DOCTYPE`s and `<!ENTITY`s
+
+The parser will handle the basic XML entities in text nodes and attribute
+values: `&amp; &lt; &gt; &apos; &quot;`. It's possible to define additional
+entities in XML by putting them in the DTD. This parser doesn't do anything
+with that. If you want to listen to the `doctype` event, and then fetch the
+doctypes, and read the entities and add them to `parser.ENTITIES`, then be
+my guest.
+
+Unknown entities will fail in strict mode, and in loose mode, will pass
+through unmolested.
+
+## Usage
+
+```javascript
+var sax = require("./lib/sax"),
+  strict = true, // set to false for html-mode
+  parser = sax.parser(strict);
+
+parser.onerror = function (e) {
+  // an error happened.
+};
+parser.ontext = function (t) {
+  // got some text.  t is the string of text.
+};
+parser.onopentag = function (node) {
+  // opened a tag.  node has "name" and "attributes"
+};
+parser.onattribute = function (attr) {
+  // an attribute.  attr has "name" and "value"
+};
+parser.onend = function () {
+  // parser stream is done, and ready to have more stuff written to it.
+};
+
+parser.write('<xml>Hello, <who name="world">world</who>!</xml>').close();
+
+// stream usage
+// takes the same options as the parser
+var saxStream = require("sax").createStream(strict, options)
+saxStream.on("error", function (e) {
+ // unhandled errors will throw, since this is a proper node
+ // event emitter.
+ console.error("error!", e)
+ // clear the error
+ this._parser.error = null
+ this._parser.resume()
+})
+saxStream.on("opentag", function (node) {
+ // same object as above
+})
+// pipe is supported, and it's readable/writable
+// same chunks coming in also go out.
+fs.createReadStream("file.xml")
+ .pipe(saxStream)
+ .pipe(fs.createWriteStream("file-copy.xml"))
+```
+
+
+## Arguments
+
+Pass the following arguments to the parser function. All are optional.
+
+`strict` - Boolean. Whether or not to be a jerk. Default: `false`.
+
+`opt` - Object bag of settings regarding string formatting. All default to `false`.
+
+Settings supported:
+
+* `trim` - Boolean. Whether or not to trim text and comment nodes.
+* `normalize` - Boolean. If true, then turn any whitespace into a single
+ space.
+* `lowercase` - Boolean. If true, then lowercase tag names and attribute names
+ in loose mode, rather than uppercasing them.
+* `xmlns` - Boolean. If true, then namespaces are supported.
+* `position` - Boolean. If false, then don't track line/col/position.
+* `strictEntities` - Boolean. If true, only parse [predefined XML
+ entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent)
+  (`&amp;`, `&apos;`, `&gt;`, `&lt;`, and `&quot;`)
+
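+A rough sketch combining several of these settings (values are illustrative):
+
+```javascript
+var sax = require("sax")
+
+// strict = true makes malformed XML raise errors instead of being coerced
+var parser = sax.parser(true, {
+  trim: true,      // trim text and comment nodes
+  normalize: true, // collapse whitespace runs into single spaces
+  xmlns: true,     // expose namespace bindings on tags and attributes
+  position: true   // keep line/column/position tracking on
+})
+
+parser.onopentag = function (node) {
+  console.log(node.name, node.attributes)
+}
+parser.write("<a xmlns:x='uri'><x:b>  hi  </x:b></a>").close()
+```
+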
+## Methods
+
+`write` - Write bytes onto the stream. You don't have to do this all at
+once. You can keep writing as much as you want.
+
+`close` - Close the stream. Once closed, no more data may be written until
+it is done processing the buffer, which is signaled by the `end` event.
+
+`resume` - To gracefully handle errors, assign a listener to the `error`
+event. Then, when the error is taken care of, you can call `resume` to
+continue parsing. Otherwise, the parser will not continue while in an error
+state.
+
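+A short sketch of the `write`/`resume`/`close` cycle described above:
+
+```javascript
+var sax = require("sax")
+var parser = sax.parser(true) // strict: malformed input raises errors
+
+parser.onerror = function (e) {
+  console.error("recovering from:", e.message)
+  parser.resume() // clear the error state so parsing can continue
+}
+parser.ontext = function (t) {
+  console.log("text:", t)
+}
+
+parser.write("<a>ok</b>") // mismatched close tag triggers onerror
+parser.write("<c>more</c>")
+parser.close()
+```
+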
+## Members
+
+At all times, the parser object will have the following members:
+
+`line`, `column`, `position` - Indications of the position in the XML
+document where the parser currently is looking.
+
+`startTagPosition` - Indicates the position where the current tag starts.
+
+`closed` - Boolean indicating whether or not the parser can be written to.
+If it's `true`, then wait for the `ready` event to write again.
+
+`strict` - Boolean indicating whether or not the parser is a jerk.
+
+`opt` - Any options passed into the constructor.
+
+`tag` - The current tag being dealt with.
+
+And a bunch of other stuff that you probably shouldn't touch.
+
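+For example, a small sketch reading the position members while parsing:
+
+```javascript
+var sax = require("sax")
+var parser = sax.parser(true)
+
+parser.onopentag = function (node) {
+  // line/column/position reflect where the parser is currently looking
+  console.log(node.name, "at line", parser.line, "column", parser.column)
+}
+parser.write("<a>\n  <b>hi</b>\n</a>").close()
+```
+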
+## Events
+
+All events emit with a single argument. To listen to an event, assign a
+function to `on<eventname>`. Functions get executed in the this-context of
+the parser object. The list of supported events are also in the exported
+`EVENTS` array.
+
+When using the stream interface, assign handlers using the EventEmitter
+`on` function in the normal fashion.
+
+`error` - Indication that something bad happened. The error will be hanging
+out on `parser.error`, and must be deleted before parsing can continue. By
+listening to this event, you can keep an eye on that kind of stuff. Note:
+this happens *much* more in strict mode. Argument: instance of `Error`.
+
+`text` - Text node. Argument: string of text.
+
+`doctype` - The `<!DOCTYPE` declaration. Argument: doctype string.
+
+`processinginstruction` - Stuff like `<?xml foo="blerg" ?>`. Argument:
+object with `name` and `body` members. Attributes are not parsed, as
+processing instructions have implementation dependent semantics.
+
+`sgmldeclaration` - Random SGML declarations. Stuff like `<!ENTITY e "blerg">`
+would trigger this kind of event. This is a weird thing to support, so it
+might go away at some point. SAX isn't intended to be used to parse SGML,
+after all.
+
+`opentagstart` - Emitted immediately when the tag name is available,
+but before any attributes are encountered. Argument: object with a
+`name` field and an empty `attributes` set. Note that this is the
+same object that will later be emitted in the `opentag` event.
+
+`opentag` - An opening tag. Argument: object with `name` and `attributes`.
+In non-strict mode, tag names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, then it will contain
+namespace binding information on the `ns` member, and will have a
+`local`, `prefix`, and `uri` member.
+
+`closetag` - A closing tag. In loose mode, tags are auto-closed if their
+parent closes. In strict mode, well-formedness is enforced. Note that
+self-closing tags will have `closeTag` emitted immediately after `openTag`.
+Argument: tag name.
+
+`attribute` - An attribute node. Argument: object with `name` and `value`.
+In non-strict mode, attribute names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, it will also contain namespace
+information.
+
+`comment` - A comment node. Argument: the string of the comment.
+
+`opencdata` - The opening tag of a `<![CDATA[` block.
+
+`cdata` - The text of a `<![CDATA[` block. Since `<![CDATA[` blocks can get
+quite large, this event may fire multiple times for a single block, if it
+is broken up into multiple write()s. Argument: the string of random
+character data.
+
+`closecdata` - The closing tag (`]]>`) of a `<![CDATA[` block.
+
+`opennamespace` - If the `xmlns` option is set, when a namespace binding
+is created, this event will fire. Argument: object with `prefix` and `uri`
+members.
+
+`closenamespace` - If the `xmlns` option is set, when a namespace binding
+is destroyed, this event will fire. Argument: object with `prefix` and `uri`
+members.
+
+`end` - Indication that the closed stream has ended.
+
+`ready` - Indication that the stream has reset, and is ready to be written
+to.
+
+`noscript` - In non-strict mode, `<script>` tags trigger a `"script"`
+event, and their contents are not checked for special xml characters.
+If you pass `noscript: true`, then this behavior is suppressed.
+
+## Reporting Problems
+
+It's best to write a failing test if you find an issue. I will always
+accept pull requests with failing tests if they demonstrate intended
+behavior, but it is very hard to figure out what issue you're describing
+without a test. Writing a test is also the best way for you yourself
+to figure out if you really understand the issue you think you have with
+sax-js.
diff --git a/modules/gar_utils/node_modules/sax/lib/sax.js b/modules/gar_utils/node_modules/sax/lib/sax.js
new file mode 100644
index 0000000..ffd441a
--- /dev/null
+++ b/modules/gar_utils/node_modules/sax/lib/sax.js
@@ -0,0 +1,1574 @@
+;(function (sax) { // wrapper for non-node envs
+ sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+ sax.SAXParser = SAXParser
+ sax.SAXStream = SAXStream
+ sax.createStream = createStream
+
+ // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
+ // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
+ // since that's the earliest that a buffer overrun could occur. This way, checks are
+ // as rare as required, but as often as necessary to ensure never crossing this bound.
+ // Furthermore, buffers are only tested at most once per write(), so passing a very
+ // large string into write() might have undesirable effects, but this is manageable by
+ // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
+ // edge case, result in creating at most one complete copy of the string passed in.
+ // Set to Infinity to have unlimited buffers.
+ sax.MAX_BUFFER_LENGTH = 64 * 1024
+
+ var buffers = [
+ 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
+ 'procInstName', 'procInstBody', 'entity', 'attribName',
+ 'attribValue', 'cdata', 'script'
+ ]
+
+ sax.EVENTS = [
+ 'text',
+ 'processinginstruction',
+ 'sgmldeclaration',
+ 'doctype',
+ 'comment',
+ 'opentagstart',
+ 'attribute',
+ 'opentag',
+ 'closetag',
+ 'opencdata',
+ 'cdata',
+ 'closecdata',
+ 'error',
+ 'end',
+ 'ready',
+ 'script',
+ 'opennamespace',
+ 'closenamespace'
+ ]
+
+ function SAXParser (strict, opt) {
+ if (!(this instanceof SAXParser)) {
+ return new SAXParser(strict, opt)
+ }
+
+ var parser = this
+ clearBuffers(parser)
+ parser.q = parser.c = ''
+ parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
+ parser.opt = opt || {}
+ parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
+ parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
+ parser.tags = []
+ parser.closed = parser.closedRoot = parser.sawRoot = false
+ parser.tag = parser.error = null
+ parser.strict = !!strict
+ parser.noscript = !!(strict || parser.opt.noscript)
+ parser.state = S.BEGIN
+ parser.strictEntities = parser.opt.strictEntities
+ parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
+ parser.attribList = []
+
+ // namespaces form a prototype chain.
+ // it always points at the current tag,
+ // which protos to its parent tag.
+ if (parser.opt.xmlns) {
+ parser.ns = Object.create(rootNS)
+ }
+
+ // mostly just for error reporting
+ parser.trackPosition = parser.opt.position !== false
+ if (parser.trackPosition) {
+ parser.position = parser.line = parser.column = 0
+ }
+ emit(parser, 'onready')
+ }
+
+ if (!Object.create) {
+ Object.create = function (o) {
+ function F () {}
+ F.prototype = o
+ var newf = new F()
+ return newf
+ }
+ }
+
+ if (!Object.keys) {
+ Object.keys = function (o) {
+ var a = []
+ for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
+ return a
+ }
+ }
+
+ function checkBufferLength (parser) {
+ var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
+ var maxActual = 0
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ var len = parser[buffers[i]].length
+ if (len > maxAllowed) {
+ // Text/cdata nodes can get big, and since they're buffered,
+ // we can get here under normal conditions.
+ // Avoid issues by emitting the text node now,
+ // so at least it won't get any bigger.
+ switch (buffers[i]) {
+ case 'textNode':
+ closeText(parser)
+ break
+
+ case 'cdata':
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ break
+
+ case 'script':
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ break
+
+ default:
+ error(parser, 'Max buffer length exceeded: ' + buffers[i])
+ }
+ }
+ maxActual = Math.max(maxActual, len)
+ }
+ // schedule the next check for the earliest possible buffer overrun.
+ var m = sax.MAX_BUFFER_LENGTH - maxActual
+ parser.bufferCheckPosition = m + parser.position
+ }
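+
+ // Worked example of the rescheduling above: with MAX_BUFFER_LENGTH = 65536,
+ // the largest buffer at 40000 chars and parser.position at 100000, the next
+ // check lands at 100000 + (65536 - 40000) = 125536, the earliest position at
+ // which any buffer could cross the limit.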
+
+ function clearBuffers (parser) {
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ parser[buffers[i]] = ''
+ }
+ }
+
+ function flushBuffers (parser) {
+ closeText(parser)
+ if (parser.cdata !== '') {
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ }
+ if (parser.script !== '') {
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ }
+ }
+
+ SAXParser.prototype = {
+ end: function () { end(this) },
+ write: write,
+ resume: function () { this.error = null; return this },
+ close: function () { return this.write(null) },
+ flush: function () { flushBuffers(this) }
+ }
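+
+ // Typical push-parsing lifecycle (a sketch):
+ //
+ //   var p = new SAXParser(true, {})
+ //   p.ontext = function (t) { /* consume text */ }
+ //   p.write('<root>')    // write() accepts any number of chunks
+ //   p.write('hi</root>')
+ //   p.close()            // same as p.write(null); fires onend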
+
+ var Stream
+ try {
+ Stream = require('stream').Stream
+ } catch (ex) {
+ Stream = function () {}
+ }
+ if (!Stream) Stream = function () {}
+
+ var streamWraps = sax.EVENTS.filter(function (ev) {
+ return ev !== 'error' && ev !== 'end'
+ })
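+
+ // 'error' and 'end' are excluded because the SAXStream constructor below
+ // wires those two handlers up by hand.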
+
+ function createStream (strict, opt) {
+ return new SAXStream(strict, opt)
+ }
+
+ function SAXStream (strict, opt) {
+ if (!(this instanceof SAXStream)) {
+ return new SAXStream(strict, opt)
+ }
+
+ Stream.apply(this)
+
+ this._parser = new SAXParser(strict, opt)
+ this.writable = true
+ this.readable = true
+
+ var me = this
+
+ this._parser.onend = function () {
+ me.emit('end')
+ }
+
+ this._parser.onerror = function (er) {
+ me.emit('error', er)
+
+ // if the handler didn't throw, the error was handled;
+ // go ahead and clear it, so we can write again.
+ me._parser.error = null
+ }
+
+ this._decoder = null
+
+ streamWraps.forEach(function (ev) {
+ Object.defineProperty(me, 'on' + ev, {
+ get: function () {
+ return me._parser['on' + ev]
+ },
+ set: function (h) {
+ if (!h) {
+ me.removeAllListeners(ev)
+ me._parser['on' + ev] = h
+ return h
+ }
+ me.on(ev, h)
+ },
+ enumerable: true,
+ configurable: false
+ })
+ })
+ }
+
+ SAXStream.prototype = Object.create(Stream.prototype, {
+ constructor: {
+ value: SAXStream
+ }
+ })
+
+ SAXStream.prototype.write = function (data) {
+ if (typeof Buffer === 'function' &&
+ typeof Buffer.isBuffer === 'function' &&
+ Buffer.isBuffer(data)) {
+ if (!this._decoder) {
+ var SD = require('string_decoder').StringDecoder
+ this._decoder = new SD('utf8')
+ }
+ data = this._decoder.write(data)
+ }
+
+ this._parser.write(data.toString())
+ this.emit('data', data)
+ return true
+ }
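+
+ // The StringDecoder above is what keeps a multi-byte UTF-8 character that is
+ // split across two Buffer chunks from reaching the parser as garbage.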
+
+ SAXStream.prototype.end = function (chunk) {
+ if (chunk && chunk.length) {
+ this.write(chunk)
+ }
+ this._parser.end()
+ return true
+ }
+
+ SAXStream.prototype.on = function (ev, handler) {
+ var me = this
+ if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
+ me._parser['on' + ev] = function () {
+ var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
+ args.splice(0, 0, ev)
+ me.emit.apply(me, args)
+ }
+ }
+
+ return Stream.prototype.on.call(me, ev, handler)
+ }
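+
+ // Streaming usage (a sketch; 'doc.xml' is a placeholder path):
+ //
+ //   var fs = require('fs')
+ //   fs.createReadStream('doc.xml')
+ //     .pipe(createStream(true))
+ //     .on('opentag', function (node) { console.log(node.name) })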
+
+ // this really needs to be replaced with character classes.
+ // XML allows all manner of ridiculous numbers and digits.
+ var CDATA = '[CDATA['
+ var DOCTYPE = 'DOCTYPE'
+ var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
+ var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
+ var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
+
+ // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
+ // This implementation works on strings, a single character at a time
+ // as such, it cannot ever support astral-plane characters (10000-EFFFF)
+ // without a significant breaking change to either this parser, or the
+ // JavaScript language. Implementation of an emoji-capable xml parser
+ // is left as an exercise for the reader.
+ var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+
+ var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+ var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+ var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
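+
+ // For example: nameStart.test('a') === true, nameStart.test('1') === false,
+ // nameBody.test('1') === true -- a name may contain digits, '.' or '-', but
+ // may not begin with them.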
+
+ function isWhitespace (c) {
+ return c === ' ' || c === '\n' || c === '\r' || c === '\t'
+ }
+
+ function isQuote (c) {
+ return c === '"' || c === '\''
+ }
+
+ function isAttribEnd (c) {
+ return c === '>' || isWhitespace(c)
+ }
+
+ function isMatch (regex, c) {
+ return regex.test(c)
+ }
+
+ function notMatch (regex, c) {
+ return !isMatch(regex, c)
+ }
+
+ var S = 0
+ sax.STATE = {
+ BEGIN: S++, // leading byte order mark or whitespace
+ BEGIN_WHITESPACE: S++, // leading whitespace
+ TEXT: S++, // general stuff
+ TEXT_ENTITY: S++, // & and such.
+ OPEN_WAKA: S++, // <
+ SGML_DECL: S++, // <!BLARG
+ SCRIPT: S++, // <script> ...