ЦИТК-689 - "Integration with GAR" extension (GAR)

Mikhail Chechnev 2023-11-02 19:24:44 +03:00
parent 1b6e01a19e
commit a1025af9ad
41 changed files with 13818 additions and 2 deletions

9
.gitignore vendored

@@ -1,5 +1,10 @@
# Dependency directories
# Dependencies
node_modules/
# VS Code
.vscode/
# Keep everything in modules
!modules/
!modules/*/
!modules/*/node_modules

451
modules/gar.js Normal file

@@ -0,0 +1,451 @@
/*
Service for integrating Parus 8 with a WEB API
Add-on module: integration with GAR (GAR)
*/
//------------------------------
// External libraries
//------------------------------
const fs = require("fs"); //File system access
const { pipeline } = require("stream"); //Stream handling
const { promisify } = require("util"); //Utility helpers
const xml2js = require("xml2js"); //XML to JSON and JSON to XML conversion
const confServ = require("../config"); //Application server settings
const conf = require("./gar_config"); //Settings of the "GAR integration" extension
const StreamZip = require("./gar_utils/node_modules/node-stream-zip"); //ZIP archive handling
const fetch = require("./gar_utils/node_modules/node-fetch"); //HTTP requests
const { WorkersPool } = require("./gar_utils/workers_pool"); //Worker pool
const { logInf, makeTaskMessage, logWrn, logErr, stringToDate, dateToISOString } = require("./gar_utils/utils"); //Helper functions
//--------------------------
// Global identifiers
//--------------------------
//Module name for logging
const MODULE = `GAR`;
//Worker pool options
const workersPoolOptions = {
workerPath: "./modules/gar_utils/import.js",
limit: conf.common.nThreads,
timeout: 0,
drainTimeout: 60000
};
//Worker pool
let WP = null;
//Parsing queue
let PARSE_QUEUE = [];
//Archive entries being processed
let ENTRIES = [];
//Total entries in the archive
let ENTRIES_COUNT = 0;
//Total files in the archive
let FILES_COUNT = 0;
//Total size of files in the archive
let TOTAL_SIZE = 0;
//Size of successfully processed files
let PROCESSED_SIZE = 0;
//Number of successfully processed files
let PROCESSED_COUNT = 0;
//Size of files processed with errors
let ERROR_SIZE = 0;
//Number of files processed with errors
let ERROR_COUNT = 0;
//Start time
let START_TIME = null;
//End time
let END_TIME = null;
//Execution log
let LOAD_LOG = null;
//Archive fully unpacked flag
let ZIP_UNPACKED = false;
//------------
// Module body
//------------
//Print common statistics
const printCommonStats = () => {
logWrn(`Total entries: ${ENTRIES_COUNT}, files to process: ${FILES_COUNT}`, MODULE, LOAD_LOG);
logWrn(`Size of files to process: ${TOTAL_SIZE} bytes`, MODULE, LOAD_LOG);
};
//Print import statistics
const printImportStats = () => {
logWrn(`Unprocessed files: ${FILES_COUNT - ERROR_COUNT - PROCESSED_COUNT}`, MODULE, LOAD_LOG);
logWrn(`Size of unprocessed files: ${TOTAL_SIZE - ERROR_SIZE - PROCESSED_SIZE} bytes`, MODULE, LOAD_LOG);
logWrn(`Files processed with errors: ${ERROR_COUNT}`, MODULE, LOAD_LOG);
logWrn(`Size of files processed with errors: ${ERROR_SIZE} bytes`, MODULE, LOAD_LOG);
logWrn(`Files processed successfully: ${PROCESSED_COUNT}`, MODULE, LOAD_LOG);
logWrn(`Size of files processed successfully: ${PROCESSED_SIZE} bytes`, MODULE, LOAD_LOG);
logWrn(`Started: ${START_TIME}`, MODULE, LOAD_LOG);
logWrn(`Finished: ${END_TIME}`, MODULE, LOAD_LOG);
logWrn(`Duration: ${(END_TIME.getTime() - START_TIME.getTime()) / 1000} seconds`, MODULE, LOAD_LOG);
};
//Temporary file cleanup
const removeTempFile = fileFullName => {
logInf(`Removing temporary file "${fileFullName}"...`, MODULE, LOAD_LOG);
fs.rm(fileFullName, { maxRetries: 5, retryDelay: 1000 }, err => {
if (err) logErr(`Failed to remove temporary file "${fileFullName}": ${err.message}`, MODULE, LOAD_LOG);
else logInf(`Removed "${fileFullName}".`, MODULE, LOAD_LOG);
});
};
//Check whether an archive entry should be loaded
const needLoad = ({ processedCount, entry, processLimit, processFilter }) =>
(processLimit === 0 || processedCount < processLimit) &&
!entry.isDirectory &&
entry.name.toLowerCase().endsWith(".xml") &&
(processFilter === null || entry.name.match(processFilter));
//Process the parsing queue
const processParseQueue = async () => {
//If the queue still has unprocessed items
if (PARSE_QUEUE.length > 0) {
//Take the next queue item
const { entry, fileFullName, fileName, garVersionInfo } = PARSE_QUEUE.shift();
//If the worker pool is running
if (WP.started) {
//Submit the task
try {
await WP.sendTask(makeTaskMessage({ payload: { garVersionInfo, fileFullName, fileName } }), (e, p) => {
//Remove the temporary file
removeTempFile(fileFullName);
//On error
if (e) {
//Size of files processed with errors
ERROR_SIZE += entry.size;
//Error count
ERROR_COUNT++;
//Error message
let msg = `While processing "${entry.name}": ${e.message}`;
logErr(msg, MODULE, LOAD_LOG);
} else {
//Size of successfully processed files
PROCESSED_SIZE += entry.size;
//Number of successfully processed files
PROCESSED_COUNT++;
logWrn(`"${entry.name}" processed successfully.`, MODULE, LOAD_LOG);
}
logWrn(
`Processed so far: ${PROCESSED_SIZE + ERROR_SIZE} bytes, ${Math.round(((PROCESSED_SIZE + ERROR_SIZE) / TOTAL_SIZE) * 100)}%`,
MODULE,
LOAD_LOG
);
});
} catch (e) {
//Remove the temporary file
logErr(`While queueing a task for "${entry.name}": ${e.message}`, MODULE, LOAD_LOG);
removeTempFile(fileFullName);
}
} else {
//The worker pool is stopped (a forced shutdown command may have arrived)
logErr(`While queueing a task for "${entry.name}": the pool is already stopped. Aborting.`, MODULE, LOAD_LOG);
removeTempFile(fileFullName);
}
}
if (PARSE_QUEUE.length > 0 || !ZIP_UNPACKED) setTimeout(processParseQueue, 0);
};
//Convert to XML
const toXML = obj => {
const builder = new xml2js.Builder();
return builder.buildObject(obj);
};
//Handler invoked after GAR updates have been received
const afterLoad = async prms => {
if (!conf.common.sDownloadsDir) throw new Error(`Path for downloaded files is not specified.`);
if (!conf.common.sTmpDir) throw new Error(`Path for temporary files is not specified.`);
if (!conf.common.sLogDir) throw new Error(`Path for log files is not specified.`);
//Information about the data being loaded
const LOAD_INFO = {
REGIONS: prms.options.sRegions,
GARDATELAST: stringToDate(prms.options.dGarDateLast),
HOUSESLOADED: Number(prms.options.nHousesLoaded),
STEADSLOADED: Number(prms.options.nSteadsLoaded)
};
//If the regions to load and the date of the last loaded GAR version are specified
if (LOAD_INFO.REGIONS && LOAD_INFO.GARDATELAST) {
//Identifiers of the load processes
let loadIdents = [];
//Logging identifier
const logIdent = Date.now();
//Open the execution log
LOAD_LOG = fs.createWriteStream(`${conf.common.sLogDir}/gar_load_${logIdent}.log`);
LOAD_LOG.on("error", e => {});
LOAD_LOG.on("close", () => {});
logInf("GAR load execution log opened.", MODULE, LOAD_LOG);
//GAR version information
const requestRespJson = JSON.parse(prms.queue.blResp.toString());
//Process the received GAR version information
logInf(`Processing received GAR version information...`, MODULE, LOAD_LOG);
//GAR versions to load
let garVersions = [];
//Regions
const regions = LOAD_INFO.REGIONS.split(";");
//Last loaded GAR version
const garDateLast = LOAD_INFO.GARDATELAST;
//Houses loaded flag
const housesLoaded = LOAD_INFO.HOUSESLOADED ? LOAD_INFO.HOUSESLOADED : 0;
//Steads loaded flag
const steadsLoaded = LOAD_INFO.STEADSLOADED ? LOAD_INFO.STEADSLOADED : 0;
//If the last loaded GAR version is not specified
if (!garDateLast) throw new Error(`The last loaded GAR version is not specified, updating is unavailable.`);
//Iterate over the response elements
for (let respElement of requestRespJson) {
//GAR version date
const garVersionDate = stringToDate(respElement.Date);
//URL of the update data
const garXmlDeltaUrl = respElement.GarXMLDeltaURL;
//If both the date and the update URL are specified
if (garVersionDate && garXmlDeltaUrl) {
//If the version was released after the last loaded one
if (garDateLast < garVersionDate) {
//Store the GAR version
garVersions.push({
versionDate: dateToISOString(garVersionDate),
xmlDeltaUrl: garXmlDeltaUrl
});
}
} else {
throw new Error(`Failed to correctly determine GAR version information.`);
}
}
logInf(`Received GAR version information processed.`, MODULE, LOAD_LOG);
//If no GAR versions to load were found
if (!garVersions || garVersions.length == 0)
throw new Error(
`Failed to determine GAR versions to load released after ${garDateLast.toISOString().substring(0, 10)}.`
);
//Process the GAR versions
logInf(`Processing GAR versions...`, MODULE, LOAD_LOG);
//Sort the GAR versions in ascending order
garVersions.sort((a, b) => {
if (a.versionDate > b.versionDate) return 1;
if (a.versionDate === b.versionDate) return 0;
if (a.versionDate < b.versionDate) return -1;
});
//Worker pool
WP = new WorkersPool(workersPoolOptions);
//Start the background workers
logInf(`Starting workers...`, MODULE, LOAD_LOG);
await WP.start({
dbBuferSize: conf.dbConnect.nBufferSize,
fileChunkSize: conf.common.nFileChunkSize,
loadLog: LOAD_LOG,
dbConn: {
sUser: confServ.dbConnect.sUser,
sPassword: confServ.dbConnect.sPassword,
sConnectString: confServ.dbConnect.sConnectString,
sSchema: confServ.dbConnect.sSchema
}
});
logInf(`Workers started.`, MODULE, LOAD_LOG);
//GAR version being processed
let garVersion = garVersions[0];
//Process the GAR version
logInf(`Processing GAR version "${garVersion.versionDate}"...`, MODULE, LOAD_LOG);
//Whether the file needs to be downloaded
let downloadFlag = true;
//Full download path (temporary variable)
let fileFullNameTmp = `${conf.common.sDownloadsDir}/${garVersion.versionDate}.zip`;
//If the file was downloaded earlier
if (fs.existsSync(fileFullNameTmp)) {
logInf(`File "${fileFullNameTmp}" already exists.`, MODULE, LOAD_LOG);
//If reusing the existing file is allowed
if (conf.common.bDownloadsUseExists) downloadFlag = false;
else fileFullNameTmp = `${conf.common.sDownloadsDir}/${garVersion.versionDate}_${logIdent}.zip`;
}
//Full download path
const fileFullName = fileFullNameTmp;
//If the file needs to be downloaded
if (downloadFlag) {
//Download the file
try {
logInf(`Downloading "${garVersion.xmlDeltaUrl}" into directory "${conf.common.sDownloadsDir}"...`, MODULE, LOAD_LOG);
const streamPipeline = promisify(pipeline);
const fileData = await fetch(garVersion.xmlDeltaUrl, { redirect: "follow", follow: 20 });
if (!fileData.ok) throw new Error(`Failed to download "${garVersion.xmlDeltaUrl}": ${fileData.statusText}.`);
await streamPipeline(fileData.body, fs.createWriteStream(fileFullName));
logInf(`File "${fileFullName}" downloaded.`, MODULE, LOAD_LOG);
} catch (e) {
const errorMessage = `Error downloading "${garVersion.xmlDeltaUrl}": ${e.message}.`;
logErr(errorMessage, MODULE, LOAD_LOG);
throw new Error(errorMessage);
}
}
//Reset state
ENTRIES = [];
TOTAL_SIZE = 0;
FILES_COUNT = 0;
PARSE_QUEUE = [];
ENTRIES_COUNT = 0;
PROCESSED_SIZE = 0;
PROCESSED_COUNT = 0;
ERROR_SIZE = 0;
ERROR_COUNT = 0;
START_TIME = null;
END_TIME = null;
ZIP_UNPACKED = false;
//Analyze the archive
logInf(`Reading archive...`, MODULE, LOAD_LOG);
const zip = new StreamZip.async({ file: fileFullName });
const entries = await zip.entries();
//Iterate over the archive entries
for (const entry of Object.values(entries)) {
//Archive entry count
ENTRIES_COUNT++;
//Path of the file inside the archive
const path = entry.name.split("/");
//If the path layout is suitable
if ([1, 2].includes(path.length)) {
//Region
const region = path.length == 2 ? path[0] : "";
//If the region (when present) is among the requested regions and the file matches the load conditions
if (
(!region || !regions || (region && regions && regions.includes(region))) &&
needLoad({
processedCount: FILES_COUNT,
entry,
processLimit: conf.common.nLoadFilesLimit,
processFilter: conf.common.sLoadFilesMask
}) &&
(housesLoaded == 1 || ((!housesLoaded || housesLoaded != 1) && !path[path.length - 1].startsWith(`AS_HOUSES`))) &&
(steadsLoaded == 1 || ((!steadsLoaded || steadsLoaded != 1) && !path[path.length - 1].startsWith(`AS_STEADS`)))
) {
//Number of files matching the load conditions
FILES_COUNT++;
//Total size of files matching the load conditions
TOTAL_SIZE += entry.size;
//Remember the entry
ENTRIES.push(entry);
}
}
}
//Sort the entries by file size, ascending
ENTRIES.sort((a, b) => (a.size > b.size ? 1 : a.size < b.size ? -1 : 0));
printCommonStats();
logInf(`Archive read.`, MODULE, LOAD_LOG);
//Process the parsing queue
setTimeout(processParseQueue, 0);
//Archive processing start time
START_TIME = new Date();
//Process identifier
const ident = Date.now();
//Directory for the archive's temporary files
const garVersionDir = `${conf.common.sTmpDir}/${garVersion.versionDate}`;
//If the temporary files directory does not exist
if (!fs.existsSync(garVersionDir)) {
//Create the directory
try {
fs.mkdirSync(garVersionDir);
} catch (e) {
throw new Error(`Failed to create directory "${garVersionDir}": ${e.message}`);
}
}
//Iterate over the archive entries
for (const entry of ENTRIES) {
//Path of the archive entry
const path = entry.name.split("/");
//File name
const unzipFileName = path[path.length - 1];
//Region
const region = path.length == 2 ? path[0] : "";
//Full path to the file
const unzipFileFullName = `${garVersionDir}/${region ? `${region}/` : ""}${unzipFileName}`;
//If a region is specified and its directory does not exist yet, create it
if (region)
if (!fs.existsSync(`${garVersionDir}/${region}`))
try {
fs.mkdirSync(`${garVersionDir}/${region}`);
} catch (e) {
throw new Error(`Failed to create directory "${garVersionDir}/${region}": ${e.message}`);
}
//If the file does not exist yet
if (!fs.existsSync(unzipFileFullName)) {
//Unpack the file
logInf(`Unpacking "${entry.name}" (${entry.size} bytes) into "${unzipFileFullName}"...`, MODULE, LOAD_LOG);
await zip.extract(entry.name, unzipFileFullName);
logInf(`Unpacked "${entry.name}" into "${unzipFileFullName}".`, MODULE, LOAD_LOG);
} else {
logInf(`File "${entry.name}" is already unpacked into directory "${garVersionDir}".`, MODULE, LOAD_LOG);
}
//Hand it over to a background worker
PARSE_QUEUE.push({
entry,
fileName: unzipFileName,
fileFullName: unzipFileFullName,
garVersionInfo: {
ident,
region,
versionDate: garVersion.versionDate
}
});
}
//Close the archive
logInf("Closing archive...", MODULE, LOAD_LOG);
await zip.close();
logInf("Archive closed.", MODULE, LOAD_LOG);
//Archive fully unpacked flag
ZIP_UNPACKED = true;
//Wait until everything finishes
logInf("Waiting for background processing to finish...", MODULE, LOAD_LOG);
while (PARSE_QUEUE.length > 0 || WP.available != conf.common.nThreads) await new Promise(resolve => setTimeout(resolve, 1000));
logInf("Background processing finished.", MODULE, LOAD_LOG);
//Clean up the temporary files directory
logInf(`Cleaning temporary files directory "${garVersionDir}"...`, MODULE, LOAD_LOG);
fs.rmSync(garVersionDir, { recursive: true });
logInf(`Temporary files directory "${garVersionDir}" cleaned.`, MODULE, LOAD_LOG);
//If downloaded files must be deleted
if (conf.common.bDownloadsDelete) {
logInf(`Deleting downloaded file "${fileFullName}"...`, MODULE, LOAD_LOG);
fs.unlinkSync(fileFullName);
logInf(`Downloaded file "${fileFullName}" deleted.`, MODULE, LOAD_LOG);
}
//Load completion time
END_TIME = new Date();
printCommonStats();
printImportStats();
//If processing succeeded
if (ERROR_COUNT == 0) {
//Remember the processed version
loadIdents.push({
GAR_VERSION: {
IDENT: ident,
VERSION_DATE: garVersion.versionDate,
REGIONS: LOAD_INFO.REGIONS,
HOUSES_LOADED: housesLoaded,
STEADS_LOADED: steadsLoaded
}
});
logInf(`GAR version "${garVersion.versionDate}" processed.`, MODULE, LOAD_LOG);
} else {
loadIdents = null;
logErr(`GAR version "${garVersion.versionDate}" processed with errors.`, MODULE, LOAD_LOG);
}
//Shut down the worker pool
logInf("Stopping workers...", MODULE, LOAD_LOG);
await WP.stop(LOAD_LOG);
WP = null;
logInf("Workers stopped.", MODULE, LOAD_LOG);
logInf(`GAR versions processed.`, MODULE, LOAD_LOG);
//Close the execution log
logInf("Closing the GAR load execution log...", MODULE, LOAD_LOG);
if (LOAD_LOG) LOAD_LOG.destroy();
if (!loadIdents) throw new Error(`Failed to load GAR update data.`);
//Return the result
return { blResp: Buffer.from(toXML(loadIdents[0])) };
} else {
throw new Error(`Region and/or date for loading GAR updates are not specified.`);
}
};
//-----------------
// Module interface
//-----------------
exports.afterLoad = afterLoad;
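
A hypothetical invocation of the exported handler, for illustration only: the real service supplies `prms` itself, and the date format expected by the `stringToDate` helper is an assumption here.

```js
// Hypothetical wiring - the field names mirror what afterLoad() reads above
const { afterLoad } = require("./modules/gar");

const prms = {
    options: {
        sRegions: "01;02", // regions to load (semicolon-separated)
        dGarDateLast: "01.10.2023", // last loaded GAR version date (format is an assumption)
        nHousesLoaded: 1,
        nSteadsLoaded: 0
    },
    queue: {
        // simulated response of the GAR version-info request
        blResp: Buffer.from(JSON.stringify([{ Date: "15.10.2023", GarXMLDeltaURL: "https://example.org/gar_delta.zip" }]))
    }
};

afterLoad(prms)
    .then(({ blResp }) => console.log(blResp.toString())) // XML describing the processed GAR version
    .catch(e => console.error(e.message));
```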

45
modules/gar_config.js Normal file

@@ -0,0 +1,45 @@
/*
Service for integrating Parus 8 with a WEB API
Add-on module: integration with GAR (GAR) - settings
*/
//--------------------------
// Global identifiers
//--------------------------
//Common settings
let common = {
//Number of import threads
nThreads: 11,
//Directory for downloaded data
sDownloadsDir: "./gar_downloads",
//Delete downloaded files
bDownloadsDelete: true,
//Reuse already existing files (if present)
bDownloadsUseExists: true,
//Directory for temporary data
sTmpDir: "./gar_tmp",
//Logging directory
sLogDir: "./gar_logs",
//Buffer size for the file read stream (bytes)
nFileChunkSize: 256 * 1024,
//Number of files to process (0 = unlimited)
nLoadFilesLimit: 0,
//Mask of files to process (null = all; regular expressions are supported, e.g.: /AS_ROOM_TYPES_(\d{8})_(.*)/i /(.*)\/AS_STEADS_(\d{8})_(.*)/i /01\/(.*)/i)
sLoadFilesMask: null
};
//Database connection settings
let dbConnect = {
//Buffer size for flushing to the DB (number of records)
nBufferSize: 30000
};
//-----------------
// Module interface
//-----------------
module.exports = {
common,
dbConnect
};

225
modules/gar_utils/import.js Normal file

@@ -0,0 +1,225 @@
/*
Service for integrating Parus 8 with a WEB API
Add-on module: integration with GAR (GAR) - data import worker
*/
//------------------------------
// External libraries
//------------------------------
const { workerData, parentPort } = require("worker_threads"); //Parallel workers
const fs = require("fs"); //File system access
const oracledb = require("oracledb"); //Oracle database access
const { WRK_MSG_TYPE, logInf, logErr, makeTaskOKResult, makeTaskErrResult, makeStopMessage } = require("./utils"); //Helper functions
const { PARSERS, findModelByFileName } = require("./parsers"); //Models and parsers
const sax = require("./node_modules/sax"); //Event-driven XML parser
//--------------------------
// Global identifiers
//--------------------------
//Module name for logging
const MODULE = `GAR_INPUT_PROCESSOR_${workerData.number}`;
//DB connection flag
let CONNECTED = false;
//DB connection busy flag
let CONNECTION_IN_USE = false;
//DB connection
let CONNECTION = null;
//Stop flag
let STOP_FLAG = false;
//Execution log
let LOAD_LOG = null;
//------------
// Module body
//------------
//Connect to the DB
const connectDb = async ({ user, password, connectString, schema }) => {
CONNECTION = await oracledb.getConnection({ user, password, connectString });
await CONNECTION.execute(`ALTER SESSION SET CURRENT_SCHEMA=${schema} RECYCLEBIN=OFF`);
CONNECTED = true;
};
//Disconnect from the DB
const disconnectDb = async () => {
while (CONNECTION_IN_USE) {
await new Promise(resolve => setTimeout(resolve, 0));
}
if (CONNECTION) {
await CONNECTION.close();
CONNECTION = null;
}
CONNECTED = false;
};
//Flush a buffer to the DB
const saveBufferToDb = async (buffer, parser, insertProcedureName, ident, region) => {
if (!STOP_FLAG) {
CONNECTION_IN_USE = true;
try {
await parser.save(CONNECTION, ident, buffer, insertProcedureName, region);
} finally {
CONNECTION_IN_USE = false;
}
}
};
//Read the file as a stream
const parseFile = ({ fileFullName, dbBuferSize, fileChunkSize, parser, insertProcedureName, ident, region }) => {
return new Promise((resolve, reject) => {
//Create the file stream
const fsStream = fs.createReadStream(fileFullName, { highWaterMark: fileChunkSize });
//Create the parser stream
const saxStream = sax.createStream(false);
//Buffer to flush to the database
let buffer = [];
//Number of parsed elements
let cntItems = 0;
//Parser error
let parserErr = null;
//Last processed element
let lastItem = null;
//Parser events - error
saxStream.on("error", e => {
parserErr = e.message;
});
//Parser events - new element
saxStream.on("opentag", node => {
if (node.name == parser.element) {
cntItems++;
lastItem = node;
buffer.push(node);
}
});
//File events - chunk read
fsStream.on("data", chunk => {
if (!STOP_FLAG) {
saxStream.write(chunk);
if (buffer.length >= dbBuferSize) {
fsStream.pause();
}
if (parserErr) fsStream.destroy();
} else fsStream.destroy();
});
//File events - reading paused
fsStream.on("pause", async () => {
if (buffer.length >= dbBuferSize) {
try {
await saveBufferToDb(buffer, parser, insertProcedureName, ident, region);
} catch (e) {
reject(e);
}
buffer = [];
}
if (!STOP_FLAG) fsStream.resume();
else fsStream.destroy();
});
//File events - read error
fsStream.on("error", error => reject(error));
//File events - stream closed
fsStream.on("close", async error => {
saxStream._parser.close();
if (!STOP_FLAG) {
if (buffer.length > 0) {
try {
await saveBufferToDb(buffer, parser, insertProcedureName, ident, region);
} catch (e) {
reject(e);
}
buffer = [];
}
if (parserErr)
reject(
Error(
`Data parsing error: "${parserErr}". Elements parsed: ${cntItems}, last parsed: "${JSON.stringify(
lastItem
)}"`
)
);
else if (error) reject(error);
else resolve();
} else {
reject(Error("Worker was stopped forcibly"));
}
});
});
};
//Handle a task message
const processTask = async ({ garVersionInfo, fileFullName, fileName }) => {
const model = findModelByFileName(fileName);
if (model) {
await parseFile({
fileFullName,
dbBuferSize: workerData.dbBuferSize,
fileChunkSize: workerData.fileChunkSize,
parser: PARSERS[model.parser],
insertProcedureName: model.insertProcedureName,
ident: garVersionInfo.ident,
region: garVersionInfo.region
});
}
return true;
};
//Subscribe to messages from the parent thread
parentPort.on("message", async msg => {
//Open the execution log
if (!LOAD_LOG && workerData.loadLog) {
LOAD_LOG = fs.createWriteStream(JSON.parse(workerData.loadLog).path, { flags: "a" });
LOAD_LOG.on("error", e => {});
LOAD_LOG.on("close", () => {});
}
logInf(`Worker #${workerData.number} received a new message: ${JSON.stringify(msg)}`, MODULE, LOAD_LOG);
if (msg.type === WRK_MSG_TYPE.TASK) {
try {
//Connect to the DB
const dbConn = workerData.dbConn;
if (!CONNECTED)
await connectDb({
user: dbConn.sUser,
password: dbConn.sPassword,
connectString: dbConn.sConnectString,
schema: dbConn.sSchema
});
let resp = await processTask({ ...msg.payload });
parentPort.postMessage(makeTaskOKResult(resp));
} catch (e) {
parentPort.postMessage(makeTaskErrResult(e));
}
} else {
if (msg.type === WRK_MSG_TYPE.STOP) {
//Stop flag
STOP_FLAG = true;
//Disconnect from the DB
try {
if (CONNECTED) await disconnectDb();
} catch (e) {
logErr(`While stopping the worker: ${e.message}`, MODULE, LOAD_LOG);
}
//Release logging resources
if (LOAD_LOG) LOAD_LOG.destroy();
parentPort.postMessage(makeStopMessage());
} else {
parentPort.postMessage(makeTaskErrResult(Error(`Worker #${workerData.number} received a message of an unsupported type`)));
}
}
});
//Catch unexpected errors
process.on("uncaughtException", e => {
logErr(`Unexpected error: ${e.message}`, MODULE, LOAD_LOG);
//Release logging resources
if (LOAD_LOG) LOAD_LOG.destroy();
});
//Catch unhandled rejections
process.on("unhandledRejection", e => {
logErr(`Unhandled rejection: ${e.message}`, MODULE, LOAD_LOG);
//Release logging resources
if (LOAD_LOG) LOAD_LOG.destroy();
});

22
modules/gar_utils/node_modules/node-fetch/LICENSE.md generated vendored Normal file

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

633
modules/gar_utils/node_modules/node-fetch/README.md generated vendored Normal file

@@ -0,0 +1,633 @@
node-fetch
==========
[![npm version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![coverage status][codecov-image]][codecov-url]
[![install size][install-size-image]][install-size-url]
[![Discord][discord-image]][discord-url]
A light-weight module that brings `window.fetch` to Node.js
(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
[![Backers][opencollective-image]][opencollective-url]
<!-- TOC -->
- [Motivation](#motivation)
- [Features](#features)
- [Difference from client-side fetch](#difference-from-client-side-fetch)
- [Installation](#installation)
- [Loading and configuring the module](#loading-and-configuring-the-module)
- [Common Usage](#common-usage)
- [Plain text or HTML](#plain-text-or-html)
- [JSON](#json)
- [Simple Post](#simple-post)
- [Post with JSON](#post-with-json)
- [Post with form parameters](#post-with-form-parameters)
- [Handling exceptions](#handling-exceptions)
- [Handling client and server errors](#handling-client-and-server-errors)
- [Advanced Usage](#advanced-usage)
- [Streams](#streams)
- [Buffer](#buffer)
- [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data)
- [Extract Set-Cookie Header](#extract-set-cookie-header)
- [Post data using a file stream](#post-data-using-a-file-stream)
- [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart)
- [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal)
- [API](#api)
- [fetch(url[, options])](#fetchurl-options)
- [Options](#options)
- [Class: Request](#class-request)
- [Class: Response](#class-response)
- [Class: Headers](#class-headers)
- [Interface: Body](#interface-body)
- [Class: FetchError](#class-fetcherror)
- [License](#license)
- [Acknowledgement](#acknowledgement)
<!-- /TOC -->
## Motivation
Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime.
See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
## Features
- Stay consistent with `window.fetch` API.
- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences.
- Use native promise but allow substituting it with [insert your favorite promise library].
- Use native Node streams for body on both request and response.
- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
## Difference from client-side fetch
- See [Known Differences](LIMITS.md) for details.
- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
- Pull requests are welcomed too!
## Installation
Current stable release (`2.x`)
```sh
$ npm install node-fetch
```
## Loading and configuring the module
We suggest you load the module via `require` until the stabilization of ES modules in node:
```js
const fetch = require('node-fetch');
```
If you are using a Promise library other than native, set it through `fetch.Promise`:
```js
const Bluebird = require('bluebird');
fetch.Promise = Bluebird;
```
## Common Usage
NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
#### Plain text or HTML
```js
fetch('https://github.com/')
.then(res => res.text())
.then(body => console.log(body));
```
#### JSON
```js
fetch('https://api.github.com/users/github')
.then(res => res.json())
.then(json => console.log(json));
```
#### Simple Post
```js
fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
.then(res => res.json()) // expecting a json response
.then(json => console.log(json));
```
#### Post with JSON
```js
const body = { a: 1 };
fetch('https://httpbin.org/post', {
method: 'post',
body: JSON.stringify(body),
headers: { 'Content-Type': 'application/json' },
})
.then(res => res.json())
.then(json => console.log(json));
```
#### Post with form parameters
`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such:
```js
const { URLSearchParams } = require('url');
const params = new URLSearchParams();
params.append('a', 1);
fetch('https://httpbin.org/post', { method: 'POST', body: params })
.then(res => res.json())
.then(json => console.log(json));
```
#### Handling exceptions
NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information.
Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details.
```js
fetch('https://domain.invalid/')
.catch(err => console.error(err));
```
#### Handling client and server errors
It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses:
```js
function checkStatus(res) {
if (res.ok) { // res.status >= 200 && res.status < 300
return res;
} else {
throw new MyCustomError(res.statusText);
}
}
fetch('https://httpbin.org/status/400')
.then(checkStatus)
.then(res => console.log('will not get here...'))
```
## Advanced Usage
#### Streams
The "Node.js way" is to use streams when possible:
```js
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
.then(res => {
const dest = fs.createWriteStream('./octocat.png');
res.body.pipe(dest);
});
```
In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch
errors -- the longer a response runs, the more likely it is to encounter an error.
```js
const fetch = require('node-fetch');
const response = await fetch('https://httpbin.org/stream/3');
try {
for await (const chunk of response.body) {
console.dir(JSON.parse(chunk.toString()));
}
} catch (err) {
console.error(err.stack);
}
```
In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams
did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors
directly from the stream and wait for the response to fully close.
```js
const fetch = require('node-fetch');
const read = async body => {
let error;
body.on('error', err => {
error = err;
});
for await (const chunk of body) {
console.dir(JSON.parse(chunk.toString()));
}
return new Promise((resolve, reject) => {
body.on('close', () => {
error ? reject(error) : resolve();
});
});
};
try {
const response = await fetch('https://httpbin.org/stream/3');
await read(response.body);
} catch (err) {
console.error(err.stack);
}
```
#### Buffer
If you prefer to cache binary data in full, use `buffer()`. (NOTE: `buffer()` is a `node-fetch`-only API)
```js
const fileType = require('file-type');
fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
.then(res => res.buffer())
.then(buffer => fileType(buffer))
.then(type => { /* ... */ });
```
#### Accessing Headers and other Meta data
```js
fetch('https://github.com/')
.then(res => {
console.log(res.ok);
console.log(res.status);
console.log(res.statusText);
console.log(res.headers.raw());
console.log(res.headers.get('content-type'));
});
```
#### Extract Set-Cookie Header
Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API.
```js
fetch(url).then(res => {
// returns an array of values, instead of a string of comma-separated values
console.log(res.headers.raw()['set-cookie']);
});
```
#### Post data using a file stream
```js
const { createReadStream } = require('fs');
const stream = createReadStream('input.txt');
fetch('https://httpbin.org/post', { method: 'POST', body: stream })
.then(res => res.json())
.then(json => console.log(json));
```
#### Post with form-data (detect multipart)
```js
const FormData = require('form-data');
const form = new FormData();
form.append('a', 1);
fetch('https://httpbin.org/post', { method: 'POST', body: form })
.then(res => res.json())
.then(json => console.log(json));
// OR, using custom headers
// NOTE: getHeaders() is non-standard API
const form = new FormData();
form.append('a', 1);
const options = {
method: 'POST',
body: form,
headers: form.getHeaders()
}
fetch('https://httpbin.org/post', options)
.then(res => res.json())
.then(json => console.log(json));
```
#### Request cancellation with AbortSignal
> NOTE: You may cancel streamed requests only on Node >= v8.0.0
You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
An example of timing out a request after 150ms could be achieved as the following:
```js
import AbortController from 'abort-controller';
const controller = new AbortController();
const timeout = setTimeout(
() => { controller.abort(); },
150,
);
fetch(url, { signal: controller.signal })
.then(res => res.json())
.then(
data => {
useData(data)
},
err => {
if (err.name === 'AbortError') {
// request was aborted
}
},
)
.finally(() => {
clearTimeout(timeout);
});
```
See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
## API
### fetch(url[, options])
- `url` A string representing the URL for fetching
- `options` [Options](#fetch-options) for the HTTP(S) request
- Returns: <code>Promise&lt;[Response](#class-response)&gt;</code>
Perform an HTTP(S) fetch.
`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`.
<a id="fetch-options"></a>
### Options
The default values are shown after each option key.
```js
{
// These properties are part of the Fetch Standard
method: 'GET',
headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
signal: null, // pass an instance of AbortSignal to optionally abort requests
// The following properties are node-fetch extensions
follow: 20, // maximum redirect count. 0 to not follow redirect
timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
compress: true, // support gzip/deflate content encoding. false to disable
size: 0, // maximum response body size in bytes. 0 to disable
agent: null // http(s).Agent instance or function that returns an instance (see below)
}
```
##### Default Headers
If no values are set, the following request headers will be sent automatically:
Header | Value
------------------- | --------------------------------------------------------
`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
`Accept` | `*/*`
`Connection` | `close` _(when no `options.agent` is present)_
`Content-Length` | _(automatically calculated, if possible)_
`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
Note: when `body` is a `Stream`, `Content-Length` is not set automatically.
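Any of these defaults can be overridden per request; a minimal sketch (the endpoint is just an example):
```js
fetch('https://httpbin.org/headers', {
    compress: false, // drops the default Accept-Encoding header
    headers: { 'User-Agent': 'my-app/1.0' } // replaces the default User-Agent
})
    .then(res => res.json())
    .then(json => console.log(json.headers));
```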
##### Custom Agent
The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following:
- Support self-signed certificate
- Use only IPv4 or IPv6
- Custom DNS Lookup
See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information.
In addition, the `agent` option accepts a function that returns an `http(s).Agent` instance given the current [URL](https://nodejs.org/api/url.html); this is useful during a redirection chain across the HTTP and HTTPS protocols.
```js
const httpAgent = new http.Agent({
keepAlive: true
});
const httpsAgent = new https.Agent({
keepAlive: true
});
const options = {
agent: function (_parsedURL) {
if (_parsedURL.protocol == 'http:') {
return httpAgent;
} else {
return httpsAgent;
}
}
}
```
<a id="class-request"></a>
### Class: Request
An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
Due to the nature of Node.js, the following properties are not implemented at this moment:
- `type`
- `destination`
- `referrer`
- `referrerPolicy`
- `mode`
- `credentials`
- `cache`
- `integrity`
- `keepalive`
The following node-fetch extension properties are provided:
- `follow`
- `compress`
- `counter`
- `agent`
See [options](#fetch-options) for exact meaning of these extensions.
#### new Request(input[, options])
<small>*(spec-compliant)*</small>
- `input` A string representing a URL, or another `Request` (which will be cloned)
- `options` [Options](#fetch-options) for the HTTP(S) request
Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object.
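Still, a small sketch constructing a `Request` up front and passing it to `fetch()` (the endpoint is illustrative):
```js
const fetch = require('node-fetch');
const { Request } = fetch;

const request = new Request('https://httpbin.org/post', {
    method: 'POST',
    body: JSON.stringify({ a: 1 }),
    headers: { 'Content-Type': 'application/json' }
});
fetch(request)
    .then(res => res.json())
    .then(json => console.log(json));
```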
<a id="class-response"></a>
### Class: Response
An HTTP(S) response. This class implements the [Body](#iface-body) interface.
The following properties are not implemented in node-fetch at this moment:
- `Response.error()`
- `Response.redirect()`
- `type`
- `trailer`
#### new Response([body[, options]])
<small>*(spec-compliant)*</small>
- `body` A `String` or [`Readable` stream][node-readable]
- `options` A [`ResponseInit`][response-init] options dictionary
Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
#### response.ok
<small>*(spec-compliant)*</small>
Convenience property representing whether the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
#### response.redirected
<small>*(spec-compliant)*</small>
Convenience property representing whether the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
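A quick sketch exercising both properties (httpbin's redirect endpoint used as an example):
```js
fetch('https://httpbin.org/redirect/1')
    .then(res => {
        console.log(res.ok);         // true - the chain settled on a 2xx response
        console.log(res.redirected); // true - the redirect counter is > 0
    });
```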
<a id="class-headers"></a>
### Class: Headers
This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
#### new Headers([init])
<small>*(spec-compliant)*</small>
- `init` Optional argument to pre-fill the `Headers` object
Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, a key-value map object or any iterable object.
```js
// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
const meta = {
'Content-Type': 'text/xml',
'Breaking-Bad': '<3'
};
const headers = new Headers(meta);
// The above is equivalent to
const meta = [
[ 'Content-Type', 'text/xml' ],
[ 'Breaking-Bad', '<3' ]
];
const headers = new Headers(meta);
// You can in fact use any iterable objects, like a Map or even another Headers
const meta = new Map();
meta.set('Content-Type', 'text/xml');
meta.set('Breaking-Bad', '<3');
const headers = new Headers(meta);
const copyOfHeaders = new Headers(headers);
```
<a id="iface-body"></a>
### Interface: Body
`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
The following methods are not yet implemented in node-fetch at this moment:
- `formData()`
#### body.body
<small>*(deviation from spec)*</small>
* Node.js [`Readable` stream][node-readable]
Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
#### body.bodyUsed
<small>*(spec-compliant)*</small>
* `Boolean`
A boolean property indicating whether this body has been consumed. Per the specs, a consumed body cannot be used again.
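For example (any consuming method flips the flag):
```js
fetch('https://api.github.com/users/github')
    .then(res => {
        console.log(res.bodyUsed); // false - nothing consumed yet
        return res.json().then(json => {
            console.log(res.bodyUsed); // true - calling res.json() again would reject
            return json;
        });
    });
```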
#### body.arrayBuffer()
#### body.blob()
#### body.json()
#### body.text()
<small>*(spec-compliant)*</small>
* Returns: <code>Promise</code>
Consume the body and return a promise that will resolve to one of these formats.
#### body.buffer()
<small>*(node-fetch extension)*</small>
* Returns: <code>Promise&lt;Buffer&gt;</code>
Consume the body and return a promise that will resolve to a Buffer.
#### body.textConverted()
<small>*(node-fetch extension)*</small>
* Returns: <code>Promise&lt;String&gt;</code>
Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible.
(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
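A minimal sketch, assuming the optional `encoding` package is installed and the URL is a stand-in for a non-UTF-8 page:
```js
fetch('https://example.com/legacy-page')
    .then(res => res.textConverted()) // sniffs the encoding, converts to UTF-8
    .then(text => console.log(text));
```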
<a id="class-fetcherror"></a>
### Class: FetchError
<small>*(node-fetch extension)*</small>
An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
<a id="class-aborterror"></a>
### Class: AbortError
<small>*(node-fetch extension)*</small>
An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.md][] for more info.
## Acknowledgement
Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
## License
MIT
[npm-image]: https://flat.badgen.net/npm/v/node-fetch
[npm-url]: https://www.npmjs.com/package/node-fetch
[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
[travis-url]: https://travis-ci.org/bitinn/node-fetch
[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master
[codecov-url]: https://codecov.io/gh/bitinn/node-fetch
[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square
[discord-url]: https://discord.gg/Zxbndcm
[opencollective-image]: https://opencollective.com/node-fetch/backers.svg
[opencollective-url]: https://opencollective.com/node-fetch
[whatwg-fetch]: https://fetch.spec.whatwg.org/
[response-init]: https://fetch.spec.whatwg.org/#responseinit
[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md

25
modules/gar_utils/node_modules/node-fetch/browser.js generated vendored Normal file

@@ -0,0 +1,25 @@
"use strict";
// ref: https://github.com/tc39/proposal-global
var getGlobal = function () {
// the only reliable means to get the global object is
// `Function('return this')()`
// However, this causes CSP violations in Chrome apps.
if (typeof self !== 'undefined') { return self; }
if (typeof window !== 'undefined') { return window; }
if (typeof global !== 'undefined') { return global; }
throw new Error('unable to locate global object');
}
var globalObject = getGlobal();
module.exports = exports = globalObject.fetch;
// Needed for TypeScript and Webpack.
if (globalObject.fetch) {
exports.default = globalObject.fetch.bind(globalObject);
}
exports.Headers = globalObject.Headers;
exports.Request = globalObject.Request;
exports.Response = globalObject.Response;

1778
modules/gar_utils/node_modules/node-fetch/lib/index.es.js generated vendored Normal file

File diff suppressed because it is too large

1787
modules/gar_utils/node_modules/node-fetch/lib/index.js generated vendored Normal file

File diff suppressed because it is too large

1776
modules/gar_utils/node_modules/node-fetch/lib/index.mjs generated vendored Normal file

File diff suppressed because it is too large

117
modules/gar_utils/node_modules/node-fetch/package.json generated vendored Normal file

@@ -0,0 +1,117 @@
{
"_from": "node-fetch@2.6.9",
"_id": "node-fetch@2.6.9",
"_inBundle": false,
"_integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==",
"_location": "/node-fetch",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "node-fetch@2.6.9",
"name": "node-fetch",
"escapedName": "node-fetch",
"rawSpec": "2.6.9",
"saveSpec": null,
"fetchSpec": "2.6.9"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz",
"_shasum": "7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6",
"_spec": "node-fetch@2.6.9",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1",
"author": {
"name": "David Frank"
},
"browser": "./browser.js",
"bugs": {
"url": "https://github.com/bitinn/node-fetch/issues"
},
"bundleDependencies": false,
"dependencies": {
"whatwg-url": "^5.0.0"
},
"deprecated": false,
"description": "A light-weight module that brings window.fetch to node.js",
"devDependencies": {
"@ungap/url-search-params": "^0.1.2",
"abort-controller": "^1.1.0",
"abortcontroller-polyfill": "^1.3.0",
"babel-core": "^6.26.3",
"babel-plugin-istanbul": "^4.1.6",
"babel-plugin-transform-async-generator-functions": "^6.24.1",
"babel-polyfill": "^6.26.0",
"babel-preset-env": "1.4.0",
"babel-register": "^6.16.3",
"chai": "^3.5.0",
"chai-as-promised": "^7.1.1",
"chai-iterator": "^1.1.1",
"chai-string": "~1.3.0",
"codecov": "3.3.0",
"cross-env": "^5.2.0",
"form-data": "^2.3.3",
"is-builtin-module": "^1.0.0",
"mocha": "^5.0.0",
"nyc": "11.9.0",
"parted": "^0.1.1",
"promise": "^8.0.3",
"resumer": "0.0.0",
"rollup": "^0.63.4",
"rollup-plugin-babel": "^3.0.7",
"string-to-arraybuffer": "^1.0.2",
"teeny-request": "3.7.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"files": [
"lib/index.js",
"lib/index.mjs",
"lib/index.es.js",
"browser.js"
],
"homepage": "https://github.com/bitinn/node-fetch",
"keywords": [
"fetch",
"http",
"promise"
],
"license": "MIT",
"main": "lib/index.js",
"module": "lib/index.mjs",
"name": "node-fetch",
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
},
"release": {
"branches": [
"+([0-9]).x",
"main",
"next",
{
"name": "beta",
"prerelease": true
}
]
},
"repository": {
"type": "git",
"url": "git+https://github.com/bitinn/node-fetch.git"
},
"scripts": {
"build": "cross-env BABEL_ENV=rollup rollup -c",
"coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json",
"prepare": "npm run build",
"report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
"test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js"
},
"version": "2.6.9"
}

44
modules/gar_utils/node_modules/node-stream-zip/LICENSE generated vendored Normal file

@@ -0,0 +1,44 @@
Copyright (c) 2021 Antelle https://github.com/antelle
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
== dependency license: adm-zip ==
Copyright (c) 2012 Another-D-Mention Software and other contributors,
http://www.another-d-mention.ro/
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

224
modules/gar_utils/node_modules/node-stream-zip/README.md generated vendored Normal file

@@ -0,0 +1,224 @@
# node-stream-zip ![CI Checks](https://github.com/antelle/node-stream-zip/workflows/CI%20Checks/badge.svg)
node.js library for reading and extraction of ZIP archives.
Features:
- it never loads entire archive into memory, everything is read by chunks
- large archives support
- all operations are non-blocking, no sync i/o
- fast initialization
- no dependencies, no binary addons
- decompression with built-in zlib module
- deflate, sfx, macosx/windows built-in archives
- ZIP64 support
## Installation
```sh
npm i node-stream-zip
```
## Usage
There are two APIs provided:
1. [promise-based / async](#async-api)
2. [callbacks](#callback-api)
It's recommended to use the new promise API; however, the legacy callback API
may be more flexible for certain operations.
### Async API
Open a zip file
```javascript
const StreamZip = require('node-stream-zip');
const zip = new StreamZip.async({ file: 'archive.zip' });
```
Stream one entry to stdout
```javascript
const stm = await zip.stream('path/inside/zip.txt');
stm.pipe(process.stdout);
stm.on('end', () => zip.close());
```
Read a file as buffer
```javascript
const data = await zip.entryData('path/inside/zip.txt');
await zip.close();
```
Extract one file to disk
```javascript
await zip.extract('path/inside/zip.txt', './extracted.txt');
await zip.close();
```
List entries
```javascript
const entriesCount = await zip.entriesCount;
console.log(`Entries read: ${entriesCount}`);
const entries = await zip.entries();
for (const entry of Object.values(entries)) {
const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
console.log(`Entry ${entry.name}: ${desc}`);
}
// Do not forget to close the file once you're done
await zip.close();
```
Extract a folder from archive to disk
```javascript
fs.mkdirSync('extracted');
await zip.extract('path/inside/zip/', './extracted');
await zip.close();
```
Extract everything
```javascript
fs.mkdirSync('extracted');
const count = await zip.extract(null, './extracted');
console.log(`Extracted ${count} entries`);
await zip.close();
```
When extracting a folder, you can listen to `extract` event
```javascript
zip.on('extract', (entry, file) => {
console.log(`Extracted ${entry.name} to ${file}`);
});
```
`entry` event is generated for every entry during loading
```javascript
zip.on('entry', entry => {
// you can already stream this entry,
// without waiting until all entry descriptions are read (suitable for very large archives)
console.log(`Read entry ${entry.name}`);
});
```
### Callback API
Open a zip file
```javascript
const StreamZip = require('node-stream-zip');
const zip = new StreamZip({ file: 'archive.zip' });
// Handle errors
zip.on('error', err => { /*...*/ });
```
List entries
```javascript
zip.on('ready', () => {
console.log('Entries read: ' + zip.entriesCount);
for (const entry of Object.values(zip.entries())) {
const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
console.log(`Entry ${entry.name}: ${desc}`);
}
// Do not forget to close the file once you're done
zip.close();
});
```
Stream one entry to stdout
```javascript
zip.on('ready', () => {
zip.stream('path/inside/zip.txt', (err, stm) => {
stm.pipe(process.stdout);
stm.on('end', () => zip.close());
});
});
```
Extract one file to disk
```javascript
zip.on('ready', () => {
zip.extract('path/inside/zip.txt', './extracted.txt', err => {
console.log(err ? 'Extract error' : 'Extracted');
zip.close();
});
});
```
Extract a folder from archive to disk
```javascript
zip.on('ready', () => {
fs.mkdirSync('extracted');
zip.extract('path/inside/zip/', './extracted', err => {
console.log(err ? 'Extract error' : 'Extracted');
zip.close();
});
});
```
Extract everything
```javascript
zip.on('ready', () => {
fs.mkdirSync('extracted');
zip.extract(null, './extracted', (err, count) => {
console.log(err ? 'Extract error' : `Extracted ${count} entries`);
zip.close();
});
});
```
Read a file as buffer in sync way
```javascript
zip.on('ready', () => {
const data = zip.entryDataSync('path/inside/zip.txt');
zip.close();
});
```
When extracting a folder, you can listen to `extract` event
```javascript
zip.on('extract', (entry, file) => {
console.log(`Extracted ${entry.name} to ${file}`);
});
```
`entry` event is generated for every entry during loading
```javascript
zip.on('entry', entry => {
// you can already stream this entry,
// without waiting until all entry descriptions are read (suitable for very large archives)
console.log(`Read entry ${entry.name}`);
});
```
## Options
You can pass these options to the constructor
- `storeEntries: true` - you will be able to work with entries inside the zip archive; otherwise the only way to access them is the `entry` event
- `skipEntryNameValidation: true` - by default, the entry name is checked for malicious characters, like `../` or `c:\123`; pass this flag to disable validation errors
## Methods
- `zip.entries()` - get all entries description
- `zip.entry(name)` - get entry description by name
- `zip.stream(entry, function(err, stm) { })` - get entry data reader stream
- `zip.entryDataSync(entry)` - get entry data in sync way
- `zip.close()` - cleanup after all entries have been read, streamed, extracted, and you don't need the archive
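A short sketch tying the options and methods above together (assuming `archive.zip` contains `path/inside/zip.txt`):
```javascript
const StreamZip = require('node-stream-zip');
// storeEntries: true (the default) keeps entry descriptions in memory,
// so zip.entry()/zip.entries() can be used once 'ready' fires
const zip = new StreamZip({ file: 'archive.zip', storeEntries: true });
zip.on('error', err => { console.error(err); });
zip.on('ready', () => {
    const entry = zip.entry('path/inside/zip.txt');
    if (entry && !entry.isDirectory) {
        console.log(`${entry.name}: ${entry.size} bytes`);
    }
    zip.close();
});
```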
## Building
The project doesn't require building. To run unit tests with [nodeunit](https://github.com/caolan/nodeunit):
```sh
npm test
```
## Known issues
- [utf8](https://github.com/rubyzip/rubyzip/wiki/Files-with-non-ascii-filenames) file names
## Out of scope
- AES encrypted files: the library will throw an error if you try to open one
## Contributors
ZIP parsing code has been partially forked from [cthackers/adm-zip](https://github.com/cthackers/adm-zip) (MIT license).

View File

@ -0,0 +1,199 @@
/// <reference types="node" />
declare namespace StreamZip {
interface StreamZipOptions {
/**
* File to read
* @default undefined
*/
file?: string;
/**
* Alternatively, you can pass fd here
* @default undefined
*/
fd?: number;
/**
* You will be able to work with entries inside zip archive,
* otherwise the only way to access them is entry event
* @default true
*/
storeEntries?: boolean;
/**
* By default, entry name is checked for malicious characters, like ../ or c:\123,
* pass this flag to disable validation error
* @default false
*/
skipEntryNameValidation?: boolean;
/**
* Filesystem read chunk size
* @default automatic based on file size
*/
chunkSize?: number;
/**
* Encoding used to decode file names
* @default UTF8
*/
nameEncoding?: string;
}
interface ZipEntry {
/**
* file name
*/
name: string;
/**
* true if it's a directory entry
*/
isDirectory: boolean;
/**
* true if it's a file entry, see also isDirectory
*/
isFile: boolean;
/**
* file comment
*/
comment: string;
/**
* if the file is encrypted
*/
encrypted: boolean;
/**
* version made by
*/
verMade: number;
/**
* version needed to extract
*/
version: number;
/**
* encrypt, decrypt flags
*/
flags: number;
/**
* compression method
*/
method: number;
/**
* modification time
*/
time: number;
/**
* uncompressed file crc-32 value
*/
crc: number;
/**
* compressed size
*/
compressedSize: number;
/**
* uncompressed size
*/
size: number;
/**
* volume number start
*/
diskStart: number;
/**
* internal file attributes
*/
inattr: number;
/**
* external file attributes
*/
attr: number;
/**
* LOC header offset
*/
offset: number;
}
class StreamZipAsync {
constructor(config: StreamZipOptions);
entriesCount: Promise<number>;
comment: Promise<string>;
entry(name: string): Promise<ZipEntry | undefined>;
entries(): Promise<{ [name: string]: ZipEntry }>;
entryData(entry: string | ZipEntry): Promise<Buffer>;
stream(entry: string | ZipEntry): Promise<NodeJS.ReadableStream>;
extract(entry: string | ZipEntry | null, outPath: string): Promise<number | undefined>;
on(event: 'entry', handler: (entry: ZipEntry) => void): void;
on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
close(): Promise<void>;
}
}
type StreamZipOptions = StreamZip.StreamZipOptions;
type ZipEntry = StreamZip.ZipEntry;
declare class StreamZip {
constructor(config: StreamZipOptions);
/**
* number of entries in the archive
*/
entriesCount: number;
/**
* archive comment
*/
comment: string;
on(event: 'error', handler: (error: any) => void): void;
on(event: 'entry', handler: (entry: ZipEntry) => void): void;
on(event: 'ready', handler: () => void): void;
on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
entry(name: string): ZipEntry | undefined;
entries(): { [name: string]: ZipEntry };
stream(
entry: string | ZipEntry,
callback: (err: any | null, stream?: NodeJS.ReadableStream) => void
): void;
entryDataSync(entry: string | ZipEntry): Buffer;
openEntry(
entry: string | ZipEntry,
callback: (err: any | null, entry?: ZipEntry) => void,
sync: boolean
): void;
extract(
entry: string | ZipEntry | null,
outPath: string,
callback: (err?: any, res?: number) => void
): void;
close(callback?: (err?: any) => void): void;
static async: typeof StreamZip.StreamZipAsync;
}
export = StreamZip;

File diff suppressed because it is too large

View File

@ -0,0 +1,77 @@
{
"_from": "node-stream-zip",
"_id": "node-stream-zip@1.15.0",
"_inBundle": false,
"_integrity": "sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw==",
"_location": "/node-stream-zip",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "node-stream-zip",
"name": "node-stream-zip",
"escapedName": "node-stream-zip",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.15.0.tgz",
"_shasum": "158adb88ed8004c6c49a396b50a6a5de3bca33ea",
"_spec": "node-stream-zip",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1",
"author": {
"name": "Antelle",
"email": "antelle.net@gmail.com",
"url": "https://github.com/antelle"
},
"bugs": {
"url": "https://github.com/antelle/node-stream-zip/issues",
"email": "antelle.net@gmail.com"
},
"bundleDependencies": false,
"deprecated": false,
"description": "node.js library for reading and extraction of ZIP archives",
"devDependencies": {
"@types/node": "^14.14.6",
"eslint": "^7.19.0",
"nodeunit": "^0.11.3",
"prettier": "^2.2.1"
},
"engines": {
"node": ">=0.12.0"
},
"files": [
"LICENSE",
"node_stream_zip.js",
"node_stream_zip.d.ts"
],
"funding": {
"type": "github",
"url": "https://github.com/sponsors/antelle"
},
"homepage": "https://github.com/antelle/node-stream-zip",
"keywords": [
"zip",
"archive",
"unzip",
"stream"
],
"license": "MIT",
"main": "node_stream_zip.js",
"name": "node-stream-zip",
"repository": {
"type": "git",
"url": "git+https://github.com/antelle/node-stream-zip.git"
},
"scripts": {
"check-types": "tsc node_stream_zip.d.ts",
"lint": "eslint node_stream_zip.js test/tests.js",
"test": "nodeunit test/tests.js"
},
"types": "node_stream_zip.d.ts",
"version": "1.15.0"
}

41
modules/gar_utils/node_modules/sax/LICENSE generated vendored Normal file
View File

@ -0,0 +1,41 @@
The ISC License
Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
====
`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
License, as follows:
Copyright (c) 2010-2022 Mathias Bynens <https://mathiasbynens.be/>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

225
modules/gar_utils/node_modules/sax/README.md generated vendored Normal file
View File

@ -0,0 +1,225 @@
# sax js
A sax-style parser for XML and HTML.
Designed with [node](http://nodejs.org/) in mind, but should work fine in
the browser or other CommonJS implementations.
## What This Is
* A very simple tool to parse through an XML string.
* A stepping stone to a streaming HTML parser.
* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML
docs.
## What This Is (probably) Not
* An HTML Parser - That's a fine goal, but this isn't it. It's just
XML.
* A DOM Builder - You can use it to build an object model out of XML,
but it doesn't do that out of the box.
* XSLT - No DOM = no querying.
* 100% Compliant with (some other SAX implementation) - Most SAX
implementations are in Java and do a lot more than this does.
* An XML Validator - It does a little validation when in strict mode, but
not much.
* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic
masochism.
* A DTD-aware Thing - Fetching DTDs is a much bigger job.
## Regarding `<!DOCTYPE`s and `<!ENTITY`s
The parser will handle the basic XML entities in text nodes and attribute
values: `&amp; &lt; &gt; &apos; &quot;`. It's possible to define additional
entities in XML by putting them in the DTD. This parser doesn't do anything
with that. If you want to listen to the `ondoctype` event, and then fetch
the doctypes, and read the entities and add them to `parser.ENTITIES`, then
be my guest.
Unknown entities will fail in strict mode, and in loose mode, will pass
through unmolested.
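For example, an extra entity can be registered by hand before parsing (a sketch; `&gar;` is a made-up entity):
```javascript
var parser = require("sax").parser(true);
// teach the strict parser one additional entity
parser.ENTITIES["gar"] = "GAR";
parser.ontext = function (t) {
  console.log(t); // "address from GAR"
};
parser.write("<xml>address from &gar;</xml>").close();
```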
## Usage
```javascript
var sax = require("./lib/sax"),
strict = true, // set to false for html-mode
parser = sax.parser(strict);
parser.onerror = function (e) {
// an error happened.
};
parser.ontext = function (t) {
// got some text. t is the string of text.
};
parser.onopentag = function (node) {
// opened a tag. node has "name" and "attributes"
};
parser.onattribute = function (attr) {
// an attribute. attr has "name" and "value"
};
parser.onend = function () {
// parser stream is done, and ready to have more stuff written to it.
};
parser.write('<xml>Hello, <who name="world">world</who>!</xml>').close();
// stream usage
// takes the same options as the parser
var saxStream = require("sax").createStream(strict, options)
saxStream.on("error", function (e) {
// unhandled errors will throw, since this is a proper node
// event emitter.
console.error("error!", e)
// clear the error
this._parser.error = null
this._parser.resume()
})
saxStream.on("opentag", function (node) {
// same object as above
})
// pipe is supported, and it's readable/writable
// same chunks coming in also go out.
fs.createReadStream("file.xml")
.pipe(saxStream)
.pipe(fs.createWriteStream("file-copy.xml"))
```
## Arguments
Pass the following arguments to the parser function. All are optional.
`strict` - Boolean. Whether or not to be a jerk. Default: `false`.
`opt` - Object bag of settings regarding string formatting. All default to `false`.
Settings supported:
* `trim` - Boolean. Whether or not to trim text and comment nodes.
* `normalize` - Boolean. If true, then turn any whitespace into a single
space.
* `lowercase` - Boolean. If true, then lowercase tag names and attribute names
in loose mode, rather than uppercasing them.
* `xmlns` - Boolean. If true, then namespaces are supported.
* `position` - Boolean. If false, then don't track line/col/position.
* `strictEntities` - Boolean. If true, only parse [predefined XML
entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent)
(`&amp;`, `&apos;`, `&gt;`, `&lt;`, and `&quot;`)
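For instance, a loose-mode parser with several of these settings combined (a sketch):
```javascript
var looseParser = require("sax").parser(false, {
  trim: true,      // trim text and comment nodes
  normalize: true, // collapse whitespace runs into single spaces
  lowercase: true  // lowercase tag/attribute names instead of uppercasing
});
```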
## Methods
`write` - Write bytes onto the stream. You don't have to do this all at
once. You can keep writing as much as you want.
`close` - Close the stream. Once closed, no more data may be written until
it is done processing the buffer, which is signaled by the `end` event.
`resume` - To gracefully handle errors, assign a listener to the `error`
event. Then, when the error is taken care of, you can call `resume` to
continue parsing. Otherwise, the parser will not continue while in an error
state.
## Members
At all times, the parser object will have the following members:
`line`, `column`, `position` - Indications of the position in the XML
document where the parser currently is looking.
`startTagPosition` - Indicates the position where the current tag starts.
`closed` - Boolean indicating whether or not the parser can be written to.
If it's `true`, then wait for the `ready` event to write again.
`strict` - Boolean indicating whether or not the parser is a jerk.
`opt` - Any options passed into the constructor.
`tag` - The current tag being dealt with.
And a bunch of other stuff that you probably shouldn't touch.
## Events
All events emit with a single argument. To listen to an event, assign a
function to `on<eventname>`. Functions get executed in the this-context of
the parser object. The list of supported events is also in the exported
`EVENTS` array.
When using the stream interface, assign handlers using the EventEmitter
`on` function in the normal fashion.
`error` - Indication that something bad happened. The error will be hanging
out on `parser.error`, and must be deleted before parsing can continue. By
listening to this event, you can keep an eye on that kind of stuff. Note:
this happens *much* more in strict mode. Argument: instance of `Error`.
`text` - Text node. Argument: string of text.
`doctype` - The `<!DOCTYPE` declaration. Argument: doctype string.
`processinginstruction` - Stuff like `<?xml foo="blerg" ?>`. Argument:
object with `name` and `body` members. Attributes are not parsed, as
processing instructions have implementation dependent semantics.
`sgmldeclaration` - Random SGML declarations. Stuff like `<!ENTITY p>`
would trigger this kind of event. This is a weird thing to support, so it
might go away at some point. SAX isn't intended to be used to parse SGML,
after all.
`opentagstart` - Emitted immediately when the tag name is available,
but before any attributes are encountered. Argument: object with a
`name` field and an empty `attributes` set. Note that this is the
same object that will later be emitted in the `opentag` event.
`opentag` - An opening tag. Argument: object with `name` and `attributes`.
In non-strict mode, tag names are uppercased, unless the `lowercase`
option is set. If the `xmlns` option is set, then it will contain
namespace binding information on the `ns` member, and will have a
`local`, `prefix`, and `uri` member.
`closetag` - A closing tag. In loose mode, tags are auto-closed if their
parent closes. In strict mode, well-formedness is enforced. Note that
self-closing tags will have `closeTag` emitted immediately after `openTag`.
Argument: tag name.
`attribute` - An attribute node. Argument: object with `name` and `value`.
In non-strict mode, attribute names are uppercased, unless the `lowercase`
option is set. If the `xmlns` option is set, it will also contain namespace
information.
`comment` - A comment node. Argument: the string of the comment.
`opencdata` - The opening tag of a `<![CDATA[` block.
`cdata` - The text of a `<![CDATA[` block. Since `<![CDATA[` blocks can get
quite large, this event may fire multiple times for a single block, if it
is broken up into multiple `write()`s. Argument: the string of random
character data.
`closecdata` - The closing tag (`]]>`) of a `<![CDATA[` block.
`opennamespace` - If the `xmlns` option is set, then this event will
signal the start of a new namespace binding.
`closenamespace` - If the `xmlns` option is set, then this event will
signal the end of a namespace binding.
`end` - Indication that the closed stream has ended.
`ready` - Indication that the stream has reset, and is ready to be written
to.
`noscript` - In non-strict mode, `<script>` tags trigger a `"script"`
event, and their contents are not checked for special xml characters.
If you pass `noscript: true`, then this behavior is suppressed.
## Reporting Problems
It's best to write a failing test if you find an issue. I will always
accept pull requests with failing tests if they demonstrate intended
behavior, but it is very hard to figure out what issue you're describing
without a test. Writing a test is also the best way for you yourself
to figure out if you really understand the issue you think you have with
sax-js.

1574
modules/gar_utils/node_modules/sax/lib/sax.js generated vendored Normal file

File diff suppressed because it is too large

66
modules/gar_utils/node_modules/sax/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"_from": "sax",
"_id": "sax@1.3.0",
"_inBundle": false,
"_integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==",
"_location": "/sax",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "sax",
"name": "sax",
"escapedName": "sax",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
"_shasum": "a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0",
"_spec": "sax",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/sax-js/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "An evented streaming XML parser in JavaScript",
"devDependencies": {
"tap": "^15.1.6"
},
"files": [
"lib/sax.js",
"LICENSE",
"README.md"
],
"homepage": "https://github.com/isaacs/sax-js#readme",
"license": "ISC",
"main": "lib/sax.js",
"name": "sax",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/sax-js.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --cov -j4"
},
"tap": {
"statements": 79,
"branches": 75,
"functions": 80,
"lines": 79
},
"version": "1.3.0"
}

4
modules/gar_utils/node_modules/tr46/.npmignore generated vendored Normal file
View File

@ -0,0 +1,4 @@
scripts/
test/
!lib/mapping_table.json

193
modules/gar_utils/node_modules/tr46/index.js generated vendored Normal file
View File

@ -0,0 +1,193 @@
"use strict";
var punycode = require("punycode");
var mappingTable = require("./lib/mappingTable.json");
var PROCESSING_OPTIONS = {
TRANSITIONAL: 0,
NONTRANSITIONAL: 1
};
function normalize(str) { // fix bug in v8
return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000');
}
function findStatus(val) {
var start = 0;
var end = mappingTable.length - 1;
while (start <= end) {
var mid = Math.floor((start + end) / 2);
var target = mappingTable[mid];
if (target[0][0] <= val && target[0][1] >= val) {
return target;
} else if (target[0][0] > val) {
end = mid - 1;
} else {
start = mid + 1;
}
}
return null;
}
var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
function countSymbols(string) {
return string
// replace every surrogate pair with a BMP symbol
.replace(regexAstralSymbols, '_')
// then get the length
.length;
}
function mapChars(domain_name, useSTD3, processing_option) {
var hasError = false;
var processed = "";
var len = countSymbols(domain_name);
for (var i = 0; i < len; ++i) {
var codePoint = domain_name.codePointAt(i);
var status = findStatus(codePoint);
switch (status[1]) {
case "disallowed":
hasError = true;
processed += String.fromCodePoint(codePoint);
break;
case "ignored":
break;
case "mapped":
processed += String.fromCodePoint.apply(String, status[2]);
break;
case "deviation":
if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {
processed += String.fromCodePoint.apply(String, status[2]);
} else {
processed += String.fromCodePoint(codePoint);
}
break;
case "valid":
processed += String.fromCodePoint(codePoint);
break;
case "disallowed_STD3_mapped":
if (useSTD3) {
hasError = true;
processed += String.fromCodePoint(codePoint);
} else {
processed += String.fromCodePoint.apply(String, status[2]);
}
break;
case "disallowed_STD3_valid":
if (useSTD3) {
hasError = true;
}
processed += String.fromCodePoint(codePoint);
break;
}
}
return {
string: processed,
error: hasError
};
}
var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/;
function validateLabel(label, processing_option) {
if (label.substr(0, 4) === "xn--") {
label = punycode.toUnicode(label);
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
}
var error = false;
if (normalize(label) !== label ||
(label[3] === "-" && label[4] === "-") ||
label[0] === "-" || label[label.length - 1] === "-" ||
label.indexOf(".") !== -1 ||
label.search(combiningMarksRegex) === 0) {
error = true;
}
var len = countSymbols(label);
for (var i = 0; i < len; ++i) {
var status = findStatus(label.codePointAt(i));
if ((processing_option === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") ||
(processing_option === PROCESSING_OPTIONS.NONTRANSITIONAL &&
status[1] !== "valid" && status[1] !== "deviation")) {
error = true;
break;
}
}
return {
label: label,
error: error
};
}
function processing(domain_name, useSTD3, processing_option) {
var result = mapChars(domain_name, useSTD3, processing_option);
result.string = normalize(result.string);
var labels = result.string.split(".");
for (var i = 0; i < labels.length; ++i) {
try {
var validation = validateLabel(labels[i], processing_option);
labels[i] = validation.label;
result.error = result.error || validation.error;
} catch(e) {
result.error = true;
}
}
return {
string: labels.join("."),
error: result.error
};
}
module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {
var result = processing(domain_name, useSTD3, processing_option);
var labels = result.string.split(".");
labels = labels.map(function(l) {
try {
return punycode.toASCII(l);
} catch(e) {
result.error = true;
return l;
}
});
if (verifyDnsLength) {
var total = labels.slice(0, labels.length - 1).join(".").length;
if (total > 253 || total === 0) {
result.error = true;
}
for (var i = 0; i < labels.length; ++i) {
if (labels[i].length > 63 || labels[i].length === 0) {
result.error = true;
break;
}
}
}
if (result.error) return null;
return labels.join(".");
};
module.exports.toUnicode = function(domain_name, useSTD3) {
var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);
return {
domain: result.string,
error: result.error
};
};
module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;
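A usage sketch for the exports above (results assume the mapping table shipped with this package):
```js
var tr46 = require("tr46");
// TRANSITIONAL processing maps deviation characters, e.g. "ß" -> "ss"
var ascii = tr46.toASCII("faß.example", false, tr46.PROCESSING_OPTIONS.TRANSITIONAL, true);
console.log(ascii); // "fass.example" (null if validation had failed)
// toUnicode always runs NONTRANSITIONAL processing
var uni = tr46.toUnicode("xn--fa-hia.example", false);
console.log(uni.domain, uni.error); // "faß.example" false
```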

0
modules/gar_utils/node_modules/tr46/lib/.gitkeep generated vendored Normal file
View File

File diff suppressed because one or more lines are too long

59
modules/gar_utils/node_modules/tr46/package.json generated vendored Normal file
View File

@ -0,0 +1,59 @@
{
"_from": "tr46@~0.0.3",
"_id": "tr46@0.0.3",
"_inBundle": false,
"_integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"_location": "/tr46",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "tr46@~0.0.3",
"name": "tr46",
"escapedName": "tr46",
"rawSpec": "~0.0.3",
"saveSpec": null,
"fetchSpec": "~0.0.3"
},
"_requiredBy": [
"/whatwg-url"
],
"_resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"_shasum": "8184fd347dac9cdc185992f3a6622e14b9d9ab6a",
"_spec": "tr46@~0.0.3",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1\\node_modules\\whatwg-url",
"author": {
"name": "Sebastian Mayr",
"email": "npm@smayr.name"
},
"bugs": {
"url": "https://github.com/Sebmaster/tr46.js/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "An implementation of the Unicode TR46 spec",
"devDependencies": {
"mocha": "^2.2.5",
"request": "^2.57.0"
},
"homepage": "https://github.com/Sebmaster/tr46.js#readme",
"keywords": [
"unicode",
"tr46",
"url",
"whatwg"
],
"license": "MIT",
"main": "index.js",
"name": "tr46",
"repository": {
"type": "git",
"url": "git+https://github.com/Sebmaster/tr46.js.git"
},
"scripts": {
"prepublish": "node scripts/generateMappingTable.js",
"pretest": "node scripts/getLatestUnicodeTests.js",
"test": "mocha"
},
"version": "0.0.3"
}

View File

@ -0,0 +1,12 @@
# The BSD 2-Clause License
Copyright (c) 2014, Domenic Denicola
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,53 @@
# WebIDL Type Conversions on JavaScript Values
This package implements, in JavaScript, the algorithms to convert a given JavaScript value according to a given [WebIDL](http://heycam.github.io/webidl/) [type](http://heycam.github.io/webidl/#idl-types).
The goal is that you should be able to write code like
```js
const conversions = require("webidl-conversions");
function doStuff(x, y) {
x = conversions["boolean"](x);
y = conversions["unsigned long"](y);
// actual algorithm code here
}
```
and your function `doStuff` will behave the same as a WebIDL operation declared as
```webidl
void doStuff(boolean x, unsigned long y);
```
## API
This package's main module's default export is an object with a variety of methods, each corresponding to a different WebIDL type. Each method, when invoked on a JavaScript value, will give back the new JavaScript value that results after passing through the WebIDL conversion rules. (See below for more details on what that means.) Alternately, the method could throw an error, if the WebIDL algorithm is specified to do so: for example `conversions["float"](NaN)` [will throw a `TypeError`](http://heycam.github.io/webidl/#es-float).
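For instance (a sketch; the values follow the conversion rules described above):
```js
const conversions = require("webidl-conversions");
conversions["boolean"]("");       // false
conversions["unsigned long"](-1); // 4294967295 (wraps modulo 2^32)
conversions["octet"](300);        // 44 (300 mod 256)
conversions["float"](NaN);        // throws TypeError
```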
## Status
All of the numeric types are implemented (float being implemented as double) and some others are as well - check the source for all of them. This list will grow over time in service of the [HTML as Custom Elements](https://github.com/dglazkov/html-as-custom-elements) project, but in the meantime, pull requests welcome!
I'm not sure yet what the strategy will be for modifiers, e.g. [`[Clamp]`](http://heycam.github.io/webidl/#Clamp). Maybe something like `conversions["unsigned long"](x, { clamp: true })`? We'll see.
We might also want to extend the API to give better error messages, e.g. "Argument 1 of HTMLMediaElement.fastSeek is not a finite floating-point value" instead of "Argument is not a finite floating-point value." This would require passing in more information to the conversion functions than we currently do.
## Background
What's actually going on here, conceptually, is pretty weird. Let's try to explain.
WebIDL, as part of its madness-inducing design, has its own type system. When people write algorithms in web platform specs, they usually operate on WebIDL values, i.e. instances of WebIDL types. For example, if they were specifying the algorithm for our `doStuff` operation above, they would treat `x` as a WebIDL value of [WebIDL type `boolean`](http://heycam.github.io/webidl/#idl-boolean). Crucially, they would _not_ treat `x` as a JavaScript variable whose value is either the JavaScript `true` or `false`. They're instead working in a different type system altogether, with its own rules.
Separately from its type system, WebIDL defines a ["binding"](http://heycam.github.io/webidl/#ecmascript-binding) of the type system into JavaScript. This contains rules like: when you pass a JavaScript value to the JavaScript method that manifests a given WebIDL operation, how does that get converted into a WebIDL value? For example, a JavaScript `true` passed in the position of a WebIDL `boolean` argument becomes a WebIDL `true`. But, a JavaScript `true` passed in the position of a [WebIDL `unsigned long`](http://heycam.github.io/webidl/#idl-unsigned-long) becomes a WebIDL `1`. And so on.
Finally, we have the actual implementation code. This is usually C++, although these days [some smart people are using Rust](https://github.com/servo/servo). The implementation, of course, has its own type system. So when they implement the WebIDL algorithms, they don't actually use WebIDL values, since those aren't "real" outside of specs. Instead, implementations apply the WebIDL binding rules in such a way as to convert incoming JavaScript values into C++ values. For example, if code in the browser called `doStuff(true, true)`, then the implementation code would eventually receive a C++ `bool` containing `true` and a C++ `uint32_t` containing `1`.
The upside of all this is that implementations can abstract all the conversion logic away, letting WebIDL handle it, and focus on implementing the relevant methods in C++ with values of the correct type already provided. That is payoff of WebIDL, in a nutshell.
And getting to that payoff is the goal of _this_ project—but for JavaScript implementations, instead of C++ ones. That is, this library is designed to make it easier for JavaScript developers to write functions that behave like a given WebIDL operation. So conceptually, the conversion pipeline, which in its general form is JavaScript values ↦ WebIDL values ↦ implementation-language values, in this case becomes JavaScript values ↦ WebIDL values ↦ JavaScript values. And that intermediate step is where all the logic is performed: a JavaScript `true` becomes a WebIDL `1` in an unsigned long context, which then becomes a JavaScript `1`.
## Don't Use This
Seriously, why would you ever use this? You really shouldn't. WebIDL is … not great, and you shouldn't be emulating its semantics. If you're looking for a generic argument-processing library, you should find one with better rules than those from WebIDL. In general, your JavaScript should not be trying to become more like WebIDL; if anything, we should fix WebIDL to make it more like JavaScript.
The _only_ people who should use this are those trying to create faithful implementations (or polyfills) of web platform interfaces defined in WebIDL.

View File

@ -0,0 +1,189 @@
"use strict";
var conversions = {};
module.exports = conversions;
function sign(x) {
return x < 0 ? -1 : 1;
}
function evenRound(x) {
// Round x to the nearest integer, choosing the even integer if it lies halfway between two.
if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)
return Math.floor(x);
} else {
return Math.round(x);
}
}
function createNumberConversion(bitLength, typeOpts) {
if (!typeOpts.unsigned) {
--bitLength;
}
const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
const upperBound = Math.pow(2, bitLength) - 1;
const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
return function(V, opts) {
if (!opts) opts = {};
let x = +V;
if (opts.enforceRange) {
if (!Number.isFinite(x)) {
throw new TypeError("Argument is not a finite number");
}
x = sign(x) * Math.floor(Math.abs(x));
if (x < lowerBound || x > upperBound) {
throw new TypeError("Argument is not in byte range");
}
return x;
}
if (!isNaN(x) && opts.clamp) {
x = evenRound(x);
if (x < lowerBound) x = lowerBound;
if (x > upperBound) x = upperBound;
return x;
}
if (!Number.isFinite(x) || x === 0) {
return 0;
}
x = sign(x) * Math.floor(Math.abs(x));
x = x % moduloVal;
if (!typeOpts.unsigned && x >= moduloBound) {
return x - moduloVal;
} else if (typeOpts.unsigned) {
if (x < 0) {
x += moduloVal;
} else if (x === -0) { // don't return negative zero
return 0;
}
}
return x;
}
}
conversions["void"] = function () {
return undefined;
};
conversions["boolean"] = function (val) {
return !!val;
};
conversions["byte"] = createNumberConversion(8, { unsigned: false });
conversions["octet"] = createNumberConversion(8, { unsigned: true });
conversions["short"] = createNumberConversion(16, { unsigned: false });
conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
conversions["long"] = createNumberConversion(32, { unsigned: false });
conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
conversions["double"] = function (V) {
const x = +V;
if (!Number.isFinite(x)) {
throw new TypeError("Argument is not a finite floating-point value");
}
return x;
};
conversions["unrestricted double"] = function (V) {
const x = +V;
if (isNaN(x)) {
throw new TypeError("Argument is NaN");
}
return x;
};
// not quite valid, but good enough for JS
conversions["float"] = conversions["double"];
conversions["unrestricted float"] = conversions["unrestricted double"];
conversions["DOMString"] = function (V, opts) {
if (!opts) opts = {};
if (opts.treatNullAsEmptyString && V === null) {
return "";
}
return String(V);
};
conversions["ByteString"] = function (V, opts) {
const x = String(V);
let c = undefined;
for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
if (c > 255) {
throw new TypeError("Argument is not a valid bytestring");
}
}
return x;
};
conversions["USVString"] = function (V) {
const S = String(V);
const n = S.length;
const U = [];
for (let i = 0; i < n; ++i) {
const c = S.charCodeAt(i);
if (c < 0xD800 || c > 0xDFFF) {
U.push(String.fromCodePoint(c));
} else if (0xDC00 <= c && c <= 0xDFFF) {
U.push(String.fromCodePoint(0xFFFD));
} else {
if (i === n - 1) {
U.push(String.fromCodePoint(0xFFFD));
} else {
const d = S.charCodeAt(i + 1);
if (0xDC00 <= d && d <= 0xDFFF) {
const a = c & 0x3FF;
const b = d & 0x3FF;
U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
++i;
} else {
U.push(String.fromCodePoint(0xFFFD));
}
}
}
}
return U.join('');
};
conversions["Date"] = function (V, opts) {
if (!(V instanceof Date)) {
throw new TypeError("Argument is not a Date object");
}
if (isNaN(V)) {
return undefined;
}
return V;
};
conversions["RegExp"] = function (V, opts) {
if (!(V instanceof RegExp)) {
V = new RegExp(V);
}
return V;
};

View File

@ -0,0 +1,59 @@
{
"_from": "webidl-conversions@^3.0.0",
"_id": "webidl-conversions@3.0.1",
"_inBundle": false,
"_integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"_location": "/webidl-conversions",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "webidl-conversions@^3.0.0",
"name": "webidl-conversions",
"escapedName": "webidl-conversions",
"rawSpec": "^3.0.0",
"saveSpec": null,
"fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/whatwg-url"
],
"_resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"_shasum": "24534275e2a7bc6be7bc86611cc16ae0a5654871",
"_spec": "webidl-conversions@^3.0.0",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1\\node_modules\\whatwg-url",
"author": {
"name": "Domenic Denicola",
"email": "d@domenic.me",
"url": "https://domenic.me/"
},
"bugs": {
"url": "https://github.com/jsdom/webidl-conversions/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Implements the WebIDL algorithms for converting to and from JavaScript values",
"devDependencies": {
"mocha": "^1.21.4"
},
"files": [
"lib/"
],
"homepage": "https://github.com/jsdom/webidl-conversions#readme",
"keywords": [
"webidl",
"web",
"types"
],
"license": "BSD-2-Clause",
"main": "lib/index.js",
"name": "webidl-conversions",
"repository": {
"type": "git",
"url": "git+https://github.com/jsdom/webidl-conversions.git"
},
"scripts": {
"test": "mocha test/*.js"
},
"version": "3.0.1"
}

21
modules/gar_utils/node_modules/whatwg-url/LICENSE.txt generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015–2016 Sebastian Mayr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

67
modules/gar_utils/node_modules/whatwg-url/README.md generated vendored Normal file
View File

@ -0,0 +1,67 @@
# whatwg-url
whatwg-url is a full implementation of the WHATWG [URL Standard](https://url.spec.whatwg.org/). It can be used standalone, but it also exposes a lot of the internal algorithms that are useful for integrating a URL parser into a project like [jsdom](https://github.com/tmpvar/jsdom).
## Current Status
whatwg-url is currently up to date with the URL spec up to commit [a62223](https://github.com/whatwg/url/commit/a622235308342c9adc7fc2fd1659ff059f7d5e2a).
## API
### The `URL` Constructor
The main API is the [`URL`](https://url.spec.whatwg.org/#url) export, which follows the spec's behavior in all ways (including e.g. `USVString` conversion). Most consumers of this library will want to use this.
### Low-level URL Standard API
The following methods are exported for use by places like jsdom that need to implement things like [`HTMLHyperlinkElementUtils`](https://html.spec.whatwg.org/#htmlhyperlinkelementutils). They operate on or return an "internal URL" or ["URL record"](https://url.spec.whatwg.org/#concept-url) type.
- [URL parser](https://url.spec.whatwg.org/#concept-url-parser): `parseURL(input, { baseURL, encodingOverride })`
- [Basic URL parser](https://url.spec.whatwg.org/#concept-basic-url-parser): `basicURLParse(input, { baseURL, encodingOverride, url, stateOverride })`
- [URL serializer](https://url.spec.whatwg.org/#concept-url-serializer): `serializeURL(urlRecord, excludeFragment)`
- [Host serializer](https://url.spec.whatwg.org/#concept-host-serializer): `serializeHost(hostFromURLRecord)`
- [Serialize an integer](https://url.spec.whatwg.org/#serialize-an-integer): `serializeInteger(number)`
- [Origin](https://url.spec.whatwg.org/#concept-url-origin) [serializer](https://html.spec.whatwg.org/multipage/browsers.html#serialization-of-an-origin): `serializeURLOrigin(urlRecord)`
- [Set the username](https://url.spec.whatwg.org/#set-the-username): `setTheUsername(urlRecord, usernameString)`
- [Set the password](https://url.spec.whatwg.org/#set-the-password): `setThePassword(urlRecord, passwordString)`
- [Cannot have a username/password/port](https://url.spec.whatwg.org/#cannot-have-a-username-password-port): `cannotHaveAUsernamePasswordPort(urlRecord)`
The `stateOverride` parameter is one of the following strings:
- [`"scheme start"`](https://url.spec.whatwg.org/#scheme-start-state)
- [`"scheme"`](https://url.spec.whatwg.org/#scheme-state)
- [`"no scheme"`](https://url.spec.whatwg.org/#no-scheme-state)
- [`"special relative or authority"`](https://url.spec.whatwg.org/#special-relative-or-authority-state)
- [`"path or authority"`](https://url.spec.whatwg.org/#path-or-authority-state)
- [`"relative"`](https://url.spec.whatwg.org/#relative-state)
- [`"relative slash"`](https://url.spec.whatwg.org/#relative-slash-state)
- [`"special authority slashes"`](https://url.spec.whatwg.org/#special-authority-slashes-state)
- [`"special authority ignore slashes"`](https://url.spec.whatwg.org/#special-authority-ignore-slashes-state)
- [`"authority"`](https://url.spec.whatwg.org/#authority-state)
- [`"host"`](https://url.spec.whatwg.org/#host-state)
- [`"hostname"`](https://url.spec.whatwg.org/#hostname-state)
- [`"port"`](https://url.spec.whatwg.org/#port-state)
- [`"file"`](https://url.spec.whatwg.org/#file-state)
- [`"file slash"`](https://url.spec.whatwg.org/#file-slash-state)
- [`"file host"`](https://url.spec.whatwg.org/#file-host-state)
- [`"path start"`](https://url.spec.whatwg.org/#path-start-state)
- [`"path"`](https://url.spec.whatwg.org/#path-state)
- [`"cannot-be-a-base-URL path"`](https://url.spec.whatwg.org/#cannot-be-a-base-url-path-state)
- [`"query"`](https://url.spec.whatwg.org/#query-state)
- [`"fragment"`](https://url.spec.whatwg.org/#fragment-state)
The URL record type has the following API:
- [`scheme`](https://url.spec.whatwg.org/#concept-url-scheme)
- [`username`](https://url.spec.whatwg.org/#concept-url-username)
- [`password`](https://url.spec.whatwg.org/#concept-url-password)
- [`host`](https://url.spec.whatwg.org/#concept-url-host)
- [`port`](https://url.spec.whatwg.org/#concept-url-port)
- [`path`](https://url.spec.whatwg.org/#concept-url-path) (as an array)
- [`query`](https://url.spec.whatwg.org/#concept-url-query)
- [`fragment`](https://url.spec.whatwg.org/#concept-url-fragment)
- [`cannotBeABaseURL`](https://url.spec.whatwg.org/#url-cannot-be-a-base-url-flag) (as a boolean)
These properties should be treated with care, as in general changing them will cause the URL record to be in an inconsistent state until the appropriate invocation of `basicURLParse` is used to fix it up. You can see examples of this in the URL Standard, where there are many step sequences like "4. Set context object's url's fragment to the empty string. 5. Basic URL parse _input_ with context object's url as _url_ and fragment state as _state override_." In between those two steps, a URL record is in an unusable state.
The return value of "failure" in the spec is represented by the string `"failure"`. That is, functions like `parseURL` and `basicURLParse` can return _either_ a URL record _or_ the string `"failure"`.

View File

@ -0,0 +1,200 @@
"use strict";
const usm = require("./url-state-machine");
exports.implementation = class URLImpl {
constructor(constructorArgs) {
const url = constructorArgs[0];
const base = constructorArgs[1];
let parsedBase = null;
if (base !== undefined) {
parsedBase = usm.basicURLParse(base);
if (parsedBase === "failure") {
throw new TypeError("Invalid base URL");
}
}
const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
// TODO: query stuff
}
get href() {
return usm.serializeURL(this._url);
}
set href(v) {
const parsedURL = usm.basicURLParse(v);
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
}
get origin() {
return usm.serializeURLOrigin(this._url);
}
get protocol() {
return this._url.scheme + ":";
}
set protocol(v) {
usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
}
get username() {
return this._url.username;
}
set username(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setTheUsername(this._url, v);
}
get password() {
return this._url.password;
}
set password(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setThePassword(this._url, v);
}
get host() {
const url = this._url;
if (url.host === null) {
return "";
}
if (url.port === null) {
return usm.serializeHost(url.host);
}
return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
}
set host(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
}
get hostname() {
if (this._url.host === null) {
return "";
}
return usm.serializeHost(this._url.host);
}
set hostname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
}
get port() {
if (this._url.port === null) {
return "";
}
return usm.serializeInteger(this._url.port);
}
set port(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
if (v === "") {
this._url.port = null;
} else {
usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
}
}
get pathname() {
if (this._url.cannotBeABaseURL) {
return this._url.path[0];
}
if (this._url.path.length === 0) {
return "";
}
return "/" + this._url.path.join("/");
}
set pathname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
this._url.path = [];
usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
}
get search() {
if (this._url.query === null || this._url.query === "") {
return "";
}
return "?" + this._url.query;
}
set search(v) {
// TODO: query stuff
const url = this._url;
if (v === "") {
url.query = null;
return;
}
const input = v[0] === "?" ? v.substring(1) : v;
url.query = "";
usm.basicURLParse(input, { url, stateOverride: "query" });
}
get hash() {
if (this._url.fragment === null || this._url.fragment === "") {
return "";
}
return "#" + this._url.fragment;
}
set hash(v) {
if (v === "") {
this._url.fragment = null;
return;
}
const input = v[0] === "#" ? v.substring(1) : v;
this._url.fragment = "";
usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
}
toJSON() {
return this.href;
}
};

196
modules/gar_utils/node_modules/whatwg-url/lib/URL.js generated vendored Normal file
View File

@ -0,0 +1,196 @@
"use strict";
const conversions = require("webidl-conversions");
const utils = require("./utils.js");
const Impl = require(".//URL-impl.js");
const impl = utils.implSymbol;
function URL(url) {
if (!this || this[impl] || !(this instanceof URL)) {
throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
}
if (arguments.length < 1) {
throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
}
const args = [];
for (let i = 0; i < arguments.length && i < 2; ++i) {
args[i] = arguments[i];
}
args[0] = conversions["USVString"](args[0]);
if (args[1] !== undefined) {
args[1] = conversions["USVString"](args[1]);
}
module.exports.setup(this, args);
}
URL.prototype.toJSON = function toJSON() {
if (!this || !module.exports.is(this)) {
throw new TypeError("Illegal invocation");
}
const args = [];
for (let i = 0; i < arguments.length && i < 0; ++i) {
args[i] = arguments[i];
}
return this[impl].toJSON.apply(this[impl], args);
};
Object.defineProperty(URL.prototype, "href", {
get() {
return this[impl].href;
},
set(V) {
V = conversions["USVString"](V);
this[impl].href = V;
},
enumerable: true,
configurable: true
});
URL.prototype.toString = function () {
if (!this || !module.exports.is(this)) {
throw new TypeError("Illegal invocation");
}
return this.href;
};
Object.defineProperty(URL.prototype, "origin", {
get() {
return this[impl].origin;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "protocol", {
get() {
return this[impl].protocol;
},
set(V) {
V = conversions["USVString"](V);
this[impl].protocol = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "username", {
get() {
return this[impl].username;
},
set(V) {
V = conversions["USVString"](V);
this[impl].username = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "password", {
get() {
return this[impl].password;
},
set(V) {
V = conversions["USVString"](V);
this[impl].password = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "host", {
get() {
return this[impl].host;
},
set(V) {
V = conversions["USVString"](V);
this[impl].host = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "hostname", {
get() {
return this[impl].hostname;
},
set(V) {
V = conversions["USVString"](V);
this[impl].hostname = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "port", {
get() {
return this[impl].port;
},
set(V) {
V = conversions["USVString"](V);
this[impl].port = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "pathname", {
get() {
return this[impl].pathname;
},
set(V) {
V = conversions["USVString"](V);
this[impl].pathname = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "search", {
get() {
return this[impl].search;
},
set(V) {
V = conversions["USVString"](V);
this[impl].search = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "hash", {
get() {
return this[impl].hash;
},
set(V) {
V = conversions["USVString"](V);
this[impl].hash = V;
},
enumerable: true,
configurable: true
});
module.exports = {
is(obj) {
return !!obj && obj[impl] instanceof Impl.implementation;
},
create(constructorArgs, privateData) {
let obj = Object.create(URL.prototype);
this.setup(obj, constructorArgs, privateData);
return obj;
},
setup(obj, constructorArgs, privateData) {
if (!privateData) privateData = {};
privateData.wrapper = obj;
obj[impl] = new Impl.implementation(constructorArgs, privateData);
obj[impl][utils.wrapperSymbol] = obj;
},
interface: URL,
expose: {
Window: { URL: URL },
Worker: { URL: URL }
}
};

View File

@ -0,0 +1,11 @@
"use strict";
exports.URL = require("./URL").interface;
exports.serializeURL = require("./url-state-machine").serializeURL;
exports.serializeURLOrigin = require("./url-state-machine").serializeURLOrigin;
exports.basicURLParse = require("./url-state-machine").basicURLParse;
exports.setTheUsername = require("./url-state-machine").setTheUsername;
exports.setThePassword = require("./url-state-machine").setThePassword;
exports.serializeHost = require("./url-state-machine").serializeHost;
exports.serializeInteger = require("./url-state-machine").serializeInteger;
exports.parseURL = require("./url-state-machine").parseURL;

File diff suppressed because it is too large

20
modules/gar_utils/node_modules/whatwg-url/lib/utils.js generated vendored Normal file
View File

@ -0,0 +1,20 @@
"use strict";
module.exports.mixin = function mixin(target, source) {
const keys = Object.getOwnPropertyNames(source);
for (let i = 0; i < keys.length; ++i) {
Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
}
};
module.exports.wrapperSymbol = Symbol("wrapper");
module.exports.implSymbol = Symbol("impl");
module.exports.wrapperForImpl = function (impl) {
return impl[module.exports.wrapperSymbol];
};
module.exports.implForWrapper = function (wrapper) {
return wrapper[module.exports.implSymbol];
};

67
modules/gar_utils/node_modules/whatwg-url/package.json generated vendored Normal file
View File

@ -0,0 +1,67 @@
{
"_from": "whatwg-url@^5.0.0",
"_id": "whatwg-url@5.0.0",
"_inBundle": false,
"_integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"_location": "/whatwg-url",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "whatwg-url@^5.0.0",
"name": "whatwg-url",
"escapedName": "whatwg-url",
"rawSpec": "^5.0.0",
"saveSpec": null,
"fetchSpec": "^5.0.0"
},
"_requiredBy": [
"/node-fetch"
],
"_resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"_shasum": "966454e8765462e37644d3626f6742ce8b70965d",
"_spec": "whatwg-url@^5.0.0",
"_where": "C:\\Users\\fukil\\wrk\\CITK_PARUS\\1\\node_modules\\node-fetch",
"author": {
"name": "Sebastian Mayr",
"email": "github@smayr.name"
},
"bugs": {
"url": "https://github.com/jsdom/whatwg-url/issues"
},
"bundleDependencies": false,
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
},
"deprecated": false,
"description": "An implementation of the WHATWG URL Standard's URL API and parsing machinery",
"devDependencies": {
"eslint": "^2.6.0",
"istanbul": "~0.4.3",
"mocha": "^2.2.4",
"recast": "~0.10.29",
"request": "^2.55.0",
"webidl2js": "^3.0.2"
},
"files": [
"lib/"
],
"homepage": "https://github.com/jsdom/whatwg-url#readme",
"license": "MIT",
"main": "lib/public-api.js",
"name": "whatwg-url",
"repository": {
"type": "git",
"url": "git+https://github.com/jsdom/whatwg-url.git"
},
"scripts": {
"build": "node scripts/transform.js && node scripts/convert-idl.js",
"coverage": "istanbul cover node_modules/mocha/bin/_mocha",
"lint": "eslint .",
"prepublish": "npm run build",
"pretest": "node scripts/get-latest-platform-tests.js && npm run build",
"test": "mocha"
},
"version": "5.0.0"
}

43
modules/gar_utils/package-lock.json generated Normal file
View File

@ -0,0 +1,43 @@
{
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"node-fetch": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz",
"integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==",
"requires": {
"whatwg-url": "^5.0.0"
}
},
"node-stream-zip": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.15.0.tgz",
"integrity": "sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw=="
},
"sax": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
"integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA=="
},
"tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
},
"whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"requires": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
}
}
}

View File

@ -0,0 +1,433 @@
/*
Parus 8 integration service for WEB API
Add-on module: GAR integration - parsers
*/
//------------------------------
// External dependencies
//------------------------------
const oracledb = require("oracledb"); //Oracle database access
//--------------------------
// Global identifiers
//--------------------------
//Models (binding of a parser to a data file)
const MODELS = [
{ fileNameMask: /AS_ADDR_OBJ_PARAMS_(\d{8})_(.*)/i, parser: "PARAMS" },
{ fileNameMask: /AS_ADDR_OBJ_(\d{8})_(.*)/i, parser: "ADDR_OBJ" },
{ fileNameMask: /AS_ADM_HIERARCHY_(\d{8})_(.*)/i, parser: "ADM_HIERARCHY" },
{ fileNameMask: /AS_CHANGE_HISTORY_(\d{8})_(.*)/i, parser: "CHANGE_HISTORY" },
{ fileNameMask: /AS_HOUSES_PARAMS_(\d{8})_(.*)/i, parser: "PARAMS", insertProcedureName: "PKG_EXS_EXT_GAR.HOUSES_PARAMS_INSERT" },
{ fileNameMask: /AS_HOUSES_(\d{8})_(.*)/i, parser: "HOUSES" },
{ fileNameMask: /AS_MUN_HIERARCHY_(\d{8})_(.*)/i, parser: "MUN_HIERARCHY" },
{ fileNameMask: /AS_REESTR_OBJECTS_(\d{8})_(.*)/i, parser: "REESTR_OBJECTS" },
{ fileNameMask: /AS_STEADS_PARAMS_(\d{8})_(.*)/i, parser: "PARAMS", insertProcedureName: "PKG_EXS_EXT_GAR.STEADS_PARAMS_INSERT" },
{ fileNameMask: /AS_STEADS_(\d{8})_(.*)/i, parser: "STEADS" },
{ fileNameMask: /AS_HOUSE_TYPES_(\d{8})_(.*)/i, parser: "HOUSE_TYPES" },
{ fileNameMask: /AS_ADDHOUSE_TYPES_(\d{8})_(.*)/i, parser: "HOUSE_TYPES", insertProcedureName: "PKG_EXS_EXT_GAR.ADDHOUSE_TYPES_INSERT" },
{ fileNameMask: /AS_ADDR_OBJ_TYPES_(\d{8})_(.*)/i, parser: "ADDR_OBJ_TYPES" }
];
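//Note: order matters here - findModelByFileName() (see the module body below) returns the
//first matching mask, so the more specific masks (AS_ADDR_OBJ_PARAMS_..., AS_HOUSES_PARAMS_...)
//must stay above the general ones (AS_ADDR_OBJ_..., AS_HOUSES_...). For example, a file named
//"AS_ADDR_OBJ_PARAMS_20231101_0123.XML" resolves to the "PARAMS" parser, while
//"AS_ADDR_OBJ_20231101_0123.XML" resolves to "ADDR_OBJ" (both file names are illustrative).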
//Parsers
const PARSERS = {
ADDR_OBJ_TYPES: {
element: "ADDRESSOBJECTTYPE",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NTYPE_ID: Number(item.attributes.ID),
STYPE_LEVEL: item.attributes.LEVEL,
SSHORTNAME: item.attributes.SHORTNAME,
STYPE_NAME: item.attributes.NAME,
STYPE_DESC: item.attributes.DESC,
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTIVE: item.attributes.ISACTIVE == "true" ? 1 : 0
}));
const sql = `begin PKG_EXS_EXT_GAR.ADDR_OBJ_TYPES_INSERT(:NIDENT, :NTYPE_ID, :STYPE_LEVEL, :SSHORTNAME, :STYPE_NAME, :STYPE_DESC, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NTYPE_ID: { type: oracledb.NUMBER },
STYPE_LEVEL: { type: oracledb.STRING, maxSize: 10 },
SSHORTNAME: { type: oracledb.STRING, maxSize: 50 },
STYPE_NAME: { type: oracledb.STRING, maxSize: 250 },
STYPE_DESC: { type: oracledb.STRING, maxSize: 250 },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
HOUSE_TYPES: {
element: "HOUSETYPE",
async save(connection, ident, items, insertProcedureName) {
const binds = items.map(item => ({
NIDENT: ident,
NTYPE_ID: Number(item.attributes.ID),
STYPE_NAME: item.attributes.NAME,
SSHORTNAME: item.attributes.SHORTNAME,
STYPE_DESC: item.attributes.DESC,
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTIVE: item.attributes.ISACTIVE == "true" ? 1 : 0
}));
const sql = `begin ${
insertProcedureName ? insertProcedureName : "PKG_EXS_EXT_GAR.HOUSE_TYPES_INSERT"
}(:NIDENT, :NTYPE_ID, :STYPE_NAME, :SSHORTNAME, :STYPE_DESC, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NTYPE_ID: { type: oracledb.NUMBER },
STYPE_NAME: { type: oracledb.STRING, maxSize: 250 },
SSHORTNAME: { type: oracledb.STRING, maxSize: 50 },
STYPE_DESC: { type: oracledb.STRING, maxSize: 250 },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
PARAMS: {
element: "PARAM",
async save(connection, ident, items, insertProcedureName) {
const binds = items.map(item => ({
NIDENT: ident,
NPARAM_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
NCHANGEID: item.attributes.CHANGEID == undefined || item.attributes.CHANGEID == null ? null : Number(item.attributes.CHANGEID),
NCHANGEIDEND:
item.attributes.CHANGEIDEND == undefined || item.attributes.CHANGEIDEND == null ? null : Number(item.attributes.CHANGEIDEND),
NPARAM_TYPEID: Number(item.attributes.TYPEID),
SPARAM_VALUE: item.attributes.VALUE.substring(0, 4000),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE)
}));
const sql = `begin ${
insertProcedureName ? insertProcedureName : "PKG_EXS_EXT_GAR.ADDR_OBJ_PARAMS_INSERT"
}(:NIDENT, :NPARAM_ID, :NOBJECTID, :NCHANGEID, :NCHANGEIDEND, :NPARAM_TYPEID, :SPARAM_VALUE, :DUPDATEDATE, :DSTARTDATE, :DENDDATE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NPARAM_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
NCHANGEID: { type: oracledb.NUMBER },
NCHANGEIDEND: { type: oracledb.NUMBER },
NPARAM_TYPEID: { type: oracledb.NUMBER },
SPARAM_VALUE: { type: oracledb.STRING, maxSize: 4000 },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE }
}
});
}
},
MUN_HIERARCHY: {
element: "ITEM",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NMUN_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
NPARENTOBJID:
item.attributes.PARENTOBJID == undefined || item.attributes.PARENTOBJID == null ? null : Number(item.attributes.PARENTOBJID),
NCHANGEID: Number(item.attributes.CHANGEID),
SOKTMO: item.attributes.OKTMO,
NPREVID: item.attributes.PREVID == undefined || item.attributes.PREVID == null ? null : Number(item.attributes.PREVID),
NNEXTID: item.attributes.NEXTID == undefined || item.attributes.NEXTID == null ? null : Number(item.attributes.NEXTID),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTIVE: Number(item.attributes.ISACTIVE)
}));
const sql = `begin PKG_EXS_EXT_GAR.MUN_HIERARCHY_INSERT(:NIDENT, :NMUN_ID, :NOBJECTID, :NPARENTOBJID, :NCHANGEID, :SOKTMO, :NPREVID, :NNEXTID, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NMUN_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
NPARENTOBJID: { type: oracledb.NUMBER },
NCHANGEID: { type: oracledb.NUMBER },
SOKTMO: { type: oracledb.STRING, maxSize: 11 },
NPREVID: { type: oracledb.NUMBER },
NNEXTID: { type: oracledb.NUMBER },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
ADM_HIERARCHY: {
element: "ITEM",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NADM_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
NPARENTOBJID:
item.attributes.PARENTOBJID == undefined || item.attributes.PARENTOBJID == null ? null : Number(item.attributes.PARENTOBJID),
NCHANGEID: Number(item.attributes.CHANGEID),
SREGIONCODE: item.attributes.REGIONCODE,
SAREACODE: item.attributes.AREACODE,
SCITYCODE: item.attributes.CITYCODE,
SPLACECODE: item.attributes.PLACECODE,
SPLANCODE: item.attributes.PLANCODE,
SSTREETCODE: item.attributes.STREETCODE,
NPREVID: item.attributes.PREVID == undefined || item.attributes.PREVID == null ? null : Number(item.attributes.PREVID),
NNEXTID: item.attributes.NEXTID == undefined || item.attributes.NEXTID == null ? null : Number(item.attributes.NEXTID),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTIVE: Number(item.attributes.ISACTIVE)
}));
const sql = `begin PKG_EXS_EXT_GAR.ADM_HIERARCHY_INSERT(:NIDENT, :NADM_ID, :NOBJECTID, :NPARENTOBJID, :NCHANGEID, :SREGIONCODE, :SAREACODE, :SCITYCODE, :SPLACECODE, :SPLANCODE, :SSTREETCODE, :NPREVID, :NNEXTID, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NADM_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
NPARENTOBJID: { type: oracledb.NUMBER },
NCHANGEID: { type: oracledb.NUMBER },
SREGIONCODE: { type: oracledb.STRING, maxSize: 4 },
SAREACODE: { type: oracledb.STRING, maxSize: 4 },
SCITYCODE: { type: oracledb.STRING, maxSize: 4 },
SPLACECODE: { type: oracledb.STRING, maxSize: 4 },
SPLANCODE: { type: oracledb.STRING, maxSize: 4 },
SSTREETCODE: { type: oracledb.STRING, maxSize: 4 },
NPREVID: { type: oracledb.NUMBER },
NNEXTID: { type: oracledb.NUMBER },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
CHANGE_HISTORY: {
element: "ITEM",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NCHANGEID: Number(item.attributes.CHANGEID),
NOBJECTID: Number(item.attributes.OBJECTID),
SADROBJECTID: item.attributes.ADROBJECTID,
NOPERTYPEID: Number(item.attributes.OPERTYPEID),
NNDOCID: item.attributes.NDOCID == undefined || item.attributes.NDOCID == null ? null : Number(item.attributes.NDOCID),
DCHANGEDATE: new Date(item.attributes.CHANGEDATE)
}));
const sql = `begin PKG_EXS_EXT_GAR.CHANGE_HISTORY_INSERT(:NIDENT, :NCHANGEID, :NOBJECTID, :SADROBJECTID, :NOPERTYPEID, :NNDOCID, :DCHANGEDATE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NCHANGEID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
SADROBJECTID: { type: oracledb.STRING, maxSize: 36 },
NOPERTYPEID: { type: oracledb.NUMBER },
NNDOCID: { type: oracledb.NUMBER },
DCHANGEDATE: { type: oracledb.DATE }
}
});
}
},
REESTR_OBJECTS: {
element: "OBJECT",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NOBJECTID: Number(item.attributes.OBJECTID),
SOBJECTGUID: item.attributes.OBJECTGUID,
NCHANGEID: Number(item.attributes.CHANGEID),
NISACTIVE: Number(item.attributes.ISACTIVE),
NLEVELID: Number(item.attributes.LEVELID),
DCREATEDATE: new Date(item.attributes.CREATEDATE),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE)
}));
const sql = `begin PKG_EXS_EXT_GAR.REESTR_OBJECTS_INSERT(:NIDENT, :NOBJECTID, :SOBJECTGUID, :NCHANGEID, :NISACTIVE, :NLEVELID, :DCREATEDATE, :DUPDATEDATE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
SOBJECTGUID: { type: oracledb.STRING, maxSize: 36 },
NCHANGEID: { type: oracledb.NUMBER },
NISACTIVE: { type: oracledb.NUMBER },
NLEVELID: { type: oracledb.NUMBER },
DCREATEDATE: { type: oracledb.DATE },
DUPDATEDATE: { type: oracledb.DATE }
}
});
}
},
ADDR_OBJ: {
element: "OBJECT",
async save(connection, ident, items, insertProcedureName, region) {
const binds = items.map(item => ({
NIDENT: ident,
SREGIONCODE: region,
NADDR_OBJ_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
SOBJECTGUID: item.attributes.OBJECTGUID,
NCHANGEID: Number(item.attributes.CHANGEID),
SADDR_OBJ_NAME: item.attributes.NAME,
STYPENAME: item.attributes.TYPENAME,
NADDR_OBJ_LEVEL: Number(item.attributes.LEVEL),
SOPERTYPEID: item.attributes.OPERTYPEID,
NPREVID: item.attributes.PREVID == undefined || item.attributes.PREVID == null ? null : Number(item.attributes.PREVID),
NNEXTID: item.attributes.NEXTID == undefined || item.attributes.NEXTID == null ? null : Number(item.attributes.NEXTID),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTUAL: Number(item.attributes.ISACTUAL),
NISACTIVE: Number(item.attributes.ISACTIVE)
}));
const sql = `begin PKG_EXS_EXT_GAR.ADDR_OBJ_INSERT(:NIDENT, :SREGIONCODE, :NADDR_OBJ_ID, :NOBJECTID, :SOBJECTGUID, :NCHANGEID, :SADDR_OBJ_NAME, :STYPENAME, :NADDR_OBJ_LEVEL, :SOPERTYPEID, :NPREVID, :NNEXTID, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTUAL, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
SREGIONCODE: { type: oracledb.STRING, maxSize: 2 },
NADDR_OBJ_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
SOBJECTGUID: { type: oracledb.STRING, maxSize: 36 },
NCHANGEID: { type: oracledb.NUMBER },
SADDR_OBJ_NAME: { type: oracledb.STRING, maxSize: 250 },
STYPENAME: { type: oracledb.STRING, maxSize: 50 },
NADDR_OBJ_LEVEL: { type: oracledb.NUMBER },
SOPERTYPEID: { type: oracledb.STRING, maxSize: 2 },
NPREVID: { type: oracledb.NUMBER },
NNEXTID: { type: oracledb.NUMBER },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTUAL: { type: oracledb.NUMBER },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
HOUSES: {
element: "HOUSE",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NHOUSES_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
SOBJECTGUID: item.attributes.OBJECTGUID,
NCHANGEID: Number(item.attributes.CHANGEID),
SHOUSENUM: item.attributes.HOUSENUM,
SADDNUM1: item.attributes.ADDNUM1,
SADDNUM2: item.attributes.ADDNUM2,
NHOUSETYPE: item.attributes.HOUSETYPE == undefined || item.attributes.HOUSETYPE == null ? null : Number(item.attributes.HOUSETYPE),
NADDTYPE1: item.attributes.ADDTYPE1 == undefined || item.attributes.ADDTYPE1 == null ? null : Number(item.attributes.ADDTYPE1),
NADDTYPE2: item.attributes.ADDTYPE2 == undefined || item.attributes.ADDTYPE2 == null ? null : Number(item.attributes.ADDTYPE2),
NOPERTYPEID: Number(item.attributes.OPERTYPEID),
NPREVID: item.attributes.PREVID == undefined || item.attributes.PREVID == null ? null : Number(item.attributes.PREVID),
NNEXTID: item.attributes.NEXTID == undefined || item.attributes.NEXTID == null ? null : Number(item.attributes.NEXTID),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTUAL: Number(item.attributes.ISACTUAL),
NISACTIVE: Number(item.attributes.ISACTIVE)
}));
const sql = `begin PKG_EXS_EXT_GAR.HOUSES_INSERT(:NIDENT, :NHOUSES_ID, :NOBJECTID, :SOBJECTGUID, :NCHANGEID, :SHOUSENUM, :SADDNUM1, :SADDNUM2, :NHOUSETYPE, :NADDTYPE1, :NADDTYPE2, :NOPERTYPEID, :NPREVID, :NNEXTID, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTUAL, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NHOUSES_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
SOBJECTGUID: { type: oracledb.STRING, maxSize: 36 },
NCHANGEID: { type: oracledb.NUMBER },
SHOUSENUM: { type: oracledb.STRING, maxSize: 50 },
SADDNUM1: { type: oracledb.STRING, maxSize: 50 },
SADDNUM2: { type: oracledb.STRING, maxSize: 50 },
NHOUSETYPE: { type: oracledb.NUMBER },
NADDTYPE1: { type: oracledb.NUMBER },
NADDTYPE2: { type: oracledb.NUMBER },
NOPERTYPEID: { type: oracledb.NUMBER },
NPREVID: { type: oracledb.NUMBER },
NNEXTID: { type: oracledb.NUMBER },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTUAL: { type: oracledb.NUMBER },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
},
STEADS: {
element: "STEAD",
async save(connection, ident, items) {
const binds = items.map(item => ({
NIDENT: ident,
NSTEADS_ID: Number(item.attributes.ID),
NOBJECTID: Number(item.attributes.OBJECTID),
SOBJECTGUID: item.attributes.OBJECTGUID,
NCHANGEID: Number(item.attributes.CHANGEID),
SSTEADS_NUMBER: item.attributes.NUMBER,
NOPERTYPEID: Number(item.attributes.OPERTYPEID),
NPREVID: item.attributes.PREVID == undefined || item.attributes.PREVID == null ? null : Number(item.attributes.PREVID),
NNEXTID: item.attributes.NEXTID == undefined || item.attributes.NEXTID == null ? null : Number(item.attributes.NEXTID),
DUPDATEDATE: new Date(item.attributes.UPDATEDATE),
DSTARTDATE: new Date(item.attributes.STARTDATE),
DENDDATE: new Date(item.attributes.ENDDATE),
NISACTUAL: Number(item.attributes.ISACTUAL),
NISACTIVE: Number(item.attributes.ISACTIVE)
}));
const sql = `begin PKG_EXS_EXT_GAR.STEADS_INSERT(:NIDENT, :NSTEADS_ID, :NOBJECTID, :SOBJECTGUID, :NCHANGEID, :SSTEADS_NUMBER, :NOPERTYPEID, :NPREVID, :NNEXTID, :DUPDATEDATE, :DSTARTDATE, :DENDDATE, :NISACTUAL, :NISACTIVE); end;`;
await connection.executeMany(sql, binds, {
autoCommit: true,
bindDefs: {
NIDENT: { type: oracledb.NUMBER },
NSTEADS_ID: { type: oracledb.NUMBER },
NOBJECTID: { type: oracledb.NUMBER },
SOBJECTGUID: { type: oracledb.STRING, maxSize: 36 },
NCHANGEID: { type: oracledb.NUMBER },
SSTEADS_NUMBER: { type: oracledb.STRING, maxSize: 250 },
NOPERTYPEID: { type: oracledb.NUMBER },
NPREVID: { type: oracledb.NUMBER },
NNEXTID: { type: oracledb.NUMBER },
DUPDATEDATE: { type: oracledb.DATE },
DSTARTDATE: { type: oracledb.DATE },
DENDDATE: { type: oracledb.DATE },
NISACTUAL: { type: oracledb.NUMBER },
NISACTIVE: { type: oracledb.NUMBER }
}
});
}
}
};
//------------
// Module body
//------------
//Find the model matching a data file name
const findModelByFileName = fileName => MODELS.find(item => item.fileNameMask.test(fileName));
//-----------------
// Module interface
//-----------------
exports.MODELS = MODELS;
exports.PARSERS = PARSERS;
exports.findModelByFileName = findModelByFileName;
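
Taken together, MODELS and PARSERS let an import worker route any entry of the GAR archive to the matching loader. A minimal sketch of that flow, assuming an open oracledb connection and a numeric load-session ident (both hypothetical, as is the require path - the file name of this listing is cut off in the diff):

//Hypothetical require path for the listing above
const { findModelByFileName, PARSERS } = require("./modules/gar_utils/parsers");
async function importFile(connection, ident, fileName, items, region) {
    //E.g. "AS_HOUSES_PARAMS_20231101_0123.XML" matches the "PARAMS" parser
    //with insertProcedureName = "PKG_EXS_EXT_GAR.HOUSES_PARAMS_INSERT"
    const model = findModelByFileName(fileName);
    if (!model) return; //Not a data file this module knows how to load
    //items are the parsed XML elements (one per PARSERS[...].element)
    await PARSERS[model.parser].save(connection, ident, items, model.insertProcedureName, region);
}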

View File

@ -0,0 +1,93 @@
/*
Parus 8 integration service for WEB API
Add-on module: GAR integration - helper functions
*/
//--------------------------
// Global identifiers
//--------------------------
//Message types
const LOG_ERR = "LOG_ERR"; //Error
const LOG_WRN = "LOG_WRN"; //Warning
const LOG_INF = "LOG_INF"; //Information
//Worker message types
const WRK_MSG_TYPE = {
    TASK: "TASK",
    RESULT: "RESULT",
    STOP: "STOP"
};
//------------
// Module body
//------------
//Logging
const log = (type, message, module, stream) => {
    let d = new Date();
    if (stream)
        stream.write(
            `${d.toLocaleString("ru-RU")}${module ? ` (${module})` : ""}${
                type === LOG_ERR ? " ОШИБКА" : type === LOG_WRN ? " ПРЕДУПРЕЖДЕНИЕ" : " ИНФОРМАЦИЯ"
            }: ${message}\n`
        );
};
//Logging - error
const logErr = (message, module, stream) => log(LOG_ERR, message, module, stream);
//Logging - warning
const logWrn = (message, module, stream) => log(LOG_WRN, message, module, stream);
//Logging - information
const logInf = (message, module, stream) => log(LOG_INF, message, module, stream);
//Build a stop message
const makeStopMessage = () => ({ type: WRK_MSG_TYPE.STOP });
//Build a task message
const makeTaskMessage = ({ payload }) => ({ type: WRK_MSG_TYPE.TASK, payload });
//Build a task result
const makeTaskResult = ({ err, payload }) => ({ type: WRK_MSG_TYPE.RESULT, err, payload });
//Build a task result (success)
const makeTaskOKResult = payload => makeTaskResult({ err: null, payload });
//Build a task result (error)
const makeTaskErrResult = err => makeTaskResult({ err, payload: null });
//Parse a DD.MM.YYYY string into a Date
const stringToDate = dateString => {
    const dateStringSplit = dateString.split(".");
    if (dateStringSplit.length == 3) {
        try {
            //Note: one day is added, apparently to offset the backward shift that
            //toISOString() (see dateToISOString below) produces in UTC+ timezones such as MSK
            return new Date(+dateStringSplit[2], +dateStringSplit[1] - 1, +dateStringSplit[0] + 1);
        } catch (e) {
            return null;
        }
    } else {
        return null;
    }
};
//Convert a date to an ISO string in the YYYY-MM-DD format
const dateToISOString = date => {
    return date.toISOString().slice(0, 10);
};
//-----------------
// Module interface
//-----------------
exports.WRK_MSG_TYPE = WRK_MSG_TYPE;
exports.logErr = logErr;
exports.logWrn = logWrn;
exports.logInf = logInf;
exports.makeStopMessage = makeStopMessage;
exports.makeTaskMessage = makeTaskMessage;
exports.makeTaskOKResult = makeTaskOKResult;
exports.makeTaskErrResult = makeTaskErrResult;
exports.stringToDate = stringToDate;
exports.dateToISOString = dateToISOString;
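
A short usage sketch for the helpers above (the stream and values are illustrative). Note that the DD.MM.YYYY round trip below relies on the extra day added in stringToDate, and only comes out even in UTC+ timezones such as the server's:

const utils = require("./modules/gar_utils/utils");
//Logging goes to any writable stream, e.g. the load-log file or stdout
utils.logInf("Импорт начат", "GAR", process.stdout);
//Worker messages share one envelope format
const task = utils.makeTaskMessage({ payload: { fileName: "AS_HOUSES_20231101_x.XML" } });
const ok = utils.makeTaskOKResult({ rows: 1000 });
//"01.11.2023" -> Date -> "2023-11-01" (in a UTC+3 timezone)
const d = utils.stringToDate("01.11.2023");
console.log(d ? utils.dateToISOString(d) : null);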

View File

@ -0,0 +1,274 @@
/*
Parus 8 integration service for WEB API
Add-on module: GAR integration - workers pool
*/
//------------------------------
// External dependencies
//------------------------------
const { Worker } = require("worker_threads"); //Parallel workers
const { WRK_MSG_TYPE, logInf, logWrn, makeStopMessage } = require("./utils"); //Helper functions
//--------------------------
// Global identifiers
//--------------------------
//Module name for logging
const MODULE = "GAR_WORKERS_POOL";
//Default pool size
const DEFAULT_POOL_SIZE = 10;
//Timeout for waiting on worker termination
const WRK_TERMINATE_TIMEOUT = 1000;
//------------
// Module body
//------------
//Workers pool
class WorkersPool {
    //Class constructor
    constructor({ workerPath, timeout, limit, drainTimeout } = {}) {
        //Workers
        this.items = [];
        //Worker callbacks
        this.cb = [];
        //Worker availability flags
        this.free = [];
        //Worker termination flags
        this.terminated = [];
        //Queue of requests waiting for a freed worker
        this.queue = [];
        //Timeout for waiting until a worker is freed
        this.timeout = timeout || 0;
        //Current worker
        this.current = 0;
        //Current pool size
        this.size = 0;
        //Number of free workers in the pool
        this.available = 0;
        //Pool size limit
        this.limit = limit || DEFAULT_POOL_SIZE;
        //Number of started and running workers
        this.online = 0;
        //Path to the worker script
        if (!workerPath) throw new Error("Не указан путь к файлу обработчика.");
        this.workerPath = workerPath;
        //Pool started flag
        this.started = false;
        //Pool draining flag
        this.draining = false;
        //Pool drain timeout
        if (drainTimeout !== undefined && drainTimeout < WRK_TERMINATE_TIMEOUT)
            throw new Error(`Таймаут ожидания останова пула не может быть меньше чем ${WRK_TERMINATE_TIMEOUT} мс.`);
        this.drainTimeout = drainTimeout || WRK_TERMINATE_TIMEOUT;
    }
    //Start the pool
    start({ dbBuferSize, fileChunkSize, loadLog, dbConn }) {
        //Check that the pool can be started
        if (this.started) throw new Error("Пул уже запущен");
        if (!this.workerPath) throw new Error("Не указан путь к файлу обработчика.");
        //Build a pool of the requested size
        for (let i = 0; i < this.limit; i++) {
            //Create a worker
            let wrk = new Worker(this.workerPath, {
                workerData: {
                    number: i,
                    dbBuferSize,
                    fileChunkSize,
                    loadLog: JSON.stringify(loadLog),
                    dbConn
                }
            });
            //Subscribe to worker start
            wrk.on("online", () => {
                logInf(`Обработчик #${i} запущен.`, MODULE, loadLog);
                this.online++;
                logInf(`Всего запущено: ${this.online}`, MODULE, loadLog);
            });
            //Subscribe to worker exit
            wrk.on("exit", exitCode => {
                logInf(`Обработчик #${i} остановлен. Код выхода - ${exitCode}`, MODULE, loadLog);
                this.online--;
                logInf(`Всего запущено: ${this.online}`, MODULE, loadLog);
            });
            //Subscribe to worker messages
            wrk.on("message", data => {
                //A task result arrived
                if (data?.type === WRK_MSG_TYPE.RESULT) {
                    //If a callback is waiting for it
                    if (this.cb[i]) {
                        //Invoke the callback
                        this.cb[i](data.err, data.payload);
                        //Forget the callback
                        this.cb[i] = null;
                    }
                    this.release(wrk);
                }
                //A stop confirmation arrived - terminate the worker
                if (data?.type == WRK_MSG_TYPE.STOP) {
                    this.items[i].terminate();
                }
            });
            //Add the worker to the pool
            this.add(wrk);
        }
        //Resolve once all workers are online
        return new Promise(async resolve => {
            while (this.online < this.limit) {
                await new Promise(resolve => setTimeout(resolve, 0));
            }
            this.started = true;
            resolve();
        });
    }
    //Stop the pool
    async stop(loadLog) {
        //Set the draining flag so that no new tasks are accepted
        this.draining = true;
        //Reject everyone still waiting for a freed worker
        while (this.queue.length > 0) {
            const { reject, timer } = this.queue.shift();
            if (timer) clearTimeout(timer);
            if (reject) setTimeout(reject, 0, new Error("Пул закрывается - размещение задач недопустимо."));
        }
        //Initialize the count of still-busy workers and the number of stop attempts
        let more = 0;
        let cntTry = 0;
        //While there are workers left to stop
        do {
            //Reset the count of workers left over from the previous iteration
            more = 0;
            //Check whether the pool drain timeout has been exceeded
            if (cntTry * WRK_TERMINATE_TIMEOUT > this.drainTimeout) {
                logWrn("Истёк таймаут ожидания останова пула - занятые обработчики будут остановлены принудительно.", MODULE, loadLog);
            }
            //Stop all free workers (or every worker once the drain timeout is exceeded)
            for (let i = 0; i < this.items.length; i++) {
                if (this.free[i] || (!this.free[i] && !this.terminated[i] && cntTry * WRK_TERMINATE_TIMEOUT > this.drainTimeout)) {
                    this.terminated[i] = true;
                    this.items[i].postMessage(makeStopMessage());
                } else more++;
            }
            //If some workers are still busy - wait and try again
            if (more > 0) {
                await new Promise(resolve => setTimeout(resolve, WRK_TERMINATE_TIMEOUT));
                logInf(`Ожидаю освобождение обработчиков...(ещё занято ${more})`, MODULE, loadLog);
            }
            cntTry++;
        } while (more != 0);
        //Wait until all workers have actually exited
        do {
            await new Promise(resolve => setTimeout(resolve, 0));
        } while (this.online > 0);
        this.started = false;
    }
    //Fetch the next free worker from the pool
    async next() {
        //Check that a worker can be fetched at all
        if (this.draining) return null;
        if (this.size === 0) return null;
        //If none are available - queue the request
        if (this.available === 0) {
            return new Promise((resolve, reject) => {
                const waiting = { resolve, reject, timer: null };
                if (this.timeout > 0) {
                    waiting.timer = setTimeout(() => {
                        waiting.resolve = null;
                        //Remove this specific request from the queue (a plain shift() would
                        //drop the head, which is not necessarily the request that timed out)
                        const idx = this.queue.indexOf(waiting);
                        if (idx >= 0) this.queue.splice(idx, 1);
                        reject(new Error("Истёк таймаут ожидания освобождения обработчика."));
                    }, this.timeout);
                }
                this.queue.push(waiting);
            });
        }
        //Find the first free worker
        let item = null;
        let free = false;
        do {
            item = this.items[this.current];
            free = this.free[this.current];
            this.current++;
            if (this.current === this.size) this.current = 0;
        } while (!item || !free);
        //Return it
        return item;
    }
    //Add a worker
    add(item) {
        //Check that the worker can be added
        if (this.items.includes(item)) throw new Error("Обработчик уже существует в пуле.");
        //Grow the actual pool size and the number of available workers
        this.size++;
        this.available++;
        //Register the worker, its availability/termination flags and a callback slot
        this.items.push(item);
        this.free.push(true);
        this.terminated.push(false);
        this.cb.push(null);
    }
    //Capture a worker
    async capture() {
        //Wait for the next free one
        const item = await this.next();
        if (!item) return null;
        //Got one - clear its availability flag and decrease the available count
        const index = this.items.indexOf(item);
        this.free[index] = false;
        this.available--;
        //Return the captured worker
        return item;
    }
    //Release a worker
    release(item) {
        //Locate the worker being released
        const index = this.items.indexOf(item);
        //Check that it can be released
        if (index < 0) throw new Error("Попытка освободить несуществующий обработчик.");
        if (this.free[index]) throw new Error("Попытка освободить незанятый обработчик.");
        //Set the availability flag and increase the available count
        this.free[index] = true;
        this.available++;
        //If someone is queued waiting for a worker (and the pool is not draining)
        if (this.queue.length > 0 && !this.draining) {
            //Hand over the freed worker
            const { resolve, timer } = this.queue.shift();
            if (timer) clearTimeout(timer);
            if (resolve) setTimeout(resolve, 0, item);
        }
    }
    //Submit a task to the pool
    async sendTask(task, cb) {
        //Check that a new task may be submitted
        if (this.draining) throw new Error("Пул закрывается - размещение задач недопустимо.");
        if (!this.started) throw new Error("Пул не запущен - размещение задач недопустимо.");
        //Capture a worker
        let item = await this.capture();
        //If that succeeded
        if (item) {
            //Locate it in the pool
            const index = this.items.indexOf(item);
            if (index < 0) throw new Error("Выбран обработчик, отсутствующий в пуле.");
            //Remember the callback for its result
            this.cb[index] = cb;
            //Send it the task
            item.postMessage(task);
        } else {
            //Failed to capture a worker
            throw new Error("Не удалось выбрать обработчик из пула.");
        }
    }
}
//-----------------
// Module interface
//-----------------
exports.WorkersPool = WorkersPool;
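
A condensed lifecycle sketch for the pool, mirroring how gar.js drives it (the option values and the task payload are illustrative):

const { WorkersPool } = require("./modules/gar_utils/workers_pool");
const { makeTaskMessage } = require("./modules/gar_utils/utils");
const pool = new WorkersPool({
    workerPath: "./modules/gar_utils/import.js",
    limit: 4, //conf.common.nThreads in gar.js
    timeout: 0, //0 - wait for a free worker indefinitely
    drainTimeout: 60000
});
(async () => {
    //start() resolves only after every worker has reported "online"
    await pool.start({ dbBuferSize: 1000, fileChunkSize: 65536, loadLog: null, dbConn: {} });
    //Each task captures one worker; the callback fires on that worker's RESULT message
    await pool.sendTask(makeTaskMessage({ payload: { fileName: "AS_HOUSES_20231101_x.XML" } }), (err, result) => {
        if (err) console.error(err);
        else console.log(result);
    });
    //Drain: reject queued waiters, stop free workers, force the rest after drainTimeout
    await pool.stop(null);
})();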