Added MongoDB

parent 9f71e73862
commit 7124d431dc

5212 changed files with 975653 additions and 695 deletions

.gitignore (vendored): 1 line changed
@@ -5,3 +5,4 @@ debug.js
 deploy-guild-commands.js
 MOOver.code-workspace
 allCode.js
+./database/test.json
@@ -1,8 +1,7 @@
 const { SlashCommandBuilder } = require('@discordjs/builders');
-let birthdayJSON = require('../database/birthdays.json');
 const { MessageEmbed } = require('discord.js');
 const help = require('../helpFunctions.js');
-const PATH = './database/birthdays.json';
+const bModel = require('../database/birthdaySchema');

 module.exports = {
 data: new SlashCommandBuilder()
@@ -15,7 +14,7 @@ module.exports = {
 .addSubcommand(subcommand =>
 subcommand
 .setName('add')
-.setDescription('Adds new birthday entry to database')
+.setDescription('Adds user to birthday list')
 .addUserOption(option => option.setName('user')
 .setDescription('Select a user')
 .setRequired(true))
@@ -32,8 +31,8 @@ module.exports = {
 .setDescription('Nickname of birthday person')))
 .addSubcommand(subcommand =>
 subcommand
-.setName('delete')
-.setDescription('Deletes birthday entry')
+.setName('remove')
+.setDescription('Removes user from birthday list')
 .addUserOption(option => option.setName('user')
 .setDescription('Select a user')
 .setRequired(true)))
@@ -44,7 +43,7 @@ module.exports = {
 .addSubcommand(subcommand =>
 subcommand
 .setName('date')
-.setDescription('Change date of a person')
+.setDescription('Change date of a user')
 .addUserOption(option => option.setName('user')
 .setDescription('Select a user')
 .setRequired(true))
@@ -59,15 +58,15 @@ module.exports = {
 .addSubcommand(subcommand =>
 subcommand
 .setName('nickname')
-.setDescription('Change nickname of a person')
+.setDescription('Change nickname of a user')
 .addUserOption(option => option.setName('user')
 .setDescription('Select a user')
 .setRequired(true))
 .addStringOption(option =>
 option.setName('nickname')
-.setDescription('Nickname of birthday person (can be empty to remove)')))),
+.setDescription('Nickname of birthday a user (can be empty to remove)')))),
 async execute(interaction) {
-const error = catchErrors(interaction.options);
+const error = catchDateErrors(interaction.options);
 if (error != null) {
 await interaction.reply(error);
 }
@@ -82,133 +81,123 @@ module.exports = {
 if (subcommandGroup == undefined) {
 switch (subcommand) {
 case 'add':
-await interaction.reply(addBirthday(interaction.options));
+await interaction.reply(await addBirthday(interaction.options));
 break;
-case 'delete':
-await interaction.reply(deleteBirthday(interaction.options));
+case 'remove':
+await interaction.reply(await removeBirthday(interaction.options));
 break;
 case 'check':
-await interaction.reply({ embeds: [checkBirthday(interaction)] });
+await interaction.reply({ embeds: [await checkBirthday(interaction)] });
 break;
 }
 }
 else {
 switch (subcommand) {
 case 'date':
-await interaction.reply(changeDate(interaction.options));
+await interaction.reply(await changeDate(interaction.options));
 break;
 case 'nickname':
-await interaction.reply(changeNickname(interaction.options));
+await interaction.reply(await changeNickname(interaction.options));
 break;
 }
 }
 },
 };

-function addBirthday(options) {
+async function addBirthday(options) {
 const userId = options.getUser('user').id;
 const newDay = options.getInteger('day');
 const newMonth = options.getInteger('month');

-let nickname;
-try {
-nickname = options.getString('nickname');
-}
-catch {
-nickname = '';
-}
+const nickname = options.getString('nickname');

-for (let i = 0; i < birthdayJSON.length; i++) {
-const currDay = birthdayJSON[i].day;
-const currMonth = birthdayJSON[i].month;
-if (birthdayJSON[i].id == userId) {
-return 'This user already exists in database';
-}
-if ((currMonth == newMonth && currDay >= newDay) || currMonth > newMonth) {
-const fstPart = birthdayJSON.slice(0, i);
-const sndPart = birthdayJSON.slice(i);
-fstPart.push({ id: userId, day: newDay, month: newMonth, nickname: nickname });
-birthdayJSON = fstPart.concat(sndPart);
-const error = help.writeToFile(birthdayJSON, PATH);
-if (error != null) {
-return 'There was an error while updating the birthday list';
-}
-return `Successfuly added <@${userId}> to the birthday list`;
-}
+let error = null;
+try {
+const dbEntry = await bModel.create({
+id: userId,
+day: newDay,
+month: newMonth,
+name: nickname,
+});
+dbEntry.save();
+error = await sortTable();
+}
+catch (err) {
+error = err;
+console.log(err);
 }
-birthdayJSON.push({ id: userId, day: newDay, month: newMonth, nickname: nickname });
-const error = help.writeToFile(birthdayJSON, PATH);
 if (error != null) {
-return 'There was an error while updating the birthday list';
+return 'There was an error \n(user is probably already on the birthday list)';
 }
 return `Successfuly added <@${userId}> to the birthday list`;
 }

-function checkBirthday(interaction) {
-const guildMembers = interaction.guild.members;
-const currentDay = new Date().getDate();
-const currentMonth = new Date().getMonth() + 1;
+async function checkBirthday(interaction) {
+const currDay = new Date().getDate();
+const currMonth = new Date().getMonth();

+const query = bModel.find({});
+const result = await query.exec();
+console.log(result);

-const closest = [];
 let closestD;
 let closestM;
+const closest = [];
+const guildMembers = interaction.guild.members;
 let isFirst = true;
-const rng = help.RNG(6);
-let probably = '';
-if (rng == 1) {
-probably = '(probably)';
-}
-else if (rng == 2) {
-probably = '(or will they?)';
-}
-
-// get the person with day closest to today date
-for (let i = 0; i < birthdayJSON.length; i++) {
-const birthDay = birthdayJSON[i].day;
-const birthMonth = birthdayJSON[i].month;
-const userId = birthdayJSON[i].id;
-const nick = birthdayJSON[i].nickname;
-// first date that is bigger or equal is the closest
-if ((currentMonth == birthMonth && currentDay <= birthDay) || currentMonth < birthMonth) {
+for (let i = 0; i < result.length; i++) {
+const birthDay = result[i].day;
+const birthMonth = result[i].month;
+const userId = result[i].id;
+const nick = result[i].nickname;
+if ((currMonth == birthMonth && currDay <= birthDay) || currMonth < birthMonth) {
 if (isFirst) {
-isFirst = false;
 closestD = birthDay;
 closestM = birthMonth;
+isFirst = false;
 }
-if (!isFirst) {
-if (closestD == birthDay && closestM == birthMonth) {
-const result = isInGuild(guildMembers, userId);
-if (result != undefined) {
-closest.push(`<@${userId}> ${nick}`);
-}
+if (!isFirst && (closestD == birthDay && closestM == birthMonth)) {
+if (isInGuild(guildMembers, userId)) {
+closest.push(`<@${userId}> ${nick}`);
 }
 else {
-closest.join('\n');
+const probably = getProbably();
+const personList = closest.join('\n');
 const embed = new MessageEmbed()
 .setTitle(`Closest birthday is ${closestD}. ${closestM}.`)
-.setDescription(`${closest} \n will celebrate ${probably}`)
+.setDescription(`${personList} \n will celebrate ${probably}`)
 .setColor(help.randomColor());
 return embed;
 }
 }
 }
 }
+if (closest != []) {
+const probably = getProbably();
+const personList = closest.join('\n');
+const embed = new MessageEmbed()
+.setTitle(`Closest birthday is ${closestD}. ${closestM}.`)
+.setDescription(`${personList} \n will celebrate ${probably}`)
+.setColor(help.randomColor());
+return embed;
+}

 // ? if the closest is in next year -> closest is the first in list
-closestD = birthdayJSON[0].day;
-closestM = birthdayJSON[0].month;
+closestD = result[0].day;
+closestM = result[0].month;
 // check if there are others with the same date just to be sure
-for (let i = 0; i < birthdayJSON.length; i++) {
-const birthDay = birthdayJSON[i].day;
-const birthMonth = birthdayJSON[i].month;
-const userId = birthdayJSON[i].id;
-const nick = birthdayJSON[i].nickname;
+for (let i = 0; i < result.length; i++) {
+const birthDay = result[i].day;
+const birthMonth = result[i].month;
+const userId = result[i].id;
+const nick = result[i].nickname;

 if (closestD == birthDay && closestM == birthMonth) {
 closest.push(`<@${userId}> ${nick}`);
 }
 else {
+const probably = getProbably();
 closest.join('\n');
 const embed = new MessageEmbed()
 .setTitle(`Closest birthday is ${birthDay}. ${birthMonth}.`)
@@ -225,25 +214,18 @@ function checkBirthday(interaction) {
 return embed;
 }

-function deleteBirthday(options) {
+async function removeBirthday(options) {
 const userId = options.getUser('user').id;

-for (let i = 0; i < birthdayJSON.length; i++) {
-if (birthdayJSON[i].id == userId) {
-const fstPart = birthdayJSON.slice(0, i);
-const sndPart = birthdayJSON.slice(i + 1);
-birthdayJSON = fstPart.concat(sndPart);
-const error = help.writeToFile(birthdayJSON, PATH);
-if (error != null) {
-return 'There was an error while updating birthday list';
-}
-return `Successfuly deleted <@${userId}> from birthday list`;
-}
-}
-return 'There was a problem :c';
+let error = null;
+await bModel.deleteOne({ id: userId }), function(err) {
+if (err) error = err;
+};
+if (error) return 'There was an error';
+return `Successfuly deleted <@${userId}> from birthday list`;
 }

-function catchErrors(options) {
+function catchDateErrors(options) {
 const month = options.getInteger('month');
 const day = options.getInteger('day');
 if (month == null || day == null) {
@@ -259,61 +241,75 @@ function catchErrors(options) {
 return null;
 }

-function changeDate(options) {
+async function changeDate(options) {
 const userId = options.getUser('user').id;
+const newDay = options.getInteger('day');
+const newMonth = options.getInteger('month');

-for (let i = 0; i < birthdayJSON.length; i++) {
-const id = birthdayJSON[i].id;
-if (birthdayJSON[i].id == userId) {
-const day = options.getInteger('day');
-const month = options.getInteger('month');
-const nick = birthdayJSON[i].nickname;
-const prevD = birthdayJSON[i].day;
-const prevM = birthdayJSON[i].month;
-let fstPart = birthdayJSON.slice(0, i);
-let sndPart = birthdayJSON.slice(i + 1);
-birthdayJSON = fstPart.concat(sndPart);
+try {
+await bModel.findOneAndUpdate({ id: userId }, { $set: { day: newDay, month: newMonth } });
+sortTable();

-for (let j = 0; j < birthdayJSON.length; j++) {
-const currDay = birthdayJSON[j].day;
-const currMonth = birthdayJSON[j].month;
-if ((currMonth == month && currDay >= day) || currMonth > month) {
-fstPart = birthdayJSON.slice(0, j);
-sndPart = birthdayJSON.slice(j);
-fstPart.push({ id: id, day: day, month: month, nickname: nick });
-birthdayJSON = fstPart.concat(sndPart);
-break;
-}
-}
-const error = help.writeToFile(birthdayJSON, PATH);
-if (error != null) {
-return 'There was an error while updating the birthday list';
-}
-return `Successfuly changed birthday date of <@${userId}> from ${prevD}. ${prevM}. to ${day}. ${month}`;
-}
 }
-return 'There was an error (this user probably isn\'t on birthday list)';
+catch (err) {
+console.log(err);
+return 'There was an error while updating the birthday list';
+}
+return `Successfuly changed birthday date of <@${userId}> to ${newDay}. ${newMonth}.`;
 }

-function changeNickname(options) {
+async function changeNickname(options) {

 const userId = options.getUser('user').id;
-for (let i = 0; i < birthdayJSON.length; i++) {
-if (birthdayJSON[i].id == userId) {
-const prevNick = birthdayJSON[i].nickname;
-const newNick = options.getString('nickname');
-birthdayJSON[i].nickname = newNick;
-
-const error = help.writeToFile(birthdayJSON, PATH);
-if (error != null) {
-return 'There was an error while updating the birthday list';
-}
-return `Succesfully change nickname of <@${userId}> from ${prevNick} to ${newNick}`;
-}
+let nick = options.getString('nickname');
+if (nick == null) nick = '';
+try {
+await bModel.findOneAndUpdate({ id: userId }, { $set: { nickname: nick } });
+}
+catch {
+return 'There was an error';
+}
+return `Succesfully change nickname of <@${userId}> to ${nick}`;
+}

+async function sortTable() {
+const query = bModel.find({}).sort({ month: 'asc', day: 'asc' });
+const result = await query.exec();
+let error;
+await bModel.deleteMany({}), function(err) {
+if (err) error = err;
+};

+if (error) return error;

+for (let i = 0; i < result.length; i++) {
+const entry = await bModel.create({
+id: result[i].id,
+day: result[i].day,
+month: result[i].month,
+nickname: result[i].nickname,
+});
+entry.save();
+}

+return null;
+}

+function getProbably() {
+const rng = help.RNG(6);
+switch (rng) {
+case 0:
+return 'probably';
+case 1:
+return 'or not';
+case 2:
+return 'or will they?';
+case 3:
+return '\n I still love you the same don\'t worry';
+default:
+return '';
 }
 }

-function checkMonth(month) {
+async function checkMonth(month) {
 switch (month) {
 case 1:
 return 31;
@@ -343,5 +339,8 @@ function checkMonth(month) {
 }

 async function isInGuild(guildMembers, userId) {
-(await guildMembers.fetch()).find(user => user.id == userId);
+if ((await guildMembers.fetch()).find(user => user.id == userId) == undefined) {
+return false;
+}
+return true;
 }
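A note on the pattern used in the new removeBirthday and sortTable: `await bModel.deleteOne({ id: userId }), function(err) { if (err) error = err; };` is a comma expression, so the trailing `function(err) { ... }` is only created, never invoked, and `error` stays null; if deleteOne fails, the awaited promise rejects and throws instead of setting `error`. A minimal sketch of the apparent intent, assuming it runs inside the same async function with `bModel` in scope:

    let error = null;
    try {
        // deleteOne returns a promise; a rejection surfaces here, not in a callback
        await bModel.deleteOne({ id: userId });
    }
    catch (err) {
        error = err;
    }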
@@ -1,9 +1,8 @@
 const { SlashCommandBuilder } = require('@discordjs/builders');
 const { MessageEmbed } = require('discord.js');
 const help = require('../helpFunctions.js');
-const PATH = './database/events.json';
-const eventsJSON = require('../database/events.json');
-const { writeToFile } = require('../helpFunctions.js');
+const eModel = require('../database/eventSchema');

 module.exports = {
 data: new SlashCommandBuilder()
@@ -29,10 +28,9 @@ module.exports = {
 .addSubcommand(subcommand =>
 subcommand.setName('delete')
 .setDescription('Deletes event from database')
-.addStringOption(option => option.setName('name')
-.setDescription('Name of the event you want to delete'))
 .addIntegerOption(option => option.setName('id')
-.setDescription('Id of the even you want to change')))
+.setDescription('Id of the even you want to change')
+.setRequired(true)))
 .addSubcommandGroup(subcommandGroup =>
 subcommandGroup.setName('change')
 .setDescription('Change the event entry')
@@ -48,9 +46,8 @@ module.exports = {
 .setDescription('New event month')
 .setRequired(true))
 .addIntegerOption(option => option.setName('id')
-.setDescription('Id of the even you want to change'))
-.addStringOption(option => option.setName('name')
-.setDescription('Name of the event you want to change')))
+.setDescription('Id of the even you want to change')
+.setRequired(true)))
 .addSubcommand(subcommand =>
 subcommand.setName('name')
 .setDescription('Change name of an event')
@@ -59,9 +56,8 @@ module.exports = {
 .setDescription('New name of the event')
 .setRequired(true))
 .addIntegerOption(option => option.setName('id')
-.setDescription('Id of the even you want to change'))
-.addStringOption(option => option.setName('name')
-.setDescription('Name of the event you want to change'))))
+.setDescription('Id of the even you want to change')
+.setRequired(true))))
 .addSubcommand(subcommand =>
 subcommand.setName('list')
 .setDescription('List all events')),
@@ -79,478 +75,176 @@ module.exports = {
 subcommandGroup = undefined;
 }
 const subcommand = interaction.options.getSubcommand();
-const key = idOrName(interaction.options);
 if (subcommandGroup == undefined) {
 switch (subcommand) {
 case 'list':
-await interaction.reply({ embeds: [listEvents(interaction)] });
+await interaction.reply({ embeds: [await listEvents(interaction)] });
 break;
 case 'add':
-await interaction.reply(addEvent(interaction));
+await interaction.reply(await addEvent(interaction));
 break;
 case 'delete':
-await interaction.reply(deleteEvent(interaction, key));
+await interaction.reply(await deleteEvent(interaction));
 break;
 }
 }
 else {
 switch (subcommand) {
 case 'date':
-await interaction.reply(changeEventDate(interaction, key));
+await interaction.reply(await changeEventDate(interaction));
 break;
 case 'name':
-await interaction.reply(changeEventName(interaction, key));
+await interaction.reply(await changeEventName(interaction));
 break;
 }
 }
 },
 };

-// TODO add event GLOBAL
-// TODO add event local
-// if the guild isnt there add it
-
-function changeEventDate(interaction, key) {
-if (key == null) {
-return 'I need id or name of the event you want to edit';
-}
+async function changeEventDate(interaction) {
+const id = interaction.options.getInteger('id');

 const newDay = interaction.options.getInteger('day');
 const newMonth = interaction.options.getInteger('month');
-// TODO deduplicate
-if (!isNaN(key)) {
-const id = parseFloat(key);
-if (id % 10 == 0) {
-let globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-if (globalEvents[i].id == id) {
-const prevDay = globalEvents[i].day;
-const prevMonth = globalEvents[i].month;
-let fstPart = globalEvents.slice(0, i);
-let sndPart = globalEvents.slice(i + 1);
-globalEvents = fstPart.concat(sndPart);

-for (let j = 0; j < globalEvents.length; j++) {
-if ((newMonth == globalEvents[i].month && newDay >= globalEvents.day)
-|| newMonth < globalEvents[i].month) {
-fstPart = globalEvents.slice(0, j);
-fstPart.push({ id: id, name: name, day: newDay, month: newMonth });
-sndPart = globalEvents.slice(j + 1);
-eventsJSON.global = fstPart.concat(sndPart);
+try {
+await eModel.findOneAndUpdate({ id: id }, { $set: { day: newDay, month: newMonth } });

-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed global event ${name} date ` +
-`from ${prevDay}. ${prevMonth}. ` +
-`to ${newDay}. ${newMonth}.`;
-}
-}
-globalEvents.push({ id: id, name: name, day: newDay, month: newMonth });
-eventsJSON.global = globalEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed global event ${name} date ` +
-`from ${prevDay}. ${prevMonth}. ` +
-`to ${newDay}. ${newMonth}.`;
-}
-}
-}
-else {
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-if (guildEvents[i].id == id) {
-const prevDay = guildEvents[i].day;
-const prevMonth = guildEvents[i].month;
-let fstPart = guildEvents.slice(0, i);
-let sndPart = guildEvents.slice(i + 1);
-for (let j = 0; j < guildEvents.length; j++) {
-if ((newMonth == guildEvents[i].month &&
-newDay >= guildEvents.day) ||
-newMonth < guildEvents[i].month) {
-fstPart = guildEvents.slice(0, j);
-fstPart.push({ id: id, name: name, day: newDay, month: newMonth });
-sndPart = guildEvents.slice(j + 1);
-eventsJSON[interaction.guild.id] = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed guild event ${name} date ` +
-`from ${prevDay}. ${prevMonth}. ` +
-`to ${newDay}. ${newMonth}.`;
-}
-}
-guildEvents.push({ id: id, name: name, day: newDay, month: newMonth });
-eventsJSON.global = guildEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed guild event ${name} date ` +
-`from ${prevDay}. ${prevMonth}. ` +
-`to ${newDay}. ${newMonth}.`;
-}
-}
-}
 }
-else {
-let globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-const id = globalEvents[i].id;
-if (globalEvents[i].name == name) {
-const prevDay = globalEvents[i].day;
-const prevMonth = globalEvents[i].month;
-let fstPart = globalEvents.slice(0, i);
-let sndPart = globalEvents.slice(i + 1);
-globalEvents = fstPart.concat(sndPart);
-
-for (let j = 0; j < globalEvents.length; j++) {
-if ((newMonth == globalEvents[i].month && newDay >= globalEvents.day)
-|| newMonth < globalEvents[i].month) {
-fstPart = globalEvents.slice(0, j);
-fstPart.push({ id: id, name: name, day: newDay, month: newMonth });
-sndPart = globalEvents.slice(j);
-eventsJSON.global = fstPart.concat(sndPart);
-
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed global event ${name}` +
-` date from ${prevDay}. ${prevMonth}.`;
-}
-}
-}
-}
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-const id = globalEvents[i].id;
-if (guildEvents[i].name == name) {
-const prevDay = guildEvents[i].day;
-const prevMonth = guildEvents[i].month;
-let fstPart = guildEvents.slice(0, i);
-let sndPart = guildEvents.slice(i + 1);
-for (let j = 0; j < guildEvents.length; j++) {
-if ((newMonth == guildEvents[i].month &&
-newDay >= guildEvents.day) ||
-newMonth < guildEvents[i].month) {
-fstPart = guildEvents.slice(0, j);
-fstPart.push({ id: id, name: name, day: newDay, month: newMonth });
-sndPart = guildEvents.slice(j);
-eventsJSON.global = fstPart.concat(sndPart);
-eventsJSON[interaction.guild.id] = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly changed guild event ${name} date` +
-` from ${prevDay}. ${prevMonth}.`;
-}
-}
-}
-}
+catch (err) {
+console.log(err);
+return 'There was an error while updating the event list';
 }
-return 'There was an error (probably entered wrong id/name)';
+return `Changed event date to ${newDay}. ${newMonth}.`;
 }

-function changeEventName(interaction, key) {
-if (key == null) {
-return 'I need id or name of the event you want to edit';
+async function changeEventName(interaction) {
+const id = interaction.options.getInteger('id');
+const newName = interaction.options.getString('name');

+try {
+await eModel.findOneAndUpdate({ id: id }, { $set: { name: newName } });
 }
-// TODO deduplicate
-if (!isNaN(key)) {
-const id = parseFloat(key);
-if (id % 10 == 0) {
-const globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-if (globalEvents[i].id == id) {
-globalEvents[i].name = interaction.options.getString('name');
-eventsJSON.global = globalEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return 'Successfuly changed name of global event' +
-`${name} to ${interaction.options.getString('name')}`;
-}
-}
-}
-else {
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-if (guildEvents[i].id == id) {
-guildEvents[i].name = interaction.options.getString('name');
-eventsJSON[interaction.guild.id] = guildEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return 'Successfuly changed name of guild event' +
-`${name} to ${interaction.options.getString('name')}`;
-}
-}
-}
+catch {
+return 'There was an error';
 }
-else {
-const globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-if (name == key) {
-globalEvents[i].name = interaction.options.getString('name');
-eventsJSON.global = globalEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return 'Successfuly changed name of global event' +
-`${name} to ${interaction.options.getString('name')}`;
-}
-}
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-if (name == key) {
-guildEvents[i].name = interaction.options.getString('name');
-eventsJSON[interaction.guild.id] = guildEvents;
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return 'Successfuly changed name of guild event' +
-`${name} to ${interaction.options.getString('name')}`;
-}
-}
-}
-return 'There was an error (probably entered wrong id/name)';
+return `Changed event name to ${newName}`;
 }

-function deleteEvent(interaction, key) {
-if (key == null) {
-return 'I need id or name of the event you want to delete';
-}
-// TODO deduplicate
-if (!isNaN(key)) {
-const id = parseFloat(key);
-if (id % 10 == 0) {
-const globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-if (globalEvents[i].id == id) {
-const fstPart = globalEvents.slice(0, i);
-const sndPart = globalEvents.slice(i + 1);
-eventsJSON.global = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly deleted global event ${name} from database`;
-}
-}
-}
-else {
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-if (guildEvents[i].id == id) {
-const fstPart = guildEvents.slice(0, i);
-const sndPart = guildEvents.slice(i + 1);
-eventsJSON[interaction.guild.id] = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly deleted guild event ${name} from database`;
-}
-}
-}
-}
-else {
-const guildEvents = eventsJSON[interaction.guild.id];
-for (let i = 0; i < guildEvents.length; i++) {
-const name = guildEvents[i].name;
-if (name == key) {
-const fstPart = guildEvents.slice(0, i);
-const sndPart = guildEvents.slice(i + 1);
-eventsJSON[interaction.guild.id] = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly deleted guild event ${name} from database`;
-}
-}
-const globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-const name = globalEvents[i].name;
-if (name == key) {
-const fstPart = globalEvents.slice(0, i);
-const sndPart = globalEvents.slice(i + 1);
-eventsJSON.global = fstPart.concat(sndPart);
-const error = writeToFile(eventsJSON, PATH);
-if (error != null) {
-return error;
-}
-return `Successfuly deleted global event ${name} from database`;
-}
-}
-}
-return 'There was an error (probably entered wrong id/name)';
+async function deleteEvent(interaction) {
+const id = interaction.options.getInteger('id');

+let error = null;
+await eModel.deleteOne({ id: id }), function(err) {
+if (err) error = err;
+};
+if (error) return 'There was an error';
+return 'Successfuly deleted event from event list';
 }

-function addEvent(interaction) {
+async function addEvent(interaction) {
 const name = interaction.options.getString('name');
 const day = interaction.options.getInteger('day');
 const month = interaction.options.getInteger('month');

-let isGlobal;
-try {
-isGlobal = interaction.options.getBoolean('global');
-}
-catch {
-isGlobal = false;
-}
+let isGlobal = interaction.options.getBoolean('global');
+if (!isGlobal) isGlobal = false;
 // TODO if duplicate send if they want to add it anyway and 2 buttons yes/no

 const ms = new Date().getMilliseconds();
-let id = (100000 * day) + (100 * (ms % 1000)) + (month * 10);
+const id = (1000 * day) + (1000 * (ms % 1000)) + month;

+// TODO DEDUPLICATE!!!
+let error = null;
 if (isGlobal) {
-const globalEvents = eventsJSON.global;
-for (let i = 0; i < globalEvents.length; i++) {
-// TODO make this help function you basically copy it evrytime (in birthday.js aswell)
-if ((globalEvents[i].month == month && globalEvents[i].day >= day) || globalEvents[i].month > month) {
-const fstPart = globalEvents.slice(0, i);
-const sndPart = globalEvents.slice(i);
-fstPart.push({ id: id, name: name, day: day, month: month });
-eventsJSON.global = fstPart.concat(sndPart);
-const error = help.writeToFile(eventsJSON, PATH);
-if (error != null) {
-return 'There was an error while updating event list';
-}
-return `Successfuly added global event ${name} to event list`;
-}
+try {
+const dbEntry = await eModel.create({
+guild: 'global',
+id: id,
+name: name,
+day: day,
+month: month,
+});
+dbEntry.save();
+error = await sortTable();
+}
+catch (err) {
+error = err;
+console.log(err);
 }
-globalEvents.push({ id: id, name: name, day: day, month: month });
-eventsJSON.global = globalEvents;
-const error = help.writeToFile(eventsJSON, PATH);
 if (error != null) {
-return 'There was an error while updating event list';
+return 'There was an error \n(user is probably already on the birthday list)';
 }
-return `Successfuly added guild event ${name} to event list`;
+return `Successfuly added global event ${name}`;
 }
 else {
-const guildEvents = eventsJSON[interaction.guild.id];
-id++;
-if (guildEvents == undefined) {
-eventsJSON[interaction.guild.id] = [{ id: id, name: name, day: day, month: month }];
-const error = help.writeToFile(eventsJSON, PATH);
-if (error != null) {
-return 'There was an error while updating event list';
-}
-return `Successfuly added guild event ${name} to event list`;
-}
-else {
-for (let i = 0; i < guildEvents.length; i++) {
-if ((guildEvents[i].month == month && guildEvents[i].day >= day) || guildEvents[i].month > month) {
-const fstPart = guildEvents.slice(0, i);
-const sndPart = guildEvents.slice(i);
-fstPart.push({ id: id, name: name, day: day, month: month });
-eventsJSON[interaction.guild.id] = fstPart.concat(sndPart);
-const error = help.writeToFile(eventsJSON, PATH);
-if (error != null) {
-return 'There was an error while updating event list';
-}
-return `Successfuly added guild event ${name} to event list`;
-}
-}
-guildEvents.push({ id: id, name: name, day: day, month: month });
-eventsJSON[interaction.guild.id] = guildEvents;
-const error = help.writeToFile(eventsJSON, PATH);
-if (error != null) {
-return 'There was an error while updating event list';
-}
-return `Successfuly added guild event ${name} to event list`;
-}
+try {
+const dbEntry = await eModel.create({
+guild: interaction.guild.id,
+id: id,
+name: name,
+day: day,
+month: month,
+});
+dbEntry.save();
+error = await sortTable();
+}
+catch (err) {
+error = err;
+console.log(err);
+}
+if (error != null) {
+return 'There was an error \n(user is probably already on the birthday list)';
+}
+return `Successfuly added guild event ${name}`;
 }
 }

-function listEvents(interaction) {
+async function listEvents(interaction) {
+let query = eModel.find({ guild: 'global' });
+const globalEvents = await query.exec();

+query = eModel.find({ guild: interaction.guild.id });
+const guildEvents = await query.exec();

 const embed = new MessageEmbed()
 .setColor(help.randomColor())
 .setTitle('Literally nothing here');

 let eventIds = [];
 let eventNames = [];
 let eventDates = [];
-const globalEvents = eventsJSON.global;
-// TODO deduplicate
-if (globalEvents != undefined && globalEvents.length > 0) {
-embed.addField('Global events:', '\u200b')
-.setTitle('');
-for (let i = 0; i < globalEvents.length; i++) {
-eventIds.push(globalEvents[i].id);
-eventNames.push(globalEvents[i].name);
-eventDates.push(`${globalEvents[i].day}. ${globalEvents[i].month}.`);
-}
+// TODO DEDUPLCIATE
+for (let i = 0; i < globalEvents.length; i++) {
+eventIds.push(globalEvents[i].id);
+eventNames.push(globalEvents[i].name);
+eventDates.push(`${globalEvents[i].day}. ${globalEvents[i].month}.`);
+}
+if (globalEvents.length > 0) {
+embed.addField('Global Events: ', '\u200b');
 embed.addField('Id: ', eventIds.join('\n'), true);
 embed.addField('Name: ', eventNames.join('\n'), true);
 embed.addField('Date: ', eventDates.join('\n'), true);
 embed.addField('\u200b', '\u200b');
 }

-const guildEvents = eventsJSON[interaction.guild.id];
-if (guildEvents != undefined && guildEvents.length > 0) {
-eventIds = [];
-eventNames = [];
-eventDates = [];
-embed.addField('Guild events:', '\u200b')
-.setTitle('');
-for (let i = 0; i < guildEvents.length; i++) {
-eventIds.push(guildEvents[i].id);
-eventNames.push(guildEvents[i].name);
-eventDates.push(`${guildEvents[i].day}. ${guildEvents[i].month}.`);
-}
+eventIds = [];
+eventNames = [];
+eventDates = [];
+for (let i = 0; i < guildEvents.length; i++) {
+eventIds.push(guildEvents[i].id);
+eventNames.push(guildEvents[i].name);
+eventDates.push(`${guildEvents[i].day}. ${guildEvents[i].month}.`);
+}

+if (guildEvents.length > 0) {
+embed.addField('Guild events:', '\u200b');
 embed.addField('Id: ', eventIds.join('\n'), true);
 embed.addField('Name: ', eventNames.join('\n'), true);
 embed.addField('Date: ', eventDates.join('\n'), true);
 }
+embed.setTitle('');
 return embed;
 }

-function idOrName(options) {
-let id;
-try {
-id = options.getInteger('id');
-}
-catch {
-id = undefined;
-}
-if (id == undefined) {
-let name;
-try {
-name = options.getString('name');
-}
-catch {
-name = undefined;
-}
-if (name == undefined) {
-return null;
-}
-return name;
-}
-return id;
-}

 function catchErrors(options) {
 const month = options.getInteger('month');
 const day = options.getInteger('day');
@@ -595,3 +289,27 @@ function checkMonth(month) {
 return 31;
 }
 }
+
+async function sortTable() {
+const query = eModel.find({}).sort({ month: 'asc', day: 'asc' });
+const result = await query.exec();
+let error;
+await eModel.deleteMany({}), function(err) {
+if (err) error = err;
+};
+
+if (error) return error;
+
+for (let i = 0; i < result.length; i++) {
+const entry = await eModel.create({
+guild: result[i].guild,
+id: result[i].id,
+name: result[i].name,
+day: result[i].day,
+month: result[i].month,
+});
+entry.save();
+}
+
+return null;
+}
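A quick worked example of the new event id formula `(1000 * day) + (1000 * (ms % 1000)) + month`, with illustrative values only:

    // day = 14, month = 2, ms % 1000 = 537 (made-up values)
    // id = (1000 * 14) + (1000 * 537) + 2 = 14000 + 537000 + 2 = 551002

Because day and ms % 1000 share the same factor of 1000, two events added in the same month collide whenever day + (ms % 1000) comes out equal. The old formula's trick of marking guild events with the last digit (the `id % 10 == 0` check and `id++`) is gone; the new schema stores the `guild` field explicitly instead.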
database/birthdaySchema.js (new file, 18 lines)
@@ -0,0 +1,18 @@
+const mongoose = require('mongoose');
+
+const Birthdays = new mongoose.Schema({
+id: {
+type: String,
+unique: true,
+},
+day: Number,
+month: Number,
+nickname: {
+type: String,
+default: '',
+},
+});
+
+const birthdaysModule = mongoose.model('birthdays', Birthdays);
+
+module.exports = birthdaysModule;
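A note on `unique: true` for `id`: once MongoDB has built that unique index (Mongoose builds it automatically by default), creating a second document with the same id rejects with a duplicate-key error, which is what the birthday command surfaces as "user is probably already on the birthday list". A minimal sketch of catching that case explicitly, assuming the model exported above and placeholder field values:

    const bModel = require('./birthdaySchema');

    async function tryAdd(userId) {
        try {
            await bModel.create({ id: userId, day: 1, month: 2, nickname: '' });
            return true;
        }
        catch (err) {
            if (err.code === 11000) {
                // 11000 is MongoDB's duplicate key error code: this id is already stored
                return false;
            }
            throw err;
        }
    }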
@@ -1,26 +0,0 @@
-[
-{
-"id": "431899299434070026",
-"day": 1,
-"month": 2,
-"nickname": "Ľaco"
-},
-{
-"id": "552222387458801676",
-"day": 3,
-"month": 3,
-"nickname": "aaaaaaaaa"
-},
-{
-"id": "368089707965448193",
-"day": 7,
-"month": 4,
-"nickname": "Peťko"
-},
-{
-"id": "246311280506437643",
-"day": 14,
-"month": 7,
-"nickname": "Martin"
-}
-]
database/database.js (new file, 35 lines)
@@ -0,0 +1,35 @@
+module.exports = {
+addToDB: addToDB,
+updateDB: updateDB,
+deleteEntry: deleteEntry,
+findById: findById,
+};
+
+async function addToDB(option, data) {
+const model = require(`./${data.name}Schema`);
+try {
+const dbEntry = await model.create({
+guild: 'global',
+name: 'Valentine\'s Day',
+day: 14,
+month: 2,
+});
+dbEntry.save();
+}
+catch (err) {
+return err;
+}
+return null;
+}
+
+async function updateDB(query) {
+}
+
+async function deleteEntry(query) {
+}
+
+async function findById(id) {
+}
+
+async function findByName(name) {
+}
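database/database.js reads as scaffolding: updateDB, deleteEntry, findById and findByName are empty, and addToDB hard-codes the Valentine's Day fields instead of using its arguments. A generic version might look like the sketch below; the `data.fields` shape is an assumption for illustration, not something this commit defines:

    async function addToDB(data) {
        // data.name selects the schema module, data.fields holds the document to insert (assumed shape)
        const model = require(`./${data.name}Schema`);
        try {
            await model.create(data.fields);
            return null;
        }
        catch (err) {
            return err;
        }
    }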
database/eventSchema.js (new file, 17 lines)
@@ -0,0 +1,17 @@
+const mongoose = require('mongoose');
+
+const Events = new mongoose.Schema({
+guild: String,
+id: {
+type: Number,
+index: true,
+unique: true,
+},
+name: String,
+day: Number,
+month: Number,
+});
+
+const eventsModule = mongoose.model('events', Events);
+
+module.exports = eventsModule;
@@ -1,18 +0,0 @@
-{
-"global": [
-{
-"id": 1431220,
-"name": "Valentine's Day",
-"day": 14,
-"month": 2
-}
-],
-"770748282191740940": [
-{
-"id": 1480921,
-"name": "Valentine's Day",
-"day": 14,
-"month": 2
-}
-]
-}
@@ -1,6 +1,6 @@
 const axios = require('axios').default;
 const Discord = require('discord.js');
-const fs = require('fs');
 require('dotenv').config();

 module.exports = {
@@ -10,7 +10,6 @@ module.exports = {
 getGifEmbed: getGifEmbed,
 getGifWithMessage: getGifWithMessage,
 returnPromiseString: returnPromiseString,
-writeToFile: writeToFile,
 };

 function randomColor() {
@@ -39,21 +38,17 @@ async function getGifEmbed(gifQuery, gifAmount) {
 const gifEmbed = new Discord.MessageEmbed()
 .setImage(gif)
 .setColor(randomColor());

 return gifEmbed;
 }

 async function getGifWithMessage(interaction, gifQuery, gifAmount) {
-const gifEmbed = getGifEmbed(gifQuery, gifAmount);
+const gifEmbed = await getGifEmbed(gifQuery, gifAmount);

-let who;
-try {
-who = interaction.options.getMentionable('who');
-}
-catch {
+const who = interaction.options.getMentionable('who');
+if (who == null) {
 return gifEmbed;
 }
-(await gifEmbed).setDescription(interaction.user.username
+gifEmbed.setDescription(interaction.user.username
 + ` ${interaction.commandName}s ` + `${who}`);
 return gifEmbed;
 }
@@ -63,14 +58,3 @@ async function returnPromiseString(guildMembers) {
 guildMembers.fetch();
 });
 }

-function writeToFile(content, path) {
-const jsonString = JSON.stringify(content, null, 4);
-let error = null;
-fs.writeFile(path, jsonString, 'utf8', (err) => {
-if (err) {
-error = 'There was an error while updating the birthday list';
-}
-});
-return error;
-}
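Since getGifEmbed is async, awaiting it once at the call site means the later setDescription call works on the resolved MessageEmbed rather than on a pending promise, which is what the simplified getGifWithMessage relies on. The writeToFile helper is removed because persistence now goes through the Mongoose models instead of JSON files.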
main.js (126 changed lines)
@@ -20,30 +20,42 @@ const client = new Client({
 });

 const fs = require('fs');
-require('dotenv').config();

 client.commands = new Collection();
 const commandFiles = fs.readdirSync('./commands')
 .filter(file => !file.includes('WIP'));

-const cron = require('node-cron');
 for (const file of commandFiles) {
 const command = require(`./commands/${file}`);
 // Set a new item in the Collection
 // With the key as the command name and the value as the exported module
 client.commands.set(command.data.name, command);
 }
+const cron = require('node-cron');
+const mongoose = require('mongoose');
+
+mongoose
+.connect(process.env.DBSRV, {
+useNewUrlParser: true,
+useUnifiedTopology: true,
+}).then(() => {
+console.log('Connected to database');
+}).catch((err) => {
+console.log(err);
+});
+
+require('dotenv').config();
 const help = require('./helpFunctions.js');
+const resp = require('./responses.js');

-client.once('ready', () => {
+client.once('ready', async () => {
 if (client.user.username != 'MOOver Debug') {
 client.channels.cache.get('780439236867653635').send('Just turned on!');
 }
 cron.schedule('0 13 * * *', async function() {
+console.log('aaaa');
 pingEvent();
 });
-console.log('Running!');
+console.log('Running!', client.user.createdAt);
 });

 client.on('messageCreate', gotMessage);
@@ -76,9 +88,9 @@ function gotMessage(message) {
 message.channel.send('https://cdn.discordapp.com' + linkArr[1]);
 }

-const chance = help.RNG(3000);
-if (chance == 1337) {
-whoAsked(message);
+const chance = help.RNG(1000);
+if (chance == 420) {
+resp.whoAsked(message);
 }

 const msg = message.content.toLowerCase();
@@ -102,30 +114,17 @@ function gotMessage(message) {

 if (!isBot) {
 if (msg.includes('henlo')) {
-henlo(message);
+resp.henlo(message);
 }
 else if (msg.includes('how ye')) {
-mood(message);
+resp.mood(message);
 }
 else if (msg.includes('tylko jedno')) {
-message.channel.send('Koksu pięć gram odlecieć sam');
+message.reply('Koksu pięć gram odlecieć sam');
 }
 }
 }

-// Responses
-function henlo(message) {
-const emojis = ['🥰', '🐄', '🐮', '❤️', '👋', '🤠', '😊'];
-const randomNum = help.RNG(emojis.length);
-message.reply('Henlooo ' + message.author.username + ' ' + emojis[randomNum]);
-}
-
-function mood(message) {
-const responses = ['Not bad, how yee?', 'MOOdy', 'A bit sad 😢', 'Good, how yee?', 'I\'m fine, how yee?'];
-const randomNum = help.RNG(responses.length);
-message.reply(responses[randomNum]);
-}

 function move(message, channelId) {
 message.react('🐮');

@@ -174,77 +173,64 @@ function move(message, channelId) {
 setTimeout(() => message.delete(), 3000);
 }

-async function whoAsked(message) {
-const searchKey = 'who-asked';
-const gifAmount = 20;
-const gifs = `https://g.tenor.com/v1/search?q=${searchKey}&key=${process.env.TENOR}&limit=${gifAmount}`;
-
-message.reply({ embeds: [help.getGifEmbed(gifs, gifAmount)] });
-}

 async function pingEvent() {
-const currentDay = new Date().getDate();
-const currentMonth = new Date().getMonth();
+const bModel = require('./database/birthdaySchema');
+const eModel = require('./database/eventSchema');

+const currentDay = new Date().getDate();
+const currentMonth = new Date().getMonth() + 1;
+
+let query = bModel.find({ day: currentDay, month: currentMonth });
+const birthdayList = await query.exec();
+
+query = eModel.find({ guild: 'global', day: currentDay, month: currentMonth });
+const globalEventList = await query.exec();
+
+console.log(birthdayList, globalEventList);
 const guildIds = [];
 const sysChannelIds = [];
 client.guilds.cache.forEach(element => {
 sysChannelIds.push(element.channels.guild.systemChannelId);
 guildIds.push(element.id);
 });
-// TODO deduplicate
-const birthdays = require('./database/birthdays.json');
-const todayBirthdays = [];
-for (let i = 0; i < birthdays.length; i++) {
-if (birthdays[i].day == currentDay && birthdays[i].month == currentMonth) {
-todayBirthdays.push((birthdays[i].id, birthdays[i].nickname));
-}
-}

+// TODO deduplicate
+const todayBirthdays = [];
 if (todayBirthdays != []) {
 for (let i = 0; i < guildIds.length; i++) {
 const guildId = guildIds[i];
 const sysChannelId = sysChannelIds[i];
 const guild = client.guilds.cache.find((g) => g.id == guildId);
-for (let j = 0; j < todayBirthdays.length; j++) {
-const userId = todayBirthdays[i][0];
+for (let j = 0; j < birthdayList.length; j++) {
+const userId = birthdayList[i].id;
 if ((await guild.members.fetch()).find(user => user.id == userId) != undefined) {
-client.channels.cache.get(sysChannelId).send(`Happy birthday <@${userId}>!`);
+const gifAmount = 12;
+const embed = await help.getGifEmbed(`https://g.tenor.com/v1/search?q=anime-hug&key=${process.env.TENOR}&limit=${gifAmount}`, gifAmount);
+embed.setDescription(`Happy Birthday <@${userId}> !!!`);
+client.channels.cache.get(sysChannelId)
+.send({ embeds: [embed] });
 }
 }
 }
 }

-const eventsJSON = require('./database/events.json');
-const globalEvents = eventsJSON.global;
-const todayGlobalEvents = [];
|
|
||||||
for (let i = 0; i < globalEvents.length; i++) {
|
|
||||||
if (globalEvents[i].day == currentDay && globalEvents[i].month == currentMonth) {
|
|
||||||
todayGlobalEvents.push((globalEvents[i].id));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < guildIds.length; i++) {
|
for (let i = 0; i < guildIds.length; i++) {
|
||||||
const guildId = guildIds[i];
|
const guildId = guildIds[i];
|
||||||
const guildEvents = eventsJSON[guild];
|
|
||||||
const sysChannelId = sysChannelIds[i];
|
const sysChannelId = sysChannelIds[i];
|
||||||
const guild = client.guilds.cache.find((g) => g.id == guildId);
|
query = eModel.find({ guild: guildId, day: currentDay, month: currentMonth });
|
||||||
if (todayGlobalEvents != []) {
|
const guildEvents = await query.exec();
|
||||||
for (let j = 0; j < todayGlobalEvents.length; j++) {
|
for (let j = 0; j < globalEventList.length; j++) {
|
||||||
let specialMessage;
|
let specialMessage = '';
|
||||||
if (todayGlobalEvents[i].name == 'Valentine\'s Day') {
|
if (globalEventList[i].name == 'Valentine\'s Day') {
|
||||||
specialMessage = '\n Don\'t forget I love you all with all my hart 🥺';
|
specialMessage = '\n Don\'t forget I love you all with all my hart 🥺';
|
||||||
}
|
|
||||||
client.channels.cache.get(sysChannelId)
|
|
||||||
.send(`It's ${todayGlobalEvents} today!` + specialMessage);
|
|
||||||
}
|
|
||||||
for (let j = 0; j < guildEvents.length; j++) {
|
|
||||||
if (guildEvents[i].day == currentDay && guildEvents[i].month == currentMonth) {
|
|
||||||
client.channels.cache.get(sysChannelId)
|
|
||||||
.send(`It's ${todayGlobalEvents} today!`);
|
|
||||||
}
|
}
|
||||||
|
client.channels.cache.get(sysChannelId)
|
||||||
|
.send(`It's **${globalEventList[i].name}** today!` + specialMessage);
|
||||||
|
}
|
||||||
|
for (let j = 0; j < guildEvents.length; j++) {
|
||||||
|
client.channels.cache.get(sysChannelId)
|
||||||
|
.send(`It's **${guildEvents[i].name}** today!`);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
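The new pingEvent() above queries two Mongoose models required from ./database/birthdaySchema and ./database/eventSchema; those schema files are added elsewhere in this commit and are not shown here. As a minimal sketch only, assuming just the fields the queries above rely on (id, day, month for birthdays; name, guild, day, month for events, with guild set to 'global' for events announced everywhere), the two modules could look roughly like this. The model names 'Birthday' and 'Event' are assumptions, not taken from the commit; the connection itself reads process.env.DBSRV, which dotenv is expected to load as a MongoDB connection string.

// Hypothetical sketch of database/birthdaySchema.js and database/eventSchema.js.
// Field names are inferred from the diff above (bModel.find({ day, month }),
// birthdayList[i].id, eModel.find({ guild, day, month }), and the .name fields
// used in the announcements); the actual files in this commit may differ.
const mongoose = require('mongoose');

// database/birthdaySchema.js
const birthdaySchema = new mongoose.Schema({
    id: String,    // Discord user id, used in the <@id> birthday mention
    day: Number,   // day of month, compared against new Date().getDate()
    month: Number, // 1-12, matching new Date().getMonth() + 1 above
});
module.exports = mongoose.model('Birthday', birthdaySchema); // model name assumed

// database/eventSchema.js
const eventSchema = new mongoose.Schema({
    name: String,  // printed as "It's **name** today!"
    guild: String, // guild id, or 'global' for events announced in every guild
    day: Number,
    month: Number,
});
module.exports = mongoose.model('Event', eventSchema); // model name assumed
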
1  node_modules/.bin/detect-libc  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../detect-libc/bin/detect-libc.js

1  node_modules/.bin/mkdirp  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../mkdirp/bin/cmd.js

1  node_modules/.bin/needle  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../needle/bin/needle

1  node_modules/.bin/node-pre-gyp  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../node-pre-gyp/bin/node-pre-gyp

1  node_modules/.bin/npm  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../npm/bin/npm-cli.js

1  node_modules/.bin/npx  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../npm/bin/npx-cli.js

1  node_modules/.bin/rc  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../rc/cli.js

1  node_modules/.bin/semver  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../semver/bin/semver.js

1  node_modules/.bin/uuid  generated  vendored  Symbolic link
@@ -0,0 +1 @@
+../uuid/dist/bin/uuid

3285  node_modules/.package-lock.json  generated  vendored
File diff suppressed because it is too large
21  node_modules/@types/debug/LICENSE  generated  vendored  Executable file
74  node_modules/@types/debug/README.md  generated  vendored  Executable file
54  node_modules/@types/debug/index.d.ts  generated  vendored  Executable file
57  node_modules/@types/debug/package.json  generated  vendored  Executable file
21  node_modules/@types/ms/LICENSE  generated  vendored  Normal file
16  node_modules/@types/ms/README.md  generated  vendored  Normal file
25  node_modules/@types/ms/index.d.ts  generated  vendored  Normal file
24  node_modules/@types/ms/package.json  generated  vendored  Normal file
21  node_modules/@types/webidl-conversions/LICENSE  generated  vendored  Executable file
16  node_modules/@types/webidl-conversions/README.md  generated  vendored  Executable file
103  node_modules/@types/webidl-conversions/index.d.ts  generated  vendored  Executable file
25  node_modules/@types/webidl-conversions/package.json  generated  vendored  Executable file
21
node_modules/@types/whatwg-url/LICENSE
generated
vendored
Executable file
21
node_modules/@types/whatwg-url/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
16
node_modules/@types/whatwg-url/README.md
generated
vendored
Executable file
16
node_modules/@types/whatwg-url/README.md
generated
vendored
Executable file
|
@ -0,0 +1,16 @@
|
||||||
|
# Installation
|
||||||
|
> `npm install --save @types/whatwg-url`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for whatwg-url (https://github.com/jsdom/whatwg-url#readme).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/whatwg-url.
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Fri, 02 Jul 2021 18:05:37 GMT
|
||||||
|
* Dependencies: [@types/webidl-conversions](https://npmjs.com/package/@types/webidl-conversions), [@types/node](https://npmjs.com/package/@types/node)
|
||||||
|
* Global values: none
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [Alexander Marks](https://github.com/aomarks), and [ExE Boss](https://github.com/ExE-Boss).
|
23
node_modules/@types/whatwg-url/dist/URL-impl.d.ts
generated
vendored
Executable file
23
node_modules/@types/whatwg-url/dist/URL-impl.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
declare class URLImpl {
|
||||||
|
constructor(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs: readonly [url: string, base?: string],
|
||||||
|
privateData?: {},
|
||||||
|
);
|
||||||
|
|
||||||
|
href: string;
|
||||||
|
readonly origin: string;
|
||||||
|
protocol: string;
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
host: string;
|
||||||
|
hostname: string;
|
||||||
|
port: string;
|
||||||
|
pathname: string;
|
||||||
|
search: string;
|
||||||
|
readonly searchParams: URLSearchParams;
|
||||||
|
hash: string;
|
||||||
|
|
||||||
|
toJSON(): string;
|
||||||
|
}
|
||||||
|
export { URLImpl as implementation };
|
76
node_modules/@types/whatwg-url/dist/URL.d.ts
generated
vendored
Executable file
76
node_modules/@types/whatwg-url/dist/URL.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,76 @@
|
||||||
|
import { Options as WebIDLConversionOptions } from "webidl-conversions";
|
||||||
|
import { URL } from "../index";
|
||||||
|
import { implementation as URLImpl } from "./URL-impl";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URL` object with an implementation
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function is(obj: unknown): obj is URL;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URLImpl` WebIDL2JS implementation object
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function isImpl(obj: unknown): obj is URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts the `URL` wrapper into a `URLImpl` object.
|
||||||
|
*
|
||||||
|
* @throws {TypeError} If `obj` is not a `URL` wrapper instance provided by this package.
|
||||||
|
*/
|
||||||
|
export function convert(obj: unknown, options?: WebIDLConversionOptions): URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URL` instance.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URL` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function create(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs: readonly [url: string, base?: string],
|
||||||
|
privateData?: {},
|
||||||
|
): URL;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls `create()` and returns the internal `URLImpl`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URL` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function createImpl(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs: readonly [url: string, base?: string],
|
||||||
|
privateData?: {},
|
||||||
|
): URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initializes the `URL` instance, called by `create()`.
|
||||||
|
*
|
||||||
|
* Useful when manually sub-classing a non-constructable wrapper object.
|
||||||
|
*/
|
||||||
|
export function setup<T extends URL>(
|
||||||
|
obj: T,
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs: readonly [url: string, base?: string],
|
||||||
|
privateData?: {},
|
||||||
|
): T;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URL` object without runing the constructor steps.
|
||||||
|
*
|
||||||
|
* Useful when implementing specifications that initialize objects
|
||||||
|
* in different ways than their constructors do.
|
||||||
|
*/
|
||||||
|
declare function _new(globalObject: object): URLImpl;
|
||||||
|
export { _new as new };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Installs the `URL` constructor onto the `globalObject`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the target `globalObject` doesn't have an `Error` constructor.
|
||||||
|
*/
|
||||||
|
export function install(globalObject: object, globalNames: readonly string[]): void;
|
23
node_modules/@types/whatwg-url/dist/URLSearchParams-impl.d.ts
generated
vendored
Executable file
23
node_modules/@types/whatwg-url/dist/URLSearchParams-impl.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
declare class URLSearchParamsImpl {
|
||||||
|
constructor(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs: readonly [
|
||||||
|
init?:
|
||||||
|
| ReadonlyArray<readonly [name: string, value: string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string,
|
||||||
|
],
|
||||||
|
privateData: { readonly doNotStripQMark?: boolean | undefined },
|
||||||
|
);
|
||||||
|
|
||||||
|
append(name: string, value: string): void;
|
||||||
|
delete(name: string): void;
|
||||||
|
get(name: string): string | null;
|
||||||
|
getAll(name: string): string[];
|
||||||
|
has(name: string): boolean;
|
||||||
|
set(name: string, value: string): void;
|
||||||
|
sort(): void;
|
||||||
|
|
||||||
|
[Symbol.iterator](): IterableIterator<[name: string, value: string]>;
|
||||||
|
}
|
||||||
|
export { URLSearchParamsImpl as implementation };
|
91
node_modules/@types/whatwg-url/dist/URLSearchParams.d.ts
generated
vendored
Executable file
91
node_modules/@types/whatwg-url/dist/URLSearchParams.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,91 @@
|
||||||
|
import { Options as WebIDLConversionOptions } from "webidl-conversions";
|
||||||
|
import { URLSearchParams } from "../index";
|
||||||
|
import { implementation as URLSearchParamsImpl } from "./URLSearchParams-impl";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URLSearchParams` object with an implementation
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function is(obj: unknown): obj is URLSearchParams;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URLSearchParamsImpl` WebIDL2JS implementation object
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function isImpl(obj: unknown): obj is URLSearchParamsImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts the `URLSearchParams` wrapper into a `URLSearchParamsImpl` object.
|
||||||
|
*
|
||||||
|
* @throws {TypeError} If `obj` is not a `URLSearchParams` wrapper instance provided by this package.
|
||||||
|
*/
|
||||||
|
export function convert(obj: unknown, options?: WebIDLConversionOptions): URLSearchParamsImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URLSearchParams` instance.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URLSearchParams` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function create(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs?: readonly [
|
||||||
|
init:
|
||||||
|
| ReadonlyArray<[name: string, value: string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string,
|
||||||
|
],
|
||||||
|
privateData?: { doNotStripQMark?: boolean | undefined },
|
||||||
|
): URLSearchParams;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls `create()` and returns the internal `URLSearchParamsImpl`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URLSearchParams` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function createImpl(
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs?: readonly [
|
||||||
|
init:
|
||||||
|
| ReadonlyArray<[name: string, value: string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string,
|
||||||
|
],
|
||||||
|
privateData?: { doNotStripQMark?: boolean | undefined },
|
||||||
|
): URLSearchParamsImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initializes the `URLSearchParams` instance, called by `create()`.
|
||||||
|
*
|
||||||
|
* Useful when manually sub-classing a non-constructable wrapper object.
|
||||||
|
*/
|
||||||
|
export function setup<T extends URLSearchParams>(
|
||||||
|
obj: T,
|
||||||
|
globalObject: object,
|
||||||
|
constructorArgs?: readonly [
|
||||||
|
init:
|
||||||
|
| ReadonlyArray<[name: string, value: string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string,
|
||||||
|
],
|
||||||
|
privateData?: { doNotStripQMark?: boolean | undefined },
|
||||||
|
): T;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URLSearchParams` object without runing the constructor steps.
|
||||||
|
*
|
||||||
|
* Useful when implementing specifications that initialize objects
|
||||||
|
* in different ways than their constructors do.
|
||||||
|
*/
|
||||||
|
declare function _new(globalObject: object): URLSearchParamsImpl;
|
||||||
|
export { _new as new };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Installs the `URLSearchParams` constructor onto the `globalObject`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the target `globalObject` doesn't have an `Error` constructor.
|
||||||
|
*/
|
||||||
|
export function install(globalObject: object, globalNames: readonly string[]): void;
|
162
node_modules/@types/whatwg-url/index.d.ts
generated
vendored
Executable file
162
node_modules/@types/whatwg-url/index.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,162 @@
|
||||||
|
// Type definitions for whatwg-url 8.2
|
||||||
|
// Project: https://github.com/jsdom/whatwg-url#readme
|
||||||
|
// Definitions by: Alexander Marks <https://github.com/aomarks>
|
||||||
|
// ExE Boss <https://github.com/ExE-Boss>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
// Minimum TypeScript Version: 3.6
|
||||||
|
|
||||||
|
/// <reference types="node"/>
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#url-representation */
|
||||||
|
export interface URLRecord {
|
||||||
|
scheme: string;
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
host: string | number | IPv6Address | null;
|
||||||
|
port: number | null;
|
||||||
|
path: string[];
|
||||||
|
query: string | null;
|
||||||
|
fragment: string | null;
|
||||||
|
cannotBeABaseURL?: boolean | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#concept-ipv6 */
|
||||||
|
export type IPv6Address = [number, number, number, number, number, number, number, number];
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#url-class */
|
||||||
|
export class URL {
|
||||||
|
constructor(url: string, base?: string | URL);
|
||||||
|
|
||||||
|
get href(): string;
|
||||||
|
set href(V: string);
|
||||||
|
|
||||||
|
get origin(): string;
|
||||||
|
|
||||||
|
get protocol(): string;
|
||||||
|
set protocol(V: string);
|
||||||
|
|
||||||
|
get username(): string;
|
||||||
|
set username(V: string);
|
||||||
|
|
||||||
|
get password(): string;
|
||||||
|
set password(V: string);
|
||||||
|
|
||||||
|
get host(): string;
|
||||||
|
set host(V: string);
|
||||||
|
|
||||||
|
get hostname(): string;
|
||||||
|
set hostname(V: string);
|
||||||
|
|
||||||
|
get port(): string;
|
||||||
|
set port(V: string);
|
||||||
|
|
||||||
|
get pathname(): string;
|
||||||
|
set pathname(V: string);
|
||||||
|
|
||||||
|
get search(): string;
|
||||||
|
set search(V: string);
|
||||||
|
|
||||||
|
get searchParams(): URLSearchParams;
|
||||||
|
|
||||||
|
get hash(): string;
|
||||||
|
set hash(V: string);
|
||||||
|
|
||||||
|
toJSON(): string;
|
||||||
|
|
||||||
|
readonly [Symbol.toStringTag]: "URL";
|
||||||
|
}
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#interface-urlsearchparams */
|
||||||
|
export class URLSearchParams {
|
||||||
|
constructor(
|
||||||
|
init?:
|
||||||
|
| ReadonlyArray<readonly [name: string, value: string]>
|
||||||
|
| Iterable<readonly [name: string, value: string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string,
|
||||||
|
);
|
||||||
|
|
||||||
|
append(name: string, value: string): void;
|
||||||
|
delete(name: string): void;
|
||||||
|
get(name: string): string | null;
|
||||||
|
getAll(name: string): string[];
|
||||||
|
has(name: string): boolean;
|
||||||
|
set(name: string, value: string): void;
|
||||||
|
sort(): void;
|
||||||
|
|
||||||
|
keys(): IterableIterator<string>;
|
||||||
|
values(): IterableIterator<string>;
|
||||||
|
entries(): IterableIterator<[name: string, value: string]>;
|
||||||
|
forEach<THIS_ARG = void>(
|
||||||
|
callback: (this: THIS_ARG, value: string, name: string, searchParams: this) => void,
|
||||||
|
thisArg?: THIS_ARG,
|
||||||
|
): void;
|
||||||
|
|
||||||
|
readonly [Symbol.toStringTag]: "URLSearchParams";
|
||||||
|
[Symbol.iterator](): IterableIterator<[name: string, value: string]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#concept-url-parser */
|
||||||
|
export function parseURL(
|
||||||
|
input: string,
|
||||||
|
options?: { readonly baseURL?: string | undefined; readonly encodingOverride?: string | undefined },
|
||||||
|
): URLRecord | null;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#concept-basic-url-parser */
|
||||||
|
export function basicURLParse(
|
||||||
|
input: string,
|
||||||
|
options?: {
|
||||||
|
baseURL?: string | undefined;
|
||||||
|
encodingOverride?: string | undefined;
|
||||||
|
url?: URLRecord | undefined;
|
||||||
|
stateOverride?: StateOverride | undefined;
|
||||||
|
},
|
||||||
|
): URLRecord | null;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#scheme-start-state */
|
||||||
|
export type StateOverride =
|
||||||
|
| "scheme start"
|
||||||
|
| "scheme"
|
||||||
|
| "no scheme"
|
||||||
|
| "special relative or authority"
|
||||||
|
| "path or authority"
|
||||||
|
| "relative"
|
||||||
|
| "relative slash"
|
||||||
|
| "special authority slashes"
|
||||||
|
| "special authority ignore slashes"
|
||||||
|
| "authority"
|
||||||
|
| "host"
|
||||||
|
| "hostname"
|
||||||
|
| "port"
|
||||||
|
| "file"
|
||||||
|
| "file slash"
|
||||||
|
| "file host"
|
||||||
|
| "path start"
|
||||||
|
| "path"
|
||||||
|
| "cannot-be-a-base-URL path"
|
||||||
|
| "query"
|
||||||
|
| "fragment";
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#concept-url-serializer */
|
||||||
|
export function serializeURL(urlRecord: URLRecord, excludeFragment?: boolean): string;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#concept-host-serializer */
|
||||||
|
export function serializeHost(host: string | number | IPv6Address): string;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#serialize-an-integer */
|
||||||
|
export function serializeInteger(number: number): string;
|
||||||
|
|
||||||
|
/** https://html.spec.whatwg.org#ascii-serialisation-of-an-origin */
|
||||||
|
export function serializeURLOrigin(urlRecord: URLRecord): string;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#set-the-username */
|
||||||
|
export function setTheUsername(urlRecord: URLRecord, username: string): void;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#set-the-password */
|
||||||
|
export function setThePassword(urlRecord: URLRecord, password: string): void;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#cannot-have-a-username-password-port */
|
||||||
|
export function cannotHaveAUsernamePasswordPort(urlRecord: URLRecord): boolean;
|
||||||
|
|
||||||
|
/** https://url.spec.whatwg.org/#percent-decode */
|
||||||
|
export function percentDecode(buffer: Extract<NodeJS.TypedArray, ArrayLike<number>>): Buffer;
|
40
node_modules/@types/whatwg-url/package.json
generated
vendored
Executable file
40
node_modules/@types/whatwg-url/package.json
generated
vendored
Executable file
|
@ -0,0 +1,40 @@
|
||||||
|
{
|
||||||
|
"name": "@types/whatwg-url",
|
||||||
|
"version": "8.2.1",
|
||||||
|
"description": "TypeScript definitions for whatwg-url",
|
||||||
|
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/whatwg-url",
|
||||||
|
"license": "MIT",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Alexander Marks",
|
||||||
|
"url": "https://github.com/aomarks",
|
||||||
|
"githubUsername": "aomarks"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "ExE Boss",
|
||||||
|
"url": "https://github.com/ExE-Boss",
|
||||||
|
"githubUsername": "ExE-Boss"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"main": "",
|
||||||
|
"types": "index.d.ts",
|
||||||
|
"typesVersions": {
|
||||||
|
"<=3.9": {
|
||||||
|
"*": [
|
||||||
|
"ts3.9/*"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||||
|
"directory": "types/whatwg-url"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*",
|
||||||
|
"@types/webidl-conversions": "*"
|
||||||
|
},
|
||||||
|
"typesPublisherContentHash": "a1260472a5aaba17ca5053dad6f0e88d68682ac63f5ddf4bc333657bdd7e9e96",
|
||||||
|
"typeScriptVersion": "3.6"
|
||||||
|
}
|
23
node_modules/@types/whatwg-url/ts3.9/dist/URL-impl.d.ts
generated
vendored
Executable file
23
node_modules/@types/whatwg-url/ts3.9/dist/URL-impl.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
declare class URLImpl {
|
||||||
|
constructor(
|
||||||
|
globalObject: object,
|
||||||
|
[url, base]: readonly [string, string?],
|
||||||
|
privateData?: {},
|
||||||
|
);
|
||||||
|
|
||||||
|
href: string;
|
||||||
|
readonly origin: string;
|
||||||
|
protocol: string;
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
host: string;
|
||||||
|
hostname: string;
|
||||||
|
port: string;
|
||||||
|
pathname: string;
|
||||||
|
search: string;
|
||||||
|
readonly searchParams: URLSearchParams;
|
||||||
|
hash: string;
|
||||||
|
|
||||||
|
toJSON(): string;
|
||||||
|
}
|
||||||
|
export { URLImpl as implementation };
|
76
node_modules/@types/whatwg-url/ts3.9/dist/URL.d.ts
generated
vendored
Executable file
76
node_modules/@types/whatwg-url/ts3.9/dist/URL.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,76 @@
|
||||||
|
import { Options as WebIDLConversionOptions } from "webidl-conversions";
|
||||||
|
import { URL } from "../index";
|
||||||
|
import { implementation as URLImpl } from "./URL-impl";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URL` object with an implementation
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function is(obj: unknown): obj is URL;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks whether `obj` is a `URLImpl` WebIDL2JS implementation object
|
||||||
|
* provided by this package.
|
||||||
|
*/
|
||||||
|
export function isImpl(obj: unknown): obj is URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts the `URL` wrapper into a `URLImpl` object.
|
||||||
|
*
|
||||||
|
* @throws {TypeError} If `obj` is not a `URL` wrapper instance provided by this package.
|
||||||
|
*/
|
||||||
|
export function convert(obj: unknown, options?: WebIDLConversionOptions): URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URL` instance.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URL` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function create(
|
||||||
|
globalObject: object,
|
||||||
|
[url, base]: readonly [string, string?],
|
||||||
|
privateData?: {},
|
||||||
|
): URL;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls `create()` and returns the internal `URLImpl`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the `globalObject` doesn't have a WebIDL2JS constructor
|
||||||
|
* registry or a `URL` constructor provided by this package
|
||||||
|
* in the WebIDL2JS constructor registry.
|
||||||
|
*/
|
||||||
|
export function createImpl(
|
||||||
|
globalObject: object,
|
||||||
|
[url, base]: readonly [string, string?],
|
||||||
|
privateData?: {},
|
||||||
|
): URLImpl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initializes the `URL` instance, called by `create()`.
|
||||||
|
*
|
||||||
|
* Useful when manually sub-classing a non-constructable wrapper object.
|
||||||
|
*/
|
||||||
|
export function setup<T extends URL>(
|
||||||
|
obj: T,
|
||||||
|
globalObject: object,
|
||||||
|
[url, base]: readonly [string, string?],
|
||||||
|
privateData?: {},
|
||||||
|
): T;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new `URL` object without runing the constructor steps.
|
||||||
|
*
|
||||||
|
* Useful when implementing specifications that initialize objects
|
||||||
|
* in different ways than their constructors do.
|
||||||
|
*/
|
||||||
|
declare function _new(globalObject: object): URLImpl;
|
||||||
|
export { _new as new };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Installs the `URL` constructor onto the `globalObject`.
|
||||||
|
*
|
||||||
|
* @throws {Error} If the target `globalObject` doesn't have an `Error` constructor.
|
||||||
|
*/
|
||||||
|
export function install(globalObject: object, globalNames: readonly string[]): void;
|
24
node_modules/@types/whatwg-url/ts3.9/dist/URLSearchParams-impl.d.ts
generated
vendored
Executable file
24
node_modules/@types/whatwg-url/ts3.9/dist/URLSearchParams-impl.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,24 @@
|
||||||
|
declare class URLSearchParamsImpl {
|
||||||
|
constructor(
|
||||||
|
globalObject: object,
|
||||||
|
[init]: readonly [
|
||||||
|
(
|
||||||
|
| ReadonlyArray<readonly [string, string]>
|
||||||
|
| { readonly [name: string]: string }
|
||||||
|
| string
|
||||||
|
)?,
|
||||||
|
],
|
||||||
|
privateData: { readonly doNotStripQMark?: boolean },
|
||||||
|
);
|
||||||
|
|
||||||
|
append(name: string, value: string): void;
|
||||||
|
delete(name: string): void;
|
||||||
|
get(name: string): string | null;
|
||||||
|
getAll(name: string): string[];
|
||||||
|
has(name: string): boolean;
|
||||||
|
set(name: string, value: string): void;
|
||||||
|
sort(): void;
|
||||||
|
|
||||||
|
[Symbol.iterator](): IterableIterator<[string, string]>;
|
||||||
|
}
|
||||||
|
export { URLSearchParamsImpl as implementation };
|
node_modules/@types/whatwg-url/ts3.9/dist/URLSearchParams.d.ts (generated, vendored, Executable file): 94 additions
node_modules/@types/whatwg-url/ts3.9/index.d.ts (generated, vendored, Executable file): 155 additions
node_modules/@types/whatwg-url/ts3.9/webidl2js-wrapper.d.ts (generated, vendored, Executable file): 4 additions
node_modules/@types/whatwg-url/webidl2js-wrapper.d.ts (generated, vendored, Executable file): 4 additions
node_modules/abbrev/LICENSE (generated, vendored, Normal file): 46 additions
node_modules/abbrev/README.md (generated, vendored, Normal file): 23 additions
node_modules/abbrev/abbrev.js (generated, vendored, Normal file): 61 additions
node_modules/abbrev/package.json (generated, vendored, Normal file): 21 additions
node_modules/aproba/LICENSE (generated, vendored, Normal file): 14 additions
node_modules/aproba/README.md (generated, vendored, Normal file): 94 additions
node_modules/aproba/index.js (generated, vendored, Normal file): 105 additions
node_modules/aproba/package.json (generated, vendored, Normal file): 34 additions
node_modules/are-we-there-yet/CHANGES.md (generated, vendored, Normal file): 37 additions
node_modules/are-we-there-yet/LICENSE (generated, vendored, Normal file): 5 additions
node_modules/are-we-there-yet/README.md (generated, vendored, Normal file): 195 additions
node_modules/are-we-there-yet/index.js (generated, vendored, Normal file): 4 additions
node_modules/are-we-there-yet/package.json (generated, vendored, Normal file): 35 additions
node_modules/are-we-there-yet/tracker-base.js (generated, vendored, Normal file): 11 additions
node_modules/are-we-there-yet/tracker-group.js (generated, vendored, Normal file): 107 additions
node_modules/are-we-there-yet/tracker-stream.js (generated, vendored, Normal file): 36 additions
node_modules/are-we-there-yet/tracker.js (generated, vendored, Normal file): 30 additions
node_modules/base64-js/LICENSE (generated, vendored, Normal file): 21 additions
node_modules/base64-js/README.md (generated, vendored, Normal file): 34 additions
node_modules/base64-js/base64js.min.js (generated, vendored, Normal file): 1 addition
node_modules/base64-js/index.d.ts (generated, vendored, Normal file): 3 additions
150
node_modules/base64-js/index.js
generated
vendored
Normal file
150
node_modules/base64-js/index.js
generated
vendored
Normal file
|
@ -0,0 +1,150 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
exports.byteLength = byteLength
|
||||||
|
exports.toByteArray = toByteArray
|
||||||
|
exports.fromByteArray = fromByteArray
|
||||||
|
|
||||||
|
var lookup = []
|
||||||
|
var revLookup = []
|
||||||
|
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
|
||||||
|
|
||||||
|
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||||
|
for (var i = 0, len = code.length; i < len; ++i) {
|
||||||
|
lookup[i] = code[i]
|
||||||
|
revLookup[code.charCodeAt(i)] = i
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support decoding URL-safe base64 strings, as Node.js does.
|
||||||
|
// See: https://en.wikipedia.org/wiki/Base64#URL_applications
|
||||||
|
revLookup['-'.charCodeAt(0)] = 62
|
||||||
|
revLookup['_'.charCodeAt(0)] = 63
|
||||||
|
|
||||||
|
function getLens (b64) {
|
||||||
|
var len = b64.length
|
||||||
|
|
||||||
|
if (len % 4 > 0) {
|
||||||
|
    throw new Error('Invalid string. Length must be a multiple of 4')
  }

  // Trim off extra bytes after placeholder bytes are found
  // See: https://github.com/beatgammit/base64-js/issues/42
  var validLen = b64.indexOf('=')
  if (validLen === -1) validLen = len

  var placeHoldersLen = validLen === len
    ? 0
    : 4 - (validLen % 4)

  return [validLen, placeHoldersLen]
}

// base64 is 4/3 + up to two characters of the original data
function byteLength (b64) {
  var lens = getLens(b64)
  var validLen = lens[0]
  var placeHoldersLen = lens[1]
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}

function _byteLength (b64, validLen, placeHoldersLen) {
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}

function toByteArray (b64) {
  var tmp
  var lens = getLens(b64)
  var validLen = lens[0]
  var placeHoldersLen = lens[1]

  var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))

  var curByte = 0

  // if there are placeholders, only get up to the last complete 4 chars
  var len = placeHoldersLen > 0
    ? validLen - 4
    : validLen

  var i
  for (i = 0; i < len; i += 4) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)]
    arr[curByte++] = (tmp >> 16) & 0xFF
    arr[curByte++] = (tmp >> 8) & 0xFF
    arr[curByte++] = tmp & 0xFF
  }

  if (placeHoldersLen === 2) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 2) |
      (revLookup[b64.charCodeAt(i + 1)] >> 4)
    arr[curByte++] = tmp & 0xFF
  }

  if (placeHoldersLen === 1) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2)
    arr[curByte++] = (tmp >> 8) & 0xFF
    arr[curByte++] = tmp & 0xFF
  }

  return arr
}

function tripletToBase64 (num) {
  return lookup[num >> 18 & 0x3F] +
    lookup[num >> 12 & 0x3F] +
    lookup[num >> 6 & 0x3F] +
    lookup[num & 0x3F]
}

function encodeChunk (uint8, start, end) {
  var tmp
  var output = []
  for (var i = start; i < end; i += 3) {
    tmp =
      ((uint8[i] << 16) & 0xFF0000) +
      ((uint8[i + 1] << 8) & 0xFF00) +
      (uint8[i + 2] & 0xFF)
    output.push(tripletToBase64(tmp))
  }
  return output.join('')
}

function fromByteArray (uint8) {
  var tmp
  var len = uint8.length
  var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
  var parts = []
  var maxChunkLength = 16383 // must be multiple of 3

  // go through the array every three bytes, we'll deal with trailing stuff later
  for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
    parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  if (extraBytes === 1) {
    tmp = uint8[len - 1]
    parts.push(
      lookup[tmp >> 2] +
      lookup[(tmp << 4) & 0x3F] +
      '=='
    )
  } else if (extraBytes === 2) {
    tmp = (uint8[len - 2] << 8) + uint8[len - 1]
    parts.push(
      lookup[tmp >> 10] +
      lookup[(tmp >> 4) & 0x3F] +
      lookup[(tmp << 2) & 0x3F] +
      '='
    )
  }

  return parts.join('')
}
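To make the two exported halves above concrete, here is a minimal round-trip sketch; it assumes this vendored copy resolves as the `base64-js` package and uses only the exports defined in this file (`fromByteArray`, `toByteArray`, `byteLength`):

```js
// Encode a byte array to base64 and decode it back.
const base64js = require('base64-js')

const bytes = Uint8Array.from([104, 101, 108, 108, 111]) // "hello"
const encoded = base64js.fromByteArray(bytes)            // 'aGVsbG8='
const decoded = base64js.toByteArray(encoded)            // Uint8Array [104, 101, 108, 108, 111]

console.log(encoded, decoded)
console.log(base64js.byteLength(encoded))                // 5 decoded bytes
```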
47
node_modules/base64-js/package.json
generated
vendored
Normal file
@@ -0,0 +1,47 @@
{
  "name": "base64-js",
  "description": "Base64 encoding/decoding in pure JS",
  "version": "1.5.1",
  "author": "T. Jameson Little <t.jameson.little@gmail.com>",
  "typings": "index.d.ts",
  "bugs": {
    "url": "https://github.com/beatgammit/base64-js/issues"
  },
  "devDependencies": {
    "babel-minify": "^0.5.1",
    "benchmark": "^2.1.4",
    "browserify": "^16.3.0",
    "standard": "*",
    "tape": "4.x"
  },
  "homepage": "https://github.com/beatgammit/base64-js",
  "keywords": [
    "base64"
  ],
  "license": "MIT",
  "main": "index.js",
  "repository": {
    "type": "git",
    "url": "git://github.com/beatgammit/base64-js.git"
  },
  "scripts": {
    "build": "browserify -s base64js -r ./ | minify > base64js.min.js",
    "lint": "standard",
    "test": "npm run lint && npm run unit",
    "unit": "tape test/*.js"
  },
  "funding": [
    {
      "type": "github",
      "url": "https://github.com/sponsors/feross"
    },
    {
      "type": "patreon",
      "url": "https://www.patreon.com/feross"
    },
    {
      "type": "consulting",
      "url": "https://feross.org/support"
    }
  ]
}
201
node_modules/bson/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
376
node_modules/bson/README.md
generated
vendored
Normal file
@@ -0,0 +1,376 @@
# BSON parser

BSON is short for "Binary JSON," and is the binary-encoded serialization of JSON-like documents. You can learn more about it in [the specification](http://bsonspec.org).

This browser version of the BSON parser is compiled using [rollup](https://rollupjs.org/) and the current version is pre-compiled in the `dist` directory.

This is the default BSON parser, however, there is a C++ Node.js addon version as well that does not support the browser. It can be found at [mongodb-js/bson-ext](https://github.com/mongodb-js/bson-ext).

### Table of Contents
- [Usage](#usage)
- [Bugs/Feature Requests](#bugs--feature-requests)
- [Installation](#installation)
- [Documentation](#documentation)
- [FAQ](#faq)

## Bugs / Feature Requests

Think you've found a bug? Want to see a new feature in `bson`? Please open a case in our issue management tool, JIRA:

1. Create an account and login: [jira.mongodb.org](https://jira.mongodb.org)
2. Navigate to the NODE project: [jira.mongodb.org/browse/NODE](https://jira.mongodb.org/browse/NODE)
3. Click **Create Issue** - Please provide as much information as possible about the issue and how to reproduce it.

Bug reports in JIRA for all driver projects (i.e. NODE, PYTHON, CSHARP, JAVA) and the Core Server (i.e. SERVER) project are **public**.

## Usage

To build a new version perform the following operations:

```
npm install
npm run build
```

### Node (no bundling)
A simple example of how to use BSON in `Node.js`:

```js
const BSON = require('bson');
const Long = BSON.Long;

// Serialize a document
const doc = { long: Long.fromNumber(100) };
const data = BSON.serialize(doc);
console.log('data:', data);

// Deserialize the resulting Buffer
const doc_2 = BSON.deserialize(data);
console.log('doc_2:', doc_2);
```

### Browser (no bundling)

If you are not using a bundler like webpack, you can include `dist/bson.bundle.js` using a script tag. It includes polyfills for built-in node types like `Buffer`.

```html
<script src="./dist/bson.bundle.js"></script>

<script>
  function start() {
    // Get the Long type
    const Long = BSON.Long;

    // Serialize a document
    const doc = { long: Long.fromNumber(100) }
    const data = BSON.serialize(doc);
    console.log('data:', data);

    // Deserialize it again
    const doc_2 = BSON.deserialize(data);
    console.log('doc_2:', doc_2);
  }
</script>
```

### Using webpack

If using webpack, you can use your normal import/require syntax of your project to pull in the `bson` library.

ES6 Example:

```js
import { Long, serialize, deserialize } from 'bson';

// Serialize a document
const doc = { long: Long.fromNumber(100) };
const data = serialize(doc);
console.log('data:', data);

// Deserialize it again
const doc_2 = deserialize(data);
console.log('doc_2:', doc_2);
```

ES5 Example:

```js
const BSON = require('bson');
const Long = BSON.Long;

// Serialize a document
const doc = { long: Long.fromNumber(100) };
const data = BSON.serialize(doc);
console.log('data:', data);

// Deserialize the resulting Buffer
const doc_2 = BSON.deserialize(data);
console.log('doc_2:', doc_2);
```

Depending on your settings, webpack will under the hood resolve to one of the following:

- `dist/bson.browser.esm.js` If your project is in the browser and using ES6 modules (Default for `webworker` and `web` targets)
- `dist/bson.browser.umd.js` If your project is in the browser and not using ES6 modules
- `dist/bson.esm.js` If your project is in Node.js and using ES6 modules (Default for `node` targets)
- `lib/bson.js` (the normal include path) If your project is in Node.js and not using ES6 modules

For more information, see [this page on webpack's `resolve.mainFields`](https://webpack.js.org/configuration/resolve/#resolvemainfields) and [the `package.json` for this project](./package.json#L52)
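As a rough illustration of the resolution rules above, the bundle webpack picks can be steered through `resolve.mainFields`; the field order below is only an example of typical browser-first settings, not something this README prescribes:

```js
// webpack.config.js (sketch): prefer browser builds when bundling bson for the web
module.exports = {
  target: 'web',
  resolve: {
    // package.json fields consulted, in order, when resolving `import ... from 'bson'`
    mainFields: ['browser', 'module', 'main']
  }
};
```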
### Usage with Angular

Starting with Angular 6, Angular CLI removed the shim for `global` and other node built-in variables (original comment [here](https://github.com/angular/angular-cli/issues/9827#issuecomment-386154063)). If you are using BSON with Angular, you may need to add the following shim to your `polyfills.ts` file:

```js
// In polyfills.ts
(window as any).global = window;
```

- [Original Comment by Angular CLI](https://github.com/angular/angular-cli/issues/9827#issuecomment-386154063)
- [Original Source for Solution](https://stackoverflow.com/a/50488337/4930088)

## Installation

`npm install bson`

## Documentation

### Objects

<dl>
<dt><a href="#EJSON">EJSON</a> : <code>object</code></dt>
<dd></dd>
</dl>

### Functions

<dl>
<dt><a href="#setInternalBufferSize">setInternalBufferSize(size)</a></dt>
<dd><p>Sets the size of the internal serialization buffer.</p>
</dd>
<dt><a href="#serialize">serialize(object)</a> ⇒ <code>Buffer</code></dt>
<dd><p>Serialize a Javascript object.</p>
</dd>
<dt><a href="#serializeWithBufferAndIndex">serializeWithBufferAndIndex(object, buffer)</a> ⇒ <code>Number</code></dt>
<dd><p>Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization.</p>
</dd>
<dt><a href="#deserialize">deserialize(buffer)</a> ⇒ <code>Object</code></dt>
<dd><p>Deserialize data as BSON.</p>
</dd>
<dt><a href="#calculateObjectSize">calculateObjectSize(object)</a> ⇒ <code>Number</code></dt>
<dd><p>Calculate the bson size for a passed in Javascript object.</p>
</dd>
<dt><a href="#deserializeStream">deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex, [options])</a> ⇒ <code>Number</code></dt>
<dd><p>Deserialize stream data as BSON documents.</p>
</dd>
</dl>

<a name="EJSON"></a>

### EJSON

* [EJSON](#EJSON)

    * [.parse(text, [options])](#EJSON.parse)

    * [.stringify(value, [replacer], [space], [options])](#EJSON.stringify)

    * [.serialize(bson, [options])](#EJSON.serialize)

    * [.deserialize(ejson, [options])](#EJSON.deserialize)


<a name="EJSON.parse"></a>

#### *EJSON*.parse(text, [options])

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| text | <code>string</code> | | |
| [options] | <code>object</code> | | Optional settings |
| [options.relaxed] | <code>boolean</code> | <code>true</code> | Attempt to return native JS types where possible, rather than BSON types (if true) |

Parse an Extended JSON string, constructing the JavaScript value or object described by that string.

**Example**
```js
const { EJSON } = require('bson');
const text = '{ "int32": { "$numberInt": "10" } }';

// prints { int32: { [String: '10'] _bsontype: 'Int32', value: '10' } }
console.log(EJSON.parse(text, { relaxed: false }));

// prints { int32: 10 }
console.log(EJSON.parse(text));
```
<a name="EJSON.stringify"></a>

#### *EJSON*.stringify(value, [replacer], [space], [options])

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| value | <code>object</code> | | The value to convert to extended JSON |
| [replacer] | <code>function</code> \| <code>array</code> | | A function that alters the behavior of the stringification process, or an array of String and Number objects that serve as a whitelist for selecting/filtering the properties of the value object to be included in the JSON string. If this value is null or not provided, all properties of the object are included in the resulting JSON string |
| [space] | <code>string</code> \| <code>number</code> | | A String or Number object that's used to insert white space into the output JSON string for readability purposes. |
| [options] | <code>object</code> | | Optional settings |
| [options.relaxed] | <code>boolean</code> | <code>true</code> | Enables Extended JSON's `relaxed` mode |
| [options.legacy] | <code>boolean</code> | <code>true</code> | Output in Extended JSON v1 |

Converts a BSON document to an Extended JSON string, optionally replacing values if a replacer function is specified or optionally including only the specified properties if a replacer array is specified.

**Example**
```js
const { EJSON } = require('bson');
const Int32 = require('mongodb').Int32;
const doc = { int32: new Int32(10) };

// prints '{"int32":{"$numberInt":"10"}}'
console.log(EJSON.stringify(doc, { relaxed: false }));

// prints '{"int32":10}'
console.log(EJSON.stringify(doc));
```
<a name="EJSON.serialize"></a>

#### *EJSON*.serialize(bson, [options])

| Param | Type | Description |
| --- | --- | --- |
| bson | <code>object</code> | The object to serialize |
| [options] | <code>object</code> | Optional settings passed to the `stringify` function |

Serializes an object to an Extended JSON string, and reparses it as a JavaScript object.

<a name="EJSON.deserialize"></a>

#### *EJSON*.deserialize(ejson, [options])

| Param | Type | Description |
| --- | --- | --- |
| ejson | <code>object</code> | The Extended JSON object to deserialize |
| [options] | <code>object</code> | Optional settings passed to the parse method |

Deserializes an Extended JSON object into a plain JavaScript object with native/BSON types
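Unlike `parse` and `stringify`, the two methods above ship without an inline example in this README; the following minimal sketch (written against the same `bson` API documented here, with `relaxed: false` chosen only to make the output explicit) shows a round trip:

```js
const { EJSON, Int32 } = require('bson');

const doc = { int32: new Int32(10) };

// serialize: BSON types -> plain Extended JSON structure
const ejsonDoc = EJSON.serialize(doc, { relaxed: false });
console.log(ejsonDoc); // { int32: { $numberInt: '10' } }

// deserialize: Extended JSON structure -> BSON types
const roundTripped = EJSON.deserialize(ejsonDoc, { relaxed: false });
console.log(roundTripped); // { int32: <Int32 wrapper> } again
```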
<a name="setInternalBufferSize"></a>

### setInternalBufferSize(size)

| Param | Type | Description |
| --- | --- | --- |
| size | <code>number</code> | The desired size for the internal serialization buffer |

Sets the size of the internal serialization buffer.

<a name="serialize"></a>

### serialize(object)

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| object | <code>Object</code> | | the Javascript object to serialize. |
| [options.checkKeys] | <code>Boolean</code> | | the serializer will check if keys are valid. |
| [options.serializeFunctions] | <code>Boolean</code> | <code>false</code> | serialize the javascript functions **(default:false)**. |
| [options.ignoreUndefined] | <code>Boolean</code> | <code>true</code> | ignore undefined fields **(default:true)**. |

Serialize a Javascript object.

**Returns**: <code>Buffer</code> - returns the Buffer object containing the serialized object.
<a name="serializeWithBufferAndIndex"></a>

### serializeWithBufferAndIndex(object, buffer)

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| object | <code>Object</code> | | the Javascript object to serialize. |
| buffer | <code>Buffer</code> | | the Buffer you pre-allocated to store the serialized BSON object. |
| [options.checkKeys] | <code>Boolean</code> | | the serializer will check if keys are valid. |
| [options.serializeFunctions] | <code>Boolean</code> | <code>false</code> | serialize the javascript functions **(default:false)**. |
| [options.ignoreUndefined] | <code>Boolean</code> | <code>true</code> | ignore undefined fields **(default:true)**. |
| [options.index] | <code>Number</code> | | the index in the buffer where we wish to start serializing into. |

Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization.

**Returns**: <code>Number</code> - returns the index pointing to the last written byte in the buffer.
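A short sketch of the pre-allocation pattern this function is intended for; the buffer size and slicing below are illustrative assumptions rather than requirements from the table above:

```js
const BSON = require('bson');

// Reuse one pre-allocated buffer for many small documents.
const buffer = Buffer.alloc(1024);

const endIndex = BSON.serializeWithBufferAndIndex({ a: 1 }, buffer, { index: 0 });
// endIndex is the last written byte, so the document occupies buffer[0..endIndex]
const doc = BSON.deserialize(buffer.slice(0, endIndex + 1));
console.log(doc); // { a: 1 }
```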
<a name="deserialize"></a>

### deserialize(buffer)

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| buffer | <code>Buffer</code> | | the buffer containing the serialized set of BSON documents. |
| [options.evalFunctions] | <code>Object</code> | <code>false</code> | evaluate functions in the BSON document scoped to the object deserialized. |
| [options.cacheFunctions] | <code>Object</code> | <code>false</code> | cache evaluated functions for reuse. |
| [options.promoteLongs] | <code>Object</code> | <code>true</code> | when deserializing, a Long will be fit into a Number if it's smaller than 53 bits. |
| [options.promoteBuffers] | <code>Object</code> | <code>false</code> | when deserializing, a Binary will be returned as a node.js Buffer instance. |
| [options.promoteValues] | <code>Object</code> | <code>false</code> | when deserializing, promote BSON values to their closest Node.js equivalent types. |
| [options.fieldsAsRaw] | <code>Object</code> | <code></code> | allows specifying which fields to return as unserialized raw buffers. |
| [options.bsonRegExp] | <code>Object</code> | <code>false</code> | return BSON regular expressions as BSONRegExp instances. |
| [options.allowObjectSmallerThanBufferSize] | <code>boolean</code> | <code>false</code> | allows the buffer to be larger than the parsed BSON object |

Deserialize data as BSON.

**Returns**: <code>Object</code> - returns the deserialized Javascript Object.
<a name="calculateObjectSize"></a>

### calculateObjectSize(object)

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| object | <code>Object</code> | | the Javascript object to calculate the BSON byte size for. |
| [options.serializeFunctions] | <code>Boolean</code> | <code>false</code> | serialize the javascript functions **(default:false)**. |
| [options.ignoreUndefined] | <code>Boolean</code> | <code>true</code> | ignore undefined fields **(default:true)**. |

Calculate the bson size for a passed in Javascript object.

**Returns**: <code>Number</code> - returns the number of bytes the BSON object will take up.
<a name="deserializeStream"></a>

### deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex, [options])

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| data | <code>Buffer</code> | | the buffer containing the serialized set of BSON documents. |
| startIndex | <code>Number</code> | | the start index in the data Buffer where the deserialization is to start. |
| numberOfDocuments | <code>Number</code> | | number of documents to deserialize. |
| documents | <code>Array</code> | | an array where to store the deserialized documents. |
| docStartIndex | <code>Number</code> | | the index in the documents array from where to start inserting documents. |
| [options] | <code>Object</code> | | additional options used for the deserialization. |
| [options.evalFunctions] | <code>Object</code> | <code>false</code> | evaluate functions in the BSON document scoped to the object deserialized. |
| [options.cacheFunctions] | <code>Object</code> | <code>false</code> | cache evaluated functions for reuse. |
| [options.promoteLongs] | <code>Object</code> | <code>true</code> | when deserializing, a Long will be fit into a Number if it's smaller than 53 bits. |
| [options.promoteBuffers] | <code>Object</code> | <code>false</code> | when deserializing, a Binary will be returned as a node.js Buffer instance. |
| [options.promoteValues] | <code>Object</code> | <code>false</code> | when deserializing, promote BSON values to their closest Node.js equivalent types. |
| [options.fieldsAsRaw] | <code>Object</code> | <code></code> | allows specifying which fields to return as unserialized raw buffers. |
| [options.bsonRegExp] | <code>Object</code> | <code>false</code> | return BSON regular expressions as BSONRegExp instances. |

Deserialize stream data as BSON documents.

**Returns**: <code>Number</code> - returns the next index in the buffer after deserialization of **x** numbers of documents.
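The long parameter list above is easier to read next to a concrete call; in this sketch (document contents invented purely for illustration) two BSON documents are written back to back and then pulled out of the same buffer:

```js
const BSON = require('bson');

// Two BSON documents concatenated into a single buffer.
const data = Buffer.concat([BSON.serialize({ a: 1 }), BSON.serialize({ b: 2 })]);

const documents = [];
const nextIndex = BSON.deserializeStream(data, 0, 2, documents, 0, {});

console.log(documents); // [ { a: 1 }, { b: 2 } ]
console.log(nextIndex); // index in `data` just past the second document
```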
## FAQ

#### Why does `undefined` get converted to `null`?

The `undefined` BSON type has been [deprecated for many years](http://bsonspec.org/spec.html), so this library has dropped support for it. Use the `ignoreUndefined` option (for example, from the [driver](http://mongodb.github.io/node-mongodb-native/2.2/api/MongoClient.html#connect)) to instead remove `undefined` keys.
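A small sketch of the behaviour described above, using the `ignoreUndefined` serializer option from the tables earlier in this document:

```js
const BSON = require('bson');

const doc = { a: 1, b: undefined };

// Keep the key and store null in its place.
console.log(BSON.deserialize(BSON.serialize(doc, { ignoreUndefined: false })));
// { a: 1, b: null }

// Drop undefined keys entirely (the documented default).
console.log(BSON.deserialize(BSON.serialize(doc, { ignoreUndefined: true })));
// { a: 1 }
```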
#### How do I add custom serialization logic?

This library looks for `toBSON()` functions on every path, and calls the `toBSON()` function to get the value to serialize.

```javascript
const BSON = require('bson');

class CustomSerialize {
  toBSON() {
    return 42;
  }
}

const obj = { answer: new CustomSerialize() };
// "{ answer: 42 }"
console.log(BSON.deserialize(BSON.serialize(obj)));
```
26
node_modules/bson/bower.json
generated
vendored
Normal file
@@ -0,0 +1,26 @@
{
  "name": "bson",
  "description": "A bson parser for node.js and the browser",
  "keywords": [
    "mongodb",
    "bson",
    "parser"
  ],
  "author": "Christian Amor Kvalheim <christkv@gmail.com>",
  "main": "./dist/bson.js",
  "license": "Apache-2.0",
  "moduleType": [
    "globals",
    "node"
  ],
  "ignore": [
    "**/.*",
    "alternate_parsers",
    "benchmarks",
    "bower_components",
    "node_modules",
    "test",
    "tools"
  ],
  "version": "4.6.1"
}
1118
node_modules/bson/bson.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
7501
node_modules/bson/dist/bson.browser.esm.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/bson/dist/bson.browser.esm.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7568
node_modules/bson/dist/bson.browser.umd.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/bson/dist/bson.browser.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7567
node_modules/bson/dist/bson.bundle.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/bson/dist/bson.bundle.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5470
node_modules/bson/dist/bson.esm.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/bson/dist/bson.esm.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
19
node_modules/bson/etc/prepare.js
generated
vendored
Executable file
@@ -0,0 +1,19 @@
#! /usr/bin/env node
var cp = require('child_process');
var fs = require('fs');

var nodeMajorVersion = +process.version.match(/^v(\d+)\.\d+/)[1];

if (fs.existsSync('src') && nodeMajorVersion >= 10) {
  cp.spawnSync('npm', ['run', 'build'], { stdio: 'inherit', shell: true });
} else {
  if (!fs.existsSync('lib')) {
    console.warn('BSON: No compiled javascript present, the library is not installed correctly.');
    if (nodeMajorVersion < 10) {
      console.warn(
        'This library can only be compiled in nodejs version 10 or later, currently running: ' +
          nodeMajorVersion
      );
    }
  }
}
239
node_modules/bson/lib/binary.js
generated
vendored
Normal file
@@ -0,0 +1,239 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Binary = void 0;
var buffer_1 = require("buffer");
var ensure_buffer_1 = require("./ensure_buffer");
var uuid_utils_1 = require("./uuid_utils");
var uuid_1 = require("./uuid");
var error_1 = require("./error");
/**
 * A class representation of the BSON Binary type.
 * @public
 */
var Binary = /** @class */ (function () {
    /**
     * @param buffer - a buffer object containing the binary data.
     * @param subType - the optional binary type.
     */
    function Binary(buffer, subType) {
        if (!(this instanceof Binary))
            return new Binary(buffer, subType);
        if (!(buffer == null) &&
            !(typeof buffer === 'string') &&
            !ArrayBuffer.isView(buffer) &&
            !(buffer instanceof ArrayBuffer) &&
            !Array.isArray(buffer)) {
            throw new error_1.BSONTypeError('Binary can only be constructed from string, Buffer, TypedArray, or Array<number>');
        }
        this.sub_type = subType !== null && subType !== void 0 ? subType : Binary.BSON_BINARY_SUBTYPE_DEFAULT;
        if (buffer == null) {
            // create an empty binary buffer
            this.buffer = buffer_1.Buffer.alloc(Binary.BUFFER_SIZE);
            this.position = 0;
        }
        else {
            if (typeof buffer === 'string') {
                // string
                this.buffer = buffer_1.Buffer.from(buffer, 'binary');
            }
            else if (Array.isArray(buffer)) {
                // number[]
                this.buffer = buffer_1.Buffer.from(buffer);
            }
            else {
                // Buffer | TypedArray | ArrayBuffer
                this.buffer = ensure_buffer_1.ensureBuffer(buffer);
            }
            this.position = this.buffer.byteLength;
        }
    }
    /**
     * Updates this binary with byte_value.
     *
     * @param byteValue - a single byte we wish to write.
     */
    Binary.prototype.put = function (byteValue) {
        // If it's a string and has more than one character throw an error
        if (typeof byteValue === 'string' && byteValue.length !== 1) {
            throw new error_1.BSONTypeError('only accepts single character String');
        }
        else if (typeof byteValue !== 'number' && byteValue.length !== 1)
            throw new error_1.BSONTypeError('only accepts single character Uint8Array or Array');
        // Decode the byte value once
        var decodedByte;
        if (typeof byteValue === 'string') {
            decodedByte = byteValue.charCodeAt(0);
        }
        else if (typeof byteValue === 'number') {
            decodedByte = byteValue;
        }
        else {
            decodedByte = byteValue[0];
        }
        if (decodedByte < 0 || decodedByte > 255) {
            throw new error_1.BSONTypeError('only accepts number in a valid unsigned byte range 0-255');
        }
        if (this.buffer.length > this.position) {
            this.buffer[this.position++] = decodedByte;
        }
        else {
            var buffer = buffer_1.Buffer.alloc(Binary.BUFFER_SIZE + this.buffer.length);
            // Combine the two buffers together
            this.buffer.copy(buffer, 0, 0, this.buffer.length);
            this.buffer = buffer;
            this.buffer[this.position++] = decodedByte;
        }
    };
    /**
     * Writes a buffer or string to the binary.
     *
     * @param sequence - a string or buffer to be written to the Binary BSON object.
     * @param offset - the offset in the Binary at which to write the content.
     */
    Binary.prototype.write = function (sequence, offset) {
        offset = typeof offset === 'number' ? offset : this.position;
        // If the buffer is too small let's extend the buffer
        if (this.buffer.length < offset + sequence.length) {
            var buffer = buffer_1.Buffer.alloc(this.buffer.length + sequence.length);
            this.buffer.copy(buffer, 0, 0, this.buffer.length);
            // Assign the new buffer
            this.buffer = buffer;
        }
        if (ArrayBuffer.isView(sequence)) {
            this.buffer.set(ensure_buffer_1.ensureBuffer(sequence), offset);
            this.position =
                offset + sequence.byteLength > this.position ? offset + sequence.length : this.position;
        }
        else if (typeof sequence === 'string') {
            this.buffer.write(sequence, offset, sequence.length, 'binary');
            this.position =
                offset + sequence.length > this.position ? offset + sequence.length : this.position;
        }
    };
    /**
     * Reads **length** bytes starting at **position**.
     *
     * @param position - read from the given position in the Binary.
     * @param length - the number of bytes to read.
     */
    Binary.prototype.read = function (position, length) {
        length = length && length > 0 ? length : this.position;
        // Let's return the data based on the type we have
        return this.buffer.slice(position, position + length);
    };
    /**
     * Returns the value of this binary as a string.
     * @param asRaw - Will skip converting to a string
     * @remarks
     * This is handy when calling this function conditionally for some key value pairs and not others
     */
    Binary.prototype.value = function (asRaw) {
        asRaw = !!asRaw;
        // Optimize to serialize for the situation where the data == size of buffer
        if (asRaw && this.buffer.length === this.position) {
            return this.buffer;
        }
        // If it's a node.js buffer object
        if (asRaw) {
            return this.buffer.slice(0, this.position);
        }
        return this.buffer.toString('binary', 0, this.position);
    };
    /** the length of the binary sequence */
    Binary.prototype.length = function () {
        return this.position;
    };
    Binary.prototype.toJSON = function () {
        return this.buffer.toString('base64');
    };
    Binary.prototype.toString = function (format) {
        return this.buffer.toString(format);
    };
    /** @internal */
    Binary.prototype.toExtendedJSON = function (options) {
        options = options || {};
        var base64String = this.buffer.toString('base64');
        var subType = Number(this.sub_type).toString(16);
        if (options.legacy) {
            return {
                $binary: base64String,
                $type: subType.length === 1 ? '0' + subType : subType
            };
        }
        return {
            $binary: {
                base64: base64String,
                subType: subType.length === 1 ? '0' + subType : subType
            }
        };
    };
    Binary.prototype.toUUID = function () {
        if (this.sub_type === Binary.SUBTYPE_UUID) {
            return new uuid_1.UUID(this.buffer.slice(0, this.position));
        }
        throw new error_1.BSONError("Binary sub_type \"" + this.sub_type + "\" is not supported for converting to UUID. Only \"" + Binary.SUBTYPE_UUID + "\" is currently supported.");
    };
    /** @internal */
    Binary.fromExtendedJSON = function (doc, options) {
        options = options || {};
        var data;
        var type;
        if ('$binary' in doc) {
            if (options.legacy && typeof doc.$binary === 'string' && '$type' in doc) {
                type = doc.$type ? parseInt(doc.$type, 16) : 0;
                data = buffer_1.Buffer.from(doc.$binary, 'base64');
            }
            else {
                if (typeof doc.$binary !== 'string') {
                    type = doc.$binary.subType ? parseInt(doc.$binary.subType, 16) : 0;
                    data = buffer_1.Buffer.from(doc.$binary.base64, 'base64');
                }
            }
        }
        else if ('$uuid' in doc) {
            type = 4;
            data = uuid_utils_1.uuidHexStringToBuffer(doc.$uuid);
        }
        if (!data) {
            throw new error_1.BSONTypeError("Unexpected Binary Extended JSON format " + JSON.stringify(doc));
        }
        return new Binary(data, type);
    };
    /** @internal */
    Binary.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
        return this.inspect();
    };
    Binary.prototype.inspect = function () {
        var asBuffer = this.value(true);
        return "new Binary(Buffer.from(\"" + asBuffer.toString('hex') + "\", \"hex\"), " + this.sub_type + ")";
    };
    /**
     * Binary default subtype
     * @internal
     */
    Binary.BSON_BINARY_SUBTYPE_DEFAULT = 0;
    /** Initial buffer default size */
    Binary.BUFFER_SIZE = 256;
    /** Default BSON type */
    Binary.SUBTYPE_DEFAULT = 0;
    /** Function BSON type */
    Binary.SUBTYPE_FUNCTION = 1;
    /** Byte Array BSON type */
    Binary.SUBTYPE_BYTE_ARRAY = 2;
    /** Deprecated UUID BSON type @deprecated Please use SUBTYPE_UUID */
    Binary.SUBTYPE_UUID_OLD = 3;
    /** UUID BSON type */
    Binary.SUBTYPE_UUID = 4;
    /** MD5 BSON type */
    Binary.SUBTYPE_MD5 = 5;
    /** Encrypted BSON type */
    Binary.SUBTYPE_ENCRYPTED = 6;
    /** Column BSON type */
    Binary.SUBTYPE_COLUMN = 7;
    /** User BSON type */
    Binary.SUBTYPE_USER_DEFINED = 128;
    return Binary;
}());
exports.Binary = Binary;
Object.defineProperty(Binary.prototype, '_bsontype', { value: 'Binary' });
//# sourceMappingURL=binary.js.map
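A brief usage sketch of the `Binary` class compiled above; the byte values and calls are chosen only to illustrate the `put`/`read`/`value` methods defined in this file:

```js
const { Binary } = require('bson');

// Wrap some bytes in a Binary with the default subtype (0).
const bin = new Binary(Buffer.from([0xde, 0xad, 0xbe, 0xef]));

bin.put(0xff);                // append a single byte
console.log(bin.length());    // 5
console.log(bin.read(0, 2));  // <Buffer de ad>
console.log(bin.value(true)); // <Buffer de ad be ef ff> – the bytes written so far
```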
1
node_modules/bson/lib/binary.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
252
node_modules/bson/lib/bson.js
generated
vendored
Normal file
@@ -0,0 +1,252 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BSONRegExp = exports.MaxKey = exports.MinKey = exports.Int32 = exports.Double = exports.Timestamp = exports.Long = exports.UUID = exports.ObjectId = exports.Binary = exports.DBRef = exports.BSONSymbol = exports.Map = exports.Code = exports.LongWithoutOverridesClass = exports.EJSON = exports.BSON_INT64_MIN = exports.BSON_INT64_MAX = exports.BSON_INT32_MIN = exports.BSON_INT32_MAX = exports.BSON_DATA_UNDEFINED = exports.BSON_DATA_TIMESTAMP = exports.BSON_DATA_SYMBOL = exports.BSON_DATA_STRING = exports.BSON_DATA_REGEXP = exports.BSON_DATA_OID = exports.BSON_DATA_OBJECT = exports.BSON_DATA_NUMBER = exports.BSON_DATA_NULL = exports.BSON_DATA_MIN_KEY = exports.BSON_DATA_MAX_KEY = exports.BSON_DATA_LONG = exports.BSON_DATA_INT = exports.BSON_DATA_DECIMAL128 = exports.BSON_DATA_DBPOINTER = exports.BSON_DATA_DATE = exports.BSON_DATA_CODE_W_SCOPE = exports.BSON_DATA_CODE = exports.BSON_DATA_BOOLEAN = exports.BSON_DATA_BINARY = exports.BSON_DATA_ARRAY = exports.BSON_BINARY_SUBTYPE_COLUMN = exports.BSON_BINARY_SUBTYPE_ENCRYPTED = exports.BSON_BINARY_SUBTYPE_UUID_NEW = exports.BSON_BINARY_SUBTYPE_UUID = exports.BSON_BINARY_SUBTYPE_USER_DEFINED = exports.BSON_BINARY_SUBTYPE_MD5 = exports.BSON_BINARY_SUBTYPE_FUNCTION = exports.BSON_BINARY_SUBTYPE_DEFAULT = exports.BSON_BINARY_SUBTYPE_BYTE_ARRAY = void 0;
exports.deserializeStream = exports.calculateObjectSize = exports.deserialize = exports.serializeWithBufferAndIndex = exports.serialize = exports.setInternalBufferSize = exports.BSONTypeError = exports.BSONError = exports.ObjectID = exports.Decimal128 = void 0;
var buffer_1 = require("buffer");
var binary_1 = require("./binary");
Object.defineProperty(exports, "Binary", { enumerable: true, get: function () { return binary_1.Binary; } });
var code_1 = require("./code");
Object.defineProperty(exports, "Code", { enumerable: true, get: function () { return code_1.Code; } });
var db_ref_1 = require("./db_ref");
Object.defineProperty(exports, "DBRef", { enumerable: true, get: function () { return db_ref_1.DBRef; } });
var decimal128_1 = require("./decimal128");
Object.defineProperty(exports, "Decimal128", { enumerable: true, get: function () { return decimal128_1.Decimal128; } });
var double_1 = require("./double");
Object.defineProperty(exports, "Double", { enumerable: true, get: function () { return double_1.Double; } });
var ensure_buffer_1 = require("./ensure_buffer");
var extended_json_1 = require("./extended_json");
var int_32_1 = require("./int_32");
Object.defineProperty(exports, "Int32", { enumerable: true, get: function () { return int_32_1.Int32; } });
var long_1 = require("./long");
Object.defineProperty(exports, "Long", { enumerable: true, get: function () { return long_1.Long; } });
var map_1 = require("./map");
Object.defineProperty(exports, "Map", { enumerable: true, get: function () { return map_1.Map; } });
var max_key_1 = require("./max_key");
Object.defineProperty(exports, "MaxKey", { enumerable: true, get: function () { return max_key_1.MaxKey; } });
var min_key_1 = require("./min_key");
Object.defineProperty(exports, "MinKey", { enumerable: true, get: function () { return min_key_1.MinKey; } });
var objectid_1 = require("./objectid");
Object.defineProperty(exports, "ObjectId", { enumerable: true, get: function () { return objectid_1.ObjectId; } });
Object.defineProperty(exports, "ObjectID", { enumerable: true, get: function () { return objectid_1.ObjectId; } });
var error_1 = require("./error");
var calculate_size_1 = require("./parser/calculate_size");
// Parts of the parser
var deserializer_1 = require("./parser/deserializer");
var serializer_1 = require("./parser/serializer");
var regexp_1 = require("./regexp");
Object.defineProperty(exports, "BSONRegExp", { enumerable: true, get: function () { return regexp_1.BSONRegExp; } });
var symbol_1 = require("./symbol");
Object.defineProperty(exports, "BSONSymbol", { enumerable: true, get: function () { return symbol_1.BSONSymbol; } });
var timestamp_1 = require("./timestamp");
Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
var uuid_1 = require("./uuid");
Object.defineProperty(exports, "UUID", { enumerable: true, get: function () { return uuid_1.UUID; } });
var constants_1 = require("./constants");
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_BYTE_ARRAY", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_BYTE_ARRAY; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_DEFAULT", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_DEFAULT; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_FUNCTION", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_FUNCTION; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_MD5", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_MD5; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_USER_DEFINED", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_USER_DEFINED; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_UUID", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_UUID; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_UUID_NEW", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_UUID_NEW; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_ENCRYPTED", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_ENCRYPTED; } });
Object.defineProperty(exports, "BSON_BINARY_SUBTYPE_COLUMN", { enumerable: true, get: function () { return constants_1.BSON_BINARY_SUBTYPE_COLUMN; } });
Object.defineProperty(exports, "BSON_DATA_ARRAY", { enumerable: true, get: function () { return constants_1.BSON_DATA_ARRAY; } });
Object.defineProperty(exports, "BSON_DATA_BINARY", { enumerable: true, get: function () { return constants_1.BSON_DATA_BINARY; } });
Object.defineProperty(exports, "BSON_DATA_BOOLEAN", { enumerable: true, get: function () { return constants_1.BSON_DATA_BOOLEAN; } });
Object.defineProperty(exports, "BSON_DATA_CODE", { enumerable: true, get: function () { return constants_1.BSON_DATA_CODE; } });
Object.defineProperty(exports, "BSON_DATA_CODE_W_SCOPE", { enumerable: true, get: function () { return constants_1.BSON_DATA_CODE_W_SCOPE; } });
Object.defineProperty(exports, "BSON_DATA_DATE", { enumerable: true, get: function () { return constants_1.BSON_DATA_DATE; } });
Object.defineProperty(exports, "BSON_DATA_DBPOINTER", { enumerable: true, get: function () { return constants_1.BSON_DATA_DBPOINTER; } });
Object.defineProperty(exports, "BSON_DATA_DECIMAL128", { enumerable: true, get: function () { return constants_1.BSON_DATA_DECIMAL128; } });
Object.defineProperty(exports, "BSON_DATA_INT", { enumerable: true, get: function () { return constants_1.BSON_DATA_INT; } });
Object.defineProperty(exports, "BSON_DATA_LONG", { enumerable: true, get: function () { return constants_1.BSON_DATA_LONG; } });
Object.defineProperty(exports, "BSON_DATA_MAX_KEY", { enumerable: true, get: function () { return constants_1.BSON_DATA_MAX_KEY; } });
Object.defineProperty(exports, "BSON_DATA_MIN_KEY", { enumerable: true, get: function () { return constants_1.BSON_DATA_MIN_KEY; } });
Object.defineProperty(exports, "BSON_DATA_NULL", { enumerable: true, get: function () { return constants_1.BSON_DATA_NULL; } });
Object.defineProperty(exports, "BSON_DATA_NUMBER", { enumerable: true, get: function () { return constants_1.BSON_DATA_NUMBER; } });
Object.defineProperty(exports, "BSON_DATA_OBJECT", { enumerable: true, get: function () { return constants_1.BSON_DATA_OBJECT; } });
Object.defineProperty(exports, "BSON_DATA_OID", { enumerable: true, get: function () { return constants_1.BSON_DATA_OID; } });
Object.defineProperty(exports, "BSON_DATA_REGEXP", { enumerable: true, get: function () { return constants_1.BSON_DATA_REGEXP; } });
Object.defineProperty(exports, "BSON_DATA_STRING", { enumerable: true, get: function () { return constants_1.BSON_DATA_STRING; } });
Object.defineProperty(exports, "BSON_DATA_SYMBOL", { enumerable: true, get: function () { return constants_1.BSON_DATA_SYMBOL; } });
Object.defineProperty(exports, "BSON_DATA_TIMESTAMP", { enumerable: true, get: function () { return constants_1.BSON_DATA_TIMESTAMP; } });
Object.defineProperty(exports, "BSON_DATA_UNDEFINED", { enumerable: true, get: function () { return constants_1.BSON_DATA_UNDEFINED; } });
Object.defineProperty(exports, "BSON_INT32_MAX", { enumerable: true, get: function () { return constants_1.BSON_INT32_MAX; } });
Object.defineProperty(exports, "BSON_INT32_MIN", { enumerable: true, get: function () { return constants_1.BSON_INT32_MIN; } });
Object.defineProperty(exports, "BSON_INT64_MAX", { enumerable: true, get: function () { return constants_1.BSON_INT64_MAX; } });
Object.defineProperty(exports, "BSON_INT64_MIN", { enumerable: true, get: function () { return constants_1.BSON_INT64_MIN; } });
var extended_json_2 = require("./extended_json");
Object.defineProperty(exports, "EJSON", { enumerable: true, get: function () { return extended_json_2.EJSON; } });
var timestamp_2 = require("./timestamp");
Object.defineProperty(exports, "LongWithoutOverridesClass", { enumerable: true, get: function () { return timestamp_2.LongWithoutOverridesClass; } });
var error_2 = require("./error");
Object.defineProperty(exports, "BSONError", { enumerable: true, get: function () { return error_2.BSONError; } });
Object.defineProperty(exports, "BSONTypeError", { enumerable: true, get: function () { return error_2.BSONTypeError; } });
/** @internal */
// Default Max Size
var MAXSIZE = 1024 * 1024 * 17;
// Current Internal Temporary Serialization Buffer
var buffer = buffer_1.Buffer.alloc(MAXSIZE);
/**
 * Sets the size of the internal serialization buffer.
 *
 * @param size - The desired size for the internal serialization buffer
 * @public
 */
function setInternalBufferSize(size) {
    // Resize the internal serialization buffer if needed
    if (buffer.length < size) {
        buffer = buffer_1.Buffer.alloc(size);
    }
}
exports.setInternalBufferSize = setInternalBufferSize;
|
||||||
|
/**
|
||||||
|
* Serialize a Javascript object.
|
||||||
|
*
|
||||||
|
* @param object - the Javascript object to serialize.
|
||||||
|
* @returns Buffer object containing the serialized object.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
function serialize(object, options) {
|
||||||
|
if (options === void 0) { options = {}; }
|
||||||
|
// Unpack the options
|
||||||
|
var checkKeys = typeof options.checkKeys === 'boolean' ? options.checkKeys : false;
|
||||||
|
var serializeFunctions = typeof options.serializeFunctions === 'boolean' ? options.serializeFunctions : false;
|
||||||
|
var ignoreUndefined = typeof options.ignoreUndefined === 'boolean' ? options.ignoreUndefined : true;
|
||||||
|
var minInternalBufferSize = typeof options.minInternalBufferSize === 'number' ? options.minInternalBufferSize : MAXSIZE;
|
||||||
|
// Resize the internal serialization buffer if needed
|
||||||
|
if (buffer.length < minInternalBufferSize) {
|
||||||
|
buffer = buffer_1.Buffer.alloc(minInternalBufferSize);
|
||||||
|
}
|
||||||
|
// Attempt to serialize
|
||||||
|
var serializationIndex = serializer_1.serializeInto(buffer, object, checkKeys, 0, 0, serializeFunctions, ignoreUndefined, []);
|
||||||
|
// Create the final buffer
|
||||||
|
var finishedBuffer = buffer_1.Buffer.alloc(serializationIndex);
|
||||||
|
// Copy into the finished buffer
|
||||||
|
buffer.copy(finishedBuffer, 0, 0, finishedBuffer.length);
|
||||||
|
// Return the buffer
|
||||||
|
return finishedBuffer;
|
||||||
|
}
|
||||||
|
exports.serialize = serialize;
|
||||||
|
/**
|
||||||
|
* Serialize a Javascript object using a predefined Buffer and index into the buffer,
|
||||||
|
* useful when pre-allocating the space for serialization.
|
||||||
|
*
|
||||||
|
* @param object - the Javascript object to serialize.
|
||||||
|
* @param finalBuffer - the Buffer you pre-allocated to store the serialized BSON object.
|
||||||
|
* @returns the index pointing to the last written byte in the buffer.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
function serializeWithBufferAndIndex(object, finalBuffer, options) {
|
||||||
|
if (options === void 0) { options = {}; }
|
||||||
|
// Unpack the options
|
||||||
|
var checkKeys = typeof options.checkKeys === 'boolean' ? options.checkKeys : false;
|
||||||
|
var serializeFunctions = typeof options.serializeFunctions === 'boolean' ? options.serializeFunctions : false;
|
||||||
|
var ignoreUndefined = typeof options.ignoreUndefined === 'boolean' ? options.ignoreUndefined : true;
|
||||||
|
var startIndex = typeof options.index === 'number' ? options.index : 0;
|
||||||
|
// Attempt to serialize
|
||||||
|
var serializationIndex = serializer_1.serializeInto(buffer, object, checkKeys, 0, 0, serializeFunctions, ignoreUndefined);
|
||||||
|
buffer.copy(finalBuffer, startIndex, 0, serializationIndex);
|
||||||
|
// Return the index
|
||||||
|
return startIndex + serializationIndex - 1;
|
||||||
|
}
|
||||||
|
exports.serializeWithBufferAndIndex = serializeWithBufferAndIndex;
|
||||||
|
/**
|
||||||
|
* Deserialize data as BSON.
|
||||||
|
*
|
||||||
|
* @param buffer - the buffer containing the serialized set of BSON documents.
|
||||||
|
* @returns returns the deserialized Javascript Object.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
function deserialize(buffer, options) {
|
||||||
|
if (options === void 0) { options = {}; }
|
||||||
|
return deserializer_1.deserialize(buffer instanceof buffer_1.Buffer ? buffer : ensure_buffer_1.ensureBuffer(buffer), options);
|
||||||
|
}
|
||||||
|
exports.deserialize = deserialize;
|
||||||
|
/**
|
||||||
|
* Calculate the bson size for a passed in Javascript object.
|
||||||
|
*
|
||||||
|
* @param object - the Javascript object to calculate the BSON byte size for
|
||||||
|
* @returns size of BSON object in bytes
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
function calculateObjectSize(object, options) {
|
||||||
|
if (options === void 0) { options = {}; }
|
||||||
|
options = options || {};
|
||||||
|
var serializeFunctions = typeof options.serializeFunctions === 'boolean' ? options.serializeFunctions : false;
|
||||||
|
var ignoreUndefined = typeof options.ignoreUndefined === 'boolean' ? options.ignoreUndefined : true;
|
||||||
|
return calculate_size_1.calculateObjectSize(object, serializeFunctions, ignoreUndefined);
|
||||||
|
}
|
||||||
|
exports.calculateObjectSize = calculateObjectSize;
|
||||||
|
/**
|
||||||
|
* Deserialize stream data as BSON documents.
|
||||||
|
*
|
||||||
|
* @param data - the buffer containing the serialized set of BSON documents.
|
||||||
|
* @param startIndex - the start index in the data Buffer where the deserialization is to start.
|
||||||
|
* @param numberOfDocuments - number of documents to deserialize.
|
||||||
|
* @param documents - an array where to store the deserialized documents.
|
||||||
|
* @param docStartIndex - the index in the documents array from where to start inserting documents.
|
||||||
|
* @param options - additional options used for the deserialization.
|
||||||
|
* @returns next index in the buffer after deserialization **x** numbers of documents.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
function deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex, options) {
|
||||||
|
var internalOptions = Object.assign({ allowObjectSmallerThanBufferSize: true, index: 0 }, options);
|
||||||
|
var bufferData = ensure_buffer_1.ensureBuffer(data);
|
||||||
|
var index = startIndex;
|
||||||
|
// Loop over all documents
|
||||||
|
for (var i = 0; i < numberOfDocuments; i++) {
|
||||||
|
// Find size of the document
|
||||||
|
var size = bufferData[index] |
|
||||||
|
(bufferData[index + 1] << 8) |
|
||||||
|
(bufferData[index + 2] << 16) |
|
||||||
|
(bufferData[index + 3] << 24);
|
||||||
|
// Update options with index
|
||||||
|
internalOptions.index = index;
|
||||||
|
// Parse the document at this point
|
||||||
|
documents[docStartIndex + i] = deserializer_1.deserialize(bufferData, internalOptions);
|
||||||
|
// Adjust index by the document size
|
||||||
|
index = index + size;
|
||||||
|
}
|
||||||
|
// Return object containing end index of parsing and list of documents
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
exports.deserializeStream = deserializeStream;
|
||||||
|
/**
|
||||||
|
* BSON default export
|
||||||
|
* @deprecated Please use named exports
|
||||||
|
* @privateRemarks
|
||||||
|
* We want to someday deprecate the default export,
|
||||||
|
* so none of the new TS types are being exported on the default
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
var BSON = {
|
||||||
|
Binary: binary_1.Binary,
|
||||||
|
Code: code_1.Code,
|
||||||
|
DBRef: db_ref_1.DBRef,
|
||||||
|
Decimal128: decimal128_1.Decimal128,
|
||||||
|
Double: double_1.Double,
|
||||||
|
Int32: int_32_1.Int32,
|
||||||
|
Long: long_1.Long,
|
||||||
|
UUID: uuid_1.UUID,
|
||||||
|
Map: map_1.Map,
|
||||||
|
MaxKey: max_key_1.MaxKey,
|
||||||
|
MinKey: min_key_1.MinKey,
|
||||||
|
ObjectId: objectid_1.ObjectId,
|
||||||
|
ObjectID: objectid_1.ObjectId,
|
||||||
|
BSONRegExp: regexp_1.BSONRegExp,
|
||||||
|
BSONSymbol: symbol_1.BSONSymbol,
|
||||||
|
Timestamp: timestamp_1.Timestamp,
|
||||||
|
EJSON: extended_json_1.EJSON,
|
||||||
|
setInternalBufferSize: setInternalBufferSize,
|
||||||
|
serialize: serialize,
|
||||||
|
serializeWithBufferAndIndex: serializeWithBufferAndIndex,
|
||||||
|
deserialize: deserialize,
|
||||||
|
calculateObjectSize: calculateObjectSize,
|
||||||
|
deserializeStream: deserializeStream,
|
||||||
|
BSONError: error_1.BSONError,
|
||||||
|
BSONTypeError: error_1.BSONTypeError
|
||||||
|
};
|
||||||
|
exports.default = BSON;
|
||||||
|
//# sourceMappingURL=bson.js.map
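The exports above make serialize, deserialize and calculateObjectSize the entry points of this vendored module; a minimal usage sketch, assuming the package resolves as require('bson') from this node_modules tree (the document and field names below are made up for illustration):
const BSON = require('bson');
// A plain object round-trips through a BSON Buffer
const doc = { name: 'MOOver', count: 3 };
const bytes = BSON.serialize(doc);
console.log(bytes.length === BSON.calculateObjectSize(doc)); // true, the sizes agree
console.log(BSON.deserialize(bytes)); // { name: 'MOOver', count: 3 }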
1
node_modules/bson/lib/bson.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
45
node_modules/bson/lib/code.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Code = void 0;
/**
 * A class representation of the BSON Code type.
 * @public
 */
var Code = /** @class */ (function () {
    /**
     * @param code - a string or function.
     * @param scope - an optional scope for the function.
     */
    function Code(code, scope) {
        if (!(this instanceof Code))
            return new Code(code, scope);
        this.code = code;
        this.scope = scope;
    }
    Code.prototype.toJSON = function () {
        return { code: this.code, scope: this.scope };
    };
    /** @internal */
    Code.prototype.toExtendedJSON = function () {
        if (this.scope) {
            return { $code: this.code, $scope: this.scope };
        }
        return { $code: this.code };
    };
    /** @internal */
    Code.fromExtendedJSON = function (doc) {
        return new Code(doc.$code, doc.$scope);
    };
    /** @internal */
    Code.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
        return this.inspect();
    };
    Code.prototype.inspect = function () {
        var codeJson = this.toJSON();
        return "new Code(\"" + codeJson.code + "\"" + (codeJson.scope ? ", " + JSON.stringify(codeJson.scope) : '') + ")";
    };
    return Code;
}());
exports.Code = Code;
Object.defineProperty(Code.prototype, '_bsontype', { value: 'Code' });
//# sourceMappingURL=code.js.map
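A short sketch of how the Code wrapper above behaves, assuming it is imported from the same vendored package (the sample function and scope are illustrative only):
const { Code } = require('bson');
// With a scope, toExtendedJSON() emits both $code and $scope
const withScope = new Code('function () { return x; }', { x: 1 });
console.log(withScope.toExtendedJSON()); // { $code: 'function () { return x; }', $scope: { x: 1 } }
// Without a scope, only $code is emitted
console.log(new Code('function () {}').toExtendedJSON()); // { $code: 'function () {}' }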
1
node_modules/bson/lib/code.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"code.js","sourceRoot":"","sources":["../src/code.ts"],"names":[],"mappings":";;;AAQA;;;GAGG;AACH;IAKE;;;OAGG;IACH,cAAY,IAAuB,EAAE,KAAgB;QACnD,IAAI,CAAC,CAAC,IAAI,YAAY,IAAI,CAAC;YAAE,OAAO,IAAI,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAE1D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACrB,CAAC;IAED,qBAAM,GAAN;QACE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC;IAChD,CAAC;IAED,gBAAgB;IAChB,6BAAc,GAAd;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC;SACjD;QAED,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC;IAC9B,CAAC;IAED,gBAAgB;IACT,qBAAgB,GAAvB,UAAwB,GAAiB;QACvC,OAAO,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED,gBAAgB;IAChB,eAAC,MAAM,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC,GAA1C;QACE,OAAO,IAAI,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED,sBAAO,GAAP;QACE,IAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QAC/B,OAAO,gBAAa,QAAQ,CAAC,IAAI,WAC/B,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,OAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAG,CAAC,CAAC,CAAC,EAAE,OAC1D,CAAC;IACN,CAAC;IACH,WAAC;AAAD,CAAC,AA7CD,IA6CC;AA7CY,oBAAI;AA+CjB,MAAM,CAAC,cAAc,CAAC,IAAI,CAAC,SAAS,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC"}
82
node_modules/bson/lib/constants.js
generated
vendored
Normal file
@@ -0,0 +1,82 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BSON_BINARY_SUBTYPE_USER_DEFINED = exports.BSON_BINARY_SUBTYPE_COLUMN = exports.BSON_BINARY_SUBTYPE_ENCRYPTED = exports.BSON_BINARY_SUBTYPE_MD5 = exports.BSON_BINARY_SUBTYPE_UUID_NEW = exports.BSON_BINARY_SUBTYPE_UUID = exports.BSON_BINARY_SUBTYPE_BYTE_ARRAY = exports.BSON_BINARY_SUBTYPE_FUNCTION = exports.BSON_BINARY_SUBTYPE_DEFAULT = exports.BSON_DATA_MAX_KEY = exports.BSON_DATA_MIN_KEY = exports.BSON_DATA_DECIMAL128 = exports.BSON_DATA_LONG = exports.BSON_DATA_TIMESTAMP = exports.BSON_DATA_INT = exports.BSON_DATA_CODE_W_SCOPE = exports.BSON_DATA_SYMBOL = exports.BSON_DATA_CODE = exports.BSON_DATA_DBPOINTER = exports.BSON_DATA_REGEXP = exports.BSON_DATA_NULL = exports.BSON_DATA_DATE = exports.BSON_DATA_BOOLEAN = exports.BSON_DATA_OID = exports.BSON_DATA_UNDEFINED = exports.BSON_DATA_BINARY = exports.BSON_DATA_ARRAY = exports.BSON_DATA_OBJECT = exports.BSON_DATA_STRING = exports.BSON_DATA_NUMBER = exports.JS_INT_MIN = exports.JS_INT_MAX = exports.BSON_INT64_MIN = exports.BSON_INT64_MAX = exports.BSON_INT32_MIN = exports.BSON_INT32_MAX = void 0;
/** @internal */
exports.BSON_INT32_MAX = 0x7fffffff;
/** @internal */
exports.BSON_INT32_MIN = -0x80000000;
/** @internal */
exports.BSON_INT64_MAX = Math.pow(2, 63) - 1;
/** @internal */
exports.BSON_INT64_MIN = -Math.pow(2, 63);
/**
 * Any integer up to 2^53 can be precisely represented by a double.
 * @internal
 */
exports.JS_INT_MAX = Math.pow(2, 53);
/**
 * Any integer down to -2^53 can be precisely represented by a double.
 * @internal
 */
exports.JS_INT_MIN = -Math.pow(2, 53);
/** Number BSON Type @internal */
exports.BSON_DATA_NUMBER = 1;
/** String BSON Type @internal */
exports.BSON_DATA_STRING = 2;
/** Object BSON Type @internal */
exports.BSON_DATA_OBJECT = 3;
/** Array BSON Type @internal */
exports.BSON_DATA_ARRAY = 4;
/** Binary BSON Type @internal */
exports.BSON_DATA_BINARY = 5;
/** Binary BSON Type @internal */
exports.BSON_DATA_UNDEFINED = 6;
/** ObjectId BSON Type @internal */
exports.BSON_DATA_OID = 7;
/** Boolean BSON Type @internal */
exports.BSON_DATA_BOOLEAN = 8;
/** Date BSON Type @internal */
exports.BSON_DATA_DATE = 9;
/** null BSON Type @internal */
exports.BSON_DATA_NULL = 10;
/** RegExp BSON Type @internal */
exports.BSON_DATA_REGEXP = 11;
/** Code BSON Type @internal */
exports.BSON_DATA_DBPOINTER = 12;
/** Code BSON Type @internal */
exports.BSON_DATA_CODE = 13;
/** Symbol BSON Type @internal */
exports.BSON_DATA_SYMBOL = 14;
/** Code with Scope BSON Type @internal */
exports.BSON_DATA_CODE_W_SCOPE = 15;
/** 32 bit Integer BSON Type @internal */
exports.BSON_DATA_INT = 16;
/** Timestamp BSON Type @internal */
exports.BSON_DATA_TIMESTAMP = 17;
/** Long BSON Type @internal */
exports.BSON_DATA_LONG = 18;
/** Decimal128 BSON Type @internal */
exports.BSON_DATA_DECIMAL128 = 19;
/** MinKey BSON Type @internal */
exports.BSON_DATA_MIN_KEY = 0xff;
/** MaxKey BSON Type @internal */
exports.BSON_DATA_MAX_KEY = 0x7f;
/** Binary Default Type @internal */
exports.BSON_BINARY_SUBTYPE_DEFAULT = 0;
/** Binary Function Type @internal */
exports.BSON_BINARY_SUBTYPE_FUNCTION = 1;
/** Binary Byte Array Type @internal */
exports.BSON_BINARY_SUBTYPE_BYTE_ARRAY = 2;
/** Binary Deprecated UUID Type @deprecated Please use BSON_BINARY_SUBTYPE_UUID_NEW @internal */
exports.BSON_BINARY_SUBTYPE_UUID = 3;
/** Binary UUID Type @internal */
exports.BSON_BINARY_SUBTYPE_UUID_NEW = 4;
/** Binary MD5 Type @internal */
exports.BSON_BINARY_SUBTYPE_MD5 = 5;
/** Encrypted BSON type @internal */
exports.BSON_BINARY_SUBTYPE_ENCRYPTED = 6;
/** Column BSON type @internal */
exports.BSON_BINARY_SUBTYPE_COLUMN = 7;
/** Binary User Defined Type @internal */
exports.BSON_BINARY_SUBTYPE_USER_DEFINED = 128;
//# sourceMappingURL=constants.js.map
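These constants are the type and subtype bytes used on the wire; a small sketch, assuming they are re-exported from the package root as shown in bson.js above (the sample document is made up):
const BSON = require('bson');
// Layout: bytes 0-3 hold the int32 document size, byte 4 is the type of the first element
const bytes = BSON.serialize({ greeting: 'moo' });
console.log(bytes[4] === BSON.BSON_DATA_STRING); // true, the first element is a string
console.log(bytes.readInt32LE(0) === bytes.length); // the leading size matches the buffer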
1
node_modules/bson/lib/constants.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":";;;AAAA,gBAAgB;AACH,QAAA,cAAc,GAAG,UAAU,CAAC;AACzC,gBAAgB;AACH,QAAA,cAAc,GAAG,CAAC,UAAU,CAAC;AAC1C,gBAAgB;AACH,QAAA,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC;AAClD,gBAAgB;AACH,QAAA,cAAc,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAE/C;;;GAGG;AACU,QAAA,UAAU,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAE1C;;;GAGG;AACU,QAAA,UAAU,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAE3C,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AAElC,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AAElC,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AAElC,gCAAgC;AACnB,QAAA,eAAe,GAAG,CAAC,CAAC;AAEjC,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AAElC,iCAAiC;AACpB,QAAA,mBAAmB,GAAG,CAAC,CAAC;AAErC,mCAAmC;AACtB,QAAA,aAAa,GAAG,CAAC,CAAC;AAE/B,kCAAkC;AACrB,QAAA,iBAAiB,GAAG,CAAC,CAAC;AAEnC,+BAA+B;AAClB,QAAA,cAAc,GAAG,CAAC,CAAC;AAEhC,+BAA+B;AAClB,QAAA,cAAc,GAAG,EAAE,CAAC;AAEjC,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,EAAE,CAAC;AAEnC,+BAA+B;AAClB,QAAA,mBAAmB,GAAG,EAAE,CAAC;AAEtC,+BAA+B;AAClB,QAAA,cAAc,GAAG,EAAE,CAAC;AAEjC,iCAAiC;AACpB,QAAA,gBAAgB,GAAG,EAAE,CAAC;AAEnC,0CAA0C;AAC7B,QAAA,sBAAsB,GAAG,EAAE,CAAC;AAEzC,yCAAyC;AAC5B,QAAA,aAAa,GAAG,EAAE,CAAC;AAEhC,oCAAoC;AACvB,QAAA,mBAAmB,GAAG,EAAE,CAAC;AAEtC,+BAA+B;AAClB,QAAA,cAAc,GAAG,EAAE,CAAC;AAEjC,qCAAqC;AACxB,QAAA,oBAAoB,GAAG,EAAE,CAAC;AAEvC,iCAAiC;AACpB,QAAA,iBAAiB,GAAG,IAAI,CAAC;AAEtC,iCAAiC;AACpB,QAAA,iBAAiB,GAAG,IAAI,CAAC;AAEtC,oCAAoC;AACvB,QAAA,2BAA2B,GAAG,CAAC,CAAC;AAE7C,qCAAqC;AACxB,QAAA,4BAA4B,GAAG,CAAC,CAAC;AAE9C,uCAAuC;AAC1B,QAAA,8BAA8B,GAAG,CAAC,CAAC;AAEhD,gGAAgG;AACnF,QAAA,wBAAwB,GAAG,CAAC,CAAC;AAE1C,iCAAiC;AACpB,QAAA,4BAA4B,GAAG,CAAC,CAAC;AAE9C,gCAAgC;AACnB,QAAA,uBAAuB,GAAG,CAAC,CAAC;AAEzC,oCAAoC;AACvB,QAAA,6BAA6B,GAAG,CAAC,CAAC;AAE/C,iCAAiC;AACpB,QAAA,0BAA0B,GAAG,CAAC,CAAC;AAE5C,yCAAyC;AAC5B,QAAA,gCAAgC,GAAG,GAAG,CAAC"}
96
node_modules/bson/lib/db_ref.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DBRef = exports.isDBRefLike = void 0;
var utils_1 = require("./parser/utils");
/** @internal */
function isDBRefLike(value) {
    return (utils_1.isObjectLike(value) &&
        value.$id != null &&
        typeof value.$ref === 'string' &&
        (value.$db == null || typeof value.$db === 'string'));
}
exports.isDBRefLike = isDBRefLike;
/**
 * A class representation of the BSON DBRef type.
 * @public
 */
var DBRef = /** @class */ (function () {
    /**
     * @param collection - the collection name.
     * @param oid - the reference ObjectId.
     * @param db - optional db name, if omitted the reference is local to the current db.
     */
    function DBRef(collection, oid, db, fields) {
        if (!(this instanceof DBRef))
            return new DBRef(collection, oid, db, fields);
        // check if namespace has been provided
        var parts = collection.split('.');
        if (parts.length === 2) {
            db = parts.shift();
            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
            collection = parts.shift();
        }
        this.collection = collection;
        this.oid = oid;
        this.db = db;
        this.fields = fields || {};
    }
    Object.defineProperty(DBRef.prototype, "namespace", {
        // Property provided for compatibility with the 1.x parser
        // the 1.x parser used a "namespace" property, while 4.x uses "collection"
        /** @internal */
        get: function () {
            return this.collection;
        },
        set: function (value) {
            this.collection = value;
        },
        enumerable: false,
        configurable: true
    });
    DBRef.prototype.toJSON = function () {
        var o = Object.assign({
            $ref: this.collection,
            $id: this.oid
        }, this.fields);
        if (this.db != null)
            o.$db = this.db;
        return o;
    };
    /** @internal */
    DBRef.prototype.toExtendedJSON = function (options) {
        options = options || {};
        var o = {
            $ref: this.collection,
            $id: this.oid
        };
        if (options.legacy) {
            return o;
        }
        if (this.db)
            o.$db = this.db;
        o = Object.assign(o, this.fields);
        return o;
    };
    /** @internal */
    DBRef.fromExtendedJSON = function (doc) {
        var copy = Object.assign({}, doc);
        delete copy.$ref;
        delete copy.$id;
        delete copy.$db;
        return new DBRef(doc.$ref, doc.$id, doc.$db, copy);
    };
    /** @internal */
    DBRef.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
        return this.inspect();
    };
    DBRef.prototype.inspect = function () {
        // NOTE: if OID is an ObjectId class it will just print the oid string.
        var oid = this.oid === undefined || this.oid.toString === undefined ? this.oid : this.oid.toString();
        return "new DBRef(\"" + this.namespace + "\", new ObjectId(\"" + oid + "\")" + (this.db ? ", \"" + this.db + "\"" : '') + ")";
    };
    return DBRef;
}());
exports.DBRef = DBRef;
Object.defineProperty(DBRef.prototype, '_bsontype', { value: 'DBRef' });
//# sourceMappingURL=db_ref.js.map
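A sketch of the namespace splitting done in the DBRef constructor above, assuming DBRef and ObjectId come from the same vendored package (the collection and db names are illustrative):
const { DBRef, ObjectId } = require('bson');
// A dotted name is split into db and collection by the constructor
const ref = new DBRef('app.users', new ObjectId());
console.log(ref.db, ref.collection); // 'app' 'users'
console.log(ref.toJSON().$ref); // 'users'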
1
node_modules/bson/lib/db_ref.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"db_ref.js","sourceRoot":"","sources":["../src/db_ref.ts"],"names":[],"mappings":";;;AAGA,wCAA8C;AAS9C,gBAAgB;AAChB,SAAgB,WAAW,CAAC,KAAc;IACxC,OAAO,CACL,oBAAY,CAAC,KAAK,CAAC;QACnB,KAAK,CAAC,GAAG,IAAI,IAAI;QACjB,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;QAC9B,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,CACrD,CAAC;AACJ,CAAC;AAPD,kCAOC;AAED;;;GAGG;AACH;IAQE;;;;OAIG;IACH,eAAY,UAAkB,EAAE,GAAa,EAAE,EAAW,EAAE,MAAiB;QAC3E,IAAI,CAAC,CAAC,IAAI,YAAY,KAAK,CAAC;YAAE,OAAO,IAAI,KAAK,CAAC,UAAU,EAAE,GAAG,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;QAE5E,uCAAuC;QACvC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACpC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;YACtB,EAAE,GAAG,KAAK,CAAC,KAAK,EAAE,CAAC;YACnB,oEAAoE;YACpE,UAAU,GAAG,KAAK,CAAC,KAAK,EAAG,CAAC;SAC7B;QAED,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC;QACb,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAC7B,CAAC;IAMD,sBAAI,4BAAS;QAJb,0DAA0D;QAC1D,0EAA0E;QAE1E,gBAAgB;aAChB;YACE,OAAO,IAAI,CAAC,UAAU,CAAC;QACzB,CAAC;aAED,UAAc,KAAa;YACzB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QAC1B,CAAC;;;OAJA;IAMD,sBAAM,GAAN;QACE,IAAM,CAAC,GAAG,MAAM,CAAC,MAAM,CACrB;YACE,IAAI,EAAE,IAAI,CAAC,UAAU;YACrB,GAAG,EAAE,IAAI,CAAC,GAAG;SACd,EACD,IAAI,CAAC,MAAM,CACZ,CAAC;QAEF,IAAI,IAAI,CAAC,EAAE,IAAI,IAAI;YAAE,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,EAAE,CAAC;QACrC,OAAO,CAAC,CAAC;IACX,CAAC;IAED,gBAAgB;IAChB,8BAAc,GAAd,UAAe,OAAsB;QACnC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,CAAC,GAAc;YACjB,IAAI,EAAE,IAAI,CAAC,UAAU;YACrB,GAAG,EAAE,IAAI,CAAC,GAAG;SACd,CAAC;QAEF,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,OAAO,CAAC,CAAC;SACV;QAED,IAAI,IAAI,CAAC,EAAE;YAAE,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,EAAE,CAAC;QAC7B,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,OAAO,CAAC,CAAC;IACX,CAAC;IAED,gBAAgB;IACT,sBAAgB,GAAvB,UAAwB,GAAc;QACpC,IAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,GAAG,CAAuB,CAAC;QAC1D,OAAO,IAAI,CAAC,IAAI,CAAC;QACjB,OAAO,IAAI,CAAC,GAAG,CAAC;QAChB,OAAO,IAAI,CAAC,GAAG,CAAC;QAChB,OAAO,IAAI,KAAK,CAAC,GAAG,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;IACrD,CAAC;IAED,gBAAgB;IAChB,gBAAC,MAAM,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC,GAA1C;QACE,OAAO,IAAI,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED,uBAAO,GAAP;QACE,uEAAuE;QACvE,IAAM,GAAG,GACP,IAAI,CAAC,GAAG,KAAK,SAAS,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QAC7F,OAAO,iBAAc,IAAI,CAAC,SAAS,2BAAoB,GAAG,YACxD,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,SAAM,IAAI,CAAC,EAAE,OAAG,CAAC,CAAC,CAAC,EAAE,OAC9B,CAAC;IACN,CAAC;IACH,YAAC;AAAD,CAAC,AA9FD,IA8FC;AA9FY,sBAAK;AAgGlB,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC"}
668
node_modules/bson/lib/decimal128.js
generated
vendored
Normal file
@@ -0,0 +1,668 @@
|
||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.Decimal128 = void 0;
|
||||||
|
var buffer_1 = require("buffer");
|
||||||
|
var error_1 = require("./error");
|
||||||
|
var long_1 = require("./long");
|
||||||
|
var utils_1 = require("./parser/utils");
|
||||||
|
var PARSE_STRING_REGEXP = /^(\+|-)?(\d+|(\d*\.\d*))?(E|e)?([-+])?(\d+)?$/;
|
||||||
|
var PARSE_INF_REGEXP = /^(\+|-)?(Infinity|inf)$/i;
|
||||||
|
var PARSE_NAN_REGEXP = /^(\+|-)?NaN$/i;
|
||||||
|
var EXPONENT_MAX = 6111;
|
||||||
|
var EXPONENT_MIN = -6176;
|
||||||
|
var EXPONENT_BIAS = 6176;
|
||||||
|
var MAX_DIGITS = 34;
|
||||||
|
// Nan value bits as 32 bit values (due to lack of longs)
|
||||||
|
var NAN_BUFFER = [
|
||||||
|
0x7c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
|
||||||
|
].reverse();
|
||||||
|
// Infinity value bits 32 bit values (due to lack of longs)
|
||||||
|
var INF_NEGATIVE_BUFFER = [
|
||||||
|
0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
|
||||||
|
].reverse();
|
||||||
|
var INF_POSITIVE_BUFFER = [
|
||||||
|
0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
|
||||||
|
].reverse();
|
||||||
|
var EXPONENT_REGEX = /^([-+])?(\d+)?$/;
|
||||||
|
// Extract least significant 5 bits
|
||||||
|
var COMBINATION_MASK = 0x1f;
|
||||||
|
// Extract least significant 14 bits
|
||||||
|
var EXPONENT_MASK = 0x3fff;
|
||||||
|
// Value of combination field for Inf
|
||||||
|
var COMBINATION_INFINITY = 30;
|
||||||
|
// Value of combination field for NaN
|
||||||
|
var COMBINATION_NAN = 31;
|
||||||
|
// Detect if the value is a digit
|
||||||
|
function isDigit(value) {
|
||||||
|
return !isNaN(parseInt(value, 10));
|
||||||
|
}
|
||||||
|
// Divide two uint128 values
|
||||||
|
function divideu128(value) {
|
||||||
|
var DIVISOR = long_1.Long.fromNumber(1000 * 1000 * 1000);
|
||||||
|
var _rem = long_1.Long.fromNumber(0);
|
||||||
|
if (!value.parts[0] && !value.parts[1] && !value.parts[2] && !value.parts[3]) {
|
||||||
|
return { quotient: value, rem: _rem };
|
||||||
|
}
|
||||||
|
for (var i = 0; i <= 3; i++) {
|
||||||
|
// Adjust remainder to match value of next dividend
|
||||||
|
_rem = _rem.shiftLeft(32);
|
||||||
|
// Add the divided to _rem
|
||||||
|
_rem = _rem.add(new long_1.Long(value.parts[i], 0));
|
||||||
|
value.parts[i] = _rem.div(DIVISOR).low;
|
||||||
|
_rem = _rem.modulo(DIVISOR);
|
||||||
|
}
|
||||||
|
return { quotient: value, rem: _rem };
|
||||||
|
}
|
||||||
|
// Multiply two Long values and return the 128 bit value
|
||||||
|
function multiply64x2(left, right) {
|
||||||
|
if (!left && !right) {
|
||||||
|
return { high: long_1.Long.fromNumber(0), low: long_1.Long.fromNumber(0) };
|
||||||
|
}
|
||||||
|
var leftHigh = left.shiftRightUnsigned(32);
|
||||||
|
var leftLow = new long_1.Long(left.getLowBits(), 0);
|
||||||
|
var rightHigh = right.shiftRightUnsigned(32);
|
||||||
|
var rightLow = new long_1.Long(right.getLowBits(), 0);
|
||||||
|
var productHigh = leftHigh.multiply(rightHigh);
|
||||||
|
var productMid = leftHigh.multiply(rightLow);
|
||||||
|
var productMid2 = leftLow.multiply(rightHigh);
|
||||||
|
var productLow = leftLow.multiply(rightLow);
|
||||||
|
productHigh = productHigh.add(productMid.shiftRightUnsigned(32));
|
||||||
|
productMid = new long_1.Long(productMid.getLowBits(), 0)
|
||||||
|
.add(productMid2)
|
||||||
|
.add(productLow.shiftRightUnsigned(32));
|
||||||
|
productHigh = productHigh.add(productMid.shiftRightUnsigned(32));
|
||||||
|
productLow = productMid.shiftLeft(32).add(new long_1.Long(productLow.getLowBits(), 0));
|
||||||
|
// Return the 128 bit result
|
||||||
|
return { high: productHigh, low: productLow };
|
||||||
|
}
|
||||||
|
function lessThan(left, right) {
|
||||||
|
// Make values unsigned
|
||||||
|
var uhleft = left.high >>> 0;
|
||||||
|
var uhright = right.high >>> 0;
|
||||||
|
// Compare high bits first
|
||||||
|
if (uhleft < uhright) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
else if (uhleft === uhright) {
|
||||||
|
var ulleft = left.low >>> 0;
|
||||||
|
var ulright = right.low >>> 0;
|
||||||
|
if (ulleft < ulright)
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
function invalidErr(string, message) {
|
||||||
|
throw new error_1.BSONTypeError("\"" + string + "\" is not a valid Decimal128 string - " + message);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A class representation of the BSON Decimal128 type.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
var Decimal128 = /** @class */ (function () {
|
||||||
|
/**
|
||||||
|
* @param bytes - a buffer containing the raw Decimal128 bytes in little endian order,
|
||||||
|
* or a string representation as returned by .toString()
|
||||||
|
*/
|
||||||
|
function Decimal128(bytes) {
|
||||||
|
if (!(this instanceof Decimal128))
|
||||||
|
return new Decimal128(bytes);
|
||||||
|
if (typeof bytes === 'string') {
|
||||||
|
this.bytes = Decimal128.fromString(bytes).bytes;
|
||||||
|
}
|
||||||
|
else if (utils_1.isUint8Array(bytes)) {
|
||||||
|
if (bytes.byteLength !== 16) {
|
||||||
|
throw new error_1.BSONTypeError('Decimal128 must take a Buffer of 16 bytes');
|
||||||
|
}
|
||||||
|
this.bytes = bytes;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new error_1.BSONTypeError('Decimal128 must take a Buffer or string');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Create a Decimal128 instance from a string representation
|
||||||
|
*
|
||||||
|
* @param representation - a numeric string representation.
|
||||||
|
*/
|
||||||
|
Decimal128.fromString = function (representation) {
|
||||||
|
// Parse state tracking
|
||||||
|
var isNegative = false;
|
||||||
|
var sawRadix = false;
|
||||||
|
var foundNonZero = false;
|
||||||
|
// Total number of significant digits (no leading or trailing zero)
|
||||||
|
var significantDigits = 0;
|
||||||
|
// Total number of significand digits read
|
||||||
|
var nDigitsRead = 0;
|
||||||
|
// Total number of digits (no leading zeros)
|
||||||
|
var nDigits = 0;
|
||||||
|
// The number of the digits after radix
|
||||||
|
var radixPosition = 0;
|
||||||
|
// The index of the first non-zero in *str*
|
||||||
|
var firstNonZero = 0;
|
||||||
|
// Digits Array
|
||||||
|
var digits = [0];
|
||||||
|
// The number of digits in digits
|
||||||
|
var nDigitsStored = 0;
|
||||||
|
// Insertion pointer for digits
|
||||||
|
var digitsInsert = 0;
|
||||||
|
// The index of the first non-zero digit
|
||||||
|
var firstDigit = 0;
|
||||||
|
// The index of the last digit
|
||||||
|
var lastDigit = 0;
|
||||||
|
// Exponent
|
||||||
|
var exponent = 0;
|
||||||
|
// loop index over array
|
||||||
|
var i = 0;
|
||||||
|
// The high 17 digits of the significand
|
||||||
|
var significandHigh = new long_1.Long(0, 0);
|
||||||
|
// The low 17 digits of the significand
|
||||||
|
var significandLow = new long_1.Long(0, 0);
|
||||||
|
// The biased exponent
|
||||||
|
var biasedExponent = 0;
|
||||||
|
// Read index
|
||||||
|
var index = 0;
|
||||||
|
// Naively prevent against REDOS attacks.
|
||||||
|
// TODO: implementing a custom parsing for this, or refactoring the regex would yield
|
||||||
|
// further gains.
|
||||||
|
if (representation.length >= 7000) {
|
||||||
|
throw new error_1.BSONTypeError('' + representation + ' not a valid Decimal128 string');
|
||||||
|
}
|
||||||
|
// Results
|
||||||
|
var stringMatch = representation.match(PARSE_STRING_REGEXP);
|
||||||
|
var infMatch = representation.match(PARSE_INF_REGEXP);
|
||||||
|
var nanMatch = representation.match(PARSE_NAN_REGEXP);
|
||||||
|
// Validate the string
|
||||||
|
if ((!stringMatch && !infMatch && !nanMatch) || representation.length === 0) {
|
||||||
|
throw new error_1.BSONTypeError('' + representation + ' not a valid Decimal128 string');
|
||||||
|
}
|
||||||
|
if (stringMatch) {
|
||||||
|
// full_match = stringMatch[0]
|
||||||
|
// sign = stringMatch[1]
|
||||||
|
var unsignedNumber = stringMatch[2];
|
||||||
|
// stringMatch[3] is undefined if a whole number (ex "1", 12")
|
||||||
|
// but defined if a number w/ decimal in it (ex "1.0, 12.2")
|
||||||
|
var e = stringMatch[4];
|
||||||
|
var expSign = stringMatch[5];
|
||||||
|
var expNumber = stringMatch[6];
|
||||||
|
// they provided e, but didn't give an exponent number. for ex "1e"
|
||||||
|
if (e && expNumber === undefined)
|
||||||
|
invalidErr(representation, 'missing exponent power');
|
||||||
|
// they provided e, but didn't give a number before it. for ex "e1"
|
||||||
|
if (e && unsignedNumber === undefined)
|
||||||
|
invalidErr(representation, 'missing exponent base');
|
||||||
|
if (e === undefined && (expSign || expNumber)) {
|
||||||
|
invalidErr(representation, 'missing e before exponent');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Get the negative or positive sign
|
||||||
|
if (representation[index] === '+' || representation[index] === '-') {
|
||||||
|
isNegative = representation[index++] === '-';
|
||||||
|
}
|
||||||
|
// Check if user passed Infinity or NaN
|
||||||
|
if (!isDigit(representation[index]) && representation[index] !== '.') {
|
||||||
|
if (representation[index] === 'i' || representation[index] === 'I') {
|
||||||
|
return new Decimal128(buffer_1.Buffer.from(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
|
||||||
|
}
|
||||||
|
else if (representation[index] === 'N') {
|
||||||
|
return new Decimal128(buffer_1.Buffer.from(NAN_BUFFER));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Read all the digits
|
||||||
|
while (isDigit(representation[index]) || representation[index] === '.') {
|
||||||
|
if (representation[index] === '.') {
|
||||||
|
if (sawRadix)
|
||||||
|
invalidErr(representation, 'contains multiple periods');
|
||||||
|
sawRadix = true;
|
||||||
|
index = index + 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (nDigitsStored < 34) {
|
||||||
|
if (representation[index] !== '0' || foundNonZero) {
|
||||||
|
if (!foundNonZero) {
|
||||||
|
firstNonZero = nDigitsRead;
|
||||||
|
}
|
||||||
|
foundNonZero = true;
|
||||||
|
// Only store 34 digits
|
||||||
|
digits[digitsInsert++] = parseInt(representation[index], 10);
|
||||||
|
nDigitsStored = nDigitsStored + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (foundNonZero)
|
||||||
|
nDigits = nDigits + 1;
|
||||||
|
if (sawRadix)
|
||||||
|
radixPosition = radixPosition + 1;
|
||||||
|
nDigitsRead = nDigitsRead + 1;
|
||||||
|
index = index + 1;
|
||||||
|
}
|
||||||
|
if (sawRadix && !nDigitsRead)
|
||||||
|
throw new error_1.BSONTypeError('' + representation + ' not a valid Decimal128 string');
|
||||||
|
// Read exponent if exists
|
||||||
|
if (representation[index] === 'e' || representation[index] === 'E') {
|
||||||
|
// Read exponent digits
|
||||||
|
var match = representation.substr(++index).match(EXPONENT_REGEX);
|
||||||
|
// No digits read
|
||||||
|
if (!match || !match[2])
|
||||||
|
return new Decimal128(buffer_1.Buffer.from(NAN_BUFFER));
|
||||||
|
// Get exponent
|
||||||
|
exponent = parseInt(match[0], 10);
|
||||||
|
// Adjust the index
|
||||||
|
index = index + match[0].length;
|
||||||
|
}
|
||||||
|
// Return not a number
|
||||||
|
if (representation[index])
|
||||||
|
return new Decimal128(buffer_1.Buffer.from(NAN_BUFFER));
|
||||||
|
// Done reading input
|
||||||
|
// Find first non-zero digit in digits
|
||||||
|
firstDigit = 0;
|
||||||
|
if (!nDigitsStored) {
|
||||||
|
firstDigit = 0;
|
||||||
|
lastDigit = 0;
|
||||||
|
digits[0] = 0;
|
||||||
|
nDigits = 1;
|
||||||
|
nDigitsStored = 1;
|
||||||
|
significantDigits = 0;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
lastDigit = nDigitsStored - 1;
|
||||||
|
significantDigits = nDigits;
|
||||||
|
if (significantDigits !== 1) {
|
||||||
|
while (digits[firstNonZero + significantDigits - 1] === 0) {
|
||||||
|
significantDigits = significantDigits - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Normalization of exponent
|
||||||
|
// Correct exponent based on radix position, and shift significand as needed
|
||||||
|
// to represent user input
|
||||||
|
// Overflow prevention
|
||||||
|
if (exponent <= radixPosition && radixPosition - exponent > 1 << 14) {
|
||||||
|
exponent = EXPONENT_MIN;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
exponent = exponent - radixPosition;
|
||||||
|
}
|
||||||
|
// Attempt to normalize the exponent
|
||||||
|
while (exponent > EXPONENT_MAX) {
|
||||||
|
// Shift exponent to significand and decrease
|
||||||
|
lastDigit = lastDigit + 1;
|
||||||
|
if (lastDigit - firstDigit > MAX_DIGITS) {
|
||||||
|
// Check if we have a zero then just hard clamp, otherwise fail
|
||||||
|
var digitsString = digits.join('');
|
||||||
|
if (digitsString.match(/^0+$/)) {
|
||||||
|
exponent = EXPONENT_MAX;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
invalidErr(representation, 'overflow');
|
||||||
|
}
|
||||||
|
exponent = exponent - 1;
|
||||||
|
}
|
||||||
|
while (exponent < EXPONENT_MIN || nDigitsStored < nDigits) {
|
||||||
|
// Shift last digit. can only do this if < significant digits than # stored.
|
||||||
|
if (lastDigit === 0 && significantDigits < nDigitsStored) {
|
||||||
|
exponent = EXPONENT_MIN;
|
||||||
|
significantDigits = 0;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (nDigitsStored < nDigits) {
|
||||||
|
// adjust to match digits not stored
|
||||||
|
nDigits = nDigits - 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// adjust to round
|
||||||
|
lastDigit = lastDigit - 1;
|
||||||
|
}
|
||||||
|
if (exponent < EXPONENT_MAX) {
|
||||||
|
exponent = exponent + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Check if we have a zero then just hard clamp, otherwise fail
|
||||||
|
var digitsString = digits.join('');
|
||||||
|
if (digitsString.match(/^0+$/)) {
|
||||||
|
exponent = EXPONENT_MAX;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
invalidErr(representation, 'overflow');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Round
|
||||||
|
// We've normalized the exponent, but might still need to round.
|
||||||
|
if (lastDigit - firstDigit + 1 < significantDigits) {
|
||||||
|
var endOfString = nDigitsRead;
|
||||||
|
// If we have seen a radix point, 'string' is 1 longer than we have
|
||||||
|
// documented with ndigits_read, so inc the position of the first nonzero
|
||||||
|
// digit and the position that digits are read to.
|
||||||
|
if (sawRadix) {
|
||||||
|
firstNonZero = firstNonZero + 1;
|
||||||
|
endOfString = endOfString + 1;
|
||||||
|
}
|
||||||
|
// if negative, we need to increment again to account for - sign at start.
|
||||||
|
if (isNegative) {
|
||||||
|
firstNonZero = firstNonZero + 1;
|
||||||
|
endOfString = endOfString + 1;
|
||||||
|
}
|
||||||
|
var roundDigit = parseInt(representation[firstNonZero + lastDigit + 1], 10);
|
||||||
|
var roundBit = 0;
|
||||||
|
if (roundDigit >= 5) {
|
||||||
|
roundBit = 1;
|
||||||
|
if (roundDigit === 5) {
|
||||||
|
roundBit = digits[lastDigit] % 2 === 1 ? 1 : 0;
|
||||||
|
for (i = firstNonZero + lastDigit + 2; i < endOfString; i++) {
|
||||||
|
if (parseInt(representation[i], 10)) {
|
||||||
|
roundBit = 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (roundBit) {
|
||||||
|
var dIdx = lastDigit;
|
||||||
|
for (; dIdx >= 0; dIdx--) {
|
||||||
|
if (++digits[dIdx] > 9) {
|
||||||
|
digits[dIdx] = 0;
|
||||||
|
// overflowed most significant digit
|
||||||
|
if (dIdx === 0) {
|
||||||
|
if (exponent < EXPONENT_MAX) {
|
||||||
|
exponent = exponent + 1;
|
||||||
|
digits[dIdx] = 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return new Decimal128(buffer_1.Buffer.from(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Encode significand
|
||||||
|
// The high 17 digits of the significand
|
||||||
|
significandHigh = long_1.Long.fromNumber(0);
|
||||||
|
// The low 17 digits of the significand
|
||||||
|
significandLow = long_1.Long.fromNumber(0);
|
||||||
|
// read a zero
|
||||||
|
if (significantDigits === 0) {
|
||||||
|
significandHigh = long_1.Long.fromNumber(0);
|
||||||
|
significandLow = long_1.Long.fromNumber(0);
|
||||||
|
}
|
||||||
|
else if (lastDigit - firstDigit < 17) {
|
||||||
|
var dIdx = firstDigit;
|
||||||
|
significandLow = long_1.Long.fromNumber(digits[dIdx++]);
|
||||||
|
significandHigh = new long_1.Long(0, 0);
|
||||||
|
for (; dIdx <= lastDigit; dIdx++) {
|
||||||
|
significandLow = significandLow.multiply(long_1.Long.fromNumber(10));
|
||||||
|
significandLow = significandLow.add(long_1.Long.fromNumber(digits[dIdx]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
var dIdx = firstDigit;
|
||||||
|
significandHigh = long_1.Long.fromNumber(digits[dIdx++]);
|
||||||
|
for (; dIdx <= lastDigit - 17; dIdx++) {
|
||||||
|
significandHigh = significandHigh.multiply(long_1.Long.fromNumber(10));
|
||||||
|
significandHigh = significandHigh.add(long_1.Long.fromNumber(digits[dIdx]));
|
||||||
|
}
|
||||||
|
significandLow = long_1.Long.fromNumber(digits[dIdx++]);
|
||||||
|
for (; dIdx <= lastDigit; dIdx++) {
|
||||||
|
significandLow = significandLow.multiply(long_1.Long.fromNumber(10));
|
||||||
|
significandLow = significandLow.add(long_1.Long.fromNumber(digits[dIdx]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var significand = multiply64x2(significandHigh, long_1.Long.fromString('100000000000000000'));
|
||||||
|
significand.low = significand.low.add(significandLow);
|
||||||
|
if (lessThan(significand.low, significandLow)) {
|
||||||
|
significand.high = significand.high.add(long_1.Long.fromNumber(1));
|
||||||
|
}
|
||||||
|
// Biased exponent
|
||||||
|
biasedExponent = exponent + EXPONENT_BIAS;
|
||||||
|
var dec = { low: long_1.Long.fromNumber(0), high: long_1.Long.fromNumber(0) };
|
||||||
|
// Encode combination, exponent, and significand.
|
||||||
|
if (significand.high.shiftRightUnsigned(49).and(long_1.Long.fromNumber(1)).equals(long_1.Long.fromNumber(1))) {
|
||||||
|
// Encode '11' into bits 1 to 3
|
||||||
|
dec.high = dec.high.or(long_1.Long.fromNumber(0x3).shiftLeft(61));
|
||||||
|
dec.high = dec.high.or(long_1.Long.fromNumber(biasedExponent).and(long_1.Long.fromNumber(0x3fff).shiftLeft(47)));
|
||||||
|
dec.high = dec.high.or(significand.high.and(long_1.Long.fromNumber(0x7fffffffffff)));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
dec.high = dec.high.or(long_1.Long.fromNumber(biasedExponent & 0x3fff).shiftLeft(49));
|
||||||
|
dec.high = dec.high.or(significand.high.and(long_1.Long.fromNumber(0x1ffffffffffff)));
|
||||||
|
}
|
||||||
|
dec.low = significand.low;
|
||||||
|
// Encode sign
|
||||||
|
if (isNegative) {
|
||||||
|
dec.high = dec.high.or(long_1.Long.fromString('9223372036854775808'));
|
||||||
|
}
|
||||||
|
// Encode into a buffer
|
||||||
|
var buffer = buffer_1.Buffer.alloc(16);
|
||||||
|
index = 0;
|
||||||
|
// Encode the low 64 bits of the decimal
|
||||||
|
// Encode low bits
|
||||||
|
buffer[index++] = dec.low.low & 0xff;
|
||||||
|
buffer[index++] = (dec.low.low >> 8) & 0xff;
|
||||||
|
buffer[index++] = (dec.low.low >> 16) & 0xff;
|
||||||
|
buffer[index++] = (dec.low.low >> 24) & 0xff;
|
||||||
|
// Encode high bits
|
||||||
|
buffer[index++] = dec.low.high & 0xff;
|
||||||
|
buffer[index++] = (dec.low.high >> 8) & 0xff;
|
||||||
|
buffer[index++] = (dec.low.high >> 16) & 0xff;
|
||||||
|
buffer[index++] = (dec.low.high >> 24) & 0xff;
|
||||||
|
// Encode the high 64 bits of the decimal
|
||||||
|
// Encode low bits
|
||||||
|
buffer[index++] = dec.high.low & 0xff;
|
||||||
|
buffer[index++] = (dec.high.low >> 8) & 0xff;
|
||||||
|
buffer[index++] = (dec.high.low >> 16) & 0xff;
|
||||||
|
buffer[index++] = (dec.high.low >> 24) & 0xff;
|
||||||
|
// Encode high bits
|
||||||
|
buffer[index++] = dec.high.high & 0xff;
|
||||||
|
buffer[index++] = (dec.high.high >> 8) & 0xff;
|
||||||
|
buffer[index++] = (dec.high.high >> 16) & 0xff;
|
||||||
|
buffer[index++] = (dec.high.high >> 24) & 0xff;
|
||||||
|
// Return the new Decimal128
|
||||||
|
return new Decimal128(buffer);
|
||||||
|
};
|
||||||
|
/** Create a string representation of the raw Decimal128 value */
|
||||||
|
Decimal128.prototype.toString = function () {
|
||||||
|
// Note: bits in this routine are referred to starting at 0,
|
||||||
|
// from the sign bit, towards the coefficient.
|
||||||
|
// decoded biased exponent (14 bits)
|
||||||
|
var biased_exponent;
|
||||||
|
// the number of significand digits
|
||||||
|
var significand_digits = 0;
|
||||||
|
// the base-10 digits in the significand
|
||||||
|
var significand = new Array(36);
|
||||||
|
for (var i = 0; i < significand.length; i++)
|
||||||
|
significand[i] = 0;
|
||||||
|
// read pointer into significand
|
||||||
|
var index = 0;
|
||||||
|
// true if the number is zero
|
||||||
|
var is_zero = false;
|
||||||
|
// the most significant significand bits (50-46)
|
||||||
|
var significand_msb;
|
||||||
|
// temporary storage for significand decoding
|
||||||
|
var significand128 = { parts: [0, 0, 0, 0] };
|
||||||
|
// indexing variables
|
||||||
|
var j, k;
|
||||||
|
// Output string
|
||||||
|
var string = [];
|
||||||
|
// Unpack index
|
||||||
|
index = 0;
|
||||||
|
// Buffer reference
|
||||||
|
var buffer = this.bytes;
|
||||||
|
// Unpack the low 64bits into a long
|
||||||
|
// bits 96 - 127
|
||||||
|
var low = buffer[index++] | (buffer[index++] << 8) | (buffer[index++] << 16) | (buffer[index++] << 24);
|
||||||
|
// bits 64 - 95
|
||||||
|
var midl = buffer[index++] | (buffer[index++] << 8) | (buffer[index++] << 16) | (buffer[index++] << 24);
|
||||||
|
// Unpack the high 64bits into a long
|
||||||
|
// bits 32 - 63
|
||||||
|
var midh = buffer[index++] | (buffer[index++] << 8) | (buffer[index++] << 16) | (buffer[index++] << 24);
|
||||||
|
// bits 0 - 31
|
||||||
|
var high = buffer[index++] | (buffer[index++] << 8) | (buffer[index++] << 16) | (buffer[index++] << 24);
|
||||||
|
// Unpack index
|
||||||
|
index = 0;
|
||||||
|
// Create the state of the decimal
|
||||||
|
var dec = {
|
||||||
|
low: new long_1.Long(low, midl),
|
||||||
|
high: new long_1.Long(midh, high)
|
||||||
|
};
|
||||||
|
if (dec.high.lessThan(long_1.Long.ZERO)) {
|
||||||
|
string.push('-');
|
||||||
|
}
|
||||||
|
// Decode combination field and exponent
|
||||||
|
// bits 1 - 5
|
||||||
|
var combination = (high >> 26) & COMBINATION_MASK;
|
||||||
|
if (combination >> 3 === 3) {
|
||||||
|
// Check for 'special' values
|
||||||
|
if (combination === COMBINATION_INFINITY) {
|
||||||
|
return string.join('') + 'Infinity';
|
||||||
|
}
|
||||||
|
else if (combination === COMBINATION_NAN) {
|
||||||
|
return 'NaN';
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
biased_exponent = (high >> 15) & EXPONENT_MASK;
|
||||||
|
significand_msb = 0x08 + ((high >> 14) & 0x01);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
significand_msb = (high >> 14) & 0x07;
|
||||||
|
biased_exponent = (high >> 17) & EXPONENT_MASK;
|
||||||
|
}
|
||||||
|
// unbiased exponent
|
||||||
|
var exponent = biased_exponent - EXPONENT_BIAS;
|
||||||
|
// Create string of significand digits
|
||||||
|
// Convert the 114-bit binary number represented by
|
||||||
|
// (significand_high, significand_low) to at most 34 decimal
|
||||||
|
// digits through modulo and division.
|
||||||
|
significand128.parts[0] = (high & 0x3fff) + ((significand_msb & 0xf) << 14);
|
||||||
|
significand128.parts[1] = midh;
|
||||||
|
significand128.parts[2] = midl;
|
||||||
|
significand128.parts[3] = low;
|
||||||
|
if (significand128.parts[0] === 0 &&
|
||||||
|
significand128.parts[1] === 0 &&
|
||||||
|
significand128.parts[2] === 0 &&
|
||||||
|
significand128.parts[3] === 0) {
|
||||||
|
is_zero = true;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
for (k = 3; k >= 0; k--) {
|
||||||
|
var least_digits = 0;
|
||||||
|
// Perform the divide
|
||||||
|
var result = divideu128(significand128);
|
||||||
|
significand128 = result.quotient;
|
||||||
|
least_digits = result.rem.low;
|
||||||
|
// We now have the 9 least significant digits (in base 2).
|
||||||
|
// Convert and output to string.
|
||||||
|
if (!least_digits)
|
||||||
|
continue;
|
||||||
|
for (j = 8; j >= 0; j--) {
|
||||||
|
// significand[k * 9 + j] = Math.round(least_digits % 10);
|
||||||
|
significand[k * 9 + j] = least_digits % 10;
|
||||||
|
// least_digits = Math.round(least_digits / 10);
|
||||||
|
least_digits = Math.floor(least_digits / 10);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Output format options:
|
||||||
|
// Scientific - [-]d.dddE(+/-)dd or [-]dE(+/-)dd
|
||||||
|
// Regular - ddd.ddd
|
        if (is_zero) {
            significand_digits = 1;
            significand[index] = 0;
        }
        else {
            significand_digits = 36;
            while (!significand[index]) {
                significand_digits = significand_digits - 1;
                index = index + 1;
            }
        }
        // the exponent if scientific notation is used
        var scientific_exponent = significand_digits - 1 + exponent;
        // The scientific exponent checks are dictated by the string conversion
        // specification and are somewhat arbitrary cutoffs.
        //
        // We must check exponent > 0, because if this is the case, the number
        // has trailing zeros. However, we *cannot* output these trailing zeros,
        // because doing so would change the precision of the value, and would
        // change stored data if the string converted number is round tripped.
        if (scientific_exponent >= 34 || scientific_exponent <= -7 || exponent > 0) {
            // Scientific format
            // if there are too many significant digits, we should just be treating numbers
            // as + or - 0 and using the non-scientific exponent (this is for the "invalid
            // representation should be treated as 0/-0" spec cases in decimal128-1.json)
            if (significand_digits > 34) {
                string.push("" + 0);
                if (exponent > 0)
                    string.push('E+' + exponent);
                else if (exponent < 0)
                    string.push('E' + exponent);
                return string.join('');
            }
            string.push("" + significand[index++]);
            significand_digits = significand_digits - 1;
            if (significand_digits) {
                string.push('.');
            }
            for (var i = 0; i < significand_digits; i++) {
                string.push("" + significand[index++]);
            }
            // Exponent
            string.push('E');
            if (scientific_exponent > 0) {
                string.push('+' + scientific_exponent);
            }
            else {
                string.push("" + scientific_exponent);
            }
        }
        else {
            // Regular format with no decimal place
            if (exponent >= 0) {
                for (var i = 0; i < significand_digits; i++) {
                    string.push("" + significand[index++]);
                }
            }
            else {
                var radix_position = significand_digits + exponent;
                // non-zero digits before radix
                if (radix_position > 0) {
                    for (var i = 0; i < radix_position; i++) {
                        string.push("" + significand[index++]);
                    }
                }
                else {
                    string.push('0');
                }
                string.push('.');
                // add leading zeros after radix
                while (radix_position++ < 0) {
                    string.push('0');
                }
                for (var i = 0; i < significand_digits - Math.max(radix_position - 1, 0); i++) {
                    string.push("" + significand[index++]);
                }
            }
        }
        return string.join('');
    };
    Decimal128.prototype.toJSON = function () {
        return { $numberDecimal: this.toString() };
    };
    /** @internal */
    Decimal128.prototype.toExtendedJSON = function () {
        return { $numberDecimal: this.toString() };
    };
    /** @internal */
    Decimal128.fromExtendedJSON = function (doc) {
        return Decimal128.fromString(doc.$numberDecimal);
    };
    /** @internal */
    Decimal128.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
        return this.inspect();
    };
    Decimal128.prototype.inspect = function () {
        return "new Decimal128(\"" + this.toString() + "\")";
    };
    return Decimal128;
}());
exports.Decimal128 = Decimal128;
Object.defineProperty(Decimal128.prototype, '_bsontype', { value: 'Decimal128' });
//# sourceMappingURL=decimal128.js.map
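For context on the formatting rules above, here is a minimal usage sketch (assuming the vendored bson package is required normally; the commented results follow the scientific/regular cutoffs implemented in toString and are illustrative, not taken from a test run):

const { Decimal128 } = require('bson');

// Small negative exponents stay in regular format with a radix point.
console.log(Decimal128.fromString('0.001').toString());   // '0.001'

// Once the scientific exponent drops to -7 or below, E notation is used.
console.log(Decimal128.fromString('1E-8').toString());    // '1E-8'

// toJSON/toExtendedJSON round-trip through the canonical $numberDecimal form.
const price = Decimal128.fromString('10.99');
console.log(price.toJSON());                               // { $numberDecimal: '10.99' }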
1 node_modules/bson/lib/decimal128.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
75 node_modules/bson/lib/double.js generated vendored Normal file
@@ -0,0 +1,75 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Double = void 0;
/**
 * A class representation of the BSON Double type.
 * @public
 */
var Double = /** @class */ (function () {
    /**
     * Create a Double type
     *
     * @param value - the number we want to represent as a double.
     */
    function Double(value) {
        if (!(this instanceof Double))
            return new Double(value);
        if (value instanceof Number) {
            value = value.valueOf();
        }
        this.value = +value;
    }
    /**
     * Access the number value.
     *
     * @returns returns the wrapped double number.
     */
    Double.prototype.valueOf = function () {
        return this.value;
    };
    Double.prototype.toJSON = function () {
        return this.value;
    };
    Double.prototype.toString = function (radix) {
        return this.value.toString(radix);
    };
    /** @internal */
    Double.prototype.toExtendedJSON = function (options) {
        if (options && (options.legacy || (options.relaxed && isFinite(this.value)))) {
            return this.value;
        }
        // NOTE: JavaScript has +0 and -0, apparently to model limit calculations. If a user
        // explicitly provided `-0` then we need to ensure the sign makes it into the output
        if (Object.is(Math.sign(this.value), -0)) {
            return { $numberDouble: "-" + this.value.toFixed(1) };
        }
        var $numberDouble;
        if (Number.isInteger(this.value)) {
            $numberDouble = this.value.toFixed(1);
            if ($numberDouble.length >= 13) {
                $numberDouble = this.value.toExponential(13).toUpperCase();
            }
        }
        else {
            $numberDouble = this.value.toString();
        }
        return { $numberDouble: $numberDouble };
    };
    /** @internal */
    Double.fromExtendedJSON = function (doc, options) {
        var doubleValue = parseFloat(doc.$numberDouble);
        return options && options.relaxed ? doubleValue : new Double(doubleValue);
    };
    /** @internal */
    Double.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
        return this.inspect();
    };
    Double.prototype.inspect = function () {
        var eJSON = this.toExtendedJSON();
        return "new Double(" + eJSON.$numberDouble + ")";
    };
    return Double;
}());
exports.Double = Double;
Object.defineProperty(Double.prototype, '_bsontype', { value: 'Double' });
//# sourceMappingURL=double.js.map
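A short usage sketch of the Double wrapper defined above (again assuming the vendored bson package; toExtendedJSON is marked @internal, so it is shown here only to illustrate the serialization logic, with expected values in comments derived from that logic):

const { Double } = require('bson');

const d = new Double(42);
console.log(d.valueOf());                          // 42
console.log(d.toJSON());                           // 42

// Canonical form keeps integral doubles distinguishable from Int32/Long.
console.log(d.toExtendedJSON());                   // { $numberDouble: '42.0' }

// Relaxed mode returns the plain JS number instead of the wrapped form.
console.log(d.toExtendedJSON({ relaxed: true }));  // 42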
1 node_modules/bson/lib/double.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"double.js","sourceRoot":"","sources":["../src/double.ts"],"names":[],"mappings":";;;AAOA;;;GAGG;AACH;IAIE;;;;OAIG;IACH,gBAAY,KAAa;QACvB,IAAI,CAAC,CAAC,IAAI,YAAY,MAAM,CAAC;YAAE,OAAO,IAAI,MAAM,CAAC,KAAK,CAAC,CAAC;QAExD,IAAK,KAAiB,YAAY,MAAM,EAAE;YACxC,KAAK,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;SACzB;QAED,IAAI,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC;IACtB,CAAC;IAED;;;;OAIG;IACH,wBAAO,GAAP;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED,yBAAQ,GAAR,UAAS,KAAc;QACrB,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IACpC,CAAC;IAED,gBAAgB;IAChB,+BAAc,GAAd,UAAe,OAAsB;QACnC,IAAI,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;QAED,oFAAoF;QACpF,oFAAoF;QACpF,IAAI,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;YACxC,OAAO,EAAE,aAAa,EAAE,MAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAG,EAAE,CAAC;SACvD;QAED,IAAI,aAAqB,CAAC;QAC1B,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAChC,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YACtC,IAAI,aAAa,CAAC,MAAM,IAAI,EAAE,EAAE;gBAC9B,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;aAC5D;SACF;aAAM;YACL,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC;SACvC;QAED,OAAO,EAAE,aAAa,eAAA,EAAE,CAAC;IAC3B,CAAC;IAED,gBAAgB;IACT,uBAAgB,GAAvB,UAAwB,GAAmB,EAAE,OAAsB;QACjE,IAAM,WAAW,GAAG,UAAU,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;QAClD,OAAO,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,CAAC;IAC5E,CAAC;IAED,gBAAgB;IAChB,iBAAC,MAAM,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC,GAA1C;QACE,OAAO,IAAI,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED,wBAAO,GAAP;QACE,IAAM,KAAK,GAAG,IAAI,CAAC,cAAc,EAAoB,CAAC;QACtD,OAAO,gBAAc,KAAK,CAAC,aAAa,MAAG,CAAC;IAC9C,CAAC;IACH,aAAC;AAAD,CAAC,AA5ED,IA4EC;AA5EY,wBAAM;AA8EnB,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,SAAS,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAC"}
25 node_modules/bson/lib/ensure_buffer.js generated vendored Normal file
@@ -0,0 +1,25 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ensureBuffer = void 0;
var buffer_1 = require("buffer");
var error_1 = require("./error");
var utils_1 = require("./parser/utils");
/**
 * Makes sure that, if a Uint8Array is passed in, it is wrapped in a Buffer.
 *
 * @param potentialBuffer - The potential buffer
 * @returns Buffer the input if potentialBuffer is a buffer, or a buffer that
 * wraps a passed in Uint8Array
 * @throws BSONTypeError If anything other than a Buffer or Uint8Array is passed in
 */
function ensureBuffer(potentialBuffer) {
    if (ArrayBuffer.isView(potentialBuffer)) {
        return buffer_1.Buffer.from(potentialBuffer.buffer, potentialBuffer.byteOffset, potentialBuffer.byteLength);
    }
    if (utils_1.isAnyArrayBuffer(potentialBuffer)) {
        return buffer_1.Buffer.from(potentialBuffer);
    }
    throw new error_1.BSONTypeError('Must use either Buffer or TypedArray');
}
exports.ensureBuffer = ensureBuffer;
//# sourceMappingURL=ensure_buffer.js.map
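The helper above normalizes whatever byte container it is handed into a Node Buffer. A standalone sketch of the same pattern, using only Node's Buffer API (toBuffer is a hypothetical name, and the plain ArrayBuffer check is a simplification of the internal isAnyArrayBuffer utility):

const { Buffer } = require('buffer');

// Hypothetical helper mirroring ensureBuffer's behaviour.
function toBuffer(input) {
    if (ArrayBuffer.isView(input)) {
        // Wraps the typed array's underlying memory without copying it.
        return Buffer.from(input.buffer, input.byteOffset, input.byteLength);
    }
    if (input instanceof ArrayBuffer) {
        return Buffer.from(input);
    }
    throw new TypeError('Must use either Buffer or TypedArray');
}

console.log(toBuffer(new Uint8Array([1, 2, 3])));  // <Buffer 01 02 03>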
1 node_modules/bson/lib/ensure_buffer.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ensure_buffer.js","sourceRoot":"","sources":["../src/ensure_buffer.ts"],"names":[],"mappings":";;;AAAA,iCAAgC;AAChC,iCAAwC;AACxC,wCAAkD;AAElD;;;;;;;GAOG;AACH,SAAgB,YAAY,CAAC,eAAuD;IAClF,IAAI,WAAW,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE;QACvC,OAAO,eAAM,CAAC,IAAI,CAChB,eAAe,CAAC,MAAM,EACtB,eAAe,CAAC,UAAU,EAC1B,eAAe,CAAC,UAAU,CAC3B,CAAC;KACH;IAED,IAAI,wBAAgB,CAAC,eAAe,CAAC,EAAE;QACrC,OAAO,eAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;KACrC;IAED,MAAM,IAAI,qBAAa,CAAC,sCAAsC,CAAC,CAAC;AAClE,CAAC;AAdD,oCAcC"}
55 node_modules/bson/lib/error.js generated vendored Normal file
@@ -0,0 +1,55 @@
"use strict";
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.BSONTypeError = exports.BSONError = void 0;
/** @public */
var BSONError = /** @class */ (function (_super) {
    __extends(BSONError, _super);
    function BSONError(message) {
        var _this = _super.call(this, message) || this;
        Object.setPrototypeOf(_this, BSONError.prototype);
        return _this;
    }
    Object.defineProperty(BSONError.prototype, "name", {
        get: function () {
            return 'BSONError';
        },
        enumerable: false,
        configurable: true
    });
    return BSONError;
}(Error));
exports.BSONError = BSONError;
/** @public */
var BSONTypeError = /** @class */ (function (_super) {
    __extends(BSONTypeError, _super);
    function BSONTypeError(message) {
        var _this = _super.call(this, message) || this;
        Object.setPrototypeOf(_this, BSONTypeError.prototype);
        return _this;
    }
    Object.defineProperty(BSONTypeError.prototype, "name", {
        get: function () {
            return 'BSONTypeError';
        },
        enumerable: false,
        configurable: true
    });
    return BSONTypeError;
}(TypeError));
exports.BSONTypeError = BSONTypeError;
//# sourceMappingURL=error.js.map
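Both error classes keep a working prototype chain (note the setPrototypeOf calls), so instanceof checks behave correctly across the transpiled ES5 inheritance. A small sketch, assuming the module can be required directly at the path this diff adds:

const { BSONError, BSONTypeError } = require('bson/lib/error');

try {
    throw new BSONTypeError('Must use either Buffer or TypedArray');
} catch (err) {
    console.log(err.name);                      // 'BSONTypeError'
    console.log(err instanceof BSONTypeError);  // true
    console.log(err instanceof TypeError);      // true
    console.log(err instanceof BSONError);      // false (separate hierarchy)
}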
Some files were not shown because too many files have changed in this diff.