aboutsummaryrefslogtreecommitdiff
path: root/srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js
diff options
context:
space:
mode:
author    Charles <sircharlesaze@gmail.com>    2020-01-09 10:55:03 +0100
committer Charles <sircharlesaze@gmail.com>    2020-01-09 13:09:38 +0100
commit    04d6d5ca99ebfd1cebb8ce06618fb3811fc1a8aa (patch)
tree      5c691241355c943a3c68ddb06b8cf8c60aa11319 /srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js
parent    7e0d85db834d6351ed85d01e5126ac31dc510b86 (diff)
downloadft_server-04d6d5ca99ebfd1cebb8ce06618fb3811fc1a8aa.tar.gz
ft_server-04d6d5ca99ebfd1cebb8ce06618fb3811fc1a8aa.tar.bz2
ft_server-04d6d5ca99ebfd1cebb8ce06618fb3811fc1a8aa.zip
phpmyadmin working
Diffstat (limited to 'srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js')
-rw-r--r-- srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js    113
1 file changed, 113 insertions, 0 deletions
diff --git a/srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js b/srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js
new file mode 100644
index 0000000..d9274dc
--- /dev/null
+++ b/srcs/phpmyadmin/vendor/williamdes/mariadb-mysql-kbs/src/common.js
@@ -0,0 +1,113 @@
+'use strict';
+
+const fs = require('fs');
+const Crawler = require('crawler');
+const path = require('path');
+
/**
 * Recursively sort the keys of an object (and optionally its arrays) so
 * that serialization yields a stable, deterministic output.
 * @see https://stackoverflow.com/a/48112249/5155484
 * @param {Object} obj The value to sort (non-objects are returned as-is)
 * @param {Function} arraySorter The sorter callback applied to arrays (optional)
 * @return {Object} A new object with sorted keys; arrays are sorted in place
 */
const sortObject = function(obj, arraySorter) {
    // null must be handled before the typeof test: typeof null === 'object',
    // and without this guard a null value would be turned into {}.
    if (obj === null || typeof obj !== 'object') {
        return obj;
    }
    if (Array.isArray(obj)) {
        if (arraySorter) {
            obj.sort(arraySorter);
        }
        for (var i = 0; i < obj.length; i++) {
            obj[i] = sortObject(obj[i], arraySorter);
        }
        return obj;
    }
    var temp = {};
    var keys = [];
    for (var key in obj) {
        keys.push(key);
    }
    keys.sort();
    for (var index in keys) {
        temp[keys[index]] = sortObject(obj[keys[index]], arraySorter);
    }
    return temp;
};
+
/**
 * Serialize data as pretty-printed JSON (keys sorted) and write it to disk.
 * Write errors are logged to the console; cbSuccess fires only on success.
 */
const writeJSON = function(filename, data, cbSuccess = null) {
    const contents = JSON.stringify(sortObject(data), null, 2) + '\n';
    fs.writeFile(filename, contents, function(err) {
        if (err) {
            return console.log(err);
        }
        if (cbSuccess !== null) {
            cbSuccess();
        }
    });
};
+
/**
 * Read a file, parse its contents as JSON and hand the result (plus the
 * filename) to callbackSuccess. Read errors are logged and the callback
 * is skipped.
 */
const readJSON = function(filename, callbackSuccess) {
    fs.readFile(filename, 'utf8', (err, contents) => {
        if (err) {
            return console.log(err);
        }
        callbackSuccess(JSON.parse(contents), filename);
    });
};
+
/**
 * List the entries of a directory and pass them (plus the directory name)
 * to callbackSuccess. Errors are logged and the callback is skipped.
 */
const listDirectory = function(dirname, callbackSuccess) {
    fs.readdir(dirname, function(err, entries) {
        if (err) {
            return console.log(err);
        }
        callbackSuccess(entries, dirname);
    });
};
+
/**
 * Persist one crawled page as <filePrefix><name>.json in the data directory
 * next to this source tree. onWriteSuccess is forwarded to writeJSON and
 * fires after a successful write.
 */
const writePage = function(filePrefix, name, url, data, onWriteSuccess) {
    const pageKB = { url, name, data };
    const target = path.join(__dirname, '../', 'data', filePrefix + pageKB.name + '.json');
    writeJSON(target, pageKB, onWriteSuccess);
};
+
/**
 * Crawl the given pages one at a time, parse each with parsePage and write
 * the extracted data to disk via writePage.
 * @param {Array} pages List of {url, name} descriptors to crawl
 * @param {String} filePrefix Prefix for the generated JSON file names
 * @param {Function} parsePage Called with (cheerio $, callback(anchors))
 * @return {Promise} Resolves once every page has been handled
 */
const processDataExtraction = function(pages, filePrefix, parsePage) {
    return new Promise(resolve => {
        // Nothing to crawl: resolve immediately instead of waiting forever
        // for a crawler callback that will never fire.
        if (pages.length === 0) {
            resolve();
            return;
        }
        var nbrPagesProcessed = 0;
        var markPageDone = function() {
            nbrPagesProcessed++;
            if (nbrPagesProcessed === pages.length) {
                resolve();
            }
        };
        var crawler = new Crawler({
            maxConnections: 1,
            // This will be called for each crawled page
            callback: function(error, res, done) {
                if (error) {
                    console.log(error);
                    // Count failed pages too; otherwise a single crawl error
                    // would leave the promise pending forever.
                    markPageDone();
                } else {
                    console.log('URL : ' + res.options.url);
                    parsePage(res.$, anchors => {
                        writePage(filePrefix, res.options.name, res.options.url, anchors, markPageDone);
                    });
                }
                done();
            },
        });
        crawler.queue(
            pages.map(page => {
                return { uri: page.url, name: page.name, url: page.url };
            })
        );
    });
};
+
+module.exports = {
+ processDataExtraction: processDataExtraction,
+ listDirectory: listDirectory,
+ readJSON: readJSON,
+ writeJSON: writeJSON,
+};