initial commit

This commit is contained in:
2024-05-22 20:29:46 -04:00
commit 5bb80dbc90
5 changed files with 434 additions and 0 deletions
+3
View File
@@ -0,0 +1,3 @@
node_modules
config.json
tests/config.json
+283
View File
@@ -0,0 +1,283 @@
import axios from "axios";
import base64 from 'base-64'
// import stream from 'stream';
const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
class ghelper {
    /**
     * Logs an axios error (full response when available) and returns null
     * so callers can treat failures as "no data" instead of throwing.
     * @param {Error} err - error thrown by axios
     * @returns {null}
     */
    #errhelper(err) {
        if (err.response) console.error(err.response);
        else console.error(err);
        return null;
    }

    /**
     * Appends `toAdd` to the file at `fpath`, creating the file if it does
     * not exist (a 404 on the initial GET is treated as an empty file).
     * @param {String} fpath - repo-relative file path
     * @param {String} toAdd - text to append
     * @returns {Promise<Object | null>} commit response data, or null on failure
     */
    async appendToFile(fpath, toAdd) {
        try {
            const url = `${this.ghurl}/contents/${fpath}`;
            const response = await axios.get(url, {
                headers: this.authHeaders,
            }).catch((err) => {
                // BUG FIX: was `err.res.statusCode` — axios exposes `err.response.status`,
                // so the old check threw a TypeError instead of silently tolerating 404s
                if (err.response?.status !== 404) console.error(err);
                return null;
            });
            let content = (response?.data) ? base64.decode(response.data.content) : "";
            content += toAdd;
            // pass the existing sha (if any) so GitHub treats this as an update, not a create
            return this.addToRepo(fpath, content, response?.data?.sha);
        }
        catch (err) {
            console.error(err);
            return null;
        }
    }

    /**
     * Creates or updates a file in the repo via the GitHub contents API.
     * @param {String | Buffer} contentRaw - content to store (base64-encoded before upload)
     * @param {String} fpath - repo-relative file path
     * @param {String} [sha] - existing file sha; required by GitHub when updating
     * @returns {Promise<Object | null>} commit response data, or null on failure
     */
    async addToRepo(fpath, contentRaw, sha = undefined) {
        try {
            const url = `${this.ghurl}/contents/${fpath}`;
            const content = (contentRaw instanceof Buffer) ? contentRaw : Buffer.from(contentRaw);
            // Prepare the commit payload
            const updateData = {
                message: `added content for ${fpath}`,
                content: content.toString('base64'),
            };
            if (sha) updateData.sha = sha;
            // Commit the update
            const updateResponse = await axios.put(url, updateData, {
                headers: this.authHeaders
            }).catch((err) => this.#errhelper(err));
            return updateResponse?.data;
        }
        catch (err) {
            console.error(err);
            return null;
        }
    }

    /**
     * Deletes a file from the repo. The current sha is looked up first since
     * the GitHub delete endpoint requires it; a no-op when the file is missing.
     * @param {String} fpath - repo-relative file path
     * @returns {Promise<Object | null | undefined>} delete response, or undefined/null
     */
    async remFromRepo(fpath) {
        const url = `${this.ghurl}/contents/${fpath}`;
        const resget = await axios.get(url, {
            headers: this.authHeaders
        }).catch((err) => this.#errhelper(err));
        const sha = resget?.data?.sha;
        if (!sha) return;
        const resdel = await axios.delete(url, {
            headers: this.authHeaders,
            data: {
                // BUG FIX: the commit message said "added content" for a deletion
                message: `removed content for ${fpath}`,
                sha
            }
        }).catch((err) => this.#errhelper(err));
        return resdel;
    }

    /**
     * Fetches the git tree of the head commit of `branchName`.
     * @param {String} branchName
     * @returns {Promise<Object | null | undefined>} tree payload, or null/undefined on failure
     */
    async readTree(branchName) {
        try {
            let url = `${this.ghurl}/branches/${branchName}`;
            let res = await axios.get(url, {
                headers: this.authHeaders
            }).catch((err) => this.#errhelper(err));
            if (!res) return;
            // branch payload -> head commit -> tree sha
            const treesha = res.data.commit.commit.tree.sha;
            url = `${this.ghurl}/git/trees/${treesha}`;
            res = await axios.get(url, {
                headers: this.authHeaders
            }).catch((err) => this.#errhelper(err));
            return res?.data;
        }
        catch (err) {
            console.error(err);
            return null;
        }
    }

    /**
     * if dPath is a file, then the contents of the file will be returned, otherwise the directory structure will be returned
     * @param {String} [dPath] - repo-relative path; omit for the repo root
     * @param {Boolean} [getSha=false] - when true, return only the entry's sha
     * @returns {Promise<Buffer | Array<{type: String, name: String}> | String | false>}
     */
    async getStructure(dPath, getSha = false) {
        try {
            let url = `${this.ghurl}/contents`;
            if (dPath) url += `/${dPath}`;
            const res = await axios.get(url, {
                headers: this.authHeaders
            });
            const { data } = res;
            if (getSha) return res?.data?.sha;
            if (Array.isArray(data)) return data.map(o => ({ type: o.type, name: o.name }));
            return Buffer.from(data.content, data.encoding);
        }
        catch (err) {
            // a 404 just means "does not exist" — only log unexpected errors.
            // BUG FIX: guard with ?. — non-HTTP errors (no `response`) used to throw here
            if (err.response?.status !== 404) console.error(err);
            return false;
        }
    }

    /**
     * @param {String} repoURL - GitHub API repo URL, e.g. https://api.github.com/repos/owner/repo
     * @param {String} token - GitHub personal access token
     */
    constructor(repoURL, token) {
        this.ghurl = repoURL;
        this.authHeaders = {
            'Authorization': `token ${token}`
        };
    }
}
/**
 * Minimal write-stream stand-in: write() queues appends through a customFs
 * instance, close() awaits them all.
 */
export class mockFileObj {
    /** @type {String} */
    #fpath;
    /** @type {String} */
    #encoding;
    /** @type {Promise[]} */
    #fptrs;
    /** @type {customFs} */
    #cfs;

    /**
     * Queues an append of `toAdd` to the backing file; the resulting promise
     * is tracked so close() can await it.
     * @param {String} toAdd - text to append
     */
    write(toAdd) {
        const r = this.#cfs.appendToFile(this.#fpath, toAdd);
        this.#fptrs.push(r);
    }

    /**
     * Waits for every queued write to finish.
     * @returns {Promise<true | null>} true on success, null if any write rejected
     */
    async close() {
        try {
            await Promise.all(this.#fptrs);
            return true;
        }
        catch (err) {
            console.error(err);
            return null;
        }
    }

    /**
     * @param {customFs} cfs - file system used to perform the writes
     * @param {String} fpath - destination path in the repo
     * @param {String} [encoding]
     */
    constructor(cfs, fpath, encoding = undefined) {
        // BUG FIX: `cfs` was never stored, so every write() crashed on `this.#cfs`
        this.#cfs = cfs;
        this.#fpath = fpath;
        this.#encoding = encoding;
        this.#fptrs = [];
    }
}
/**
 * An `fs`-flavored facade that reads and writes files in a remote GitHub
 * repository instead of the local disk.
 */
export default class customFs {
    /** @type {ghelper} */
    ghs;

    /**
     * Recursively collects every file path (as `dir/name`) under `dirPath`.
     * @param {String} dirPath
     * @returns {Promise<String[]>}
     */
    async #getFilesInDir(dirPath) {
        const entries = await this.readdirSync(dirPath);
        const filePaths = [];
        const subdirs = [];
        for (const entry of entries) {
            if (entry.type === 'file') filePaths.push(`${dirPath}/${entry.name}`);
            else if (entry.type === 'dir') subdirs.push(entry);
        }
        const nested = await Promise.all(
            subdirs.map((d) => this.#getFilesInDir(`${dirPath}/${d.name}`))
        );
        return filePaths.concat(...nested);
    }

    /**
     * Removes a file, or (with `opts.recursive`) every file under a directory.
     * @param {String} dirPath
     * @param {{recursive: Boolean}} [opts]
     * @returns {Promise<String[] | Object | undefined>} deleted paths for a
     *   recursive directory removal, otherwise the single-file delete result
     */
    async rmSync(dirPath, opts) {
        const listing = await this.readdirSync(dirPath);
        // guard clause: plain file, or directory without the recursive flag
        if (!Array.isArray(listing) || !opts?.recursive) {
            return await this.ghs.remFromRepo(dirPath);
        }
        // walk the tree, then delete each file one at a time
        const doomed = await this.#getFilesInDir(dirPath);
        for (const fname of doomed) {
            await this.ghs.remFromRepo(fname);
        }
        return doomed;
    }

    /** Appends `toWrite` to the remote file at `fPath`. */
    appendToFile = (fPath, toWrite) => this.ghs.appendToFile(fPath, toWrite);

    /**
     * Writes (creates or overwrites) a file in the repo.
     * @param {string} fPath
     * @param {any} toWriteRaw
     * @param {{encoding:string}} [opts] - e.g. { encoding: 'base64' }
     * @returns {Promise<Object | null>}
     */
    async writeFileSync(fPath, toWriteRaw, opts = undefined) {
        const enc = opts?.encoding;
        const payload = enc ? Buffer.from(toWriteRaw).toString(enc) : toWriteRaw;
        // look up the existing sha (if any) so overwrites succeed
        const existingSha = await this.existsSync(fPath);
        await wait(1000);
        return this.ghs.addToRepo(fPath, payload, existingSha);
    }

    /** Callback-style wrapper around writeFileSync. */
    writeFile = (fName, toWrite, cb) => {
        return this.writeFileSync(fName, toWrite).then(() => cb()).catch(cb);
    };

    /** Reads a file's contents (Buffer) or a directory listing. */
    readFileSync = async (fName) => this.ghs.getStructure(fName);

    /**
     * Lists a directory; an empty array when the path is missing.
     * @returns {Promise<[{type: "dir" | "file", name: String}]>}
     */
    async readdirSync(dirName) {
        const structure = await this.ghs.getStructure(dirName);
        return structure || [];
    }

    /**
     * returns the file sha if found
     * @returns {Promise<String | undefined>}
     */
    async existsSync(p) {
        const sha = await this.ghs.getStructure(p, true);
        return sha || undefined;
    }

    /**
     * Returns a stream-like object whose writes are appended to `fpath`.
     */
    createWriteStream = (fpath) => new mockFileObj(this, fpath);

    /**
     * No need, as github does this for you
     */
    async mkdirSync(dirName) {
    }

    /**
     * @param {String} repoUrl - GitHub API repo URL
     * @param {String} token - GitHub personal access token
     */
    constructor(repoUrl, token) {
        this.ghs = new ghelper(repoUrl, token);
    }
}
+112
View File
@@ -0,0 +1,112 @@
{
"name": "github-to-fs",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "github-to-fs",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"axios": "^1.7.2",
"base-64": "^1.0.0"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz",
"integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/base-64": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/base-64/-/base-64-1.0.0.tgz",
"integrity": "sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
}
}
}
+16
View File
@@ -0,0 +1,16 @@
{
"name": "github-to-fs",
"version": "1.0.0",
"description": "a package that allows you to use github like file storage",
"main": "customFileSystem.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "ION606",
"license": "ISC",
"dependencies": {
"axios": "^1.7.2",
"base-64": "^1.0.0"
},
"type": "module"
}
+20
View File
@@ -0,0 +1,20 @@
import customFs from "../customFileSystem";
import fs from 'fs';

// the token lives in a git-ignored config file (see .gitignore)
const { token } = JSON.parse(fs.readFileSync('config.json'));

(async () => {
    // create the custom FS object
    const cfs = new customFs('https://api.github.com/repos/ION606/github-to-fs', token);

    // write the file to the repo
    await cfs.writeFileSync('example.txt', 'hello world!');

    // read the contents of the file
    console.log((await cfs.readFileSync('example.txt')).toString());

    // remove the file
    await cfs.rmSync('example.txt');

    // make sure the file is gone
    console.log((await cfs.readFileSync('example.txt')).toString());
})(); // BUG FIX: the async IIFE was defined but never invoked, so the example did nothing