feat: 초기 프로젝트 설정 및 룰.md 파일 추가

This commit is contained in:
2025-07-28 09:53:31 +09:00
commit 09a4d38512
8165 changed files with 1021855 additions and 0 deletions

61
api.hyungi.net/node_modules/js-git/mixins/add-cache.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
"use strict";
module.exports = addCache;
// Layer a cache repo on top of a real repo. Reads consult the cache first
// and populate it on a miss; writes go to the real repo and are mirrored
// into the cache with the hash forced so both stores agree.
function addCache(repo, cache) {
  var baseLoadAs = repo.loadAs;
  var baseSaveAs = repo.saveAs;
  var baseCreateTree = repo.createTree;
  if (baseLoadAs) repo.loadAs = loadAsCached;
  if (baseSaveAs) repo.saveAs = saveAsCached;
  if (baseCreateTree) repo.createTree = createTreeCached;

  // Read-through: cache hit returns immediately; a miss loads from the
  // real repo and stores the result in the cache before calling back.
  function loadAsCached(type, hash, callback) {
    cache.loadAs(type, hash, function (err, value) {
      if (err) return callback(err);
      if (value !== undefined) {
        return callback(null, value, hash);
      }
      baseLoadAs.call(repo, type, hash, function (err, value) {
        if (value === undefined) return callback(err);
        // Force the hash so the cached copy cannot mismatch the source.
        cache.saveAs(type, value, function (err) {
          if (err) return callback(err);
          callback(null, value, hash);
        }, hash);
      });
    });
  }

  // Write-through: save to the real repo first, then mirror into the cache
  // (which delivers the final callback) using the repo-assigned hash.
  function saveAsCached(type, value, callback) {
    baseSaveAs.call(repo, type, value, function (err, hash) {
      if (err) return callback(err);
      cache.saveAs(type, value, callback, hash);
    });
  }

  // Mirror the tree produced by the real createTree into the cache.
  function createTreeCached(entries, callback) {
    baseCreateTree.call(repo, entries, function (err, hash, tree) {
      if (err) return callback(err);
      cache.saveAs("tree", tree, callback, hash);
    });
  }
}

View File

@@ -0,0 +1,148 @@
"use strict";
var modes = require('../lib/modes.js');
module.exports = function (repo) {
  repo.createTree = createTree;
  // Build or update a hierarchy of git tree objects from a flat list of
  // path entries, saving trees bottom-up and calling back with the root
  // tree's hash. `entries` is an array of {path, mode, hash|content}
  // (or an object keyed by path); an entry without a mode is a deletion.
  // `entries.base` (array form) names an existing root tree to update.
  function createTree(entries, callback) {
    if (!callback) return createTree.bind(null, entries);
    // Guard against the callback firing more than once from racing I/O.
    callback = singleCall(callback);
    if (!Array.isArray(entries)) {
      entries = Object.keys(entries).map(function (path) {
        var entry = entries[path];
        entry.path = path;
        return entry;
      });
    }
    // Tree paths that we need loaded
    var toLoad = {};
    // Ensure a pending-tree record exists for `path` and all its ancestors
    // up to the root (the empty path).
    function markTree(path) {
      while(true) {
        if (toLoad[path]) return;
        toLoad[path] = true;
        trees[path] = {
          add: [],
          del: [],
          tree: {}
        };
        if (!path) break;
        path = path.substring(0, path.lastIndexOf("/"));
      }
    }
    // Commands to run organized by tree path
    var trees = {};
    // Counter for parallel I/O operations
    var left = 1; // One extra counter to protect against zalgo cache callbacks.
    // First pass, stubs out the trees structure, sorts adds from deletes,
    // and saves any inline content blobs.
    entries.forEach(function (entry) {
      var index = entry.path.lastIndexOf("/");
      var parentPath = entry.path.substr(0, index);
      var name = entry.path.substr(index + 1);
      markTree(parentPath);
      var tree = trees[parentPath];
      var adds = tree.add;
      var dels = tree.del;
      // No mode means this entry is a deletion.
      if (!entry.mode) {
        dels.push(name);
        return;
      }
      var add = {
        name: name,
        mode: entry.mode,
        hash: entry.hash
      };
      adds.push(add);
      if (entry.hash) return;
      // Inline content: save it as a blob now and fill in the hash later.
      left++;
      repo.saveAs("blob", entry.content, function (err, hash) {
        if (err) return callback(err);
        add.hash = hash;
        check();
      });
    });
    // Preload the base trees
    if (entries.base) loadTree("", entries.base);
    // Check just in case there was no IO to perform
    check();
    // Recursively load the existing tree at `path`, plus any descendant
    // trees that are scheduled for modification.
    function loadTree(path, hash) {
      left++;
      delete toLoad[path];
      repo.loadAs("tree", hash, function (err, tree) {
        if (err) return callback(err);
        trees[path].tree = tree;
        Object.keys(tree).forEach(function (name) {
          var childPath = path ? path + "/" + name : name;
          if (toLoad[childPath]) loadTree(childPath, tree[name].hash);
        });
        check();
      });
    }
    // When all outstanding I/O has settled, start flushing leaf trees.
    function check() {
      if (--left) return;
      findLeaves().forEach(processLeaf);
    }
    // Apply the queued deletes/adds for the deepest pending tree at `path`,
    // save it, then queue the resulting hash as an add on its parent.
    function processLeaf(path) {
      var entry = trees[path];
      delete trees[path];
      var tree = entry.tree;
      entry.del.forEach(function (name) {
        delete tree[name];
      });
      entry.add.forEach(function (item) {
        tree[item.name] = {
          mode: item.mode,
          hash: item.hash
        };
      });
      left++;
      repo.saveAs("tree", tree, function (err, hash, tree) {
        if (err) return callback(err);
        // The root tree ("") is saved last; its hash is the final result.
        if (!path) return callback(null, hash, tree);
        var index = path.lastIndexOf("/");
        var parentPath = path.substring(0, index);
        var name = path.substring(index + 1);
        trees[parentPath].add.push({
          name: name,
          mode: modes.tree,
          hash: hash
        });
        if (--left) return;
        findLeaves().forEach(processLeaf);
      });
    }
    // A leaf is a pending tree path that no other pending path nests under.
    function findLeaves() {
      var paths = Object.keys(trees);
      var parents = {};
      paths.forEach(function (path) {
        if (!path) return;
        var parent = path.substring(0, path.lastIndexOf("/"));
        parents[parent] = true;
      });
      return paths.filter(function (path) {
        return !parents[path];
      });
    }
  }
};
// Wrap a callback so that only its first invocation goes through; any
// later calls are dropped with a console warning instead of re-firing.
function singleCall(callback) {
  var fired = false;
  return function () {
    if (fired) {
      return console.warn("Discarding extra callback");
    }
    fired = true;
    return callback.apply(this, arguments);
  };
}

51
api.hyungi.net/node_modules/js-git/mixins/delay.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
"use strict";
module.exports = function (repo, ms) {
var saveAs = repo.saveAs;
var loadAs = repo.loadAs;
var readRef = repo.readRef;
var updateRef = repo.updateRef;
var createTree = repo.createTree;
repo.saveAs = saveAsDelayed;
repo.loadAs = loadAsDelayed;
repo.readRef = readRefDelayed;
repo.updateRed = updateRefDelayed;
if (createTree) repo.createTree = createTreeDelayed;
function saveAsDelayed(type, value, callback) {
if (!callback) return saveAsDelayed.bind(repo, type, value);
setTimeout(function () {
return saveAs.call(repo, type, value, callback);
}, ms);
}
function loadAsDelayed(type, hash, callback) {
if (!callback) return loadAsDelayed.bind(repo, type, hash);
setTimeout(function () {
return loadAs.call(repo, type, hash, callback);
}, ms);
}
function readRefDelayed(ref, callback) {
if (!callback) return readRefDelayed.bind(repo, ref);
setTimeout(function () {
return readRef.call(repo, ref, callback);
}, ms);
}
function updateRefDelayed(ref, hash, callback) {
if (!callback) return updateRefDelayed.bind(repo, ref, hash);
setTimeout(function () {
return updateRef.call(repo, ref, hash, callback);
}, ms);
}
function createTreeDelayed(entries, callback) {
if (!callback) return createTreeDelayed.bind(repo, entries);
setTimeout(function () {
return createTree.call(repo, entries, callback);
}, ms);
}
};

View File

@@ -0,0 +1,26 @@
var modes = require('../lib/modes');
module.exports = function (local, remote) {
var loadAs = local.loadAs;
local.loadAs = newLoadAs;
function newLoadAs(type, hash, callback) {
if (!callback) return newLoadAs.bind(local. type, hash);
loadAs.call(local, type, hash, function (err, body) {
if (err) return callback(err);
if (body === undefined) return remote.loadAs(type, hash, callback);
callback(null, body);
});
}
var readRef = local.readRef;
local.readRef = newReadRef;
function newReadRef(ref, callback) {
if (!callback) return newReadRef.bind(local. ref);
readRef.call(local, ref, function (err, body) {
if (err) return callback(err);
if (body === undefined) return remote.readRef(ref, callback);
callback(null, body);
});
}
};

133
api.hyungi.net/node_modules/js-git/mixins/formats.js generated vendored Normal file
View File

@@ -0,0 +1,133 @@
"use strict";
var bodec = require('bodec');
var treeMap = require('../lib/object-codec').treeMap;
module.exports = function (repo) {
var loadAs = repo.loadAs;
repo.loadAs = newLoadAs;
var saveAs = repo.saveAs;
repo.saveAs = newSaveAs;
function newLoadAs(type, hash, callback) {
if (!callback) return newLoadAs.bind(repo, type, hash);
var realType = type === "text" ? "blob":
type === "array" ? "tree" : type;
return loadAs.call(repo, realType, hash, onLoad);
function onLoad(err, body, hash) {
if (body === undefined) return callback(err);
if (type === "text") body = bodec.toUnicode(body);
if (type === "array") body = toArray(body);
return callback(err, body, hash);
}
}
function newSaveAs(type, body, callback) {
if (!callback) return newSaveAs.bind(repo, type, body);
type = type === "text" ? "blob":
type === "array" ? "tree" : type;
if (type === "blob") {
if (typeof body === "string") {
body = bodec.fromUnicode(body);
}
}
else if (type === "tree") {
body = normalizeTree(body);
}
else if (type === "commit") {
body = normalizeCommit(body);
}
else if (type === "tag") {
body = normalizeTag(body);
}
return saveAs.call(repo, type, body, callback);
}
};
// Convert a name-keyed tree object into git's array form by mapping each
// entry name through the object-codec's treeMap helper (invoked with the
// tree as `this`).
// NOTE(review): assumes treeMap(name) yields {name, mode, hash} records —
// confirm against ../lib/object-codec.
function toArray(tree) {
  return Object.keys(tree).map(treeMap, tree);
}
// Accept a tree body in either array-of-entries or name-keyed object form
// and return a fresh name-keyed tree whose entries carry only {mode, hash}.
// Throws TypeError for null or non-object input.
function normalizeTree(body) {
  if (!body || typeof body !== "object") {
    throw new TypeError("Tree body must be array or object");
  }
  var tree = {};
  if (Array.isArray(body)) {
    // Array form: [{name, mode, hash}, ...] keyed by each entry's name.
    body.forEach(function (item) {
      tree[item.name] = {
        mode: item.mode,
        hash: item.hash
      };
    });
  }
  else {
    // Object form: {name: {mode, hash}, ...}; copy to strip extra keys.
    Object.keys(body).forEach(function (name) {
      var item = body[name];
      tree[name] = {
        mode: item.mode,
        hash: item.hash
      };
    });
  }
  return tree;
}
// Validate a commit body and return a canonical copy: parents coerced to
// an array (accepting a single `parent`), author/committer normalized,
// committer defaulting to the author. Throws TypeError on bad input.
function normalizeCommit(body) {
  if (!body || typeof body !== "object") {
    throw new TypeError("Commit body must be an object");
  }
  if (!(body.tree && body.author && body.message)) {
    throw new TypeError("Tree, author, and message are required for commits");
  }
  var parents = body.parents || (body.parent ? [ body.parent ] : []);
  if (!Array.isArray(parents)) {
    throw new TypeError("Parents must be an array");
  }
  var author = normalizePerson(body.author);
  return {
    tree: body.tree,
    parents: parents,
    author: author,
    committer: body.committer ? normalizePerson(body.committer) : author,
    message: body.message
  };
}
// Validate an annotated-tag body and return a canonical copy with the
// tagger normalized. Throws TypeError when required fields are missing.
function normalizeTag(body) {
  if (!body || typeof body !== "object") {
    throw new TypeError("Tag body must be an object");
  }
  if (!(body.object && body.type && body.tag && body.tagger && body.message)) {
    throw new TypeError("Object, type, tag, tagger, and message required");
  }
  var tagger = normalizePerson(body.tagger);
  return {
    object: body.object,
    type: body.type,
    tag: body.tag,
    tagger: tagger,
    message: body.message
  };
}
// Validate and canonicalize a {name, email, date} identity record.
// `date` falls back to the current time when absent. Throws TypeError
// unless name and email are both strings.
function normalizePerson(person) {
  if (!person || typeof person !== "object") {
    throw new TypeError("Person must be an object");
  }
  var name = person.name;
  var email = person.email;
  if (typeof name !== "string" || typeof email !== "string") {
    throw new TypeError("Name and email are required for person fields");
  }
  return {
    name: name,
    email: email,
    date: person.date || new Date()
  };
}

339
api.hyungi.net/node_modules/js-git/mixins/fs-db.js generated vendored Normal file
View File

@@ -0,0 +1,339 @@
"use strict";
var bodec = require('bodec');
var inflate = require('../lib/inflate');
var deflate = require('../lib/deflate');
var codec = require('../lib/object-codec');
var parsePackEntry = require('../lib/pack-codec').parseEntry;
var applyDelta = require('../lib/apply-delta');
var sha1 = require('git-sha1');
var pathJoin = require('path').join;
// The fs object has the following interface:
// - readFile(path) => binary
// Must also call callback() with no arguments if the file does not exist.
// - readChunk(path, start, end) => binary
// Must also call callback() with no arguments if the file does not exist.
// - writeFile(path, binary) =>
// Must also make every directory up to parent of path.
// - readDir(path) => array<paths>
// Must also call callback() with no arguments if the file does not exist.
// The repo is expected to have a rootPath property that points to
// the .git folder within the filesystem.
module.exports = function (repo, fs) {
  // Parsed pack indexes keyed by pack hash, so each .idx is parsed once.
  var cachedIndexes = {};
  repo.loadAs = loadAs;
  repo.saveAs = saveAs;
  repo.loadRaw = loadRaw;
  repo.saveRaw = saveRaw;
  repo.readRef = readRef;
  repo.updateRef = updateRef;
  repo.hasHash = hasHash;
  repo.init = init;
  repo.setShallow = setShallow;
  // Write HEAD as a symref to `ref` (default refs/heads/master).
  function init(ref, callback) {
    if (!callback) return init.bind(null, ref);
    ref = ref || "refs/heads/master";
    var path = pathJoin(repo.rootPath, "HEAD");
    fs.writeFile(path, "ref: " + ref, callback);
  }
  // Record `ref` in the shallow file (used for shallow clones).
  function setShallow(ref, callback) {
    if (!callback) return setShallow.bind(null, ref);
    var path = pathJoin(repo.rootPath, "shallow");
    fs.writeFile(path, ref, callback);
  }
  // Point `ref` at `hash`, writing a .lock file then renaming for atomicity.
  function updateRef(ref, hash, callback) {
    if (!callback) return updateRef.bind(repo, ref, hash);
    var path = pathJoin(repo.rootPath, ref);
    var lock = path + ".lock";
    fs.writeFile(lock, bodec.fromRaw(hash + "\n"), function(err) {
      if(err) return callback(err);
      fs.rename(lock, path, callback);
    });
  }
  // Read the hash a loose ref points at, falling back to packed-refs when
  // no loose ref file exists.
  function readRef(ref, callback) {
    if (!callback) return readRef.bind(repo, ref);
    var path = pathJoin(repo.rootPath, ref);
    fs.readFile(path, function (err, binary) {
      if (err) return callback(err);
      if (binary === undefined) {
        return readPackedRef(ref, callback);
      }
      var hash;
      try { hash = bodec.toRaw(binary).trim(); }
      catch (err) { return callback(err); }
      callback(null, hash);
    });
  }
  // Scan the packed-refs file for `ref`; each line is "<40-hex-hash> <ref>",
  // so the hash occupies the 40 characters ending one before the name.
  function readPackedRef(ref, callback) {
    var path = pathJoin(repo.rootPath, "packed-refs");
    fs.readFile(path, function (err, binary) {
      if (binary === undefined) return callback(err);
      var hash;
      try {
        var text = bodec.toRaw(binary);
        var index = text.indexOf(ref);
        if (index >= 0) {
          hash = text.substring(index - 41, index - 1);
        }
      }
      catch (err) {
        return callback(err);
      }
      callback(null, hash);
    });
  }
  // Frame and sha1 `body` as a git object of `type`, then store it.
  // Calls back with the new hash.
  function saveAs(type, body, callback) {
    if (!callback) return saveAs.bind(repo, type, body);
    var raw, hash;
    try {
      raw = codec.frame({
        type: type,
        body: codec.encoders[type](body)
      });
      hash = sha1(raw);
    }
    catch (err) { return callback(err); }
    saveRaw(hash, raw, function (err) {
      if (err) return callback(err);
      callback(null, hash);
    });
  }
  // Store a pre-framed object as a deflated loose object, skipping the
  // write if it already exists. Writes via a temp name then renames.
  function saveRaw(hash, raw, callback) {
    if (!callback) return saveRaw.bind(repo, hash, raw);
    var buffer, path;
    try {
      if (sha1(raw) !== hash) {
        throw new Error("Save data does not match hash");
      }
      buffer = deflate(raw);
      path = hashToPath(hash);
    }
    catch (err) { return callback(err); }
    // Try to read the object first.
    loadRaw(hash, function (err, data) {
      // If it already exists, we're done
      if (data) return callback();
      // Otherwise write a new file
      var tmp = path.replace(/[0-9a-f]+$/, 'tmp_obj_' + Math.random().toString(36).substr(2))
      fs.writeFile(tmp, buffer, function(err) {
        if(err) return callback(err);
        fs.rename(tmp, path, callback);
      });
    });
  }
  // Load and decode the object at `hash`, verifying its stored type.
  function loadAs(type, hash, callback) {
    if (!callback) return loadAs.bind(repo, type, hash);
    loadRaw(hash, function (err, raw) {
      if (raw === undefined) return callback(err);
      var body;
      try {
        raw = codec.deframe(raw);
        if (raw.type !== type) throw new TypeError("Type mismatch");
        body = codec.decoders[raw.type](raw.body);
      }
      catch (err) { return callback(err); }
      callback(null, body);
    });
  }
  // Resolve to a boolean: does an object with this hash exist on disk?
  function hasHash(hash, callback) {
    if (!callback) return hasHash.bind(repo, hash);
    loadRaw(hash, function (err, body) {
      if (err) return callback(err);
      return callback(null, !!body);
    });
  }
  // Read the raw (framed, inflated) object; falls back to packfiles when
  // no loose object file exists.
  function loadRaw(hash, callback) {
    if (!callback) return loadRaw.bind(repo, hash);
    var path = hashToPath(hash);
    fs.readFile(path, function (err, buffer) {
      if (err) return callback(err);
      if (buffer) {
        var raw;
        try { raw = inflate(buffer); }
        catch (err) { return callback(err); }
        return callback(null, raw);
      }
      return loadRawPacked(hash, callback);
    });
  }
  // Search every pack in objects/pack for `hash`, parsing (and caching)
  // each pack's .idx as needed, and resolving delta chains on a hit.
  function loadRawPacked(hash, callback) {
    var packDir = pathJoin(repo.rootPath, "objects/pack");
    var packHashes = [];
    fs.readDir(packDir, function (err, entries) {
      if (!entries) return callback(err);
      entries.forEach(function (name) {
        var match = name.match(/pack-([0-9a-f]{40}).idx/);
        if (match) packHashes.push(match[1]);
      });
      start();
    });
    // Try the next candidate pack; calls back with no value when exhausted.
    function start() {
      var packHash = packHashes.pop();
      var offsets;
      if (!packHash) return callback();
      if (!cachedIndexes[packHash]) loadIndex(packHash);
      else onIndex();
      // Parse and cache this pack's .idx file.
      function loadIndex() {
        var indexFile = pathJoin(packDir, "pack-" + packHash + ".idx" );
        fs.readFile(indexFile, function (err, buffer) {
          if (!buffer) return callback(err);
          try {
            cachedIndexes[packHash] = parseIndex(buffer);
          }
          catch (err) { return callback(err); }
          onIndex();
        });
      }
      // Look `hash` up in the cached index; on a miss try the next pack.
      function onIndex() {
        var cached = cachedIndexes[packHash];
        var packFile = pathJoin(packDir, "pack-" + packHash + ".pack" );
        var index = cached.byHash[hash];
        if (!index) return start();
        offsets = cached.offsets;
        loadChunk(packFile, index.offset, callback);
      }
      // Read one entry from the pack file, recursively resolving ref- and
      // ofs-deltas against their base objects.
      function loadChunk(packFile, start, callback) {
        var index = offsets.indexOf(start);
        if (index < 0) {
          var error = new Error("Can't find chunk starting at " + start);
          return callback(error);
        }
        // A negative end means "until 20 bytes before EOF" (pack checksum).
        var end = index + 1 < offsets.length ? offsets[index + 1] : -20;
        fs.readChunk(packFile, start, end, function (err, chunk) {
          if (!chunk) return callback(err);
          var raw;
          try {
            var entry = parsePackEntry(chunk);
            if (entry.type === "ref-delta") {
              return loadRaw.call(repo, entry.ref, onBase);
            }
            else if (entry.type === "ofs-delta") {
              return loadChunk(packFile, start - entry.ref, onBase);
            }
            raw = codec.frame(entry);
          }
          catch (err) { return callback(err); }
          callback(null, raw);
          // Apply the delta in `entry` on top of the freshly loaded base.
          function onBase(err, base) {
            if (!base) return callback(err);
            var object = codec.deframe(base);
            var buffer;
            try {
              object.body = applyDelta(entry.body, object.body);
              buffer = codec.frame(object);
            }
            catch (err) { return callback(err); }
            callback(null, buffer);
          }
        });
      }
    }
  }
  // objects/ab/cdef... fan-out path for a loose object.
  function hashToPath(hash) {
    return pathJoin(repo.rootPath, "objects", hash.substring(0, 2), hash.substring(2));
  }
};
// Parse a v2 git pack index (*.idx) buffer into lookup structures:
// a sorted list of pack offsets, a hash -> {offset, crc} map, and the
// pack checksum. Throws on unsupported versions or checksum failure.
function parseIndex(buffer) {
  if (readUint32(buffer, 0) !== 0xff744f63 ||
      readUint32(buffer, 4) !== 0x00000002) {
    throw new Error("Only v2 pack indexes supported");
  }
  // Get the number of hashes in index
  // This is the value of the last fan-out entry
  var hashOffset = 8 + 255 * 4;
  var length = readUint32(buffer, hashOffset);
  hashOffset += 4;
  var crcOffset = hashOffset + 20 * length;
  var lengthOffset = crcOffset + 4 * length;
  var largeOffset = lengthOffset + 4 * length;
  var checkOffset = largeOffset;
  var indexes = new Array(length);
  for (var i = 0; i < length; i++) {
    var start = hashOffset + i * 20;
    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
    var crc = readUint32(buffer, crcOffset + i * 4);
    var offset = readUint32(buffer, lengthOffset + i * 4);
    if (offset & 0x80000000) {
      // High bit set: the value indexes the 64-bit large-offset table.
      offset = largeOffset + (offset & 0x7fffffff) * 8;
      checkOffset = Math.max(checkOffset, offset + 8);
      offset = readUint64(buffer, offset);
    }
    indexes[i] = {
      hash: hash,
      offset: offset,
      crc: crc
    };
  }
  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
    // Fixed: error message previously read "Checksum mistmatch" (typo).
    throw new Error("Checksum mismatch");
  }
  var byHash = {};
  indexes.sort(function (a, b) {
    return a.offset - b.offset;
  });
  indexes.forEach(function (data) {
    byHash[data.hash] = {
      offset: data.offset,
      crc: data.crc,
    };
  });
  var offsets = indexes.map(function (entry) {
    return entry.offset;
  }).sort(function (a, b) {
    return a - b;
  });
  return {
    offsets: offsets,
    byHash: byHash,
    checksum: packChecksum
  };
}
// Read a big-endian unsigned 32-bit integer from `buffer` at `offset`.
function readUint32(buffer, offset) {
  var value = 0;
  for (var i = 0; i < 4; i++) {
    value = (value << 8) | buffer[offset + i];
  }
  // >>> 0 coerces the signed shift result back to unsigned.
  return value >>> 0;
}
// Read a big-endian unsigned 64-bit integer as a JS number. Values above
// 2^53 lose precision, which caps supported packfiles at 8 petabytes —
// an acceptable limit for a single returned integer.
function readUint64(buffer, offset) {
  var hi = 0;
  var lo = 0;
  for (var i = 0; i < 4; i++) {
    hi = (hi << 8) | buffer[offset + i];
    lo = (lo << 8) | buffer[offset + 4 + i];
  }
  return (hi >>> 0) * 0x100000000 + (lo >>> 0);
}

147
api.hyungi.net/node_modules/js-git/mixins/indexed-db.js generated vendored Normal file
View File

@@ -0,0 +1,147 @@
"use strict";
/*global indexedDB*/
var codec = require('../lib/object-codec.js');
var sha1 = require('git-sha1');
var modes = require('../lib/modes.js');
// Module-wide IndexedDB database handle; set asynchronously by init().
var db;
mixin.init = init;
mixin.loadAs = loadAs;
mixin.saveAs = saveAs;
module.exports = mixin;
// Open (or create) the "tedit" IndexedDB database and stash the handle in
// the module-level `db`. Must complete before any other operation runs.
function init(callback) {
  db = null;
  var request = indexedDB.open("tedit", 1);
  // We can only create Object stores in a versionchange transaction.
  request.onupgradeneeded = function(evt) {
    var db = evt.target.result;
    if (evt.dataLoss && evt.dataLoss !== "none") {
      return callback(new Error(evt.dataLoss + ": " + evt.dataLossMessage));
    }
    // A versionchange transaction is started automatically.
    evt.target.transaction.onerror = onError;
    // Recreate both stores from scratch on upgrade; old data is discarded.
    if(db.objectStoreNames.contains("objects")) {
      db.deleteObjectStore("objects");
    }
    if(db.objectStoreNames.contains("refs")) {
      db.deleteObjectStore("refs");
    }
    db.createObjectStore("objects", {keyPath: "hash"});
    db.createObjectStore("refs", {keyPath: "path"});
  };
  request.onsuccess = function (evt) {
    db = evt.target.result;
    callback();
  };
  request.onerror = onError;
}
// Attach the IndexedDB-backed git methods to `repo`. `prefix` namespaces
// this repo's refs within the shared "refs" object store.
function mixin(repo, prefix) {
  if (!prefix) throw new Error("Prefix required");
  repo.refPrefix = prefix;
  var methods = {
    saveAs: saveAs,
    loadAs: loadAs,
    readRef: readRef,
    updateRef: updateRef,
    hasHash: hasHash
  };
  Object.keys(methods).forEach(function (name) {
    repo[name] = methods[name];
  });
}
// Shared IndexedDB error handler: log the failure without rethrowing.
function onError(evt) {
  var error = evt.target.error;
  console.error("error", error);
}
// Store `body` as a git object of `type` in the "objects" store.
// The key is the sha1 of the framed object unless `forcedHash` overrides
// it (used by caches that must agree with an upstream hash).
// Calls back with (null, hash, body) on success.
function saveAs(type, body, callback, forcedHash) {
  if (!callback) return saveAs.bind(this, type, body);
  var hash;
  try {
    var buffer = codec.frame({type:type,body:body});
    hash = forcedHash || sha1(buffer);
  }
  catch (err) { return callback(err); }
  var trans = db.transaction(["objects"], "readwrite");
  var store = trans.objectStore("objects");
  var entry = { hash: hash, type: type, body: body };
  var request = store.put(entry);
  request.onsuccess = function() {
    // console.warn("SAVE", type, hash);
    callback(null, hash, body);
  };
  request.onerror = function(evt) {
    callback(new Error(evt.value));
  };
}
// Load the object stored at `hash` and verify its recorded type matches
// the requested `type`. Calls back with (null, body, hash).
function loadAs(type, hash, callback) {
  if (!callback) return loadAs.bind(this, type, hash);
  loadRaw(hash, onEntry);
  function onEntry(err, entry) {
    if (!entry) return callback(err);
    if (entry.type !== type) {
      return callback(new TypeError("Type mismatch"));
    }
    callback(null, entry.body, hash);
  }
}
// Fetch the stored entry ({hash, type, body}) for `hash` from the
// "objects" store. Calls back with no value when the object is absent.
function loadRaw(hash, callback) {
  // Fixed: a get() only needs a "readonly" transaction (was "readwrite",
  // which needlessly serializes against concurrent writers).
  var trans = db.transaction(["objects"], "readonly");
  var store = trans.objectStore("objects");
  var request = store.get(hash);
  request.onsuccess = function(evt) {
    var entry = evt.target.result;
    if (!entry) return callback();
    return callback(null, entry);
  };
  request.onerror = function(evt) {
    callback(new Error(evt.value));
  };
}
// Resolve to a boolean: does an object with this hash exist in the store?
function hasHash(hash, callback) {
  if (!callback) return hasHash.bind(this, hash);
  loadRaw(hash, function (err, entry) {
    if (err) return callback(err);
    callback(null, Boolean(entry));
  });
}
// Read the hash stored for `ref` under this repo's prefix.
// Calls back with no value when the ref does not exist.
function readRef(ref, callback) {
  if (!callback) return readRef.bind(this, ref);
  var key = this.refPrefix + "/" + ref;
  // Fixed: a get() only needs a "readonly" transaction (was "readwrite").
  var trans = db.transaction(["refs"], "readonly");
  var store = trans.objectStore("refs");
  var request = store.get(key);
  request.onsuccess = function(evt) {
    var entry = evt.target.result;
    if (!entry) return callback();
    callback(null, entry.hash);
  };
  request.onerror = function(evt) {
    callback(new Error(evt.value));
  };
}
// Point `ref` (namespaced by this repo's prefix) at `hash`.
function updateRef(ref, hash, callback) {
  if (!callback) return updateRef.bind(this, ref, hash);
  var path = this.refPrefix + "/" + ref;
  var trans = db.transaction(["refs"], "readwrite");
  var store = trans.objectStore("refs");
  var request = store.put({ path: path, hash: hash });
  request.onsuccess = function() {
    callback();
  };
  request.onerror = function(evt) {
    callback(new Error(evt.value));
  };
}

53
api.hyungi.net/node_modules/js-git/mixins/mem-cache.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
"use strict";
var encoders = require('../lib/object-codec').encoders;
var decoders = require('../lib/object-codec').decoders;
var Binary = require('bodec').Binary;
// Shared in-memory object cache (hash -> decoded value), exposed on the
// export so other mixins (e.g. path-to-entry) can read it directly.
var cache = memCache.cache = {};
module.exports = memCache;
// Wrap a repo's loadAs/saveAs with the shared in-memory cache. Blobs of
// 100+ bytes are deliberately never cached; everything that is cached is
// stored and served as a defensive copy (via dupe).
function memCache(repo) {
  var realLoadAs = repo.loadAs;
  var realSaveAs = repo.saveAs;
  repo.loadAs = loadAsCached;
  repo.saveAs = saveAsCached;

  // Only non-blobs and small blobs are worth keeping in memory.
  function cacheable(type, value) {
    return type !== "blob" || value.length < 100;
  }

  function loadAsCached(type, hash, callback) {
    if (!callback) return loadAsCached.bind(this, type, hash);
    if (hash in cache) return callback(null, dupe(type, cache[hash]), hash);
    realLoadAs.call(repo, type, hash, function (err, value) {
      if (value === undefined) return callback(err);
      if (cacheable(type, value)) {
        cache[hash] = dupe(type, value);
      }
      return callback.apply(this, arguments);
    });
  }

  function saveAsCached(type, value, callback) {
    if (!callback) return saveAsCached.bind(this, type, value);
    // Copy up front so later caller mutations can't corrupt the cache.
    value = dupe(type, value);
    realSaveAs.call(repo, type, value, function (err, hash) {
      if (err) return callback(err);
      if (cacheable(type, value)) {
        cache[hash] = value;
      }
      return callback(null, hash, value);
    });
  }
}
// Return a defensive copy of `value` so cached objects can't be mutated
// through aliases. Blobs of 100+ bytes are returned as-is, since memCache
// never stores them anyway. Non-blobs are round-tripped through the codec.
function dupe(type, value) {
  if (type === "blob") {
    // Fixed: previously tested `type.length` ("blob".length === 4), which
    // made this early return unreachable; the size check is on the value,
    // matching the < 100 caching threshold in memCache.
    if (value.length >= 100) return value;
    return new Binary(value);
  }
  return decoders[type](encoders[type](value));
}
// Recursively freeze an object graph in place.
// NOTE(review): no visited-set — assumes the graph is acyclic; confirm.
function deepFreeze(obj) {
  Object.freeze(obj);
  Object.keys(obj).forEach(function (key) {
    var value = obj[key];
    // Fixed: guard against null — typeof null === "object", and
    // Object.keys(null) throws a TypeError on the recursive call.
    if (value && typeof value === "object") deepFreeze(value);
  });
}

95
api.hyungi.net/node_modules/js-git/mixins/mem-db.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
"use strict";
var defer = require('../lib/defer.js');
var codec = require('../lib/object-codec.js');
var sha1 = require('git-sha1');
module.exports = mixin;
// A full 40-character lowercase hex string is treated as an object hash;
// anything else passed where a hash is expected is resolved as a ref name.
var isHash = /^[0-9a-f]{40}$/;
// Mixin: a fully in-memory git database. Objects are stored framed by
// hash; refs map names to hashes. All operations complete on a later tick
// via makeAsync.
function mixin(repo) {
  var objects = {};
  var refs = {};
  repo.saveAs = saveAs;
  repo.loadAs = loadAs;
  repo.saveRaw = saveRaw;
  repo.loadRaw = loadRaw;
  repo.hasHash = hasHash;
  repo.readRef = readRef;
  repo.updateRef = updateRef;
  repo.listRefs = listRefs;
  // Resolve a ref name to its hash (undefined when missing).
  function readRef(ref, callback) {
    return makeAsync(function () {
      return refs[ref];
    }, callback);
  }
  // Return a {name: hash} map of refs, optionally filtered by prefix.
  function listRefs(prefix, callback) {
    return makeAsync(function () {
      // NOTE(review): "[/$]" is a character class (literal '/' or '$'),
      // not an end anchor — a ref equal to the bare prefix won't match.
      // Presumably "(?:/|$)" was intended; confirm before changing.
      var regex = prefix && new RegExp("^" + prefix + "[/$]");
      var out = {};
      Object.keys(refs).forEach(function (name) {
        if (regex && !regex.test(name)) return;
        out[name] = refs[name];
      });
      return out;
    }, callback);
  }
  // Point `ref` at `hash`; resolves with the hash.
  function updateRef(ref, hash, callback) {
    return makeAsync(function () {
      return (refs[ref] = hash);
    }, callback);
  }
  // Accepts either a hash or a ref name.
  function hasHash(hash, callback) {
    return makeAsync(function () {
      if (!isHash.test(hash)) hash = refs[hash];
      return hash in objects;
    }, callback);
  }
  // Frame, hash and store a typed object; resolves with the new hash.
  function saveAs(type, body, callback) {
    return makeAsync(function () {
      var buffer = codec.frame({type:type,body:body});
      var hash = sha1(buffer);
      objects[hash] = buffer;
      return hash;
    }, callback);
  }
  // Store a pre-framed buffer under an externally supplied hash.
  function saveRaw(hash, buffer, callback) {
    return makeAsync(function () {
      objects[hash] = buffer;
    }, callback);
  }
  // Load and decode an object, checking its type. Accepts a ref name too.
  function loadAs(type, hash, callback) {
    return makeAsync(function () {
      if (!isHash.test(hash)) hash = refs[hash];
      var buffer = objects[hash];
      // NOTE(review): a missing object resolves to [] here, while other
      // backends resolve to undefined — confirm callers expect this.
      if (!buffer) return [];
      var obj = codec.deframe(buffer, true);
      if (obj.type !== type) throw new TypeError("Type mismatch");
      return obj.body;
    }, callback);
  }
  // Fetch the raw framed buffer (undefined when missing).
  function loadRaw(hash, callback) {
    return makeAsync(function () {
      return objects[hash];
    }, callback);
  }
}
// Run a synchronous `fn` on a future tick and deliver its return value
// (or thrown error) to `callback` in node style. With no callback, returns
// the curried form, matching the rest of js-git.
function makeAsync(fn, callback) {
  if (!callback) return makeAsync.bind(null, fn);
  defer(function () {
    var result;
    try {
      result = fn();
    }
    catch (err) {
      return callback(err);
    }
    callback(null, result);
  });
}

201
api.hyungi.net/node_modules/js-git/mixins/pack-ops.js generated vendored Normal file
View File

@@ -0,0 +1,201 @@
"use strict";
var sha1 = require('git-sha1');
var applyDelta = require('../lib/apply-delta.js');
var codec = require('../lib/object-codec.js');
var decodePack = require('../lib/pack-codec.js').decodePack;
var encodePack = require('../lib/pack-codec.js').encodePack;
var makeChannel = require('culvert');
// Mixin: adds streaming packfile import (unpack) and export (pack).
module.exports = function (repo) {
  // packChannel is a writable culvert channel {put,drain} containing raw packfile binary data
  // opts can contain "onProgress" or "onError" hook functions.
  // callback will be called with a list of all unpacked hashes on success.
  repo.unpack = unpack; // (packChannel, opts) => hashes
  // hashes is an array of hashes to pack
  // packChannel will be a readable culvert channel {take} containing raw packfile binary data
  repo.pack = pack; // (hashes, opts) => packChannel
};
// Consume a packfile stream and store every contained object in the repo,
// resolving ofs- and ref-deltas against already-seen or existing objects.
// Calls back with the list of all unpacked hashes.
function unpack(packChannel, opts, callback) {
  /*jshint validthis:true*/
  if (!callback) return unpack.bind(this, packChannel, opts);
  packChannel = applyParser(packChannel, decodePack, callback);
  var repo = this;
  var version, num, numDeltas = 0, count = 0, countDeltas = 0;
  var done, startDeltaProgress = false;
  // hashes keyed by offset for ofs-delta resolving
  var hashes = {};
  // key is hash, boolean is cached "has" value of true or false
  var has = {};
  // key is hash we're waiting for, value is array of items that are waiting.
  var pending = {};
  return packChannel.take(onStats);
  // Single completion point; ignores anything after the first call.
  function onDone(err) {
    if (done) return;
    done = true;
    if (err) return callback(err);
    return callback(null, values(hashes));
  }
  // First item from the parser is the pack header {version, num}.
  function onStats(err, stats) {
    if (err) return onDone(err);
    version = stats.version;
    num = stats.num;
    packChannel.take(onRead);
  }
  function objectProgress(more) {
    if (!more) startDeltaProgress = true;
    var percent = Math.round(count / num * 100);
    return opts.onProgress("Receiving objects: " + percent + "% (" + (count++) + "/" + num + ") " + (more ? "\r" : "\n"));
  }
  function deltaProgress(more) {
    if (!startDeltaProgress) return;
    var percent = Math.round(countDeltas / numDeltas * 100);
    return opts.onProgress("Applying deltas: " + percent + "% (" + (countDeltas++) + "/" + numDeltas + ") " + (more ? "\r" : "\n"));
  }
  // Handle one decoded pack entry; undefined signals end of stream.
  function onRead(err, item) {
    if (err) return onDone(err);
    if (opts.onProgress) objectProgress(item);
    if (item === undefined) return onDone();
    if (item.size !== item.body.length) {
      return onDone(new Error("Body size mismatch"));
    }
    if (item.type === "ofs-delta") {
      numDeltas++;
      // The base was seen earlier in this stream at a known offset.
      item.ref = hashes[item.offset - item.ref];
      return resolveDelta(item);
    }
    if (item.type === "ref-delta") {
      numDeltas++;
      return checkDelta(item);
    }
    return saveValue(item);
  }
  // Load the delta's base object and apply the delta on top of it.
  function resolveDelta(item) {
    if (opts.onProgress) deltaProgress();
    return repo.loadRaw(item.ref, function (err, buffer) {
      if (err) return onDone(err);
      if (!buffer) return onDone(new Error("Missing base image at " + item.ref));
      var target = codec.deframe(buffer);
      item.type = target.type;
      item.body = applyDelta(item.body, target.body);
      return saveValue(item);
    });
  }
  // Decide whether a ref-delta's base is available now or must wait.
  function checkDelta(item) {
    var hasTarget = has[item.ref];
    if (hasTarget === true) return resolveDelta(item);
    if (hasTarget === false) return enqueueDelta(item);
    return repo.hasHash(item.ref, function (err, value) {
      if (err) return onDone(err);
      has[item.ref] = value;
      if (value) return resolveDelta(item);
      return enqueueDelta(item);
    });
  }
  // Frame, hash and store a fully resolved object.
  function saveValue(item) {
    var buffer = codec.frame(item);
    var hash = sha1(buffer);
    hashes[item.offset] = hash;
    has[hash] = true;
    if (hash in pending) {
      // I have yet to come across a pack stream that actually needs this.
      // So I will only implement it when I have concrete data to test against.
      console.error({
        list: pending[hash],
        item: item
      });
      // Fixed: throw a real Error (was a bare string, which carries no
      // stack trace and defeats `instanceof Error` checks upstream).
      throw new Error("TODO: pending value was found, resolve it");
    }
    return repo.saveRaw(hash, buffer, onSave);
  }
  function onSave(err) {
    if (err) return callback(err);
    packChannel.take(onRead);
  }
  // Park a delta whose base has not arrived yet, then keep reading.
  function enqueueDelta(item) {
    var list = pending[item.ref];
    if (!list) pending[item.ref] = [item];
    else list.push(item);
    packChannel.take(onRead);
  }
}
// TODO: Implement delta refs to reduce stream size
// Stream the framed objects for `hashes` as a packfile. Calls back
// immediately with a readable {take} channel; the first item produced is
// the {num} header, followed by one deframed object per hash.
function pack(hashes, opts, callback) {
  /*jshint validthis:true*/
  if (!callback) return pack.bind(this, hashes, opts);
  var repo = this;
  var i = 0, first = true, done = false;
  return callback(null, applyParser({ take: take }, encodePack));
  // Pull-based producer: each take() yields the next object (or header),
  // and calls back with no value once all hashes are exhausted.
  function take(callback) {
    if (done) return callback();
    if (first) return readFirst(callback);
    var hash = hashes[i++];
    if (hash === undefined) {
      return callback();
    }
    repo.loadRaw(hash, function (err, buffer) {
      if (err) return callback(err);
      if (!buffer) return callback(new Error("Missing hash: " + hash));
      // Reframe with pack format header
      callback(null, codec.deframe(buffer));
    });
  }
  // Emit the pack header carrying the object count before any objects.
  function readFirst(callback) {
    first = false;
    callback(null, {num: hashes.length});
  }
}
// Collect an object's own enumerable values into an array
// (insertion-ordered, like Object.values).
function values(object) {
  return Object.keys(object).map(function (key) {
    return object[key];
  });
}
// Pipe `stream` through `parser` (a transform wrapped around a culvert
// channel's put function), reporting failures to `onError`. Returns a
// {take}-only channel of parsed items.
function applyParser(stream, parser, onError) {
  var extra = makeChannel();
  extra.put = parser(extra.put);
  stream.take(onData);
  function onData(err, item) {
    if (err) return onError(err);
    var more;
    try { more = extra.put(item); }
    catch (err) { return onError(err); }
    // Honor back-pressure: keep reading while the channel accepts input,
    // otherwise wait for it to drain before taking more.
    if (more) stream.take(onData);
    else extra.drain(onDrain);
  }
  function onDrain(err) {
    if (err) return onError(err);
    stream.take(onData);
  }
  return { take: extra.take };
}

View File

@@ -0,0 +1,51 @@
var cache = require('./mem-cache').cache;
var modes = require('../lib/modes');
module.exports = addPathToEntry;

// Mixin: teach `repo` to resolve slash-separated paths against a root
// tree, reusing the shared in-memory tree cache.
function addPathToEntry(repo) {
  repo.pathToEntry = pathToEntry;
}
// Walk `path` one segment at a time starting from the tree at `rootTree`.
// Calls back with {mode, hash} on a full match, with no result when a
// segment is missing or a file is hit mid-path, or with {last: {...}}
// (the resolved prefix plus the unresolved rest) when a non-tree,
// non-file entry (e.g. symlink or submodule) interrupts the walk.
function pathToEntry(rootTree, path, callback) {
  if (!callback) return pathToEntry.bind(this, rootTree, path);
  var repo = this;
  var parts = path.split("/").filter(Boolean);
  var index = 0;
  var mode = modes.tree;
  var hash = rootTree;
  step();

  function step() {
    while (index < parts.length) {
      if (mode !== modes.tree) {
        // Plain files cannot contain further path segments.
        if (modes.isFile(mode)) return callback();
        // Something opaque (symlink/commit): report how far we got.
        return callback(null, {
          last: {
            mode: mode,
            hash: hash,
            path: parts.slice(0, index).join("/"),
            rest: parts.slice(index).join("/")
          }
        });
      }
      var tree = cache[hash];
      // Cache miss: load the tree, then retry this same segment.
      if (!tree) return repo.loadAs("tree", hash, onTree);
      var entry = tree[parts[index]];
      if (!entry) return callback();
      mode = entry.mode;
      hash = entry.hash;
      index++;
    }
    // Consumed every segment: this is the entry itself.
    callback(null, {
      mode: mode,
      hash: hash
    });
  }

  function onTree(err, value) {
    if (!value) return callback(err || new Error("Missing object: " + hash));
    cache[hash] = value;
    step();
  }
}

View File

@@ -0,0 +1,28 @@
"use strict";
// This replaces loadAs with a version that batches concurrent requests for
// the same hash.
module.exports = function (repo) {
var pendingReqs = {};
var loadAs = repo.loadAs;
repo.loadAs = newLoadAs;
function newLoadAs(type, hash, callback) {
if (!callback) return newLoadAs.bind(null, type, hash);
var list = pendingReqs[hash];
if (list) {
if (list.type !== type) callback(new Error("Type mismatch"));
else list.push(callback);
return;
}
list = pendingReqs[hash] = [callback];
list.type = type;
loadAs.call(repo, type, hash, function () {
delete pendingReqs[hash];
for (var i = 0, l = list.length; i < l; i++) {
list[i].apply(this, arguments);
}
});
}
};

147
api.hyungi.net/node_modules/js-git/mixins/sync.js generated vendored Normal file
View File

@@ -0,0 +1,147 @@
"use strict";
var modes = require('../lib/modes');
module.exports = function (local, remote) {
local.fetch = fetch;
local.send = send;
local.readRemoteRef = remote.readRef.bind(remote);
local.updateRemoteRef = remote.updateRef.bind(remote);
function fetch(ref, depth, callback) {
if (!callback) return fetch.bind(local, ref, depth);
sync(local, remote, ref, depth, callback);
}
function send(ref, callback) {
if (!callback) return send.bind(local, ref);
sync(remote, local, ref, Infinity, callback);
}
};
// Download remote ref with depth
// Make sure to use Infinity for depth on github mounts or anything that
// doesn't allow shallow clones.
// One-way object transfer: copy every object reachable from `ref` (limited
// to `depth` commits of ancestry) from `remote` into `local`, verifying each
// saved object's hash along the way. Calls back with the ref's commit hash.
function sync(local, remote, ref, depth, callback) {
  if (typeof ref !== "string") throw new TypeError("ref must be string");
  if (typeof depth !== "number") throw new TypeError("depth must be number");
  // Hashes already confirmed present in `local`, so shared subtrees and
  // blobs are only checked once per run.
  var hasCache = {};
  remote.readRef(ref, function (err, hash) {
    // NOTE(review): when the ref is absent without an I/O error, this calls
    // callback(undefined) — i.e. success with no hash. Confirm callers
    // treat a missing result hash as "not found".
    if (!hash) return callback(err);
    importCommit(hash, depth, function (err) {
      if (err) return callback(err);
      callback(null, hash);
    });
  });
  // Caching has check.
  function check(type, hash, callback) {
    if (typeof type !== "string") throw new TypeError("type must be string");
    if (typeof hash !== "string") throw new TypeError("hash must be string");
    if (hasCache[hash]) return callback(null, true);
    local.hasHash(hash, function (err, has) {
      if (err) return callback(err);
      hasCache[hash] = has;
      callback(null, has);
    });
  }
  // Import one commit: its tree first, then parents (until depth runs out),
  // and only then the commit object itself — so a partial sync never leaves
  // a commit without its contents.
  function importCommit(hash, depth, callback) {
    check("commit", hash, onCheck);
    function onCheck(err, has) {
      // Already present locally (or the check errored): nothing to import.
      if (err || has) return callback(err);
      remote.loadAs("commit", hash, onLoad);
    }
    function onLoad(err, commit) {
      if (!commit) return callback(err || new Error("Missing commit " + hash));
      var i = 0;
      importTree(commit.tree, onImport);
      // Runs once after the tree, then once per imported parent.
      function onImport(err) {
        if (err) return callback(err);
        if (i >= commit.parents.length || depth <= 1) {
          return local.saveAs("commit", commit, onSave);
        }
        importCommit(commit.parents[i++], depth - 1, onImport);
      }
    }
    function onSave(err, newHash) {
      if (err) return callback(err);
      // The local save must reproduce the hash the remote advertised.
      if (newHash !== hash) {
        return callback(new Error("Commit hash mismatch " + hash + " != " + newHash));
      }
      hasCache[hash] = true;
      callback();
    }
  }
  // Import a tree: every blob and subtree entry first, then the tree itself.
  function importTree(hash, callback) {
    check("tree", hash, onCheck);
    function onCheck(err, has) {
      if (err || has) return callback(err);
      remote.loadAs("tree", hash, onLoad);
    }
    function onLoad(err, tree) {
      if (!tree) return callback(err || new Error("Missing tree " + hash));
      var i = 0;
      var names = Object.keys(tree);
      onImport();
      // Sequentially imports each entry; saves the tree once all are done.
      function onImport(err) {
        if (err) return callback(err);
        if (i >= names.length) {
          return local.saveAs("tree", tree, onSave);
        }
        var name = names[i++];
        var entry = tree[name];
        if (modes.isBlob(entry.mode)) {
          return importBlob(entry.hash, onImport);
        }
        if (entry.mode === modes.tree) {
          return importTree(entry.hash, onImport);
        }
        // Skip others.
        onImport();
      }
    }
    function onSave(err, newHash) {
      if (err) return callback(err);
      if (newHash !== hash) {
        return callback(new Error("Tree hash mismatch " + hash + " != " + newHash));
      }
      hasCache[hash] = true;
      callback();
    }
  }
  // Import a single blob, verifying the saved hash matches.
  function importBlob(hash, callback) {
    check("blob", hash, onCheck);
    function onCheck(err, has) {
      if (err || has) return callback(err);
      remote.loadAs("blob", hash, onLoad);
    }
    function onLoad(err, blob) {
      if (!blob) return callback(err || new Error("Missing blob " + hash));
      local.saveAs("blob", blob, onSave);
    }
    function onSave(err, newHash) {
      if (err) return callback(err);
      if (newHash !== hash) {
        return callback(new Error("Blob hash mismatch " + hash + " != " + newHash));
      }
      hasCache[hash] = true;
      callback();
    }
  }
}

152
api.hyungi.net/node_modules/js-git/mixins/walkers.js generated vendored Normal file
View File

@@ -0,0 +1,152 @@
var modes = require('../lib/modes.js');
module.exports = addWalkers;
// Expose the generic walker so other modules can build custom streams.
module.exports.walk = walk;

function addWalkers(repo) {
  repo.logWalk = logWalk;   // (ref) => stream<commit>
  repo.treeWalk = treeWalk; // (treeHash) => stream<object>
}
// Stream the commit history reachable from `ref`, newest-first by author
// date. Respects a "shallow" ref (if the repo supports refs) so walking
// stops at the shallow boundary.
function logWalk(ref, callback) {
  if (!callback) return logWalk.bind(this, ref);
  var repo = this;
  var shallowHash;
  var seen = {};
  if (!repo.readRef) return onShallow();
  return repo.readRef("shallow", onShallow);

  function onShallow(err, shallow) {
    shallowHash = shallow;
    resolveRef(repo, ref, onHash);
  }

  function onHash(err, hash) {
    if (err) return callback(err);
    repo.loadAs("commit", hash, function (err, commit) {
      if (commit === undefined) return callback(err);
      commit.hash = hash;
      seen[hash] = true;
      callback(null, walk(commit, scan, loadKey, compare));
    });
  }

  // Next hashes to visit: unseen parents (none past the shallow marker).
  function scan(commit) {
    if (shallowHash === commit) return [];
    return commit.parents.filter(function (parent) {
      return !seen[parent];
    });
  }

  function loadKey(hash, callback) {
    repo.loadAs("commit", hash, function (err, commit) {
      if (!commit) return callback(err || new Error("Missing commit " + hash));
      commit.hash = hash;
      if (hash === shallowHash) commit.last = true;
      callback(null, commit);
    });
  }
}
// Priority predicate for the log walker: true when `commit`'s author date
// is strictly older than `other`'s (so newer commits surface first).
function compare(commit, other) {
  var ours = commit.author.date;
  var theirs = other.author.date;
  return ours < theirs;
}
// Stream every entry under the tree at `hash`, depth-first in path order.
// Each yielded item has {mode, hash, path} and, for trees, a loaded body.
function treeWalk(hash, callback) {
  if (!callback) return treeWalk.bind(this, hash);
  var repo = this;
  return repo.loadAs("tree", hash, onTree);

  function onTree(err, body) {
    if (!body) return callback(err || new Error("Missing tree " + hash));
    var root = {
      mode: modes.tree,
      hash: hash,
      body: body,
      path: "/"
    };
    callback(null, walk(root, treeScan, treeLoadKey, treeCompare));
  }

  // Fill in the body of a subtree entry; non-tree entries pass through.
  function treeLoadKey(entry, callback) {
    if (entry.mode !== modes.tree) return callback(null, entry);
    var type = modes.toType(entry.mode);
    repo.loadAs(type, entry.hash, function (err, body) {
      if (err) return callback(err);
      entry.body = body;
      callback(null, entry);
    });
  }
}
// Children of a walked item: tree entries become {mode, hash, path}
// (subtree paths get a trailing slash); non-trees have no children.
function treeScan(object) {
  if (object.mode !== modes.tree) return [];
  var tree = object.body;
  return Object.keys(tree).map(function (name) {
    var entry = tree[name];
    var childPath = object.path + name;
    if (entry.mode === modes.tree) childPath += "/";
    return {
      mode: entry.mode,
      hash: entry.hash,
      path: childPath
    };
  });
}
// Order walked entries lexicographically by path.
function treeCompare(left, right) {
  return left.path < right.path;
}
// Turn a "hashish" into a concrete hash: a full 40-char hex string passes
// straight through; anything else is treated as a ref name and looked up.
function resolveRef(repo, hashish, callback) {
  var isHash = /^[0-9a-f]{40}$/.test(hashish);
  if (isHash) return callback(null, hashish);
  repo.readRef(hashish, onRef);

  function onRef(err, hash) {
    if (hash) return callback(null, hash);
    callback(err || new Error("Bad ref " + hashish));
  }
}
// Generic asynchronous walker. Starting from `seed`, `scan(item)` lists the
// keys of the item's children, `loadKey(key, cb)` loads a child, and
// `compare(a, b)` orders the pending queue (true = `a` sorts before `b`).
// Returns a take-style stream: read(cb) yields the next item, undefined at
// the end.
function walk(seed, scan, loadKey, compare) {
  var queue = [seed];
  // `working` counts in-flight loadKey calls; `cb` holds the single
  // suspended reader waiting for those loads to finish.
  var working = 0, error, cb;
  return {read: read, abort: abort};
  function read(callback) {
    if (!callback) return read;
    if (cb) return callback(new Error("Only one read at a time"));
    // Loads still pending: park the reader until onItem resumes it.
    if (working) { cb = callback; return; }
    var item = queue.shift();
    if (!item) return callback();
    // Kick off loads for this item's children before handing it out.
    try { scan(item).forEach(onKey); }
    catch (err) { return callback(err); }
    return callback(null, item);
  }
  function abort(callback) { return callback(); }
  function onError(err) {
    if (cb) {
      var callback = cb; cb = null;
      return callback(err);
    }
    // NOTE(review): with no reader parked, the error is stashed in `error`
    // but never delivered anywhere visible in this block — confirm intent.
    error = err;
  }
  function onKey(key) {
    working++;
    loadKey(key, onItem);
  }
  function onItem(err, item) {
    working--;
    if (err) return onError(err);
    // Insertion sort: walk back from the tail to keep the queue ordered
    // per `compare`.
    var index = queue.length;
    while (index && compare(item, queue[index - 1])) index--;
    queue.splice(index, 0, item);
    // Last outstanding load finished: resume the parked reader.
    if (!working && cb) {
      var callback = cb; cb = null;
      return read(callback);
    }
  }
}

167
api.hyungi.net/node_modules/js-git/mixins/websql-db.js generated vendored Normal file
View File

@@ -0,0 +1,167 @@
"use strict";
var codec = require('../lib/object-codec.js');
var bodec = require('bodec');
var inflate = require('../lib/inflate');
var deflate = require('../lib/deflate');
var sha1 = require('git-sha1');
var modes = require('../lib/modes.js');
// Singleton WebSQL database handle shared by every repo this mixin serves.
// Populated by init(); all queries below assume init() completed first.
var db;

// Static attach points so the raw helpers are reachable without a repo.
mixin.init = init;
mixin.loadAs = loadAs;
mixin.saveAs = saveAs;
mixin.loadRaw = loadRaw;
mixin.saveRaw = saveRaw;
module.exports = mixin;
// Wire the WebSQL-backed storage API onto `repo`. `prefix` namespaces the
// repo's refs so multiple repos can share one database.
function mixin(repo, prefix) {
  if (!prefix) throw new Error("Prefix required");
  repo.refPrefix = prefix;
  // Object storage.
  repo.loadAs = loadAs;
  repo.loadRaw = loadRaw;
  repo.saveAs = saveAs;
  repo.saveRaw = saveRaw;
  repo.hasHash = hasHash;
  // Ref storage.
  repo.readRef = readRef;
  repo.updateRef = updateRef;
}
// Open (or create) the backing database and ensure both tables exist.
// Must complete before any other function in this module is used.
// NOTE(review): openDatabase is the deprecated WebSQL API — browser-only.
function init(callback) {
  db = openDatabase('tedit', '1.0', 'tedit local data', 10 * 1024 * 1024);
  db.transaction(function (tx) {
    tx.executeSql(
      'CREATE TABLE IF NOT EXISTS objects (hash unique, body blob)'
    );
    tx.executeSql(
      'CREATE TABLE IF NOT EXISTS refs (path unique, value text)'
    );
  }, function () {
    // Transaction error callback: log the raw arguments for debugging and
    // surface one generic error to the caller.
    console.error(arguments);
    callback(new Error("Problem initializing database"));
  }, function () {
    // Transaction success callback.
    callback();
  });
}
// Frame a typed object into git's canonical byte form, hash it, and store
// it under that hash. Calls back with (err) or (null, hash) via saveRaw.
function saveAs(type, body, callback) {
  /*jshint validthis:true*/
  if (!callback) return saveAs.bind(this, type, body);
  var buffer, hash;
  try {
    buffer = codec.frame({ type: type, body: body });
    hash = sha1(buffer);
  }
  catch (err) {
    return callback(err);
  }
  this.saveRaw(hash, buffer, callback);
}
// Persist a framed object buffer under `hash` (deflated, base64-encoded).
// Calls back with (null, hash) on success or (err) on failure.
function saveRaw(hash, buffer, callback) {
  /*jshint validthis:true*/
  if (!callback) return saveRaw.bind(this, hash, buffer);
  // INSERT OR REPLACE: objects are content-addressed, so re-saving an
  // existing hash is routine — a plain INSERT would violate the UNIQUE
  // constraint on `hash` and fail.
  var sql = 'INSERT OR REPLACE INTO objects (hash, body) VALUES (?, ?)';
  db.transaction(function (tx) {
    var text;
    try {
      text = bodec.toBase64(deflate(buffer));
    }
    catch (err) {
      return callback(err);
    }
    tx.executeSql(sql, [hash, text], function () {
      callback(null, hash);
    }, function (tx, error) {
      // Without this statement error callback, SQL failures (constraint,
      // quota) would never reach the caller.
      callback(new Error(error.message));
    });
  });
}
// Load the object at `hash`, verify it has the expected type, and decode
// its body. Calls back with undefined when the object is absent.
function loadAs(type, hash, callback) {
  /*jshint validthis:true*/
  if (!callback) return loadAs.bind(this, type, hash);
  loadRaw(hash, function (err, buffer) {
    if (!buffer) return callback(err);
    var body;
    try {
      var parts = codec.deframe(buffer);
      if (parts.type !== type) throw new Error("Type mismatch");
      body = codec.decoders[type](parts.body);
    }
    catch (err) {
      return callback(err);
    }
    callback(null, body);
  });
}
// Fetch the raw framed buffer stored under `hash` (base64 + deflate on
// disk). Calls back with no value when the object does not exist.
function loadRaw(hash, callback) {
  /*jshint validthis:true*/
  if (!callback) return loadRaw.bind(this, hash);
  var sql = 'SELECT * FROM objects WHERE hash=?';
  db.readTransaction(function (tx) {
    tx.executeSql(sql, [hash], onResult, onSqlError);

    function onResult(tx, result) {
      if (!result.rows.length) return callback();
      var row = result.rows.item(0);
      var buffer;
      try {
        buffer = inflate(bodec.fromBase64(row.body));
      }
      catch (err) {
        return callback(err);
      }
      callback(null, buffer);
    }

    function onSqlError(tx, error) {
      callback(new Error(error.message));
    }
  });
}
// Check whether an object exists locally. For trees this is a deep check:
// every entry, recursively, must also be present, so a partially-synced
// tree reports false.
// NOTE(review): other backends expose hasHash(hash, cb); this variant takes
// (type, hash, cb) and has no partial-application guard — confirm callers
// (e.g. the sync mixin) pass arguments in this order.
function hasHash(type, hash, callback) {
  /*jshint validthis:true*/
  loadAs(type, hash, function (err, value) {
    if (err) return callback(err);
    if (value === undefined) return callback(null, false);
    if (type !== "tree") return callback(null, true);
    // Recurse into each tree entry; any missing child makes the whole
    // tree count as absent.
    var names = Object.keys(value);
    next();
    function next() {
      if (!names.length) return callback(null, true);
      var name = names.pop();
      var entry = value[name];
      hasHash(modes.toType(entry.mode), entry.hash, function (err, has) {
        if (err) return callback(err);
        if (has) return next();
        callback(null, false);
      });
    }
  });
}
// Read the current value of a ref (e.g. "refs/heads/master"), namespaced
// under this repo's prefix. Calls back with undefined when absent.
function readRef(ref, callback) {
  /*jshint validthis:true*/
  // Partial-application guard, matching the other repo methods.
  if (!callback) return readRef.bind(this, ref);
  var key = this.refPrefix + "/" + ref;
  var sql = 'SELECT * FROM refs WHERE path=?';
  // Read-only query: readTransaction (as loadRaw uses) allows it to run
  // concurrently with other reads.
  db.readTransaction(function (tx) {
    tx.executeSql(sql, [key], function (tx, result) {
      if (!result.rows.length) return callback();
      var item = result.rows.item(0);
      callback(null, item.value);
    }, function (tx, error) {
      callback(new Error(error.message));
    });
  });
}
// Point a ref at a new hash, creating it if needed.
function updateRef(ref, hash, callback) {
  /*jshint validthis:true*/
  // Partial-application guard, matching the other repo methods.
  if (!callback) return updateRef.bind(this, ref, hash);
  var key = this.refPrefix + "/" + ref;
  // INSERT OR REPLACE: `path` carries a UNIQUE constraint, so a plain
  // INSERT fails whenever the ref already exists — and updating an
  // existing ref is this function's primary job.
  var sql = 'INSERT OR REPLACE INTO refs (path, value) VALUES (?, ?)';
  db.transaction(function (tx) {
    tx.executeSql(sql, [key, hash], function () {
      callback();
    }, function (tx, error) {
      callback(new Error(error.message));
    });
  });
}