feat: 초기 프로젝트 설정 및 룰.md 파일 추가

This commit is contained in:
2025-07-28 09:53:31 +09:00
commit 09a4d38512
8165 changed files with 1021855 additions and 0 deletions

61
api.hyungi.net/node_modules/js-git/lib/apply-delta.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
var bodec = require('bodec');

module.exports = applyDelta;

// Apply a git delta buffer to a base buffer and return the
// reconstructed target buffer. Implements the pack delta format:
// two varint lengths (base size, target size) followed by a stream
// of copy and insert opcodes.
function applyDelta(delta, base) {
  var deltaOffset = 0;

  // First varint is the expected base length; sanity-check it.
  if (base.length !== readLength()) {
    throw new Error("Base length mismatch");
  }

  // Create a new output buffer with length from header.
  var outOffset = 0;
  var out = bodec.create(readLength());

  while (deltaOffset < delta.length) {
    var byte = delta[deltaOffset++];
    // Copy command. Tells us offset in base and length to copy.
    if (byte & 0x80) {
      var offset = 0;
      var length = 0;
      // The low 7 bits of the opcode say which offset/length bytes follow.
      if (byte & 0x01) offset |= delta[deltaOffset++] << 0;
      if (byte & 0x02) offset |= delta[deltaOffset++] << 8;
      if (byte & 0x04) offset |= delta[deltaOffset++] << 16;
      if (byte & 0x08) offset |= delta[deltaOffset++] << 24;
      if (byte & 0x10) length |= delta[deltaOffset++] << 0;
      if (byte & 0x20) length |= delta[deltaOffset++] << 8;
      if (byte & 0x40) length |= delta[deltaOffset++] << 16;
      // Per the pack delta spec, a zero copy length means 0x10000 bytes.
      if (length === 0) length = 0x10000;
      // copy the data
      bodec.copy(bodec.slice(base, offset, offset + length), out, outOffset);
      outOffset += length;
    }
    // Insert command, opcode byte is length itself
    else if (byte) {
      bodec.copy(bodec.slice(delta, deltaOffset, deltaOffset + byte), out, outOffset);
      deltaOffset += byte;
      outOffset += byte;
    }
    else throw new Error('Invalid delta opcode');
  }

  // The output must be filled exactly or the delta was corrupt.
  if (outOffset !== out.length) {
    throw new Error("Size mismatch in check");
  }

  return out;

  // Read a variable length number out of delta and move the offset.
  // Little-endian base-128: low 7 bits per byte, high bit = continue.
  function readLength() {
    var byte = delta[deltaOffset++];
    var length = byte & 0x7f;
    var shift = 7;
    while (byte & 0x80) {
      byte = delta[deltaOffset++];
      length |= (byte & 0x7f) << shift;
      shift += 7;
    }
    return length;
  }
}

67
api.hyungi.net/node_modules/js-git/lib/config-codec.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
"use strict";
// This is for working with git config files like .git/config and .gitmodules.
// I believe this is just INI format.
module.exports = {
encode: encode,
decode: decode
};
// Serialize a config object into git-config (INI-style) text.
// Top-level keys become `[section]` headers; nested objects within a
// section become `[section "subsection"]` headers; all other values
// become `\tkey = value` lines. Direct values are emitted before
// subsections. Returns the text with a trailing newline.
function encode(config) {
  var out = [];

  // Append one section header followed by its key/value lines.
  function writeSection(header, body) {
    out.push(header);
    Object.keys(body).forEach(function (key) {
      out.push("\t" + key + " = " + body[key]);
    });
  }

  Object.keys(config).forEach(function (sectionName) {
    var section = config[sectionName];
    var subsections = {};
    var plainValues = {};
    var plainCount = 0;

    // Split direct values from nested subsection objects.
    Object.keys(section).forEach(function (key) {
      var entry = section[key];
      if (typeof entry === 'object') {
        subsections[key] = entry;
      }
      else {
        plainCount++;
        plainValues[key] = entry;
      }
    });

    // Direct values first, then each subsection in key order.
    if (plainCount > 0) writeSection('[' + sectionName + ']', plainValues);
    Object.keys(subsections).forEach(function (sub) {
      writeSection('[' + sectionName + ' "' + sub + '"]', subsections[sub]);
    });
  });

  return out.join("\n") + "\n";
}
// Parse git-config (INI-style) text into a nested object.
// `[section]` lines open (or re-open) config[section]; a quoted
// `[section "sub"]` opens config[section][sub] (always a fresh object).
// `key = value` lines land on the most recently opened section; all
// values are kept as strings. Unrecognized lines are ignored.
function decode(text) {
  var config = {};
  var current;
  var rows = text.split(/[\r\n]+/);
  for (var i = 0; i < rows.length; i++) {
    var row = rows[i];
    var header = row.match(/\[([^ \t"\]]+) *(?:"([^"]+)")?\]/);
    if (header) {
      var name = header[1];
      // Re-use an existing section object so repeated headers merge.
      if (!config[name]) config[name] = {};
      current = config[name];
      if (header[2]) {
        current = current[header[2]] = {};
      }
      continue;
    }
    var pair = row.match(/([^ \t=]+)[ \t]*=[ \t]*(.+)/);
    if (pair) current[pair[1]] = pair[2];
  }
  return config;
}

33
api.hyungi.net/node_modules/js-git/lib/defer.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
"use strict";
var timeouts, messageName;
// node.js
if (typeof process === "object" && typeof process.nextTick === "function") {
module.exports = process.nextTick;
}
// some browsers
else if (typeof setImmediate === "function") {
module.exports = setImmediate;
}
// most other browsers
else {
timeouts = [];
messageName = "zero-timeout-message";
window.addEventListener("message", handleMessage, true);
module.exports = function (fn) {
timeouts.push(fn);
window.postMessage(messageName, "*");
};
}
function handleMessage(event) {
if (event.source == window && event.data == messageName) {
event.stopPropagation();
if (timeouts.length > 0) {
var fn = timeouts.shift();
fn();
}
}
}

10
api.hyungi.net/node_modules/js-git/lib/deflate.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
var pako = require('pako');
var Binary = require('bodec').Binary;

// deflate(value) -> zlib-deflated binary, via pako.
// When bodec's Binary type is Uint8Array, pako can be used directly;
// otherwise (e.g. node Buffer builds) convert on the way in and out.
if (Binary === Uint8Array) {
  module.exports = pako.deflate;
}
else {
  module.exports = function deflate(value) {
    return new Binary(pako.deflate(new Uint8Array(value)));
  };
}

58
api.hyungi.net/node_modules/js-git/lib/find-common.js generated vendored Normal file
View File

@@ -0,0 +1,58 @@
// Wrap `fn` so that only its first invocation runs; every later call
// is a silent no-op returning undefined. Used to guard callbacks that
// might otherwise fire more than once.
function oneCall(fn) {
  var pending = fn;
  return function () {
    var target = pending;
    if (!target) return;
    pending = null;
    return target.apply(this, arguments);
  };
}
module.exports = findCommon;

// Walk the histories of commits `a` and `b` in parallel and count how
// far each is from their first common ancestor.
// Calls callback(err) on failure (or when no common commit exists),
// otherwise callback(null, ahead, behind) where `ahead` is how many
// commits `a` is past the common point and `behind` the same for `b`.
function findCommon(repo, a, b, callback) {
  // Guard: the two independent walkers must not both fire the callback.
  callback = oneCall(callback);
  var ahead = 0, behind = 0;
  var aStream, bStream;
  var aCommit, bCommit;

  // Trivial case: identical hashes need no walking.
  if (a === b) return callback(null, ahead, behind);

  repo.logWalk(a, onAStream);
  repo.logWalk(b, onBStream);

  function onAStream(err, stream) {
    if (err) return callback(err);
    aStream = stream;
    aStream.read(onA);
  }
  function onBStream(err, stream) {
    if (err) return callback(err);
    bStream = stream;
    bStream.read(onB);
  }
  function onA(err, commit) {
    if (!commit) return callback(err || new Error("No common commit"));
    aCommit = commit;
    // Only compare once both walkers have produced a current commit.
    if (bCommit) compare();
  }
  function onB(err, commit) {
    if (!commit) return callback(err || new Error("No common commit"));
    bCommit = commit;
    if (aCommit) compare();
  }
  // Advance whichever side holds the newer commit (by author timestamp)
  // until both walkers land on the same hash.
  function compare() {
    if (aCommit.hash === bCommit.hash) return callback(null, ahead, behind);
    if (aCommit.author.date.seconds > bCommit.author.date.seconds) {
      ahead++;
      aStream.read(onA);
    }
    else {
      behind++;
      bStream.read(onB);
    }
  }
}

125
api.hyungi.net/node_modules/js-git/lib/git-fs.js generated vendored Normal file
View File

@@ -0,0 +1,125 @@
"use strict";
var modes = require('./modes');
var defer = require('./defer');
// options.encrypt(plain) -> encrypted
// options.decrypt(encrypted) -> plain
// options.shouldEncrypt(path) -> boolean
// options.getRootTree() => hash
// options.setRootTree(hash) =>
module.exports = function (repo, options) {
var toWrite = {};
var callbacks = [];
var writing = false;
return {
readFile: readFile,
writeFile: writeFile,
readDir: readDir
};
function readFile(path, callback) {
if (!callback) return readFile.bind(null, path);
// If there is a pending write for this path, pull from the cache.
if (toWrite[path]) return callback(null, toWrite[path]);
// Otherwise read from the persistent storage
options.getRootTree(onRootTree);
function onRootTree(err, hash) {
if (!hash) return callback(err);
repo.pathToEntry(hash, path, onEntry);
}
function onEntry(err, entry) {
if (!entry || !modes.isBlob(entry.mode)) return callback(err);
repo.loadAs("blob", entry.hash, function (err, content) {
if (!content) return callback(err);
if (entry.mode === modes.sym) {
content = options.decrypt(content);
}
callback(null, content);
});
}
}
function writeFile(path, binary, callback) {
if (!callback) return writeFile.bind(null, path, binary);
toWrite[path] = binary;
callbacks.push(callback);
defer(check);
}
function readDir(path, callback) {
if (!callback) return readDir.bind(null, path);
options.getRootTree(onRootTree);
function onRootTree(err, hash) {
if (!hash) return callback(err);
repo.pathToEntry(hash, path, onEntry);
}
function onEntry(err, entry) {
if (!entry || entry.mode !== modes.tree) return callback(err);
repo.loadAs("tree", entry.hash, onTree);
}
function onTree(err, tree) {
if (!tree) return callback(err);
callback(null, Object.keys(tree));
}
}
function check() {
if (writing || !callbacks.length) return;
writing = true;
options.getRootTree(onRootTree);
function onRootTree(err, hash) {
if (err) return callall(err);
var files = pullFiles();
if (hash) files.base = hash;
repo.createTree(files, onNewTree);
}
function onNewTree(err, hash) {
if (err) return callall(err);
options.setRootTree(hash, onSaveRoot);
}
function onSaveRoot(err) {
if (err) return callall(err);
writing = false;
callall();
defer(check);
}
}
function pullFiles() {
var files = Object.keys(toWrite).map(function (path) {
var content = toWrite[path];
delete toWrite[path];
var mode = modes.blob;
if (options.shouldEncrypt && options.shouldEncrypt(path)) {
mode = modes.sym;
content = options.encrypt(content);
}
return {
path: path,
mode: mode,
content: content
};
});
return files;
}
function callall(err) {
callbacks.splice(0, callbacks.length).forEach(function (callback) {
callback(err);
});
}
};

View File

@@ -0,0 +1,36 @@
var Inflate = require('pako').Inflate;
var Binary = require('bodec').Binary;

// Byte oriented inflate stream. Wrapper for pako's Inflate.
//
// var inf = inflate();
// inf.write(byte) -> more - Write a byte to inflate's state-machine.
//                           Returns true if more data is expected.
// inf.recycle()            - Reset the internal state machine.
// inf.flush() -> data      - Flush the output as a binary buffer.
//
module.exports = function inflateStream() {
  var inf = new Inflate();
  // Reusable one-byte scratch buffer fed to pako on each write.
  var b = new Uint8Array(1);
  var empty = new Binary(0);
  return {
    write: write,
    recycle: recycle,
    // Skip the convert step when bodec's Binary already is Uint8Array.
    flush: Binary === Uint8Array ? flush : flushConvert
  };

  // Push one byte; true means the zlib stream has not ended yet.
  function write(byte) {
    b[0] = byte;
    inf.push(b);
    return !inf.ended;
  }
  // Start over with a fresh pako state machine.
  function recycle() { inf = new Inflate(); }
  function flush() { return inf.result || empty; }
  function flushConvert() {
    return inf.result ? new Binary(inf.result) : empty;
  }
};

10
api.hyungi.net/node_modules/js-git/lib/inflate.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
var pako = require('pako');
var Binary = require('bodec').Binary;

// inflate(value) -> zlib-inflated binary, via pako.
// Same Binary-type shortcut as deflate.js: pass pako through when
// bodec's Binary is Uint8Array, otherwise convert in and out.
if (Binary === Uint8Array) {
  module.exports = pako.inflate;
}
else {
  module.exports = function inflate(value) {
    return new Binary(pako.inflate(new Uint8Array(value)));
  };
}

28
api.hyungi.net/node_modules/js-git/lib/modes.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
"use strict";
var masks = {
mask: parseInt('100000', 8),
blob: parseInt('140000', 8),
file: parseInt('160000', 8)
};
var modes = module.exports = {
isBlob: function (mode) {
return (mode & masks.blob) === masks.mask;
},
isFile: function (mode) {
return (mode & masks.file) === masks.mask;
},
toType: function (mode) {
if (mode === modes.commit) return "commit";
if (mode === modes.tree) return "tree";
if ((mode & masks.blob) === masks.mask) return "blob";
return "unknown";
},
tree: parseInt( '40000', 8),
blob: parseInt('100644', 8),
file: parseInt('100644', 8),
exec: parseInt('100755', 8),
sym: parseInt('120000', 8),
commit: parseInt('160000', 8)
};

265
api.hyungi.net/node_modules/js-git/lib/object-codec.js generated vendored Normal file
View File

@@ -0,0 +1,265 @@
"use strict";
var bodec = require('bodec');
var modes = require('./modes');
// (body) -> raw-buffer
var encoders = exports.encoders = {
blob: encodeBlob,
tree: encodeTree,
commit: encodeCommit,
tag: encodeTag
};
// ({type:type, body:raw-buffer}) -> buffer
exports.frame = frame;
// (raw-buffer) -> body
var decoders = exports.decoders ={
blob: decodeBlob,
tree: decodeTree,
commit: decodeCommit,
tag: decodeTag
};
// (buffer) -> {type:type, body:raw-buffer}
exports.deframe = deframe;
// Export git style path sort in case it's wanted.
exports.treeMap = treeMap;
exports.treeSort = treeSort;
function encodeBlob(body) {
if (!bodec.isBinary(body)) throw new TypeError("Blobs must be binary values");
return body;
}
// Shape one tree entry (keyed by name on `this`) into the
// {name, mode, hash} record used while encoding trees. Intended to be
// invoked as Array#map(treeMap, treeObject) so `this` is the tree.
function treeMap(key) {
  /*jshint validthis:true*/
  var node = this[key];
  return { name: key, mode: node.mode, hash: node.hash };
}
// Git's canonical tree-entry ordering: entries sort by name, except
// tree (directory) entries sort as if their name had a trailing "/".
function treeSort(a, b) {
  var aa = (a.mode === modes.tree) ? a.name + "/" : a.name;
  var bb = (b.mode === modes.tree) ? b.name + "/" : b.name;
  return aa > bb ? 1 : aa < bb ? -1 : 0;
}

// Serialize a {name: {mode, hash}} tree object into git's raw tree
// format: "<octal mode> <name>\0<20 raw sha1 bytes>" per entry, in
// canonical sort order.
function encodeTree(body) {
  var tree = "";
  if (Array.isArray(body)) throw new TypeError("Tree must be in object form");
  var list = Object.keys(body).map(treeMap, body).sort(treeSort);
  for (var i = 0, l = list.length; i < l; i++) {
    var entry = list[i];
    tree += entry.mode.toString(8) + " " + bodec.encodeUtf8(entry.name) +
            "\0" + bodec.decodeHex(entry.hash);
  }
  return bodec.fromRaw(tree);
}
// Serialize a tag body {object, type, tag, tagger, message} into the
// raw "header lines, blank line, message" form that git hashes.
function encodeTag(body) {
  var str = "object " + body.object +
    "\ntype " + body.type +
    "\ntag " + body.tag +
    "\ntagger " + formatPerson(body.tagger) +
    "\n\n" + body.message;
  return bodec.fromUnicode(str);
}

// Serialize a commit body {tree, parents, author, committer, message}.
// Parent lines appear in array order; merge commits have several.
function encodeCommit(body) {
  var str = "tree " + body.tree;
  for (var i = 0, l = body.parents.length; i < l; ++i) {
    str += "\nparent " + body.parents[i];
  }
  str += "\nauthor " + formatPerson(body.author) +
    "\ncommitter " + formatPerson(body.committer) +
    "\n\n" + body.message;
  return bodec.fromUnicode(str);
}
// Render a {name, email, date} person record in git's author/committer
// line format: "Name <email> <unix-seconds> <±HHMM>".
function formatPerson(person) {
  var pieces = [safe(person.name), " <", safe(person.email), "> ", formatDate(person.date)];
  return pieces.join("");
}
// Strip characters git forbids in names/emails: NULs, newlines and
// angle brackets anywhere, plus leading/trailing punctuation runs.
function safe(string) {
  return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, "");
}
// Left-pad a number below 10 with a zero ("7" -> "07").
function two(num) {
  if (num < 10) return "0" + num;
  return "" + num;
}
// Format a date as "<seconds> <±HHMM>". Accepts either git-style
// {seconds, offset-in-minutes-west-of-UTC} records or Date instances.
function formatDate(date) {
  var seconds;
  var offset;
  // NOTE: truthiness check means seconds === 0 falls into the Date
  // branch — epoch edge case inherited from the original shape.
  if (date.seconds) {
    seconds = date.seconds;
    offset = date.offset;
  }
  else {
    // Date#getTimezoneOffset is positive west of UTC, matching the
    // record convention used above.
    seconds = Math.floor(date.getTime() / 1000);
    offset = date.getTimezoneOffset();
  }
  var sign;
  if (offset <= 0) {
    sign = "+";
    offset = -offset;
  }
  else {
    sign = "-";
  }
  var hours = Math.floor(offset / 60);
  var minutes = offset % 60;
  return seconds + " " + sign + two(hours) + two(minutes);
}
// Wrap a body in git's loose-object framing: "<type> <size>\0" + body.
// Accepts either a pre-encoded binary body or a structured one, which
// is first run through the matching type encoder.
function frame(obj) {
  var type = obj.type;
  var body = obj.body;
  if (!bodec.isBinary(body)) body = encoders[type](body);
  return bodec.join([
    bodec.fromRaw(type + " " + body.length + "\0"),
    body
  ]);
}

// Blob bodies are opaque binary; decoding is the identity.
function decodeBlob(body) {
  return body;
}

// Parse a raw tree buffer into {name: {mode, hash}} form.
// Entry layout: "<octal mode> <name>\0" followed by 20 raw sha1 bytes.
function decodeTree(body) {
  var i = 0;
  var length = body.length;
  var start;
  var mode;
  var name;
  var hash;
  var tree = {};
  while (i < length) {
    start = i;
    i = indexOf(body, 0x20, start);
    if (i < 0) throw new SyntaxError("Missing space");
    mode = parseOct(body, start, i++);
    start = i;
    i = indexOf(body, 0x00, start);
    name = bodec.toUnicode(body, start, i++);
    // The 20 bytes after the NUL are the binary sha1; hex-encode them
    // (note i advances past them via the i += 20 side effect).
    hash = bodec.toHex(body, i, i += 20);
    tree[name] = {
      mode: mode,
      hash: hash
    };
  }
  return tree;
}
// Parse a raw commit buffer into {tree, parents, author, committer,
// message, ...}. Headers are "key value" lines up to a blank line;
// everything after it is the message. Repeated "parent" keys
// accumulate into the parents array.
function decodeCommit(body) {
  var i = 0;
  var start;
  var key;
  var parents = [];
  var commit = {
    tree: "",
    parents: parents,
    author: "",
    committer: "",
    message: ""
  };
  // A line beginning with LF (0x0a) marks the end of the headers.
  while (body[i] !== 0x0a) {
    start = i;
    i = indexOf(body, 0x20, start);
    if (i < 0) throw new SyntaxError("Missing space");
    key = bodec.toRaw(body, start, i++);
    start = i;
    i = indexOf(body, 0x0a, start);
    if (i < 0) throw new SyntaxError("Missing linefeed");
    var value = bodec.toUnicode(body, start, i++);
    if (key === "parent") {
      parents.push(value);
    }
    else {
      // Person headers get structured {name, email, date} values.
      if (key === "author" || key === "committer") {
        value = decodePerson(value);
      }
      commit[key] = value;
    }
  }
  i++;
  commit.message = bodec.toUnicode(body, i, body.length);
  return commit;
}

// Parse a raw annotated-tag buffer; same header/message layout as
// commits, with "tagger" as the only person-valued header.
function decodeTag(body) {
  var i = 0;
  var start;
  var key;
  var tag = {};
  while (body[i] !== 0x0a) {
    start = i;
    i = indexOf(body, 0x20, start);
    if (i < 0) throw new SyntaxError("Missing space");
    key = bodec.toRaw(body, start, i++);
    start = i;
    i = indexOf(body, 0x0a, start);
    if (i < 0) throw new SyntaxError("Missing linefeed");
    var value = bodec.toUnicode(body, start, i++);
    if (key === "tagger") value = decodePerson(value);
    tag[key] = value;
  }
  i++;
  tag.message = bodec.toUnicode(body, i, body.length);
  return tag;
}
// Parse a git person string "Name <email> <unix-seconds> <zone>" into
// {name, email, date: {seconds, offset}}.
//
// `offset` uses git's minutes-west-of-UTC convention (sign flipped
// from the "+HHMM"/"-HHMM" zone text), the same form formatDate()
// consumes when re-encoding.
//
// Fix: the previous `zone / 100 * -60` math only handled whole-hour
// zones ("+0530" decoded to -318 instead of -330, breaking the
// round-trip with formatDate). Hours and minutes are now split out.
function decodePerson(string) {
  var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/);
  if (!match) throw new Error("Improperly formatted person string");
  var zone = parseInt(match[4], 10);
  // `| 0` truncates toward zero so the sign carries through both
  // components for negative zones (-530 -> -5 hours, -30 minutes).
  var hours = (zone / 100) | 0;
  var minutes = zone % 100;
  return {
    name: match[1],
    email: match[2],
    date: {
      seconds: parseInt(match[3], 10),
      // Negate: "-0500" (west of UTC) becomes +300 minutes.
      offset: -(hours * 60 + minutes)
    }
  };
}
// Split a loose-object buffer back into {type, body}, validating the
// "<type> <size>\0" header against the actual body length. When
// `decode` is truthy the body is also run through the structured
// decoder for its type.
function deframe(buffer, decode) {
  var space = indexOf(buffer, 0x20);
  if (space < 0) throw new Error("Invalid git object buffer");
  var nil = indexOf(buffer, 0x00, space);
  if (nil < 0) throw new Error("Invalid git object buffer");
  var body = bodec.slice(buffer, nil + 1);
  var size = parseDec(buffer, space + 1, nil);
  if (size !== body.length) throw new Error("Invalid body length.");
  var type = bodec.toRaw(buffer, 0, space);
  return {
    type: type,
    body: decode ? decoders[type](body) : body
  };
}
// Return the first index of `byte` in `buffer` at or after `i`
// (i defaults to 0 via the |0 coercion), or -1 when absent.
// Works on any indexable byte container (Array, Uint8Array, ...).
function indexOf(buffer, byte, i) {
  var pos = i | 0;
  var end = buffer.length;
  while (pos < end) {
    if (buffer[pos] === byte) return pos;
    pos++;
  }
  return -1;
}
// Parse ASCII octal digits in buffer[start, end) into a number.
// No validation is performed; assumes bytes in the 0x30-0x37 range.
function parseOct(buffer, start, end) {
  var value = 0;
  for (var i = start; i < end; i++) {
    value = (value << 3) + (buffer[i] - 0x30);
  }
  return value;
}
// Parse ASCII decimal digits in buffer[start, end) into a number.
// No validation is performed; assumes bytes in the 0x30-0x39 range.
function parseDec(buffer, start, end) {
  var value = 0;
  for (var i = start; i < end; i++) {
    value = value * 10 + (buffer[i] - 0x30);
  }
  return value;
}

326
api.hyungi.net/node_modules/js-git/lib/pack-codec.js generated vendored Normal file
View File

@@ -0,0 +1,326 @@
var inflateStream = require('./inflate-stream.js');
var inflate = require('./inflate.js');
var deflate = require('./deflate.js');
var sha1 = require('git-sha1');
var bodec = require('bodec');

// Packfile object-type codes (pack format v2/v3); 5 is unused here.
var typeToNum = {
  commit: 1,
  tree: 2,
  blob: 3,
  tag: 4,
  "ofs-delta": 6,
  "ref-delta": 7
};
// Reverse lookup: numeric code -> type name.
var numToType = {};
for (var type in typeToNum) {
  var num = typeToNum[type];
  numToType[num] = type;
}

exports.parseEntry = parseEntry;

// Decode a single, complete pack entry buffer into {type, body [, ref]}.
// Layout: varint type+size header, optional delta base reference,
// then the zlib-deflated body.
function parseEntry(chunk) {
  var offset = 0;
  var byte = chunk[offset++];
  // First byte: 1 continuation bit, 3 type bits, 4 low size bits.
  var type = numToType[(byte >> 4) & 0x7];
  var size = byte & 0xf;
  var left = 4;
  // Remaining size bits arrive 7 at a time while the high bit is set.
  while (byte & 0x80) {
    byte = chunk[offset++];
    size |= (byte & 0x7f) << left;
    left += 7;
  }
  // Force unsigned in case the shifts pushed into the sign bit.
  size = size >>> 0;
  var ref;
  if (type === "ref-delta") {
    // Delta base identified by a 20-byte raw sha1, kept as hex.
    ref = bodec.toHex(bodec.slice(chunk, offset, offset += 20));
  }
  else if (type === "ofs-delta") {
    // Delta base identified by a big-endian modified base-128 offset.
    byte = chunk[offset++];
    ref = byte & 0x7f;
    while (byte & 0x80) {
      byte = chunk[offset++];
      ref = ((ref + 1) << 7) | (byte & 0x7f);
    }
  }
  var body = inflate(bodec.slice(chunk, offset));
  if (body.length !== size) {
    throw new Error("Size mismatch");
  }
  var result = {
    type: type,
    body: body
  };
  if (typeof ref !== "undefined") {
    result.ref = ref;
  }
  return result;
}
exports.decodePack = decodePack;

// Streaming packfile decoder. Returns a sink that accepts binary
// chunks (undefined = end of stream) and emits, in order:
//   {version, num}                       once, after the 12-byte header
//   {type, size, body, offset [, ref]}   once per object
//   ()                                   at EOS, after the checksum check
// Each $-prefixed function below is one state of a byte-driven state
// machine; each returns the state to use for the next byte.
function decodePack(emit) {
  var state = $pack;
  var sha1sum = sha1();
  var inf = inflateStream();

  var offset = 0;            // byte counter within the current field
  var position = 0;          // absolute position in the pack stream
  var version = 0x4b434150; // PACK reversed
  var num = 0;               // objects remaining to decode
  var type = 0;              // current object's numeric type code
  var length = 0;            // current object's inflated length
  var ref = null;            // delta base (hex string or ofs number)
  var checksum = "";
  var start = 0;             // pack offset where the current object began
  var parts = [];            // inflated pieces of the current object

  return function (chunk) {
    if (chunk === undefined) {
      if (num || checksum.length < 40) throw new Error("Unexpected end of input stream");
      return emit();
    }
    // Drive the state machine one byte at a time over this chunk.
    for (var i = 0, l = chunk.length; i < l; i++) {
      // console.log([state, i, chunk[i].toString(16)]);
      if (!state) throw new Error("Unexpected extra bytes: " + bodec.slice(chunk, i));
      state = state(chunk[i], i, chunk);
      position++;
    }
    if (!state) return;
    // Everything before the trailing checksum is itself checksummed.
    if (state !== $checksum) sha1sum.update(chunk);
    var buff = inf.flush();
    if (buff.length) {
      parts.push(buff);
    }
  };

  // The first four bytes in a packfile are the bytes 'PACK'
  function $pack(byte) {
    if ((version & 0xff) === byte) {
      version >>>= 8;
      return version ? $pack : $version;
    }
    throw new Error("Invalid packfile header");
  }

  // The version is stored as an unsigned 32 integer in network byte order.
  // It must be version 2 or 3.
  function $version(byte) {
    version = (version << 8) | byte;
    if (++offset < 4) return $version;
    if (version >= 2 && version <= 3) {
      offset = 0;
      return $num;
    }
    // NOTE(review): message interpolates `num`, not `version`; looks
    // like it was meant to report the bad version — verify upstream.
    throw new Error("Invalid version number " + num);
  }

  // The number of objects in this packfile is also stored as an unsigned 32 bit int.
  function $num(byte) {
    num = (num << 8) | byte;
    if (++offset < 4) return $num;
    offset = 0;
    emit({version: version, num: num});
    return $header;
  }

  // n-byte type and length (3-bit type, (n-1)*7+4-bit length)
  // CTTTSSSS
  // C is continue bit, TTT is type, S+ is length
  function $header(byte) {
    if (start === 0) start = position;
    type = byte >> 4 & 0x07;
    length = byte & 0x0f;
    if (byte & 0x80) {
      offset = 4;
      return $header2;
    }
    return afterHeader();
  }

  // Second state in the same header parsing.
  // CSSSSSSS*
  function $header2(byte) {
    length |= (byte & 0x7f) << offset;
    if (byte & 0x80) {
      offset += 7;
      return $header2;
    }
    return afterHeader();
  }

  // Common helper for finishing tiny and normal headers.
  function afterHeader() {
    offset = 0;
    if (type === 6) {
      ref = 0;
      return $ofsDelta;
    }
    if (type === 7) {
      ref = "";
      return $refDelta;
    }
    // console.log({type: type,length: length})
    return $body;
  }

  // Big-endian modified base 128 number encoded ref offset
  function $ofsDelta(byte) {
    ref = byte & 0x7f;
    if (byte & 0x80) return $ofsDelta2;
    return $body;
  }

  function $ofsDelta2(byte) {
    ref = ((ref + 1) << 7) | (byte & 0x7f);
    if (byte & 0x80) return $ofsDelta2;
    return $body;
  }

  // 20 byte raw sha1 hash for ref
  function $refDelta(byte) {
    ref += toHex(byte);
    if (++offset < 20) return $refDelta;
    return $body;
  }

  // Common helper for generating 2-character hex numbers
  function toHex(num) {
    return num < 0x10 ? "0" + num.toString(16) : num.toString(16);
  }

  // Common helper for emitting all three object shapes
  function emitObject() {
    var body = bodec.join(parts);
    if (body.length !== length) {
      throw new Error("Body length mismatch");
    }
    var item = {
      type: numToType[type],
      size: length,
      body: body,
      offset: start
    };
    if (ref) item.ref = ref;
    // Reset per-object accumulators before handing the item out.
    parts.length = 0;
    start = 0;
    offset = 0;
    type = 0;
    length = 0;
    ref = null;
    emit(item);
  }

  // Feed the deflated code to the inflate engine
  function $body(byte, i, chunk) {
    if (inf.write(byte)) return $body;
    var buf = inf.flush();
    if (buf.length !== length) throw new Error("Length mismatch, expected " + length + " got " + buf.length);
    inf.recycle();
    if (buf.length) {
      parts.push(buf);
    }
    emitObject();
    // If this was all the objects, start calculating the sha1sum
    if (--num) return $header;
    sha1sum.update(bodec.slice(chunk, 0, i + 1));
    return $checksum;
  }

  // 20 byte checksum
  function $checksum(byte) {
    checksum += toHex(byte);
    if (++offset < 20) return $checksum;
    var actual = sha1sum.digest();
    if (checksum !== actual) throw new Error("Checksum mismatch: " + actual + " != " + checksum);
  }
}
exports.encodePack = encodePack;

// Streaming packfile encoder, the inverse of decodePack. Feed it
// {num} first, then exactly `num` {type, body} items, then undefined;
// it emits binary chunks followed by the trailing sha1 checksum.
function encodePack(emit) {
  var sha1sum = sha1();
  var left;  // items still expected; undefined until the header is sent
  return function (item) {
    if (item === undefined) {
      if (left !== 0) throw new Error("Some items were missing");
      return emit();
    }
    if (typeof item.num === "number") {
      if (left !== undefined) throw new Error("Header already sent");
      left = item.num;
      write(packHeader(item.num));
    }
    else if (typeof item.type === "string" && bodec.isBinary(item.body)) {
      // The header must be sent before items.
      if (typeof left !== "number") throw new Error("Headers not sent yet");
      // Make sure we haven't sent all the items already
      if (!left) throw new Error("All items already sent");
      // Send the item in packstream format
      write(packFrame(item));
      // Send the checksum after the last item
      if (!--left) {
        emit(bodec.fromHex(sha1sum.digest()));
      }
    }
    else {
      throw new Error("Invalid item");
    }
  };

  // Emit a chunk while folding it into the running pack checksum.
  function write(chunk) {
    sha1sum.update(chunk);
    emit(chunk);
  }
}

// Build the 12-byte pack header: "PACK" magic, version 2, object count
// as a big-endian unsigned 32-bit int.
function packHeader(length) {
  return bodec.fromArray([
    0x50, 0x41, 0x43, 0x4b, // PACK
    0, 0, 0, 2, // version 2
    length >> 24, // Num of objects
    (length >> 16) & 0xff,
    (length >> 8) & 0xff,
    length & 0xff
  ]);
}

// Serialize one object entry: varint type+size header, optional delta
// base reference (ofs-delta number or ref-delta hash), then the
// zlib-deflated body.
function packFrame(item) {
  var length = item.body.length;

  // write TYPE_AND_BASE128_SIZE
  var head = [(typeToNum[item.type] << 4) | (length & 0xf)];
  var i = 0;
  length >>= 4;
  while (length) {
    head[i++] |= 0x80;
    head[i] = length & 0x7f;
    length >>= 7;
  }

  if (typeof item.ref === "number") {
    // write BIG_ENDIAN_MODIFIED_BASE_128_NUMBER
    var offset = item.ref;
    // Calculate how many digits we need in base 128 and move the pointer
    i += Math.floor(Math.log(offset) / Math.log(0x80)) + 1;
    // Write the last digit
    head[i] = offset & 0x7f;
    // Then write the rest
    while (offset >>= 7) {
      head[--i] = 0x80 | (--offset & 0x7f);
    }
  }

  var parts = [bodec.fromArray(head)];
  if (typeof item.ref === "string") {
    parts.push(bodec.fromHex(item.ref));
  }
  parts.push(deflate(item.body));
  return bodec.join(parts);
}

128
api.hyungi.net/node_modules/js-git/lib/pkt-line.js generated vendored Normal file
View File

@@ -0,0 +1,128 @@
"use strict";
var bodec = require('bodec');
var PACK = bodec.fromRaw("PACK");
module.exports = {
deframer: deframer,
framer: framer
};
function deframer(emit) {
var state = 0;
var offset = 4;
var length = 0;
var data;
var more = true;
return function (item) {
// Forward the EOS marker
if (item === undefined) return emit();
// Once we're in pack mode, everything goes straight through
if (state === 3) return emit(item);
// Otherwise parse the data using a state machine.
for (var i = 0, l = item.length; i < l; i++) {
var byte = item[i];
if (state === 0) {
var val = fromHexChar(byte);
if (val === -1) {
if (byte === PACK[0]) {
offset = 1;
state = 2;
continue;
}
state = -1;
throw new SyntaxError("Not a hex char: " + String.fromCharCode(byte));
}
length |= val << ((--offset) * 4);
if (offset === 0) {
if (length === 4) {
offset = 4;
more = emit("");
}
else if (length === 0) {
offset = 4;
more = emit(null);
}
else if (length > 4) {
length -= 4;
data = bodec.create(length);
state = 1;
}
else {
state = -1;
throw new SyntaxError("Invalid length: " + length);
}
}
}
else if (state === 1) {
data[offset++] = byte;
if (offset === length) {
offset = 4;
state = 0;
length = 0;
if (data[0] === 1) {
more = emit(bodec.slice(data, 1));
}
else if (data[0] === 2) {
more = emit({progress: bodec.toUnicode(data, 1)});
}
else if (data[0] === 3) {
more = emit({error: bodec.toUnicode(data, 1)});
}
else {
more = emit(bodec.toUnicode(data).trim());
}
}
}
else if (state === 2) {
if (offset < 4 && byte === PACK[offset++]) {
continue;
}
state = 3;
more = emit(bodec.join([PACK, bodec.subarray(item, i)]));
break;
}
else {
throw new Error("pkt-line decoder in invalid state");
}
}
return more;
};
}
// Streaming encoder for pkt-line framing: undefined forwards EOS,
// null becomes the flush-pkt "0000", and strings/binary payloads get
// a 4-digit hex length header (which counts itself) prepended.
function framer(emit) {
  return function (item) {
    if (item === undefined) return emit();
    if (item === null) {
      return emit(bodec.fromRaw("0000"));
    }
    if (typeof item === "string") {
      item = bodec.fromUnicode(item);
    }
    return emit(bodec.join([frameHead(item.length + 4), item]));
  };
}

// Render a length (already including its own 4 header bytes) as four
// lowercase-hex digit bytes.
function frameHead(length) {
  var buffer = bodec.create(4);
  buffer[0] = toHexChar(length >>> 12);
  buffer[1] = toHexChar((length >>> 8) & 0xf);
  buffer[2] = toHexChar((length >>> 4) & 0xf);
  buffer[3] = toHexChar(length & 0xf);
  return buffer;
}
// Map an ASCII byte to its hex-digit value: '0'-'9' -> 0-9 and
// lowercase 'a'-'f' -> 10-15 (pkt-line lengths are lowercase hex).
// Returns -1 for any other byte so callers can reject bad input.
//
// Fix: the previous upper bound (val < 0x40) also accepted ':' ';'
// '<' '=' '>' '?' (0x3a-0x3f) as digits 10-15, silently mis-parsing
// corrupt length headers instead of raising "Not a hex char".
function fromHexChar(val) {
  if (val >= 0x30 && val <= 0x39) return val - 0x30;
  if (val >= 0x61 && val <= 0x66) return val - 0x57;
  return -1;
}
// Convert a nibble (0-15) to its lowercase ASCII hex char code
// ('0'-'9' or 'a'-'f').
function toHexChar(val) {
  if (val < 0x0a) return val + 0x30;
  return val + 0x57;
}

21
api.hyungi.net/node_modules/js-git/lib/wrap-handler.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
"use strict";
module.exports = wrapHandler;
// Build a node-style (err, value) callback around `fn`, which only
// wants the value. With an `onError` handler, both passed-in errors
// and errors thrown by `fn` are routed to it; without one, errors are
// simply thrown.
function wrapHandler(fn, onError) {
  // No error handler: rethrow errors, otherwise forward the value.
  if (!onError) {
    return function (err, value) {
      if (err) throw err;
      return fn(value);
    };
  }
  // With an error handler: deliver every failure mode to it.
  return function (err, value) {
    if (err) return onError(err);
    try {
      return fn(value);
    }
    catch (err) {
      return onError(err);
    }
  };
}