missing files

This commit is contained in:
Marco Gario
2020-12-10 12:29:50 +01:00
parent 56407639dd
commit f06ee229b0
7 changed files with 146 additions and 0 deletions
Generated
+11
View File
@@ -0,0 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const hash_inputs_1 = require("./hash-inputs");
const logging_1 = require("./logging");
const languages_1 = require("./languages");
const dbPath = "...";
const logger = logging_1.getRunnerLogger(true);
/* TODO: Do we need to unboundle here or before? */
const stableHash = hash_inputs_1.DatabaseHash(languages_1.Language.javascript, dbPath, logger);
logger.info(`stableHash: ${stableHash}`);
//# sourceMappingURL=dbhash.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"dbhash.js","sourceRoot":"","sources":["../src/dbhash.ts"],"names":[],"mappings":";;AAAA,+CAA2C;AAC3C,uCAA0C;AAC1C,2CAAqC;AAErC,MAAM,MAAM,GAAG,KAAK,CAAC;AACrB,MAAM,MAAM,GAAG,yBAAe,CAAC,IAAI,CAAC,CAAC;AAErC,mDAAmD;AACnD,MAAM,UAAU,GAAG,0BAAY,CAC3B,oBAAQ,CAAC,UAAU,EACnB,MAAM,EACN,MAAM,CACT,CAAC;AAEF,MAAM,CAAC,IAAI,CAAC,eAAe,UAAU,EAAE,CAAC,CAAC"}
+11
View File
@@ -0,0 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const hash_inputs_1 = require("./hash-inputs");
const logging_1 = require("./logging");
const languages_1 = require("./languages");
const dbPath = process.argv[2];
const logger = logging_1.getRunnerLogger(true);
/* TODO: Do we need to unboundle here or before? */
const stableHash = hash_inputs_1.DatabaseHash(languages_1.Language.javascript, dbPath, logger);
logger.info(`stableHash: ${stableHash}`);
//# sourceMappingURL=dbhash_cli.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"dbhash_cli.js","sourceRoot":"","sources":["../src/dbhash_cli.ts"],"names":[],"mappings":";;AAAA,+CAA2C;AAC3C,uCAA0C;AAC1C,2CAAqC;AAErC,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAC/B,MAAM,MAAM,GAAG,yBAAe,CAAC,IAAI,CAAC,CAAC;AAErC,mDAAmD;AACnD,MAAM,UAAU,GAAG,0BAAY,CAC3B,oBAAQ,CAAC,UAAU,EACnB,MAAM,EACN,MAAM,CACT,CAAC;AAEF,MAAM,CAAC,IAAI,CAAC,eAAe,UAAU,EAAE,CAAC,CAAC"}
+63
View File
@@ -0,0 +1,63 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DatabaseHash = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const crypto = __importStar(require("crypto"));
const languages_1 = require("./languages");
/**
 * Computes SHA-256 digests over the '.rel' relation files of a CodeQL
 * database under `<dbPath>/db-<language>/default` and logs a JSON report
 * (per-file digests plus two combined digests) via `logger`.
 *
 * Returns the "stable" combined hash, which for JavaScript databases
 * excludes `extraction_time.rel` so the result does not vary between runs.
 */
async function DatabaseHash(language, dbPath, logger) {
    const relDir = path.join(dbPath, `db-${language}`, "default");
    const relFiles = fs
        .readdirSync(relDir)
        .filter((entry) => entry.endsWith(".rel"))
        .map((entry) => path.join(relDir, entry));
    if (relFiles.length === 0) {
        throw new Error(`No '.rel' files found in ${relDir}. Has the 'create-database' action been called?`);
    }
    const combinedAll = crypto.createHash("sha256");
    const combinedStable = crypto.createHash("sha256");
    const perFileDigests = {};
    for (const relFile of relFiles) {
        // XXX this ought to be chunked for large tables!
        const content = fs.readFileSync(relFile);
        perFileDigests[path.relative(dbPath, relFile)] = crypto
            .createHash("sha256")
            .update(content)
            .digest("hex");
        const isExtractionTime = path.basename(relFile) === "extraction_time.rel";
        if (language === languages_1.Language.javascript && !isExtractionTime) {
            combinedStable.update(content);
        }
        combinedAll.update(content);
    }
    const stableHash = combinedStable.digest("hex");
    logger.info("database-hash:");
    logger.info(JSON.stringify({
        language,
        combined: {
            all: combinedAll.digest("hex"),
            noExtractionTime: stableHash,
            files: perFileDigests,
        },
    }, null, 2));
    return stableHash;
}
exports.DatabaseHash = DatabaseHash;
//# sourceMappingURL=hash-inputs.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"hash-inputs.js","sourceRoot":"","sources":["../src/hash-inputs.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAC7B,+CAAiC;AAEjC,2CAAuC;AAGhC,KAAK,UAAU,YAAY,CAC9B,QAAkB,EAChB,MAAc,EACd,MAAc;IAEhB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,QAAQ,EAAE,EAAE,SAAS,CAAC,CAAC;IAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IAC/C,IAAI,yBAAyB,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IAC5D,IAAI,KAAK,GAEL,EAAE,CAAC;IACP,IAAI,QAAQ,GAAG,EAAE;SACd,WAAW,CAAC,MAAM,CAAC;SACnB,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;SACjC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;IACpC,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,MAAM,IAAI,KAAK,CACb,4BAA4B,MAAM,iDAAiD,CACpF,CAAC;KACH;IACD,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,CAAC,iDAAiD;QACzF,IAAI,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACrB,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAC3D,IACE,QAAQ,KAAK,oBAAQ,CAAC,UAAU;YAChC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,qBAAqB,EAChD;YACA,yBAAyB,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;SAC3C;QACD,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;KAC9B;IACD,IAAI,UAAU,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACzD,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;IAC9B,MAAM,CAAC,IAAI,CACT,IAAI,CAAC,SAAS,CACZ;QACE,QAAQ;QACR,QAAQ,EAAE;YACR,GAAG,EAAE,YAAY,CAAC,MAAM,CAAC,KAAK,CAAC;YAC/B,gBAAgB,EAAE,UAAU;YAC5B,KAAK;SACN;KACF,EACD,IAAI,EACJ,CAAC,CACF,CACF,CAAC;IACF,OAAO,UAAU,CAAC;AACpB,CAAC;AAlDH,oCAkDG"}
+58
View File
@@ -0,0 +1,58 @@
import * as fs from "fs";
import * as path from "path";
import * as crypto from "crypto";
import { Language } from "./languages";
import { Logger } from "./logging";
/**
 * Computes SHA-256 digests over the '.rel' relation files of a CodeQL
 * database and logs a JSON report (per-file digests plus two combined
 * digests) via the given logger.
 *
 * @param language Language of the database; selects the `db-<language>`
 *   subdirectory whose `default` dataset is hashed.
 * @param dbPath Root directory of the CodeQL database.
 * @param logger Sink for the digest report.
 * @returns The hex-encoded "stable" combined hash, which for JavaScript
 *   databases excludes `extraction_time.rel` so it does not vary between
 *   otherwise-identical extractions.
 * @throws Error if no '.rel' files exist in the dataset directory.
 */
export async function DatabaseHash(
  language: Language,
  dbPath: string,
  logger: Logger
): Promise<string> {
  const relDir = path.join(dbPath, `db-${language}`, "default");
  const relFiles = fs
    .readdirSync(relDir)
    .filter((n) => n.endsWith(".rel"))
    .map((n) => path.join(relDir, n));
  if (relFiles.length === 0) {
    throw new Error(
      `No '.rel' files found in ${relDir}. Has the 'create-database' action been called?`
    );
  }
  const combined_all = crypto.createHash("sha256");
  const combined_noExtractionTime = crypto.createHash("sha256");
  const files: Record<string, string> = {};
  for (const relFile of relFiles) {
    const content = fs.readFileSync(relFile); // XXX this ought to be chunked for large tables!
    files[path.relative(dbPath, relFile)] = crypto
      .createHash("sha256")
      .update(content)
      .digest("hex");
    // NOTE(review): only JavaScript contributes to the stable hash; for any
    // other language the returned value is the digest of empty input — confirm
    // whether that is intended before reusing this for other languages.
    if (
      language === Language.javascript &&
      path.basename(relFile) !== "extraction_time.rel"
    ) {
      combined_noExtractionTime.update(content);
    }
    combined_all.update(content);
  }
  const stableHash = combined_noExtractionTime.digest("hex");
  logger.info("database-hash:");
  logger.info(
    JSON.stringify(
      {
        language,
        combined: {
          all: combined_all.digest("hex"),
          noExtractionTime: stableHash,
          files,
        },
      },
      null,
      2
    )
  );
  return stableHash;
}