feat: add software_hashes table schema and reimport pipeline
- Add softwareHashes Drizzle model (download_id PK, md5, crc32, size_bytes, inner_path, updated_at)
- Update import_mysql.sh to reimport from the JSON snapshot after a DB wipe
- Add pnpm scripts: update:hashes, export:hashes
- Create data/zxdb/ directory for JSON snapshot storage
This commit is contained in:
@@ -37,4 +37,49 @@ mysql $MYSQL_ARGS < ZXDB/ZXDB_mysql.sql
|
|||||||
# echo "CREATE ROLE IF NOT EXISTS 'zxdb_readonly';"
|
# echo "CREATE ROLE IF NOT EXISTS 'zxdb_readonly';"
|
||||||
# echo "GRANT SELECT, SHOW VIEW ON \`zxdb\`.* TO 'zxdb_readonly';"
|
# echo "GRANT SELECT, SHOW VIEW ON \`zxdb\`.* TO 'zxdb_readonly';"
|
||||||
} | mysql --force $MYSQL_ARGS "$DB_NAME"
|
} | mysql --force $MYSQL_ARGS "$DB_NAME"
|
||||||
|
# ---- Reimport software_hashes from JSON snapshot if available ----
# Best-effort: software_hashes is derived data (rebuilt by `pnpm update:hashes`),
# so a failure here must never abort the main ZXDB import.
HASHES_SNAPSHOT="$SCRIPT_DIR/../data/zxdb/software_hashes.json"
if [ -f "$HASHES_SNAPSHOT" ]; then
  echo "Reimporting software_hashes from $HASHES_SNAPSHOT ..."
  node -e "
  const fs = require('fs');
  const mysql = require('mysql2/promise');
  (async () => {
    const snap = JSON.parse(fs.readFileSync('$HASHES_SNAPSHOT', 'utf8'));
    const pool = mysql.createPool({ uri: '$ZXDB_URL', connectionLimit: 1 });
    try {
      // Create the table unconditionally (even for an empty snapshot) so the
      // schema is complete after a DB wipe; previously an empty snapshot
      // skipped creation and left the app querying a missing table.
      await pool.query(\`
        CREATE TABLE IF NOT EXISTS software_hashes (
          download_id INT NOT NULL PRIMARY KEY,
          md5 VARCHAR(32) NOT NULL,
          crc32 VARCHAR(8) NOT NULL,
          size_bytes BIGINT NOT NULL,
          inner_path VARCHAR(500) NOT NULL,
          updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
          INDEX idx_sh_md5 (md5),
          INDEX idx_sh_crc32 (crc32)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
      \`);
      if (!snap.rows || snap.rows.length === 0) {
        console.log(' No rows in snapshot, skipping.');
        return;
      }
      await pool.query('TRUNCATE TABLE software_hashes');
      // Batch insert in chunks of 500 to stay under max_allowed_packet.
      const chunk = 500;
      for (let i = 0; i < snap.rows.length; i += chunk) {
        const batch = snap.rows.slice(i, i + chunk);
        const values = batch.map(r => [r.download_id, r.md5, r.crc32, r.size_bytes, r.inner_path, r.updated_at]);
        await pool.query(
          'INSERT INTO software_hashes (download_id, md5, crc32, size_bytes, inner_path, updated_at) VALUES ?',
          [values]
        );
      }
      console.log(' Imported ' + snap.rows.length + ' rows into software_hashes.');
    } finally {
      // Always release the pool, including the empty-snapshot and error paths,
      // so the node process can exit cleanly.
      await pool.end();
    }
  })().catch(e => {
    // exit(0) is deliberate: log the problem but let the surrounding import
    // script continue (the table can be repopulated with update:hashes).
    console.error(' Error reimporting software_hashes:', e.message);
    process.exit(0);
  });
  "
else
  echo "No software_hashes snapshot found at $HASHES_SNAPSHOT — skipping reimport."
fi
|
||||||
|
|
||||||
mysqldump --no-data -uroot -p -h${DB_HOST} -P${DB_PORT} "$DB_NAME" > ZXDB/ZXDB_mysql_STRUCTURE_ONLY.sql
|
mysqldump --no-data -uroot -p -h${DB_HOST} -P${DB_PORT} "$DB_NAME" > ZXDB/ZXDB_mysql_STRUCTURE_ONLY.sql
|
||||||
|
|||||||
0
data/zxdb/.gitkeep
Normal file
0
data/zxdb/.gitkeep
Normal file
@@ -10,6 +10,8 @@
|
|||||||
"deploy": "bin/deploy.sh",
|
"deploy": "bin/deploy.sh",
|
||||||
"deploy:branch": "bin/deploy.sh",
|
"deploy:branch": "bin/deploy.sh",
|
||||||
"setup:zxdb-local": "bin/setup-zxdb-local.sh",
|
"setup:zxdb-local": "bin/setup-zxdb-local.sh",
|
||||||
|
"update:hashes": "node bin/update-software-hashes.mjs",
|
||||||
|
"export:hashes": "node bin/update-software-hashes.mjs --export-only",
|
||||||
"deploy-prod": "git push --set-upstream explorer.specnext.dev deploy",
|
"deploy-prod": "git push --set-upstream explorer.specnext.dev deploy",
|
||||||
"deploy-test": "git push --set-upstream test.explorer.specnext.dev test"
|
"deploy-test": "git push --set-upstream test.explorer.specnext.dev test"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { mysqlTable, int, varchar, tinyint, char, smallint, decimal, text, mediumtext, longtext } from "drizzle-orm/mysql-core";
|
import { mysqlTable, int, varchar, tinyint, char, smallint, decimal, text, mediumtext, longtext, bigint, timestamp } from "drizzle-orm/mysql-core";
|
||||||
|
|
||||||
// Minimal subset needed for browsing/searching
|
// Minimal subset needed for browsing/searching
|
||||||
export const entries = mysqlTable("entries", {
|
export const entries = mysqlTable("entries", {
|
||||||
@@ -646,3 +646,16 @@ export const zxsrScores = mysqlTable("zxsr_scores", {
|
|||||||
score: varchar("score", { length: 100 }),
|
score: varchar("score", { length: 100 }),
|
||||||
comments: text("comments"),
|
comments: text("comments"),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ---- Derived tables (managed by update scripts, not part of ZXDB upstream) ----
|
||||||
|
|
||||||
|
// Stores MD5, CRC32 and size of the inner tape file extracted from download zips.
|
||||||
|
// Populated by bin/update-software-hashes.mjs; survives DB wipes via JSON snapshot.
|
||||||
|
export const softwareHashes = mysqlTable("software_hashes", {
|
||||||
|
downloadId: int("download_id").notNull().primaryKey(),
|
||||||
|
md5: varchar("md5", { length: 32 }).notNull(),
|
||||||
|
crc32: varchar("crc32", { length: 8 }).notNull(),
|
||||||
|
sizeBytes: bigint("size_bytes", { mode: "number" }).notNull(),
|
||||||
|
innerPath: varchar("inner_path", { length: 500 }).notNull(),
|
||||||
|
updatedAt: timestamp("updated_at").notNull().defaultNow(),
|
||||||
|
});
|
||||||
|
|||||||
Reference in New Issue
Block a user