Initial commit

Branch: main
Tomáš Mládek 2024-04-04 19:24:32 +02:00
commit 1c62f36fb3
4 changed files with 1340 additions and 0 deletions

Earthfile (Normal file, 12 lines added)

@@ -0,0 +1,12 @@
VERSION 0.7
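# The `server` target builds the runtime image: install pnpm, restore dependencies from the
# lockfile, copy the sources, and start the TypeScript entrypoint via `pnpm start`.
# The SAVE IMAGE lines tag the image locally and push it to the albedo.lan registry.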
server:
FROM node:lts-hydrogen
RUN npm -g install pnpm
WORKDIR /app
COPY package.json pnpm-lock.yaml ./
RUN pnpm install
COPY . .
CMD ["pnpm", "start"]
SAVE IMAGE ksx-snapshot-server localhost:5000/ksx-snapshot-server
SAVE IMAGE --push albedo.lan:5000/ksx-snapshot-server

index.ts (Normal file, 183 lines added)

@@ -0,0 +1,183 @@
import ffmpeg, { FfprobeData } from "fluent-ffmpeg";
import { promisify } from "util";
import express from "express";
import schedule from "node-schedule";
import { mkdir } from "fs-extra";
import path from "path";
import { program } from "commander";
import winston from "winston";
import { readdir, stat } from "node:fs/promises";
import { createHash } from "node:crypto";
const ffprobeAsync: (file: string) => Promise<FfprobeData> = promisify(
ffmpeg.ffprobe,
);
interface VideoInfo {
path: string;
duration: number;
}
const logger = winston.createLogger({
level: "debug",
format: winston.format.combine(
winston.format.timestamp(),
winston.format.printf(({ timestamp, level, message }) => {
return `${timestamp} [${level}]: ${message}`;
}),
),
transports: [new winston.transports.Console()],
});
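// Probe each input file with ffprobe and record its duration; files whose duration
// cannot be determined are logged and skipped.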
async function getVideoInfo(paths: string[]): Promise<VideoInfo[]> {
const videos: VideoInfo[] = [];
for (const videoPath of paths) {
const metadata = await ffprobeAsync(videoPath);
const duration = metadata.format.duration;
if (!duration) {
logger.warn(`Failed to get duration for ${videoPath}`);
continue;
}
videos.push({ path: videoPath, duration });
}
return videos;
}
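// Extract a single frame at `timestamp` seconds and write it to `output`; the image
// format is inferred by ffmpeg from the output file extension (.webp here).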
async function takeSnapshot(path: string, timestamp: number, output: string) {
  return new Promise<void>((resolve, reject) => {
    ffmpeg(path)
      .seekInput(timestamp)
      .outputOptions("-frames:v", "1")
      .output(output)
      .on("end", () => {
        resolve();
      })
      // Without an "error" listener, a failed extraction crashes the process with an
      // unhandled 'error' event and leaves this promise pending forever.
      .on("error", (err) => {
        reject(err);
      })
      .run();
  });
}
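// Pick `count` random timestamps, weighted by duration across the combined playlist, and
// write one snapshot per timestamp into a subdirectory named after the current hour.
// Outputs under 5 KiB (likely blank or near-black frames) are retried at a new timestamp.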
async function processSnapshots(
videos: VideoInfo[],
snapshotsDir: string,
count: number,
) {
const totalDuration = videos.reduce((acc, video) => acc + video.duration, 0);
const currentHour = new Date().getHours();
const currentDir = path.join(snapshotsDir, currentHour.toString());
await mkdir(currentDir, { recursive: true });
for (let i = 0; i < count; i++) {
while (true) {
let size = 0;
const randomTimestamp = Math.random() * totalDuration;
let cumulativeDuration = 0;
const snapshotPath = path.join(currentDir, `${i}.webp`);
for (const video of videos) {
if (cumulativeDuration + video.duration >= randomTimestamp) {
const timestampWithinVideo = randomTimestamp - cumulativeDuration;
logger.info(
`Extracting snapshot ${i + 1}/${count} from ${
video.path
} at ${timestampWithinVideo}s`,
);
await takeSnapshot(video.path, timestampWithinVideo, snapshotPath);
break;
}
cumulativeDuration += video.duration;
}
const stats = await stat(snapshotPath);
size = stats.size;
if (size < 5 * 1024) {
logger.info(
`Snapshot ${
i + 1
}/${count} too small: ${size} bytes, trying again...`,
);
} else {
break;
}
}
}
}
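// Start the HTTP server: generate an initial batch of snapshots, regenerate a fresh batch
// at the top of every hour, and serve images from the current hour's subdirectory.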
async function setupSnapshotServer(
videoPaths: string[],
snapshotsDir: string,
count: number,
) {
const app = express();
try {
await mkdir(snapshotsDir, { recursive: true });
const videos = await getVideoInfo(videoPaths);
    // noinspection ES6MissingAwait
    processSnapshots(videos, snapshotsDir, count).catch((err) =>
      logger.error(`Snapshot generation failed: ${err}`),
    );
    schedule.scheduleJob("0 * * * *", () =>
      processSnapshots(videos, snapshotsDir, count).catch((err) =>
        logger.error(`Scheduled snapshot generation failed: ${err}`),
      ),
    );
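    // `/:magic?`: a "magic" segment is hashed so the same string always maps to the same
    // snapshot for the current hour (cached until the hour rolls over); without it, a
    // random snapshot is served with caching disabled.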
app.get("/:magic?", async (req, res) => {
logger.debug(`REQ: /${req.params.magic || ""}`);
const { magic } = req.params;
const currentHour = new Date().getHours();
const currentDir = path.resolve(
path.join(snapshotsDir, currentHour.toString()),
);
try {
const files = await readdir(currentDir);
if (magic) {
const hash = createHash("md5").update(magic).digest("hex");
const index = parseInt(hash.substring(0, 8), 16) % files.length;
// Cache until the next hour rolls over
res.sendFile(files[index], {
headers: {
"Cache-Control": `public, max-age=${
3600 - new Date().getMinutes() * 60
}`,
},
root: currentDir,
});
} else {
res.sendFile(files[Math.floor(Math.random() * files.length)], {
headers: {
"Cache-Control": "public, max-age=0",
},
root: currentDir,
});
}
} catch (error) {
logger.error("Failed to send file:", error);
res.status(500).send(`Error.`);
}
});
app.listen(3000, () =>
logger.info("Server running on http://localhost:3000"),
);
} catch (error) {
logger.error("Failed to start the server:", error);
}
}
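// CLI: one or more video paths as positional arguments, plus options for the snapshot
// directory and the number of snapshots generated per hour.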
program
.name("KSX Snapshot Server")
.argument("<paths...>", "Video file paths")
.option("-d, --directory <dir>", "Directory to store snapshots", "snapshots")
.option("-c, --count <count>", "Number of snapshots to take per hour", "32")
.action(async (videoPaths: string[], options) => {
await setupSnapshotServer(
videoPaths,
options.directory,
parseInt(options.count, 10),
);
});
program.parse();
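// Example invocation (the video paths below are hypothetical):
//   pnpm exec tsx index.ts ./videos/a.mp4 ./videos/b.mp4 --directory snapshots --count 32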

package.json (Normal file, 28 lines added)

@@ -0,0 +1,28 @@
{
"name": "ksx-snapshot-server",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"start": "tsx index.ts"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"commander": "^12.0.0",
"express": "^4.19.2",
"fluent-ffmpeg": "^2.1.2",
"fs-extra": "^11.2.0",
"node-schedule": "^2.1.1",
"tsx": "^4.7.1",
"winston": "^3.13.0"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/fluent-ffmpeg": "^2.1.24",
"@types/fs-extra": "^11.0.4",
"@types/node": "^20.12.2",
"@types/node-schedule": "^2.1.6"
}
}

pnpm-lock.yaml (Normal file, 1117 lines added)

File diff suppressed because it is too large.