[ui] custom canvas-based image fragment rendering (allow resizing)

feat/vaults
Tomáš Mládek 2022-03-19 23:08:21 +01:00
parent 5051ac25c6
commit 04615b3682
3 changed files with 48 additions and 61 deletions

View File

@@ -39,6 +39,7 @@
   width: 100%;
   display: flex;
   justify-content: center;
+  min-height: 0;
 }
 img {

View File

@@ -204,10 +204,10 @@
   min-height: 0;
   max-height: 100%;
 }
-}
 .preview-image {
   margin: auto;
+}
 }
 .r6o-editor {

View File

@@ -43,24 +43,20 @@ export function xywh(mediaItem: HTMLImageElement | HTMLVideoElement) {
 }
 /**
- * Applies the media fragment when the image has loaded. We need the image's
- * original width and height.
+ * Applies the media fragment when the image has loaded.
  */
 function addImageLoadListener(mediaFragment: MediaFragment) {
-  // Base64-encoded transparent 1x1 pixel GIF
-  const TRANSPARENT_GIF =
-    "data:image/gif;base64,R0lGODlhAQABAPAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==";
   const mediaItem = mediaFragment.mediaItem;
-  const onload = function () {
-    // Prevent onload firing when the 1x1 pixel GIF loads; but still react when `src`
-    // is changed programatically.
-    if (mediaItem.src !== TRANSPARENT_GIF) {
+  // Prevent onload firing when the fragment loads; but still react when `src`
+  // is changed programatically.
+  let lastSrc: string;
+  function onload() {
+    if (mediaItem.src !== lastSrc) {
       // Required on reloads because of size calculations.
+      mediaItem.style.cssText = "";
       applyFragment(mediaFragment);
-      mediaItem.src = TRANSPARENT_GIF;
+      lastSrc = mediaItem.src;
     }
-  };
+  }
   mediaItem.addEventListener("load", onload);
 }
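
The `lastSrc` guard matters because applyFragment's image branch (further down) reassigns `mediaItem.src` from inside this very load handler, which fires another `load` event; the guard breaks that loop while still reacting when `src` is changed programmatically from outside. A minimal standalone sketch of the pattern, with illustrative names not taken from the commit:

// Sketch: run `rewrite` on every externally-triggered load, but ignore the
// load event caused by our own src assignment.
function rewriteOnLoad(
  img: HTMLImageElement,
  rewrite: (img: HTMLImageElement) => string
) {
  let lastSrc: string | undefined;
  img.addEventListener("load", () => {
    if (img.src === lastSrc) return; // load caused by our own assignment below
    img.src = rewrite(img); // fires another "load" event
    lastSrc = img.src; // remember what we set, so that event is skipped
  });
}
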
@@ -86,34 +82,26 @@ function addVideoLoadListener(mediaFragment: MediaFragment) {
  * 2D transformation according to the fragment's x and y values.
  */
 function applyFragment(fragment: MediaFragment) {
-  let x: string, y: string, w: string, h: string;
-  const originalWidth =
-    fragment.mediaType === "img"
-      ? fragment.mediaItem.width
-      : fragment.mediaItem.videoWidth;
-  const originalHeight =
-    fragment.mediaType === "img"
-      ? fragment.mediaItem.height
-      : fragment.mediaItem.videoHeight;
-  // Unit is pixel:
-  if (fragment.unit === "pixel:") {
-    const scale =
-      fragment.mediaType === "img"
-        ? originalWidth / fragment.mediaItem.naturalWidth
-        : originalWidth / fragment.mediaItem.clientWidth;
-    w = fragment.w * scale + "px";
-    h = fragment.h * scale + "px";
-    x = "-" + fragment.x * scale + "px";
-    y = "-" + fragment.y * scale + "px";
-    // Unit is percent:
-  } else {
-    w = (originalWidth * fragment.w) / 100 + "px";
-    h = (originalHeight * fragment.h) / 100 + "px";
-    x = "-" + (originalWidth * fragment.x) / 100 + "px";
-    y = "-" + (originalHeight * fragment.y) / 100 + "px";
-  }
   // Media item is a video
   if (fragment.mediaType === "video") {
+    let x: string, y: string, w: string, h: string;
+    const originalWidth = fragment.mediaItem.videoWidth;
+    const originalHeight = fragment.mediaItem.videoHeight;
+    // Unit is pixel:
+    if (fragment.unit === "pixel:") {
+      const scale = originalWidth / fragment.mediaItem.clientWidth;
+      w = fragment.w * scale + "px";
+      h = fragment.h * scale + "px";
+      x = "-" + fragment.x * scale + "px";
+      y = "-" + fragment.y * scale + "px";
+      // Unit is percent:
+    } else {
+      w = (originalWidth * fragment.w) / 100 + "px";
+      h = (originalHeight * fragment.h) / 100 + "px";
+      x = "-" + (originalWidth * fragment.x) / 100 + "px";
+      y = "-" + (originalHeight * fragment.y) / 100 + "px";
+    }
     const wrapper = document.createElement("div");
     wrapper.style.cssText +=
       "overflow:hidden;" +
@@ -148,25 +136,23 @@ function applyFragment(fragment: MediaFragment) {
     }
   // Media item is an image
   } else {
-    fragment.mediaItem.style.cssText +=
-      "width:" +
-      w +
-      ";" +
-      "height:" +
-      h +
-      ";" +
-      "background:url(" +
-      fragment.mediaItem.src +
-      ") " + // background-image
-      "no-repeat " + // background-repeat
-      x +
-      " " +
-      y +
-      "; " + // background-position
-      "background-size: " +
-      originalWidth +
-      "px " +
-      originalHeight +
-      "px;";
+    let x: number, y: number, w: number, h: number;
+    if (fragment.unit === "pixel:") {
+      x = fragment.x;
+      y = fragment.y;
+      w = fragment.w;
+      h = fragment.h;
+    } else {
+      x = (fragment.x / 100) * fragment.mediaItem.naturalWidth;
+      y = (fragment.y / 100) * fragment.mediaItem.naturalHeight;
+      w = (fragment.w / 100) * fragment.mediaItem.naturalWidth;
+      h = (fragment.h / 100) * fragment.mediaItem.naturalHeight;
+    }
+    const canvas = document.createElement("canvas");
+    canvas.width = w;
+    canvas.height = h;
+    const context = canvas.getContext("2d");
+    context.drawImage(fragment.mediaItem, x, y, w, h, 0, 0, w, h);
+    fragment.mediaItem.src = canvas.toDataURL();
   }
 }
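
The image branch above is the canvas-based rendering named in the commit title: the fragment rectangle is converted to natural-size pixels, drawn onto an offscreen canvas, and the resulting data URL replaces the image's `src`. A standalone sketch of the same technique follows; the helper name, the null check on getContext, and the try/catch around toDataURL are illustrative additions, not part of the commit:

// Sketch: crop the region (x, y, w, h), given in natural-size pixels,
// out of an already-loaded image and return it as a data URL.
function cropToDataURL(
  img: HTMLImageElement,
  x: number,
  y: number,
  w: number,
  h: number
): string | null {
  const canvas = document.createElement("canvas");
  canvas.width = w;
  canvas.height = h;
  const context = canvas.getContext("2d");
  if (!context) return null; // getContext("2d") can return null
  // drawImage(source, sx, sy, sw, sh, dx, dy, dw, dh) copies the source
  // rectangle into the canvas at 1:1 scale.
  context.drawImage(img, x, y, w, h, 0, 0, w, h);
  try {
    return canvas.toDataURL(); // throws a SecurityError on a tainted (cross-origin) canvas
  } catch {
    return null;
  }
}

// Percent units are fractions of the natural dimensions: for a 1600×900 image,
// xywh=percent:25,10,50,50 maps to x=400, y=90, w=800, h=450.

Replacing `src` in place, rather than painting the fragment as a fixed-size background the way the removed code did, is presumably what lets the rendered fragment be resized by normal CSS layout.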