ximg: process multipreview on the fly

Signed-off-by: Varun Patil <varunpatil@ucla.edu>
pull/563/head
Varun Patil 2023-04-01 22:35:11 -07:00
parent 7e8e877320
commit 052134be6e
2 changed files with 99 additions and 34 deletions

@@ -93,8 +93,6 @@ class ImageController extends GenericApiController
 
         // stream the response
         header('Content-Type: application/octet-stream');
-        header('Expires: '.gmdate('D, d M Y H:i:s \G\M\T', time() + 7 * 3600 * 24));
-        header('Cache-Control: max-age='. 7 * 3600 * 24 .', private');
 
         foreach ($files as $bodyFile) {
             if (!isset($bodyFile['reqid']) || !isset($bodyFile['fileid']) || !isset($bodyFile['x']) || !isset($bodyFile['y']) || !isset($bodyFile['a'])) {
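
With the response now parsed incrementally on the client, the browser's HTTP cache never sees individual previews, so the 7-day expiry moves out of PHP and into the frontend. A minimal sketch of the client-side equivalent (this mirrors the `headers` object constructed in the frontend hunk below; `weekMs` is just an illustrative name):

const weekMs = 7 * 24 * 3600 * 1000; // the same 7-day window as the removed headers
const headers = {
  "cache-control": `max-age=${weekMs / 1000}`,
  expires: new Date(Date.now() + weekMs).toUTCString(),
};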
@@ -135,12 +133,18 @@ class ImageController extends GenericApiController
             }
 
             ob_start();
-            echo json_encode([
+            // Encode parameters
+            $json = json_encode([
                 'reqid' => $reqid,
-                'Content-Length' => \strlen($content),
-                'Content-Type' => $preview->getMimeType(),
+                'len' => \strlen($content),
+                'type' => $preview->getMimeType(),
             ]);
-            echo "\n";
+
+            // Send the length of the json as a single byte
+            echo \chr(\strlen($json));
+            echo $json;
+
+            // Send the image
             echo $content;
             ob_end_flush();
         } catch (\OCP\Files\NotFoundException $e) {
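
Taken together, the controller now emits one length-prefixed frame per preview: a single byte holding the JSON header's length, the JSON header `{reqid, len, type}`, then `len` raw image bytes. Since `\chr()` only encodes values 0-255, the JSON header must stay under 256 bytes, which is presumably why the verbose `Content-Length`/`Content-Type` keys were shortened to `len`/`type`. A hypothetical TypeScript encoder, only to illustrate the layout (not part of this commit):

// Frame layout: [1 byte: JSON length][JSON header][len bytes of image]
function encodeFrame(reqid: number, type: string, image: Uint8Array): Uint8Array {
  const json = new TextEncoder().encode(JSON.stringify({ reqid, len: image.length, type }));
  if (json.length > 255) throw new Error("JSON header exceeds one length byte");
  const frame = new Uint8Array(1 + json.length + image.length);
  frame[0] = json.length; // single length byte, as in `echo \chr(\strlen($json))`
  frame.set(json, 1);
  frame.set(image, 1 + json.length);
  return frame;
}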

@@ -102,37 +102,94 @@ async function flushPreviewQueue() {
     const res = await fetchMultipreview(files);
     if (res.status !== 200) throw new Error("Error fetching multi-preview");
 
+    // Create fake headers for 7-day expiry
+    const headers = {
+      "cache-control": "max-age=604800",
+      expires: new Date(Date.now() + 604800000).toUTCString(),
+    };
+
     // Read blob
-    const blob = res.data;
+    const reader = res.body.getReader();
+
+    // 256KB buffer for reading data into
+    let buffer = new Uint8Array(256 * 1024);
+    let bufSize = 0;
+
+    // Parameters of the image we're currently reading
+    let params: {
+      reqid: number;
+      len: number;
+      type: string;
+    } = null;
 
+    // Index at which we are currently reading
     let idx = 0;
+
-    while (idx < blob.size) {
-      // Read a line of JSON from blob
-      const line = await blob.slice(idx, idx + 256).text();
-      const newlineIndex = line?.indexOf("\n");
-      const jsonParsed = JSON.parse(line?.slice(0, newlineIndex));
-      const imgLen = jsonParsed["Content-Length"];
-      const imgType = jsonParsed["Content-Type"];
-      const reqid = jsonParsed["reqid"];
-      idx += newlineIndex + 1;
-
-      // Read the image data
-      const imgBlob = blob.slice(idx, idx + imgLen);
-      idx += imgLen;
-
-      // Initiate callbacks
-      fetchPreviewQueueCopy
-        .filter((p) => p.reqid === reqid && !p.done)
-        .forEach((p) => {
-          try {
-            const dummy = getResponse(imgBlob, imgType, res.headers);
-            resolve(p.origUrl, dummy);
-            p.done = true;
-          } catch (e) {
-            // In case of error, we want to try fetching the single
-            // image instead, so we don't reject here
-          }
-        });
+    while (true) {
+      // Read data from the response
+      const { value, done } = await reader.read();
+      if (done) break; // End of stream
+
+      // If more than a third of the buffer has been consumed, compact it
+      if (idx > buffer.length / 3) {
+        buffer.set(buffer.slice(idx));
+        bufSize -= idx;
+        idx = 0;
+      }
+
+      // Double the length of the buffer until the new chunk fits
+      // Hopefully this never happens
+      while (bufSize + value.length > buffer.length) {
+        const newBuffer = new Uint8Array(buffer.length * 2);
+        newBuffer.set(buffer);
+        buffer = newBuffer;
+        console.warn("Doubling multipreview buffer size", buffer.length);
+      }
+
+      // Copy data into buffer
+      buffer.set(value, bufSize);
+      bufSize += value.length;
+
+      // Process the buffer until we exhaust it or need more data
+      while (true) {
+        if (!params) {
+          // Read the length of the JSON as a single byte
+          if (bufSize - idx < 1) break;
+          const jsonLen = buffer[idx];
+          const jsonStart = idx + 1;
+
+          // Read the JSON
+          if (bufSize - jsonStart < jsonLen) break;
+          const jsonB = buffer.slice(jsonStart, jsonStart + jsonLen);
+          const jsonT = new TextDecoder().decode(jsonB);
+          params = JSON.parse(jsonT);
+          idx = jsonStart + jsonLen;
+        }
+
+        // Read the image data
+        if (bufSize - idx < params.len) break;
+        const imgBlob = new Blob([buffer.slice(idx, idx + params.len)], {
+          type: params.type,
+        });
+        idx += params.len;
+
+        // Initiate callbacks
+        fetchPreviewQueueCopy
+          .filter((p) => p.reqid === params.reqid && !p.done)
+          .forEach((p) => {
+            try {
+              const dummy = getResponse(imgBlob, params.type, headers);
+              resolve(p.origUrl, dummy);
+              p.done = true;
+            } catch (e) {
+              // In case of error, we want to try fetching the single
+              // image instead, so we don't reject here
+            }
+          });
+
+        // Reset for next iteration
+        params = null;
+      }
     }
   } catch (e) {
     console.error("Multipreview error", e);
@@ -171,7 +228,7 @@ export async function fetchImage(url: string): Promise<Blob> {
       origUrl: url,
       url: urlObj,
       fileid,
-      reqid: Math.random(),
+      reqid: Math.round(Math.random() * 1e8),
     });
 
     // Add to pending
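
The reqid change looks deliberate: the id round-trips through the server's `json_encode` and is then compared with strict equality in the parsing loop (`p.reqid === params.reqid`), and a bounded integer survives that round trip regardless of the server's float serialization precision. (The commit doesn't state this motivation; it is an inference.)

// An integer in [0, 1e8] re-serializes identically on any JSON encoder,
// so the strict-equality match in flushPreviewQueue cannot silently miss.
const reqid = Math.round(Math.random() * 1e8);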
@@ -233,7 +290,11 @@ export async function fetchOneImage(url: string) {
 
 export async function fetchMultipreview(files: any[]) {
   const multiUrl = API.IMAGE_MULTIPREVIEW();
-  return await axios.post(multiUrl, files, {
-    responseType: "blob",
+  return await fetch(multiUrl, {
+    method: "POST",
+    body: JSON.stringify(files),
+    headers: {
+      "Content-Type": "application/json",
+    },
   });
 }
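
The axios-to-fetch switch is what makes the rest of the commit possible: with `responseType: "blob"`, axios resolves only after the entire body has downloaded, while fetch exposes `res.body` as a ReadableStream that flushPreviewQueue can drain incrementally. A usage sketch under that assumption (`handleChunk` is a hypothetical stand-in for the parsing loop above):

async function streamPreviews(files: any[]) {
  const res = await fetchMultipreview(files);
  if (res.status !== 200) throw new Error("Error fetching multi-preview");
  const reader = res.body.getReader(); // ReadableStream; unavailable in axios' blob mode
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    handleChunk(value); // hypothetical: feed the chunk into the frame parser
  }
}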