ximg: process multipreview on the fly

Signed-off-by: Varun Patil <varunpatil@ucla.edu>
pull/563/head
Varun Patil 2023-04-01 22:35:11 -07:00
parent 7e8e877320
commit 052134be6e
2 changed files with 99 additions and 34 deletions

@@ -93,8 +93,6 @@ class ImageController extends GenericApiController
         // stream the response
         header('Content-Type: application/octet-stream');
-        header('Expires: '.gmdate('D, d M Y H:i:s \G\M\T', time() + 7 * 3600 * 24));
-        header('Cache-Control: max-age='. 7 * 3600 * 24 .', private');
         foreach ($files as $bodyFile) {
             if (!isset($bodyFile['reqid']) || !isset($bodyFile['fileid']) || !isset($bodyFile['x']) || !isset($bodyFile['y']) || !isset($bodyFile['a'])) {
@@ -135,12 +133,18 @@ class ImageController extends GenericApiController
                 }
                 ob_start();
-                echo json_encode([
+                // Encode parameters
+                $json = json_encode([
                     'reqid' => $reqid,
-                    'Content-Length' => \strlen($content),
-                    'Content-Type' => $preview->getMimeType(),
+                    'len' => \strlen($content),
+                    'type' => $preview->getMimeType(),
                 ]);
-                echo "\n";
+                // Send the length of the json as a single byte
+                echo \chr(\strlen($json));
+                echo $json;
+                // Send the image
                 echo $content;
                 ob_end_flush();
             } catch (\OCP\Files\NotFoundException $e) {
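
Each preview in the streamed body is now a self-describing frame: a single byte giving the length of a JSON header, the JSON itself (reqid, len, type), then len bytes of raw image data. A minimal TypeScript sketch of decoding one frame from an already-buffered payload; decodeFrame and its return shape are illustrative, not part of this patch:

```ts
// Illustrative only: decode one length-prefixed frame starting at `offset`.
// Assumes the whole frame is already present in `buf`.
function decodeFrame(buf: Uint8Array, offset: number) {
  const jsonLen = buf[offset]; // the single length byte written by \chr(\strlen($json))
  const jsonBytes = buf.slice(offset + 1, offset + 1 + jsonLen);
  const meta = JSON.parse(new TextDecoder().decode(jsonBytes)) as {
    reqid: number;
    len: number;
    type: string;
  };
  const imgStart = offset + 1 + jsonLen;
  const image = buf.slice(imgStart, imgStart + meta.len);
  return { meta, image, next: imgStart + meta.len };
}
```

The one-byte prefix caps the JSON header at 255 bytes, which the three small fields easily fit.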

@@ -102,37 +102,94 @@ async function flushPreviewQueue() {
     const res = await fetchMultipreview(files);
     if (res.status !== 200) throw new Error("Error fetching multi-preview");
+    // Create fake headers for 7-day expiry
+    const headers = {
+      "cache-control": "max-age=604800",
+      expires: new Date(Date.now() + 604800000).toUTCString(),
+    };
-    // Read blob
-    const blob = res.data;
+    const reader = res.body.getReader();
+    // 256KB buffer for reading data into
+    let buffer = new Uint8Array(256 * 1024);
+    let bufSize = 0;
+    // Parameters of the image we're currently reading
+    let params: {
+      reqid: number;
+      len: number;
+      type: string;
+    } = null;
     // Index at which we are currently reading
     let idx = 0;
-    while (idx < blob.size) {
-      // Read a line of JSON from blob
-      const line = await blob.slice(idx, idx + 256).text();
-      const newlineIndex = line?.indexOf("\n");
-      const jsonParsed = JSON.parse(line?.slice(0, newlineIndex));
-      const imgLen = jsonParsed["Content-Length"];
-      const imgType = jsonParsed["Content-Type"];
-      const reqid = jsonParsed["reqid"];
-      idx += newlineIndex + 1;
-      // Read the image data
-      const imgBlob = blob.slice(idx, idx + imgLen);
-      idx += imgLen;
+    while (true) {
+      // Read data from the response
+      const { value, done } = await reader.read();
+      if (done) break; // End of stream
-      // Initiate callbacks
-      fetchPreviewQueueCopy
-        .filter((p) => p.reqid === reqid && !p.done)
-        .forEach((p) => {
-          try {
-            const dummy = getResponse(imgBlob, imgType, res.headers);
-            resolve(p.origUrl, dummy);
-            p.done = true;
-          } catch (e) {
-            // In case of error, we want to try fetching the single
-            // image instead, so we don't reject here
-          }
+      // Check in case 1/3 the buffer is full then reset it
+      if (idx > buffer.length / 3) {
+        buffer.set(buffer.slice(idx));
+        bufSize -= idx;
+        idx = 0;
+      }
+      // Double the length of the buffer until it fits
+      // Hopefully this never happens
+      while (bufSize + value.length > buffer.length) {
+        const newBuffer = new Uint8Array(buffer.length * 2);
+        newBuffer.set(buffer);
+        buffer = newBuffer;
+        console.warn("Doubling multipreview buffer size", buffer.length);
+      }
+      // Copy data into buffer
+      buffer.set(value, bufSize);
+      bufSize += value.length;
+      // Process the buffer until we exhaust it or need more data
+      while (true) {
+        if (!params) {
+          // Read the length of the JSON as a single byte
+          if (bufSize - idx < 1) break;
+          const jsonLen = buffer[idx];
+          const jsonStart = idx + 1;
+          // Read the JSON
+          if (bufSize - jsonStart < jsonLen) break;
+          const jsonB = buffer.slice(jsonStart, jsonStart + jsonLen);
+          const jsonT = new TextDecoder().decode(jsonB);
+          params = JSON.parse(jsonT);
+          idx = jsonStart + jsonLen;
+        }
+        // Read the image data
+        if (bufSize - idx < params.len) break;
+        const imgBlob = new Blob([buffer.slice(idx, idx + params.len)], {
+          type: params.type,
+        });
+        idx += params.len;
+        // Initiate callbacks
+        fetchPreviewQueueCopy
+          .filter((p) => p.reqid === params.reqid && !p.done)
+          .forEach((p) => {
+            try {
+              const dummy = getResponse(imgBlob, params.type, headers);
+              resolve(p.origUrl, dummy);
+              p.done = true;
+            } catch (e) {
+              // In case of error, we want to try fetching the single
+              // image instead, so we don't reject here
+            }
+          });
+        // Reset for next iteration
+        params = null;
+      }
+    }
   } catch (e) {
     console.error("Multipreview error", e);
@@ -171,7 +228,7 @@ export async function fetchImage(url: string): Promise<Blob> {
     origUrl: url,
     url: urlObj,
     fileid,
-    reqid: Math.random(),
+    reqid: Math.round(Math.random() * 1e8),
   });
   // Add to pending
@@ -233,7 +290,11 @@ export async function fetchOneImage(url: string) {
 export async function fetchMultipreview(files: any[]) {
   const multiUrl = API.IMAGE_MULTIPREVIEW();
-  return await axios.post(multiUrl, files, {
-    responseType: "blob",
+  return await fetch(multiUrl, {
+    method: "POST",
+    body: JSON.stringify(files),
+    headers: {
+      "Content-Type": "application/json",
+    },
   });
 }
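
fetchMultipreview now returns the raw fetch Response rather than an axios blob response: with responseType: "blob" the payload only becomes available once the download completes, while res.body is a ReadableStream that flushPreviewQueue can parse incrementally, resolving each preview as soon as its bytes arrive. A minimal consumption sketch, mirroring the caller above:

```ts
const res = await fetchMultipreview(files);
if (res.status !== 200) throw new Error("Error fetching multi-preview");

// res.body is a ReadableStream<Uint8Array>; read it chunk by chunk
const reader = res.body.getReader();
for (;;) {
  const { value, done } = await reader.read();
  if (done) break;
  // hand `value` to the frame parsing loop (see flushPreviewQueue above)
}
```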