import { update_object, walk_and_store_blobs } from "../helpers/data";
import {
	Command,
	type ApiData,
	type EndpointInfo,
	type JsApiData
} from "../types";
import { FileData } from "../upload";
import type { Client } from "..";
import {
	FILE_PROCESSING_ERROR_MSG,
	NODEJS_FS_ERROR_MSG,
	ROOT_URL_ERROR_MSG
} from "../constants";
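
/**
 * Walks a payload, uploads every Blob/File it contains to the server, and
 * replaces each one in place with a FileData reference so the payload can be
 * serialized. Local-file Command placeholders are resolved first. Intended to
 * be invoked with a Client instance as `this` (e.g. via bind or call).
 */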
export async function handle_blob(
	this: Client,
	endpoint: string,
	data: unknown[],
	api_info: EndpointInfo<JsApiData | ApiData>
): Promise<unknown[]> {
	const self = this;

	// resolve any Command placeholders (local file paths) before walking the payload
	await process_local_file_commands(self, data);

	// collect a reference (path within `data` plus the blob itself) for every Blob/File
	const blobRefs = await walk_and_store_blobs(
		data,
		undefined,
		[],
		true,
		api_info
	);

	// upload each blob and record where its URL should be written back
	const results = await Promise.all(
		blobRefs.map(async ({ path, blob, type }) => {
			if (!blob) return { path, type };

			const response = await self.upload_files(endpoint, [blob]);
			const file_url = response.files && response.files[0];
			return {
				path,
				file_url,
				type,
				name:
					typeof File !== "undefined" && blob instanceof File
						? blob?.name
						: undefined
			};
		})
	);

	// write the uploaded file references back into the payload in place
	results.forEach(({ path, file_url, type, name }) => {
		if (type === "Gallery") {
			update_object(data, file_url, path);
		} else if (file_url) {
			const file = new FileData({ path: file_url, orig_name: name });
			update_object(data, file, path);
		}
	});

	return data;
}
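
/**
 * Resolves any Command placeholders (references to local files) found in the
 * payload. Requires the client config to expose a root URL and throws
 * ROOT_URL_ERROR_MSG otherwise.
 */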
export async function process_local_file_commands(
	client: Client,
	data: unknown[]
): Promise<void> {
	const root = client.config?.root || client.config?.root_url;

	if (!root) {
		throw new Error(ROOT_URL_ERROR_MSG);
	}

	await recursively_process_commands(client, data);
}
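
/**
 * Recursively walks the payload and hands every Command instance it finds to
 * process_single_command.
 */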
async function recursively_process_commands(
	client: Client,
	data: any,
	path: string[] = []
): Promise<void> {
	for (const key in data) {
		if (data[key] instanceof Command) {
			await process_single_command(client, data, key);
		} else if (typeof data[key] === "object" && data[key] !== null) {
			await recursively_process_commands(client, data[key], [...path, key]);
		}
	}
}
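
/**
 * Reads the file referenced by a Command from local disk (Node.js only),
 * uploads it to the server root, and swaps the Command for the resulting
 * FileData. Read and upload failures are caught and logged.
 */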
async function process_single_command(
	client: Client,
	data: any,
	key: string
): Promise<void> {
	let cmd_item = data[key] as Command;
	const root = client.config?.root || client.config?.root_url;

	if (!root) {
		throw new Error(ROOT_URL_ERROR_MSG);
	}

	try {
		let fileBuffer: Buffer;
		let fullPath: string;

		// check if running in a Node.js environment
		if (
			typeof process !== "undefined" &&
			process.versions &&
			process.versions.node
		) {
			const fs = await import("fs/promises");
			const path = await import("path");

			fullPath = path.resolve(process.cwd(), cmd_item.meta.path);
			fileBuffer = await fs.readFile(fullPath); // Read file from disk
		} else {
			throw new Error(NODEJS_FS_ERROR_MSG);
		}

		const file = new Blob([fileBuffer], { type: "application/octet-stream" });

		const response = await client.upload_files(root, [file]);
		const file_url = response.files && response.files[0];

		if (file_url) {
			const fileData = new FileData({
				path: file_url,
				orig_name: cmd_item.meta.name || ""
			});

			// replace the command object with the fileData object
			data[key] = fileData;
		}
	} catch (error) {
		console.error(FILE_PROCESSING_ERROR_MSG, error);
	}
}