Created
March 11, 2024 09:36
-
-
Save byurhannurula/ad03d688b6f8b35f10334ee6ef4f57b6 to your computer and use it in GitHub Desktop.
ICP Canister usage
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import fetch from 'isomorphic-fetch';
import { HttpAgent } from '@dfinity/agent';
import { TRACING_CANISTER_ID, FACADE_CANISTER_ID } from '../config';
import { createActor as createTracingActor } from './declarations/tracing';
import { createActor as createFacadeActor } from './declarations/facade';
import { custodianIdentity } from './identity';

// Single HTTP agent shared by both actors: targets the local replica and
// signs every request with the custodian identity.
const agentOptions = {
  identity: custodianIdentity,
  host: 'http://127.0.0.1:4943',
  fetch,
};
const agent = new HttpAgent(agentOptions);

// Actor handles for the tracing and facade canisters, bound to the shared agent.
export const tracing = createTracingActor(TRACING_CANISTER_ID, { agent });
export const facade = createFacadeActor(FACADE_CANISTER_ID, { agent });
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { Ed25519KeyIdentity } from '@dfinity/identity';
import { readFileSync } from 'fs';

// Load the custodian's Ed25519 secret key, stored on disk as a hex string.
// .trim() guards against a trailing newline in the file: Buffer.from(s, 'hex')
// silently stops decoding at the first non-hex character, which would
// otherwise truncate the key without any error.
const secretKey = readFileSync('./identity.secret', { encoding: 'utf8' }).trim();

export const custodianIdentity = Ed25519KeyIdentity.fromSecretKey(
  Buffer.from(secretKey, 'hex')
);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Upload api endpoint | |
async uploadFile(req, res) { | |
try { | |
const { body, files, user } = req; | |
if (!body.parentFolder) { | |
throw new Error('Parent folder is required.'); | |
} | |
const parentFolder = await Folder.findById(body.parentFolder); | |
if (!parentFolder) { | |
throw new Error('Invalid parent folder ID.'); | |
} | |
hasAccessToBulding(parentFolder, user); | |
const fileCreateQueries = []; | |
for (let i = 0; i < files.length; i++) { | |
const file = files[i]; | |
const fileId = Types.ObjectId().toString(); | |
const metadata = { | |
...JSON.parse(body.metadata), | |
mimeType: file.mimetype, | |
size: file.size, | |
}; | |
const fileData = { | |
metadata, | |
fileName: file.originalname, | |
fileLocation: file.path, | |
organization: req.orgId, | |
parentFolders: [body.parentFolder], | |
aiTerms: body.aiTerms, | |
}; | |
await yupValidate('createFile', fileData); | |
fileCreateQueries.push({ | |
insertOne: { | |
document: fileData, | |
}, | |
}); | |
// Queue the file upload job | |
await enqueueFileUploadJob({ | |
userId: user.id, | |
organizationId: req.orgId, | |
name: file.originalname, | |
mimetype: file.mimetype, | |
size: file.size, | |
path: file.path, | |
id: fileId, | |
}); | |
} | |
await File.bulkWrite(fileCreateQueries); | |
return res.send(responseObj({}, true, 'Data is uploaded successfully!')); | |
} catch (error) { | |
processControllerError(res, error); | |
} | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// This function is run by queue
/**
 * Queue worker: reads an uploaded file from local disk, pushes it to the
 * facade canister (chunked when larger than 1.5MB), records the canister
 * location on the matching File document, and writes an "uploaded" tracing
 * log entry.
 *
 * @param {object} job - Queue job; payload is read from job.data.data.
 */
export async function handleFileUpload(job) {
  try {
    const { userId, id, name, size, path, mimetype, organizationId } = job.data.data;
    if (!path) {
      throw new Error('Missing path in file data');
    }
    const isExistsLocally = await checkFileExists(path);
    if (!isExistsLocally) {
      throw new Error(`File not found locally: ${path}`);
    }
    const file = await fs.promises.readFile(path);
    // Work directly on the raw bytes. The previous `new Blob(file, ...)` was
    // incorrect: Blob expects an ARRAY of parts, so passing the Buffer itself
    // stringified its iterated byte values and reported a bogus .size, which
    // corrupted the chunk/no-chunk decision below.
    const fileUintArray = new Uint8Array(file.buffer, file.byteOffset, file.byteLength);
    // Chunk payloads above 1.5MB (canister message size limit).
    const maxChunkSize = 1.5 * 1024 * 1024; // 1.5MB
    const filePayload = {
      id,
      name,
      size,
      content: fileUintArray,
    };
    let result;
    if (fileUintArray.byteLength > maxChunkSize) {
      // File needs to be chunked
      let offset = 0;
      while (offset < fileUintArray.byteLength) {
        filePayload.content = fileUintArray.slice(offset, offset + maxChunkSize);
        // eslint-disable-next-line no-await-in-loop
        result = await facade.uploadFile(filePayload, userId, true);
        // Update offset for the next chunk
        offset += maxChunkSize;
      }
    } else {
      // File is small, upload as a single chunk
      result = await facade.uploadFile(filePayload, userId, false);
    }
    console.log(
      `[STORAGE_CANISTER_UPLOAD_RESULT]: ${JSON.stringify(result, null, 4)}`
    );
    if (result.Ok) {
      await File.updateOne(
        { organization: organizationId, fileName: name, fileLocation: path },
        {
          canisterLocation: JSON.stringify(result.Ok),
        }
      );
      const logResult = await tracing.addLog(userId, 'uploaded', id, name);
      // Canister responses may contain BigInt values, which JSON.stringify
      // rejects — serialize them as strings. This also fixes the previous
      // ReferenceError: serializedLogResult was used while its definition
      // was commented out.
      const serializedLogResult = JSON.stringify(
        logResult,
        (key, value) => (typeof value === 'bigint' ? value.toString() : value),
        4
      );
      console.log(`[TRACING_CANISTER_ADD_LOG_RESULT]: ${serializedLogResult}`);
      console.log(`File upload successful for user ${userId}, fileId: ${id}`);
    } else {
      // Surface canister-side failures instead of silently treating them as done.
      console.error(`[STORAGE_CANISTER_UPLOAD_ERROR]: ${JSON.stringify(result)}`);
    }
  } catch (error) {
    // NOTE(review): errors are logged but not rethrown, so the queue will mark
    // the job as successful — confirm this is the intended retry policy.
    console.error(`File upload failed: ${error.message}`);
  }
}
// This function should be run when there is need to read the file | |
/**
 * Fetches a file from the facade canister chunk by chunk, reassembles the
 * chunks, and returns the whole file as a base64 string. This function should
 * be run when there is a need to read the file.
 *
 * @param {string} userId
 * @param {string} fileId
 * @param {string} canisterId
 * @returns {Promise<string|undefined>} base64-encoded contents, or undefined
 *   when any chunk request fails.
 */
export async function getAndConcatenateFile(userId, fileId, canisterId) {
  const chunks = [];
  let totalLength = 0;
  for (let chunkNumber = 0; ; chunkNumber++) {
    // eslint-disable-next-line no-await-in-loop
    const fileResponse = await facade.getFile(
      userId,
      fileId,
      canisterId,
      chunkNumber
    );
    console.log('[FILE_RESPONSE]', fileResponse);
    if (!fileResponse.Ok) {
      console.error('Failed to retrieve file chunk.');
      return;
    }
    chunks.push(fileResponse.Ok.chunk);
    totalLength += fileResponse.Ok.chunk.length;
    if (!fileResponse.Ok.hasNext) {
      // No more chunks, exit the loop
      break;
    }
  }
  // Assemble once with TypedArray.set(): the previous per-chunk
  // `new Uint8Array([...a, ...b])` was O(n^2) and spreading a large array as
  // arguments overflows the call stack for big files.
  const concatenatedFile = new Uint8Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    concatenatedFile.set(chunk, offset);
    offset += chunk.length;
  }
  return uint8ToBase64(concatenatedFile);
}
// =============================================================================
// Tracing canister usage:
// Example: record a "consumed" access event for a file on the tracing canister.
// (Top-level await — requires an ES module context; userId, fileId and
// fileName must be in scope at the call site.)
await tracing.addLog(userId, 'consumed', fileId, fileName);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment