chore(refactor): break handle sign function into smaller chunks
parent c530abd298
commit b145624f4c
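
This refactor collapses the body of handleSign into a linear pipeline of small helpers, each of which returns null on failure (after clearing the loading spinner) so the caller only needs a falsy check. Below is a condensed sketch of the resulting flow, assembled from the hunks in this commit. It is not the literal committed code: some intermediate steps (file-hash generation, encryption of the zip buffer) fall outside the shown hunks and are only summarized in comments.

// Sketch only: condensed from this commit's hunks, not the committed code.
const handleSign = async () => {
  setIsLoading(true)
  setLoadingSpinnerDesc('parsing hashes.json file')

  const hashesFileContent = await readHashesFile()
  if (!hashesFileContent) return

  const hashes = await parseHashes(hashesFileContent)
  if (!hashes) return

  const prevSig = getPrevSignersSig(hexToNpub(usersPubkey!))
  if (!prevSig) return

  const signedEvent = await signEventForMeta(prevSig)
  if (!signedEvent) return

  const stringifiedMeta = JSON.stringify(
    updateMetaSignatures(meta, signedEvent),
    null,
    2
  )
  zip.file('meta.json', stringifiedMeta)

  const metaHash = await getHash(stringifiedMeta)
  if (!metaHash) return

  zip.file('hashes.json', JSON.stringify(updateHashes(hashes, metaHash), null, 2))

  const arrayBuffer = await generateZipArrayBuffer(zip)
  if (!arrayBuffer) return

  // ...encrypt arrayBuffer and wrap the result in a Blob (step not shown in this diff)...

  if (await isOnline()) {
    await handleOnlineFlow(blob, key)
  } else {
    handleDecryptedArrayBuffer(arrayBuffer).finally(() => setIsLoading(false))
  }
}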
@@ -34,6 +34,7 @@ import {
   CreateSignatureEventContent,
   Meta,
   ProfileMetadata,
+  SignedEvent,
   SignedEventContent,
   User,
   UserRole
@@ -366,26 +367,15 @@ export const SignPage = () => {
     setIsLoading(true)
     setLoadingSpinnerDesc('parsing hashes.json file')
 
-    const hashesFileContent = await readContentOfZipEntry(
-      zip,
-      'hashes.json',
-      'string'
-    )
+    const hashesFileContent = await readHashesFile()
+    if (!hashesFileContent) return
 
-    if (!hashesFileContent) {
-      setIsLoading(false)
-      return
-    }
-
-    let hashes = await parseJson(hashesFileContent).catch((err) => {
-      console.log('err in parsing the content of hashes.json :>> ', err)
-      toast.error(
-        err.message || 'error occurred in parsing the content of hashes.json'
-      )
-      setIsLoading(false)
-      return null
-    })
+    const hashes = await parseHashes(hashesFileContent)
+    if (!hashes) return
 
     setLoadingSpinnerDesc('Generating hashes for files')
@@ -395,51 +385,21 @@ export const SignPage = () => {
     const prevSig = getPrevSignersSig(hexToNpub(usersPubkey!))
     if (!prevSig) return
 
-    const signedEvent = await signEventForMetaFile(
-      JSON.stringify({
-        prevSig
-      }),
-      nostrController,
-      setIsLoading
-    )
+    const signedEvent = await signEventForMeta(prevSig)
     if (!signedEvent) return
 
-    const metaCopy = _.cloneDeep(meta)
+    const updatedMeta = updateMetaSignatures(meta, signedEvent)
 
-    metaCopy.docSignatures = {
-      ...metaCopy.docSignatures,
-      [hexToNpub(signedEvent.pubkey)]: JSON.stringify(signedEvent, null, 2)
-    }
-
-    const stringifiedMeta = JSON.stringify(metaCopy, null, 2)
+    const stringifiedMeta = JSON.stringify(updatedMeta, null, 2)
     zip.file('meta.json', stringifiedMeta)
 
     const metaHash = await getHash(stringifiedMeta)
     if (!metaHash) return
 
-    hashes = {
-      ...hashes,
-      [usersPubkey!]: metaHash
-    }
-
-    zip.file('hashes.json', JSON.stringify(hashes, null, 2))
-
-    const arrayBuffer = await zip
-      .generateAsync({
-        type: 'arraybuffer',
-        compression: 'DEFLATE',
-        compressionOptions: {
-          level: 6
-        }
-      })
-      .catch((err) => {
-        console.log('err in zip:>> ', err)
-        setIsLoading(false)
-        toast.error(err.message || 'Error occurred in generating zip file')
-        return null
-      })
+    const updatedHashes = updateHashes(hashes, metaHash)
+    zip.file('hashes.json', JSON.stringify(updatedHashes, null, 2))
 
+    const arrayBuffer = await generateZipArrayBuffer(zip)
     if (!arrayBuffer) return
 
     const key = await generateEncryptionKey()
@@ -450,80 +410,180 @@ export const SignPage = () => {
     const blob = new Blob([encryptedArrayBuffer])
 
     if (await isOnline()) {
-      setLoadingSpinnerDesc('Uploading zip file to file storage.')
-      const fileUrl = await uploadToFileStorage(blob, nostrController)
-        .then((url) => {
-          toast.success('zip file uploaded to file storage')
-          return url
-        })
-        .catch((err) => {
-          console.log('err in upload:>> ', err)
-          setIsLoading(false)
-          toast.error(err.message || 'Error occurred in uploading zip file')
-          return null
-        })
-
-      if (!fileUrl) return
-
-      // check if the current user is the last signer
-      const usersNpub = hexToNpub(usersPubkey!)
-      const lastSignerIndex = signers.length - 1
-      const signerIndex = signers.indexOf(usersNpub)
-      const isLastSigner = signerIndex === lastSignerIndex
-
-      // if current user is the last signer, then send DMs to all signers and viewers
-      if (isLastSigner) {
-        const userSet = new Set<`npub1${string}`>()
-
-        if (submittedBy) {
-          userSet.add(hexToNpub(submittedBy))
-        }
-
-        signers.forEach((signer) => {
-          userSet.add(signer)
-        })
-
-        viewers.forEach((viewer) => {
-          userSet.add(viewer)
-        })
-
-        const users = Array.from(userSet)
-
-        for (const user of users) {
-          // todo: execute in parallel
-          await sendDM(
-            fileUrl,
-            key,
-            npubToHex(user)!,
-            nostrController,
-            false,
-            setAuthUrl
-          )
-        }
-      } else {
-        const nextSigner = signers[signerIndex + 1]
-        await sendDM(
-          fileUrl,
-          key,
-          npubToHex(nextSigner)!,
-          nostrController,
-          true,
-          setAuthUrl
-        )
-      }
-
-      setIsLoading(false)
-
-      // update search params with updated file url and encryption key
-      setSearchParams({
-        file: fileUrl,
-        key: key
-      })
+      await handleOnlineFlow(blob, key)
     } else {
       handleDecryptedArrayBuffer(arrayBuffer).finally(() => setIsLoading(false))
     }
   }
 
+  // Read the content of the hashes.json file
+  const readHashesFile = async (): Promise<string | null> => {
+    return await readContentOfZipEntry(zip!, 'hashes.json', 'string').catch(
+      (err) => {
+        console.log('Error reading hashes.json file:', err)
+        setIsLoading(false)
+        return null
+      }
+    )
+  }
+
+  // Parse the JSON content of the hashes file
+  const parseHashes = async (
+    hashesFileContent: string
+  ): Promise<Record<string, string> | null> => {
+    return await parseJson<Record<string, string>>(hashesFileContent).catch(
+      (err) => {
+        console.log('Error parsing hashes.json content:', err)
+        toast.error(err.message || 'Error parsing hashes.json content')
+        setIsLoading(false)
+        return null
+      }
+    )
+  }
+
+  // Sign the event for the meta file
+  const signEventForMeta = async (prevSig: string) => {
+    return await signEventForMetaFile(
+      JSON.stringify({ prevSig }),
+      nostrController,
+      setIsLoading
+    )
+  }
+
+  // Update the meta signatures
+  const updateMetaSignatures = (meta: Meta, signedEvent: SignedEvent): Meta => {
+    const metaCopy = _.cloneDeep(meta)
+    metaCopy.docSignatures = {
+      ...metaCopy.docSignatures,
+      [hexToNpub(signedEvent.pubkey)]: JSON.stringify(signedEvent, null, 2)
+    }
+    return metaCopy
+  }
+
+  // Update the hashes with the new meta hash
+  const updateHashes = (
+    hashes: Record<string, string>,
+    metaHash: string
+  ): Record<string, string> => {
+    return {
+      ...hashes,
+      [usersPubkey!]: metaHash
+    }
+  }
+
+  // Generate the zip array buffer
+  const generateZipArrayBuffer = async (
+    zip: JSZip
+  ): Promise<ArrayBuffer | null> => {
+    return await zip
+      .generateAsync({
+        type: 'arraybuffer',
+        compression: 'DEFLATE',
+        compressionOptions: {
+          level: 6
+        }
+      })
+      .catch((err) => {
+        console.log('Error generating zip file:', err)
+        setIsLoading(false)
+        toast.error(err.message || 'Error generating zip file')
+        return null
+      })
+  }
+
+  // Handle the online flow: upload file and send DMs
+  const handleOnlineFlow = async (blob: Blob, key: string) => {
+    const fileUrl = await uploadZipFile(blob)
+    if (!fileUrl) return
+
+    const isLastSigner = checkIsLastSigner(signers)
+
+    if (isLastSigner) {
+      await sendDMToAllUsers(fileUrl, key)
+    } else {
+      await sendDMToNextSigner(fileUrl, key)
+    }
+
+    setIsLoading(false)
+
+    // Update search params with updated file URL and encryption key
+    setSearchParams({
+      file: fileUrl,
+      key: key
+    })
+  }
+
+  // Upload the zip file to file storage
+  const uploadZipFile = async (blob: Blob): Promise<string | null> => {
+    setLoadingSpinnerDesc('Uploading zip file to file storage.')
+    const fileUrl = await uploadToFileStorage(blob, nostrController)
+      .then((url) => {
+        toast.success('Zip file uploaded to file storage')
+        return url
+      })
+      .catch((err) => {
+        console.log('Error uploading file:', err)
+        setIsLoading(false)
+        toast.error(err.message || 'Error uploading file')
+        return null
+      })
+
+    return fileUrl
+  }
+
+  // Check if the current user is the last signer
+  const checkIsLastSigner = (signers: string[]): boolean => {
+    const usersNpub = hexToNpub(usersPubkey!)
+    const lastSignerIndex = signers.length - 1
+    const signerIndex = signers.indexOf(usersNpub)
+    return signerIndex === lastSignerIndex
+  }
+
+  // Send DM to all users (signers and viewers)
+  const sendDMToAllUsers = async (fileUrl: string, key: string) => {
+    const userSet = new Set<`npub1${string}`>()
+
+    if (submittedBy) {
+      userSet.add(hexToNpub(submittedBy))
+    }
+
+    signers.forEach((signer) => {
+      userSet.add(signer)
+    })
+
+    viewers.forEach((viewer) => {
+      userSet.add(viewer)
+    })
+
+    const users = Array.from(userSet)
+
+    for (const user of users) {
+      await sendDM(
+        fileUrl,
+        key,
+        npubToHex(user)!,
+        nostrController,
+        false,
+        setAuthUrl
+      )
+    }
+  }
+
+  // Send DM to the next signer
+  const sendDMToNextSigner = async (fileUrl: string, key: string) => {
+    const usersNpub = hexToNpub(usersPubkey!)
+    const signerIndex = signers.indexOf(usersNpub)
+    const nextSigner = signers[signerIndex + 1]
+    await sendDM(
+      fileUrl,
+      key,
+      npubToHex(nextSigner)!,
+      nostrController,
+      true,
+      setAuthUrl
+    )
+  }
+
   const handleExport = async () => {
     if (!meta || !zip || !usersPubkey) return
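
One behavioral note on sendDMToAllUsers: the inline loop it replaces carried a "todo: execute in parallel" comment that the extraction drops while keeping the sequential await-per-user. If the individual sendDM calls are independent (an assumption this commit does not confirm), a later change could fan them out:

// Hypothetical follow-up, not part of this commit: send the DMs in parallel.
// Assumes the sendDM calls do not rely on one another's ordering or side effects.
await Promise.all(
  users.map((user) =>
    sendDM(fileUrl, key, npubToHex(user)!, nostrController, false, setAuthUrl)
  )
)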