import axios from 'axios'
import saveAs from 'file-saver'
import JSZip from 'jszip'
import _ from 'lodash'
import { Event, verifyEvent } from 'nostr-tools'
import { useCallback, useEffect, useState } from 'react'
import { useAppSelector } from '../../hooks'
import { useLocation, useNavigate, useParams } from 'react-router-dom'
import { toast } from 'react-toastify'
import { LoadingSpinner } from '../../components/LoadingSpinner'
import { NostrController } from '../../controllers'
import { appPrivateRoutes, appPublicRoutes } from '../../routes'
import { CreateSignatureEventContent, Meta, SignedEvent } from '../../types'
import {
  ARRAY_BUFFER,
  decryptArrayBuffer,
  DEFLATE,
  encryptArrayBuffer,
  extractMarksFromSignedMeta,
  extractZipUrlAndEncryptionKey,
  filterMarksByPubkey,
  findOtherUserMarks,
  generateEncryptionKey,
  generateKeysFile,
  getCurrentUserFiles,
  getCurrentUserMarks,
  getHash,
  hexToNpub,
  isOnline,
  loadZip,
  npubToHex,
  parseJson,
  processMarks,
  readContentOfZipEntry,
  sendNotification,
  signEventForMetaFile,
  timeout,
  unixNow,
  updateMarks,
  updateUsersAppData,
  uploadMetaToFileStorage
} from '../../utils'
import { CurrentUserMark, Mark } from '../../types/mark.ts'
import PdfMarking from '../../components/PDFView/PdfMarking.tsx'
import {
  convertToSigitFile,
  getZipWithFiles,
  SigitFile
} from '../../utils/file.ts'
import { generateTimestamp } from '../../utils/opentimestamps.ts'
import { MARK_TYPE_CONFIG } from '../../components/MarkTypeStrategy/MarkStrategy.tsx'
import { getLastSignersSig } from '../../utils/sign.ts'

export const SignPage = () => {
  const navigate = useNavigate()
  const location = useLocation()
  const params = useParams()

  const usersAppData = useAppSelector((state) => state.userAppData)

  /**
   * Received from `location.state`
   *
   * uploadedZip is received from the home page when a user uploads a sigit zip wrapper that contains keys.json
   * arrayBuffer (decryptedArrayBuffer) is received in navigation from the create page in offline mode
   * meta (metaInNavState) is received in navigation from the create & home pages in online mode
   */
  let metaInNavState = location?.state?.meta || undefined
  const { arrayBuffer: decryptedArrayBuffer, uploadedZip } = location.state || {
    arrayBuffer: undefined,
    uploadedZip: undefined
  }

  /**
   * If userAppData (redux) is available and we have the route param (sigit id),
   * which is actually a `createEventId`, we fetch the `sigit` for the provided
   * route ID and use it as `metaInNavState`.
   */
  if (usersAppData) {
    const sigitCreateId = params.id

    if (sigitCreateId) {
      const sigit = usersAppData.sigits[sigitCreateId]

      if (sigit) {
        metaInNavState = sigit
      }
    }
  }

  const [files, setFiles] = useState<{ [filename: string]: SigitFile }>({})

  const [isLoading, setIsLoading] = useState(true)
  const [loadingSpinnerDesc, setLoadingSpinnerDesc] = useState('')

  const [meta, setMeta] = useState<Meta | null>(null)

  const [submittedBy, setSubmittedBy] = useState<string>()

  const [signers, setSigners] = useState<`npub1${string}`[]>([])
  const [viewers, setViewers] = useState<`npub1${string}`[]>([])
  const [marks, setMarks] = useState<Mark[]>([])
  const [creatorFileHashes, setCreatorFileHashes] = useState<{
    [key: string]: string
  }>({})
  const [currentFileHashes, setCurrentFileHashes] = useState<{
    [key: string]: string | null
  }>({})

  const usersPubkey = useAppSelector((state) => state.auth.usersPubkey)

  const nostrController = NostrController.getInstance()
  const [currentUserMarks, setCurrentUserMarks] = useState<CurrentUserMark[]>(
    []
  )
  const [otherUserMarks, setOtherUserMarks] = useState<Mark[]>([])

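  // Once meta is available, parse and verify the create-signature event, then
  // derive signers, viewers, file hashes, and marks from its content. Marks that
  // belong to other users are decrypted here so they can be rendered.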
  useEffect(() => {
    const handleUpdatedMeta = async (meta: Meta) => {
      const createSignatureEvent = await parseJson<Event>(
        meta.createSignature
      ).catch((err) => {
        console.log('err in parsing the createSignature event:>> ', err)
        toast.error(
          err.message || 'error occurred in parsing the create signature event'
        )
        setIsLoading(false)
        return null
      })

      if (!createSignatureEvent) return

      const isValidCreateSignature = verifyEvent(createSignatureEvent)

      if (!isValidCreateSignature) {
        toast.error('Create signature is invalid')
        setIsLoading(false)
        return
      }

      const createSignatureContent =
        await parseJson<CreateSignatureEventContent>(
          createSignatureEvent.content
        ).catch((err) => {
          console.log(
            `err in parsing the createSignature event's content :>> `,
            err
          )
          toast.error(
            err.message ||
              `error occurred in parsing the create signature event's content`
          )
          setIsLoading(false)
          return null
        })

      if (!createSignatureContent) return

      setSigners(createSignatureContent.signers)
      setViewers(createSignatureContent.viewers)
      setCreatorFileHashes(createSignatureContent.fileHashes)
      setSubmittedBy(createSignatureEvent.pubkey)
      setMarks(createSignatureContent.markConfig)

      if (usersPubkey) {
        const metaMarks = filterMarksByPubkey(
          createSignatureContent.markConfig,
          usersPubkey!
        )
        const signedMarks = extractMarksFromSignedMeta(meta)
        const currentUserMarks = getCurrentUserMarks(metaMarks, signedMarks)
        const otherUserMarks = findOtherUserMarks(signedMarks, usersPubkey!)

        if (meta.keys) {
          for (let i = 0; i < otherUserMarks.length; i++) {
            const m = otherUserMarks[i]
            const { sender, keys } = meta.keys
            const usersNpub = hexToNpub(usersPubkey)
            if (usersNpub in keys) {
              const encryptionKey = await nostrController
                .nip04Decrypt(sender, keys[usersNpub])
                .catch((err) => {
                  console.log(
                    'An error occurred in decrypting encryption key',
                    err
                  )
                  return null
                })

              try {
                const { fetchAndDecrypt } = MARK_TYPE_CONFIG[m.type] || {}
                if (
                  typeof fetchAndDecrypt === 'function' &&
                  m.value &&
                  encryptionKey
                ) {
                  otherUserMarks[i].value = await fetchAndDecrypt(
                    m.value,
                    encryptionKey
                  )
                }
              } catch (error) {
                console.error(`Error during mark fetchAndDecrypt phase`, error)
              }
            }
          }
        }

        setOtherUserMarks(otherUserMarks)
        setCurrentUserMarks(currentUserMarks)
      }
    }

    if (meta) {
      handleUpdatedMeta(meta)
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [meta, usersPubkey])

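  // Decrypt an uploaded sigit zip wrapper: read keys.json, try each wrapped key
  // until one decrypts via NIP-04, then decrypt the inner compressed.sigit archive.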
  const decrypt = useCallback(
    async (file: File) => {
      setLoadingSpinnerDesc('Decrypting file')

      const zip = await loadZip(file)
      if (!zip) return

      const parsedKeysJson = await parseKeysJson(zip)
      if (!parsedKeysJson) return

      const encryptedArrayBuffer = await readContentOfZipEntry(
        zip,
        'compressed.sigit',
        'arraybuffer'
      )

      if (!encryptedArrayBuffer) return

      const { keys, sender } = parsedKeysJson

      for (const key of keys) {
        // decrypt the encryption key, with a 60-second timeout
        const encryptionKey = await Promise.race([
          nostrController.nip04Decrypt(sender, key),
          timeout(60000)
        ]).catch((err) => {
          console.log('err :>> ', err)
          return null
        })

        // Skip this key if decryption failed or timed out
        if (!encryptionKey) continue

        const arrayBuffer = await decryptArrayBuffer(
          encryptedArrayBuffer,
          encryptionKey
        )
          .catch((err) => {
            console.log('err in decryption:>> ', err)
            return null
          })
          .finally(() => {
            setIsLoading(false)
          })

        if (arrayBuffer) return arrayBuffer
      }

      return null
    },
    [nostrController]
  )

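  // Load the sigit referenced by navigation state or the route param:
  // online mode fetches the encrypted archive from the file server (Blossom),
  // offline mode falls back to a decrypted array buffer or an uploaded zip.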
  useEffect(() => {
    // online mode - from create and home page views
    if (metaInNavState) {
      const processSigit = async () => {
        setIsLoading(true)
        setLoadingSpinnerDesc('Extracting zipUrl and encryption key from meta')

        const res = await extractZipUrlAndEncryptionKey(metaInNavState)
        if (!res) {
          setIsLoading(false)
          return
        }

        const { zipUrls, encryptionKey } = res

        for (let i = 0; i < zipUrls.length; i++) {
          const zipUrl = zipUrls[i]
          const isLastZipUrl = i === zipUrls.length - 1

          setLoadingSpinnerDesc('Fetching file from file server')

          const res = await axios
            .get(zipUrl, {
              responseType: 'arraybuffer'
            })
            .catch((err) => {
              console.error(
                `error occurred in getting file from ${zipUrl}`,
                err
              )
              toast.error(
                err.message || `error occurred in getting file from ${zipUrl}`
              )
              return null
            })

          setIsLoading(false)

          if (res) {
            handleArrayBufferFromBlossom(res.data, encryptionKey)
            setMeta(metaInNavState)
            break
          } else {
            // No data returned; if this was the last URL, stop trying
            if (isLastZipUrl) {
              break
            }
          }
        }
      }

      processSigit()
    } else if (decryptedArrayBuffer) {
      handleDecryptedArrayBuffer(decryptedArrayBuffer).finally(() =>
        setIsLoading(false)
      )
    } else if (uploadedZip) {
      decrypt(uploadedZip)
        .then((arrayBuffer) => {
          if (arrayBuffer) handleDecryptedArrayBuffer(arrayBuffer)
        })
        .catch((err) => {
          console.error(`error occurred in decryption`, err)
          toast.error(err.message || `error occurred in decryption`)
        })
        .finally(() => {
          setIsLoading(false)
        })
    } else {
      setIsLoading(false)
    }
  }, [decryptedArrayBuffer, uploadedZip, metaInNavState, decrypt])

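  // Decrypt an archive fetched from the file server, unzip it, convert the
  // entries to SigitFiles, and compute per-file hashes so they can be compared
  // against the creator's recorded hashes.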
  const handleArrayBufferFromBlossom = async (
    arrayBuffer: ArrayBuffer,
    encryptionKey: string
  ) => {
    // array buffer returned from blossom is encrypted.
    // So, first decrypt it
    const decrypted = await decryptArrayBuffer(
      arrayBuffer,
      encryptionKey
    ).catch((err) => {
      console.log('err in decryption:>> ', err)
      toast.error(err.message || 'An error occurred in decrypting file.')
      setIsLoading(false)
      return null
    })

    if (!decrypted) return

    const zip = await loadZip(decrypted)
    if (!zip) {
      setIsLoading(false)
      return
    }

    const files: { [filename: string]: SigitFile } = {}
    const fileHashes: { [key: string]: string | null } = {}
    const fileNames = Object.values(zip.files).map((entry) => entry.name)

    // generate hashes for all files in zipArchive
    // these hashes can be used to verify the originality of files
    for (const fileName of fileNames) {
      const arrayBuffer = await readContentOfZipEntry(
        zip,
        fileName,
        'arraybuffer'
      )

      if (arrayBuffer) {
        files[fileName] = await convertToSigitFile(arrayBuffer, fileName)
        const hash = await getHash(arrayBuffer)
        if (hash) {
          fileHashes[fileName] = hash
        }
      } else {
        fileHashes[fileName] = null
      }
    }

    setFiles(files)
    setCurrentFileHashes(fileHashes)
  }

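  // Merge a changed mark into the local mark configuration state.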
  const setUpdatedMarks = (markToUpdate: Mark) => {
    const updatedMarks = updateMarks(marks, markToUpdate)
    setMarks(updatedMarks)
  }

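  // Read and parse keys.json from the zip wrapper; it holds the sender's pubkey
  // and the NIP-04 encrypted keys for the inner compressed.sigit archive.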
  const parseKeysJson = async (zip: JSZip) => {
    const keysFileContent = await readContentOfZipEntry(
      zip,
      'keys.json',
      'string'
    )

    if (!keysFileContent) return null

    return await parseJson<{ sender: string; keys: string[] }>(
      keysFileContent
    ).catch((err) => {
      console.log(`Error parsing content of keys.json:`, err)
      toast.error(err.message || `Error parsing content of keys.json`)
      return null
    })
  }

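  // Unpack a decrypted sigit archive: load the entries under files/, hash each
  // one, then parse meta.json and store it so the meta effect above can run.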
  const handleDecryptedArrayBuffer = async (arrayBuffer: ArrayBuffer) => {
    const decryptedZipFile = new File([arrayBuffer], 'decrypted.zip')

    setLoadingSpinnerDesc('Parsing zip file')

    const zip = await loadZip(decryptedZipFile)
    if (!zip) return

    const files: { [filename: string]: SigitFile } = {}
    const fileHashes: { [key: string]: string | null } = {}
    const fileNames = Object.values(zip.files)
      .filter((entry) => entry.name.startsWith('files/') && !entry.dir)
      .map((entry) => entry.name)

    for (const zipFilePath of fileNames) {
      const arrayBuffer = await readContentOfZipEntry(
        zip,
        zipFilePath,
        'arraybuffer'
      )

      const fileName = zipFilePath.replace(/^files\//, '')
      if (arrayBuffer) {
        files[fileName] = await convertToSigitFile(arrayBuffer, fileName)

        // generate hashes for all entries in files folder of zipArchive
        // these hashes can be used to verify the originality of files
        const hash = await getHash(arrayBuffer)
        if (hash) {
          fileHashes[fileName] = hash
        }
      } else {
        fileHashes[fileName] = null
      }
    }

    setFiles(files)
    setCurrentFileHashes(fileHashes)
    setLoadingSpinnerDesc('Parsing meta.json')

    const metaFileContent = await readContentOfZipEntry(
      zip,
      'meta.json',
      'string'
    )

    if (!metaFileContent) {
      setIsLoading(false)
      return
    }

    const parsedMetaJson = await parseJson<Meta>(metaFileContent).catch(
      (err) => {
        console.log('err in parsing the content of meta.json :>> ', err)
        toast.error(
          err.message || 'error occurred in parsing the content of meta.json'
        )
        setIsLoading(false)
        return null
      }
    )

    setMeta(parsedMetaJson)
  }

  /**
   * Start the signing process.
   * When the user signs, the files are automatically published to all of the
   * user's preferred servers.
   */
  const handleSign = async () => {
    if (Object.entries(files).length === 0 || !meta) return

    setIsLoading(true)

    setLoadingSpinnerDesc('Signing nostr event')
    const usersNpub = hexToNpub(usersPubkey!)
    const prevSig = getPrevSignersSig(usersNpub)
    if (!prevSig) {
      setIsLoading(false)
      toast.error('Previous signature is invalid')
      return
    }

    const marks = getSignerMarksForMeta() || []

    let encryptionKey: string | undefined
    if (meta.keys) {
      const { sender, keys } = meta.keys
      encryptionKey = await nostrController
        .nip04Decrypt(sender, keys[usersNpub])
        .catch((err) => {
          // Log and display an error message if decryption fails
          console.log('An error occurred in decrypting encryption key', err)
          toast.error('An error occurred in decrypting encryption key')
          return undefined
        })
    }

    const processedMarks = await processMarks(marks, encryptionKey)

    const signedEvent = await signEventForMeta({
      prevSig,
      marks: processedMarks
    })

    if (!signedEvent) return

    const updatedMeta = updateMetaSignatures(meta, signedEvent)

    setLoadingSpinnerDesc('Generating an open timestamp.')

    const timestamp = await generateTimestamp(signedEvent.id)
    if (timestamp) {
      updatedMeta.timestamps = [...(updatedMeta.timestamps || []), timestamp]
      updatedMeta.modifiedAt = unixNow()
    }

    if (await isOnline()) {
      await handleOnlineFlow(updatedMeta, encryptionKey)
    } else {
      setMeta(updatedMeta)
      setIsLoading(false)
    }

    if (metaInNavState) {
      const createSignature = JSON.parse(metaInNavState.createSignature)
      navigate(`${appPublicRoutes.verify}/${createSignature.id}`)
    } else {
      navigate(appPrivateRoutes.homePage)
    }
  }

  // Sign the event for the meta file
  const signEventForMeta = async (signerContent: {
    prevSig: string
    marks: Mark[]
  }) => {
    return await signEventForMetaFile(
      JSON.stringify(signerContent),
      nostrController,
      setIsLoading
    )
  }

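  // Collect the current signer's marks for inclusion in the signed meta content.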
  const getSignerMarksForMeta = (): Mark[] | undefined => {
    if (currentUserMarks.length === 0) return
    return currentUserMarks.map(({ mark }: CurrentUserMark) => mark)
  }

  // Update the meta signatures
  const updateMetaSignatures = (meta: Meta, signedEvent: SignedEvent): Meta => {
    const metaCopy = _.cloneDeep(meta)
    metaCopy.docSignatures = {
      ...metaCopy.docSignatures,
      [hexToNpub(signedEvent.pubkey)]: JSON.stringify(signedEvent, null, 2)
    }
    metaCopy.modifiedAt = unixNow()
    return metaCopy
  }

  // create final zip file
  const createFinalZipFile = async (
    encryptedArrayBuffer: ArrayBuffer,
    encryptionKey: string
  ): Promise<File | null> => {
    // Wrap the encrypted archive in a Blob
    const blob = new Blob([encryptedArrayBuffer])
    // Create a File object with the Blob data
    const file = new File([blob], `compressed.sigit`, {
      type: 'application/sigit'
    })

    const isLastSigner = checkIsLastSigner(signers)

    const userSet = new Set<string>()

    if (isLastSigner) {
      if (submittedBy) {
        userSet.add(submittedBy)
      }

      signers.forEach((signer) => {
        userSet.add(npubToHex(signer)!)
      })

      viewers.forEach((viewer) => {
        userSet.add(npubToHex(viewer)!)
      })
    } else {
      const usersNpub = hexToNpub(usersPubkey!)
      const signerIndex = signers.indexOf(usersNpub)
      const nextSigner = signers[signerIndex + 1]
      userSet.add(npubToHex(nextSigner)!)
    }

    const keysFileContent = await generateKeysFile(
      Array.from(userSet),
      encryptionKey
    )
    if (!keysFileContent) return null

    const zip = new JSZip()
    zip.file(`compressed.sigit`, file)
    zip.file('keys.json', keysFileContent)

    const arraybuffer = await zip
      .generateAsync({
        type: 'arraybuffer',
        compression: 'DEFLATE',
        compressionOptions: { level: 6 }
      })
      .catch(handleZipError)

    if (!arraybuffer) return null

    return new File([new Blob([arraybuffer])], `${unixNow()}.sigit.zip`, {
      type: 'application/zip'
    })
  }

  // Check if the current user is the last signer
  const checkIsLastSigner = (signers: string[]): boolean => {
    const usersNpub = hexToNpub(usersPubkey!)
    const lastSignerIndex = signers.length - 1
    const signerIndex = signers.indexOf(usersNpub)
    return signerIndex === lastSignerIndex
  }

  // Handle errors during zip file generation
  const handleZipError = (err: unknown) => {
    console.log('Error in zip:>> ', err)
    setIsLoading(false)
    if (err instanceof Error) {
      toast.error(err.message || 'Error occurred in generating zip file')
    }
    return null
  }

  // Handle the online flow: update users app data and send notifications
  const handleOnlineFlow = async (
    meta: Meta,
    encryptionKey: string | undefined
  ) => {
    setLoadingSpinnerDesc('Updating users app data')
    const updatedEvent = await updateUsersAppData(meta)
    if (!updatedEvent) {
      setIsLoading(false)
      return
    }

    let metaUrls: string[]
    try {
      metaUrls = await uploadMetaToFileStorage(meta, encryptionKey)
    } catch (error) {
      if (error instanceof Error) {
        toast.error(error.message)
      }
      console.error(error)
      setIsLoading(false)
      return
    }

    const userSet = new Set<`npub1${string}`>()
    if (submittedBy && submittedBy !== usersPubkey) {
      userSet.add(hexToNpub(submittedBy))
    }

    const usersNpub = hexToNpub(usersPubkey!)
    const isLastSigner = checkIsLastSigner(signers)
    if (isLastSigner) {
      signers.forEach((signer) => {
        if (signer !== usersNpub) {
          userSet.add(signer)
        }
      })

      viewers.forEach((viewer) => {
        userSet.add(viewer)
      })
    } else {
      const currentSignerIndex = signers.indexOf(usersNpub)
      const prevSigners = signers.slice(0, currentSignerIndex)

      prevSigners.forEach((signer) => {
        userSet.add(signer)
      })

      const nextSigner = signers[currentSignerIndex + 1]
      userSet.add(nextSigner)
    }

    setLoadingSpinnerDesc('Sending notifications')
    const users = Array.from(userSet)
    const promises = users.map((user) =>
      sendNotification(npubToHex(user)!, {
        metaUrls: metaUrls,
        keys: meta.keys
      })
    )
    await Promise.all(promises)
      .then(() => {
        toast.success('Notifications sent successfully')
        setMeta(meta)
      })
      .catch(() => {
        toast.error('Failed to publish notifications')
      })

    setIsLoading(false)
  }

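  // Export the current sigit as a plain zip and take the user to the verify page.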
  const handleExport = async () => {
    const arrayBuffer = await prepareZipExport()
    if (!arrayBuffer) return

    const blob = new Blob([arrayBuffer])
    saveAs(blob, `exported-${unixNow()}.sigit.zip`)

    setIsLoading(false)

    navigate(appPublicRoutes.verify)
  }

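  // Export the current sigit as an encrypted zip wrapper (compressed.sigit + keys.json).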
  const handleEncryptedExport = async () => {
    const arrayBuffer = await prepareZipExport()
    if (!arrayBuffer) return

    const key = await generateEncryptionKey()

    setLoadingSpinnerDesc('Encrypting zip file')
    const encryptedArrayBuffer = await encryptArrayBuffer(arrayBuffer, key)

    const finalZipFile = await createFinalZipFile(encryptedArrayBuffer, key)

    if (!finalZipFile) return
    saveAs(finalZipFile, `exported-${unixNow()}.sigit.zip`)

    setIsLoading(false)
  }

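  // Build the exportable archive: sign an export event over the last signer's
  // signature, embed it in meta.json, and zip it together with the files.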
  const prepareZipExport = async (): Promise<ArrayBuffer | null> => {
    if (Object.entries(files).length === 0 || !meta || !usersPubkey)
      return null

    const usersNpub = hexToNpub(usersPubkey)
    if (
      !signers.includes(usersNpub) &&
      !viewers.includes(usersNpub) &&
      submittedBy !== usersNpub
    )
      return null

    setIsLoading(true)
    setLoadingSpinnerDesc('Signing nostr event')

    if (!meta) return null

    const prevSig = getLastSignersSig(meta, signers)
    if (!prevSig) return null

    const signedEvent = await signEventForMetaFile(
      JSON.stringify({
        prevSig
      }),
      nostrController,
      setIsLoading
    )

    if (!signedEvent) return null

    const exportSignature = JSON.stringify(signedEvent, null, 2)

    const stringifiedMeta = JSON.stringify(
      {
        ...meta,
        exportSignature
      },
      null,
      2
    )

    const zip = await getZipWithFiles(meta, files)
    zip.file('meta.json', stringifiedMeta)

    const arrayBuffer = await zip
      .generateAsync({
        type: ARRAY_BUFFER,
        compression: DEFLATE,
        compressionOptions: {
          level: 6
        }
      })
      .catch((err) => {
        console.log('err in zip:>> ', err)
        setIsLoading(false)
        toast.error(err.message || 'Error occurred in generating zip file')
        return null
      })

    if (!arrayBuffer) return null

    return arrayBuffer
  }

  /**
   * Accepts the npub of a signer and returns the signature of its previous signer.
   * This prevSig is used in the content of the provided signer's signedEvent.
   */
  const getPrevSignersSig = (npub: string) => {
    if (!meta) return null

    // if the user is the first signer, use the creator's signature
    if (signers[0] === npub) {
      try {
        const createSignatureEvent: Event = JSON.parse(meta.createSignature)
        return createSignatureEvent.sig
      } catch (error) {
        return null
      }
    }

    // find the index of the signer
    const currentSignerIndex = signers.findIndex((signer) => signer === npub)
    // return null if the user could not be found in the signers list
    if (currentSignerIndex === -1) return null
    // find the previous signer
    const prevSigner = signers[currentSignerIndex - 1]

    // get the signature of the previous signer
    try {
      const prevSignersEvent: Event = JSON.parse(meta.docSignatures[prevSigner])
      return prevSignersEvent.sig
    } catch (error) {
      return null
    }
  }

  if (isLoading) {
    return <LoadingSpinner desc={loadingSpinnerDesc} />
  }

  return (
    <PdfMarking
      files={getCurrentUserFiles(files, currentFileHashes, creatorFileHashes)}
      currentUserMarks={currentUserMarks}
      setCurrentUserMarks={setCurrentUserMarks}
      setUpdatedMarks={setUpdatedMarks}
      handleSign={handleSign}
      handleExport={handleExport}
      handleEncryptedExport={handleEncryptedExport}
      otherUserMarks={otherUserMarks}
      meta={meta}
    />
  )
}