commit faa52f6013
Author: Stixx
Date: 2024-11-04 16:09:32 +01:00
106 changed files with 9988 additions and 0 deletions

8
.changeset/README.md Normal file

@ -0,0 +1,8 @@
# Changesets
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
with multi-package repos or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets).
We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)

11
.changeset/config.json Normal file

@ -0,0 +1,11 @@
{
"$schema": "https://unpkg.com/@changesets/config@3.0.0/schema.json",
"changelog": "@changesets/cli/changelog",
"commit": false,
"fixed": [],
"linked": [],
"access": "public",
"baseBranch": "master",
"updateInternalDependencies": "patch",
"ignore": []
}


@ -0,0 +1,5 @@
---
"blossom-server-ts": patch
---
Fix crash from upload race condition

8
.dockerignore Normal file

@ -0,0 +1,8 @@
data
.env
node_modules
.github
database.json
dist
admin/node_modules
config.yml


@ -0,0 +1,26 @@
name: Release to Gitea Container Registry
on:
push:
branches:
- main
jobs:
build_and_release:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Log in to local registry
uses: docker/login-action@v3
with:
registry: my.gitea.com
username: ${{ gitea.repository_owner }}
password: ${{ secrets.REGISTRY_TOKEN }} # gitea.token doesn't work for push, provide a personal access token
- name: Build Docker image
run: docker build -t git.nostrdev.com/stuff/blossom-cloudron .
- name: Push Docker image
run: docker push git.nostrdev.com/stuff/blossom-cloudron

60
.github/workflows/docker-image.yml vendored Normal file

@ -0,0 +1,60 @@
name: Docker image
on:
push:
branches:
- "**"
tags:
- "v*.*.*"
pull_request:
branches:
- "master"
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build-and-push-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to the Container registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}


@ -0,0 +1,47 @@
name: Release
on:
push:
branches:
- master
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
issues: write
pull-requests: write
packages: write
jobs:
release:
name: Release
runs-on: ubuntu-latest
outputs:
published: ${{ steps.changesets.outputs.published }}
publishedPackages: ${{ steps.changesets.outputs.publishedPackages }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
- name: Setup Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
cache: "pnpm"
- name: Install Dependencies
run: pnpm install
- name: Create Release Pull Request or Publish to npm
id: changesets
uses: changesets/action@v1
with:
publish: pnpm publish --no-git-checks
env:
HOME: ${{ github.workspace }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

7
.gitignore vendored Normal file

@ -0,0 +1,7 @@
node_modules
data
.env
build
database.json
config.yml
dist

5
.idea/.gitignore vendored Normal file

@ -0,0 +1,5 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/


@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
<excludeFolder url="file://$MODULE_DIR$/temp" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>


@ -0,0 +1,57 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<HTMLCodeStyleSettings>
<option name="HTML_SPACE_INSIDE_EMPTY_TAG" value="true" />
</HTMLCodeStyleSettings>
<JSCodeStyleSettings version="0">
<option name="FORCE_SEMICOLON_STYLE" value="true" />
<option name="SPACE_BEFORE_FUNCTION_LEFT_PARENTH" value="false" />
<option name="FORCE_QUOTE_STYlE" value="true" />
<option name="ENFORCE_TRAILING_COMMA" value="Remove" />
<option name="SPACES_WITHIN_OBJECT_LITERAL_BRACES" value="true" />
<option name="SPACES_WITHIN_IMPORTS" value="true" />
</JSCodeStyleSettings>
<TypeScriptCodeStyleSettings version="0">
<option name="FORCE_SEMICOLON_STYLE" value="true" />
<option name="SPACE_BEFORE_FUNCTION_LEFT_PARENTH" value="false" />
<option name="FORCE_QUOTE_STYlE" value="true" />
<option name="ENFORCE_TRAILING_COMMA" value="Remove" />
<option name="SPACES_WITHIN_OBJECT_LITERAL_BRACES" value="true" />
<option name="SPACES_WITHIN_IMPORTS" value="true" />
</TypeScriptCodeStyleSettings>
<VueCodeStyleSettings>
<option name="INTERPOLATION_NEW_LINE_AFTER_START_DELIMITER" value="false" />
<option name="INTERPOLATION_NEW_LINE_BEFORE_END_DELIMITER" value="false" />
</VueCodeStyleSettings>
<codeStyleSettings language="HTML">
<option name="SOFT_MARGINS" value="120" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="JavaScript">
<option name="SOFT_MARGINS" value="120" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="TypeScript">
<option name="SOFT_MARGINS" value="120" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="Vue">
<option name="SOFT_MARGINS" value="120" />
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
</code_scheme>
</component>


@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="USE_PER_PROJECT_SETTINGS" value="true" />
</state>
</component>

8
.idea/modules.xml Normal file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/blossom-cloudron.iml" filepath="$PROJECT_DIR$/.idea/blossom-cloudron.iml" />
</modules>
</component>
</project>

6
.idea/prettier.xml Normal file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PrettierConfiguration">
<option name="myConfigurationMode" value="AUTOMATIC" />
</component>
</project>

6
.idea/vcs.xml Normal file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

3
.netrc Normal file

@ -0,0 +1,3 @@
machine github.com
login github-actions[bot]
password ghs_W3XBo3kciFJOgZB2Pu7PSfIaGpaQVY0ao59F

1
.nvmrc Normal file

@ -0,0 +1 @@
20

1
.prettierignore Normal file

@ -0,0 +1 @@
public/lib

5
.prettierrc Normal file

@ -0,0 +1,5 @@
{
"printWidth": 120,
"useTabs": false,
"tabWidth": 2
}

36
.vscode/launch.json vendored Normal file

@ -0,0 +1,36 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "dev",
"type": "node",
"request": "launch",
"args": ["./src/index.ts"],
"runtimeArgs": ["--loader", "@swc-node/register/esm"],
"cwd": "${workspaceRoot}",
"protocol": "inspector",
"internalConsoleOptions": "openOnSessionStart",
"outputCapture": "std",
"env": {
"DEBUG": "blossom-server,blossom-server:*"
}
},
{
"name": "start",
"type": "node",
"request": "launch",
"args": ["./build/index.js"],
"cwd": "${workspaceRoot}",
"protocol": "inspector",
"internalConsoleOptions": "openOnSessionStart",
"outputCapture": "std",
"env": {
"DEBUG": "blossom-server,blossom-server:*",
"DEBUG_COLORS": "1"
}
}
]
}

3
.vscode/settings.json vendored Normal file

@ -0,0 +1,3 @@
{
"javascript.preferences.importModuleSpecifierEnding": "js"
}

114
CHANGELOG.md Normal file

@ -0,0 +1,114 @@
# blossom-server
## 4.4.0
### Minor Changes
- af5d4c6: Add support for BUD-06 HEAD /upload endpoint
- af5d4c6: Support auth events with multiple `x` tags
### Patch Changes
- 7908d09: Bump dependencies
## 4.3.2
### Patch Changes
- 4f6080a: Fix uncaught error when fetching blob from HTTP
## 4.3.1
### Patch Changes
- Fix bug with some browsers setting incorrect mime type for .m3u8
- 8096b37: Expand S3 Storage options
## 4.3.0
### Minor Changes
- 6749892: Add `useSSL` and `region` options for s3 storage
## 4.2.0
### Minor Changes
- Add `removeWhenNoOwners` option to config
- Add window.nostr.js to landing page
## 4.1.1
### Patch Changes
- Fix typo in /upload endpoint
## 4.1.0
### Minor Changes
- Add /mirror endpoint
- Add mirror page to UI
## 4.0.1
### Patch Changes
- Replace websocket-polyfill package
## 4.0.0
### Major Changes
- Require "x" tag with sha256 hash on uploads
### Minor Changes
- Rebuild landing page with tailwind and lit
## 3.0.0
### Major Changes
- Rename "created" field to "uploaded" on blobs
### Minor Changes
- Support "since" and "until" on /list endpoint
## 2.1.2
### Patch Changes
- 7520055: Add default for publicDomain option
- 7520055: Change default local blob directory
- 7520055: Fix bug with unowned blobs in dashboard
## 2.1.1
### Patch Changes
- Create data directory if it does not exist
## 2.1.0
### Minor Changes
- Add blob details page with preview
## 2.0.0
### Major Changes
- Add simple admin dashboard
### Patch Changes
- Fix bug with app crashing when config fields missing
## 1.1.1
### Patch Changes
- Fix docker image
- Fix expiration in auth events for index.html

32
Dockerfile Normal file

@ -0,0 +1,32 @@
# syntax=docker/dockerfile:1
FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
WORKDIR /app
COPY . .
FROM base AS prod-deps
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
FROM base AS build
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN --mount=type=cache,id=pnpm,target=/pnpm/store cd admin && pnpm install --frozen-lockfile
RUN pnpm build
RUN cd admin && pnpm build
FROM base AS main
COPY --from=prod-deps /app/node_modules /app/node_modules
COPY --from=build ./app/build ./build
COPY --from=build ./app/admin/dist ./admin/dist
COPY ./public ./public
VOLUME [ "/app/data" ]
EXPOSE 3000
ENV DEBUG="blossom-server,blossom-server:*"
ENTRYPOINT [ "node", "." ]

21
LICENSE.txt Normal file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2024 hzrd149
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

72
README.md Normal file

@ -0,0 +1,72 @@
# 🌸 Blossom-server
blossom-server is a TypeScript implementation of a [Blossom Server](https://github.com/hzrd149/blossom/blob/master/Server.md).
## Supported BUDs
- BUD-01
- `GET /<sha256>` Retrieve blob
- `HEAD /<sha256>` Check blob
- BUD-02
- `PUT /upload` Upload blob
- `GET /list/<pubkey>` List blobs
- `DELETE /<sha256>` Delete blob
- BUD-04
- `PUT /mirror` Mirror blob
- BUD-06
- `HEAD /upload` Upload check
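
For a rough sense of how a client talks to the endpoints above, here is a minimal upload sketch in TypeScript. It assumes a NIP-07 signer exposed as `window.nostr` (the same assumption the landing-page scripts in `public/` make) and mirrors the kind 24242 authorization events built in `public/list-blobs.js`; the `t`/`x`/`expiration` tag names and the `Nostr <base64 event>` header format come from that code, while the `content` string and the exact response shape are assumptions rather than a canonical client.

```ts
// Hedged sketch of a BUD-02 upload; assumes a browser context with a NIP-07 signer.
async function uploadBlob(server: string, file: Blob) {
  // sha256 of the blob, sent in the "x" tag (required since blossom-server 4.0.0)
  const bytes = new Uint8Array(await file.arrayBuffer());
  const digest = new Uint8Array(await crypto.subtle.digest("SHA-256", bytes));
  const sha256 = Array.from(digest, (b) => b.toString(16).padStart(2, "0")).join("");

  // kind 24242 authorization event, same shape as in public/list-blobs.js
  const now = Math.floor(Date.now() / 1000);
  const auth = await (window as any).nostr.signEvent({
    kind: 24242,
    content: "Upload Blob",
    created_at: now,
    tags: [
      ["t", "upload"],
      ["x", sha256],
      ["expiration", String(now + 60)],
    ],
  });

  // PUT /upload with the "Nostr <base64 event>" authorization header
  const res = await fetch(new URL("/upload", server), {
    method: "PUT",
    body: file,
    headers: { authorization: "Nostr " + btoa(JSON.stringify(auth)) },
  });
  if (!res.ok) throw new Error(await res.text());
  return res.json(); // blob descriptor (sha256, url, size, type, ...)
}
```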
## Running with npx
This app is also packaged as an npm module, which you can easily run:
```sh
# copy the example config
wget https://raw.githubusercontent.com/hzrd149/blossom-server/master/config.example.yml -O config.yml
# run using npx
npx blossom-server-ts
```
## Running with docker
An example config file can be found [here](./config.example.yml)
```sh
# create data volume
docker volume create blossom_data
# run container
docker run -v blossom_data:/app/data -v $(pwd)/config.yml:/app/config.yml -p 3000:3000 ghcr.io/hzrd149/blossom-server:master
```
You can also run it using docker compose with the [`docker-compose.yml`](./docker-compose.yml) file
## Running from source
This project uses [pnpm](https://pnpm.io/) to manage dependencies, which needs to be installed first in order to build the app.
Next clone the repo, install the dependencies, and build
```sh
git clone https://github.com/hzrd149/blossom-server.git
cd blossom-server
pnpm install
cd admin && pnpm install && cd ../
pnpm build
```
Next copy the config and modify it
```sh
cp config.example.yml config.yml
nano config.yml
```
And finally start the app
```sh
pnpm start
# or
node .
```
Once the server is running, you can open `http://localhost:3000` to access it.
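
For a quick smoke test of a running server (a hedged sketch, not part of the repo), the BUD-01 endpoints listed above can be exercised from Node 20 with ESM or a browser console; replace the placeholder with the sha256 of a blob you have uploaded:

```ts
const base = "http://localhost:3000";

// The landing page should come back as HTML
const home = await fetch(base);
console.log("landing page:", home.status, home.headers.get("content-type"));

// BUD-01: HEAD /<sha256> answers 200 if the blob exists, 404 otherwise
const sha256 = "<sha256-of-an-uploaded-blob>";
const head = await fetch(`${base}/${sha256}`, { method: "HEAD" });
console.log("blob exists:", head.ok);
```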

119
admin/index.html Normal file

@ -0,0 +1,119 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="minimum-scale=1, initial-scale=1, width=device-width, shrink-to-fit=no" />
<meta name="theme-color" content="#000000" />
<link rel="manifest" href="./manifest.json" />
<link rel="shortcut icon" href="./favicon.ico" />
<title>Blossom Server</title>
<style>
body {
margin: 0;
padding: 0;
font-family: sans-serif;
}
.loader-container {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
background-color: #fafafa;
}
/* CSS Spinner from https://projects.lukehaas.me/css-loaders/ */
.loader,
.loader:before,
.loader:after {
border-radius: 50%;
}
.loader {
color: #283593;
font-size: 11px;
text-indent: -99999em;
margin: 55px auto;
position: relative;
width: 10em;
height: 10em;
box-shadow: inset 0 0 0 1em;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
}
.loader:before,
.loader:after {
position: absolute;
content: "";
}
.loader:before {
width: 5.2em;
height: 10.2em;
background: #fafafa;
border-radius: 10.2em 0 0 10.2em;
top: -0.1em;
left: -0.1em;
-webkit-transform-origin: 5.2em 5.1em;
transform-origin: 5.2em 5.1em;
-webkit-animation: load2 2s infinite ease 1.5s;
animation: load2 2s infinite ease 1.5s;
}
.loader:after {
width: 5.2em;
height: 10.2em;
background: #fafafa;
border-radius: 0 10.2em 10.2em 0;
top: -0.1em;
left: 5.1em;
-webkit-transform-origin: 0px 5.1em;
transform-origin: 0px 5.1em;
-webkit-animation: load2 2s infinite ease;
animation: load2 2s infinite ease;
}
@-webkit-keyframes load2 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
@keyframes load2 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
</style>
<link rel="preconnect" href="https://fonts.gstatic.com" />
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap" rel="stylesheet" />
</head>
<body>
<noscript> You need to enable JavaScript to run this app. </noscript>
<div id="root">
<div class="loader-container">
<div class="loader">Loading...</div>
</div>
</div>
</body>
<script type="module" src="/src/index.tsx"></script>
</html>

33
admin/package.json Normal file

@ -0,0 +1,33 @@
{
"name": "admin",
"private": true,
"scripts": {
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"type-check": "tsc --noEmit",
"format": "prettier --write ./src"
},
"dependencies": {
"@mui/icons-material": "^5.15.15",
"@mui/material": "^5.15.15",
"dayjs": "^1.11.10",
"mime-types": "^2.1.35",
"nanoid": "^5.0.7",
"ra-data-simple-rest": "^4.16.16",
"react": "^18.2.0",
"react-admin": "^4.16.16",
"react-dom": "^18.2.0",
"react-query": "^3.39.3"
},
"devDependencies": {
"@types/mime-types": "^2.1.4",
"@types/node": "^20.12.7",
"@types/react": "^18.2.79",
"@types/react-dom": "^18.2.25",
"@vitejs/plugin-react": "^4.2.1",
"prettier": "^3.2.5",
"typescript": "^5.4.5",
"vite": "^5.2.10"
}
}

2353
admin/pnpm-lock.yaml Normal file

File diff suppressed because it is too large

26
admin/src/App.tsx Normal file

@ -0,0 +1,26 @@
import React from "react";
import { Admin, Resource, defaultLightTheme, defaultDarkTheme } from "react-admin";
import { Code, FolderOpen, People } from "@mui/icons-material";
import { dataProvider } from "./dataProvider";
import { authProvider } from "./authProvider";
import { CustomLayout } from "./Layout";
import * as users from "./users";
import * as blobs from "./blobs";
import * as rules from "./rules";
export const App = () => (
<Admin
dataProvider={dataProvider}
authProvider={authProvider}
disableTelemetry
lightTheme={defaultLightTheme}
darkTheme={defaultDarkTheme}
defaultTheme="dark"
layout={CustomLayout}
>
<Resource name="blobs" icon={FolderOpen} {...blobs} />
<Resource name="users" icon={People} {...users} />
<Resource name="rules" icon={Code} {...rules} />
</Admin>
);

12
admin/src/Layout.tsx Normal file

@ -0,0 +1,12 @@
import React from "react";
import { Layout } from "react-admin";
import { ReactQueryDevtools } from "react-query/devtools";
export function CustomLayout(props) {
return (
<>
<Layout {...props} />
<ReactQueryDevtools initialIsOpen={false} />
</>
);
}

46
admin/src/authProvider.ts Normal file

@ -0,0 +1,46 @@
import { AuthProvider, HttpError } from "react-admin";
import { API_BASE } from "./env";
function getApiURL(path: string) {
return API_BASE + path;
}
let savedAuth: null | string = null;
export function getAuthHeaders(): { Authorization: string } | {} {
if (!savedAuth) return {};
return { Authorization: savedAuth };
}
export const authProvider: AuthProvider = {
async login({ username, password }) {
const auth = "Basic " + btoa(username + ":" + password);
const res = await fetch(getApiURL("/auth"), {
method: "post",
headers: { "Content-Type": "application/json", Authorization: auth },
});
if (res.ok) {
savedAuth = auth;
return Promise.resolve();
} else savedAuth = null;
return Promise.reject(
new HttpError("Unauthorized", 401, {
message: "Invalid username or password",
}),
);
},
async logout() {
savedAuth = null;
},
async checkError() {},
async checkAuth() {
return savedAuth ? Promise.resolve() : Promise.reject();
},
async getPermissions() {},
async getIdentity() {
return { id: "admin", fullName: "admin" };
},
};
export default authProvider;


@ -0,0 +1,178 @@
import * as React from "react";
import {
useMediaQuery,
Theme,
IconButton,
Card,
CardContent,
Stack,
Dialog,
DialogContent,
DialogTitle,
useTheme,
DialogActions,
Button,
} from "@mui/material";
import { OpenInNew, Visibility } from "@mui/icons-material";
import {
AutocompleteArrayInput,
BulkDeleteWithConfirmButton,
Datagrid,
DateField,
FilterList,
FilterListItem,
FilterLiveSearch,
List,
NumberField,
Pagination,
SearchInput,
SimpleList,
TextField,
useRecordContext,
} from "react-admin";
import dayjs from "dayjs";
import mime from "mime-types";
import { truncateHash } from "../helpers/string";
import BlobPreview, { canPreview } from "./BlobPreview";
const UserBulkActionButtons = (props: any) => <BulkDeleteWithConfirmButton {...props} />;
function PreviewButton() {
const theme = useTheme();
const record = useRecordContext();
const [open, setOpen] = React.useState(false);
const fullScreen = useMediaQuery(theme.breakpoints.down("md"));
return (
<>
<IconButton
aria-label="preview"
onClick={(e) => {
e.stopPropagation();
setOpen(true);
}}
size="small"
>
<Visibility />
</IconButton>
<Dialog
maxWidth="lg"
open={open}
onClick={(e) => e.stopPropagation()}
fullScreen={fullScreen}
onClose={() => setOpen(false)}
>
<DialogTitle>Preview</DialogTitle>
<DialogContent>
{/* @ts-expect-error */}
<BlobPreview blob={record} />
</DialogContent>
<DialogActions>
<Button onClick={() => setOpen(false)}>Close</Button>
</DialogActions>
</Dialog>
</>
);
}
function RowActions() {
const record = useRecordContext();
return (
<>
<Stack direction="row" useFlexGap spacing={1} justifyContent="flex-end">
{canPreview(record as any) && <PreviewButton />}
<IconButton
aria-label="delete"
href={record.url}
target="_blank"
size="small"
onClick={(e) => e.stopPropagation()}
>
<OpenInNew />
</IconButton>
</Stack>
</>
);
}
function SideBar() {
return (
<Card sx={{ order: -1, mr: 2, mt: 9, width: 200 }}>
<CardContent>
<FilterLiveSearch />
<FilterList label="Image" icon={null}>
<FilterListItem label=".png" value={{ type: ["image/png"] }} />
<FilterListItem label=".jpg" value={{ type: ["image/jpeg"] }} />
<FilterListItem label=".gif" value={{ type: ["image/gif"] }} />
<FilterListItem label=".svg" value={{ type: ["image/svg+xml"] }} />
<FilterListItem label=".bmp" value={{ type: ["image/bmp"] }} />
<FilterListItem label=".psd" value={{ type: ["image/vnd.adobe.photoshop"] }} />
</FilterList>
<FilterList label="Audio" icon={null}>
<FilterListItem label=".mp3" value={{ type: "audio/mpeg" }} />
<FilterListItem label=".m4a" value={{ type: "audio/mp4" }} />
<FilterListItem label=".flac" value={{ type: "audio/x-flac" }} />
<FilterListItem label=".ogg" value={{ type: "audio/ogg" }} />
<FilterListItem label=".wav" value={{ type: "audio/wav" }} />
<FilterListItem label=".mid" value={{ type: "audio/midi" }} />
</FilterList>
<FilterList label="Video" icon={null}>
<FilterListItem label=".mp4" value={{ type: "video/mp4" }} />
<FilterListItem label=".webm" value={{ type: "video/webm" }} />
<FilterListItem label=".avi" value={{ type: "video/x-msvideo" }} />
<FilterListItem label=".mov" value={{ type: "video/quicktime" }} />
</FilterList>
<FilterList label="Text" icon={null}>
<FilterListItem label=".txt" value={{ type: "text/plain" }} />
<FilterListItem label=".html" value={{ type: "text/html" }} />
</FilterList>
<FilterList label="Application" icon={null}>
<FilterListItem label=".pdf" value={{ type: "application/pdf" }} />
<FilterListItem label=".xml" value={{ type: "application/xml" }} />
<FilterListItem label=".doc" value={{ type: ["application/msword"] }} />
<FilterListItem label=".srt" value={{ type: ["application/x-subrip"] }} />
<FilterListItem label=".zip" value={{ type: "application/zip" }} />
<FilterListItem label=".tar" value={{ type: "application/x-tar" }} />
<FilterListItem label=".bin" value={{ type: "application/octet-stream" }} />
</FilterList>
<FilterList label="Model" icon={null}>
<FilterListItem label=".obj" value={{ type: "model/obj" }} />
<FilterListItem label=".stl" value={{ type: "model/stl" }} />
</FilterList>
</CardContent>
</Card>
);
}
export default function BlobList() {
return (
<List
filters={[
<SearchInput source="q" alwaysOn />,
<AutocompleteArrayInput
source="type"
choices={Object.keys(mime.extensions).map((type) => ({ id: type, name: type }))}
/>,
]}
filterDefaultValues={{}}
sort={{ field: "uploaded", order: "ASC" }}
pagination={<Pagination rowsPerPageOptions={[10, 25, 50, 100]} />}
aside={<SideBar />}
>
{useMediaQuery((theme: Theme) => theme.breakpoints.down("md")) ? (
<SimpleList primaryText={(record) => truncateHash(record.sha256)} secondaryText={(record) => record.type} />
) : (
<Datagrid rowClick="show" bulkActionButtons={<UserBulkActionButtons />} optimized>
<TextField source="sha256" sortable={false} />
<TextField source="type" />
<NumberField source="size" />
<DateField source="uploaded" transform={(unix: number) => dayjs.unix(unix).toDate()} showTime />
<RowActions />
</Datagrid>
)}
</List>
);
}


@ -0,0 +1,17 @@
import { Typography } from "@mui/material";
export function canPreview(blob: { type: string }) {
return blob.type.startsWith("image/") || blob.type.startsWith("video/") || blob.type.startsWith("audio/");
}
export default function BlobPreview({ blob }: { blob: { type: string; url: string } }) {
if (blob.type.startsWith("image/")) {
return <img src={blob.url} />;
} else if (blob.type.startsWith("video/")) {
return <video src={blob.url} controls />;
} else if (blob.type.startsWith("audio/")) {
return <audio src={blob.url} controls style={{ minWidth: "30rem" }} />;
}
return <Typography>No preview available</Typography>;
}


@ -0,0 +1,57 @@
import { Button, Stack, Typography } from "@mui/material";
import { DeleteButton, NumberField, Show, TabbedShowLayout, TextField, useShowController } from "react-admin";
import BlobPreview, { canPreview } from "./BlobPreview";
type BlobShape = {
sha256: string;
type: string;
size: number;
uploaded: number;
owners: string[];
id: string;
url: string;
};
function PreviewContent() {
const { record } = useShowController<BlobShape>();
if (!record) return;
return canPreview(record) ? <BlobPreview blob={record} /> : <Typography>No preview available</Typography>;
}
function Page() {
const { record } = useShowController<BlobShape>();
if (!record) return;
return (
<TabbedShowLayout>
<TabbedShowLayout.Tab label="Details">
<TextField source="sha256" />
<TextField source="type" />
<NumberField source="size" />
<Stack useFlexGap direction="row" spacing={2}>
<Button href={record.url} target="_blank" size="small">
Open
</Button>
<DeleteButton />
</Stack>
</TabbedShowLayout.Tab>
<TabbedShowLayout.Tab label="Preview">
<PreviewContent />
</TabbedShowLayout.Tab>
<TabbedShowLayout.Tab label="Raw">
<pre>
<code>{JSON.stringify(record, null, 2)}</code>
</pre>
</TabbedShowLayout.Tab>
</TabbedShowLayout>
);
}
export default function ShowBlob() {
return (
<Show>
<Page />
</Show>
);
}

4
admin/src/blobs/index.ts Normal file

@ -0,0 +1,4 @@
import BlobList from "./BlobList";
import ShowBlob from "./ShowBlob";
export { BlobList as list, ShowBlob as show };

17
admin/src/dataProvider.ts Normal file

@ -0,0 +1,17 @@
import simpleRestProvider from "ra-data-simple-rest";
import { fetchUtils } from "react-admin";
import { getAuthHeaders } from "./authProvider";
import { API_BASE } from "./env";
console.log("Connecting to", API_BASE);
export const dataProvider = simpleRestProvider(API_BASE, (url, opts) =>
fetchUtils.fetchJson(url, {
...opts,
headers: new Headers({
...opts?.headers,
...getAuthHeaders(),
Accept: "application/json",
}),
}),
);

2
admin/src/env.ts Normal file

@ -0,0 +1,2 @@
// @ts-expect-error
export const API_BASE = import.meta.env.VITE_API_URL ?? "/api";


@ -0,0 +1,3 @@
export function truncateHash(hash: string) {
return hash.slice(0, 4) + "…" + hash.slice(-4, hash.length);
}

9
admin/src/index.tsx Normal file

@ -0,0 +1,9 @@
import React from "react";
import ReactDOM from "react-dom/client";
import { App } from "./App";
ReactDOM.createRoot(document.getElementById("root")!).render(
<React.StrictMode>
<App />
</React.StrictMode>,
);


@ -0,0 +1,22 @@
import * as React from "react";
import { useMediaQuery, Theme } from "@mui/material";
import { ArrayField, Datagrid, List, SimpleList, TextField } from "react-admin";
export default function RuleList() {
return (
<List>
{useMediaQuery((theme: Theme) => theme.breakpoints.down("md")) ? (
<SimpleList primaryText={(record) => record.name} secondaryText={(record) => record.type} />
) : (
<Datagrid sort={{ field: "id", order: "ASC" }} optimized bulkActionButtons={<></>}>
<TextField source="id" />
<TextField source="type" sortable={false} />
<TextField source="expiration" sortable={false} />
<ArrayField source="pubkeys">
<SimpleList primaryText={(p) => p} />
</ArrayField>
</Datagrid>
)}
</List>
);
}

3
admin/src/rules/index.ts Normal file

@ -0,0 +1,3 @@
import RuleList from "./RuleList";
export { RuleList as list };


@ -0,0 +1,39 @@
import * as React from "react";
import { useMediaQuery, Theme, Avatar, Stack, Box, Typography } from "@mui/material";
import { Datagrid, List, SimpleList, TextField, useRecordContext } from "react-admin";
function NumberOfBlobs() {
const record = useRecordContext();
return <p>{record.blobs.length} blobs</p>;
}
function UserProfile() {
const record = useRecordContext();
if (!record.profile) return null;
return (
<Stack direction="row" spacing={2} useFlexGap>
<Avatar src={record.profile.image} />
<Box>
<Typography fontWeight="bold" margin={0}>
{record.profile.name}
</Typography>
<Typography margin={0}>{record.profile.nip05}</Typography>
</Box>
</Stack>
);
}
export default function UserList() {
return (
<List sort={{ field: "pubkey", order: "ASC" }}>
{useMediaQuery((theme: Theme) => theme.breakpoints.down("md")) ? (
<SimpleList primaryText={(record) => record.pubkey} />
) : (
<Datagrid optimized bulkActionButtons={<></>}>
<UserProfile />
<TextField source="pubkey" />
<NumberOfBlobs />
</Datagrid>
)}
</List>
);
}

3
admin/src/users/index.ts Normal file

@ -0,0 +1,3 @@
import UserList from "./UserList";
export { UserList as list };

1
admin/src/vite-env.d.ts vendored Normal file

@ -0,0 +1 @@
/// <reference types="vite/client" />

19
admin/tsconfig.json Normal file

@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["dom", "dom.iterable", "esnext"],
"skipLibCheck": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"module": "ESNext",
"moduleResolution": "Bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": ["src"]
}

8
admin/vite.config.ts Normal file

@ -0,0 +1,8 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
// https://vitejs.dev/config/
export default defineConfig({
base: "/admin",
plugins: [react()],
});

89
config.example.yml Normal file

@ -0,0 +1,89 @@
# Override the domain that is used in the blobs "url" field
# By default it uses the domain the incoming HTTP request was made on
publicDomain: ""
databasePath: data/sqlite.db
dashboard:
# enable or disable the admin dashboard
enabled: true
# admin username
username: admin
# if password is left blank it will be generated each time the app starts
# password: ""
discovery:
# find files by querying nostr relays
nostr:
enabled: true
relays:
- wss://nostrue.com
- wss://relay.damus.io
- wss://nostr.wine
- wss://nos.lol
- wss://nostr-pub.wellorder.net
# find files by asking upstream CDNs
# NOTE: do not set this to your own server, it will create an infinite loop
upstream:
enabled: true
domains:
- https://cdn.satellite.earth
storage:
# local or s3
backend: local
# Immediately removes a blob when there are no owners
removeWhenNoOwners: false
# local storage
local:
dir: ./data/blobs
# see minio docs for options:
# https://min.io/docs/minio/linux/developers/javascript/API.html#new-minio-client-endpoint-port-usessl-accesskey-secretkey-region-transport-sessiontoken-partsize
# s3:
# endpoint: https://s3.endpoint.com
# port: 443
# bucket: blossom
# accessKey: xxxxxxxx
# secretKey: xxxxxxxxx
# useSSL: true
# region: us-east-1
# If this is set the server will redirect clients when loading blobs
# publicURL: https://s3.region.example.com/
# rules are checked in descending order. if a blob matches a rule it is kept
# "type" (required) the type of the blob, "*" can be used to match any type
# "expiration" (required) time passed since last accessed
# "pubkeys" (optional) a list of owners
# any blobs not matching the rules will be removed
rules:
# mime type of blob
- type: text/*
# time since last accessed
expiration: 1 month
- type: "image/*"
expiration: 1 week
- type: "video/*"
expiration: 5 days
- type: "model/*"
expiration: 1 week
- type: "*"
expiration: 2 days
upload:
# enable / disable uploads
enabled: true
# require auth to upload
requireAuth: true
# only check rules that include "pubkeys"
requirePubkeyInRule: false
list:
requireAuth: false
allowListOthers: true
tor:
enabled: false
proxy: ""

21
docker-compose.yml Normal file

@ -0,0 +1,21 @@
version: "3.7"
volumes:
data: {}
services:
blossom:
image: ghcr.io/hzrd149/blossom-server:master
build: .
ports:
- 3000:3000
# enable debug logging
# environment:
# DEBUG: "*"
volumes:
# mount data volume
- data:/app/data
# mount config file
- ./config.yml:/app/config.yml
# mount custom www dir
- ./public:/app/public

75
package.json Normal file

@ -0,0 +1,75 @@
{
"name": "blossom-server-ts",
"version": "4.4.0",
"description": "A blossom server implementation written in Typescript",
"main": "build/index.js",
"type": "module",
"author": "hzrd149",
"license": "MIT",
"scripts": {
"start": "node build/index.js",
"build": "tsc",
"postbuild": "cd admin && pnpm build",
"dev": "nodemon -i '**/data/**' -i '**/database.json' -i '**/database.old.json' --exec 'node' --loader @swc-node/register/esm src/index.ts",
"format": "prettier -w ."
},
"bin": "build/index.js",
"files": [
"build",
"public",
"admin/dist"
],
"dependencies": {
"@koa/cors": "^5.0.0",
"@koa/router": "^13.1.0",
"@nostr-dev-kit/ndk": "^2.10.0",
"better-sqlite3": "^11.3.0",
"blossom-server-sdk": "^0.7.2",
"dayjs": "^1.11.13",
"debug": "^4.3.7",
"file-type": "^19.5.0",
"follow-redirects": "^1.15.9",
"generate-password": "^1.7.1",
"http-errors": "1",
"koa": "^2.15.3",
"koa-basic-auth": "^4.0.0",
"koa-body": "^6.0.1",
"koa-mount": "^4.0.0",
"koa-static": "^5.0.0",
"lilconfig": "^3.1.2",
"mime": "^4.0.4",
"minio": "^8.0.1",
"mkdirp": "^3.0.1",
"nanoid": "^5.0.7",
"nostr-tools": "^2.7.2",
"socks-proxy-agent": "^8.0.4",
"websocket-polyfill": "^1.0.0",
"ws": "^8.18.0",
"yaml": "^2.5.1"
},
"devDependencies": {
"@changesets/cli": "^2.27.1",
"@swc-node/register": "^1.9.0",
"@swc/core": "^1.7.26",
"@swc/types": "^0.1.12",
"@types/better-sqlite3": "^7.6.11",
"@types/debug": "^4.1.12",
"@types/follow-redirects": "^1.14.4",
"@types/http-errors": "^2.0.4",
"@types/koa": "^2.15.0",
"@types/koa-basic-auth": "^2.0.6",
"@types/koa-mount": "^4.0.5",
"@types/koa-static": "^4.0.4",
"@types/koa__cors": "^5.0.0",
"@types/koa__router": "^12.0.4",
"@types/node": "^20.11.19",
"@types/ws": "^8.5.12",
"nodemon": "^3.1.5",
"prettier": "^3.3.3",
"typescript": "^5.6.2"
},
"resolutions": {
"websocket-polyfill": "1.0.0"
},
"packageManager": "pnpm@9.12.0"
}

3224
pnpm-lock.yaml Normal file

File diff suppressed because it is too large

BIN
public/favicon.ico Normal file

Binary file not shown (1.0 KiB).

25
public/index.html Normal file

@ -0,0 +1,25 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link href="./lib/tailwind.min.css" rel="stylesheet" />
<title>Blossom Server</title>
<script type="module" src="./main.js"></script>
</head>
<body>
<blossom-app></blossom-app>
<script>
window.wnjParams = {
position: "top",
// Supported values: cyan (default), green, purple, red, orange, neutral, stone
accent: "purple",
compactMode: true,
disableOverflowFix: true,
};
</script>
<script src="./lib/window.nostr.js"></script>
</body>
</html>


@ -0,0 +1,42 @@
function number(n) {
if (!Number.isSafeInteger(n) || n < 0)
throw new Error(`positive integer expected, not ${n}`);
}
function bool(b) {
if (typeof b !== 'boolean')
throw new Error(`boolean expected, not ${b}`);
}
// copied from utils
export function isBytes(a) {
return (a instanceof Uint8Array ||
(a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
}
function bytes(b, ...lengths) {
if (!isBytes(b))
throw new Error('Uint8Array expected');
if (lengths.length > 0 && !lengths.includes(b.length))
throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`);
}
function hash(h) {
if (typeof h !== 'function' || typeof h.create !== 'function')
throw new Error('Hash should be wrapped by utils.wrapConstructor');
number(h.outputLen);
number(h.blockLen);
}
function exists(instance, checkFinished = true) {
if (instance.destroyed)
throw new Error('Hash instance has been destroyed');
if (checkFinished && instance.finished)
throw new Error('Hash#digest() has already been called');
}
function output(out, instance) {
bytes(out);
const min = instance.outputLen;
if (out.length < min) {
throw new Error(`digestInto() expects output buffer of length at least ${min}`);
}
}
export { number, bool, bytes, hash, exists, output };
const assert = { number, bool, bytes, hash, exists, output };
export default assert;
//# sourceMappingURL=_assert.js.map


@ -0,0 +1,122 @@
import { exists, output } from './_assert.js';
import { Hash, createView, toBytes } from './utils.js';
// Polyfill for Safari 14
function setBigUint64(view, byteOffset, value, isLE) {
if (typeof view.setBigUint64 === 'function')
return view.setBigUint64(byteOffset, value, isLE);
const _32n = BigInt(32);
const _u32_max = BigInt(0xffffffff);
const wh = Number((value >> _32n) & _u32_max);
const wl = Number(value & _u32_max);
const h = isLE ? 4 : 0;
const l = isLE ? 0 : 4;
view.setUint32(byteOffset + h, wh, isLE);
view.setUint32(byteOffset + l, wl, isLE);
}
// Choice: a ? b : c
export const Chi = (a, b, c) => (a & b) ^ (~a & c);
// Majority function, true if any two inputs are true
export const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);
/**
* Merkle-Damgard hash construction base class.
* Could be used to create MD5, RIPEMD, SHA1, SHA2.
*/
export class HashMD extends Hash {
constructor(blockLen, outputLen, padOffset, isLE) {
super();
this.blockLen = blockLen;
this.outputLen = outputLen;
this.padOffset = padOffset;
this.isLE = isLE;
this.finished = false;
this.length = 0;
this.pos = 0;
this.destroyed = false;
this.buffer = new Uint8Array(blockLen);
this.view = createView(this.buffer);
}
update(data) {
exists(this);
const { view, buffer, blockLen } = this;
data = toBytes(data);
const len = data.length;
for (let pos = 0; pos < len;) {
const take = Math.min(blockLen - this.pos, len - pos);
// Fast path: we have at least one block in input, cast it to view and process
if (take === blockLen) {
const dataView = createView(data);
for (; blockLen <= len - pos; pos += blockLen)
this.process(dataView, pos);
continue;
}
buffer.set(data.subarray(pos, pos + take), this.pos);
this.pos += take;
pos += take;
if (this.pos === blockLen) {
this.process(view, 0);
this.pos = 0;
}
}
this.length += data.length;
this.roundClean();
return this;
}
digestInto(out) {
exists(this);
output(out, this);
this.finished = true;
// Padding
// We can avoid allocation of buffer for padding completely if it
// was previously not allocated here. But it won't change performance.
const { buffer, view, blockLen, isLE } = this;
let { pos } = this;
// append the bit '1' to the message
buffer[pos++] = 0b10000000;
this.buffer.subarray(pos).fill(0);
// we have less than padOffset left in buffer, so we cannot put length in
// current block, need process it and pad again
if (this.padOffset > blockLen - pos) {
this.process(view, 0);
pos = 0;
}
// Pad until full block byte with zeros
for (let i = pos; i < blockLen; i++)
buffer[i] = 0;
// Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that
// You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen.
// So we just write lowest 64 bits of that value.
setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE);
this.process(view, 0);
const oview = createView(out);
const len = this.outputLen;
// NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT
if (len % 4)
throw new Error('_sha2: outputLen should be aligned to 32bit');
const outLen = len / 4;
const state = this.get();
if (outLen > state.length)
throw new Error('_sha2: outputLen bigger than state');
for (let i = 0; i < outLen; i++)
oview.setUint32(4 * i, state[i], isLE);
}
digest() {
const { buffer, outputLen } = this;
this.digestInto(buffer);
const res = buffer.slice(0, outputLen);
this.destroy();
return res;
}
_cloneInto(to) {
to || (to = new this.constructor());
to.set(...this.get());
const { blockLen, buffer, length, finished, destroyed, pos } = this;
to.length = length;
to.pos = pos;
to.finished = finished;
to.destroyed = destroyed;
if (length % blockLen)
to.buffer.set(buffer);
return to;
}
}
//# sourceMappingURL=_md.js.map


@ -0,0 +1 @@
export const crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined;


@ -0,0 +1,123 @@
import { HashMD, Chi, Maj } from './_md.js';
import { rotr, wrapConstructor } from './utils.js';
// SHA2-256 need to try 2^128 hashes to execute birthday attack.
// BTC network is doing 2^67 hashes/sec as per early 2023.
// Round constants:
// first 32 bits of the fractional parts of the cube roots of the first 64 primes (2..311)
// prettier-ignore
const SHA256_K = /* @__PURE__ */ new Uint32Array([
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
]);
// Initial state:
// first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19
// prettier-ignore
const SHA256_IV = /* @__PURE__ */ new Uint32Array([
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
]);
// Temporary buffer, not used to store anything between runs
// Named this way because it matches specification.
const SHA256_W = /* @__PURE__ */ new Uint32Array(64);
class SHA256 extends HashMD {
constructor() {
super(64, 32, 8, false);
// We cannot use array here since array allows indexing by variable
// which means optimizer/compiler cannot use registers.
this.A = SHA256_IV[0] | 0;
this.B = SHA256_IV[1] | 0;
this.C = SHA256_IV[2] | 0;
this.D = SHA256_IV[3] | 0;
this.E = SHA256_IV[4] | 0;
this.F = SHA256_IV[5] | 0;
this.G = SHA256_IV[6] | 0;
this.H = SHA256_IV[7] | 0;
}
get() {
const { A, B, C, D, E, F, G, H } = this;
return [A, B, C, D, E, F, G, H];
}
// prettier-ignore
set(A, B, C, D, E, F, G, H) {
this.A = A | 0;
this.B = B | 0;
this.C = C | 0;
this.D = D | 0;
this.E = E | 0;
this.F = F | 0;
this.G = G | 0;
this.H = H | 0;
}
process(view, offset) {
// Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array
for (let i = 0; i < 16; i++, offset += 4)
SHA256_W[i] = view.getUint32(offset, false);
for (let i = 16; i < 64; i++) {
const W15 = SHA256_W[i - 15];
const W2 = SHA256_W[i - 2];
const s0 = rotr(W15, 7) ^ rotr(W15, 18) ^ (W15 >>> 3);
const s1 = rotr(W2, 17) ^ rotr(W2, 19) ^ (W2 >>> 10);
SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0;
}
// Compression function main loop, 64 rounds
let { A, B, C, D, E, F, G, H } = this;
for (let i = 0; i < 64; i++) {
const sigma1 = rotr(E, 6) ^ rotr(E, 11) ^ rotr(E, 25);
const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0;
const sigma0 = rotr(A, 2) ^ rotr(A, 13) ^ rotr(A, 22);
const T2 = (sigma0 + Maj(A, B, C)) | 0;
H = G;
G = F;
F = E;
E = (D + T1) | 0;
D = C;
C = B;
B = A;
A = (T1 + T2) | 0;
}
// Add the compressed chunk to the current hash value
A = (A + this.A) | 0;
B = (B + this.B) | 0;
C = (C + this.C) | 0;
D = (D + this.D) | 0;
E = (E + this.E) | 0;
F = (F + this.F) | 0;
G = (G + this.G) | 0;
H = (H + this.H) | 0;
this.set(A, B, C, D, E, F, G, H);
}
roundClean() {
SHA256_W.fill(0);
}
destroy() {
this.set(0, 0, 0, 0, 0, 0, 0, 0);
this.buffer.fill(0);
}
}
// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
class SHA224 extends SHA256 {
constructor() {
super();
this.A = 0xc1059ed8 | 0;
this.B = 0x367cd507 | 0;
this.C = 0x3070dd17 | 0;
this.D = 0xf70e5939 | 0;
this.E = 0xffc00b31 | 0;
this.F = 0x68581511 | 0;
this.G = 0x64f98fa7 | 0;
this.H = 0xbefa4fa4 | 0;
this.outputLen = 28;
}
}
/**
* SHA2-256 hash function
* @param message - data that would be hashed
*/
export const sha256 = /* @__PURE__ */ wrapConstructor(() => new SHA256());
export const sha224 = /* @__PURE__ */ wrapConstructor(() => new SHA224());
//# sourceMappingURL=sha256.js.map


@ -0,0 +1,187 @@
/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+.
// node.js versions earlier than v19 don't declare it in global scope.
// For node.js, package.json#exports field mapping rewrites import
// from `crypto` to `cryptoNode`, which imports native module.
// Makes the utils un-importable in browsers without a bundler.
// Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
import { crypto } from './crypto.js';
import { bytes as abytes } from './_assert.js';
// export { isBytes } from './_assert.js';
// We can't reuse isBytes from _assert, because somehow this causes huge perf issues
export function isBytes(a) {
return (a instanceof Uint8Array ||
(a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
}
// Cast array to different type
export const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength);
export const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4));
// Cast array to view
export const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
// The rotate right (circular right shift) operation for uint32
export const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift);
// The rotate left (circular left shift) operation for uint32
export const rotl = (word, shift) => (word << shift) | ((word >>> (32 - shift)) >>> 0);
export const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
// The byte swap operation for uint32
export const byteSwap = (word) => ((word << 24) & 0xff000000) |
((word << 8) & 0xff0000) |
((word >>> 8) & 0xff00) |
((word >>> 24) & 0xff);
// Conditionally byte swap if on a big-endian platform
export const byteSwapIfBE = isLE ? (n) => n : (n) => byteSwap(n);
// In place byte swap for Uint32Array
export function byteSwap32(arr) {
for (let i = 0; i < arr.length; i++) {
arr[i] = byteSwap(arr[i]);
}
}
// Array where index 0xf0 (240) is mapped to string 'f0'
const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0'));
/**
* @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123'
*/
export function bytesToHex(bytes) {
abytes(bytes);
// pre-caching improves the speed 6x
let hex = '';
for (let i = 0; i < bytes.length; i++) {
hex += hexes[bytes[i]];
}
return hex;
}
// We use optimized technique to convert hex string to byte array
const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 };
function asciiToBase16(char) {
if (char >= asciis._0 && char <= asciis._9)
return char - asciis._0;
if (char >= asciis._A && char <= asciis._F)
return char - (asciis._A - 10);
if (char >= asciis._a && char <= asciis._f)
return char - (asciis._a - 10);
return;
}
/**
* @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23])
*/
export function hexToBytes(hex) {
if (typeof hex !== 'string')
throw new Error('hex string expected, got ' + typeof hex);
const hl = hex.length;
const al = hl / 2;
if (hl % 2)
throw new Error('padded hex string expected, got unpadded hex of length ' + hl);
const array = new Uint8Array(al);
for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) {
const n1 = asciiToBase16(hex.charCodeAt(hi));
const n2 = asciiToBase16(hex.charCodeAt(hi + 1));
if (n1 === undefined || n2 === undefined) {
const char = hex[hi] + hex[hi + 1];
throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi);
}
array[ai] = n1 * 16 + n2;
}
return array;
}
// There is no setImmediate in browser and setTimeout is slow.
// a call of an async fn will return a Promise, which will be fulfilled only on the
// next scheduler queue processing step, and this is exactly what we need.
export const nextTick = async () => { };
// Returns control to thread each 'tick' ms to avoid blocking
export async function asyncLoop(iters, tick, cb) {
let ts = Date.now();
for (let i = 0; i < iters; i++) {
cb(i);
// Date.now() is not monotonic, so in case the clock goes backwards we return control too
const diff = Date.now() - ts;
if (diff >= 0 && diff < tick)
continue;
await nextTick();
ts += diff;
}
}
/**
* @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99])
*/
export function utf8ToBytes(str) {
if (typeof str !== 'string')
throw new Error(`utf8ToBytes expected string, got ${typeof str}`);
return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809
}
/**
* Normalizes (non-hex) string or Uint8Array to Uint8Array.
* Warning: when Uint8Array is passed, it would NOT get copied.
* Keep in mind for future mutable operations.
*/
export function toBytes(data) {
if (typeof data === 'string')
data = utf8ToBytes(data);
abytes(data);
return data;
}
/**
* Copies several Uint8Arrays into one.
*/
export function concatBytes(...arrays) {
let sum = 0;
for (let i = 0; i < arrays.length; i++) {
const a = arrays[i];
abytes(a);
sum += a.length;
}
const res = new Uint8Array(sum);
for (let i = 0, pad = 0; i < arrays.length; i++) {
const a = arrays[i];
res.set(a, pad);
pad += a.length;
}
return res;
}
// For runtime check if class implements interface
export class Hash {
// Safe version that clones internal state
clone() {
return this._cloneInto();
}
}
const toStr = {}.toString;
export function checkOpts(defaults, opts) {
if (opts !== undefined && toStr.call(opts) !== '[object Object]')
throw new Error('Options should be object or undefined');
const merged = Object.assign(defaults, opts);
return merged;
}
export function wrapConstructor(hashCons) {
const hashC = (msg) => hashCons().update(toBytes(msg)).digest();
const tmp = hashCons();
hashC.outputLen = tmp.outputLen;
hashC.blockLen = tmp.blockLen;
hashC.create = () => hashCons();
return hashC;
}
export function wrapConstructorWithOpts(hashCons) {
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest();
const tmp = hashCons({});
hashC.outputLen = tmp.outputLen;
hashC.blockLen = tmp.blockLen;
hashC.create = (opts) => hashCons(opts);
return hashC;
}
export function wrapXOFConstructorWithOpts(hashCons) {
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest();
const tmp = hashCons({});
hashC.outputLen = tmp.outputLen;
hashC.blockLen = tmp.blockLen;
hashC.create = (opts) => hashCons(opts);
return hashC;
}
/**
* Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS.
*/
export function randomBytes(bytesLength = 32) {
if (crypto && typeof crypto.getRandomValues === 'function') {
return crypto.getRandomValues(new Uint8Array(bytesLength));
}
throw new Error('crypto.getRandomValues must be defined');
}
//# sourceMappingURL=utils.js.map
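
As a quick usage sketch of the vendored hashing helpers above (the relative paths assume the modules sit next to each other, as their own imports do):

```ts
// Usage sketch for the vendored noble-hashes modules shown above.
import { sha256 } from "./sha256.js";
import { bytesToHex, utf8ToBytes, randomBytes } from "./utils.js";

// Hash a UTF-8 string and print the 64-character hex digest
const digest = sha256(utf8ToBytes("hello blossom"));
console.log(bytesToHex(digest));

// randomBytes defers to crypto.getRandomValues
console.log(bytesToHex(randomBytes(16)));
```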

29
public/lib/lit.min.js vendored Normal file

File diff suppressed because one or more lines are too long

1
public/lib/tailwind.min.css vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

181
public/list-blobs.js Normal file

@ -0,0 +1,181 @@
import { html, LitElement } from "./lib/lit.min.js";
import { unixNow, newExpirationValue, formatBytes } from "./utils.js";
export class ListForm extends LitElement {
static properties = {
pubkey: { state: true },
blobs: { state: true },
status: { state: true, type: String },
};
createRenderRoot() {
return this;
}
_auth = null;
async submit(e) {
e.preventDefault();
const formData = new FormData(e.target);
this.pubkey = formData.get("pubkey") || (await window.nostr?.getPublicKey());
if (!this.pubkey) return;
this.status = "Signing...";
this._auth = await window.nostr.signEvent({
kind: 24242,
content: "List Blobs",
created_at: unixNow(),
tags: [
["t", "list"],
["expiration", newExpirationValue()],
["server", new URL(location.protocol + "//" + location.hostname).toString()],
],
});
this.status = "Fetching...";
this.blobs = await fetch("/list/" + this.pubkey, {
headers: { authorization: "Nostr " + btoa(JSON.stringify(this._auth)) },
}).then((res) => res.json());
this.status = undefined;
}
async refresh() {
this.blobs = await fetch("/list/" + this.pubkey, {
headers: { authorization: "Nostr " + btoa(JSON.stringify(this._auth)) },
}).then((res) => res.json());
}
inputChange(e) {
this.pubkey = e.target.files[0];
}
async pubkeyFromExtension(e) {
e.preventDefault();
this.pubkey = await window.nostr.getPublicKey();
}
async deleteBlob(blob) {
const auth = await window.nostr.signEvent({
kind: 24242,
content: "Delete Item",
created_at: unixNow(),
tags: [
["t", "delete"],
["x", blob.sha256],
["expiration", newExpirationValue()],
],
});
await fetch("/" + blob.sha256, {
method: "DELETE",
headers: { authorization: "Nostr " + btoa(JSON.stringify(auth)) },
}).then(async (res) => {
if (res.ok) {
alert("Blob deleted");
await this.refresh();
} else alert(await res.text());
});
}
renderResults() {
return html`<table class="table-auto overflow-hidden">
<thead>
<tr>
<th class="px-2">sha256</th>
<th class="px-2 text-right">Type</th>
<th class="px-2 text-right">Size</th>
<th class="px-2"></th>
</tr>
</thead>
<tbody class="overflow-auto">
${this.blobs.map(
(blob) => html`
<tr class="whitespace-nowrap">
<td class="px-2 overflow-hidden truncate font-mono" style="max-width: 10em">
<a href=${blob.url} class="hover:underline" target="_blank">${blob.sha256}</a>
</td>
<td class="px-2 text-right" style="max-width: 5em">${blob.type}</td>
<td class="px-2 text-right">${formatBytes(blob.size)}</td>
<td class="px-2 text-right">
<a
href="#"
class="text-red-500 hover:underline"
@click="${(e) => {
e.preventDefault();
this.deleteBlob(blob);
}}"
>
Delete
</a>
</td>
</tr>
`,
)}
</tbody>
</table>`;
}
renderForm() {
return html`<form class="space-y-4 ml-auto" @submit="${this.submit}">
<label class="text-sm font-bold text-gray-500 tracking-wide flex flex-col">
<div class="flex gap-2">
<input
name="pubkey"
type="text"
class="rounded-md border-2 w-full p-2 h-10 min-w-20"
@change="${this.inputChange}"
.value="${this.pubkey || ""}"
placeholder="Pubkey"
style="min-width: 18rem;"
/>
<button
type="submit"
class="flex bg-blue-500 text-gray-100 py-2 px-4 rounded-md tracking-wide font-semibold hover:bg-blue-600 cursor-pointer transition ease-in duration-300"
>
${this.status || "List"}
</button>
</div>
${window.nostr &&
html`<a href="#" class="text-blue-500 ml-auto" @click="${this.pubkeyFromExtension}">From Extension</a>`}
</label>
</form>`;
}
renderContent() {
if (this.status) {
return html`<p class="my-5 text-center text-lg">${this.status}</p>`;
} else if (this.blobs) {
if (this.blobs.length === 0) return html`<p class="text-gray-500 text-md text-center p-10">No blobs found</p>`;
return this.renderResults();
}
return html`<p class="text-gray-500 text-md text-center p-10">Set pubkey</p>`;
}
render() {
if (this.status) {
return html`<p class="my-5 text-center text-lg">${this.status}</p>`;
}
return html`<div class="w-full p-4 bg-white rounded-xl flex flex-col overflow-hidden">
<div class="flex gap-4 w-full items-flex-start flex-wrap">
<h1 class="text-xl">List blobs</h1>
${this.renderForm()}
</div>
${this.renderContent()}
<div class="flex mt-4 text-sm text-blue-400">
<a href="#">back to upload</a>
<a class="ml-auto" href="https://github.com/hzrd149/blossom">🌸 Blossom Spec</a>
</div>
</div>`;
}
}
customElements.define("list-blobs", ListForm);

49
public/main.js Normal file
View File

@ -0,0 +1,49 @@
import { html, LitElement } from "./lib/lit.min.js";
import "./upload-form.js";
import "./list-blobs.js";
import "./mirror-blobs.js";
export class BlossomApp extends LitElement {
static properties = {
selected: { state: true },
status: { state: true, type: String },
};
createRenderRoot() {
return this;
}
connectedCallback() {
super.connectedCallback();
window.addEventListener("hashchange", () => {
this.requestUpdate();
});
}
render() {
const hash = location.hash;
let content = "";
switch (hash) {
case "#list":
content = html`<list-blobs class="z-10 sm:max-w-4xl w-full"></list-blobs>`;
break;
case "#mirror":
content = html`<mirror-blobs class="z-10 sm:max-w-4xl w-full"></mirror-blobs>`;
break;
case "#upload":
default:
content = html`<upload-form class="z-10 sm:max-w-lg w-full"></upload-form>`;
break;
}
return html` <div
class="relative min-h-screen flex items-center justify-center bg-gray-50 py-12 px-4 sm:px-6 lg:px-8 bg-gray-500 bg-no-repeat bg-cover relative items-center"
>
<div class="absolute bg-black opacity-60 inset-0 z-0"></div>
${content}
</div>`;
}
}
customElements.define("blossom-app", BlossomApp);

251
public/mirror-blobs.js Normal file
View File

@ -0,0 +1,251 @@
import { html, LitElement } from "./lib/lit.min.js";
import { formatBytes, newExpirationValue, unixNow } from "./utils.js";
export class MirrorBlobs extends LitElement {
static properties = {
remoteBlobs: { state: true },
localBlobs: { state: true },
status: { state: true, type: String },
progress: { state: true },
server: { state: true, type: String },
selected: { state: true },
};
createRenderRoot() {
return this;
}
constructor() {
super();
this.selected = [];
}
async fetchRemoteBlobs() {
if (!this.server) return;
const pubkey = await window.nostr.getPublicKey();
this.status = "Signing...";
const auth = await window.nostr.signEvent({
kind: 24242,
content: "List Blobs",
created_at: unixNow(),
tags: [
["t", "list"],
["expiration", newExpirationValue()],
["server", new URL("/", this.server).toString()],
],
});
this.status = "Fetching...";
this.remoteBlobs = await fetch(new URL("/list/" + pubkey, this.server), {
headers: { authorization: "Nostr " + btoa(JSON.stringify(auth)) },
}).then((res) => res.json());
this.status = undefined;
}
localAuth = null;
async fetchLocalBlobs() {
this.pubkey = await window.nostr?.getPublicKey();
if (!this.pubkey) return;
this.status = "Signing...";
this.localAuth = await window.nostr.signEvent({
kind: 24242,
content: "List Blobs",
created_at: unixNow(),
tags: [
["t", "list"],
["expiration", newExpirationValue()],
["server", new URL(location.protocol + "//" + location.hostname).toString()],
],
});
this.status = "Fetching...";
this.localBlobs = await fetch("/list/" + this.pubkey, {
headers: { authorization: "Nostr " + btoa(JSON.stringify(this.localAuth)) },
}).then((res) => res.json());
this.status = undefined;
}
async submit(e) {
e.preventDefault();
await this.fetchLocalBlobs();
await this.fetchRemoteBlobs();
}
serverChange(e) {
this.server = e.target.value;
}
renderForm() {
return html`<form class="space-y-4 ml-auto" @submit="${this.submit}">
<label class="text-sm font-bold text-gray-500 tracking-wide flex flex-col">
<div class="flex gap-2">
<input
name="server"
type="url"
class="rounded-md border-2 w-full p-2 h-10 min-w-20"
.value="${this.server || ""}"
@change="${this.serverChange}"
placeholder="https://cdn.example.com"
style="min-width: 18rem;"
required
/>
<button
type="submit"
class="flex bg-blue-500 text-gray-100 py-2 px-4 rounded-md tracking-wide font-semibold hover:bg-blue-600 cursor-pointer transition ease-in duration-300 flex-shrink-0"
>
List Blobs
</button>
</div>
</label>
</form>`;
}
selectAll() {
const missingBlobs = this.remoteBlobs.filter((blob) => !this.localBlobs.some((b) => b.sha256 === blob.sha256));
if (this.selected.length === missingBlobs.length) {
this.selected = [];
} else this.selected = missingBlobs.map((b) => b.sha256);
}
toggleSelection(sha256) {
if (this.selected.includes(sha256)) {
this.selected = this.selected.filter((s) => s !== sha256);
} else this.selected = [...this.selected, sha256];
}
async mirrorBlobs() {
const blobs = this.remoteBlobs.filter((blob) => this.selected.includes(blob.sha256));
for (const blob of blobs) {
this.progress = blobs.indexOf(blob) + 1;
this.status = `Signing ${blob.sha256}`;
// create auth event
const auth = await window.nostr.signEvent({
kind: 24242,
content: "Mirror Blob",
created_at: unixNow(),
tags: [
["t", "upload"],
["x", blob.sha256],
["expiration", newExpirationValue()],
],
});
this.status = `Mirroring ${blob.sha256}`;
await fetch("/mirror", {
method: "PUT",
body: JSON.stringify({ url: blob.url }),
headers: { authorization: "Nostr " + btoa(JSON.stringify(auth)), "Content-Type": "application/json" },
});
}
this.progress = undefined;
this.status = undefined;
this.selected = [];
await this.fetchLocalBlobs();
}
renderContent() {
if (!window.nostr) return html`<p class="text-red-500 text-lg text-center p-10">Missing window.nostr extension</p>`;
if (this.progress !== undefined) {
return html`
<p>${this.progress}/${this.selected.length} - <span class="text-gray-500">${this.status}</span></p>
<progress class="my-2" .value="${this.progress}" max="${this.selected.length}">
${((this.progress / this.selected.length) * 100).toFixed(2)}%
</progress>
`;
} else if (this.status) {
return html`<p class="my-5 text-center text-lg">${this.status}</p>`;
} else if (this.remoteBlobs && this.localBlobs) {
const missingBlobs = this.remoteBlobs.filter((blob) => !this.localBlobs.some((b) => b.sha256 === blob.sha256));
if (missingBlobs.length === 0) {
return html`<p class="text-green-500 text-lg text-center p-10">All blobs synced ✅</p>`;
}
return html`
<div class="flex gap-2 py-2">
<button
class="text-md bg-blue-500 text-gray-100 py-1 px-3 rounded-md tracking-wide font-semibold hover:bg-blue-600 cursor-pointer transition ease-in duration-300 flex-shrink-0"
@click="${this.selectAll}"
>
Select All
</button>
<button
class="text-md bg-blue-500 text-gray-100 py-1 px-3 rounded-md tracking-wide font-semibold hover:bg-blue-600 cursor-pointer transition ease-in duration-300 flex-shrink-0 ml-auto"
@click="${this.mirrorBlobs}"
>
Mirror Blobs
</button>
</div>
${this.renderBlobs(missingBlobs)}
`;
}
return html`<p class="text-gray-500 text-lg text-center p-10">Select Blossom Server</p>`;
}
renderBlobs(blobs = []) {
return html`<table class="table-auto overflow-hidden">
<thead>
<tr>
<th></th>
<th class="px-2">sha256</th>
<th class="px-2 text-right">Type</th>
<th class="px-2 text-right">Size</th>
</tr>
</thead>
<tbody class="overflow-auto">
${blobs.map(
(blob) => html`
<tr class="whitespace-nowrap">
<td>
<input
type="checkbox"
.checked="${this.selected.includes(blob.sha256)}"
@change="${this.toggleSelection.bind(this, blob.sha256)}"
/>
</td>
<td class="px-2 overflow-hidden truncate font-mono" style="max-width: 10em">
<a href=${blob.url} class="hover:underline" target="_blank">${blob.sha256}</a>
</td>
<td class="px-2 text-right" style="max-width: 5em">${blob.type}</td>
<td class="px-2 text-right">${formatBytes(blob.size)}</td>
</tr>
`,
)}
</tbody>
</table>`;
}
render() {
return html`<div class="w-full p-4 bg-white rounded-xl flex flex-col overflow-hidden">
<div class="flex gap-4 w-full items-flex-start flex-wrap">
<h1 class="text-xl">Mirror blobs</h1>
${window.nostr && this.renderForm()}
</div>
${this.renderContent()}
<div class="flex mt-4 text-sm text-blue-400">
<a href="#">back to upload</a>
<a class="ml-auto" href="https://github.com/hzrd149/blossom">🌸 Blossom Spec</a>
</div>
</div>`;
}
}
customElements.define("mirror-blobs", MirrorBlobs);

63
public/open-blob-form.js Normal file
View File

@ -0,0 +1,63 @@
import { html, LitElement } from "./lib/lit.min.js";
export class GetBlobForm extends LitElement {
static properties = {
hasBlob: { state: true },
};
createRenderRoot() {
return this;
}
async submit(e) {
e.preventDefault();
const formData = new FormData(e.target);
const sha256 = formData.get("sha256");
this.hasBlob = await fetch("/" + sha256, { method: "HEAD" }).then((res) => res.ok);
if (this.hasBlob) {
window.open("/" + sha256, "_blank");
}
}
renderResults() {
return html`<div class="flex gap-2 flex-col max-h-xl overflow-auto">
${this.blobs.map(
(blob) => html`
<div class="flex gap-2">
<a href=${blob.url} class="hover:underline" target="_blank">${blob.sha256}</a>
</div>
`,
)}
</div>`;
}
renderForm() {
return html`<form class="gap-2" @submit="${this.submit}">
<label class="text-sm font-bold text-gray-500 tracking-wide flex flex-col">
<span class="block">View Blob (sha256)</span>
<div class="flex gap-2">
<input name="sha256" type="text" class="rounded-md border-2 w-full p-2 h-10" required />
<button
type="submit"
class="flex bg-blue-500 text-gray-100 py-2 px-4 rounded-md tracking-wide font-semibold hover:bg-blue-600 cursor-pointer transition ease-in duration-300"
>
Open
</button>
</div>
</label>
${this.hasBlob === false ? html`<p class="text-red-500 mb-4">Blob missing</p>` : null}
</form>`;
}
render() {
if (this.blobs) {
return this.renderResults();
}
return this.renderForm();
}
}
customElements.define("get-blob-form", GetBlobForm);

159
public/upload-form.js Normal file
View File

@ -0,0 +1,159 @@
import { html, LitElement } from "./lib/lit.min.js";
import { unixNow, newExpirationValue, getFileSha256, formatBytes } from "./utils.js";
import "./open-blob-form.js";
export class UploadForm extends LitElement {
static properties = {
selected: { state: true },
status: { state: true, type: String },
};
createRenderRoot() {
return this;
}
async upload(e) {
e.preventDefault();
try {
if (!this.selected) throw new Error("Select file first");
let file = this.selected;
// handle an edge case where some browsers set the mime type of .m3u8 files to audio/x-mpegurl
if (file.type === "audio/x-mpegurl" && file.name.endsWith(".m3u8")) {
file = new File([file], file.name, {
type: "application/vnd.apple.mpegurl",
});
}
this.status = "Compute SHA256 hash...";
const hash = await getFileSha256(file);
this.status = "Signing...";
// create auth event
const auth = await window.nostr.signEvent({
kind: 24242,
content: "Authorize Upload",
created_at: unixNow(),
tags: [
["t", "upload"],
["x", hash],
["expiration", newExpirationValue()],
],
});
const authorization = "Nostr " + btoa(JSON.stringify(auth));
// BUD-06 check upload
this.status = "Checking Upload...";
const check = await fetch("/upload", {
method: "HEAD",
headers: {
authorization,
"X-Content-Type": file.type,
"X-Content-Length": file.size,
"X-Sha-256": hash,
},
});
if (!check.ok) {
throw new Error(check.headers.get("x-upload-message") || "Upload Rejected");
}
// Upload blob
this.status = "Uploading...";
const res = await fetch("/upload", {
method: "PUT",
body: file,
// attach Authorization: Nostr <base64> header to request
headers: { authorization },
});
if (res.ok) {
const body = await res.json();
this.selected = undefined;
window.open(body.url, "_blank");
} else {
throw new Error(await res.text());
}
} catch (error) {
if (error instanceof Error) {
alert(error.message);
}
}
this.status = "Upload";
}
inputChange(e) {
this.selected = e.target.files[0];
}
render() {
const preview = !this.selected
? html`<div class="h-full w-full text-center flex flex-col items-center justify-center items-center">
<svg
xmlns="http://www.w3.org/2000/svg"
class="w-10 h-10 text-blue-400 group-hover:text-blue-600"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12"
/>
</svg>
<p class="pointer-none text-gray-500">
<span class="text-sm">Drag and drop</span> files here <br />
or <a href="" id="" class="text-blue-600 hover:underline">select a file</a> from your computer
</p>
</div>`
: html`<p class="pointer-none text-gray-500 font-bold">${this.selected.name}</p>
<p class="text-gray-500 text-sm">${formatBytes(this.selected.size)}</p>`;
return html`<div class="w-full px-10 pt-10 pb-6 bg-white rounded-xl flex flex-col">
<div class="text-center">
<h1 class="mt-5 text-3xl font-bold text-gray-900">🌸 Blossom Server</h1>
<p class="mt-2 text-sm text-gray-400">Blobs stored simply on mediaservers</p>
<a class="text-sm text-blue-400" href="https://github.com/hzrd149/blossom-server">Github</a>
</div>
<form class="space-y-3" @submit="${this.upload}">
<div class="grid grid-cols-1 space-y-2">
<label class="text-sm font-bold text-gray-500 tracking-wide">Selected File</label>
<div class="flex items-center justify-center w-full">
<label
class="flex flex-col rounded-lg border-4 border-dashed w-full h-50 p-10 group text-center cursor-pointer"
>
${preview}
<input name="blob" type="file" class="hidden" @change="${this.inputChange}" />
</label>
</div>
</div>
<div>
<button
type="submit"
class="my-5 w-full flex justify-center bg-blue-500 text-gray-100 p-3 rounded-full tracking-wide font-semibold focus:outline-none focus:shadow-outline hover:bg-blue-600 shadow-lg cursor-pointer transition ease-in duration-300"
>
${this.status || "Upload"}
</button>
</div>
</form>
<get-blob-form></get-blob-form>
<div class="flex gap-4 mt-2">
<a class="text-md text-blue-500" href="#list">List Blobs</a>
${window.nostr && html`<a class="text-md text-blue-500" href="#mirror">Mirror Blobs</a>`}
<!-- <a class="text-md text-red-500" href="#delete">Request delete</a> -->
</div>
<a class="text-sm text-blue-400 ml-auto mt-4" href="https://github.com/hzrd149/blossom">🌸 Blossom Spec</a>
</div>`;
}
}
customElements.define("upload-form", UploadForm);

55
public/utils.js Normal file
View File

@ -0,0 +1,55 @@
export const unixNow = () => Math.floor(Date.now() / 1000);
export const newExpirationValue = () => (unixNow() + 60 * 5).toString();
export function readBlobAsArrayBuffer(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (e) => {
const result = e.target?.result;
if (result == undefined || typeof result !== "object") {
reject();
return;
}
resolve(result);
};
reader.onerror = (error) => reject(error);
reader.readAsArrayBuffer(file);
});
}
export async function getFileSha256(file) {
const { bytesToHex } = await import("./lib/@noble/hashes/utils.js");
const buffer = file instanceof File ? await file.arrayBuffer() : await readBlobAsArrayBuffer(file);
let hash;
if (crypto.subtle) {
const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
hash = new Uint8Array(hashBuffer);
} else {
const { sha256 } = await import("./lib/@noble/hashes/sha256.js");
hash = sha256.create().update(new Uint8Array(buffer)).digest();
}
return bytesToHex(hash);
}
// Copied from https://git.v0l.io/Kieran/dtan/src/branch/main/src/const.ts#L220
export const kiB = Math.pow(1024, 1);
export const MiB = Math.pow(1024, 2);
export const GiB = Math.pow(1024, 3);
export const TiB = Math.pow(1024, 4);
export const PiB = Math.pow(1024, 5);
export const EiB = Math.pow(1024, 6);
export const ZiB = Math.pow(1024, 7);
export const YiB = Math.pow(1024, 8);
export function formatBytes(b, f) {
f ??= 2;
if (b >= YiB) return (b / YiB).toFixed(f) + " YiB";
if (b >= ZiB) return (b / ZiB).toFixed(f) + " ZiB";
if (b >= EiB) return (b / EiB).toFixed(f) + " EiB";
if (b >= PiB) return (b / PiB).toFixed(f) + " PiB";
if (b >= TiB) return (b / TiB).toFixed(f) + " TiB";
if (b >= GiB) return (b / GiB).toFixed(f) + " GiB";
if (b >= MiB) return (b / MiB).toFixed(f) + " MiB";
if (b >= kiB) return (b / kiB).toFixed(f) + " KiB";
return b.toFixed(f) + " B";
}
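A quick usage sketch of these helpers (the file contents and numbers are illustrative):

// Illustrative usage; the file bytes are made up.
import { getFileSha256, formatBytes, newExpirationValue, MiB } from "./utils.js";

const file = new File([new Uint8Array([1, 2, 3])], "example.bin");
const hash = await getFileSha256(file); // 64-character hex sha256 digest
console.log(hash);
console.log(formatBytes(5 * MiB));      // "5.00 MiB"
console.log(newExpirationValue());      // unix timestamp 5 minutes from now, as a string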

5
src/admin-api/auth.ts Normal file
View File

@ -0,0 +1,5 @@
import router from "./router.js";
router.all("/auth", (ctx) => {
ctx.body = { success: true };
});

69
src/admin-api/blobs.ts Normal file
View File

@ -0,0 +1,69 @@
import router from "./router.js";
import db, { blobDB } from "../db/db.js";
import { getBlobURL } from "../helpers/blob.js";
import storage from "../storage/index.js";
import { parseGetListQuery, setContentRange } from "./helpers.js";
import { buildConditionsFromFilter, buildOrderByFromSort } from "../helpers/sql.js";
import { Request } from "koa";
function blobRowToBlob(row: any, req?: Request) {
return {
...row,
owners: row.owners?.split(",") ?? [],
id: row.sha256,
url: getBlobURL(row, req ? req.protocol + "://" + req.host : undefined),
};
}
function safeColumn(name: string) {
if (["sha256", "type", "size", "uploaded"].includes(name)) return name;
throw new Error("Invalid table name");
}
// getOne
router.get("/blobs/:id", (ctx) => {
const row = db.prepare(baseBlobSql + " WHERE sha256 = ?" + groupByBlobHash).get(ctx.params.id);
if (row) ctx.body = blobRowToBlob(row, ctx.request);
});
// delete blob
router.delete("/blobs/:id", async (ctx) => {
await blobDB.removeBlob(ctx.params.id);
if (await storage.hasBlob(ctx.params.id)) await storage.removeBlob(ctx.params.id);
ctx.body = { success: true };
});
// getList / getMany
const baseBlobSql = `
SELECT blobs.*,GROUP_CONCAT(owners.pubkey, ',') as owners FROM blobs
LEFT JOIN owners ON owners.blob = blobs.sha256
`.trim();
const groupByBlobHash = " GROUP BY blobs.sha256";
router.get("/blobs", (ctx) => {
let sql = baseBlobSql;
let params: (string | number)[] = [];
const { filter, sort, range } = parseGetListQuery(ctx.query);
const conditions = buildConditionsFromFilter(filter, ["sha256", "type"], safeColumn);
sql += conditions.sql;
params.push(...conditions.params);
sql += groupByBlobHash;
sql += buildOrderByFromSort(sort, safeColumn);
if (range) {
sql += " LIMIT ? OFFSET ?";
params.push(range[1] - range[0], range[0]);
}
const total = (
db.prepare("SELECT COUNT(*) as count FROM blobs" + conditions.sql).get(conditions.params) as { count: number }
).count;
const blobs = db.prepare(sql).all(...params) as any[];
setContentRange(ctx, range, blobs, total);
ctx.body = blobs.map((r) => blobRowToBlob(r, ctx.request));
});

28
src/admin-api/helpers.ts Normal file
View File

@ -0,0 +1,28 @@
import { ParameterizedContext } from "koa";
import { ParsedUrlQuery } from "querystring";
export type GetListQuery = Partial<{
sort: [string, string];
range: [number, number];
filter: Record<string, any | any[]>;
}>;
export function parseGetListQuery(query: ParsedUrlQuery): GetListQuery {
const queryStrings = query as Record<string, string>;
const filter = queryStrings.filter ? (JSON.parse(queryStrings.filter) as GetListQuery["filter"]) : undefined;
const sort = queryStrings.sort ? (JSON.parse(queryStrings.sort) as GetListQuery["sort"]) : undefined;
const range = queryStrings.range ? (JSON.parse(queryStrings.range) as GetListQuery["range"]) : undefined;
return { filter, sort, range };
}
export function setContentRange(
ctx: ParameterizedContext,
range: GetListQuery["range"],
result: Array<any>,
total?: number,
) {
if (range) ctx.set("Content-Range", `rules ${range[0]}-${range[1]}/${total ?? result.length}`);
else ctx.set("Content-Range", `rules */${result.length}`);
}
export const mapParams = (arr: any[]) => arr.map(() => "?").join(", ");
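These helpers follow the JSON-encoded, react-admin style list protocol. A sketch of a request that parseGetListQuery understands (path and values are hypothetical):

// Hypothetical admin list request; each parameter is JSON-encoded.
const query = new URLSearchParams({
  filter: JSON.stringify({ q: "image" }), // free-text search over the route's search fields
  sort: JSON.stringify(["size", "DESC"]), // [column, direction]
  range: JSON.stringify([0, 24]),         // [start, end] used for pagination
});
// e.g. GET /api/blobs?filter=...&sort=...&range=...
// setContentRange answers with a header such as "Content-Range: rules 0-24/<total>"
console.log("/api/blobs?" + query.toString());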

8
src/admin-api/index.ts Normal file
View File

@ -0,0 +1,8 @@
import router from "./router.js";
import "./auth.js";
import "./blobs.js";
import "./rules.js";
import "./users.js";
export default router;

19
src/admin-api/router.ts Normal file
View File

@ -0,0 +1,19 @@
import Router from "@koa/router";
import { HttpError } from "koa";
const router = new Router();
router.use(async (ctx, next) => {
try {
await next();
} catch (err) {
if (err instanceof HttpError && 401 == err.status) {
ctx.status = 401;
ctx.set("WWW-Authenticate", "Basic");
ctx.body = "cant haz that";
} else {
throw err;
}
}
});
export default router;
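The admin routes are mounted behind HTTP Basic auth in src/index.ts; a hedged sketch of a client-side check against the /auth endpoint (credentials come from config.dashboard):

// Sketch only: verifies dashboard credentials against the admin API.
async function checkAdminAuth(username: string, password: string) {
  const res = await fetch("/api/auth", {
    headers: { authorization: "Basic " + btoa(`${username}:${password}`) },
  });
  // on bad credentials the middleware above answers 401 with "WWW-Authenticate: Basic"
  return res.ok; // body is { success: true } when authorized
}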

55
src/admin-api/rules.ts Normal file
View File

@ -0,0 +1,55 @@
import dayjs from "dayjs";
import { config, saveConfig } from "../config.js";
import router from "./router.js";
import { getExpirationTime } from "../rules/index.js";
import { parseGetListQuery, setContentRange } from "./helpers.js";
// getList / getMany
router.get("/rules", (ctx) => {
let rules = Array.from(config.storage.rules);
const { filter, sort, range } = parseGetListQuery(ctx.query);
if (filter) {
const fields = Object.entries(filter);
if (fields.length > 0) {
rules = rules.filter((rule) =>
fields.some(([key, value]) => {
// @ts-expect-error
if (Array.isArray(value)) return value.includes(rule[key]);
// @ts-expect-error
return rule[key] === value;
}),
);
}
}
if (sort) {
const [key, dir] = sort;
switch (key) {
case "expiration":
const now = dayjs().unix();
rules.sort((a, b) => getExpirationTime(b, now) - getExpirationTime(a, now));
break;
}
if (dir === "ASC") rules.reverse();
}
if (range) rules = rules.slice(range[0], range[1]);
setContentRange(ctx, range, rules);
ctx.body = rules.map((rule) => ({ ...rule, id: config.storage.rules.indexOf(rule) }));
});
// getOne
router.get("/rules/:id", (ctx) => {
const id = parseInt(ctx.params.id);
return config.storage.rules[id];
});
// delete
// router.delete("/rules/:id", (ctx) => {
// config.storage.rules.filter((r) => r.id !== ctx.params.id);
// saveConfig();
// });

46
src/admin-api/users.ts Normal file
View File

@ -0,0 +1,46 @@
import db from "../db/db.js";
import { buildConditionsFromFilter, buildOrderByFromSort } from "../helpers/sql.js";
import { getUserProfile } from "../user-profiles.js";
import { parseGetListQuery, setContentRange } from "./helpers.js";
import router from "./router.js";
function mapRowToUser(row: any) {
return {
...row,
id: row.pubkey,
profile: getUserProfile(row.pubkey),
blobs: row.blobs.split(","),
};
}
function safeColumn(name: string) {
if (["pubkey"].includes(name)) return name;
throw new Error("Invalid table name");
}
const baseSql = `SELECT owners.pubkey, group_concat(owners.blob, ',') as blobs FROM owners`;
const groupBySql = " GROUP BY owners.pubkey";
// getList / getMany
router.get("/users", (ctx) => {
const { filter, sort, range } = parseGetListQuery(ctx.query);
let sql = baseSql;
let params: string[] = [];
const conditions = buildConditionsFromFilter(filter, ["name", "pubkey"], safeColumn);
sql += conditions.sql;
params.push(...conditions.params);
sql += groupBySql;
sql += buildOrderByFromSort(sort, safeColumn);
const total = db
.prepare("SELECT owners.pubkey FROM owners" + conditions.sql + groupBySql)
.all(conditions.params).length;
const users = db.prepare(sql).all(...params) as any[];
setContentRange(ctx, range, users, total);
ctx.body = users.map(mapRowToUser);
});

39
src/api/delete.ts Normal file
View File

@ -0,0 +1,39 @@
import HttpErrors from "http-errors";
import { CommonState, log, router, saveAuthToken } from "./router.js";
import { forgetBlobAccessed, hasUsedToken } from "../db/methods.js";
import { blobDB } from "../db/db.js";
import { config } from "../config.js";
import storage from "../storage/index.js";
router.delete<CommonState>("/:hash", async (ctx, next) => {
const match = ctx.path.match(/([0-9a-f]{64})(\.[a-z]+)?/);
if (!match) return next();
const sha256 = match[1];
if (!ctx.state.auth) throw new HttpErrors.Unauthorized("Missing Auth event");
if (ctx.state.authType !== "delete") throw new HttpErrors.Unauthorized("Incorrect Auth type");
if (!ctx.state.auth.tags.some((t) => t[0] === "x" && t[1] === sha256))
throw new HttpErrors.Unauthorized("Auth missing hash");
if (hasUsedToken(ctx.state.auth.id)) throw new HttpErrors.BadRequest("Auth event already used");
// skip if blob does not exist
if (!blobDB.hasBlob(sha256)) return;
const pubkey = ctx.state.auth.pubkey;
if (blobDB.hasOwner(sha256, pubkey)) {
blobDB.removeOwner(sha256, pubkey);
saveAuthToken(ctx.state.auth);
if (config.storage.removeWhenNoOwners && blobDB.listOwners(sha256).length === 0) {
log(`Removing ${sha256} because it has no owners`);
await blobDB.removeBlob(sha256);
if (await storage.hasBlob(sha256)) await storage.removeBlob(sha256);
forgetBlobAccessed(sha256);
}
}
ctx.status = 200;
ctx.body = { message: "Deleted" };
});

110
src/api/fetch.ts Normal file
View File

@ -0,0 +1,110 @@
import { extname } from "node:path";
import { PassThrough } from "node:stream";
import { URLSearchParams } from "node:url";
import dayjs from "dayjs";
import mime from "mime";
import HttpErrors from "http-errors";
import { config } from "../config.js";
import { BlobPointer, BlobSearch } from "../types.js";
import * as cacheModule from "../cache/index.js";
import * as upstreamDiscovery from "../discover/upstream.js";
import * as nostrDiscovery from "../discover/nostr.js";
import * as httpTransport from "../transport/http.js";
import * as uploadModule from "../storage/upload.js";
import { getFileRule } from "../rules/index.js";
import storage from "../storage/index.js";
import { updateBlobAccess } from "../db/methods.js";
import { blobDB } from "../db/db.js";
import { log, router } from "./router.js";
router.get("/:hash", async (ctx, next) => {
const match = ctx.path.match(/([0-9a-f]{64})/);
if (!match) return next();
const hash = match[1];
const ext = extname(ctx.path) || undefined;
const searchParams = new URLSearchParams(ctx.search);
const search: BlobSearch = {
hash,
ext,
mimeType: mime.getType(ctx.path) ?? undefined,
pubkey: searchParams.get("pubkey") ?? undefined,
};
log("Looking for", search.hash);
const cachePointer = await cacheModule.search(search);
if (cachePointer) {
updateBlobAccess(search.hash, dayjs().unix());
const redirect = cacheModule.getRedirect(cachePointer);
if (redirect) return ctx.redirect(redirect);
if (cachePointer.mimeType) ctx.type = cachePointer.mimeType;
ctx.body = await cacheModule.readPointer(cachePointer);
return;
}
// we don't have the blob, go looking for it
const pointers: BlobPointer[] = [];
if (config.discovery.nostr.enabled) {
let nostrPointers = await nostrDiscovery.search(search);
for (const pointer of nostrPointers) pointers.push(pointer);
}
if (config.discovery.upstream.enabled) {
const cdnPointer = await upstreamDiscovery.search(search);
if (cdnPointer) pointers.push(cdnPointer);
}
// download it from pointers if any were found
for (const pointer of pointers) {
try {
if (pointer.type === "http") {
const stream = await httpTransport.readHTTPPointer(pointer);
if (!ctx.type) {
// if the pointer has a binary stream, try to use the search mime type
if (pointer.mimeType === "application/octet-stream" && search.mimeType) ctx.type = search.mimeType;
else if (pointer.mimeType) ctx.type = pointer.mimeType;
else if (search.mimeType) ctx.type = search.mimeType;
}
const pass = (ctx.body = new PassThrough());
stream.pipe(pass);
// save to cache
const rule = getFileRule(
{ type: pointer.mimeType || search.mimeType, pubkey: pointer.metadata?.pubkey || search.pubkey },
config.storage.rules,
);
if (rule) {
// save the blob in the background (no await)
uploadModule.uploadWriteStream(stream).then(async (upload) => {
if (upload.sha256 !== pointer.hash) return;
// if the storage does not have the blob, save it
if (!(await storage.hasBlob(upload.sha256))) {
const type = upload.type || ctx.type || "";
await storage.writeBlob(upload.sha256, uploadModule.readUpload(upload), type);
await uploadModule.removeUpload(upload);
if (!blobDB.hasBlob(upload.sha256)) {
blobDB.addBlob({ sha256: upload.sha256, size: upload.size, type, uploaded: dayjs().unix() });
}
} else {
await uploadModule.removeUpload(upload);
}
});
}
return;
}
} catch (e) {}
}
if (!ctx.body) throw new HttpErrors.NotFound("Cant find blob for hash");
});
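From the client side this route is a plain GET by hash; the optional file extension helps mime-type detection and the pubkey query parameter feeds the discovery modules. A sketch with placeholder values:

// Sketch only: hash and pubkey are placeholders.
const hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
const res = await fetch(`/${hash}.png?pubkey=${"f".repeat(64)}`);
if (res.ok) {
  const blob = await res.blob();
  console.log(res.headers.get("content-type"), blob.size);
}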

12
src/api/has.ts Normal file
View File

@ -0,0 +1,12 @@
import { blobDB } from "../db/db.js";
import { router } from "./router.js";
router.head("/:hash", async (ctx, next) => {
const match = ctx.path.match(/([0-9a-f]{64})/);
if (!match) return next();
const hash = match[1];
const has = blobDB.hasBlob(hash);
if (has) ctx.status = 200;
else ctx.status = 404;
});

8
src/api/index.ts Normal file
View File

@ -0,0 +1,8 @@
export { router as default } from "./router.js";
import "./list.js";
import "./upload.js";
import "./delete.js";
import "./has.js";
import "./fetch.js";
import "./mirror.js";

25
src/api/list.ts Normal file
View File

@ -0,0 +1,25 @@
import HttpErrors from "http-errors";
import { config } from "../config.js";
import { CommonState, getBlobDescriptor, router } from "./router.js";
import { blobDB } from "../db/db.js";
router.get<CommonState>("/list/:pubkey", async (ctx) => {
const { pubkey } = ctx.params;
const query = ctx.query;
const since = query.since ? parseInt(query.since as string) : undefined;
const until = query.until ? parseInt(query.until as string) : undefined;
if (config.list.requireAuth) {
if (!ctx.state.auth) throw new HttpErrors.Unauthorized("Missing Auth event");
if (ctx.state.authType !== "list") throw new HttpErrors.Unauthorized("Incorrect Auth type");
if (config.list.allowListOthers === false && ctx.state.auth.pubkey !== pubkey)
throw new HttpErrors.Unauthorized("Cant list other pubkeys blobs");
}
const blobs = await blobDB.getOwnerBlobs(pubkey, { since, until });
ctx.status = 200;
ctx.body = blobs.map((blob) => getBlobDescriptor(blob, ctx.request));
});

119
src/api/mirror.ts Normal file
View File

@ -0,0 +1,119 @@
import HttpErrors from "http-errors";
import { BlobMetadata } from "blossom-server-sdk";
import dayjs from "dayjs";
import { koaBody } from "koa-body";
import { IncomingMessage } from "http";
import mount from "koa-mount";
import followRedirects from "follow-redirects";
const { http, https } = followRedirects;
import storage from "../storage/index.js";
import { CommonState, getBlobDescriptor, log, router, saveAuthToken } from "./router.js";
import { getFileRule } from "../rules/index.js";
import { config } from "../config.js";
import { hasUsedToken, updateBlobAccess } from "../db/methods.js";
import { UploadMetadata, readUpload, removeUpload, saveFromResponse } from "../storage/upload.js";
import { blobDB } from "../db/db.js";
function makeRequestWithAbort(url: URL) {
return new Promise<{ response: IncomingMessage; controller: AbortController }>((res, rej) => {
const cancelController = new AbortController();
const request = (url.protocol === "https:" ? https : http).get(
url,
{
signal: cancelController.signal,
},
(response) => {
res({ response, controller: cancelController });
},
);
request.on("error", (err) => rej(err));
request.end();
});
}
router.use(mount("/mirror", koaBody()));
router.put<CommonState>("/mirror", async (ctx) => {
if (!config.upload.enabled) throw new HttpErrors.NotFound("Uploads disabled");
// check auth
if (config.upload.requireAuth) {
if (!ctx.state.auth) throw new HttpErrors.Unauthorized("Missing Auth event");
if (ctx.state.authType !== "upload") throw new HttpErrors.Unauthorized("Auth event should be 'upload'");
if (hasUsedToken(ctx.state.auth.id)) throw new HttpErrors.BadRequest("Auth event already used");
}
if (!ctx.request.body?.url) throw new HttpErrors.BadRequest("Missing url");
const downloadUrl = new URL(ctx.request.body.url);
log(`Mirroring ${downloadUrl.toString()}`);
const { response, controller } = await makeRequestWithAbort(downloadUrl);
let maybeUpload: UploadMetadata | undefined = undefined;
try {
if (!response.statusCode) throw new HttpErrors.InternalServerError("Failed to make request");
if (response.statusCode < 200 || response.statusCode >= 400)
throw new HttpErrors.InternalServerError("Download request failed");
// check rules
const contentType = response.headers["content-type"];
const pubkey = ctx.state.auth?.pubkey;
const rule = getFileRule(
{
type: contentType,
pubkey,
},
config.storage.rules,
config.upload.requireAuth && config.upload.requirePubkeyInRule,
);
if (!rule) {
if (config.upload.requirePubkeyInRule) throw new HttpErrors.Unauthorized("Pubkey not on whitelist");
else throw new HttpErrors.Unauthorized(`Server does not accept ${contentType} blobs`);
}
let mimeType: string | undefined = undefined;
const upload = (maybeUpload = await saveFromResponse(response, downloadUrl));
mimeType = upload.type;
// check if auth has blob sha256 hash
if (
config.upload.requireAuth &&
(!ctx.state.auth || !ctx.state.auth.tags.some((t) => t[0] === "x" && t[1] === upload.sha256))
)
throw new HttpErrors.BadRequest("Auth missing blob sha256 hash");
let blob: BlobMetadata;
if (!blobDB.hasBlob(upload.sha256)) {
log("Saving", upload.sha256, mimeType);
await storage.writeBlob(upload.sha256, readUpload(upload), mimeType);
await removeUpload(upload);
const now = dayjs().unix();
blob = blobDB.addBlob({ sha256: upload.sha256, size: upload.size, type: mimeType, uploaded: now });
updateBlobAccess(upload.sha256, dayjs().unix());
} else {
blob = blobDB.getBlob(upload.sha256);
await removeUpload(upload);
}
if (pubkey && !blobDB.hasOwner(upload.sha256, pubkey)) {
blobDB.addOwner(blob.sha256, pubkey);
}
if (ctx.state.auth) saveAuthToken(ctx.state.auth);
ctx.status = 200;
ctx.body = getBlobDescriptor(blob, ctx.request);
} catch (error) {
// cancel the request if anything fails
controller.abort();
if (maybeUpload) removeUpload(maybeUpload);
throw error;
}
});

64
src/api/router.ts Normal file
View File

@ -0,0 +1,64 @@
import { Request } from "koa";
import Router from "@koa/router";
import dayjs from "dayjs";
import HttpErrors from "http-errors";
import { verifyEvent, NostrEvent } from "nostr-tools";
import { BlobMetadata } from "blossom-server-sdk";
import logger from "../logger.js";
import { addToken } from "../db/methods.js";
import { getBlobURL } from "../helpers/blob.js";
export const log = logger.extend("api");
export const router = new Router();
export function getBlobDescriptor(blob: BlobMetadata, req?: Request) {
return {
sha256: blob.sha256,
size: blob.size,
uploaded: blob.uploaded,
type: blob.type,
url: getBlobURL(blob, req ? req.protocol + "://" + req.host : undefined),
};
}
function parseAuthEvent(auth: NostrEvent) {
const now = dayjs().unix();
if (auth.kind !== 24242) throw new HttpErrors.BadRequest("Unexpected auth kind");
const type = auth.tags.find((t) => t[0] === "t")?.[1];
if (!type) throw new HttpErrors.BadRequest("Auth missing type");
const expiration = auth.tags.find((t) => t[0] === "expiration")?.[1];
if (!expiration) throw new HttpErrors.BadRequest("Auth missing expiration");
if (parseInt(expiration) < now) throw new HttpErrors.BadRequest("Auth expired");
if (!verifyEvent(auth)) throw new HttpErrors.BadRequest("Invalid Auth event");
return { auth, type, expiration: parseInt(expiration) };
}
export function saveAuthToken(event: NostrEvent) {
const { expiration, type } = parseAuthEvent(event);
addToken({
id: event.id,
expiration: expiration,
type: type,
event,
});
}
// parse auth headers
export type CommonState = { auth?: NostrEvent; authType?: string; authExpiration?: number };
router.use(async (ctx, next) => {
const authStr = ctx.headers["authorization"] as string | undefined;
if (authStr?.startsWith("Nostr ")) {
const auth = authStr ? (JSON.parse(atob(authStr.replace(/^Nostr\s/i, ""))) as NostrEvent) : undefined;
if (auth) {
const { type, expiration } = parseAuthEvent(auth);
ctx.state.auth = auth;
ctx.state.authType = type;
ctx.state.authExpiration = expiration;
}
}
await next();
});

129
src/api/upload.ts Normal file
View File

@ -0,0 +1,129 @@
import HttpErrors from "http-errors";
import { BlobMetadata } from "blossom-server-sdk";
import dayjs from "dayjs";
import storage from "../storage/index.js";
import { CommonState, getBlobDescriptor, log, router, saveAuthToken } from "./router.js";
import { getFileRule } from "../rules/index.js";
import { config, Rule } from "../config.js";
import { hasUsedToken, updateBlobAccess } from "../db/methods.js";
import { readUpload, removeUpload, uploadWriteStream } from "../storage/upload.js";
import { blobDB } from "../db/db.js";
import { isHttpError } from "../helpers/error.js";
type UploadState = CommonState & {
contentType: string;
contentLength?: number;
rule: Rule;
};
// handle errors
router.use(async (ctx, next) => {
try {
await next();
} catch (err) {
// BUD-06 set `X-Upload-Message` on failure
if (isHttpError(err)) {
const status = (ctx.status = err.status || 500);
ctx.set("X-Upload-Message", status > 500 ? "Something went wrong" : err.message);
} else {
ctx.set("X-Upload-Message", "Something went wrong");
}
// pass error to parent handler
throw err;
}
});
router.all<CommonState>("/upload", async (ctx, next) => {
if (!config.upload.enabled) throw new HttpErrors.NotFound("Uploads disabled");
if (ctx.method === "HEAD" || ctx.method === "PUT") {
// check auth
if (config.upload.requireAuth) {
if (!ctx.state.auth) throw new HttpErrors.Unauthorized("Missing Auth event");
if (ctx.state.authType !== "upload") throw new HttpErrors.Unauthorized("Auth event should be 'upload'");
if (hasUsedToken(ctx.state.auth.id)) throw new HttpErrors.BadRequest("Auth event already used");
// BUD-06, check if hash is in auth event
const sha256 = ctx.header["x-sha-256"];
if (typeof sha256 === "string" && !ctx.state.auth.tags.some((t) => t[0] === "x" && t[1] === sha256)) {
throw new HttpErrors.BadRequest("Auth missing sha256");
}
}
// check rules
const contentType = ctx.header["content-type"] || String(ctx.header["x-content-type"]);
let contentLength: number | undefined = undefined;
if (typeof ctx.header["x-content-length"] === "string") {
contentLength = parseInt(ctx.header["x-content-length"]);
} else if (ctx.header["content-length"]) {
contentLength = parseInt(ctx.header["content-length"]);
}
const pubkey = ctx.state.auth?.pubkey;
const rule = getFileRule(
{
type: contentType,
pubkey,
},
config.storage.rules,
config.upload.requireAuth && config.upload.requirePubkeyInRule,
);
if (!rule) {
if (config.upload.requirePubkeyInRule) throw new HttpErrors.Unauthorized("Pubkey not on whitelist");
else throw new HttpErrors.Unauthorized(`Server does not accept ${contentType} blobs`);
}
ctx.state.contentType = contentType;
ctx.state.contentLength = contentLength;
ctx.state.rule = rule;
}
return await next();
});
router.head<UploadState>("/upload", async (ctx) => {
ctx.status = 200;
});
router.put<UploadState>("/upload", async (ctx) => {
const { contentType } = ctx.state;
let upload = await uploadWriteStream(ctx.req);
let mimeType = contentType || upload.type;
// if auth is required, check to see if the sha256 is in the auth event
if (
config.upload.requireAuth &&
(!ctx.state.auth || !ctx.state.auth.tags.some((t) => t[0] === "x" && t[1] === upload.sha256))
) {
removeUpload(upload);
throw new HttpErrors.BadRequest("Incorrect blob sha256");
}
let blob: BlobMetadata;
if (!blobDB.hasBlob(upload.sha256)) {
log("Saving", upload.sha256, mimeType);
await storage.writeBlob(upload.sha256, readUpload(upload), mimeType);
await removeUpload(upload);
const now = dayjs().unix();
blob = blobDB.addBlob({ sha256: upload.sha256, size: upload.size, type: mimeType, uploaded: now });
updateBlobAccess(upload.sha256, dayjs().unix());
} else {
blob = blobDB.getBlob(upload.sha256);
await removeUpload(upload);
}
if (ctx.state.auth?.pubkey && !blobDB.hasOwner(upload.sha256, ctx.state.auth.pubkey)) {
blobDB.addOwner(blob.sha256, ctx.state.auth.pubkey);
}
if (ctx.state.auth) saveAuthToken(ctx.state.auth);
ctx.status = 200;
ctx.body = getBlobDescriptor(blob, ctx.request);
});
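The HEAD branch above implements the BUD-06 pre-flight check; rejections are surfaced through the X-Upload-Message header. A minimal client-side sketch, assuming a signed kind 24242 "upload" event that already contains the ["x", sha256] tag:

// Sketch only: `auth` is a signed kind 24242 upload authorization event.
async function canUpload(auth: object, sha256: string, file: File) {
  const check = await fetch("/upload", {
    method: "HEAD",
    headers: {
      authorization: "Nostr " + btoa(JSON.stringify(auth)),
      "X-SHA-256": sha256,
      "X-Content-Type": file.type,
      "X-Content-Length": String(file.size),
    },
  });
  if (!check.ok) throw new Error(check.headers.get("x-upload-message") || "Upload rejected");
  return true;
}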

93
src/cache/index.ts vendored Normal file
View File

@ -0,0 +1,93 @@
import { BlobSearch, CachePointer } from "../types.js";
import storage from "../storage/index.js";
import db, { blobDB } from "../db/db.js";
import { config } from "../config.js";
import { getExpirationTime } from "../rules/index.js";
import dayjs from "dayjs";
import { BlobMetadata } from "blossom-server-sdk/metadata";
import { forgetBlobAccessed } from "../db/methods.js";
import { S3Storage } from "blossom-server-sdk/storage";
import logger from "../logger.js";
const log = logger.extend("cache");
export async function search(search: BlobSearch): Promise<CachePointer | undefined> {
if (blobDB.hasBlob(search.hash) && (await storage.hasBlob(search.hash))) {
const type = await storage.getBlobType(search.hash);
log("Found", search.hash);
return { type: "cache", hash: search.hash, mimeType: type };
}
}
export function getRedirect(pointer: CachePointer) {
const publicURL = config.storage.s3?.publicURL;
if (storage instanceof S3Storage && publicURL) {
const object = storage.objects.find((obj) => obj.name.startsWith(pointer.hash));
if (object) return publicURL + object.name;
}
}
export async function readPointer(pointer: CachePointer) {
return await storage.readBlob(pointer.hash);
}
export async function prune() {
const now = dayjs().unix();
const checked = new Set<string>();
for (const rule of config.storage.rules) {
const expiration = getExpirationTime(rule, now);
let blobs: (BlobMetadata & { pubkey: string; accessed: number | null })[] = [];
if (rule.pubkeys?.length) {
blobs = db
.prepare(
`
SELECT blobs.*,owners.pubkey, accessed.timestamp as "accessed"
FROM blobs
LEFT JOIN owners ON owners.blob = blobs.sha256
LEFT JOIN accessed ON accessed.blob = blobs.sha256
WHERE
blobs.type LIKE ? AND
owners.pubkey IN (${Array.from(rule.pubkeys).fill("?").join(", ")})
`,
)
.all(rule.type.replace("*", "%"), ...rule.pubkeys) as (BlobMetadata & {
pubkey: string;
accessed: number | null;
})[];
} else {
blobs = db
.prepare(
`
SELECT blobs.*,owners.pubkey, accessed.timestamp as "accessed"
FROM blobs
LEFT JOIN owners ON owners.blob = blobs.sha256
LEFT JOIN accessed ON accessed.blob = blobs.sha256
WHERE
blobs.type LIKE ?
`,
)
.all(rule.type.replace("*", "%")) as (BlobMetadata & {
pubkey: string;
accessed: number | null;
})[];
}
let n = 0;
for (const blob of blobs) {
if (checked.has(blob.sha256)) continue;
if ((blob.accessed || blob.uploaded) < expiration) {
log("Removing", blob.sha256, blob.type, "because", rule);
await blobDB.removeBlob(blob.sha256);
if (await storage.hasBlob(blob.sha256)) await storage.removeBlob(blob.sha256);
forgetBlobAccessed(blob.sha256);
}
n++;
checked.add(blob.sha256);
}
if (n > 0) log("Checked", n, "blobs");
}
}

112
src/config.ts Normal file
View File

@ -0,0 +1,112 @@
import { lilconfig } from "lilconfig";
import yaml from "yaml";
import fs from "node:fs";
import { generate } from "generate-password";
import { S3StorageOptions } from "blossom-server-sdk";
import logger from "./logger.js";
import { mergeDeep } from "./helpers/object.js";
const log = logger.extend("config");
export type Rule = { id: string; type: string; pubkeys?: string[]; expiration: string };
export type Config = {
publicDomain: string;
databasePath: string;
storage: {
backend: "local" | "s3";
removeWhenNoOwners: boolean;
local?: {
dir: string;
};
s3?: {
endpoint: string;
accessKey: string;
secretKey: string;
bucket: string;
publicURL?: string;
} & S3StorageOptions;
rules: Rule[];
};
dashboard: {
enabled: boolean;
username: string;
password: string;
};
discovery: {
nostr: {
enabled: boolean;
relays: string[];
};
upstream: {
enabled: boolean;
domains: string[];
};
};
upload: {
enabled: boolean;
requireAuth: boolean;
requirePubkeyInRule: boolean;
};
list: {
requireAuth: boolean;
allowListOthers: boolean;
};
tor: {
enabled: boolean;
proxy: string;
};
};
function loadYaml(filepath: string, content: string) {
return yaml.parse(content);
}
function loadJson(filepath: string, content: string) {
return JSON.parse(content);
}
const defaultConfig: Config = {
publicDomain: "",
databasePath: "data/sqlite.db",
dashboard: { enabled: false, username: "admin", password: generate() },
discovery: {
nostr: { enabled: false, relays: [] },
upstream: { enabled: false, domains: [] },
},
storage: {
backend: "local",
removeWhenNoOwners: false,
local: { dir: "data/blobs" },
rules: [],
},
upload: { enabled: false, requireAuth: true, requirePubkeyInRule: false },
list: { requireAuth: false, allowListOthers: false },
tor: { enabled: false, proxy: "" },
};
const result = await lilconfig("blossom", {
searchPlaces: ["config.yaml", "config.yml", "config.json"],
loaders: {
".yaml": loadYaml,
".yml": loadYaml,
".json": loadJson,
},
}).search();
const config = mergeDeep(defaultConfig, result?.config ?? {}) as Config;
function saveConfig() {
if (result) {
if (result.filepath.includes(".json")) {
fs.writeFileSync(result.filepath, JSON.stringify(config), { encoding: "utf-8" });
} else {
fs.writeFileSync(result.filepath, yaml.stringify(config), { encoding: "utf-8" });
}
log("Saved config file", result.filepath);
} else {
fs.writeFileSync("config.yml", yaml.stringify(config), { encoding: "utf-8" });
log("Saved config file config.yml");
}
}
export { config, saveConfig };
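For illustration, a configuration satisfying this Config type might look like the following; every value here is a placeholder (the real settings live in config.yaml, config.yml or config.json, whichever lilconfig finds):

// Hypothetical configuration; values are examples only.
import type { Config } from "./config.js";

export const exampleConfig: Config = {
  publicDomain: "https://cdn.example.com",
  databasePath: "data/sqlite.db",
  dashboard: { enabled: true, username: "admin", password: "change-me" },
  discovery: {
    nostr: { enabled: true, relays: ["wss://relay.example.com"] },
    upstream: { enabled: false, domains: [] },
  },
  storage: {
    backend: "local",
    removeWhenNoOwners: true,
    local: { dir: "data/blobs" },
    rules: [
      { id: "images", type: "image/*", expiration: "30 days" },
      { id: "everything-else", type: "*", expiration: "7 days" },
    ],
  },
  upload: { enabled: true, requireAuth: true, requirePubkeyInRule: false },
  list: { requireAuth: false, allowListOthers: true },
  tor: { enabled: false, proxy: "" },
};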

31
src/db/db.ts Normal file
View File

@ -0,0 +1,31 @@
import Database from "better-sqlite3";
import { BlossomSQLite } from "blossom-server-sdk/metadata/sqlite";
import { config } from "../config.js";
import { mkdirp } from "mkdirp";
import { dirname } from "path";
await mkdirp(dirname(config.databasePath));
export const db = new Database(config.databasePath);
export const blobDB = new BlossomSQLite(db);
db.prepare(
`CREATE TABLE IF NOT EXISTS accessed (
blob TEXT(64) PRIMARY KEY,
timestamp INTEGER NOT NULL
)`,
).run();
db.prepare("CREATE INDEX IF NOT EXISTS accessed_timestamp ON accessed (timestamp)").run();
db.prepare(
`CREATE TABLE IF NOT EXISTS tokens (
id TEXT(64) PRIMARY KEY,
type TEXT NOT NULL,
pubkey TEXT(64) NOT NULL,
expiration INTEGER NOT NULL,
event TEXT NOT NULL
)`,
).run();
export default db;

4
src/db/index.ts Normal file
View File

@ -0,0 +1,4 @@
import db from "./db.js";
export * from "./methods.js";
export { db };

23
src/db/methods.ts Normal file
View File

@ -0,0 +1,23 @@
import dayjs from "dayjs";
import db from "./db.js";
import { NostrEvent } from "@nostr-dev-kit/ndk";
export async function updateBlobAccess(blob: string, accessed = dayjs().unix()) {
db.prepare(`INSERT or replace INTO accessed (blob, timestamp) VALUES (?, ?)`).run(blob, accessed);
}
export async function forgetBlobAccessed(blob: string) {
db.prepare(`DELETE FROM accessed WHERE blob = ?`).run(blob);
}
export async function addToken(token: { id: string; event: NostrEvent; expiration: number; type: string }) {
db.prepare(`INSERT INTO tokens (id, pubkey, type, expiration, event) VALUES (?, ?, ?, ?, ?)`).run(
token.id,
token.event.pubkey,
token.type,
token.expiration,
JSON.stringify(token.event),
);
}
export function hasUsedToken(token: string) {
return !!db.prepare(`SELECT * FROM tokens WHERE id = ?`).get(token);
}

57
src/db/old-db-migration.ts Normal file
View File

@ -0,0 +1,57 @@
import fs from "node:fs/promises";
import { blobDB } from "./db.js";
import logger from "../logger.js";
const log = logger.extend("migration");
type DBSchema = {
blobs: Record<string, { expiration?: number; pubkeys?: string[]; created: number; mimeType?: string; size: number }>;
usedTokens: Record<string, number>;
};
const DB_PATH = "database.json";
try {
const stats = await fs.stat(DB_PATH);
if (stats) {
log("Found old database.json file");
log("Backing up database.json file");
await fs.copyFile(DB_PATH, "database.old.json");
const str = await fs.readFile(DB_PATH, { encoding: "utf-8" });
const data = JSON.parse(str) as DBSchema;
if (data.blobs) {
let imported = 0;
for (const [sha256, blob] of Object.entries(data.blobs)) {
try {
blobDB.addBlob({
sha256,
type: blob.mimeType ?? "",
size: blob.size,
uploaded: blob.created,
});
imported++;
try {
if (blob.pubkeys) {
for (const pubkey of blob.pubkeys) {
blobDB.addOwner(sha256, pubkey);
}
}
} catch (error) {
log("Error adding owners", sha256);
if (error instanceof Error) log(error.message);
}
} catch (error) {
log("Error importing", sha256);
if (error instanceof Error) log(error.message);
}
}
log("Imported", imported, "blobs");
}
log("Removing database.json file");
await fs.rm(DB_PATH);
}
} catch (error) {}

89
src/discover/nostr.ts Normal file
View File

@ -0,0 +1,89 @@
import { NDKKind } from "@nostr-dev-kit/ndk";
import { BlobPointer, BlobSearch } from "../types.js";
import logger from "../logger.js";
import ndk from "../ndk.js";
import { npubEncode } from "nostr-tools/nip19";
const log = logger.extend("nostr-discovery");
export async function search(search: BlobSearch) {
log("Looking for", search.hash);
const pointers: BlobPointer[] = [];
const events = Array.from(
await ndk.fetchEvents({
kinds: [NDKKind.Media],
"#x": [search.hash],
}),
);
const cdnList = search.pubkey ? await getUserCDNList(search.pubkey) : [];
if (events.length > 0) {
for (const event of events) {
log(`Found 1063 event by ${npubEncode(event.pubkey)}`);
const url = event.tags.find((t) => t[0] === "url")?.[1];
const mimeType = event.tags.find((t) => t[0] === "m")?.[1];
const infohash = event.tags.find((t) => t[0] === "i")?.[1];
const magnet = event.tags.find((t) => t[0] === "magnet")?.[1];
if (url) {
try {
pointers.push({
type: "http",
hash: search.hash,
url: new URL(url).toString(),
mimeType,
metadata: { pubkey: event.pubkey },
});
} catch (e) {}
}
if (magnet || infohash) {
pointers.push({
type: "torrent",
hash: search.hash,
magnet,
infohash,
mimeType,
metadata: { pubkey: event.pubkey },
});
}
}
}
if (cdnList.length > 0) {
log("Found pubkey cdn list", search.pubkey && npubEncode(search.pubkey), cdnList);
for (const cdn of cdnList) {
pointers.push({
type: "http",
hash: search.hash,
url: new URL(search.hash + (search.ext || ""), cdn).toString(),
metadata: { pubkey: search.pubkey },
});
}
}
return pointers;
}
export async function getUserCDNList(pubkey: string) {
const events = await ndk.fetchEvents({
kinds: [10063 as number],
authors: [pubkey],
});
const cdns = new Set<string>();
for (const event of events) {
for (const t of event.tags) {
if (t[0] === "r" && t[1]) {
try {
const url = new URL(t[1]);
cdns.add(url.toString());
} catch (e) {}
}
}
}
return Array.from(cdns);
}

49
src/discover/upstream.ts Normal file
View File

@ -0,0 +1,49 @@
import { BlobSearch, HTTPPointer } from "../types.js";
import { config } from "../config.js";
import http from "node:http";
import https from "node:https";
import logger from "../logger.js";
const log = logger.extend("upstream-discovery");
export async function search(search: BlobSearch) {
log("Looking for", search.hash + search.ext);
for (const cdn of config.discovery.upstream.domains) {
try {
log("Checking", cdn);
const pointer = await checkCDN(cdn, search);
if (pointer) {
log("Found", search.hash, "at", cdn);
return pointer;
}
} catch (e) {}
}
}
function checkCDN(cdn: string, search: BlobSearch): Promise<HTTPPointer> {
return new Promise<HTTPPointer>((resolve, reject) => {
const url = new URL("/" + search.hash, cdn);
const backend = url.protocol === "https:" ? https : http;
const request = backend.request(url.toString(), { method: "HEAD", timeout: 5 * 1000 }, () => {});
request.on("response", (res) => {
res.destroy();
if (!res.statusCode) return reject();
if (res.statusCode < 200 || res.statusCode >= 400) {
reject(new Error("Not Found"));
} else {
resolve({ type: "http", url: url.toString(), hash: search.hash, metadata: { pubkey: search.pubkey } });
}
});
request.on("error", () => request.destroy());
request.on("timeout", () => {
request.destroy();
reject(new Error("Timeout"));
});
request.end();
});
}

27
src/helpers.ts Normal file
View File

@ -0,0 +1,27 @@
import { nip19 } from "nostr-tools";
export function isHex(str?: string) {
if (str?.match(/^[0-9a-f]+$/i)) return true;
return false;
}
export function safeDecode(str: string) {
try {
return nip19.decode(str);
} catch (e) {}
}
export function getPubkeyFromDecodeResult(result?: nip19.DecodeResult) {
if (!result) return;
switch (result.type) {
case "naddr":
case "nprofile":
return result.data.pubkey;
case "npub":
return result.data;
}
}
export function normalizeToHexPubkey(hex: string) {
if (isHex(hex)) return hex;
const decode = safeDecode(hex);
if (!decode) return null;
return getPubkeyFromDecodeResult(decode) ?? null;
}

10
src/helpers/blob.ts Normal file
View File

@ -0,0 +1,10 @@
import { BlobMetadata } from "blossom-server-sdk";
import mime from "mime";
import { config } from "../config.js";
export function getBlobURL(blob: Pick<BlobMetadata, "sha256" | "type">, host?: string) {
const ext = blob.type && mime.getExtension(blob.type);
const domain = config.publicDomain || host;
if (!domain) throw new Error("Cant find public hostname. set publicDomain");
return new URL(blob.sha256 + (ext ? "." + ext : ""), domain).toString();
}

7
src/helpers/error.ts Normal file
View File

@ -0,0 +1,7 @@
import HttpErrors from "http-errors";
export function isHttpError(error: unknown): error is HttpErrors.HttpError {
if (!error) return false;
// @ts-expect-error
return error instanceof HttpErrors.HttpError || !!error.status || !!error.headers;
}

37
src/helpers/object.ts Normal file
View File

@ -0,0 +1,37 @@
// copied from https://stackoverflow.com/a/34749873
/**
* Simple object check.
* @param item
* @returns {boolean}
*/
export function isObject(item: any): item is Object {
return item && typeof item === "object" && !Array.isArray(item);
}
/**
* Deep merge two objects.
* @param target
* @param ...sources
*/
export function mergeDeep(target: Object, ...sources: Object[]) {
if (!sources.length) return target;
const source = sources.shift();
if (isObject(target) && isObject(source)) {
for (const key in source) {
// @ts-expect-error
if (isObject(source[key])) {
// @ts-expect-error
if (!target[key]) Object.assign(target, { [key]: {} });
// @ts-expect-error
mergeDeep(target[key], source[key]);
} else {
// @ts-expect-error
Object.assign(target, { [key]: source[key] });
}
}
}
return mergeDeep(target, ...sources);
}

37
src/helpers/sql.ts Normal file
View File

@ -0,0 +1,37 @@
import { mapParams } from "../admin-api/helpers.js";
export function buildConditionsFromFilter(
filter: Record<string, string[]> | undefined,
searchFields: string[],
safeColumn: (name: string) => string,
) {
const conditions: string[] = [];
const params: string[] = [];
if (filter) {
for (const [key, value] of Object.entries(filter)) {
if (key === "q") {
conditions.push(`( ${searchFields.map((field) => `${safeColumn(field)} LIKE ?`).join(" OR ")} )`);
params.push(...searchFields.map(() => `%${value}%`));
} else if (Array.isArray(value)) {
conditions.push(`${safeColumn(key)} IN (${mapParams(value)})`);
params.push(...value);
} else {
conditions.push(`${safeColumn(key)} = ?`);
params.push(value);
}
}
}
return { conditions, params, sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "" };
}
export function buildOrderByFromSort(sort: [string, string] | undefined, safeColumn: (name: string) => string) {
if (sort) {
if (sort[1] === "DESC") {
return ` ORDER BY ${safeColumn(sort[0])} DESC`;
} else if (sort[1] === "ASC") {
return ` ORDER BY ${safeColumn(sort[0])} ASC`;
}
}
return "";
}
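A small sketch of what these builders produce for a typical admin query (the filter values and the simplified safeColumn are made up):

// Illustrative output of the SQL helpers above.
import { buildConditionsFromFilter, buildOrderByFromSort } from "./sql.js";

const safeColumn = (name: string) => {
  if (["sha256", "type"].includes(name)) return name;
  throw new Error("Invalid column name");
};

const where = buildConditionsFromFilter({ type: ["image/png", "image/jpeg"] }, ["sha256"], safeColumn);
// where.sql    -> " WHERE type IN (?, ?)"
// where.params -> ["image/png", "image/jpeg"]

const orderBy = buildOrderByFromSort(["type", "ASC"], safeColumn);
// orderBy      -> " ORDER BY type ASC"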

21
src/helpers/stream.ts Normal file
View File

@ -0,0 +1,21 @@
import { Duplex, Writable } from "stream";
export class SplitStream extends Duplex {
streams: Writable[];
constructor(...streams: Writable[]) {
super();
this.streams = streams;
}
_write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null | undefined) => void): void {
let failed = false;
for (const stream of this.streams) {
const res = stream.write(chunk);
if (!res) failed = true;
}
callback(failed ? new Error("Failed to write to destinations") : null);
}
_read() {}
}

87
src/index.ts Normal file
View File

@ -0,0 +1,87 @@
#!/usr/bin/env node
import "./polyfill.js";
import Koa from "koa";
import serve from "koa-static";
import path from "node:path";
import cors from "@koa/cors";
import mount from "koa-mount";
import fs from "node:fs";
import { fileURLToPath } from "node:url";
import "./db/old-db-migration.js";
import * as cacheModule from "./cache/index.js";
import router from "./api/index.js";
import logger from "./logger.js";
import { config } from "./config.js";
import { isHttpError } from "./helpers/error.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const app = new Koa();
// set CORS headers
app.use(
cors({
origin: "*",
allowMethods: "*",
allowHeaders: "Authorization,*",
exposeHeaders: "*",
}),
);
// handle errors
app.use(async (ctx, next) => {
try {
await next();
} catch (err) {
if (isHttpError(err)) {
const status = (ctx.status = err.status || 500);
if (status >= 500) console.error(err.stack);
ctx.body = status > 500 ? { message: "Something went wrong" } : { message: err.message };
} else {
console.log(err);
ctx.status = 500;
ctx.body = { message: "Something went wrong" };
}
}
});
app.use(router.routes()).use(router.allowedMethods());
if (config.dashboard.enabled) {
const { koaBody } = await import("koa-body");
const { default: basicAuth } = await import("koa-basic-auth");
const { default: adminApi } = await import("./admin-api/index.js");
app.use(mount("/api", basicAuth({ name: config.dashboard.username, pass: config.dashboard.password })));
app.use(mount("/api", koaBody()));
app.use(mount("/api", adminApi.routes())).use(mount("/api", adminApi.allowedMethods()));
app.use(mount("/admin", serve(path.resolve(__dirname, "../admin/dist"))));
logger("Dashboard started with", config.dashboard.username, config.dashboard.password);
}
try {
const www = path.resolve(process.cwd(), "public");
fs.statSync(www);
app.use(serve(www));
} catch (error) {
const www = path.resolve(__dirname, "../public");
app.use(serve(www));
}
app.listen(process.env.PORT || 3000);
logger("Started app on port", process.env.PORT || 3000);
setInterval(() => {
cacheModule.prune();
}, 1000 * 30);
async function shutdown() {
logger("Saving database...");
process.exit(0);
}
process.addListener("SIGTERM", shutdown);
process.addListener("SIGINT", shutdown);

7
src/logger.ts Normal file
View File

@ -0,0 +1,7 @@
import debug from "debug";
if (!process.env.DEBUG) debug.enable("blossom-server, blossom-server:*");
const logger = debug("blossom-server");
export default logger;

10
src/ndk.ts Normal file
View File

@ -0,0 +1,10 @@
import NDK from "@nostr-dev-kit/ndk";
import { config } from "./config.js";
const ndk = new NDK({
explicitRelayUrls: config.discovery.nostr.relays,
});
ndk.connect();
export default ndk;

3
src/polyfill.ts Normal file
View File

@ -0,0 +1,3 @@
import { WebSocket } from "ws";
global.WebSocket = global.WebSocket || WebSocket;

47
src/rules/index.ts Normal file
View File

@ -0,0 +1,47 @@
import { Rule } from "../config.js";
import dayjs from "dayjs";
import logger from "../logger.js";
import { nip19 } from "nostr-tools";
const log = logger.extend("rules");
export type RuleSearchInput = {
pubkey?: string;
type?: string;
};
export function getFileRule({ pubkey, type }: RuleSearchInput, ruleset: Rule[], requirePubkey: boolean = false) {
log("Looking for match", type, pubkey && nip19.npubEncode(pubkey));
return (
ruleset.find((r) => {
if (requirePubkey && !r.pubkeys) return false;
if (r.pubkeys && (!pubkey || !r.pubkeys.includes(pubkey))) return false;
if (r.type === "*") {
log("Found rule for", r.expiration);
return true;
}
if (r.type) {
if (!type) return false;
if (type === r.type) return true;
if (r.type.endsWith("*") && type.startsWith(r.type.replace(/\*$/, ""))) {
log("Found rule for", r.expiration);
return true;
}
return false;
}
log("Found rule for", r.expiration);
return true;
}) || null
);
}
export function getExpirationTime(rule: Rule, start: number): number {
const match = rule.expiration.match(/(\d+)\s*(\w+)/);
if (!match) throw new Error("Failed to parse expiration");
const count = parseInt(match[1]);
const unit = match[2] as dayjs.ManipulateType;
return dayjs.unix(start).subtract(count, unit).unix();
}
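A hedged sketch of how a rule is matched and turned into a prune cutoff; the rule is hypothetical and the import paths assume the snippet sits somewhere under src/:

// Sketch only: example rule and wildcard matching.
import dayjs from "dayjs";
import { getFileRule, getExpirationTime } from "../rules/index.js";
import type { Rule } from "../config.js";

const rules: Rule[] = [{ id: "images", type: "image/*", expiration: "30 days" }];

// matches because "image/png" starts with "image/"
const rule = getFileRule({ type: "image/png" }, rules);

if (rule) {
  // blobs last accessed before this unix timestamp are eligible for pruning
  const cutoff = getExpirationTime(rule, dayjs().unix());
  console.log(rule.id, cutoff);
}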

Some files were not shown because too many files have changed in this diff