Compare commits

..

4 Commits

Author SHA1 Message Date
Tomáš Mládek 54897f1468 fix: reenable locks 2022-09-14 22:49:04 +02:00
Tomáš Mládek 0ced93adcc refactor: use trait objects instead of FsStore directly (also fix most clippy hints) 2022-09-14 22:49:04 +02:00
Tomáš Mládek 2c4a7f32e5 chore: no default debug output in tests 2022-09-14 22:49:04 +02:00
Tomáš Mládek a7b0a5c00a feat!: multiple vaults (incomplete, but passes tests) 2022-09-14 22:48:59 +02:00
408 changed files with 22694 additions and 41563 deletions

View File

@@ -1,10 +0,0 @@
*/node_modules
.pnpm/*
.cargo/*
upend.sqlite3
.upend/*
.task/*

View File

@@ -1,12 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = false
insert_final_newline = false

1
.env Normal file
View File

@@ -0,0 +1 @@
DATABASE_URL=upend.sqlite3

7
.gitignore vendored
View File

@@ -4,10 +4,3 @@
**/*.rs.bk
upend.sqlite3
.upend
.task
/.pnpm
/.cargo
example_vault/zb*

139
.gitlab-ci.yml Normal file
View File

@@ -0,0 +1,139 @@
variables:
  RUST_IMAGE: "rust:latest"
  NODE_IMAGE: "node:lts"
  CARGO_HOME: $CI_PROJECT_DIR/cargo

stages:
  - lint
  - build
  - test
  - release

cache:
  key: ${CI_COMMIT_REF_SLUG}
  paths:
    - target
    - cargo
    - webui/node_modules

lint:backend:
  stage: lint
  image: $RUST_IMAGE
  script:
    - rustup component add clippy
    - make backend_lint
  rules:
    - changes:
        - migrations/**/*
        - src/**/*
        - Cargo.lock
        - Makefile
  allow_failure: true

lint:backend_no_default_features:
  stage: lint
  image: $RUST_IMAGE
  script:
    - rustup component add clippy
    - make backend_lint_no_default
  rules:
    - changes:
        - migrations/**/*
        - src/**/*
        - Cargo.lock
        - Makefile
  allow_failure: true

lint:frontend:
  stage: lint
  image: $NODE_IMAGE
  script:
    - node --version && npm --version
    - make frontend_lint
  rules:
    - allow_failure: true

lint:frontend_lib:
  stage: lint
  image: $NODE_IMAGE
  script:
    - node --version && npm --version
    - make frontend_lib_lint
  rules:
    - allow_failure: true

build:backend:
  stage: build
  image: $RUST_IMAGE
  script:
    - rustc --version && cargo --version
    - make backend
  artifacts:
    paths:
      - target/release/upend
    expire_in: 1 day
  only:
    changes:
      - migrations/**/*
      - src/**/*
      - Cargo.lock
      - Makefile

build:frontend:
  stage: build
  image: $NODE_IMAGE
  script:
    - node --version && npm --version
    - make frontend
  artifacts:
    paths:
      - webui/public
      - tools/upend_js
    expire_in: 1 day
  only:
    changes:
      - webui/**/*
      - Makefile

test:backend:
  stage: test
  image: $RUST_IMAGE
  script:
    - make backend_test
  only:
    changes:
      - migrations/**/*
      - src/**/*
      - Cargo.lock
      - Makefile

test:backend_no_default_features:
  stage: test
  image: $RUST_IMAGE
  script:
    - make backend_test_no_default
  only:
    changes:
      - migrations/**/*
      - src/**/*
      - Cargo.lock
      - Makefile
  allow_failure: true # remove at v1.0

package:
  stage: release
  image: $RUST_IMAGE
  before_script:
    - cd /tmp
    - wget https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage
    - chmod +x linuxdeploy-x86_64.AppImage
    - ./linuxdeploy-x86_64.AppImage --appimage-extract
    - ln -s $PWD/squashfs-root/AppRun /usr/local/bin/linuxdeploy-x86_64.AppImage
    - cd -
  script:
    - make
  artifacts:
    paths:
      - ./*.AppImage
  only:
    - tags

View File

@@ -1,7 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="dev" type="CompoundRunConfigurationType">
    <toRun name="dev backend" type="CargoCommandRunConfiguration" />
    <toRun name="dev frontend" type="js.build_tools.npm" />
    <method v="2" />
  </configuration>
</component>

View File

@@ -1,19 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="dev backend" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror --secret upend" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <option name="emulateTerminal" value="true" />
    <option name="channel" value="DEFAULT" />
    <option name="requiredFeatures" value="true" />
    <option name="allFeatures" value="false" />
    <option name="withSudo" value="false" />
    <option name="buildTarget" value="REMOTE" />
    <option name="backtrace" value="SHORT" />
    <option name="isRedirectInput" value="false" />
    <option name="redirectInputPath" value="" />
    <method v="2">
      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
    </method>
  </configuration>
</component>

View File

@@ -1,19 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="dev backend storybook" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror --bind 127.0.0.1:8099" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />
    <option name="emulateTerminal" value="true" />
    <option name="channel" value="DEFAULT" />
    <option name="requiredFeatures" value="true" />
    <option name="allFeatures" value="false" />
    <option name="withSudo" value="false" />
    <option name="buildTarget" value="REMOTE" />
    <option name="backtrace" value="SHORT" />
    <option name="isRedirectInput" value="false" />
    <option name="redirectInputPath" value="" />
    <method v="2">
      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
    </method>
  </configuration>
</component>

View File

@@ -1,22 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="dev frontend" type="js.build_tools.npm">
    <package-json value="$PROJECT_DIR$/webui/package.json" />
    <command value="run" />
    <scripts>
      <script value="dev" />
    </scripts>
    <node-interpreter value="project" />
    <envs />
    <method v="2">
      <option name="NpmBeforeRunTask" enabled="true">
        <package-json value="$PROJECT_DIR$/sdks/js/package.json" />
        <command value="run" />
        <scripts>
          <script value="build" />
        </scripts>
        <node-interpreter value="project" />
        <envs />
      </option>
    </method>
  </configuration>
</component>

View File

@@ -1,7 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="storybook" type="CompoundRunConfigurationType">
    <toRun name="dev backend storybook" type="CargoCommandRunConfiguration" />
    <toRun name="storybook:serve" type="js.build_tools.npm" />
    <method v="2" />
  </configuration>
</component>

View File

@@ -1,12 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="storybook:serve" type="js.build_tools.npm" nameIsGenerated="true">
    <package-json value="$PROJECT_DIR$/webui/package.json" />
    <command value="run" />
    <scripts>
      <script value="storybook:serve" />
    </scripts>
    <node-interpreter value="project" />
    <envs />
    <method v="2" />
  </configuration>
</component>

View File

@@ -1,11 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="test js sdk" type="JavaScriptTestRunnerJest">
    <config-file value="$PROJECT_DIR$/sdks/js/jest.config.js" />
    <node-interpreter value="project" />
    <jest-package value="$PROJECT_DIR$/sdks/js/node_modules/jest" />
    <working-dir value="$PROJECT_DIR$" />
    <envs />
    <scope-kind value="ALL" />
    <method v="2" />
  </configuration>
</component>

View File

@@ -1,8 +0,0 @@
{
  "recommendations": [
    "svelte.svelte-vscode",
    "rust-lang.rust-analyzer",
    "esbenp.prettier-vscode",
    "earthly.earthfile-syntax-highlighting"
  ]
}

View File

@@ -1,161 +0,0 @@
pipeline:
  test:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly +test

  lint:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly +lint

  # audit:
  #   image: earthly/earthly:v0.8.3
  #   volumes:
  #     - /var/run/docker.sock:/var/run/docker.sock
  #   environment:
  #     - FORCE_COLOR=1
  #     - EARTHLY_EXEC_CMD="/bin/sh"
  #   secrets: [EARTHLY_CONFIGURATION]
  #   commands:
  #     - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
  #     - earthly bootstrap
  #     - earthly +audit

  appimage:nightly:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets:
      [
        EARTHLY_CONFIGURATION,
        GPG_SIGN_KEY,
        SSH_CONFIG,
        SSH_UPLOAD_KEY,
        SSH_KNOWN_HOSTS,
        SENTRY_AUTH_TOKEN
      ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly --secret GPG_SIGN_KEY --secret SSH_CONFIG --secret SSH_UPLOAD_KEY --secret SSH_KNOWN_HOSTS +deploy-appimage-nightly
    when:
      branch: [ main ]

  docker:nightly:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD, SENTRY_AUTH_TOKEN ]
    commands:
      - echo $${DOCKER_PASSWORD}| docker login --username $${DOCKER_USER} --password-stdin
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly --push +docker-minimal
      - earthly --push +docker
    when:
      branch: [ main ]

  docker:release:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD, SENTRY_AUTH_TOKEN ]
    commands:
      - echo $${DOCKER_PASSWORD}| docker login --username $${DOCKER_USER} --password-stdin
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly --strict --push +docker-minimal --tag=latest
      - earthly --strict --push +docker-minimal --tag=$CI_COMMIT_TAG
      - earthly --strict --push +docker --tag=latest
      - earthly --strict --push +docker --tag=$CI_COMMIT_TAG
    when:
      event: [ tag ]

  jslib:publish:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION, NPM_TOKEN ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly --strict --push --secret NPM_TOKEN +publish-js-all
    when:
      branch: [ main ]

  gitea:prerelease:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - earthly -a +current-changelog/CHANGELOG_CURRENT.md CHANGELOG_CURRENT.md
      - rm -rf dist
    when:
      event: [ tag ]

  appimage:release:
    image: earthly/earthly:v0.8.3
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - FORCE_COLOR=1
      - EARTHLY_EXEC_CMD="/bin/sh"
    secrets: [ EARTHLY_CONFIGURATION, REGISTRY, REGISTRY_USER, REGISTRY_PASSWORD, SENTRY_AUTH_TOKEN ]
    commands:
      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
      - mkdir -p dist/
      - earthly --strict -a '+appimage-signed/*' dist/
    when:
      event: [ tag ]

  # todo: webext

  gitea:release:
    image: woodpeckerci/plugin-gitea-release
    settings:
      base_url: https://git.thm.place
      files:
        - "dist/*"
      checksum: sha512
      api_key:
        from_secret: woodpecker_api_key
      target: main
      note: CHANGELOG_CURRENT.md
    when:
      event: [ tag ]

View File

@@ -1,921 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
## [0.0.76] - 2024-02-06
### Bug Fixes
- [JSLIB]: Fix types for `putBlob()`, returns a single address
### Features
- [WEBUI,JSLIB]: Upload progress
- [WEBUI]: Files can be added or removed from the upload dialog
- [WEBUI]: Select all uploaded files when done
- [WEBUI]: Start upload on Enter press
### Operations & Development
- Enable CACHE
- --force pnpm install, DRY Earthfile slightly
- Cache all rust earthly targets
- Get rid of AppImage upload to S3
- Update Earthly image version
- Remove parallelization
- [WEBUI]: Force rebundling of dependencies for `dev` script
- Intellij dev config builds jslib before webui launch
- Git ignore uploaded files in example_vault
### Styling
- [WEBUI]: Upload progress bar spacing, hide add button
### Build
- [WEBEXT]: Update shared paths with webui, fix build
- Further refactor Earthfile & build process
- Fix upend-bin target
## [0.0.75] - 2024-02-02
### Bug Fixes
- [WEBUI]: Fix upload, re-add forgotten components (Footer, AddModal, DropPasteHandler)
### Operations & Development
- Update Earthly image version
### Refactor
- [WEBUI]: Fix typo, rename ProgessBar -> ProgressBar
### Styling
- [WEBUI]: Fix uneven heights of roots
## [0.0.74] - 2024-01-28
### Bug Fixes
- [CLI]: Serve new SPA version
- [WEBUI]: Selector race conditions / wonkiness
- [CLI]: Serving web ui in Docker/AppImage
- [WEBUI]: Ordering of attributes in Selector
- [JSLIB]: Correct types for `UpObject.attr()`
### Features
- [JSLIB]: Add timeouts / aborts to all api calls
- [WEBUI]: Required & optional attributes
### Miscellaneous
- [WEBUI]: Put /dist into .eslintignore
### Operations & Development
- [WEBUI]: Fix HMR
- Make `dev` intellij config not run --release version
### Refactor
- [WEBUI]: Switch to SvelteKit | touchdown
- [WEBUI]: Switch to SvelteKit | great lint fixing
- [WEBUI]: Switch to SvelteKit | prettier everything
- [WEBUI]: Switch to SvelteKit | fix image annotation
- [WEBUI]: Switch to SvelteKit | fix nested blob preview
- [WEBUI]: Switch to SvelteKit | properly handle BrowseColumn error
- [WEBUI]: Misc fixes in ImageViewer
### Styling
- [WEBUI]: Blob preview labels
### Build
- [WEBUI]: Finish webui SPA build config
- Optimize Earthly target dependencies
## [0.0.73] - 2024-01-27
### Bug Fixes
- [WEBUI]: Version display
- [WEBUI]: Don't require confirmation for set remove in combine
- [WEBUI]: "Required" without "Included" also now works in Combine
- [WEBUI]: "Groups" label in Inspect column
- [WEBUI]: Allow selection with cmd for macos
- [WEBUI]: Various app sizing fixes
- [WEBUI]: Fix sizing / overflows on <=1080 screens?
- [WEBUI]: Upobject label overflow
- [WEBUI]: Fix editing through inspect attribute list
- [WEBUI]: Surface allows rudimentary rescaling
- [WEBUI]: UpLink label overflows
- [WEBUI]: Overflow of "Used" section in Attribute Inspect
- [WEBUI]: Lint
- [WEBUI]: Remove surface story, fix lint
- [WEBUI]: Z-index on surface
- [WEBUI]: Surface: point position matches axes
- [WEBUI]: Surface starts at center
- [WEBUI]: Error on search confirm
- [WEBUI]: SurfaceColumn with new Selectors
- [WEBUI]: Error in SurfaceColumn due to missing `y`
- [WEBUI]: "initial" Selector values are no longer uneditable
- [WEBUI]: Multiple Surface columns
- [WEBUI]: Position of selector on surface
- [WEBUI]: Surface centering on resize
- [WEBUI]: Fix duplicate Selector options (?)
- [DB]: Handling (again) existing files + tests
- Prevent crashes while formatting unexpected value types
- Selectors keep focus while adding entries
- [WEBUI]: Url type display in UpObject
- [WEBUI]: Attribute columns being squashed to unreadability
- [WEBUI]: Editable overflow
- Uploads via API are assigned paths like via FS
- [CLI]: Image previews work for paths without extensions
- [CLI]: Add ID3_PICTURE attribute description
- [WEBUI]: Sort & optimize Keyed section
- [WEBUI]: Selection in EntryList
### Features
- [WEBUI]: Proper set operations
- [WEBUI]: Add group view, duplicate group view
- [WEBUI]: Quick & dirty reverse path resolution for duplicate group distinction
- [WEBUI]: Turn groups view into a column, allow selection
- [DB]: Add new vault scan modes (flat, depthfirst)
- [DB]: Add an "INCOMING" rescan mode
- [DB]: Add an "INCOMING" rescan mode
- [DB]: Duplicate blob paths on initial scan
- [JSLIB]: Add vault options functions
- [WEBUI]: Show current vault mode in setup
- [JSLIB]: Add variables to jslib query builder
- [WEBUI]: Distinguish between correctly & incorrectly typed members in Inspect
- [WEBUI]: Surface: add "display as point"
- [WEBUI]: Surface view as Column in Browse
- [CLI]: Add `--rescan_mode` CLI option, fix storybook cmd
- [WEBUI]: "Last searched" options in header
- [WEBUI]: SurfaceColumn's axes are fully reflected in URL
- [JSLIB]: Or/and/not/join query builder support
- [WEBUI]: SurfaceColumn automatically finds PERPENDICULAR attributes, if set
- [WEBUI]: Press shift and click close to reload a column
- [WEBUI]: Proper autofit of SurfaceColumn
- [CLI,WEBUI]: Check file presence via HEAD, disable download button if necessary
- [WEBUI]: Stable type sort in Inspect: by amount of attributes, address
- [JSLIB]: Implement toString for UpObject
- Add spinner to Selector
- [CLI]: Add ID3 image extraction
- [WEBUI]: Allow search / selection of entries via their attributes
- [WEBUI]: Display KEYs in UpObject banner
- [WEBUI]: Vault name in title on home
- [WEBUI]: Add Keyed display to Home
- [WEBUI]: Add section links from Home
### Miscellaneous
- Specify crate resolver
- [JSLIB]: Add eslint ava
- [JSLIB]: Rebuild before running tests
- [JSLIB]: Version bump
- [JSLIB]: Fix eslint
- [WEBUI]: Update storybook
- [WEBUI]: Update entity addresses for storybook
- [JSLIB]: Bump version
- Add intellij run configurations
- Fix types
### Operations & Development
- Add appimages & changelogs to gitea releases
- Test before lint
- Use detached signature for appimages
- Add mail pipeline step
- Fix mail?
- Remove mail (for the time being)
- Fix prerelease step
### Performance
- [WEBUI]: Only check for file existence for UpObject banners
- [WEBUI]: Use addressToComponents to get attribute addresses without querying backend
- [JSLIB]: Add `attr` cache
- Cancel unfinished updates in Selector
- [WEBUI]: Early set for static Selector options
### Refactor
- [WEBUI]: Use EntitySetEditor in Inspect & MultiGroup
- [DB]: Better impls for UNode/UHierPath
- [WEBUI]: Upobject label into own component
- [DB]: Use `parse` instead of `from_str`
- [DB]: Refactor tests in fs store
- Tree mode -> (new) blob mode
- [DB]: Use jwalk instead of walkdir
- [DB]: Refactor rescan process
- [JSLIB]: Specific constant for any instead of undefined
- [WEBUI]: Use new query api
- [CLI]: Use cargo manifest dir for resources in dev mode
- [WEBUI]: Selector refactor, non-destructive search
- [WEBUI]: Button labels on columns are i18n'd
- [WEBUI]: Get rid of `any` in Surface
- [WEBUI]: I18n in UpObject
- [JSLIB]: Remove `url` and `attribute` from `getAddress`, fix build
- [CLI]: Remove forgotten println
- [CLI]: Refix log level for vault rescans
- Chores in Selector.svelte
- Dbg calls in Selector.svelte identify element
- Remove unnecessary `scoped` leftovers from Vue
- Formatting
- [DB]: Remove deprecation notice until there's actually a better way
- Clippy fixes
- [WEBUI]: Use constants
### Styling
- [WEBUI]: Non-inspect columns are lighter
- [WEBUI]: Padding on groups in inspect
- [WEBUI]: Notes in properties, enlarge scrollable area
- [WEBUI]: Roots on home are in a column
- [WEBUI]: Embolden 0 axes in Surface, text shadow
- [WEBUI]: Reorder options in selector
- [WEBUI]: Fix partially hidden Home footer; spacing
- [WEBUI]: Column/inspect sizing, avoid scrollbar overlap
- [WEBUI]: 2 columns at home
- Show multiple roots as banners instead of full cards
- [WEBUI]: # -> ⌘
- [WEBUI]: Key display in non-banners also
- [WEBUI]: Monospace & diminished key display
- [WEBUI]: Hide type keys
## [0.0.72] - 2023-10-22
### Bug Fixes
- [WEBUI]: Inner group preview sizing
- [WEBUI]: Various mobile improvements (#23)
- [WEBUI]: Ultrawide detail mode
- Double ^C actually stops
- [WEBEXT]: External instances, link opens stored instance
- Fix mime detection on mac os
- Web ui flag
- Api fetch store info
- [WEBUI]: Resolve upobjects with empty labels, explicitly disable resolving
- Gallery empty state
- Upgrade shadow-rs, fix libgit build
- Disable libgit2 shadow-rs functionality, actually fix build
- Local js dependencies
- Build wasmlib before frontend
- Upend js lib build (`files`)
- Minor entity not yet loaded bug
- Backlinks, untyped links don't include OFs
- Unclickable items in detail mode, fixes #57
- Concurrent image loading
- Impl display for upmultihash, fix preview debug log
- Docker improvements
- (loading) image overflow
- Appimage webui path
- Docker-minimal missing libssl3
- Upgrade vite, get rid of vite build voodoo
- Audiopreview overflow
- Never cache index.html, prevent stale assets
- Don't hide jobs
- Footer only showable when jobs present
- Duplicate wasm initialization
- Don't show type editor for nontypes
- Entrylist scroll hijack
- Wasm lint
- Make `componentsToAddress` usable from JS
- Webui layout & sizing fixes
- Add url attributes to url type address
- Webui, detail doesn't take up the whole screen
- 3d model preview overflow
- Don't duplicate columns unless shift is pressed
- Hide browse add column after blur
- Accessibility & lints
- Audio annotations not being saved properly
- Entitylist entry add
- Selector overflow in entitylist
- [JSLIB]: :sparkles: allow initialization of wasm via wasm modules
- [JSLIB]: :wrench: moved wasm from dependencies to dev dependencies
- [WEBUI]: :bug: add placeholder to indicate url pasting in entitylist
- [JSLIB]: :rotating_light: fix lint fail due to missing type-only imports
- [DB]: :bug: fix join behavior
- [JSLIB]: :technologist: better error messages for api/query
- [DB]: :bug: actually fix join behavior, improve performance as well
- [WEBUI]: :ambulance: fix upend wasm import
- [JSLIB]: :wrench: fix gitignore
- [WEBUI]: Properly center banner select highlight
- [WEBUI]: Make non-inspect columns play nice with index context
- [CLI]: Proper version in vault info
### Features
- [WEBEXT]: Add link to instance
- Add `get` cli command, cli commands don't panic
- [CLI]: Request the whole obj listing for `get`
- Limit concurrent image loading
- Upend.js `attr` includes backlinks
- Provenance, vault stats
- Add endpoint to aid with db migration
- Extractors append types
- Add link to typed entry views
- Rudimentary type editor
- Add download button to UpObject
- Concurrent image loading indication
- Add debug logging for external command extractors
- Use `audiowaveform` for audio preview generation
- Allow specifying vault name as env
- Add basic group section to home
- Add group count
- Property adding in entrylist
- Modeless group operations
- Modeless entrylist editing
- Always show members in inspect
- Show URL types in non-banner upobjects
- :package: upend jslib + wasm can be used from node
- [JSLIB]: :sparkles: add basic query builder
- [JSLIB]: :recycle: eav helper getters for uplisting
- [JSLIB]: :sparkles: getRaw() just returns URL, fetchRaw() fetches the actual content
- [WEBUI]: :construction: selection via ctrl+drag
- [WEBUI]: :construction: generic `BrowseColumn`, EntryView accepts `entities`
- [WEBUI]: :construction: base of select all
- [WEBUI]: :construction: allow selection removal
- [WEBUI]: :sparkles: batch adding/removing groups
- Add selection & batch operations
- [WEBUI]: :sparkles: rudimentary combine column
- [WEBUI]: All "combined" can now be selected
### Miscellaneous
- [WEBEXT]: More descriptive message for visiting upend
- [WEBEXT]: Version bump
- Add `debug`
- Don't print header if result is empty in cli
- [CLI]: Graceful fallback if API format changes
- [WEBEXT]: Version bump
- [CI]: Include web-ext artifacts in (pre)releases
- Remove unused dependencies
- Fix tests on mac
- EntryList default columns
- Include versions of all packages in /info
- Deprecate get_all_attributes (#38)
- Migrate from yarn to pnpm
- Fix taskfile (pnpm --frozen-lockfile)
- Lock update
- Rename photo extractor to EXIF extractor
- Remove unnecessary std::, reformat
- Reformat webui w/ prettier
- Add VS Code recommended extensions
- Add .editorconfig
- Rename build dockerfiles
- Add prettier for webui
- Add deploy:docker task
- Change db/store traces to trace level
- Log level to trace
- Dev:frontend relies on build:jslib
- Pnpm lock update
- Reformat?
- Remove prod tasks from Taskfile
- Update cargo & webui deps
- Rename Gallery to EntityList
- Logging for swr fetch
- Update upend logo
- Fix stories errors
- Update git cliff config
- Change wording on "Create object", i18n
- [JSLIB]: :recycle: tidy up tsconfig.json
- :technologist: add earthly to recommended extensions
- [JSLIB]: :wrench: tidy up gitignore
- [JSLIB]: :recycle: use wasmlib from npm
- [JSLIB]: :bookmark: version bump to 0.0.5
### Operations & Development
- Fix publish api key (?)
- Fix woodpecker path check
- Prerelease every push to main
- Verbose build of upend.js
- Move from using global `rust` image to local `rust-upend`
- Also use local node docker image
- Also cache target for incremental builds
- Only upload nightlies from main
- Upload packages to minio
- Fix docker tasks
- Add `gpg-agent` to upend-deploy docker
- Also build a minimal docker image
- Only publish dockers from main
- Add an audit target
- Add logging to Inspect
- Add earthly target to update changelog
- Add `--push` to deploy target
- [JSLIB]: :rocket: publish jslib on tag
- [JSLIB]: :white_check_mark: test jslib in CI
- [JSLIB]: :sparkles: publish jslib whenever version is bumped
- [JSLIB]: :rocket: publish wasmlib to repo
- [JSLIB]: :bug: fix earthly publish target
- :construction_worker: sequential js publish
- [JSLIB]: :ambulance: do not attempt to publish jslib unless we're on `main`
### Refactor
- Move actix app creation into separate module
- [**breaking**] Unify groups, tags, types (on the backend)
- Split inspect groups into its own widget
- InspectGroups more self-sufficient
- Get_resource_path, looks in /usr/share
- Add `DEBUG:IMAGEHALT` localstorage variable that halts concurrent image loading
- Add global mock/debug switches
- Generic magic for addressable/asmultihash
- Unify debug logs in webui
- Provenance api log
- EntryList uses CSS grid instead of tables
- [JSLIB]: Reexport UpEndApi in index
- :truck: rename jslib to use `@upnd` scope
- [JSLIB]: :recycle: config obj instead of positional args in api
### Styling
- Smaller iconbutton text
- Don't use detail layout under 1600px width
- Referred to after members
- No more labelborder, more conventional table view
- [WEBUI]: Transition select state in EntityList
- [WEBUI]: Slightly reduce empty space in selectedcolumn
### Testing
- Rudimentary route test
- Add /api/hier test
- [SERVER]: Add test for /api/obj/ entity info
- Improve db open tests
- [BASE]: :bug: `in` actually tested
### Release
- V0.0.72
## [0.0.71] - 2023-06-03
### Bug Fixes
- "database is locked" errors on init (?)
- UpLink not updating
- Text overflow
- Prevent bonkers behavior on PUT (deny_unknown_fields)
- Useful attribute mouseover
- (group) previews getting hung up on a spinner
- Overflow & spacing issues
- Audio preview sizing issue
- Image group overflow
- Sort attributes by label too
- Update upend_js to include entry provenance and timestamp
- Don't use "Link" under the button
- Unresolved audio annotations labels
- Invariant entries have 0 timestamp
- Pdf viewer
- Tests
- Taskfile
- Put types
- Commands
- Don't show tags if empty
- Image fragment viewing
- Selector unlabeled attr handling
- Suggest attributes on empty selector
- Empty selector attr option
- Selector hanging open
- Incorrect max_size in /api/address
- Proper error message when web ui not enabled
- Increase multihash size to 256 bytes
- Panics due to async black magic
- Proper external fetch error handling
- Don't needlessly insert hashy filename
- Content-type for cors
- Url labels on client, not backend
- Await upend visit, contentType isn't array
### Documentation
- Add conceptual tutorial
### Features
- Attribute label display in Selector, create attribute feature
- [**breaking**] Add provenance & timestamp to Entry
- Add "as entries" inspect option
- Display entity type in banner
- Also show timestamp & provenance in EntryList
- Add optional `provenance` query parameter to API calls
- Only suggest type's attributes in attributeview editing
- [CLI]: Insert entities for files with =, urls
- Guess entryvalue in cli
- Add `@=` support in cli queries
- [CLI]: Implement tsv format for queries
- Add addressing/hashing of remote urls
- Proof of concept v0.1 web extension companion
- Add external blobs via url at /api/blob
- Add PUT /api/hier handler (for creation)
- Extension supports adding
- Webext display added time
- Web extractor adds LBLs
### Media
- Add more buttony upend icon
### Miscellaneous
- Clippy lints
- [WEBUI]: Fix eslint errors
- Add text examples
- Gitattributes fix
- Add 2 levels of directories to example
- [**breaking**] Separate server functionality into a crate
- Fix missing vendor files in dev
- Rename to entries
- Remove duplicate sort
- Add clean:vault task
- Don't necessarily build jslib
- Update `webpage`
- `cargo update`, fix clippy lints
- [**breaking**] Separate PUT /api/obj and PUT /api/blob endpoint
- Server -> cli
- Update repository in Cargo.toml
- Fix clippy
- Silence storybook errors
- Open browser on `task dev`
- Get rid of MTIME
- Allow 127.0.0.1 origin by default
- Cli docstrings
- Use url instead of string in address
- Add user agent to reqwests
- Remove jsconfig.json
- Lint webext
- Fix rust lints
- Update actix deps, get rid of one future incompat warning
- Use api client from upend.js in webui
- Forgotten placeholder var
- Update yarn.lock for webui
- [WEBEXT]: Fix url desync, types
- Prevent double browser opening
- Rename uploadFile to putBlob, enable remote url
- Send a header with version
- Safeguard in webext against running in upend
- Version bump webext
- Stuff for mozilla webext packaging
- Bump webext version
- Fancify readme
- Links in readme
- Switch to using git cliff for changelogs
- Release
### Operations & Development
- Update clean task
- Fix deps
- Switch from Gitlab CI to Woodpecker, Taskfile fixes
- Conditions on lints
### Refactor
- Unify put input handling
- Move tools/upend_cli functionality to the cli crate
- Various
- Move entitylisting to upend.js, dry, formatting
- Add api client to upend.js
- Use global reqwest client
### Styling
- !is_release instead of is_debug
- Smaller add icon
- Improve browse icons
- Add text to iconbuttons
- Also show attr in type
## [0.0.70] - 2023-03-08
### Bug Fixes
- Always resolve UpObject when banner (check for blobbiness)
- Unnecessary underline on UpObject banner
- Endless loading on group preview
- Selector mouse behavior, focus event
- Inflight queryonce cache never revalidated
- Rotate models right side up in (pre)view
- Audio regions editable state
- Blobpreview endless loading state
- Image overflow in inspect detail
- Various audioviewer bugs & improvements
- Only record annotation color if not default
- Blob viewer jumping
- Resize AudioViewer
- Detail mode
### Features
- Add arrow key support to Selector
- Double click on surface to add a point
- Update surface URL when changing axes
- Shift+click to add on right
- Resizable columns
### Miscellaneous
- Rename /media to /assets
- Add example vault with 1 video
- Add blobpreview, blobviewer video stories
- Add vertical video + stories
- Add example images
- Add image stories for blobs
- Add upobjectcard story, routerdecorator, link upobject story
- Add Gallery story
- Stories
- Add Selector stories
- Add Surface story
- Add --reinitialize to sb command
- Fix Surface story, add prefilled story
- Add example files (2 photos, 2 stls)
- Add 3d model stories
- Warn when reinitializing
- Add audio example, update ATTRIBUTION
- Run release version of upend for storybook
- Add RouterDecorator to BlobViewer story
- Add yarn interactive tools
- Release upend version 0.0.70
### Refactor
- Gallery thumbnail is now UpObjectCard
### Styling
- No min-height for blob preview (?)
### Build
- Storybook init
## [0.0.69] - 2023-01-15
### Bug Fixes
- [UI]: Don't update last/num visited if object is nonexistent
- [UI]: Simplify BlobPreview markup, improve loading state
- [UI]: Jobs update after reload triggered
- [API]: Malformed entries aren't parsed as invariants during PUT
- [ERROR]: Address deserialize errors include origin
- [UI]: Footer space, markup
- [UI]: Selector initial attribute value
- [UI]: Surface inaccuracies, zoom reacts everywhere, points are centered
### Features
- [UI]: Footer is persistent and can be hidden
- [CLI]: Initial upend cli
- Add cli addressing from `sha256sum` output
- Add attribute view
- Add rudimentary surface view
- [UI]: Reverse surface Y scale, add loading state
- Add current position display to Surface view
### Miscellaneous
- [UI]: Adjust OFT features on videoviewer timecode
- [UI]: Footer is hidden by default
- Ignore rel-noreferrer
- [UI]: Remove unnecessary imports
### Performance
- [UI]: Supply labels from sort keys
- Load d3 asynchronously
### Styling
- [UI]: Switch Inter for IBM Plex
- [UI]: Switched root font size from 15px to 16px
## [0.0.68] - 2022-12-22
### Bug Fixes
- Add custom logging handler (elucidate db locked errors?)
- .wavs also detected as audio
- Add proper targets to db logging, panic in debug mode
- Format duration, also change formatting to xhxmxs
- Duration attribute label
- Target
- Tracing target has to be static
- Properly set WAL, eliminate (?) intermittent `database locked` errors
- Box-sizing: border-box
- Centered spinner on image previews
- Placeholder width/height for spinner in blobpreview
- .identified on UpObject
- Border on play icon
- Spinner centering
- Update vite, fix dynamic imports
- Unsupported display in detail mode
- Gallery without sort
### Features
- Add --allow-hosts CLI option
- Add i18next, move attribute labels
- Supported format detection in videoviewer
- Loading state in videoviewer preview
### Miscellaneous
- Log instrumenting
- Don't package by default
- Log -> tracing
- Update web deps
- Css fix
### Operations & Development
- Make makefile more command-y
### Performance
- Only resort once initial query has finished
- Only show items in gallery once sorted
- Enable lazy loading of images (?)
### Ui
- Replace spinner
## [0.0.67] - 2022-10-23
### Bug Fixes
- Icons when ui served from server
- Continue with other extractors when one fails
- Audio detection of .oggs
- Also loading peaks
- Add .mp3 override to type detection
- .mp3 override in media extractor
### Features
- Download blob with identified filename
- Add media (duration) extractor
- Add duration display for audio preview
### Miscellaneous
- Unused css rule
- Shut up svelte check
- Enable tracing span for extractors
- Change extractor error level to debug, add extractor markers
### Refactor
- Unify media type detection
## [0.0.66] - 2022-10-22
### Bug Fixes
- Confirm before generating audio peaks in browser, avoid lock-ups in Chrome
- Remove BlobViewer duplicity in Inspect
### Miscellaneous
- --ui-enabled actually does something
- 32 max port retries
- Disallow `console.log`
## [0.0.65] - 2022-10-21
### Bug Fixes
- Skip empty files on vault update
- Update tests to handle Skipped paths
- Use `cargo clean` in Makefile/CI
- Markdown display
- Forgot to denote `TYPE` as denoting to types
- Blobpreview sizing
- Blobpreview hashbadge more in line with handled
- Minor css fixes
### Features
- Add cli option to open executable files
- Group preview
- Recurse up to 3 levels resolving group previews
- On group preview, prefer objects with previews
### Miscellaneous
- Put config into its own struct
- Update address constants (fix file detection, group adding)
- Separate clean commands in Makefile
- Switch from `built` to `shadow_rs`
## [0.0.64] - 2022-10-16
### Bug Fixes
- Update project url, fix tests
- Add global locks to db, fix sqlite errors (?)
- Do not needlessly trigger drop handler UI
- Use Gallery in Search, order by match order
- Remember attributeview state in search
- Only get() connection in UpEndConnection when necessary
- Video loading state in VideoViewer
- HashBadge display in Chrome*
- .avi previews as video
- VideoViewer play after click
- VideoViewer size in detail
- Limit thumbnail generation to 1 thread per image
- Lint due to `NodeJS` types
- Consistent font sizing of timecode
- VideoViewer vertical thumbnails
- Reenable locks
- Reenable initial quick vault scan
- Restore store stats functionality somewhat
- Limit previews to NUM_CPU/2 at a time, avoid brown lock-ups
- Previews are cached re: mimetype as well
- Create store dir if not exists
- Don't run an initial full-hash update every start
- Image thumbnails of audio (size query arg collision)
- Actually remove objects on rescan
- Svg (pre)views
- No spurious "Database locked" on startup
### Features
- Levenshtein sort entries in Selector
- Use `match-sorter` instead of just levenshtein distance
- Add timecode display to VideoViewer during previewing
- [**breaking**] Switch from k12 to sha256, use proper multihash /base impl
- [**breaking**] Multiple vaults
- Add options to previews
- If `audiowaveform` is present, generate & cache peaks on backend
### Miscellaneous
- Add logging to fs tests
- Fix frontend lint
- Missing types, fix (some) Svelte check warnings
- Switch from log to tracing
- Log failed path updates immediately
- Note to self about detail animations
- Refactor widgets + gallery
- VideoViewer preview optimization
- Fix svelte warnings
- Extract all API URLs into a global variable
- Allow CORS from localhost
- No default debug output in tests
- Fix vault/db path semantics, previews in db path, `--clean` param
- Lower default size&quality of image previews
- Add logging
- Fix typo
### Operations & Development
- Update Makefile for new webui build also
### Performance
- First check for files in /raw/
- Lower seek time for thumbnails
- Correct `ffmpeg` params for efficient video previews
- Implement speed-ups for vault db
- Remove `valid` index on files
- SQLite NORMAL mode on fs vault connections
- Add checks to avoid duplicate metadata extraction
### Refactor
- Use trait objects instead of FsStore directly
### Build
- Switch from Rollup to Vite, upgrade Svelte
### Hotfix
- Disable transactions for now
## [0.0.6] - 2021-06-19
### Line-break
- Anywhere for attr tables
## [0.0.3] - 2021-06-19
### Refactor
- Remove query_entries(), from_sexp into TryFrom, query_to_sqlite is a method
### Models
- :File uses Hash instead of plain Vec<u8>
<!-- generated by git-cliff -->

3572
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -1,3 +1,98 @@
[workspace]
members = ["base", "db", "cli", "wasm"]
resolver = "2"
[package]
name = "upend"
description = "A user-oriented all-purpose graph database."
version = "0.0.63"
homepage = "https://upend.dev/"
repository = "https://gitlab.com/tmladek/upend/"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
build = "build.rs"
[dependencies]
clap = "2.33.0"
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
thiserror = "1.0"
rayon = "1.4.0"
futures-util = "~0.3.12"
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
    "sqlite",
    "r2d2",
    "chrono",
    "serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
actix = "^0.10"
actix-files = "^0.5"
actix-rt = "^2.0"
actix-web = "^3.3"
actix_derive = "^0.5"
jsonwebtoken = "8"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"
regex = "1"
bs58 = "^0.4"
tiny-keccak = { version = "2.0", features = ["k12"] }
unsigned-varint = { version = "^0", features = ["std"] }
uuid = { version = "0.8", features = ["v4"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"
rand = "0.8"
mime = "^0.3.16"
tree_magic_mini = "3.0.2"
dotenv = "0.15.0"
xdg = "^2.1"
opener = { version = "^0.5.0", optional = true }
is_executable = { version = "1.0.1", optional = true }
webbrowser = { version = "^0.5.5", optional = true }
nonempty = "0.6.0"
actix-multipart = "0.3.0"
image = { version = "0.23.14", optional = true }
webp = { version = "0.2.0", optional = true }
webpage = { version = "1.4.0", optional = true }
id3 = { version = "1.0.2", optional = true }
kamadak-exif = { version = "0.5.4", optional = true }
[build-dependencies]
built = "0.5.1"
[features]
default = [
    "desktop",
    "previews",
    "previews-image",
    "extractors-web",
    "extractors-audio",
    "extractors-photo",
]
desktop = ["webbrowser", "opener", "is_executable"]
previews = []
previews-image = ["image", "webp", "kamadak-exif"]
extractors-web = ["webpage"]
extractors-audio = ["id3"]
extractors-photo = ["kamadak-exif"]
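# Illustrative build sketch (not part of the manifest): the default feature set
# above can be trimmed for a headless build, e.g.
#   cargo build --release --no-default-features --features previews,previews-image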

288
Earthfile
View File

@@ -1,288 +0,0 @@
VERSION 0.8

# Base targets

base-rust:
    FROM rust:bookworm
    RUN rustup component add clippy
    RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
    RUN cargo install wasm-pack wasm-bindgen-cli && rustup target add wasm32-unknown-unknown
    RUN cargo install cargo-audit
    WORKDIR /upend
    CACHE $HOME/.cargo
    COPY Cargo.toml Cargo.lock .
    COPY base/Cargo.toml base/Cargo.toml
    COPY cli/Cargo.toml cli/Cargo.toml
    COPY db/Cargo.toml db/Cargo.toml
    COPY wasm/Cargo.toml wasm/Cargo.toml
    RUN cargo fetch --locked

base-backend:
    FROM +base-rust
    COPY --dir base cli db wasm .

base-node:
    FROM node:lts-iron
    RUN npm install -g pnpm
    WORKDIR /upend
    CACHE $HOME/.local/share/pnpm
    COPY +wasmlib/pkg-web wasm/pkg-web
    COPY +wasmlib/pkg-node wasm/pkg-node
    COPY sdks/js/package.json sdks/js/pnpm-lock.yaml sdks/js/
    RUN cd sdks/js && rm -rf node_modules && pnpm install --frozen-lockfile
    COPY webui/package.json webui/pnpm-lock.yaml webui/
    RUN cd webui && rm -rf node_modules && pnpm install --frozen-lockfile
    COPY --dir webui webext .
    COPY --dir sdks/js sdks/

base-frontend:
    FROM +base-node
    COPY +jslib/dist sdks/js/dist
    WORKDIR webui
    RUN rm -rf node_modules && pnpm install --frozen-lockfile

# Intermediate targets

upend-bin:
    FROM +base-backend
    CACHE --id=rust-target target
    COPY +git-version/version.txt .
    RUN UPEND_VERSION=$(cat version.txt) cargo build --release
    RUN cp target/release/upend upend.bin
    SAVE ARTIFACT upend.bin upend

webui:
    FROM +base-frontend
    RUN pnpm build
    SAVE ARTIFACT dist

wasmlib:
    FROM --platform=linux/amd64 +base-rust
    COPY --dir base wasm .
    WORKDIR wasm
    CACHE target
    RUN wasm-pack build --target web --out-dir pkg-web && \
        wasm-pack build --target nodejs --out-dir pkg-node
    RUN sed -e 's%"name": "upend_wasm"%"name": "@upnd/wasm-web"%' -i pkg-web/package.json && \
        sed -e 's%"name": "upend_wasm"%"name": "@upnd/wasm-node"%' -i pkg-node/package.json
    SAVE ARTIFACT pkg-web
    SAVE ARTIFACT pkg-node

jslib:
    FROM +base-node
    WORKDIR sdks/js
    RUN pnpm build
    SAVE ARTIFACT dist

webext:
    FROM +base-node
    WORKDIR webext
    RUN pnpm build
    SAVE ARTIFACT web-ext-artifacts/*.zip

# Final targets

appimage:
    FROM debian:bookworm
    RUN apt-get update && \
        apt-get -y install wget pipx binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf python3-pip python3-setuptools squashfs-tools strace util-linux zsync && \
        pipx ensurepath && \
        pipx install appimage-builder
    COPY +upend-bin/upend AppDir/usr/bin/upend
    COPY --dir +webui/dist AppDir/usr/share/upend/webui
    COPY assets/upend.png AppDir/usr/share/icons/upend.png
    COPY build/AppImageBuilder.yml .
    RUN sed -e "s/latest/$(./AppDir/usr/bin/upend --version | cut -d ' ' -f 2)/" -i AppImageBuilder.yml
    RUN pipx run appimage-builder
    SAVE ARTIFACT UpEnd*

appimage-signed:
    FROM alpine
    RUN apk add gpg gpg-agent
    RUN --secret GPG_SIGN_KEY echo "$GPG_SIGN_KEY" | gpg --import
    COPY +appimage/*.AppImage .
    RUN gpg --detach-sign --sign --armor *.AppImage
    SAVE ARTIFACT *.AppImage
    SAVE ARTIFACT *.asc

docker-minimal:
    FROM debian:bookworm
    RUN apt-get update && \
        apt-get -y install libssl3 ca-certificates && \
        apt-get clean && \
        rm -rf /var/lib/apt/lists/*
    DO +DOCKER_COMMON
    ARG tag=trunk
    SAVE IMAGE --push upend/upend:$tag-minimal

docker:
    FROM debian:bookworm
    RUN apt-get update && \
        apt-get -y install --no-install-recommends ffmpeg wget libssl3 ca-certificates && \
        wget https://github.com/bbc/audiowaveform/releases/download/1.8.1/audiowaveform_1.8.1-1-12_amd64.deb && \
        apt-get -y install ./audiowaveform_1.8.1-1-12_amd64.deb && \
        rm -v audiowaveform_1.8.1-1-12_amd64.deb && \
        apt-get remove -y wget && \
        apt-get clean && \
        rm -rf /var/lib/apt/lists/*
    DO +DOCKER_COMMON
    ARG tag=trunk
    SAVE IMAGE --push upend/upend:$tag

DOCKER_COMMON:
    FUNCTION
    COPY +upend-bin/upend /usr/bin/upend
    COPY --dir +webui/dist /usr/share/upend/webui
    ENTRYPOINT ["/usr/bin/upend"]
    CMD ["serve", "/vault", "--bind", "0.0.0.0:8093"]
    EXPOSE 8093
    ENV UPEND_NO_DESKTOP=true
    ENV UPEND_ALLOW_HOST='*'
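    # Usage sketch (illustration, not in the original Earthfile): images built via
    # DOCKER_COMMON serve a vault mounted at /vault and listen on 8093, e.g.
    #   docker run -p 8093:8093 -v /path/to/vault:/vault upend/upend:trunk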
# CI targets

lint:
    WAIT
        BUILD +lint-backend
        BUILD +lint-frontend
        BUILD +lint-jslib
    END

lint-backend:
    FROM +base-backend
    CACHE --id=rust-target target
    RUN cargo clippy --workspace

lint-frontend:
    FROM +base-frontend
    RUN pnpm check && pnpm lint

lint-jslib:
    FROM +base-node
    WORKDIR sdks/js
    RUN pnpm lint

audit:
    WAIT
        BUILD +audit-backend
        BUILD +audit-frontend
    END

audit-backend:
    FROM +base-backend
    CACHE --id=rust-target target
    RUN cargo audit --workspace

audit-frontend:
    FROM +base-frontend
    RUN pnpm audit

test:
    WAIT
        BUILD +test-backend
        BUILD +test-jslib
    END

test-backend:
    FROM +base-backend
    CACHE --id=rust-target target
    RUN cargo nextest run --workspace

test-jslib:
    FROM +base-node
    WORKDIR sdks/js
    RUN pnpm build && pnpm test

# Deployment targets

deploy-appimage-nightly:
    FROM alpine
    RUN apk add openssh-client
    RUN --secret SSH_CONFIG --secret SSH_UPLOAD_KEY --secret SSH_KNOWN_HOSTS \
        mkdir -p $HOME/.ssh && \
        echo "$SSH_CONFIG" > $HOME/.ssh/config && \
        echo "$SSH_UPLOAD_KEY" > $HOME/.ssh/id_rsa && \
        echo "$SSH_KNOWN_HOSTS" > $HOME/.ssh/known_hosts && \
        chmod 600 $HOME/.ssh/*
    COPY +appimage-signed/* .
    RUN --push scp -v *.AppImage *.asc mainsite:releases/nightly

publish-js-all:
    WAIT
        BUILD +publish-js-wasm
        BUILD +publish-js-lib
    END

publish-js-lib:
    FROM +base-npm-publish
    WORKDIR /upend/sdks/js
    DO +NPM_PUBLISH --pkg_name=@upnd/upend

publish-js-wasm:
    FROM +base-npm-publish
    WORKDIR /upend/wasm/pkg-web
    DO +NPM_PUBLISH --pkg_name=@upnd/wasm-web
    WORKDIR /upend/wasm/pkg-node
    DO +NPM_PUBLISH --pkg_name=@upnd/wasm-node

base-npm-publish:
    FROM +base-node
    RUN --secret NPM_TOKEN echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > $HOME/.npmrc
    COPY +jslib/dist sdks/js/dist

NPM_PUBLISH:
    FUNCTION
    ARG pkg_name
    IF --no-cache [ "`npm view $pkg_name version`" != "`node -p \"require('./package.json').version\"`" ]
        RUN echo "Publishing $pkg_name to npm..."
        RUN --push npm publish --access public
    ELSE
        RUN echo "Nothing to do for $pkg_name."
    END
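    # Usage sketch (illustration): the function is invoked as
    #   DO +NPM_PUBLISH --pkg_name=@upnd/upend
    # (see publish-js-lib above); the IF guard compares the registry version
    # against package.json, so re-running the publish targets is idempotent.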
# Utility targets

git-version:
    FROM debian:bookworm
    RUN apt-get update && \
        apt-get -y install git && \
        apt-get clean && \
        rm -rf /var/lib/apt/lists/*
    COPY build/get_version.sh build/get_version.sh
    COPY .git .git
    RUN ./build/get_version.sh > /tmp/upend_version.txt && cat /tmp/upend_version.txt
    SAVE ARTIFACT /tmp/upend_version.txt version.txt

changelog:
    FROM orhunp/git-cliff
    COPY .git .git
    RUN git-cliff --bump -o CHANGELOG.md
    SAVE ARTIFACT CHANGELOG.md

current-changelog:
    FROM orhunp/git-cliff
    COPY .git .git
    RUN git-cliff --current -o CHANGELOG_CURRENT.md
    SAVE ARTIFACT CHANGELOG_CURRENT.md

update-changelog:
    LOCALLY
    COPY +changelog/CHANGELOG.md .
    RUN git add CHANGELOG.md && git commit -m "release: Update CHANGELOG"
    RUN --push git push

dev-local:
    FROM debian:bookworm
    COPY +jslib/dist /js-dist
    COPY +wasmlib/pkg-web /wasm-web
    COPY +wasmlib/pkg-node /wasm-node
    SAVE ARTIFACT /js-dist AS LOCAL sdks/js/dist
    SAVE ARTIFACT /wasm-web AS LOCAL wasm/pkg-web
    SAVE ARTIFACT /wasm-node AS LOCAL wasm/pkg-node

dev-update-sdk:
    LOCALLY
    WORKDIR sdks/js
    RUN pnpm build
    WORKDIR webui
    RUN pnpm install

51
Makefile Normal file
View File

@@ -0,0 +1,51 @@
all: package

package: backend frontend
	rm -fr dist
	linuxdeploy-x86_64.AppImage --appdir dist
	cp target/release/upend dist/usr/bin/upend
	cp -r webui/public dist/usr/bin/webui
	cp media/upend.png dist/usr/share/icons/upend.png
	VERSION="$$(grep '^version' Cargo.toml|grep -Eo '[0-9]+\.[0-9]+\.[0-9]+')" \
		linuxdeploy-x86_64.AppImage --appdir dist -d upend.desktop --output appimage

backend: target/release/upend

target/release/upend:
	cargo build --release

tools/upend_js/index.js:
	cd tools/upend_js && yarn install && yarn build

frontend: tools/upend_js/index.js
	cd webui && yarn add ../tools/upend_js && yarn install && yarn build

lint: backend_lint frontend_lint

backend_lint:
	cargo clippy

backend_lint_no_default:
	cargo clippy --no-default-features

frontend_lint:
	cd webui && yarn add ../tools/upend_js && yarn install && yarn check && yarn lint

frontend_lib_lint:
	cd tools/upend_js && yarn install && yarn lint

backend_test:
	cargo test --workspace --verbose

backend_test_no_default:
	cargo test --no-default-features --workspace --verbose

clean:
	rm -vr target
	rm -vr webui/dist webui/public/vendor
	rm -vr tools/upend_js/*.js

update_schema:
	rm -f upend.sqlite3
	diesel migration run --migration-dir migrations/upend/
	diesel print-schema > src/database/inner/schema.rs
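# Note (assumption): `diesel` above is the Diesel CLI, which reads DATABASE_URL
# (upend.sqlite3, per the .env file added in this commit) when running migrations.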

View File

@@ -1,12 +1,11 @@
[![UpEnd](./assets/logotype.svg)](https://upend.dev)
[![CI badge](https://ci.thm.place/api/badges/thm/upend/status.svg)](https://ci.thm.place/thm/upend)
# UpEnd
UpEnd is a project born out of frustration with several long-standing limitations in personal computing, as well as the recently reinvigorated interest in personal information management, hypertext and augmented knowledge work.
The core issues / concepts it intends to address are:
1. limitations of the hierarchical structure as present in nearly all software
1. the neglect of (and unrealized potential in) the development of base OS abstractions and features
https://upend.dev
In short, UpEnd is an attempt to build a new ubiquitous storage layer for the personal computer - kind of like "the filesystem" is now, but with more advanced semantics that better reflect the inherent interconnectedness of the data, as well as its inner "meaning", which is nowadays mostly locked within so-called application silos. Namely, it should allow for more than trivial hierarchies, building on the work done on tag-based systems and transhierarchical systems, in that all data objects (which can be files but also arbitrary structures) can be *meaningfully* interrelated (e.g. multiple audio tracks being renditions of the same symphony; books as well as paintings being related to the same author/genre...), arbitrarily annotated (à la ID3 tags) and traversed according to their _connections_ - not _locations_; while not doing away with the benefits of hierarchies altogether.
More elaboration on this project can be found in my notes: https://t.mldk.cz/notes/883493cb-d722-45e6-bb1c-391ab523ac8b.html

View File

@@ -1,83 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
style="fill:none"
width="846.19855"
height="256"
version="1.1"
id="svg48"
sodipodi:docname="logotype.svg"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
inkscape:export-filename="../webext/icon.png"
inkscape:export-xdpi="24.094118"
inkscape:export-ydpi="24.094118"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs52">
<rect
x="161.36443"
y="42.186115"
width="915.60449"
height="208.29945"
id="rect615" />
</defs>
<sodipodi:namedview
id="namedview50"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
showgrid="false"
inkscape:zoom="0.33624578"
inkscape:cx="-460.97232"
inkscape:cy="350.93377"
inkscape:window-width="3436"
inkscape:window-height="1397"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg48" />
<style
id="style44">
path {
fill:none;
stroke:#0a0a0a;
stroke-width:15px;
stroke-linecap:round;
stroke-linejoin:round
}
@media (prefers-color-scheme: dark) {
path {
stroke: white;
}
}
</style>
<rect
style="fill:#002b36;fill-opacity:1;stroke:none;stroke-width:47.7208;stroke-dasharray:none;stroke-opacity:1"
id="rect941"
width="846.19855"
height="256"
x="0"
y="0"
ry="23.239944" />
<path
d="m 48.588212,53.0882 v 0 H 207.41179 m -79.41179,0 v 0 l -79.411788,79.41179 m 158.823578,0 v 0 L 128,53.0882 m 0,158.82358 v 0 V 53.0882"
id="path46"
style="stroke:#ffffff;stroke-width:21.1764;stroke-dasharray:none;stroke-opacity:1" />
<text
xml:space="preserve"
id="text613"
style="font-style:normal;font-weight:normal;font-size:180px;line-height:1.25;font-family:sans-serif;white-space:pre;shape-inside:url(#rect615);fill:#000000;fill-opacity:1;stroke:none"
transform="translate(105.21791,-28.388025)"><tspan
x="161.36523"
y="205.96308"
id="tspan696"><tspan
style="font-weight:500;font-family:'IBM Plex Sans';-inkscape-font-specification:'IBM Plex Sans Medium';fill:#ffffff"
id="tspan694">UpEnd</tspan></tspan></text>
</svg>

Before: 2.6 KiB image (preview omitted)

Binary file not shown.

Before: 8.1 KiB image (preview omitted)

View File

@@ -1,54 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
style="fill:none"
width="256"
height="256"
version="1.1"
id="svg48"
sodipodi:docname="upend_b.svg"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
inkscape:export-filename="../webext/icon.png"
inkscape:export-xdpi="24.094118"
inkscape:export-ydpi="24.094118"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs52" />
<sodipodi:namedview
id="namedview50"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
showgrid="false"
inkscape:zoom="1.2810146"
inkscape:cx="-133.87826"
inkscape:cy="98.749853"
inkscape:window-width="2329"
inkscape:window-height="1397"
inkscape:window-x="0"
inkscape:window-y="260"
inkscape:window-maximized="1"
inkscape:current-layer="svg48"
showguides="true" />
<style
id="style44">&#10; path {&#10; fill:none;&#10; stroke:#0a0a0a;&#10; stroke-width:15px;&#10; stroke-linecap:round;&#10; stroke-linejoin:round&#10; }&#10;&#10; @media (prefers-color-scheme: dark) {&#10; path {&#10; stroke: white;&#10; }&#10; }&#10; </style>
<rect
style="display:inline;fill:#002b36;fill-opacity:1;stroke:none;stroke-width:26.2477;stroke-dasharray:none;stroke-opacity:1"
id="rect941"
width="256"
height="256"
x="-256"
y="0"
ry="23.239944"
transform="scale(-1,1)" />
<path
style="color:#000000;fill:#ffffff;stroke:none;stroke-linecap:round;stroke-linejoin:round;-inkscape-stroke:none"
d="M 48.587891,53 A 10.5882,10.5882 0 0 0 38,63.587891 10.5882,10.5882 0 0 0 48.587891,74.175781 H 102.43945 L 41.101562,135.51367 a 10.5882,10.5882 0 0 0 0,14.97266 10.5882,10.5882 0 0 0 14.97461,0 L 117.41211,89.148437 V 222.41211 A 10.5882,10.5882 0 0 0 128,233 10.5882,10.5882 0 0 0 138.58789,222.41211 V 89.148437 l 61.33594,61.337893 a 10.5882,10.5882 0 0 0 14.97461,0 10.5882,10.5882 0 0 0 0,-14.97266 L 153.56055,74.175781 h 53.85156 A 10.5882,10.5882 0 0 0 218,63.587891 10.5882,10.5882 0 0 0 207.41211,53 H 128 Z"
id="path46" />
</svg>

Before: 2.5 KiB image (preview omitted)

View File

@@ -1,46 +0,0 @@
[package]
name = "upend-base"
version = "0.0.1"
homepage = "https://upend.dev/"
repository = "https://git.thm.place/thm/upend"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
[lib]
path = "src/lib.rs"
[features]
diesel = []
wasm = ["wasm-bindgen", "uuid/js"]
[dependencies]
log = "0.4"
lazy_static = "1.4.0"
diesel = { version = "1.4", features = ["sqlite"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
lexpr = "0.2.6"
cid = { version = "0.10.1", features = ["serde"] }
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
    "alloc",
    "multihash-impl",
    "sha2",
    "identity",
] }
uuid = { version = "1.4", features = ["v4", "serde"] }
url = { version = "2", features = ["serde"] }
nonempty = "0.6.0"
wasm-bindgen = { version = "0.2", optional = true }
shadow-rs = { version = "0.23", default-features = false }
[build-dependencies]
shadow-rs = { version = "0.23", default-features = false }

View File

@@ -1,3 +0,0 @@
fn main() -> shadow_rs::SdResult<()> {
    shadow_rs::new()
}

View File

@@ -1,324 +0,0 @@
use crate::entry::Attribute;
use crate::error::{AddressComponentsDecodeError, UpEndError};
use crate::hash::{
    b58_decode, b58_encode, AsMultihash, AsMultihashError, LargeMultihash, UpMultihash, IDENTITY,
};
use serde::de::Visitor;
use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryFrom;
use std::fmt;
use std::hash::Hash;
use std::str::FromStr;
use url::Url;
use uuid::Uuid;

#[cfg(feature = "wasm")]
use wasm_bindgen::prelude::*;

#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Address {
    Hash(UpMultihash),
    Uuid(Uuid),
    Attribute(Attribute),
    Url(Url),
}

#[cfg_attr(feature = "wasm", wasm_bindgen(getter_with_clone, inspectable))]
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct AddressComponents {
    pub t: String,
    pub c: Option<String>,
}

#[cfg(feature = "wasm")]
#[wasm_bindgen]
impl AddressComponents {
    #[wasm_bindgen(constructor)]
    pub fn new(t: String, c: Option<String>) -> Self {
        AddressComponents { t, c }
    }
}

/// multicodec RAW code
const RAW: u64 = 0x55;
/// multicodec UpEnd UUID code (reserved area)
const UP_UUID: u64 = 0x300001;
/// multicodec UpEnd Attribute code (reserved area)
const UP_ATTRIBUTE: u64 = 0x300000;
/// multicodec URL code (technically `http`)
const UP_URL: u64 = 0x01e0;

pub type UpCid = cid::CidGeneric<256>;

impl Address {
    pub fn encode(&self) -> Result<Vec<u8>, UpEndError> {
        let (codec, hash) = match self {
            Self::Hash(hash) => (RAW, hash.into()),
            Self::Uuid(uuid) => (
                UP_UUID,
                LargeMultihash::wrap(IDENTITY, uuid.as_bytes()).map_err(UpEndError::from_any)?,
            ),
            Self::Attribute(attribute) => (
                UP_ATTRIBUTE,
                LargeMultihash::wrap(IDENTITY, attribute.to_string().as_bytes())
                    .map_err(UpEndError::from_any)?,
            ),
            Self::Url(url) => (
                UP_URL,
                LargeMultihash::wrap(IDENTITY, url.to_string().as_bytes())
                    .map_err(UpEndError::from_any)?,
            ),
        };

        let cid = UpCid::new_v1(codec, hash);
        Ok(cid.to_bytes())
    }

    pub fn decode(buffer: &[u8]) -> Result<Self, UpEndError> {
        let cid = UpCid::try_from(buffer).map_err(|err| {
            UpEndError::AddressParseError(format!("Error decoding address: {}", err))
        })?;

        if cid.codec() == RAW {
            return Ok(Address::Hash(UpMultihash::from(*cid.hash())));
        }

        let hash = cid.hash();
        if hash.code() != IDENTITY {
            return Err(UpEndError::AddressParseError(format!(
                "Unexpected multihash code \"{}\" for codec \"{}\"",
                hash.code(),
                cid.codec()
            )));
        }

        let digest = cid.hash().digest().to_vec();
        match cid.codec() {
            UP_UUID => Ok(Address::Uuid(
                Uuid::from_slice(digest.as_slice()).map_err(UpEndError::from_any)?,
            )),
            UP_ATTRIBUTE => {
                let attribute = String::from_utf8(digest).map_err(UpEndError::from_any)?;
                if attribute.is_empty() {
                    Ok(Address::Attribute(Attribute::null()))
                } else {
                    Ok(Address::Attribute(attribute.parse()?))
                }
            }
            UP_URL => Ok(Address::Url(
                Url::parse(&String::from_utf8(digest).map_err(UpEndError::from_any)?)
                    .map_err(UpEndError::from_any)?,
            )),
            _ => Err(UpEndError::AddressParseError(
                "Error decoding address: Unknown codec.".to_string(),
            )),
        }
    }
pub fn as_components(&self) -> AddressComponents {
// TODO: make this automatically derive from `Address` definition
let (entity_type, entity_content) = match self {
Address::Hash(uphash) => ("Hash", Some(b58_encode(uphash.to_bytes()))),
Address::Uuid(uuid) => ("Uuid", Some(uuid.to_string())),
Address::Attribute(attribute) => ("Attribute", Some(attribute.to_string())),
Address::Url(url) => ("Url", Some(url.to_string())),
};
AddressComponents {
t: entity_type.to_string(),
c: entity_content,
}
}
pub fn from_components(components: AddressComponents) -> Result<Self, UpEndError> {
// TODO: make this automatically derive from `Address` definition
let address = match components {
AddressComponents { t, c } if t == "Attribute" => Address::Attribute(
c.ok_or(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::MissingValue,
))?
.parse()?,
),
AddressComponents { t, c } if t == "Url" => Address::Url(if let Some(string) = c {
Url::parse(&string).map_err(|e| {
UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::UrlDecodeError(e.to_string()),
)
})?
} else {
Err(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::MissingValue,
))?
}),
AddressComponents { t, c } if t == "Uuid" => match c {
Some(c) => c.parse()?,
None => Address::Uuid(Uuid::new_v4()),
},
AddressComponents { t, .. } => Err(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::UnknownType(t),
))?,
};
Ok(address)
}
}
impl Serialize for Address {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(b58_encode(self.encode().map_err(ser::Error::custom)?).as_str())
}
}
struct AddressVisitor;
impl<'de> Visitor<'de> for AddressVisitor {
type Value = Address;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))?;
Address::decode(bytes.as_ref())
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))
}
}
impl<'de> Deserialize<'de> for Address {
fn deserialize<D>(deserializer: D) -> Result<Address, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(AddressVisitor)
}
}
impl FromStr for Address {
type Err = UpEndError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Address::decode(
b58_decode(s)
.map_err(|e| {
UpEndError::HashDecodeError(format!("Error deserializing address: {}", e))
})?
.as_ref(),
)
}
}
impl fmt::Display for Address {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", b58_encode(self.encode().map_err(|_| fmt::Error)?))
}
}
impl fmt::Debug for Address {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Address<{}>: {}",
match self {
Address::Hash(_) => "Hash",
Address::Uuid(_) => "UUID",
Address::Attribute(_) => "Attribute",
Address::Url(_) => "URL",
},
self
)
}
}
pub trait Addressable: AsMultihash {
fn address(&self) -> Result<Address, AsMultihashError> {
Ok(Address::Hash(self.as_multihash()?))
}
}
impl<T> Addressable for T where T: AsMultihash {}
#[cfg(test)]
mod tests {
use url::Url;
use uuid::Uuid;
use crate::addressing::{Address, IDENTITY};
use crate::constants::{
TYPE_ATTRIBUTE_ADDRESS, TYPE_HASH_ADDRESS, TYPE_URL_ADDRESS, TYPE_UUID_ADDRESS,
};
use crate::hash::{LargeMultihash, UpMultihash};
use super::UpEndError;
#[test]
fn test_hash_codec() -> Result<(), UpEndError> {
let addr = Address::Hash(UpMultihash::from(
LargeMultihash::wrap(IDENTITY, &[1, 2, 3, 4, 5]).unwrap(),
));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
let addr = &*TYPE_HASH_ADDRESS;
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, &decoded);
Ok(())
}
#[test]
fn test_uuid_codec() -> Result<(), UpEndError> {
let addr = Address::Uuid(Uuid::new_v4());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
let addr = &*TYPE_UUID_ADDRESS;
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, &decoded);
Ok(())
}
#[test]
fn test_attribute_codec() -> Result<(), UpEndError> {
let addr = Address::Attribute("ATTRIBUTE".parse().unwrap());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
let addr = &*TYPE_ATTRIBUTE_ADDRESS;
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, &decoded);
Ok(())
}
#[test]
fn test_url_codec() -> Result<(), UpEndError> {
let addr = Address::Url(Url::parse("https://upend.dev/an/url/that/is/particularly/long/because/multihash/used/to/have/a/small/limit").unwrap());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
let addr = &*TYPE_URL_ADDRESS;
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, &decoded);
Ok(())
}
}
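(Note: the file above defines string round-tripping via `Display`/`FromStr`; the following sketch, which is not part of the diff, shows how the two compose. The URL is an arbitrary example value.)
use std::str::FromStr;
use upend_base::addressing::Address;
use url::Url;
fn roundtrip() -> Result<(), upend_base::error::UpEndError> {
    // `Display` yields a multibase (Base58btc) string of the CID bytes...
    let addr = Address::Url(Url::parse("https://upend.dev/").unwrap());
    let encoded = addr.to_string();
    // ...and `FromStr` decodes that string back into the same `Address`.
    let decoded = Address::from_str(&encoded)?;
    assert_eq!(addr, decoded);
    Ok(())
}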

@@ -1,3 +0,0 @@
use shadow_rs::shadow;
shadow!(build);

@@ -1,32 +0,0 @@
use crate::addressing::Address;
use crate::entry::Attribute;
use crate::entry::InvariantEntry;
use crate::hash::{LargeMultihash, UpMultihash};
/// Attribute denoting (hierarchical) relation, in the "upwards" direction. For example, a file `IN` a group, an image `IN` photos, etc.
pub const ATTR_IN: &str = "IN";
/// Attribute denoting that an entry belongs to the set relating to a given (hierarchical) relation.
/// For example, a data blob may have a label entry, and to qualify that label within the context of belonging to a given hierarchical group, that label entry and the hierarchical entry will be linked with `BY`.
pub const ATTR_BY: &str = "BY";
/// Attribute denoting that an attribute belongs to a given "tagging" entity. If an entity belongs to (`IN`) a "tagging" entity, it is expected to have attributes that are `OF` that entity.
pub const ATTR_OF: &str = "OF";
/// Attribute denoting a human readable label.
pub const ATTR_LABEL: &str = "LBL";
/// Attribute denoting the date & time an entity was noted in the database.
/// (TODO: This info can be trivially derived from existing entry timestamps, while at the same time the "Introduction problem" is still open.)
pub const ATTR_ADDED: &str = "ADDED";
/// Attribute for cross-vault unambiguous referencing of non-hashable (e.g. UUID) entities.
pub const ATTR_KEY: &str = "KEY";
lazy_static! {
pub static ref HIER_ROOT_INVARIANT: InvariantEntry = InvariantEntry {
attribute: ATTR_KEY.parse().unwrap(),
value: "HIER_ROOT".into(),
};
pub static ref HIER_ROOT_ADDR: Address = HIER_ROOT_INVARIANT.entity().unwrap();
pub static ref TYPE_HASH_ADDRESS: Address =
Address::Hash(UpMultihash::from(LargeMultihash::default()));
pub static ref TYPE_UUID_ADDRESS: Address = Address::Uuid(uuid::Uuid::nil());
pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute(Attribute::null());
pub static ref TYPE_URL_ADDRESS: Address = Address::Url(url::Url::parse("up:").unwrap());
}
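(Illustrative sketch, not part of the diff: filing an entity under the hierarchy root using the constants above. Field names follow the `Entry` struct as used by the extractors later in this diff; the provenance string is a placeholder.)
use upend_base::constants::{ATTR_IN, HIER_ROOT_ADDR};
use upend_base::entry::{Entry, EntryValue};
fn file_under_root(entity: upend_base::addressing::Address) -> Entry {
    Entry {
        entity,
        attribute: ATTR_IN.parse().unwrap(),
        value: EntryValue::Address(HIER_ROOT_ADDR.clone()),
        provenance: "EXAMPLE".to_string(),
        user: None,
        timestamp: chrono::Utc::now().naive_utc(),
    }
}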

@@ -1,51 +0,0 @@
#[derive(Debug, Clone)]
pub enum UpEndError {
HashDecodeError(String),
AddressParseError(String),
AddressComponentsDecodeError(AddressComponentsDecodeError),
EmptyAttribute,
CannotSerializeInvalid,
QueryParseError(String),
Other(String),
}
#[derive(Debug, Clone)]
pub enum AddressComponentsDecodeError {
UnknownType(String),
UrlDecodeError(String),
MissingValue,
}
impl std::fmt::Display for UpEndError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
UpEndError::HashDecodeError(err) => format!("Could not decode hash: {err}"),
UpEndError::AddressParseError(err) => format!("Error parsing address: {err}"),
UpEndError::AddressComponentsDecodeError(cde) => match cde {
AddressComponentsDecodeError::UnknownType(t) =>
format!("Unknown type: \"{t}\""),
AddressComponentsDecodeError::MissingValue =>
String::from("Address type requires a value."),
AddressComponentsDecodeError::UrlDecodeError(err) =>
format!("Couldn't decode URL: {err}"),
},
UpEndError::CannotSerializeInvalid =>
String::from("Invalid EntryValues cannot be serialized."),
UpEndError::QueryParseError(err) => format!("Error parsing query: {err}"),
UpEndError::Other(err) => format!("Unknown error: {err}"),
UpEndError::EmptyAttribute => String::from("Attribute cannot be empty."),
}
)
}
}
impl std::error::Error for UpEndError {}
impl UpEndError {
pub fn from_any<E: std::fmt::Display>(error: E) -> Self {
UpEndError::Other(error.to_string())
}
}
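(Sketch of the intended use of `from_any`, inferred from call sites elsewhere in this diff: any `Display`-able error is funneled into `UpEndError::Other`.)
fn parse_port(input: &str) -> Result<u16, UpEndError> {
    // `ParseIntError` implements `Display`, so `from_any` accepts it.
    input.parse::<u16>().map_err(UpEndError::from_any)
}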

@@ -1,181 +0,0 @@
use std::fmt;
use crate::{addressing::Address, error::UpEndError};
use multihash::Hasher;
use serde::{
de::{self, Visitor},
ser, Deserialize, Deserializer, Serialize, Serializer,
};
/// multihash SHA2-256 code
pub const SHA2_256: u64 = 0x12;
/// multihash identity code
pub const IDENTITY: u64 = 0x00;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "diesel", derive(diesel::FromSqlRow))]
pub struct UpMultihash(LargeMultihash);
impl UpMultihash {
pub fn to_bytes(&self) -> Vec<u8> {
self.0.to_bytes()
}
pub fn from_bytes<T: AsRef<[u8]>>(input: T) -> Result<Self, UpEndError> {
Ok(UpMultihash(
LargeMultihash::from_bytes(input.as_ref())
.map_err(|e| UpEndError::HashDecodeError(e.to_string()))?,
))
}
pub fn from_sha256<T: AsRef<[u8]>>(input: T) -> Result<Self, UpEndError> {
Ok(UpMultihash(
LargeMultihash::wrap(SHA2_256, input.as_ref()).map_err(UpEndError::from_any)?,
))
}
}
impl std::fmt::Display for UpMultihash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", b58_encode(self.to_bytes()))
}
}
pub(crate) type LargeMultihash = multihash::MultihashGeneric<256>;
impl From<LargeMultihash> for UpMultihash {
fn from(value: LargeMultihash) -> Self {
UpMultihash(value)
}
}
impl From<&UpMultihash> for LargeMultihash {
fn from(value: &UpMultihash) -> Self {
value.0
}
}
impl Serialize for UpMultihash {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(
b58_encode(
Address::Hash(self.clone())
.encode()
.map_err(ser::Error::custom)?,
)
.as_str(),
)
}
}
struct UpMultihashVisitor;
impl<'de> Visitor<'de> for UpMultihashVisitor {
type Value = UpMultihash;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing UpMultihash: {}", e)))?;
Ok(UpMultihash(LargeMultihash::from_bytes(&bytes).map_err(
|e| de::Error::custom(format!("Error parsing UpMultihash: {}", e)),
)?))
}
}
impl<'de> Deserialize<'de> for UpMultihash {
fn deserialize<D>(deserializer: D) -> Result<UpMultihash, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(UpMultihashVisitor)
}
}
#[cfg(feature = "diesel")]
impl diesel::types::FromSql<diesel::sql_types::Binary, diesel::sqlite::Sqlite> for UpMultihash {
fn from_sql(
bytes: Option<&<diesel::sqlite::Sqlite as diesel::backend::Backend>::RawValue>,
) -> diesel::deserialize::Result<Self> {
Ok(UpMultihash(LargeMultihash::from_bytes(
diesel::not_none!(bytes).read_blob(),
)?))
}
}
pub fn sha256hash<T: AsRef<[u8]>>(input: T) -> Result<UpMultihash, AsMultihashError> {
let mut hasher = multihash::Sha2_256::default();
hasher.update(input.as_ref());
Ok(UpMultihash(
LargeMultihash::wrap(SHA2_256, hasher.finalize())
.map_err(|e| AsMultihashError(e.to_string()))?,
))
}
pub fn b58_encode<T: AsRef<[u8]>>(vec: T) -> String {
multibase::encode(multibase::Base::Base58Btc, vec.as_ref())
}
pub fn b58_decode<T: AsRef<str>>(input: T) -> Result<Vec<u8>, UpEndError> {
let input = input.as_ref();
let (_base, data) =
multibase::decode(input).map_err(|err| UpEndError::HashDecodeError(err.to_string()))?;
Ok(data)
}
#[derive(Debug, Clone)]
pub struct AsMultihashError(pub String);
impl std::fmt::Display for AsMultihashError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::error::Error for AsMultihashError {}
impl From<std::io::Error> for AsMultihashError {
fn from(err: std::io::Error) -> Self {
AsMultihashError(err.to_string())
}
}
pub trait AsMultihash {
fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError>;
}
impl<T> AsMultihash for T
where
T: AsRef<[u8]>,
{
fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError> {
sha256hash(self)
}
}
#[cfg(test)]
mod tests {
use crate::hash::{b58_decode, b58_encode};
#[test]
fn test_encode_decode() {
let content = "Hello, World!".as_bytes();
let encoded = b58_encode(content);
let decoded = b58_decode(encoded);
assert!(decoded.is_ok());
assert_eq!(content, decoded.unwrap());
}
}
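(Minimal sketch, not in the diff, tying the helpers together: any `AsRef<[u8]>` value gets a SHA2-256 `UpMultihash` via the blanket `AsMultihash` impl, and `Addressable` from the addressing module lifts that into an `Address::Hash`.)
use upend_base::addressing::Addressable;
use upend_base::hash::{b58_encode, AsMultihash, AsMultihashError};
fn demo() -> Result<(), AsMultihashError> {
    let data = b"Hello, World!";
    // Blanket impl: SHA2-256 multihash of the raw bytes.
    let hash = data.as_multihash()?;
    println!("digest:  {}", b58_encode(hash.to_bytes()));
    // `Addressable` wraps the multihash as a content address.
    println!("address: {}", data.address()?);
    Ok(())
}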

@@ -1,10 +0,0 @@
#[macro_use]
extern crate lazy_static;
pub mod addressing;
pub mod common;
pub mod constants;
pub mod entry;
pub mod error;
pub mod hash;
pub mod lang;

@@ -1,3 +1,3 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
fn main() {
built::write_built_file().expect("Failed to acquire build-time information");
}
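(This hunk swaps `shadow_rs` for the `built` crate in a build script. On the consuming side, `built` output is conventionally pulled in as below; the consuming module is not shown in this diff, so the module name here is an assumption.)
// Conventional `built` usage: include the generated file and read
// constants such as `PKG_VERSION` off the resulting module.
pub mod build_info {
    include!(concat!(env!("OUT_DIR"), "/built.rs"));
}
fn version() -> &'static str {
    build_info::PKG_VERSION
}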

@@ -1,38 +0,0 @@
# appimage-builder recipe see https://appimage-builder.readthedocs.io for details
version: 1
AppDir:
path: AppDir
app_info:
id: upend
name: UpEnd
icon: upend
version: latest
exec: usr/bin/upend
exec_args: $@
apt:
arch:
- amd64
allow_unauthenticated: true
sources:
- sourceline: deb http://deb.debian.org/debian/ bookworm main non-free-firmware
      - sourceline: deb http://security.debian.org/debian-security bookworm-security main non-free-firmware
      - sourceline: deb http://deb.debian.org/debian/ bookworm-updates main non-free-firmware
include:
- libssl3
- libc6:amd64
- locales
files:
include:
- lib64/ld-linux-x86-64.so.2
exclude:
- usr/share/man
- usr/share/doc/*/README.*
- usr/share/doc/*/changelog.*
- usr/share/doc/*/NEWS.*
- usr/share/doc/*/TODO.*
AppImage:
arch: x86_64
update-information: guess

@@ -1,13 +0,0 @@
#!/bin/sh
which git > /dev/null || {
echo "git not found"
exit 1
}
git_tag=$(git describe --tags --exact-match HEAD 2>/dev/null)
if [ -z "$git_tag" ]; then
echo "dev_$(git rev-parse --short HEAD)"
else
echo "$git_tag" | sed -e 's/^v//g'
fi

@@ -1,3 +0,0 @@
FROM alpine
RUN apk add git gpg gpg-agent openssh-client curl
RUN sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin

@@ -1,3 +0,0 @@
FROM node:lts
RUN sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
RUN npm install -g pnpm

@@ -1,8 +0,0 @@
FROM upend-rust
RUN apt-get update && apt-get -y install wget curl file && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN wget https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage && \
chmod +x linuxdeploy-x86_64.AppImage && \
./linuxdeploy-x86_64.AppImage --appimage-extract && \
ln -s $PWD/squashfs-root/AppRun /usr/local/bin/linuxdeploy-x86_64.AppImage

@@ -1,4 +0,0 @@
FROM rust:bookworm
RUN sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
RUN cargo install wasm-pack && rustup target add wasm32-unknown-unknown
RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin

@@ -1,113 +0,0 @@
[package]
name = "upend-cli"
authors = ["Tomáš Mládek <t@mldk.cz>"]
version = "0.1.0"
edition = "2021"
[[bin]]
name = "upend"
path = "src/main.rs"
[dependencies]
upend-base = { path = "../base" }
upend-db = { path = "../db" }
clap = { version = "4.2.4", features = ["derive", "env", "color", "string", "cargo"] }
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
thiserror = "1.0"
rayon = "1.4.0"
num_cpus = "1.13"
futures = "0.3.24"
futures-util = "~0.3.12"
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
"sqlite",
"r2d2",
"chrono",
"serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
actix = "0.13"
actix-files = "0.6"
actix-rt = "2"
actix-web = "4"
actix_derive = "0.6"
actix-cors = "0.6"
actix-multipart = "0.6.0"
jsonwebtoken = "8"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"
regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "1.4", features = ["v4"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"
rand = "0.8"
mime = "^0.3.16"
tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }
opener = { version = "^0.5.0", optional = true }
is_executable = { version = "1.0.1", optional = true }
webbrowser = { version = "^0.5.5", optional = true }
nonempty = "0.6.0"
image = { version = "0.23.14", optional = true }
webp = { version = "0.2.0", optional = true }
webpage = { version = "1.5.0", optional = true, default-features = false }
id3 = { version = "1.0.2", optional = true }
kamadak-exif = { version = "0.5.4", optional = true }
shadow-rs = { version = "0.23", default-features = false }
reqwest = { version = "0.11.16", features = ["blocking", "json"] }
url = "2"
bytes = "1.4.0"
signal-hook = "0.3.15"
actix-web-lab = { version = "0.20.2", features = ["spa"] }
[build-dependencies]
shadow-rs = { version = "0.23", default-features = false }
[features]
default = [
"desktop",
"previews",
"previews-image",
"extractors-web",
"extractors-audio",
"extractors-exif",
"extractors-media",
]
desktop = ["webbrowser", "opener", "is_executable"]
previews = []
previews-image = ["image", "webp", "kamadak-exif"]
extractors-web = ["webpage"]
extractors-audio = ["id3"]
extractors-exif = ["kamadak-exif"]
extractors-media = []

@@ -1,3 +0,0 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
}

@@ -1,34 +0,0 @@
use std::env::current_exe;
use std::path::PathBuf;
use lazy_static::lazy_static;
use shadow_rs::{is_debug, shadow};
shadow!(build);
lazy_static! {
pub static ref RESOURCE_PATH: PathBuf = if is_debug() {
let project_root = build::CARGO_MANIFEST_DIR.parse::<PathBuf>().unwrap();
project_root.join("./tmp/resources")
} else {
current_exe()
.unwrap()
.parent()
.unwrap()
.join("../share/upend")
};
pub static ref WEBUI_PATH: PathBuf = RESOURCE_PATH.join("webui");
static ref APP_USER_AGENT: String = format!("upend / {}", build::PKG_VERSION);
pub static ref REQWEST_CLIENT: reqwest::blocking::Client = reqwest::blocking::Client::builder()
.user_agent(APP_USER_AGENT.as_str())
.build()
.unwrap();
pub static ref REQWEST_ASYNC_CLIENT: reqwest::Client = reqwest::Client::builder()
.user_agent(APP_USER_AGENT.as_str())
.build()
.unwrap();
}
pub fn get_version() -> &'static str {
option_env!("UPEND_VERSION").unwrap_or("unknown")
}

@@ -1,7 +0,0 @@
#[derive(Clone, Debug)]
pub struct UpEndConfig {
pub vault_name: Option<String>,
pub desktop_enabled: bool,
pub trust_executables: bool,
pub secret: String,
}
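(Illustrative construction only; the values below are placeholders, not defaults defined anywhere in this diff.)
fn example_config() -> UpEndConfig {
    UpEndConfig {
        vault_name: Some("my-vault".to_string()),
        desktop_enabled: true,
        trust_executables: false,
        secret: "change-me".to_string(),
    }
}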

@@ -1,191 +0,0 @@
use std::io::Write;
use std::sync::Arc;
use super::Extractor;
use anyhow::{anyhow, Result};
use lazy_static::lazy_static;
use upend_base::{
addressing::Address,
constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
entry::{Entry, EntryValue, InvariantEntry},
};
use upend_db::stores::Blob;
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
BlobMode, OperationContext, UpEndConnection,
};
lazy_static! {
pub static ref ID3_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
attribute: ATTR_KEY.parse().unwrap(),
value: "TYPE_ID3".into(),
};
pub static ref ID3_TYPE_LABEL: Entry = Entry {
entity: ID3_TYPE_INVARIANT.entity().unwrap(),
attribute: ATTR_LABEL.parse().unwrap(),
value: "ID3".into(),
provenance: "INVARIANT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
};
}
pub struct ID3Extractor;
impl Extractor for ID3Extractor {
fn get(
&self,
address: &Address,
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,
&format!(
r#"Getting ID3 info from "{:}""#,
file_path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy()
),
)?;
let tags = id3::Tag::read_from_path(file_path)?;
let mut result: Vec<Entry> = vec![];
for frame in tags.frames() {
if let id3::Content::Text(text) = frame.content() {
result.extend(vec![
Entry {
entity: address.clone(),
attribute: format!("ID3_{}", frame.id()).parse()?,
value: match frame.id() {
"TYER" | "TBPM" => EntryValue::guess_from(text),
_ => text.clone().into(),
},
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
Entry {
entity: Address::Attribute(format!("ID3_{}", frame.id()).parse()?),
attribute: ATTR_LABEL.parse().unwrap(),
value: format!("ID3: {}", frame.name()).into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
}
}
let mut has_pictures = false;
for (idx, picture) in tags.pictures().enumerate() {
let tmp_dir = tempfile::tempdir()?;
let tmp_path = tmp_dir.path().join(format!("img-{}", idx));
let mut file = std::fs::File::create(&tmp_path)?;
file.write_all(&picture.data)?;
let hash = store.store(
connection,
Blob::from_filepath(&tmp_path),
None,
Some(BlobMode::StoreOnly),
context.clone(),
)?;
result.push(Entry {
entity: address.clone(),
attribute: "ID3_PICTURE".parse()?,
value: EntryValue::Address(Address::Hash(hash)),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
});
has_pictures = true;
}
if has_pictures {
result.push(Entry {
entity: Address::Attribute("ID3_PICTURE".parse()?),
attribute: ATTR_LABEL.parse().unwrap(),
value: "ID3 Embedded Image".into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
}
if !result.is_empty() {
result.extend(
result
.iter()
.filter(|e| e.attribute != ATTR_LABEL)
.map(|e| Entry {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
.collect::<Vec<Entry>>(),
);
result.extend(vec![
(&ID3_TYPE_INVARIANT as &InvariantEntry).try_into().unwrap(),
ID3_TYPE_LABEL.clone(),
Entry {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
}
let _ = job_handle.update_state(JobState::Done);
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])
}
}
fn is_needed(&self, address: &Address, connection: &UpEndConnection) -> Result<bool> {
let is_audio = connection.retrieve_object(address)?.iter().any(|e| {
if e.attribute == FILE_MIME_KEY {
if let EntryValue::String(mime) = &e.value {
return mime.starts_with("audio") || mime == "application/x-riff";
}
}
false
});
if !is_audio {
return Ok(false);
}
let is_extracted = !connection
.query(format!("(matches @{} (contains \"ID3\") ?)", address).parse()?)?
.is_empty();
if is_extracted {
return Ok(false);
}
Ok(true)
}
}
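(Hypothetical driver, not part of the diff, showing the intended call order of the `Extractor` trait as implemented above: consult `is_needed` first, then `get`.)
fn run_id3(
    address: &Address,
    connection: &UpEndConnection,
    store: Arc<Box<dyn UpStore + Send + Sync>>,
    jobs: JobContainer,
) -> Result<Vec<Entry>> {
    if ID3Extractor.is_needed(address, connection)? {
        // `get` reads the blob from the store and emits ID3_* entries.
        ID3Extractor.get(address, connection, store, jobs, OperationContext::default())
    } else {
        Ok(vec![])
    }
}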

@@ -1,173 +0,0 @@
use std::sync::Arc;
use super::Extractor;
use anyhow::{anyhow, Result};
use lazy_static::lazy_static;
use upend_base::entry::Attribute;
use upend_base::{
addressing::Address,
constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
entry::{Entry, EntryValue, InvariantEntry},
};
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
OperationContext, UpEndConnection,
};
pub struct ExifExtractor;
// TODO: EXIF metadata is oftentimes a constant/enum value. What's the proper
// model for enum-like values in UpEnd?
lazy_static! {
pub static ref EXIF_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
attribute: ATTR_KEY.parse().unwrap(),
value: "TYPE_EXIF".into(),
};
pub static ref EXIF_TYPE_LABEL: Entry = Entry {
entity: EXIF_TYPE_INVARIANT.entity().unwrap(),
attribute: ATTR_LABEL.parse().unwrap(),
value: "EXIF".into(),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None
};
}
impl Extractor for ExifExtractor {
fn get(
&self,
address: &Address,
_connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,
&format!(
r#"Getting EXIF info from "{:}""#,
file_path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy()
),
)?;
let file = std::fs::File::open(file_path)?;
let mut bufreader = std::io::BufReader::new(&file);
let exifreader = exif::Reader::new();
let exif = exifreader.read_from_container(&mut bufreader)?;
let mut result: Vec<Entry> = vec![];
for field in exif
.fields()
.filter(|field| !matches!(field.value, exif::Value::Undefined(..)))
{
if let Some(tag_description) = field.tag.description() {
let attribute: Attribute = format!("EXIF_{}", field.tag.1).parse()?;
result.extend(vec![
Entry {
entity: address.clone(),
attribute: attribute.clone(),
value: match field.tag {
exif::Tag::ExifVersion => {
EntryValue::String(format!("{}", field.display_value()))
}
_ => {
EntryValue::guess_from(format!("{}", field.display_value()))
}
},
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
Entry {
entity: Address::Attribute(attribute),
attribute: ATTR_LABEL.parse().unwrap(),
value: format!("EXIF: {}", tag_description).into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
}
}
if !result.is_empty() {
result.extend(
result
.iter()
.filter(|e| e.attribute != ATTR_LABEL)
.map(|e| Entry {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
.collect::<Vec<Entry>>(),
);
result.extend(vec![
(&EXIF_TYPE_INVARIANT as &InvariantEntry)
.try_into()
.unwrap(),
EXIF_TYPE_LABEL.clone(),
Entry {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
}
let _ = job_handle.update_state(JobState::Done);
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])
}
}
fn is_needed(&self, address: &Address, connection: &UpEndConnection) -> Result<bool> {
let is_exif = connection.retrieve_object(address)?.iter().any(|e| {
if e.attribute == FILE_MIME_KEY {
if let EntryValue::String(mime) = &e.value {
return mime.starts_with("image");
}
}
false
});
if !is_exif {
return Ok(false);
}
let is_extracted = !connection
.query(format!("(matches @{} (contains \"EXIF\") ?)", address).parse()?)?
.is_empty();
if is_extracted {
return Ok(false);
}
Ok(true)
}
}

@@ -1,162 +0,0 @@
use std::{process::Command, sync::Arc};
use super::Extractor;
use anyhow::{anyhow, Result};
use lazy_static::lazy_static;
use tracing::{debug, trace};
use upend_base::{
addressing::Address,
constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
entry::{Entry, EntryValue, InvariantEntry},
};
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
OperationContext, UpEndConnection,
};
const DURATION_KEY: &str = "MEDIA_DURATION";
lazy_static! {
pub static ref MEDIA_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
attribute: ATTR_KEY.parse().unwrap(),
value: "TYPE_MEDIA".into(),
};
pub static ref MEDIA_TYPE_LABEL: Entry = Entry {
entity: MEDIA_TYPE_INVARIANT.entity().unwrap(),
attribute: ATTR_LABEL.parse().unwrap(),
value: "Multimedia".into(),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None,
};
pub static ref DURATION_OF_MEDIA: Entry = Entry {
entity: Address::Attribute(DURATION_KEY.parse().unwrap()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None,
};
}
pub struct MediaExtractor;
impl Extractor for MediaExtractor {
fn get(
&self,
address: &Address,
_connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,
&format!(
r#"Getting media info from "{:}""#,
file_path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy()
),
)?;
// https://superuser.com/a/945604/409504
let mut ffprobe = Command::new("ffprobe");
let command = ffprobe
.args(["-v", "error"])
.args(["-show_entries", "format=duration"])
.args(["-of", "default=noprint_wrappers=1:nokey=1"])
.arg(file_path);
trace!("Running `{:?}`", command);
let now = std::time::Instant::now();
let ffprobe_cmd = command.output()?;
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !ffprobe_cmd.status.success() {
return Err(anyhow!(
"Failed to retrieve file duration: {:?}",
String::from_utf8_lossy(&ffprobe_cmd.stderr)
));
}
let duration = String::from_utf8(ffprobe_cmd.stdout)?
.trim()
.parse::<f64>()?;
let result = vec![
Entry {
entity: address.clone(),
attribute: DURATION_KEY.parse().unwrap(),
value: EntryValue::Number(duration),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
(&MEDIA_TYPE_INVARIANT as &InvariantEntry)
.try_into()
.unwrap(),
MEDIA_TYPE_LABEL.clone(),
DURATION_OF_MEDIA.clone(),
Entry {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
];
let _ = job_handle.update_state(JobState::Done);
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])
}
}
fn is_needed(&self, address: &Address, connection: &UpEndConnection) -> Result<bool> {
let is_media = connection.retrieve_object(address)?.iter().any(|e| {
if e.attribute == FILE_MIME_KEY {
if let EntryValue::String(mime) = &e.value {
return mime.starts_with("audio") || mime.starts_with("video");
}
}
if e.attribute == ATTR_LABEL {
if let EntryValue::String(label) = &e.value {
let label = label.to_lowercase();
return label.ends_with(".ogg")
|| label.ends_with(".mp3")
|| label.ends_with(".wav");
}
}
false
});
if !is_media {
return Ok(false);
}
let is_extracted = !connection
.query(format!("(matches @{} (contains \"{}\") ?)", address, DURATION_KEY).parse()?)?
.is_empty();
if is_extracted {
return Ok(false);
}
Ok(true)
}
}

@@ -1,172 +0,0 @@
use std::sync::Arc;
use super::Extractor;
use crate::common::REQWEST_CLIENT;
use anyhow::anyhow;
use anyhow::Result;
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::constants::ATTR_OF;
use upend_base::constants::TYPE_URL_ADDRESS;
use upend_base::entry::Entry;
use upend_base::entry::EntryValue;
use upend_db::jobs::JobContainer;
use upend_db::jobs::JobState;
use upend_db::stores::UpStore;
use upend_db::{OperationContext, UpEndConnection};
use webpage::HTML;
pub struct WebExtractor;
impl Extractor for WebExtractor {
fn get(
&self,
address: &Address,
_connection: &UpEndConnection,
_store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Url(url) = address {
let mut job_handle =
job_container.add_job(None, &format!("Getting info about {url:?}"))?;
let response = REQWEST_CLIENT.get(url.clone()).send()?;
let html = HTML::from_string(response.text()?, Some(url.to_string()));
if let Ok(html) = html {
let _ = job_handle.update_progress(50.0);
let mut entries = vec![
html.title.as_ref().map(|html_title| Entry {
entity: address.clone(),
attribute: "HTML_TITLE".parse().unwrap(),
value: html_title.clone().into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
html.title.map(|html_title| Entry {
entity: address.clone(),
attribute: ATTR_LABEL.parse().unwrap(),
value: html_title.into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
html.description.map(|html_desc| Entry {
entity: address.clone(),
attribute: "HTML_DESCRIPTION".parse().unwrap(),
value: html_desc.into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
];
for (key, value) in html.opengraph.properties {
let attribute = format!("OG_{}", key.to_uppercase());
if attribute == "OG_TITLE" {
entries.push(Some(Entry {
entity: address.clone(),
attribute: ATTR_LABEL.parse()?,
value: value.clone().into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}));
}
entries.push(Some(Entry {
entity: address.clone(),
attribute: attribute.parse()?,
value: value.into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}));
}
for image in html.opengraph.images {
entries.push(Some(Entry {
entity: address.clone(),
attribute: "OG_IMAGE".parse()?,
value: image.url.into(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}))
}
let _ = job_handle.update_state(JobState::Done);
return Ok(entries
.into_iter()
.flatten()
.flat_map(|e| {
vec![
Entry {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(TYPE_URL_ADDRESS.clone()),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
e,
]
})
.collect());
}
Err(anyhow!("Failed for unknown reason."))
} else {
Ok(vec![])
}
}
fn is_needed(&self, address: &Address, connection: &UpEndConnection) -> Result<bool> {
Ok(connection
.query(
format!(r#"(matches @{address} (in "HTML_TITLE" "HTML_DESCRIPTION") ?)"#)
.parse()?,
)?
.is_empty())
}
}
#[cfg(test)]
mod test {
use upend_db::jobs::JobContainer;
use upend_db::stores::fs::FsStore;
use url::Url;
use super::*;
use anyhow::Result;
use std::sync::Arc;
use tempfile::TempDir;
#[test]
fn test_extract() -> Result<()> {
let temp_dir = TempDir::new().unwrap();
let open_result = upend_db::UpEndDatabase::open(&temp_dir, true)?;
let connection = open_result.db.connection()?;
let store =
Arc::new(Box::new(FsStore::from_path(&temp_dir)?) as Box<dyn UpStore + Sync + Send>);
let job_container = JobContainer::new();
let address = Address::Url(Url::parse("https://upend.dev").unwrap());
assert!(WebExtractor.is_needed(&address, &connection)?);
WebExtractor.insert_info(
&address,
&connection,
store,
job_container,
OperationContext::default(),
)?;
assert!(!WebExtractor.is_needed(&address, &connection)?);
Ok(())
}
}

@@ -1,558 +0,0 @@
#[macro_use]
extern crate upend_db;
use crate::common::{REQWEST_ASYNC_CLIENT, WEBUI_PATH};
use crate::config::UpEndConfig;
use actix_web::HttpServer;
use anyhow::Result;
use clap::{Args, CommandFactory, FromArgMatches, Parser, Subcommand, ValueEnum};
use filebuffer::FileBuffer;
use rand::{thread_rng, Rng};
use regex::Captures;
use regex::Regex;
use reqwest::Url;
use serde_json::json;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::path::Path;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use tracing::trace;
use tracing::{debug, error, info, warn};
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
use upend_base::addressing::Address;
use upend_base::entry::EntryValue;
use upend_base::hash::{sha256hash, UpMultihash};
use upend_db::jobs::JobContainer;
use upend_db::stores::fs::FsStore;
use upend_db::stores::UpStore;
use upend_db::{BlobMode, OperationContext, UpEndDatabase};
use crate::util::exec::block_background;
mod common;
mod config;
mod routes;
mod serve;
mod util;
mod extractors;
mod previews;
#[derive(Debug, Parser)]
#[command(name = "upend", author)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Perform a query against an UpEnd server instance.
Query {
/// URL of the UpEnd instance to query.
#[arg(short, long, default_value = "http://localhost:8093")]
url: Url,
/// The query itself, in L-expression format; prefix a filepath by `@=` to insert its hash in its place.
query: String,
/// Output format
#[arg(short, long, default_value = "tsv")]
format: OutputFormat,
},
Get {
/// URL of the UpEnd instance to query.
#[arg(short, long, default_value = "http://localhost:8093")]
url: Url,
/// The address of the entity; prefix a filepath by `=` to insert its hash.
entity: String,
/// The attribute to get the value(s) of. Optional.
attribute: Option<String>,
/// Output format
#[arg(short, long, default_value = "tsv")]
format: OutputFormat,
},
/// Insert an entry into an UpEnd server instance.
Insert {
/// URL of the UpEnd instance to query.
#[arg(short, long, default_value = "http://localhost:8093")]
url: Url,
/// The address of the entity; prefix a filepath by `=` to insert its hash.
entity: String,
/// The attribute of the entry.
attribute: String,
/// The value; its type will be heuristically determined.
value: String,
/// Output format
#[arg(short, long, default_value = "tsv")]
format: OutputFormat,
},
/// Get the address of a file, attribute, or URL.
Address {
/// Type of input to be addressed
_type: AddressType,
/// Path to a file, hash...
input: String,
/// Output format
#[arg(short, long, default_value = "tsv")]
format: OutputFormat,
},
/// Start an UpEnd server instance.
Serve(ServeArgs),
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
enum OutputFormat {
/// JSON
Json,
/// Tab Separated Values
Tsv,
/// Raw, as received from the server
Raw,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
enum AddressType {
/// Hash a file and output its address.
File,
/// Compute an address from the output of `sha256sum`
Sha256sum,
}
#[derive(Debug, Args)]
struct ServeArgs {
/// Directory to serve a vault from.
#[arg()]
directory: PathBuf,
/// Address and port to bind the Web interface on.
#[arg(long, default_value = "127.0.0.1:8093")]
bind: String,
/// Path to blob store ($VAULT_PATH by default).
#[arg(long)]
store_path: Option<PathBuf>,
/// Do not open a web browser with the UI.
#[arg(long)]
no_browser: bool,
/// Disable desktop features (web browser, native file opening).
#[arg(long, env = "UPEND_NO_DESKTOP")]
no_desktop: bool,
/// Trust the vault, and open local executable files.
#[arg(long)]
trust_executables: bool,
/// Do not serve the web UI.
#[arg(long)]
no_ui: bool,
/// Do not run a database update on start.
#[arg(long)]
no_initial_update: bool,
/// Which mode to use for rescanning the vault.
#[arg(long)]
rescan_mode: Option<BlobMode>,
/// Clean up temporary files (e.g. previews) on start.
#[arg(long)]
clean: bool,
/// Delete and initialize database, if it exists already.
#[arg(long)]
reinitialize: bool,
/// Name of the vault.
#[arg(long, env = "UPEND_VAULT_NAME")]
vault_name: Option<String>,
/// Secret to use for authentication.
#[arg(long, env = "UPEND_SECRET")]
secret: Option<String>,
/// Allowed host/domain name the API can serve.
#[arg(long, env = "UPEND_ALLOW_HOST")]
allow_host: Vec<String>,
}
#[actix_web::main]
async fn main() -> Result<()> {
let command = Cli::command().version(crate::common::get_version());
let args = Cli::from_arg_matches(&command.get_matches())?;
tracing_subscriber::fmt()
.with_env_filter(
EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy(),
)
.init();
match args.command {
Commands::Query { url, query, format } => {
let re = Regex::new(r#"@(="([^"]+)"|=([^ ]+))"#).unwrap();
let query = re
.replace_all(&query, |caps: &Captures| {
if let Some(filepath_match) = caps.get(2).or_else(|| caps.get(3)) {
let address = hash_path(filepath_match.as_str()).unwrap();
format!("@{}", address)
} else {
panic!("Error preprocessing query. Captures: {:?}", caps)
}
})
.to_string();
let api_url = url.join("/api/query")?;
debug!("Querying \"{}\": {}", api_url, query);
let response = REQWEST_ASYNC_CLIENT
.post(api_url)
.body(query)
.send()
.await?;
response.error_for_status_ref()?;
print_response_entries(response, format).await?;
Ok(())
}
Commands::Get {
url,
entity,
attribute,
format,
} => {
let response = if let Some(attribute) = attribute {
let api_url = url.join("/api/query")?;
let entity = match entity {
entity if entity.starts_with('=') => hash_path(&entity[1..])?.to_string(),
entity if entity.starts_with("http") => {
Address::Url(entity.parse()?).to_string()
}
_ => entity,
};
let query = format!("(matches @{} \"{}\" ?)", entity, attribute);
debug!("Querying \"{}\": {}", api_url, query);
REQWEST_ASYNC_CLIENT
.post(api_url)
.body(query)
.send()
.await?
} else {
let entity = match entity {
entity if entity.starts_with('=') => hash_path(&entity[1..])?.to_string(),
_ => todo!("Only GETting blobs (files) is implemented."),
};
let api_url = url.join(&format!("/api/obj/{entity}"))?;
debug!("Getting object \"{}\" from {}", entity, api_url);
REQWEST_ASYNC_CLIENT.get(api_url).send().await?
};
response.error_for_status_ref()?;
print_response_entries(response, format).await?;
Ok(())
}
Commands::Insert {
url,
entity,
attribute,
value,
format: _,
} => {
let api_url = url.join("/api/obj")?;
let entity = match entity {
entity if entity.starts_with('=') => hash_path(&entity[1..])?.to_string(),
entity if entity.starts_with("http") => Address::Url(entity.parse()?).to_string(),
_ => entity,
};
let value = EntryValue::guess_from(value);
let body = json!({
"entity": entity,
"attribute": attribute,
"value": value
});
debug!("Inserting {:?} at \"{}\"", body, api_url);
let response = REQWEST_ASYNC_CLIENT.put(api_url).json(&body).send().await?;
match response.error_for_status_ref() {
Ok(_) => {
let data: Vec<String> = response.json().await?;
Ok(println!("{}", data[0]))
}
Err(err) => {
error!("{}", response.text().await?);
Err(err.into())
}
}
}
Commands::Address {
_type,
input,
format,
} => {
let address = match _type {
AddressType::File => hash_path(&input)?,
AddressType::Sha256sum => {
let digest = multibase::Base::Base16Lower.decode(input)?;
Address::Hash(UpMultihash::from_sha256(digest).unwrap())
}
};
match format {
OutputFormat::Json => Ok(println!("\"{}\"", address)),
OutputFormat::Tsv | OutputFormat::Raw => Ok(println!("{}", address)),
}
}
Commands::Serve(args) => {
info!("Starting UpEnd {}...", common::build::PKG_VERSION);
let term_now = Arc::new(std::sync::atomic::AtomicBool::new(false));
for sig in signal_hook::consts::TERM_SIGNALS {
signal_hook::flag::register_conditional_shutdown(*sig, 1, Arc::clone(&term_now))?;
signal_hook::flag::register(*sig, Arc::clone(&term_now))?;
}
let job_container = JobContainer::new();
let vault_path = args.directory;
let open_result = UpEndDatabase::open(&vault_path, args.reinitialize)
.expect("failed to open database!");
let upend = Arc::new(open_result.db);
let store = Arc::new(Box::new(
FsStore::from_path(args.store_path.unwrap_or_else(|| vault_path.clone())).unwrap(),
) as Box<dyn UpStore + Send + Sync>);
let webui_enabled = if args.no_ui {
false
} else {
let exists = WEBUI_PATH.exists();
if !exists {
warn!(
"Couldn't locate Web UI directory ({:?}), disabling...",
*WEBUI_PATH
);
}
exists
};
let browser_enabled = !args.no_desktop && webui_enabled && !args.no_browser;
let preview_path = upend.path.join("previews");
#[cfg(feature = "previews")]
let preview_store = Some(Arc::new(crate::previews::PreviewStore::new(
preview_path.clone(),
store.clone(),
)));
#[cfg(feature = "previews")]
let preview_thread_pool = Some(Arc::new(
rayon::ThreadPoolBuilder::new()
.num_threads(num_cpus::get() / 2)
.build()
.unwrap(),
));
if args.clean {
info!("Cleaning temporary directories...");
if preview_path.exists() {
std::fs::remove_dir_all(&preview_path).unwrap();
debug!("Removed {preview_path:?}");
} else {
debug!("No preview path exists, continuing...");
}
}
#[cfg(not(feature = "previews"))]
let preview_store = None;
#[cfg(not(feature = "previews"))]
let preview_thread_pool = None;
let mut bind: SocketAddr = args.bind.parse().expect("Incorrect bind format.");
let secret = args.secret.unwrap_or_else(|| {
warn!("No secret supplied, generating one at random.");
thread_rng()
.sample_iter(&rand::distributions::Alphanumeric)
.take(32)
.map(char::from)
.collect()
});
let state = routes::State {
upend: upend.clone(),
store,
job_container: job_container.clone(),
preview_store,
preview_thread_pool,
config: UpEndConfig {
vault_name: Some(args.vault_name.unwrap_or_else(|| {
vault_path
.iter()
.last()
.unwrap()
.to_string_lossy()
.into_owned()
})),
desktop_enabled: !args.no_desktop,
trust_executables: args.trust_executables,
secret,
},
public: Arc::new(Mutex::new(upend.connection()?.get_users()?.is_empty())),
};
// Start HTTP server
let mut cnt = 0;
let server = loop {
let state = state.clone();
let allowed_origins = args.allow_host.clone();
let server = HttpServer::new(move || {
serve::get_app(webui_enabled, allowed_origins.clone(), state.clone())
});
let bind_result = server.bind(&bind);
if let Ok(server) = bind_result {
break server;
} else {
warn!("Failed to bind at {:?}, trying next port number...", bind);
bind.set_port(bind.port() + 1);
}
if cnt > 32 {
panic!("Couldn't start server.")
} else {
cnt += 1;
}
};
if !args.no_initial_update && (!open_result.new || args.rescan_mode.is_some()) {
info!("Running update...");
block_background::<_, _, anyhow::Error>(move || {
let connection: upend_db::UpEndConnection = upend.connection()?;
let tree_mode = if let Some(rescan_mode) = args.rescan_mode {
connection.set_vault_options(upend_db::VaultOptions {
blob_mode: Some(rescan_mode.clone()),
})?;
rescan_mode
} else {
connection
.get_vault_options()
.unwrap()
.blob_mode
.unwrap_or_default()
};
let _ = state.store.update(
&upend,
job_container.clone(),
upend_db::stores::UpdateOptions {
initial: false,
tree_mode,
},
OperationContext::default(),
);
let _ = extractors::extract_all(
upend,
state.store,
job_container,
OperationContext::default(),
);
Ok(())
});
}
#[cfg(feature = "desktop")]
{
if browser_enabled {
let ui_result = webbrowser::open(&format!("http://localhost:{}", bind.port()));
if ui_result.is_err() {
warn!("Could not open UI in browser!");
}
}
}
info!("Starting server at: {}", &bind);
server.run().await?;
Ok(())
}
}
}
type Entries = HashMap<String, serde_json::Value>;
async fn print_response_entries(response: reqwest::Response, format: OutputFormat) -> Result<()> {
match format {
OutputFormat::Json | OutputFormat::Raw => println!("{}", response.text().await?),
OutputFormat::Tsv => {
let mut entries = if response.url().path().contains("/obj/") {
#[derive(serde::Deserialize)]
struct ObjResponse {
entries: Entries,
}
response.json::<ObjResponse>().await?.entries
} else {
response.json::<Entries>().await?
}
.into_iter()
.peekable();
if entries.peek().is_some() {
eprintln!("entity\tattribute\tvalue\ttimestamp\tprovenance");
entries.for_each(|(_, entry)| {
println!(
"{}\t{}\t{}\t{}\t{}",
entry
.get("entity")
.and_then(|e| e.as_str())
.unwrap_or("???"),
entry
.get("attribute")
.and_then(|a| a.as_str())
.unwrap_or("???"),
entry
.get("value")
.and_then(|v| v.get("c"))
.map(|c| format!("{c}"))
.unwrap_or("???".to_string()),
entry
.get("timestamp")
.and_then(|t| t.as_str())
.unwrap_or("???"),
entry
.get("provenance")
.and_then(|p| p.as_str())
.unwrap_or("???"),
)
})
}
}
}
Ok(())
}
fn hash_path<P: AsRef<Path>>(filepath: P) -> Result<Address> {
let filepath = filepath.as_ref();
debug!("Hashing {:?}...", filepath);
let fbuffer = FileBuffer::open(filepath)?;
let hash = sha256hash(&fbuffer)?;
trace!("Finished hashing {:?}...", filepath);
Ok(Address::Hash(hash))
}
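(Worked example of the `@=` preprocessing in `Commands::Query` above: capture group 2 matches a quoted filepath, group 3 an unquoted one, and the whole match is replaced by `@<hash address>`. The path is a placeholder.)
#[test]
fn query_filepath_capture() {
    let re = Regex::new(r#"@(="([^"]+)"|=([^ ]+))"#).unwrap();
    let caps = re
        .captures(r#"(matches @="/tmp/a file.bin" "LBL" ?)"#)
        .unwrap();
    // Quoted form: the path lands in capture group 2.
    assert_eq!(caps.get(2).unwrap().as_str(), "/tmp/a file.bin");
}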

@@ -1,82 +0,0 @@
use anyhow::anyhow;
use anyhow::Result;
use std::collections::HashMap;
use std::io::Read;
use std::path::Path;
use std::process::Command;
use tracing::{debug, trace};
use super::Previewable;
pub struct AudioPath<'a>(pub &'a Path);
const COLOR: &str = "dc322f"; // solarized red
impl<'a> Previewable for AudioPath<'a> {
fn get_thumbnail(&self, options: HashMap<String, String>) -> Result<Option<Vec<u8>>> {
match options.get("type").map(|x| x.as_str()) {
Some("json") => {
let outfile = tempfile::Builder::new().suffix(".json").tempfile()?;
// -i long_clip.mp3 -o long_clip.json --pixels-per-second 20 --bits 8
let audiowaveform_cmd = Command::new("audiowaveform")
.args(["-i", &self.0.to_string_lossy()])
.args(["-o", &*outfile.path().to_string_lossy()])
.args(["--pixels-per-second", "20"])
.args(["--bits", "8"])
.output()?;
if !audiowaveform_cmd.status.success() {
return Err(anyhow!(
"Failed to retrieve audiofile peaks: {:?}",
String::from_utf8_lossy(&audiowaveform_cmd.stderr)
));
}
let mut buffer = Vec::new();
outfile.as_file().read_to_end(&mut buffer)?;
Ok(Some(buffer))
}
Some("image") | None => {
let outfile = tempfile::Builder::new().suffix(".png").tempfile()?;
let color = options
.get("color")
.map(String::to_owned)
.unwrap_or_else(|| COLOR.into());
let mut audiowaveform = Command::new("audiowaveform");
let command = audiowaveform
.args(["-i", &self.0.to_string_lossy()])
.args([
"--border-color",
"00000000",
"--background-color",
"00000000",
"--waveform-color",
&color,
"--no-axis-label",
])
.args(["--width", "860", "--height", "256"])
.args(["-o", &*outfile.path().to_string_lossy()]);
trace!("Running `{:?}`", command);
let now = std::time::Instant::now();
let cmd_output = command.output()?;
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !cmd_output.status.success() {
return Err(anyhow!(
"Failed to render thumbnail: {:?}",
String::from_utf8_lossy(&cmd_output.stderr)
));
}
let mut buffer = Vec::new();
outfile.as_file().read_to_end(&mut buffer)?;
Ok(Some(buffer))
}
Some(_) => Err(anyhow!("type has to be one of: image, json")),
}
}
}
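(Sketch of calling the previewer above, not part of the diff: "type" selects between the waveform peak JSON and the rendered PNG, and "color" is an optional hex override for the image variant.)
use std::collections::HashMap;
use std::path::Path;
fn waveform_json(path: &Path) -> anyhow::Result<Option<Vec<u8>>> {
    let mut options = HashMap::new();
    options.insert("type".to_string(), "json".to_string());
    AudioPath(path).get_thumbnail(options)
}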

File diff suppressed because it is too large.
@@ -1,89 +0,0 @@
use crate::routes;
use actix_web_lab::web::spa;
pub fn get_app<S>(
ui_enabled: bool,
allowed_origins: S,
state: crate::routes::State,
) -> actix_web::App<
impl actix_web::dev::ServiceFactory<
actix_web::dev::ServiceRequest,
Response = actix_web::dev::ServiceResponse<impl actix_web::body::MessageBody>,
Config = (),
InitError = (),
Error = actix_web::Error,
>,
>
where
S: IntoIterator<Item = String> + Clone,
{
let allowed_origins: Vec<String> = allowed_origins.into_iter().collect();
let cors = actix_cors::Cors::default()
.allowed_origin("http://localhost")
.allowed_origin("http://127.0.0.1")
.allowed_origin_fn(|origin, _req_head| {
origin.as_bytes().starts_with(b"http://localhost:")
|| origin.as_bytes().starts_with(b"http://127.0.0.1:")
|| origin.as_bytes().starts_with(b"moz-extension://")
})
.allowed_origin_fn(move |origin, _req_head| {
allowed_origins
.clone()
.into_iter()
.any(|allowed_origin| allowed_origin == "*" || *origin == allowed_origin)
})
.allowed_header("content-type")
.allow_any_method();
let app = actix_web::App::new()
.wrap(cors)
.wrap(
actix_web::middleware::DefaultHeaders::new()
.add(("UPEND-VERSION", crate::common::build::PKG_VERSION)),
)
.app_data(actix_web::web::PayloadConfig::new(4_294_967_296))
.app_data(actix_web::web::Data::new(state))
.wrap(actix_web::middleware::Logger::default().exclude("/api/jobs"))
.service(routes::login)
.service(routes::register)
.service(routes::logout)
.service(routes::whoami)
.service(routes::get_raw)
.service(routes::head_raw)
.service(routes::get_thumbnail)
.service(routes::get_query)
.service(routes::get_object)
.service(routes::put_object)
.service(routes::put_blob)
.service(routes::put_object_attribute)
.service(routes::delete_object)
.service(routes::get_address)
.service(routes::get_all_attributes)
.service(routes::api_refresh)
.service(routes::list_hier)
.service(routes::list_hier_roots)
.service(routes::vault_stats)
.service(routes::store_stats)
.service(routes::get_jobs)
.service(routes::get_info)
.service(routes::get_options)
.service(routes::put_options)
.service(routes::get_user_entries);
if ui_enabled {
return app.service(
spa()
.index_file(crate::common::WEBUI_PATH.to_str().unwrap().to_owned() + "/index.html")
.static_resources_location(crate::common::WEBUI_PATH.to_str().unwrap())
.finish(),
);
}
#[actix_web::get("/")]
async fn unavailable_index() -> actix_web::HttpResponse {
actix_web::HttpResponse::ServiceUnavailable().body("Web UI not enabled.")
}
app.service(unavailable_index)
}

@@ -1 +0,0 @@
pub mod exec;

@@ -1,78 +0,0 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://tera.netlify.app/docs
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{% if commit.scope %}[{{ commit.scope | upper }}]: {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_parsers = [
{message = "[\\.]{3}", group = "Ignore", skip = true},
{message = "^feat", group = "Features"},
{message = "^fix", group = "Bug Fixes"},
{message = "^doc", group = "Documentation"},
{message = "^perf", group = "Performance"},
{message = "^refactor", group = "Refactor"},
{message = "^style", group = "Styling"},
{message = "^test", group = "Testing"},
{message = "^media", group = "Media"},
{message = "^chore\\(release\\): prepare for", skip = true},
{message = "^chore", group = "Miscellaneous"},
{message = "wip", group = "Work in Progress", skip = true},
{message = "^(ci|dev)", group = "Operations & Development"},
{body = ".*security", group = "Security"},
]
commit_preprocessors = [
# { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](https://github.com/orhun/git-cliff/issues/${2}))"}, # replace issue numbers
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = true
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
tag_pattern = "v[0-9]*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42

View File

@ -1,67 +0,0 @@
[package]
name = "upend-db"
version = "0.0.2"
homepage = "https://upend.dev/"
repository = "https://git.thm.place/thm/upend"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
[lib]
path = "src/lib.rs"
[dependencies]
upend-base = { path = "../base", features = ["diesel"] }
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
rayon = "1.4.0"
num_cpus = "1.13"
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
"sqlite",
"r2d2",
"chrono",
"serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
password-hash = "0.5.0"
argon2 = "0.5.3"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"
regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "1.4", features = ["v4"] }
url = { version = "2", features = ["serde"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
jwalk = "0.8.1"
tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }
nonempty = "0.6.0"
shadow-rs = { version = "0.23", default-features = false }
[build-dependencies]
shadow-rs = { version = "0.23", default-features = false }

View File

@ -1,3 +0,0 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
}

View File

@ -1 +0,0 @@
DROP TABLE users;

View File

@ -1,7 +0,0 @@
CREATE TABLE users
(
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
username VARCHAR NOT NULL,
password VARCHAR NOT NULL,
UNIQUE (username)
);

View File

@ -1,2 +0,0 @@
ALTER TABLE data
DROP COLUMN user;

View File

@ -1,2 +0,0 @@
ALTER TABLE data
ADD COLUMN user VARCHAR;

View File

@ -1,3 +0,0 @@
use shadow_rs::shadow;
shadow!(build);

View File

@ -1,87 +0,0 @@
use crate::inner::models;
use std::convert::TryFrom;
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
use upend_base::error::UpEndError;
impl TryFrom<&models::Entry> for Entry {
type Error = UpEndError;
fn try_from(e: &models::Entry) -> Result<Self, Self::Error> {
if let Some(value_str) = &e.value_str {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.parse()?,
value: value_str.parse().unwrap(),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
} else if let Some(value_num) = e.value_num {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.parse()?,
value: EntryValue::Number(value_num),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
} else {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.parse()?,
value: EntryValue::Number(f64::NAN),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
}
}
}
impl TryFrom<&Entry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &Entry) -> Result<Self, Self::Error> {
let base_entry = models::Entry {
identity: e.address()?.encode()?,
entity_searchable: match &e.entity {
Address::Attribute(attr) => Some(attr.to_string()),
Address::Url(url) => Some(url.to_string()),
_ => None,
},
entity: e.entity.encode()?,
attribute: e.attribute.to_string(),
value_str: None,
value_num: None,
immutable: false,
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
};
match e.value {
EntryValue::Number(n) => Ok(models::Entry {
value_str: None,
value_num: Some(n),
..base_entry
}),
_ => Ok(models::Entry {
value_str: Some(e.value.to_string()?),
value_num: None,
..base_entry
}),
}
}
}
impl TryFrom<&ImmutableEntry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &ImmutableEntry) -> Result<Self, Self::Error> {
Ok(models::Entry {
immutable: true,
..models::Entry::try_from(&e.0)?
})
}
}

View File

@ -1,738 +0,0 @@
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
#[macro_use]
mod macros;
pub mod common;
pub mod engine;
pub mod entry;
pub mod hierarchies;
pub mod jobs;
pub mod stores;
mod inner;
mod util;
use crate::common::build;
use crate::engine::execute;
use crate::inner::models;
use crate::inner::schema::data;
use crate::util::LoggerSink;
use anyhow::{anyhow, Result};
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::result::{DatabaseErrorKind, Error};
use diesel::sqlite::SqliteConnection;
use hierarchies::initialize_hier;
use serde::{Deserialize, Serialize};
use shadow_rs::is_release;
use std::convert::TryFrom;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};
use std::time::Duration;
use tracing::{debug, error, trace, warn};
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Attribute, Entry, EntryValue, ImmutableEntry};
use upend_base::error::UpEndError;
use upend_base::hash::UpMultihash;
use upend_base::lang::Query;
#[derive(Debug)]
pub struct ConnectionOptions {
pub busy_timeout: Option<Duration>,
pub enable_wal_mode: bool,
pub mutex: Arc<Mutex<()>>,
}
impl ConnectionOptions {
pub fn apply(&self, connection: &SqliteConnection) -> QueryResult<()> {
let _lock = self.mutex.lock().unwrap();
if let Some(duration) = self.busy_timeout {
debug!("Setting busy_timeout to {:?}", duration);
connection.execute(&format!("PRAGMA busy_timeout = {};", duration.as_millis()))?;
}
connection.execute(if self.enable_wal_mode {
debug!("Enabling WAL journal mode & truncating WAL log...");
"PRAGMA journal_mode = WAL; PRAGMA wal_autocheckpoint = 1000; PRAGMA wal_checkpoint(TRUNCATE);"
} else {
debug!("Enabling TRUNCATE journal mode");
"PRAGMA journal_mode = TRUNCATE;"
})?;
debug!(r#"Setting "synchronous" to NORMAL"#);
connection.execute("PRAGMA synchronous = NORMAL;")?;
Ok(())
}
}
impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
for ConnectionOptions
{
fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), diesel::r2d2::Error> {
self.apply(conn).map_err(diesel::r2d2::Error::QueryError)
}
}
type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
#[derive(Debug)]
pub struct LoggingHandler {
pub name: &'static str,
}
impl diesel::r2d2::HandleError<diesel::r2d2::Error> for LoggingHandler {
fn handle_error(&self, error: diesel::r2d2::Error) {
error!(name = self.name, "Database error: {}", error);
if !is_release() {
panic!("Database error! This should not happen! {}", error);
}
}
}
pub struct OpenResult {
pub db: UpEndDatabase,
pub new: bool,
}
pub struct UpEndDatabase {
pub path: PathBuf,
pool: Arc<DbPool>,
lock: Arc<RwLock<()>>,
}
pub const UPEND_SUBDIR: &str = ".upend";
pub const DATABASE_FILENAME: &str = "upend.sqlite3";
impl UpEndDatabase {
pub fn open<P: AsRef<Path>>(dirpath: P, reinitialize: bool) -> Result<OpenResult> {
embed_migrations!("./migrations/upend");
let upend_path = dirpath.as_ref().join(UPEND_SUBDIR);
if reinitialize {
warn!("Reinitializing - removing previous database...");
let _ = fs::remove_dir_all(&upend_path);
}
let new = !upend_path.exists();
if new {
trace!("Creating UpEnd subdirectory...");
fs::create_dir(&upend_path)?;
}
trace!("Creating pool.");
let manager = ConnectionManager::<SqliteConnection>::new(
upend_path.join(DATABASE_FILENAME).to_str().unwrap(),
);
let pool = r2d2::Pool::builder()
.connection_customizer(Box::new(ConnectionOptions {
busy_timeout: Some(Duration::from_secs(30)),
enable_wal_mode: true,
mutex: Arc::new(Mutex::new(())),
}))
.error_handler(Box::new(LoggingHandler { name: "main" }))
.build(manager)?;
trace!("Pool created.");
let db = UpEndDatabase {
path: upend_path,
pool: Arc::new(pool),
lock: Arc::new(RwLock::new(())),
};
let connection = db.connection().unwrap();
if !new {
let db_major: u64 = connection
.get_meta("VERSION")?
.ok_or(anyhow!("Database version not found!"))?
.parse()?;
if db_major > build::PKG_VERSION_MAJOR.parse().unwrap() {
return Err(anyhow!("Incompatible database! Found version "));
}
}
trace!("Running migrations...");
embedded_migrations::run_with_output(
&db.pool.get()?,
&mut LoggerSink {
..Default::default()
},
)?;
initialize_hier(&connection)?;
Ok(OpenResult { db, new })
}
pub fn connection(&self) -> Result<UpEndConnection> {
Ok(UpEndConnection {
pool: self.pool.clone(),
lock: self.lock.clone(),
})
}
}
pub struct UpEndConnection {
pool: Arc<DbPool>,
lock: Arc<RwLock<()>>,
}
impl UpEndConnection {
pub fn transaction<T, E, F>(&self, f: F) -> Result<T, E>
where
F: FnOnce() -> Result<T, E>,
E: From<Error>,
{
/*
let span = span!(tracing::Level::TRACE, "transaction");
let _span = span.enter();
let _lock = self.transaction_lock.lock().unwrap();
self.conn.exclusive_transaction(f)
*/
// Disable transactions for now.
f()
}
pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<Option<String>> {
use crate::inner::schema::meta::dsl;
let key = key.as_ref();
trace!("Querying META:{key}");
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result = dsl::meta
.filter(dsl::key.eq(key))
.load::<models::MetaValue>(&conn)?;
let result = result.first();
Ok(result.map(|v| v.value.clone()))
}
pub fn set_meta<S: AsRef<str>, T: AsRef<str>>(&self, key: S, value: T) -> Result<()> {
use crate::inner::schema::meta::dsl;
let key = key.as_ref();
let value = value.as_ref();
trace!("Setting META:{key} to {value}");
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
diesel::replace_into(dsl::meta)
.values((dsl::key.eq(key), dsl::value.eq(value)))
.execute(&conn)?;
Ok(())
}
pub fn set_vault_options(&self, options: VaultOptions) -> Result<()> {
if let Some(blob_mode) = options.blob_mode {
let tree_mode = match blob_mode {
BlobMode::Flat => "FLAT".to_string(),
BlobMode::Mirror => "MIRROR".to_string(),
BlobMode::Incoming(None) => "INCOMING".to_string(),
BlobMode::Incoming(Some(group)) => format!("INCOMING:{}", group),
BlobMode::StoreOnly => "STORE_ONLY".to_string(),
};
self.set_meta("VAULT_BLOB_MODE", tree_mode)?;
}
Ok(())
}
pub fn get_vault_options(&self) -> Result<VaultOptions> {
let blob_mode = match self.get_meta("VAULT_BLOB_MODE")? {
Some(mode) => match mode.as_str() {
"FLAT" => Some(BlobMode::Flat),
"MIRROR" => Some(BlobMode::Mirror),
"INCOMING" => Some(BlobMode::Incoming(None)),
"STORE_ONLY" => Some(BlobMode::StoreOnly),
mode if mode.starts_with("INCOMING:") => {
Some(BlobMode::Incoming(Some(mode[9..].to_string())))
}
_ => {
warn!("Unknown vault tree mode: {}", mode);
None
}
},
None => None,
};
Ok(VaultOptions { blob_mode })
}
pub fn get_users(&self) -> Result<Vec<String>> {
use crate::inner::schema::users::dsl;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result = dsl::users.select(dsl::username).load::<String>(&conn)?;
Ok(result)
}
pub fn set_user(&self, username: &str, password: &str) -> Result<bool> {
use crate::inner::schema::users::dsl;
let salt = password_hash::SaltString::generate(&mut password_hash::rand_core::OsRng);
let argon2 = Argon2::default();
let hashed_password = argon2
.hash_password(password.as_ref(), &salt)
.map_err(|e| anyhow!(e))?
.to_string();
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
let result = diesel::replace_into(dsl::users)
.values((
dsl::username.eq(username),
dsl::password.eq(hashed_password),
))
.execute(&conn)?;
Ok(result > 0)
}
pub fn authenticate_user(&self, username: &str, password: &str) -> Result<()> {
use crate::inner::schema::users::dsl;
let conn = self.pool.get()?;
let user_result = dsl::users
.filter(dsl::username.eq(username))
.load::<models::UserValue>(&conn)?;
match user_result.first() {
Some(user) => {
let parsed_hash = PasswordHash::new(&user.password).map_err(|e| anyhow!(e))?;
let argon2 = Argon2::default();
argon2
.verify_password(password.as_ref(), &parsed_hash)
.map_err(|e| anyhow!(e))
}
None => {
let argon2 = Argon2::default();
let _ = argon2
.verify_password(password.as_ref(), &PasswordHash::new(&DUMMY_HASH).unwrap());
Err(anyhow!("user not found"))
}
}
}
pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result<Option<Entry>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let entry = data
.filter(identity.eq(Address::Hash(hash.clone()).encode()?))
.load::<models::Entry>(&conn)?;
match entry.len() {
0 => Ok(None),
1 => Ok(Some(Entry::try_from(entry.first().unwrap())?)),
_ => {
unreachable!(
"Multiple entries returned with the same hash - this should be impossible!"
)
}
}
}
pub fn retrieve_object(&self, object_address: &Address) -> Result<Vec<Entry>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let primary = data
.filter(entity.eq(object_address.encode()?))
.or_filter(value_str.eq(EntryValue::Address(object_address.clone()).to_string()?))
.load::<models::Entry>(&conn)?;
let entries = primary
.iter()
.map(Entry::try_from)
.collect::<Result<Vec<Entry>, UpEndError>>()?;
let secondary = data
.filter(
entity.eq_any(
entries
.iter()
.map(|e| e.address())
.filter_map(Result::ok)
.map(|addr| addr.encode())
.collect::<Result<Vec<Vec<u8>>, UpEndError>>()?,
),
)
.load::<models::Entry>(&conn)?;
let secondary_entries = secondary
.iter()
.map(Entry::try_from)
.collect::<Result<Vec<Entry>, UpEndError>>()?;
Ok([entries, secondary_entries].concat())
}
pub fn remove_object(&self, object_address: Address) -> Result<usize> {
use crate::inner::schema::data::dsl::*;
trace!("Deleting {}!", object_address);
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
let matches = data
.filter(identity.eq(object_address.encode()?))
.or_filter(entity.eq(object_address.encode()?))
.or_filter(value_str.eq(EntryValue::Address(object_address).to_string()?));
Ok(diesel::delete(matches).execute(&conn)?)
}
pub fn query(&self, query: Query) -> Result<Vec<Entry>> {
trace!("Querying: {:?}", query);
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let entries = execute(&conn, query)?;
let entries = entries
.iter()
.map(Entry::try_from)
.filter_map(Result::ok)
.collect();
Ok(entries)
}
pub fn insert_entry(&self, entry: Entry) -> Result<Address> {
trace!("Inserting: {}", entry);
let db_entry = models::Entry::try_from(&entry)?;
self.insert_model_entry(db_entry)?;
Ok(entry.address()?)
}
pub fn insert_entry_immutable(&self, entry: Entry) -> Result<Address> {
trace!("Inserting immutably: {}", entry);
let address = entry.address()?;
let db_entry = models::Entry::try_from(&ImmutableEntry(entry))?;
self.insert_model_entry(db_entry)?;
Ok(address)
}
fn insert_model_entry(&self, entry: models::Entry) -> Result<usize> {
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
let result = diesel::insert_into(data::table)
.values(&entry)
.execute(&conn);
match result {
Ok(num) => Ok(num),
Err(error) => match error {
Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => Ok(0),
_ => Err(anyhow!(error)),
},
}
}
// #[deprecated]
pub fn get_all_addresses(&self) -> Result<Vec<Address>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result = data
.select(entity)
.distinct()
.load::<Vec<u8>>(&conn)?
.into_iter()
.filter_map(|buf| Address::decode(&buf).ok())
.collect();
Ok(result)
}
// #[deprecated]
pub fn get_all_attributes(&self) -> Result<Vec<Attribute>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result = data
.select(attribute)
.distinct()
.order_by(attribute)
.load::<String>(&conn)?;
Ok(result
.into_iter()
.map(|a| a.parse())
.collect::<Result<Vec<Attribute>, UpEndError>>()?)
}
pub fn get_stats(&self) -> Result<serde_json::Value> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let total_entry_count = data.count().load::<i64>(&conn)?;
let total_entry_count = total_entry_count
.first()
.ok_or(anyhow!("Couldn't get entry count"))?;
let api_entry_count = data
.filter(provenance.like("API%"))
.count()
.load::<i64>(&conn)?;
let api_entry_count = api_entry_count
.first()
.ok_or(anyhow!("Couldn't get API entry count"))?;
let implicit_entry_count = data
.filter(provenance.like("%IMPLICIT%"))
.count()
.load::<i64>(&conn)?;
let implicit_entry_count = implicit_entry_count
.first()
.ok_or(anyhow!("Couldn't get API entry count"))?;
Ok(serde_json::json!({
"entryCount": {
"total": total_entry_count,
"api": api_entry_count,
"explicit": api_entry_count - implicit_entry_count
}
}))
}
// #[deprecated]
pub fn get_explicit_entries(&self) -> Result<Vec<Entry>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result: Vec<models::Entry> = data
.filter(
provenance
.like("API%")
.and(provenance.not_like("%IMPLICIT%")),
)
.load(&conn)?;
Ok(result
.iter()
.map(Entry::try_from)
.collect::<Result<Vec<Entry>, UpEndError>>()?)
}
}
lazy_static! {
static ref DUMMY_HASH: String = Argon2::default()
.hash_password(
"password".as_ref(),
&password_hash::SaltString::generate(&mut password_hash::rand_core::OsRng)
)
.unwrap()
.to_string();
}
#[cfg(test)]
mod test {
use upend_base::constants::{ATTR_IN, ATTR_LABEL};
use super::*;
use tempfile::TempDir;
#[test]
fn test_open() {
let tempdir = TempDir::new().unwrap();
let result = UpEndDatabase::open(&tempdir, false);
let result = result.unwrap();
assert!(result.new);
// Not new
let result = UpEndDatabase::open(&tempdir, false);
let result = result.unwrap();
assert!(!result.new);
// reinitialize true, new again
let result = UpEndDatabase::open(&tempdir, true);
let result = result.unwrap();
assert!(result.new);
}
#[test]
fn test_query() {
let tempdir = TempDir::new().unwrap();
let result = UpEndDatabase::open(&tempdir, false).unwrap();
let db = result.db;
let connection = db.connection().unwrap();
let random_entity = Address::Uuid(uuid::Uuid::new_v4());
upend_insert_val!(connection, random_entity, ATTR_LABEL, "FOOBAR").unwrap();
upend_insert_val!(connection, random_entity, "FLAVOUR", "STRANGE").unwrap();
let query = format!(r#"(matches @{random_entity} ? ?)"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let other_entity = Address::Uuid(uuid::Uuid::new_v4());
upend_insert_val!(connection, other_entity, ATTR_LABEL, "BAZQUX").unwrap();
upend_insert_val!(connection, other_entity, "CHARGE", "POSITIVE").unwrap();
let query = format!(r#"(matches (in @{random_entity} @{other_entity}) ? ?)"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 4);
let query = r#"(matches ? (in "FLAVOUR" "CHARGE") ?)"#.parse().unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let query = format!(r#"(matches ? "{ATTR_LABEL}" (in "FOOBAR" "BAZQUX"))"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let query = format!(r#"(matches ? "{ATTR_LABEL}" (contains "OOBA"))"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 1);
let query = r#"(or (matches ? ? (contains "OOBA")) (matches ? (contains "HARGE") ?) )"#
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let query =
format!(r#"(and (matches ? ? (contains "OOBA")) (matches ? "{ATTR_LABEL}" ?) )"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 1);
let query = format!(
r#"(and
(or
(matches ? ? (contains "OOBA"))
(matches ? (contains "HARGE") ?)
)
(not (matches ? "{ATTR_LABEL}" ?))
)"#
)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 1);
let edge_entity = Address::Uuid(uuid::Uuid::new_v4());
upend_insert_addr!(connection, random_entity, ATTR_IN, other_entity).unwrap();
upend_insert_addr!(connection, edge_entity, ATTR_IN, random_entity).unwrap();
let query = format!(
r#"(join
(matches ?a "{ATTR_IN}" @{other_entity})
(matches ? "{ATTR_IN}" ?a)
)"#
)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].entity, edge_entity);
assert_eq!(result[0].value, EntryValue::Address(random_entity));
}
#[test]
fn test_users() {
let tempdir = TempDir::new().unwrap();
let result = UpEndDatabase::open(&tempdir, false).unwrap();
let db = result.db;
let connection = db.connection().unwrap();
assert!(connection.authenticate_user("thm", "hunter2").is_err());
connection.set_user("thm", "hunter2").unwrap();
connection.authenticate_user("thm", "hunter2").unwrap();
assert!(connection.authenticate_user("thm", "password").is_err());
connection.set_user("thm", "password").unwrap();
connection.authenticate_user("thm", "password").unwrap();
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VaultOptions {
pub blob_mode: Option<BlobMode>,
}
/// Specifies how to store new blobs
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub enum BlobMode {
#[default]
/// Mirror the original tree
Mirror,
/// Use only the last level of the tree as a group
Flat,
/// Place all files in a single group
Incoming(Option<String>),
/// Only store files, don't place them anywhere
StoreOnly,
}
impl std::str::FromStr for BlobMode {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"flat" => Ok(BlobMode::Flat),
"mirror" => Ok(BlobMode::Mirror),
"incoming" => Ok(BlobMode::Incoming(None)),
s if s.starts_with("incoming:") => Ok(BlobMode::Incoming(Some(s[9..].to_string()))),
"store_only" => Ok(BlobMode::StoreOnly),
_ => Err(anyhow!("Unknown blob mode: {}", s)),
}
}
}
#[derive(Debug, Clone)]
pub struct OperationContext {
pub user: Option<String>,
pub provenance: String,
}
impl Default for OperationContext {
fn default() -> Self {
Self {
user: None,
provenance: "SYSTEM".to_string(),
}
}
}

View File

@ -1,27 +0,0 @@
#[macro_export]
macro_rules! upend_insert_val {
($db_connection:expr, $entity:expr, $attribute:expr, $value:expr) => {{
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: $attribute.parse().unwrap(),
value: upend_base::entry::EntryValue::String(String::from($value)),
provenance: "SYSTEM INIT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
})
}};
}
#[macro_export]
macro_rules! upend_insert_addr {
($db_connection:expr, $entity:expr, $attribute:expr, $addr:expr) => {{
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: $attribute.parse().unwrap(),
value: upend_base::entry::EntryValue::Address($addr.clone()),
provenance: "SYSTEM INIT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
})
}};
}

File diff suppressed because it is too large

View File

@ -1,59 +0,0 @@
# UpEnd - A Conceptual Tutorial
UpEnd is not a traditional database - at its core, there are no tables, objects, or files. An entire UpEnd **Vault** is a flat list of **entries**.
An **entry** is a single "statement" that's true within the system. The core of an entry is an _Entity/Attribute/Value triplet_. For example:
| Entity | Attribute | Value |
| ----------------------- | ------------- | --------------- |
| John | Age | 23 |
| Prague | Is In Country | Czech Republic |
| (hash of) `track01.mp3` | Artist | Various Artists |
| https://upend.dev | Title | UpEnd |
Formally speaking:
- **Entity** is the thing the statement is about. It can be one of the following:
- **Hash**, typically of a file, but also possibly of another **entry**.
- [**UUID**](https://en.wikipedia.org/wiki/Universally_unique_identifier), for arbitrary objects that exist solely within UpEnd (groups/tags, annotations, etc.)
- **URL**, anything that exists on the web.
- **Attribute**, for data about UpEnd's attributes themselves (such as their various names, etc.).
- **Attribute** is the "kind" of a statement.
- It can be any text string, though some attributes are "reserved" by UpEnd by default.
- **Value** is the actual "fact" you're stating. It can be one of the following:
- A text string
- A number
- An address of an **entity**.
(Each **entry** also has a _timestamp_, denoting when it was added, and _provenance_, i.e. the origin of this entry - whether it was added by an automatic process or a user. A full example of an **entry** therefore would be `John / Age / 23 / 2023-05-01 19:20:00 / API IMPORT`)
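For readers who prefer code, the `John / Age / 23` example above can be sketched as a plain object, using the entry shape from the JavaScript SDK that appears later in this diff (the `{ t, c }` value layout is the SDK's; the concrete type tag and values here are illustrative):

```typescript
// One entry = one statement. `t` is the value's type tag, `c` its content;
// the tag name "Number" is an assumption for illustration.
const entry = {
  entity: "f9305ca5-eabd-4a97-9aa4-37036d2a6ca4", // a UUID entity ("John")
  attribute: "AGE",
  value: { t: "Number", c: 23 },
  provenance: "API IMPORT",
  timestamp: "2023-05-01 19:20:00",
};
```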
All other concepts within UpEnd arise as a consequence of combinations of **entries**.
**Objects** emerge as multiple **entries** with the same **entity** accrue. In other words, an **object** is a collection of **entries** pointing to the same **entity**. A file object therefore may look something like:
| Entity | Attribute | Value |
| ----------------------- | --------- | ---------------- |
| (hash of) `photo01.jpg` | Author | John Doe |
| (hash of) `photo01.jpg` | Label | photo01.jpg |
| (hash of) `photo01.jpg` | Label | Birthday 001.jpg |
| (hash of) `photo01.jpg` | Taken at | 2020-04-01 |
| (hash of) `photo01.jpg` | ... | ... |
(In the UI, the **Entity** part of entry listings is often left out, as it's redundant and implied by the object view.)
However, while a file object has an obvious **entity** to point to, a _Tag_ or a folder has no inherent identity of its own, and therefore no hash. This is the purpose of [_UUIDs_](https://en.wikipedia.org/wiki/Universally_unique_identifier). A _UUID_ is randomly generated for every object as needed.
A **Group** is the equivalent of a folder or a tag. Its purpose is to serve as a collection of related items.
It is a "conventional" object - there is nothing about UpEnd that requires **Groups** to exist, but since they provide a very useful abstraction, there is built-in functionality that works with **Groups**, as well as affordances in the UI. It looks like this:
| Entity | Attribute | Value |
| --------------------------------------------- | --------- | ----------------------- |
| `f9305ca5-eabd-4a97-9aa4-37036d2a6ca4` (UUID) | Label | Birthday Photos |
| `f9305ca5-eabd-4a97-9aa4-37036d2a6ca4` (UUID) | Contains | (hash of) `photo01.jpg` |
>Issue `CONTENT UNADDRESSABLE`
>This means that while the **vaults** of various users will refer to the same files by the same **Entity** addresses - because a file is uniquely identified by its hash - this does not apply to any other objects such as **Groups**, as they are identified by a _UUID_, which is random. If two **vaults** were therefore combined, **entries** referring to the same files would "add up" correctly, and your existing **entries** about given files would be complemented by the **entries** of the other **vault**, but any **groups** would potentially be duplicated.
>This is an inherent problem, and cannot be easily solved; if everything were content-addressed, including **groups**, any single change (such as adding or removing a file from a **group**) would ripple throughout the entire system, as other related **entries** would have to update their **entities** or **values** to match this new address, which would change their content, and therefore their hash, and so on. Furthermore, this would also mean that no two folders (or **groups**) could ever share names, for example, as their content would at one point be identical, and therefore their identity as well. UUIDs provide a way for two otherwise identical **objects** to coexist.
>Not all is lost, though - two vaults can still combine in a meaningful way allowing mutual understanding - but it does necessitate an explicit mechanism resolving the semantics of combining _UUID_ referred objects such as **groups** (in other words, a separate addressing scheme). For example, if it's desirable that no matter what vault you happen to be in, the `music` **group** is always the same (and thus two users categorizing their favorite articles in the `music` group can see each other's articles) a convention can be established that all "universal" **groups** also receive an entry with a `Universal Key` **attribute**, which is then used to tell which **groups** are supposed to be the same across different vaults - and which are, for example, just a `music` group someone happened to create to categorize their favorite songs.
>Notably, this issue is completely moot unless you happen to compare different **vaults**. If all you're concerned with is a single **vault** on your computer, you don't need to worry at all about *UUID* objects.
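To make the `Universal Key` convention concrete, here is a sketch of the two entries such a "universal" **group** might carry, reusing the UUID from the table above and the `KEY` attribute defined in the SDK constants elsewhere in this diff (the key value itself is made up):

```typescript
// Two vaults each generate their own random UUID for their "music" group,
// but both attach the same KEY value, so the groups can be matched up when
// the vaults are compared or combined.
const universalMusicGroup = [
  {
    entity: "f9305ca5-eabd-4a97-9aa4-37036d2a6ca4",
    attribute: "LBL",
    value: { t: "String", c: "music" },
  },
  {
    entity: "f9305ca5-eabd-4a97-9aa4-37036d2a6ca4",
    attribute: "KEY",
    value: { t: "String", c: "net.example.groups.music" },
  },
];
```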

View File

@ -1,5 +0,0 @@
video/** filter=lfs diff=lfs merge=lfs -text
images/** filter=lfs diff=lfs merge=lfs -text
3d/** filter=lfs diff=lfs merge=lfs -text
audio/** filter=lfs diff=lfs merge=lfs -text
text/** filter=lfs diff=lfs merge=lfs -text

BIN
example_vault/3d/David_(Michelangelo).stl (Stored with Git LFS)

Binary file not shown.

View File

@ -1,11 +0,0 @@
- `video/video_wide.webm` is [**Sirui 35mm Anamorphic Lens TEST FOOTAGE (pure cinematography)** by _DreamDuo Films_][video_wide]
- `video/video_vertical.webm` is [**free footage video background | portrait background** by _CahRusli_][video_vertical]
- `audio/drjohndee...mp3` from https://archive.org/details/dr_john_dee_2301_librivox
- `images` - `church.jpg`, `landscape.jpg`, `vertical_rocks.jpg` by _Tomáš Mládek_
- `images/The_Blue_Marble.jpg` from https://commons.wikimedia.org/wiki/File:The_Blue_Marble.jpg
- `images/The Great Wave off Kanagawa.jpg` from https://en.wikipedia.org/wiki/File:Tsunami_by_hokusai_19th_century.jpg
- `3d/David_(Michelangelo).stl` from https://commons.wikimedia.org/wiki/File:David_(Michelangelo).stl
- `3d/Scan_the_World_-_Venus_de_Milo.stl` from https://commons.wikimedia.org/wiki/File:Scan_the_World_-_Venus_de_Milo.stl
[video_wide]: https://www.youtube.com/watch?v=rGVeryrPMEA
[video_vertical]: https://www.youtube.com/shorts/QbhDvqZ50Lw

BIN
example_vault/images/The_Blue_Marble.jpg (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/images/church.jpg (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/images/landscape.jpg (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/images/vertical_rocks.jpg (Stored with Git LFS)

Binary file not shown.

View File

@ -1 +0,0 @@
noop

View File

@ -1 +0,0 @@
noop

BIN
example_vault/text/example.md (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/text/pg70433.txt (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/video/video_vertical.webm (Stored with Git LFS)

Binary file not shown.

BIN
example_vault/video/video_wide.webm (Stored with Git LFS)

Binary file not shown.

BIN
media/upend.png Normal file

Binary file not shown.

Size: 2.4 KiB

media/upend.svg Normal file
View File

@ -0,0 +1,18 @@
<svg style="fill:none" width="255" height="255" xmlns="http://www.w3.org/2000/svg">
<style>
path {
fill:none;
stroke:#0a0a0a;
stroke-width:15px;
stroke-linecap:round;
stroke-linejoin:round
}
@media (prefers-color-scheme: dark) {
path {
stroke: white;
}
}
</style>
<path d="M7.5 7.5v0h240m-120 0v0l-120 120m240 0v0l-120-120m0 240v0-240" />
</svg>

Size: 490 B

View File

@ -17,9 +17,7 @@ CREATE TABLE data
attribute VARCHAR NOT NULL,
value_str VARCHAR,
value_num NUMERIC,
- immutable BOOLEAN NOT NULL,
- provenance VARCHAR NOT NULL,
- timestamp DATETIME NOT NULL
+ immutable BOOLEAN NOT NULL
);
CREATE INDEX data_entity ON data (entity);

View File

@ -1,3 +0,0 @@
node_modules
/dist
tests

View File

@ -1,20 +0,0 @@
{
"ignorePatterns": ["**/*.js"],
"env": {
"browser": true,
"es2021": true
},
"extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": "latest",
"sourceType": "module",
"project": "./tsconfig.json"
},
"plugins": ["@typescript-eslint", "ava"],
"rules": {
"@typescript-eslint/consistent-type-exports": "error",
"@typescript-eslint/consistent-type-imports": "error",
"ava/assertion-arguments": "error"
}
}

sdks/js/.gitignore vendored
View File

@ -1,2 +0,0 @@
node_modules
dist

View File

@ -1,5 +0,0 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
};

View File

@ -1,40 +0,0 @@
{
"name": "@upnd/upend",
"version": "0.5.5",
"description": "Client library to interact with the UpEnd system.",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist/**/*"
],
"exports": {
".": "./dist/index.js",
"./*": "./dist/*.js",
"./wasm": "./dist/wasm/index.js",
"./wasm/*": "./dist/wasm/*.js"
},
"scripts": {
"build": "tsc --build --verbose",
"test": "jest",
"lint": "eslint ."
},
"author": "Tomáš Mládek <t@mldk.cz>",
"license": "AGPL-3.0",
"devDependencies": {
"@types/debug": "^4.1.8",
"@types/jest": "^29.5.12",
"@typescript-eslint/eslint-plugin": "latest",
"@typescript-eslint/parser": "latest",
"eslint": "^8.7.0",
"eslint-plugin-ava": "^14.0.0",
"jest": "^29.7.0",
"ts-jest": "^29.1.2",
"typescript": "^4.4.4"
},
"dependencies": {
"@upnd/wasm-node": "^0.1.0",
"@upnd/wasm-web": "^0.1.0",
"debug": "^4.3.4",
"lru-cache": "^7.0.0"
}
}

File diff suppressed because it is too large

View File

@ -1,545 +0,0 @@
import LRU from "lru-cache";
import type { Query, UpObject } from "./index";
import { UpListing } from "./index";
import type {
Address,
ADDRESS_TYPE,
AttributeListingResult,
EntityListing,
IJob,
IValue,
ListingResult,
PutInput,
PutResult,
StoreInfo,
VaultInfo,
} from "./types";
import type { AddressComponents, UpEndWasmExtensions } from "./wasm";
import debug from "debug";
import { browser } from "./util";
const dbg = debug("upend:api");
export type { AddressComponents };
export type UpendApiError = {
kind: "Unauthorized" | "HttpError" | "FetchError" | "Unknown";
message?: string;
error?: Error;
};
export class UpEndApi {
private instanceUrl = "";
private readonly wasmExtensions: UpEndWasmExtensions | undefined = undefined;
public readonly timeout: number;
private queryOnceLRU = new LRU<string, UpListing>({ max: 128 });
private inFlightRequests: { [key: string]: Promise<UpListing> | null } = {};
private key: string | undefined;
private readonly onError: ((error: UpendApiError) => void) | undefined;
constructor(config?: {
instanceUrl?: string;
wasmExtensions?: UpEndWasmExtensions;
timeout?: number;
authKey?: string;
onError?: (error: UpendApiError) => void;
}) {
this.setInstanceUrl(config?.instanceUrl || "http://localhost:8093");
this.wasmExtensions = config?.wasmExtensions;
this.timeout = config?.timeout || 30_000;
this.key = config?.authKey;
this.onError = config?.onError;
}
public setInstanceUrl(apiUrl: string) {
this.instanceUrl = apiUrl.replace(/\/+$/g, "");
}
public get apiUrl() {
return this.instanceUrl + "/api";
}
public async fetchEntity(
address: string,
options?: ApiFetchOptions,
): Promise<UpObject> {
dbg("Fetching Entity %s", address);
const entityFetch = await this.fetch(
`${this.apiUrl}/obj/${address}`,
options,
);
const entityResult = (await entityFetch.json()) as EntityListing;
const entityListing = new UpListing(entityResult.entries);
return entityListing.getObject(address);
}
public async fetchEntry(address: string, options?: ApiFetchOptions) {
dbg("Fetching entry %s", address);
const response = await this.fetch(`${this.apiUrl}/raw/${address}`, options);
const data = await response.json();
const listing = new UpListing({ address: data });
return listing.entries[0];
}
public async query(
query: string | Query,
options?: ApiFetchOptions,
): Promise<UpListing> {
const queryStr = query.toString();
const cacheResult = this.queryOnceLRU.get(queryStr);
if (!cacheResult) {
if (!this.inFlightRequests[queryStr]) {
dbg(`Querying: ${query}`);
this.inFlightRequests[queryStr] = new Promise((resolve, reject) => {
this.fetch(`${this.apiUrl}/query`, options, {
method: "POST",
body: queryStr,
keepalive: true,
})
.then(async (response) => {
if (!response.ok) {
reject(
`Query ${queryStr} failed: ${response.status} ${
response.statusText
}: ${await response.text()}}`,
);
}
resolve(new UpListing(await response.json()));
this.inFlightRequests[queryStr] = null;
})
.catch((err) => reject(err));
});
} else {
dbg(`Chaining request for ${queryStr}...`);
}
return await (this.inFlightRequests[queryStr] as Promise<UpListing>); // TODO?
} else {
dbg(`Returning cached: ${queryStr}`);
return cacheResult;
}
}
public async putEntry(
input: PutInput,
options?: ApiFetchOptions,
): Promise<PutResult> {
dbg("Putting %O", input);
const response = await this.fetch(`${this.apiUrl}/obj`, options, {
headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(input),
});
return await response.json();
}
public async putEntityAttribute(
entity: Address,
attribute: string,
value: IValue,
provenance?: string,
options?: ApiFetchOptions,
): Promise<Address> {
dbg("Putting %s = %o for %s (%s)", attribute, value, entity, provenance);
let url = `${this.apiUrl}/obj/${entity}/${attribute}`;
if (provenance) {
url += `?provenance=${provenance}`;
}
const response = await this.fetch(url, options, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(value),
});
return await response.json();
}
public async putBlob(
fileOrUrl: File | URL,
options?: ApiFetchOptions & { onProgress?: (ev: ProgressEvent) => void },
): Promise<Address> {
dbg("Putting Blob: %O", fileOrUrl);
const formData = new FormData();
if (fileOrUrl instanceof File) {
formData.append(fileOrUrl.name, fileOrUrl);
} else {
formData.append("@url", fileOrUrl.toString());
}
const signal = this.getAbortSignal(options);
if (browser && fileOrUrl instanceof File) {
dbg("Using XHR for file upload");
const xhrdbg = debug("upend:api:xhr");
const xhr = new XMLHttpRequest();
signal.addEventListener("abort", () => xhr.abort());
for (const event of [
"loadstart",
"load",
"loadend",
"progress",
"abort",
"error",
] as const) {
xhr.addEventListener(event, (ev) => xhrdbg(`XHR ${event}: %O`, ev));
xhr.upload.addEventListener(event, (ev) =>
xhrdbg(`XHR upload ${event}: %O`, ev),
);
if (options?.onProgress) {
xhr.upload.addEventListener(event, options.onProgress);
}
}
return new Promise((resolve, reject) => {
xhr.open("PUT", `${this.apiUrl}/blob`, true);
xhr.onload = () => {
if (xhr.status >= 200 && xhr.status < 300) {
try {
resolve(JSON.parse(xhr.responseText));
} catch (e) {
reject(e);
}
} else {
reject(xhr.statusText);
}
};
xhr.send(formData);
});
} else {
const response = await this.fetch(`${this.apiUrl}/blob`, options, {
method: "PUT",
body: formData,
});
if (!response.ok) {
throw Error(await response.text());
}
return await response.json();
}
}
public async deleteEntry(
address: Address,
options?: ApiFetchOptions,
): Promise<void> {
dbg("Deleting entry %s", address);
await this.fetch(`${this.apiUrl}/obj/${address}`, options, {
method: "DELETE",
});
}
public getRaw(
address: Address,
config?: { preview?: boolean; authenticated?: boolean },
) {
let result = `${this.apiUrl}/${config?.preview ? "thumb" : "raw"}/${address}`;
if (config?.authenticated) {
result += `?auth_key=${this.key}`;
}
return result;
}
public async fetchRaw(
address: Address,
preview = false,
options?: ApiFetchOptions,
) {
dbg("Getting %s raw (preview = %s)", address, preview);
return await this.fetch(this.getRaw(address, { preview }), options);
}
public async refreshVault(options?: ApiFetchOptions) {
dbg("Triggering vault refresh");
return await this.fetch(`${this.apiUrl}/refresh`, options, {
method: "POST",
});
}
public async nativeOpen(address: Address, options?: ApiFetchOptions) {
dbg("Opening %s natively", address);
return this.fetch(`${this.apiUrl}/raw/${address}?native=1`, options);
}
public async fetchRoots(options?: ApiFetchOptions): Promise<ListingResult> {
dbg("Fetching hierarchical roots...");
const response = await this.fetch(`${this.apiUrl}/hier_roots`, options);
const roots = await response.json();
dbg("Hierarchical roots: %O", roots);
return roots;
}
public async fetchJobs(options?: ApiFetchOptions): Promise<IJob[]> {
// dbg("Fetching jobs...");
const response = await this.fetch(`${this.apiUrl}/jobs`, options);
return await response.json();
}
public async fetchAllAttributes(
options?: ApiFetchOptions,
): Promise<AttributeListingResult> {
dbg("Fetching all attributes...");
const response = await this.fetch(`${this.apiUrl}/all/attributes`, options);
const result = await response.json();
dbg("All attributes: %O", result);
return result;
}
public async fetchInfo(options?: ApiFetchOptions): Promise<VaultInfo> {
dbg("Fetching vault info...");
const response = await this.fetch(`${this.apiUrl}/info`, options);
const result = await response.json();
dbg("Vault info: %O", result);
return result;
}
public async fetchOptions(options?: ApiFetchOptions): Promise<VaultOptions> {
dbg("Fetching vault options...");
const response = await this.fetch(`${this.apiUrl}/options`, options);
const result = await response.json();
dbg("Vault options: %O", result);
return result;
}
public async fetchStoreInfo(
options?: ApiFetchOptions,
): Promise<{ [key: string]: StoreInfo }> {
dbg("Fetching store info...");
const response = await this.fetch(`${this.apiUrl}/stats/store`, options);
const result = await response.json();
dbg("Store info: %O");
return await result;
}
public async getAddress(
input: { urlContent: string } | ADDRESS_TYPE,
options?: ApiFetchOptions,
): Promise<string> {
let response: Response;
if (typeof input === "string") {
if (this.wasmExtensions) {
await this.wasmExtensions.init();
return this.wasmExtensions.AddressTypeConstants[input];
}
response = await this.fetch(
`${this.apiUrl}/address?type=${input}`,
options,
);
} else {
if ("urlContent" in input) {
response = await this.fetch(
`${this.apiUrl}/address?url_content=${input.urlContent}`,
options,
);
} else {
throw new Error("Input cannot be empty.");
}
}
const result = await response.json();
dbg("Address for %o = %s", input, result);
return result;
}
public async addressToComponents(
address: string,
): Promise<AddressComponents> {
if (!this.wasmExtensions) {
throw new Error("WASM extensions not supplied.");
}
await this.wasmExtensions.init();
return this.wasmExtensions.addr_to_components(address);
}
public async componentsToAddress(
components: AddressComponents,
): Promise<string> {
if (!this.wasmExtensions) {
throw new Error("WASM extensions not initialized.");
}
await this.wasmExtensions.init();
return this.wasmExtensions.components_to_addr(components);
}
public async getVaultOptions(
options?: ApiFetchOptions,
): Promise<VaultOptions> {
const response = await this.fetch(`${this.apiUrl}/options`, options);
return await response.json();
}
public async setVaultOptions(
options: VaultOptions,
apiOptions?: ApiFetchOptions,
): Promise<void> {
const payload: Record<string, unknown> = {};
if (options.blob_mode) {
const blob_mode: Record<string, unknown> = {};
blob_mode[options.blob_mode] = null;
payload["blob_mode"] = blob_mode;
}
const response = await this.fetch(`${this.apiUrl}/options`, apiOptions, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
});
if (!response.ok) {
throw Error(await response.text());
}
}
public async authenticate(
credentials: {
username: string;
password: string;
},
mode: "key",
options?: ApiFetchOptions,
): Promise<{ key: string }>;
public async authenticate(
credentials: {
username: string;
password: string;
},
mode?: "cookie",
options?: ApiFetchOptions,
): Promise<void>;
public async authenticate(
credentials: {
username: string;
password: string;
},
mode: "key" | "cookie" | undefined,
options?: ApiFetchOptions,
): Promise<{ key: string } | void> {
const via = mode || "cookie";
const response = await this.fetch(
`${this.apiUrl}/auth/login?via=${via}`,
options,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(credentials),
},
);
if (!response.ok) {
throw Error(await response.text());
}
if (mode === "key") {
const data = await response.json();
if (!data.key) {
throw Error("No key returned from server.");
}
this.key = data.key;
return { key: data.key };
}
}
public async register(credentials: {
username: string;
password: string;
}): Promise<void> {
await this.fetch(`${this.apiUrl}/auth/register`, undefined, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(credentials),
});
}
public async authStatus(
options?: ApiFetchOptions,
): Promise<{ user: string } | undefined> {
const response = await this.fetch(`${this.apiUrl}/auth/whoami`, options);
return await response.json();
}
public async resetAuth(mode: "key"): Promise<void>;
public async resetAuth(
mode?: "cookie",
options?: ApiFetchOptions,
): Promise<void>;
public async resetAuth(
mode?: "key" | "cookie",
options?: ApiFetchOptions,
): Promise<void> {
if (mode === "key") {
this.key = undefined;
} else {
await this.fetch(`${this.apiUrl}/auth/logout`, options, {
method: "POST",
});
}
}
private getAbortSignal(options: ApiFetchOptions | undefined) {
const controller = options?.abortController || new AbortController();
const timeout = options?.timeout || this.timeout;
if (timeout > 0) {
setTimeout(() => {
dbg("Aborting request after %d ms", timeout);
controller.abort();
}, timeout);
}
return controller.signal;
}
private async fetch(
url: string,
options: ApiFetchOptions | undefined,
requestInit?: RequestInit & { headers?: Record<string, string> },
): Promise<Response> {
const signal = this.getAbortSignal(options);
const headers = requestInit?.headers || {};
if (this.key) {
headers["Authorization"] = `Bearer ${this.key}`;
}
let result: Response;
let error: UpendApiError | undefined;
try {
result = await fetch(url, {
...requestInit,
signal,
headers,
});
if (!result.ok) {
if (result.status === 401) {
error = { kind: "Unauthorized", message: await result.text() };
} else {
error = {
kind: "HttpError",
message: `HTTP Error ${result.status}: ${result.statusText}`,
};
}
}
} catch (e) {
error = { kind: "FetchError", error: e as Error };
}
if (error) {
if (this.onError) {
this.onError(error);
}
throw error;
}
return result!;
}
}
export interface ApiFetchOptions {
timeout?: number;
abortController?: AbortController;
}
export type VaultBlobMode = "Flat" | "Mirror" | "Incoming";
export interface VaultOptions {
blob_mode: VaultBlobMode;
}
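A minimal usage sketch for the client above, assuming a locally running instance at the default URL and the throwaway credentials used in the Rust tests elsewhere in this diff:

```typescript
import { UpEndApi } from "@upnd/upend/api";
import { Query, Any } from "@upnd/upend/query";

async function main() {
  const api = new UpEndApi({ instanceUrl: "http://localhost:8093" });
  await api.authenticate({ username: "thm", password: "hunter2" }, "key");

  // Fetch every labelled object, then switch the vault to "Mirror" blob mode.
  const listing = await api.query(Query.matches(Any, "LBL", Any));
  console.log(listing.entities);
  await api.setVaultOptions({ blob_mode: "Mirror" });
}

main().catch(console.error);
```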

View File

@ -1,37 +0,0 @@
/**
* Attribute denoting (hierarchical) relation, in the "upwards" direction. For example, a file `IN` a group, an image `IN` photos, etc.
*/
export const ATTR_IN = "IN";
/**
* Attribute denoting that an entry belongs to the set relating to a given (hierarchical) relation.
* For example, a data blob may have a label entry, and to qualify that label within the context of belonging to a given hierarchical group, that label entry and the hierarchical entry will be linked with `BY`.
*/
export const ATTR_BY = "BY";
/**
* Attribute denoting that an attribute belongs to a given "tagging" entity. If an entity belongs to (`IN`) a "tagging" entity, it is expected to have attributes that are `OF` that entity.
*/
export const ATTR_OF = "OF";
/**
* Attribute denoting a human readable label.
*/
export const ATTR_LABEL = "LBL";
/**
* Attribute denoting the date & time an entity was noted in the database.
* (TODO: This info can be trivially derived from existing entry timestamps, while at the same time the "Introduction problem" is still open.)
*/
export const ATTR_ADDED = "ADDED";
/**
* Attribute for cross-vault unambiguous referencing of non-hashable (e.g. UUID) entities.
*/
export const ATTR_KEY = "KEY";
/**
* Address of the root hierarchical entry.
*/
export const HIER_ROOT_ADDR =
"zb2rhmpmTFPxdhaxTQg5Ug3KHFU8DZNUPh8TaPY2v8UQVJbQk";

View File

@ -1,141 +0,0 @@
import type { IEntry, IValue, ListingResult } from "./types";
export { UpEndApi } from "./api";
export { Query } from "./query";
export class UpListing {
public readonly entries: UpEntry[];
private _objects: { [key: string]: UpObject } = {};
constructor(listing: ListingResult) {
this.entries = Object.entries(listing).map(
(lr) => new UpEntry(...lr, this),
);
}
public get objects() {
const allEntities = new Set(this.entries.map((e) => e.entity));
const result: { [key: string]: UpObject } = {};
Array.from(allEntities).forEach(
(entity) => (result[entity] = new UpObject(entity, this)),
);
return result;
}
public getObject(address: string) {
if (!this._objects[address]) {
this._objects[address] = new UpObject(address, this);
}
return this._objects[address];
}
public get entities(): string[] {
return Array.from(new Set(this.entries.map((e) => `@${e.entity}`)));
}
public get attributes(): string[] {
return Array.from(new Set(this.entries.map((e) => e.attribute)));
}
public get values(): IValue[] {
return Array.from(new Set(this.entries.map((e) => e.value)));
}
}
export class UpObject {
public readonly address: string;
public listing: UpListing | undefined;
constructor(address: string, listing?: UpListing) {
this.address = address;
this.listing = listing;
}
public bind(listing: UpListing) {
this.listing = listing;
}
public get attributes() {
return (this.listing?.entries || []).filter(
(e) => e.entity === this.address,
);
}
public get backlinks() {
return (this.listing?.entries || []).filter(
(e) => e.value.c === this.address,
);
}
private _attr: Record<string, UpEntry[] | undefined> | undefined;
public get attr(): Record<string, UpEntry[] | undefined> {
if (!this._attr) {
const result = {} as { [key: string]: UpEntry[] };
this.attributes.forEach((entry) => {
if (!result[entry.attribute]) {
result[entry.attribute] = [];
}
result[entry.attribute].push(entry);
});
this.backlinks.forEach((entry) => {
const attribute = `~${entry.attribute}`;
if (!result[attribute]) {
result[attribute] = [];
}
result[attribute].push(entry);
});
this._attr = result;
}
return this._attr;
}
public get(attr: string): string | number | null | undefined {
return this.attr[attr]?.[0].value.c;
}
public identify(): string[] {
return (this.attr["LBL"] || []).map((e) => String(e.value.c));
}
public toString(): string {
return [`@${this.address}`, this.identify().join(", ")]
.filter(Boolean)
.join(" | ");
}
public asDict() {
return {
address: this.address,
attributes: this.attr,
};
}
}
export class UpEntry extends UpObject implements IEntry {
entity: string;
attribute: string;
value: IValue;
provenance: string;
user: string;
timestamp: string;
constructor(address: string, entry: IEntry, listing: UpListing) {
super(address, listing);
this.entity = entry.entity;
this.attribute = entry.attribute;
this.value = entry.value;
this.provenance = entry.provenance;
this.user = entry.user;
this.timestamp = entry.timestamp;
}
public toString(): string {
return `(${this.entity} ${this.attribute} ${this.value.c} [${this.value.t}])`;
}
}
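A small sketch of how the listing classes above fit together (the entry address, value tag, and field values are illustrative):

```typescript
import { UpListing } from "@upnd/upend";

// A ListingResult maps entry addresses to entries; UpListing groups them
// into UpObjects keyed by entity.
const listing = new UpListing({
  zb2ExampleEntryAddress: {
    entity: "f9305ca5-eabd-4a97-9aa4-37036d2a6ca4",
    attribute: "LBL",
    value: { t: "String", c: "Birthday Photos" },
    provenance: "API",
    user: "thm",
    timestamp: "2023-05-01 19:20:00",
  },
});

const group = listing.getObject("f9305ca5-eabd-4a97-9aa4-37036d2a6ca4");
console.log(group.identify()); // ["Birthday Photos"]
```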

View File

@ -1,91 +0,0 @@
import type { Address } from "./types";
import { isAddress } from "./types";
export const Any = "?";
class Var {
constructor(public readonly name: string) {}
}
export function Variable(name: string): Var {
return new Var(name);
}
type QueryPart<T> = T | T[] | typeof Any | Var;
export class Query {
private _query: string | undefined;
public static matches(
entity: QueryPart<string>,
attribute: QueryPart<string>,
value: QueryPart<string | number | Address>
): Query {
const query = new Query();
let entityStr;
if (entity === Any) {
entityStr = "?";
} else if (entity instanceof Var) {
entityStr = `?${entity.name}`;
} else {
entityStr = Array.isArray(entity) ? `(in ${entity.join(" ")})` : entity;
}
let attributeStr;
if (attribute === Any) {
attributeStr = "?";
} else if (attribute instanceof Var) {
attributeStr = `?${attribute.name}`;
} else {
attributeStr = Array.isArray(attribute)
? `(in ${attribute.map((a) => `"${a}"`).join(" ")})`
: `"${attribute}"`;
}
let valueStr;
if (value === Any) {
valueStr = "?";
} else if (value instanceof Var) {
valueStr = `?${value.name}`;
} else {
valueStr = (Array.isArray(value) ? value : [value])
.map((v) => {
if (typeof v === "number") return v;
if (isAddress(v)) return v;
if (typeof v === "string") return `"${v}"`;
})
.join(" ");
valueStr = Array.isArray(value) ? `(in ${valueStr})` : valueStr;
}
query._query = `(matches ${entityStr} ${attributeStr} ${valueStr})`;
return query;
}
public static or(...queries: Query[]): Query {
const query = new Query();
query._query = `(or ${queries.join(" ")})`;
return query;
}
public static and(...queries: Query[]): Query {
const query = new Query();
query._query = `(and ${queries.join(" ")})`;
return query;
}
public static not(query: Query): Query {
const q = new Query();
q._query = `(not ${query})`;
return q;
}
public static join(...queries: Query[]): Query {
const query = new Query();
query._query = `(join ${queries.join(" ")})`;
return query;
}
public toString(): string {
if (!this._query) throw new Error("Query is not defined");
return this._query;
}
}
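For comparison with the S-expression tests in the Rust code elsewhere in this diff, the same query shapes can be produced with this builder (a sketch; the attribute names are taken from those tests):

```typescript
import { Query, Variable, Any } from "@upnd/upend/query";

// (and (matches ? "LBL" ?) (matches ? (in "FLAVOUR" "CHARGE") ?))
const q = Query.and(
  Query.matches(Any, "LBL", Any),
  Query.matches(Any, ["FLAVOUR", "CHARGE"], Any),
);

// (join (matches ?a "IN" ?) (matches ? "IN" ?a))
const a = Variable("a");
const joined = Query.join(
  Query.matches(a, "IN", Any),
  Query.matches(Any, "IN", a),
);
```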

Some files were not shown because too many files have changed in this diff.