Compare commits
334 Commits
2ffaee778f ... 7e368df812
Author | SHA1 | Date |
---|---|---|
Tomáš Mládek | 7e368df812 | |
Tomáš Mládek | e06d2bccfe | |
Tomáš Mládek | 9f61581ba7 | |
Tomáš Mládek | bc74fbfff6 | |
Tomáš Mládek | 8d165e1f8c | |
Tomáš Mládek | 97f6dd86bf | |
Tomáš Mládek | 041c058a77 | |
Tomáš Mládek | 1bd83062bb | |
Tomáš Mládek | 58c5329781 | |
Tomáš Mládek | 07a150b99d | |
Tomáš Mládek | 1738643050 | |
Tomáš Mládek | 3b32597fb6 | |
Tomáš Mládek | a30ef465a3 | |
Tomáš Mládek | 069c86855b | |
Tomáš Mládek | f9002604fe | |
Tomáš Mládek | edc666f56a | |
Tomáš Mládek | 750bca9ee0 | |
Tomáš Mládek | 703a3e5391 | |
Tomáš Mládek | 50020b969e | |
Tomáš Mládek | 60a8b15164 | |
Tomáš Mládek | 17bc53a6fe | |
Tomáš Mládek | f9037a4370 | |
Tomáš Mládek | 196447da0f | |
Tomáš Mládek | 05ee557d1a | |
Tomáš Mládek | 02bfe94f39 | |
Tomáš Mládek | 0e59bc8bd5 | |
Tomáš Mládek | 8932341445 | |
Tomáš Mládek | 1f270d6dc7 | |
Tomáš Mládek | 669b348160 | |
Tomáš Mládek | 175518e3a6 | |
Tomáš Mládek | 94818b992a | |
Tomáš Mládek | f2261998ee | |
Tomáš Mládek | 730cc02d7a | |
Tomáš Mládek | 4d8ac0717d | |
Tomáš Mládek | 68e7d67d7b | |
Tomáš Mládek | cb7dfadf3d | |
Tomáš Mládek | 35e1e902a2 | |
Tomáš Mládek | 1e9f83d043 | |
Tomáš Mládek | 88170789a0 | |
Tomáš Mládek | e03e09ccaf | |
Tomáš Mládek | 58ca734443 | |
Tomáš Mládek | 7897ce7354 | |
Tomáš Mládek | d87405ae5b | |
Tomáš Mládek | c5e14eae0d | |
Tomáš Mládek | 4ccfc63318 | |
Tomáš Mládek | 894faa94ae | |
Tomáš Mládek | 0b488d9384 | |
Tomáš Mládek | 121c615642 | |
Tomáš Mládek | cd008c10e2 | |
Tomáš Mládek | 0ede2af16c | |
Tomáš Mládek | 3e5353a5a4 | |
Tomáš Mládek | ff44061a21 | |
Tomáš Mládek | 794b130645 | |
Tomáš Mládek | 2faa113691 | |
Tomáš Mládek | dd9ff79e20 | |
Tomáš Mládek | 050e3f81d7 | |
Tomáš Mládek | afe0b858b6 | |
Tomáš Mládek | 656dc23bfb | |
Tomáš Mládek | 1dd4f059d3 | |
Tomáš Mládek | 7b1c37eb54 | |
Tomáš Mládek | a2396675c5 | |
Tomáš Mládek | ab17644b0d | |
Tomáš Mládek | 4c3727451b | |
Tomáš Mládek | e32233c4f7 | |
Tomáš Mládek | 473cb2ffa0 | |
Tomáš Mládek | 9b52eba0b4 | |
Tomáš Mládek | 052c56ed1d | |
Tomáš Mládek | afa5bd088d | |
Tomáš Mládek | c5c157a856 | |
Tomáš Mládek | 3344e69544 | |
Tomáš Mládek | 33768e2695 | |
Tomáš Mládek | 9d6ebfc31c | |
Tomáš Mládek | f1b608f824 | |
Tomáš Mládek | ea9aa96674 | |
Tomáš Mládek | ce4e045e07 | |
Tomáš Mládek | c246b267d1 | |
Tomáš Mládek | 53135d4a9e | |
Tomáš Mládek | 3196294033 | |
Tomáš Mládek | 1d1476c7b8 | |
Tomáš Mládek | 9f2f7c0218 | |
Tomáš Mládek | 787aa00f94 | |
Tomáš Mládek | de3ef7de0f | |
Tomáš Mládek | ec81f8147b | |
Tomáš Mládek | 59c2d9c078 | |
Tomáš Mládek | f18217a3e5 | |
Tomáš Mládek | ba221c2662 | |
Tomáš Mládek | c16ff963c8 | |
Tomáš Mládek | 303ac3ec07 | |
Tomáš Mládek | 3dcfe48803 | |
Tomáš Mládek | e6862351f9 | |
Tomáš Mládek | 2da5a28a42 | |
Tomáš Mládek | 316f236d3a | |
Tomáš Mládek | 1660585df3 | |
Tomáš Mládek | 009007fc8b | |
Tomáš Mládek | 298d92c9a5 | |
Tomáš Mládek | f14c035051 | |
Tomáš Mládek | d047eaf7ac | |
Tomáš Mládek | f1184ad2b3 | |
Tomáš Mládek | b275d04c23 | |
Tomáš Mládek | 0811d9ccd8 | |
Tomáš Mládek | 75faa28ff3 | |
Tomáš Mládek | b8a78e2c3a | |
Tomáš Mládek | 6467d6c3b7 | |
Tomáš Mládek | de9f808b7a | |
Tomáš Mládek | b78d1be240 | |
Tomáš Mládek | 852d64b38d | |
Tomáš Mládek | faa75278a1 | |
Tomáš Mládek | 7533697907 | |
Tomáš Mládek | 2958d44cc0 | |
Tomáš Mládek | 309a968550 | |
Tomáš Mládek | c0daf59d46 | |
Tomáš Mládek | f1b3f84ee3 | |
Tomáš Mládek | 3ed765e90e | |
Tomáš Mládek | 8879aba3c2 | |
Tomáš Mládek | 18a84dee66 | |
Tomáš Mládek | 8f6395e097 | |
Tomáš Mládek | 8c1dc5388f | |
Tomáš Mládek | e52560ae07 | |
Tomáš Mládek | 0353e43dcf | |
Tomáš Mládek | bbcaa58dd1 | |
Tomáš Mládek | b3a77a773c | |
Tomáš Mládek | b546423977 | |
Tomáš Mládek | b4bc684ed3 | |
Tomáš Mládek | 631bbc1772 | |
Tomáš Mládek | b423fdcb22 | |
Tomáš Mládek | b48655f169 | |
Tomáš Mládek | 33b52a3452 | |
Tomáš Mládek | 0dfa131fea | |
Tomáš Mládek | 7191a20176 | |
Tomáš Mládek | c3ac5adaf0 | |
Tomáš Mládek | a1765d480a | |
Tomáš Mládek | 3b303e4872 | |
Tomáš Mládek | 65eb252619 | |
Tomáš Mládek | e6d7328b29 | |
Tomáš Mládek | 8043e25008 | |
Tomáš Mládek | 10e0b8804b | |
Tomáš Mládek | db173e03f7 | |
Tomáš Mládek | bfce05600b | |
Tomáš Mládek | 8a32b583d1 | |
Tomáš Mládek | 8917221b42 | |
Tomáš Mládek | 7a59f81fb4 | |
Tomáš Mládek | 83102c5d4f | |
Tomáš Mládek | ac7bcb29b6 | |
Tomáš Mládek | e41960230f | |
Tomáš Mládek | d23d02413e | |
Tomáš Mládek | c0a705bb33 | |
Tomáš Mládek | 8181af3e01 | |
Tomáš Mládek | 6993709c56 | |
Tomáš Mládek | 0fa5b67643 | |
Tomáš Mládek | 7bed050cd0 | |
Tomáš Mládek | 0690aef307 | |
Tomáš Mládek | 79b359854b | |
Tomáš Mládek | 5c47e087e6 | |
Tomáš Mládek | e2dcb07ec9 | |
Tomáš Mládek | 90d10858fa | |
Tomáš Mládek | cce9906bc8 | |
Tomáš Mládek | cc3f618375 | |
Tomáš Mládek | 2f636288b6 | |
Tomáš Mládek | 30e0f10ce8 | |
Tomáš Mládek | f90f3fa189 | |
Tomáš Mládek | 3c4276e22d | |
Tomáš Mládek | 2027b543fd | |
Tomáš Mládek | b99f9bc15c | |
Tomáš Mládek | 06f7d1a4a6 | |
Tomáš Mládek | dfcc1b1969 | |
Tomáš Mládek | db85fc11a6 | |
Tomáš Mládek | b5c3e1758b | |
Tomáš Mládek | e9caac0bea | |
Tomáš Mládek | 1890b29624 | |
Tomáš Mládek | 2c75a76446 | |
Tomáš Mládek | 6169dd25a3 | |
Tomáš Mládek | 0f17538307 | |
Tomáš Mládek | 03e3aafd70 | |
Tomáš Mládek | 8c4ca4ef16 | |
Tomáš Mládek | 4dc5f49245 | |
Tomáš Mládek | 8793691cbb | |
Tomáš Mládek | a5b4d13bb1 | |
Tomáš Mládek | 0df4c78036 | |
Tomáš Mládek | a1fa423634 | |
Tomáš Mládek | 4a8d9b4ece | |
Tomáš Mládek | 69e72a6440 | |
Tomáš Mládek | 2cca09e291 | |
Tomáš Mládek | 2b25c03471 | |
Tomáš Mládek | c4f86824c9 | |
Tomáš Mládek | 22747e2577 | |
Tomáš Mládek | f5adb3fff8 | |
Tomáš Mládek | 779015ae32 | |
Tomáš Mládek | be45fcdac5 | |
Tomáš Mládek | 9fa7ee9f68 | |
Tomáš Mládek | f5c1ee4169 | |
Tomáš Mládek | 2d8c9623fa | |
Tomáš Mládek | 8f00f73b69 | |
Tomáš Mládek | f88ecb7c9f | |
Tomáš Mládek | 91d8688bc9 | |
Tomáš Mládek | 46a1088d22 | |
Tomáš Mládek | 28861370a7 | |
Tomáš Mládek | b050eaf893 | |
Tomáš Mládek | d59949868d | |
Tomáš Mládek | 2be171c98a | |
Tomáš Mládek | 317bd98264 | |
Tomáš Mládek | 5b1828021c | |
Tomáš Mládek | b9144ead92 | |
Tomáš Mládek | 27aeca9f4f | |
Tomáš Mládek | 044e19e9a7 | |
Tomáš Mládek | 12cd5b61e1 | |
Tomáš Mládek | cfa6f7e6a7 | |
Tomáš Mládek | 49085a2f04 | |
Tomáš Mládek | f03523681b | |
Tomáš Mládek | d528f03905 | |
Tomáš Mládek | f889e029ec | |
Tomáš Mládek | 15072f61c6 | |
Tomáš Mládek | 3f1dbedd06 | |
Tomáš Mládek | c4f356b5b3 | |
Tomáš Mládek | cda25f7f17 | |
Tomáš Mládek | df25f9180d | |
Tomáš Mládek | 3b957093b7 | |
Tomáš Mládek | efb5ad2295 | |
Tomáš Mládek | 209c0eeb40 | |
Tomáš Mládek | 838ce28647 | |
Tomáš Mládek | 6f00c2f583 | |
Tomáš Mládek | c617d1853b | |
Tomáš Mládek | 826aa26198 | |
Tomáš Mládek | 58b90e1650 | |
Tomáš Mládek | 587917fb3f | |
Tomáš Mládek | d8fa68f558 | |
Tomáš Mládek | 715f5b0e39 | |
Tomáš Mládek | dea40124f9 | |
Tomáš Mládek | ba8d272bc2 | |
Tomáš Mládek | dc9a626a4e | |
Tomáš Mládek | 862ed1c08a | |
Tomáš Mládek | 659ed571b6 | |
Tomáš Mládek | d10b28621e | |
Tomáš Mládek | 203b105b15 | |
Tomáš Mládek | 2150841ee6 | |
Tomáš Mládek | bf823bc1c8 | |
Tomáš Mládek | 65936efe38 | |
Tomáš Mládek | 0dc1a6aa45 | |
Tomáš Mládek | 851b21ce81 | |
Tomáš Mládek | 52098758a1 | |
Tomáš Mládek | 8f1c713ef8 | |
Tomáš Mládek | 0b211c237d | |
Tomáš Mládek | f597f0a69a | |
Tomáš Mládek | 0ffe5ee688 | |
Tomáš Mládek | 6656e9f5d1 | |
Tomáš Mládek | 4dbf8b745b | |
Tomáš Mládek | b2a25520e4 | |
Tomáš Mládek | b47b87629e | |
Tomáš Mládek | eef2d3f5a4 | |
Tomáš Mládek | 2b6a41ebe4 | |
Tomáš Mládek | a8dd4735d3 | |
Tomáš Mládek | d0903de812 | |
Tomáš Mládek | 5cc013a42c | |
Tomáš Mládek | 6a3d71d2d4 | |
Tomáš Mládek | ea8d30ebc4 | |
Tomáš Mládek | 86c8921fdd | |
Tomáš Mládek | c15052656a | |
Tomáš Mládek | 5447be9fd3 | |
Tomáš Mládek | cfd1384582 | |
Tomáš Mládek | 37d5cee2ad | |
Tomáš Mládek | 6288e8faec | |
Tomáš Mládek | 64a43eb428 | |
Tomáš Mládek | 8708eccfbe | |
Tomáš Mládek | e1d12565ad | |
Tomáš Mládek | c26f96bda0 | |
Tomáš Mládek | 69aa8a862f | |
Tomáš Mládek | 40b4154c3d | |
Tomáš Mládek | 377f0af161 | |
Tomáš Mládek | de8d6b1c59 | |
Tomáš Mládek | a0bd0db457 | |
Tomáš Mládek | 9fc95185af | |
Tomáš Mládek | 3af6aa5866 | |
Tomáš Mládek | 65ae8dac2e | |
Tomáš Mládek | df7f5d2c19 | |
Tomáš Mládek | 120e5a46cc | |
Tomáš Mládek | eb2cdd6810 | |
Tomáš Mládek | 58eb842a13 | |
Tomáš Mládek | 1edd92148e | |
Tomáš Mládek | 75d1bd9f8b | |
Tomáš Mládek | 59f1abd5e2 | |
Tomáš Mládek | 8060f7224d | |
Tomáš Mládek | 44e1d1687a | |
Tomáš Mládek | bf223cf247 | |
Tomáš Mládek | 0ed585aa32 | |
Tomáš Mládek | 318a7a941f | |
Tomáš Mládek | 3526a164fa | |
Tomáš Mládek | bb8d390d9e | |
Tomáš Mládek | f66857ca3b | |
Tomáš Mládek | 6003eebbe8 | |
Tomáš Mládek | a5603ecd66 | |
Tomáš Mládek | 6e78fa250c | |
Tomáš Mládek | 91cfa6a2da | |
Tomáš Mládek | f79995b6f4 | |
Tomáš Mládek | 4cc38dfaa3 | |
Tomáš Mládek | b59e0205af | |
Tomáš Mládek | f4c8a9ac74 | |
Tomáš Mládek | acdd128d5f | |
Tomáš Mládek | 1f551fc087 | |
Tomáš Mládek | 11e0bfa96d | |
Tomáš Mládek | 4b27f14097 | |
Tomáš Mládek | a361c75270 | |
Tomáš Mládek | ae0c588928 | |
Tomáš Mládek | c3305efaaa | |
Tomáš Mládek | 560286dbed | |
Tomáš Mládek | dd40dcb0b2 | |
Tomáš Mládek | 474e685941 | |
Tomáš Mládek | d9b714e106 | |
Tomáš Mládek | ee28a99004 | |
Tomáš Mládek | 78db1c0166 | |
Tomáš Mládek | 736c382e75 | |
Tomáš Mládek | 5284d9435e | |
Tomáš Mládek | 84e0f8f29b | |
Tomáš Mládek | b909e2d978 | |
Tomáš Mládek | 769b62d02e | |
Tomáš Mládek | 8c7fe30815 | |
Tomáš Mládek | 3a34fc346c | |
Tomáš Mládek | 959a613ea3 | |
Tomáš Mládek | 257044e66d | |
Tomáš Mládek | a4c915f73f | |
Tomáš Mládek | a29d66d829 | |
Tomáš Mládek | 0a5398b0a7 | |
Tomáš Mládek | a5e33a5061 | |
Tomáš Mládek | 686da82bb6 | |
Tomáš Mládek | 1059bd0b65 | |
Tomáš Mládek | 3294299c5d | |
Tomáš Mládek | c4b09ea234 | |
Tomáš Mládek | 646f77b712 | |
Tomáš Mládek | 1c858f8c44 | |
Tomáš Mládek | 2a23bb545f | |
Tomáš Mládek | 520dec104d | |
Tomáš Mládek | b1eba7369f | |
Tomáš Mládek | f6845a5a3a | |
Tomáš Mládek | 70d4be1be3 | |
Tomáš Mládek | b76af4ea89 | |
Tomáš Mládek | 6fb0d5f1b6 |
@@ -1,6 +1,7 @@
-node_modules/*
-/.pnpm/*
-/.cargo/*
+*/node_modules
+.pnpm/*
+.cargo/*
 
 upend.sqlite3
 .upend/*
+
@@ -9,3 +9,5 @@ upend.sqlite3
 
 /.pnpm
 /.cargo
+
+example_vault/zb*
@@ -0,0 +1,7 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="dev" type="CompoundRunConfigurationType">
+    <toRun name="dev backend" type="CargoCommandRunConfiguration" />
+    <toRun name="dev frontend" type="js.build_tools.npm" />
+    <method v="2" />
+  </configuration>
+</component>

@@ -0,0 +1,19 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="dev backend" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+    <option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror --secret upend" />
+    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
+    <envs />
+    <option name="emulateTerminal" value="true" />
+    <option name="channel" value="DEFAULT" />
+    <option name="requiredFeatures" value="true" />
+    <option name="allFeatures" value="false" />
+    <option name="withSudo" value="false" />
+    <option name="buildTarget" value="REMOTE" />
+    <option name="backtrace" value="SHORT" />
+    <option name="isRedirectInput" value="false" />
+    <option name="redirectInputPath" value="" />
+    <method v="2">
+      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
+    </method>
+  </configuration>
+</component>

@@ -0,0 +1,19 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="dev backend storybook" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+    <option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror --bind 127.0.0.1:8099" />
+    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
+    <envs />
+    <option name="emulateTerminal" value="true" />
+    <option name="channel" value="DEFAULT" />
+    <option name="requiredFeatures" value="true" />
+    <option name="allFeatures" value="false" />
+    <option name="withSudo" value="false" />
+    <option name="buildTarget" value="REMOTE" />
+    <option name="backtrace" value="SHORT" />
+    <option name="isRedirectInput" value="false" />
+    <option name="redirectInputPath" value="" />
+    <method v="2">
+      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
+    </method>
+  </configuration>
+</component>

@@ -0,0 +1,22 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="dev frontend" type="js.build_tools.npm">
+    <package-json value="$PROJECT_DIR$/webui/package.json" />
+    <command value="run" />
+    <scripts>
+      <script value="dev" />
+    </scripts>
+    <node-interpreter value="project" />
+    <envs />
+    <method v="2">
+      <option name="NpmBeforeRunTask" enabled="true">
+        <package-json value="$PROJECT_DIR$/sdks/js/package.json" />
+        <command value="run" />
+        <scripts>
+          <script value="build" />
+        </scripts>
+        <node-interpreter value="project" />
+        <envs />
+      </option>
+    </method>
+  </configuration>
+</component>

@@ -0,0 +1,7 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="storybook" type="CompoundRunConfigurationType">
+    <toRun name="dev backend storybook" type="CargoCommandRunConfiguration" />
+    <toRun name="storybook:serve" type="js.build_tools.npm" />
+    <method v="2" />
+  </configuration>
+</component>

@@ -0,0 +1,12 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="storybook:serve" type="js.build_tools.npm" nameIsGenerated="true">
+    <package-json value="$PROJECT_DIR$/webui/package.json" />
+    <command value="run" />
+    <scripts>
+      <script value="storybook:serve" />
+    </scripts>
+    <node-interpreter value="project" />
+    <envs />
+    <method v="2" />
+  </configuration>
+</component>

@@ -0,0 +1,11 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="test js sdk" type="JavaScriptTestRunnerJest">
+    <config-file value="$PROJECT_DIR$/sdks/js/jest.config.js" />
+    <node-interpreter value="project" />
+    <jest-package value="$PROJECT_DIR$/sdks/js/node_modules/jest" />
+    <working-dir value="$PROJECT_DIR$" />
+    <envs />
+    <scope-kind value="ALL" />
+    <method v="2" />
+  </configuration>
+</component>
@@ -1,7 +1,8 @@
 {
   "recommendations": [
     "svelte.svelte-vscode",
     "rust-lang.rust-analyzer",
-    "esbenp.prettier-vscode"
+    "esbenp.prettier-vscode",
+    "earthly.earthfile-syntax-highlighting"
   ]
 }
151 .woodpecker.yml

@@ -1,19 +1,32 @@
 pipeline:
-  lint:
-    image: earthly/earthly:v0.7.15
+  test:
+    image: earthly/earthly:v0.8.3
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
     environment:
       - FORCE_COLOR=1
       - EARTHLY_EXEC_CMD="/bin/sh"
-    secrets: [EARTHLY_CONFIGURATION]
+    secrets: [ EARTHLY_CONFIGURATION ]
+    commands:
+      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
+      - earthly bootstrap
+      - earthly +test
+
+  lint:
+    image: earthly/earthly:v0.8.3
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    environment:
+      - FORCE_COLOR=1
+      - EARTHLY_EXEC_CMD="/bin/sh"
+    secrets: [ EARTHLY_CONFIGURATION ]
     commands:
       - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
       - earthly bootstrap
       - earthly +lint
 
 # audit:
-# image: earthly/earthly:v0.7.15
+# image: earthly/earthly:v0.8.3
 # volumes:
 # - /var/run/docker.sock:/var/run/docker.sock
 # environment:

@@ -25,52 +38,8 @@ pipeline:
 # - earthly bootstrap
 # - earthly +audit
 
-  test:
-    image: earthly/earthly:v0.7.15
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    environment:
-      - FORCE_COLOR=1
-      - EARTHLY_EXEC_CMD="/bin/sh"
-    secrets: [EARTHLY_CONFIGURATION]
-    commands:
-      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
-      - earthly bootstrap
-      - earthly +test
-
-  appimage:
-    image: earthly/earthly:v0.7.15
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    environment:
-      - FORCE_COLOR=1
-      - EARTHLY_EXEC_CMD="/bin/sh"
-    secrets: [EARTHLY_CONFIGURATION, REGISTRY, REGISTRY_USER, REGISTRY_PASSWORD]
-    commands:
-      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
-      - earthly bootstrap
-      - earthly +appimage
-
-  appimage:save:
-    group: push
-    image: woodpeckerci/plugin-s3
-    settings:
-      endpoint:
-        from_secret: S3_ENDPOINT
-      access_key:
-        from_secret: S3_ACCESS_KEY
-      secret_key:
-        from_secret: S3_SECRET_KEY
-      bucket: upend-ci-packages
-      path_style: true
-      region: anabasis
-      source: "dist/*.AppImage"
-      target: /
-    secrets: [S3_ENDPOINT, S3_ACCESS_KEY, S3_SECRET_KEY]
-
-  appimage:publish:nightly:
-    group: push
-    image: earthly/earthly:v0.7.15
+  appimage:nightly:
+    image: earthly/earthly:v0.8.3
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
     environment:

@@ -83,23 +52,23 @@
       SSH_CONFIG,
       SSH_UPLOAD_KEY,
       SSH_KNOWN_HOSTS,
+      SENTRY_AUTH_TOKEN
     ]
     commands:
       - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
      - earthly bootstrap
       - earthly --secret GPG_SIGN_KEY --secret SSH_CONFIG --secret SSH_UPLOAD_KEY --secret SSH_KNOWN_HOSTS +deploy-appimage-nightly
     when:
-      branch: [main]
+      branch: [ main ]
 
   docker:nightly:
-    group: push
-    image: earthly/earthly:v0.7.15
+    image: earthly/earthly:v0.8.3
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
     environment:
       - FORCE_COLOR=1
       - EARTHLY_EXEC_CMD="/bin/sh"
-    secrets: [EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD]
+    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD, SENTRY_AUTH_TOKEN ]
     commands:
       - echo $${DOCKER_PASSWORD}| docker login --username $${DOCKER_USER} --password-stdin
       - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml

@@ -107,40 +76,86 @@
       - earthly --push +docker-minimal
       - earthly --push +docker
     when:
-      branch: [main]
+      branch: [ main ]
 
   docker:release:
-    group: push
-    image: earthly/earthly:v0.7.15
+    image: earthly/earthly:v0.8.3
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
     environment:
       - FORCE_COLOR=1
       - EARTHLY_EXEC_CMD="/bin/sh"
-    secrets: [EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD]
+    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD, SENTRY_AUTH_TOKEN ]
     commands:
       - echo $${DOCKER_PASSWORD}| docker login --username $${DOCKER_USER} --password-stdin
       - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
       - earthly bootstrap
-      - earthly --push +docker-minimal --tag=latest
-      - earthly --push +docker-minimal --tag=$CI_COMMIT_TAG
-      - earthly --push +docker --tag=latest
-      - earthly --push +docker --tag=$CI_COMMIT_TAG
+      - earthly --strict --push +docker-minimal --tag=latest
+      - earthly --strict --push +docker-minimal --tag=$CI_COMMIT_TAG
+      - earthly --strict --push +docker --tag=latest
+      - earthly --strict --push +docker --tag=$CI_COMMIT_TAG
     when:
-      event: [tag]
+      event: [ tag ]
 
+  jslib:publish:
+    image: earthly/earthly:v0.8.3
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    environment:
+      - FORCE_COLOR=1
+      - EARTHLY_EXEC_CMD="/bin/sh"
+    secrets: [ EARTHLY_CONFIGURATION, NPM_TOKEN ]
+    commands:
+      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
+      - earthly bootstrap
+      - earthly --strict --push --secret NPM_TOKEN +publish-js-all
+    when:
+      branch: [ main ]
+
+  gitea:prerelease:
+    image: earthly/earthly:v0.8.3
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    environment:
+      - FORCE_COLOR=1
+      - EARTHLY_EXEC_CMD="/bin/sh"
+    secrets: [ EARTHLY_CONFIGURATION, DOCKER_USER, DOCKER_PASSWORD ]
+    commands:
+      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
+      - earthly bootstrap
+      - earthly -a +current-changelog/CHANGELOG_CURRENT.md CHANGELOG_CURRENT.md
+      - rm -rf dist
+    when:
+      event: [ tag ]
+
+  appimage:release:
+    image: earthly/earthly:v0.8.3
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    environment:
+      - FORCE_COLOR=1
+      - EARTHLY_EXEC_CMD="/bin/sh"
+    secrets: [ EARTHLY_CONFIGURATION, REGISTRY, REGISTRY_USER, REGISTRY_PASSWORD, SENTRY_AUTH_TOKEN ]
+    commands:
+      - mkdir ~/.earthly && echo "$EARTHLY_CONFIGURATION" > ~/.earthly/config.yaml
+      - earthly bootstrap
+      - mkdir -p dist/
+      - earthly --strict -a '+appimage-signed/*' dist/
+    when:
+      event: [ tag ]
+
+# todo: webext
+
   gitea:release:
-    group: push
     image: woodpeckerci/plugin-gitea-release
     settings:
       base_url: https://git.thm.place
       files:
-        - "*.AppImage"
-        - "*.asc"
-        - webext/web-ext-artifacts/*.zip
+        - "dist/*"
       checksum: sha512
       api_key:
         from_secret: woodpecker_api_key
       target: main
+      note: CHANGELOG_CURRENT.md
     when:
-      event: [tag]
+      event: [ tag ]
507 CHANGELOG.md

@@ -2,6 +2,460 @@
 
 All notable changes to this project will be documented in this file.
 
+## [0.0.76] - 2024-02-06
+
+### Bug Fixes
+
+- [JSLIB]: Fix types for `putBlob()`, returns a single address
+
+### Features
+
+- [WEBUI,JSLIB]: Upload progress
+- [WEBUI]: Files can be added or removed from the upload dialog
+- [WEBUI]: Select all uploaded files when done
+- [WEBUI]: Start upload on Enter press
+
+### Operations & Development
+
+- Enable CACHE
+- --force pnpm install, DRY Earthfile slightly
+- Cache all rust earthly targets
+- Get rid of AppImage upload to S3
+- Update Earthly image version
+- Remove parallelization
+- [WEBUI]: Force rebundling of dependencies for `dev` script
+- Intellij dev config builds jslib before webui launch
+- Git ignore uploaded files in example_vault
+
+### Styling
+
+- [WEBUI]: Upload progress bar spacing, hide add button
+
+### Build
+
+- [WEBEXT]: Update shared paths with webui, fix build
+- Further refactor Earthfile & build process
+- Fix upend-bin target
+
+## [0.0.75] - 2024-02-02
+
+### Bug Fixes
+
+- [WEBUI]: Fix upload, re-add forgotten components (Footer, AddModal, DropPasteHandler)
+
+### Operations & Development
+
+- Update Earthly image version
+
+### Refactor
+
+- [WEBUI]: Fix typo, rename ProgessBar -> ProgressBar
+
+### Styling
+
+- [WEBUI]: Fix uneven heights of roots
+
+## [0.0.74] - 2024-01-28
+
+### Bug Fixes
+
+- [CLI]: Serve new SPA version
+- [WEBUI]: Selector race conditions / wonkiness
+- [CLI]: Serving web ui in Docker/AppImage
+- [WEBUI]: Ordering of attributes in Selector
+- [JSLIB]: Correct types for `UpObject.attr()`
+
+### Features
+
+- [JSLIB]: Add timeouts / aborts to all api calls
+- [WEBUI]: Required & optional attributes
+
+### Miscellaneous
+
+- [WEBUI]: Put /dist into .eslintignore
+
+### Operations & Development
+
+- [WEBUI]: Fix HMR
+- Make `dev` intellij config not run --release version
+
+### Refactor
+
+- [WEBUI]: Switch to SvelteKit | touchdown
+- [WEBUI]: Switch to SvelteKit | great lint fixing
+- [WEBUI]: Switch to SvelteKit | prettier everything
+- [WEBUI]: Switch to SvelteKit | fix image annotation
+- [WEBUI]: Switch to SvelteKit | fix nested blob preview
+- [WEBUI]: Switch to SvelteKit | properly handle BrowseColumn error
+- [WEBUI]: Misc fixes in ImageViewer
+
+### Styling
+
+- [WEBUI]: Blob preview labels
+
+### Build
+
+- [WEBUI]: Finish webui SPA build config
+- Optimize Earthly target dependencies
+
+## [0.0.73] - 2024-01-27
+
+### Bug Fixes
+
+- [WEBUI]: Version display
+- [WEBUI]: Don't require confirmation for set remove in combine
+- [WEBUI]: "Required" without "Included" also now works in Combine
+- [WEBUI]: "Groups" label in Inspect column
+- [WEBUI]: Allow selection with cmd for macos
+- [WEBUI]: Various app sizing fixes
+- [WEBUI]: Fix sizing / overflows on <=1080 screens?
+- [WEBUI]: Upobject label overflow
+- [WEBUI]: Fix editing through inspect attribute list
+- [WEBUI]: Surface allows rudimentary rescaling
+- [WEBUI]: UpLink label overflows
+- [WEBUI]: Overflow of "Used" section in Attribute Inspect
+- [WEBUI]: Lint
+- [WEBUI]: Remove surface story, fix lint
+- [WEBUI]: Z-index on surface
+- [WEBUI]: Surface: point position matches axes
+- [WEBUI]: Surface starts at center
+- [WEBUI]: Error on search confirm
+- [WEBUI]: SurfaceColumn with new Selectors
+- [WEBUI]: Error in SurfaceColumn due to missing `y`
+- [WEBUI]: "initial" Selector values are no longer uneditable
+- [WEBUI]: Multiple Surface columns
+- [WEBUI]: Position of selector on surface
+- [WEBUI]: Surface centering on resize
+- [WEBUI]: Fix duplicate Selector options (?)
+- [DB]: Handling (again) existing files + tests
+- Prevent crashes while formatting unexpected value types
+- Selectors keep focus while adding entries
+- [WEBUI]: Url type display in UpObject
+- [WEBUI]: Attribute columns being squashed to unreadability
+- [WEBUI]: Editable overflow
+- Uploads via API are assigned paths like via FS
+- [CLI]: Image previews work for paths without extensions
+- [CLI]: Add ID3_PICTURE attribute description
+- [WEBUI]: Sort & optimize Keyed section
+- [WEBUI]: Selection in EntryList
+
+### Features
+
+- [WEBUI]: Proper set operations
+- [WEBUI]: Add group view, duplicate group view
+- [WEBUI]: Quick & dirty reverse path resolution for duplicate group distinction
+- [WEBUI]: Turn groups view into a column, allow selection
+- [DB]: Add new vault scan modes (flat, depthfirst)
+- [DB]: Add an "INCOMING" rescan mode
+- [DB]: Add an "INCOMING" rescan mode
+- [DB]: Duplicate blob paths on initial scan
+- [JSLIB]: Add vault options functions
+- [WEBUI]: Show current vault mode in setup
+- [JSLIB]: Add variables to jslib query builder
+- [WEBUI]: Distinguish between correctly & incorrectly typed members in Inspect
+- [WEBUI]: Surface: add "display as point"
+- [WEBUI]: Surface view as Column in Browse
+- [CLI]: Add `--rescan_mode` CLI option, fix storybook cmd
+- [WEBUI]: "Last searched" options in header
+- [WEBUI]: SurfaceColumn's axes are fully reflected in URL
+- [JSLIB]: Or/and/not/join query builder support
+- [WEBUI]: SurfaceColumn automatically finds PERPENDICULAR attributes, if set
+- [WEBUI]: Press shift and click close to reload a column
+- [WEBUI]: Proper autofit of SurfaceColumn
+- [CLI,WEBUI]: Check file presence via HEAD, disable download button if necessary
+- [WEBUI]: Stable type sort in Inspect: by amount of attributes, address
+- [JSLIB]: Implement toString for UpObject
+- Add spinner to Selector
+- [CLI]: Add ID3 image extraction
+- [WEBUI]: Allow search / selection of entries via their attributes
+- [WEBUI]: Display KEYs in UpObject banner
+- [WEBUI]: Vault name in title on home
+- [WEBUI]: Add Keyed display to Home
+- [WEBUI]: Add section links from Home
+
+### Miscellaneous
+
+- Specify crate resolver
+- [JSLIB]: Add eslint ava
+- [JSLIB]: Rebuild before running tests
+- [JSLIB]: Version bump
+- [JSLIB]: Fix eslint
+- [WEBUI]: Update storybook
+- [WEBUI]: Update entity addresses for storybook
+- [JSLIB]: Bump version
+- Add intellij run configurations
+- Fix types
+
+### Operations & Development
+
+- Add appimages & changelogs to gitea releases
+- Test before lint
+- Use detached signature for appimages
+- Add mail pipeline step
+- Fix mail?
+- Remove mail (for the time being)
+- Fix prerelease step
+
+### Performance
+
+- [WEBUI]: Only check for file existence for UpObjct banners
+- [WEBUI]: Use addressToComponents to get attribute addresses without querying backend
+- [JSLIB]: Add `attr` cache
+- Cancel unfinished updates in Selector
+- [WEBUI]: Early set for static Selector options
+
+### Refactor
+
+- [WEBUI]: Use EntitySetEditor in Inspect & MultiGroup
+- [DB]: Better impls for UNode/UHierPath
+- [WEBUI]: Upobject label into own component
+- [DB]: Use `parse` instead of `from_str`
+- [DB]: Refactor tests in fs store
+- Tree mode -> (new) blob mode
+- [DB]: Use jwalk instead of walkdir
+- [DB]: Refactor rescan process
+- [JSLIB]: Specific constant for any instead of undefined
+- [WEBUI]: Use new query api
+- [CLI]: Use cargo manifest dir for resources in dev mode
+- [WEBUI]: Selector refactor, non-destructive search
+- [WEBUI]: Button labels on columns are i18n'd
+- [WEBUI]: Get rid of `any` in Surface
+- [WEBUI]: I18n in UpObject
+- [JSLIB]: Remove `url` and `attribute` from `getAddress`, fix build
+- [CLI]: Remove forgotten println
+- [CLI]: Refix log level for vault rescans
+- Chores in Selector.svelte
+- Dbg calls in Selector.svelte identify element
+- Remove unnecessary `scoped` leftovers from Vue
+- Formatting
+- [DB]: Remove deprecation notice until there's actually a better way
+- Clippy fixes
+- [WEBUI]: Use constants
+
+### Styling
+
+- [WEBUI]: Non-inspect columns are lighter
+- [WEBUI]: Padding on groups in inspect
+- [WEBUI]: Notes in properties, enlarge scrollable area
+- [WEBUI]: Roots on home are in a column
+- [WEBUI]: Embolden 0 axes in Surface, text shadow
+- [WEBUI]: Reorder options in selector
+- [WEBUI]: Fix partially hidden Home footer; spacing
+- [WEBUI]: Column/inspect sizing, avoid scrollbar overlap
+- [WEBUI]: 2 columns at home
+- Show multiple roots as banners instead of full cards
+- [WEBUI]: # -> ⌘
+- [WEBUI]: Key display in non-banners also
+- [WEBUI]: Monospace & diminished key display
+- [WEBUI]: Hide type keys
+
+## [0.0.72] - 2023-10-22
+
+### Bug Fixes
+
+- [WEBUI]: Inner group preview sizing
+- [WEBUI]: Various mobile improvements (#23)
+- [WEBUI]: Ultrawide detail mode
+- Double ^C actually stops
+- [WEBEXT]: External instances, link opens stored instance
+- Fix mime detection on mac os
+- Web ui flag
+- Api fetch store info
+- [WEBUI]: Resolve upobjects with empty labels, explicitly disable resolving
+- Gallery empty state
+- Upgrade shadow-rs, fix libgit build
+- Disable libgit2 shadow-rs functionality, actually fix build
+- Local js dependencies
+- Build wasmlib before frontend
+- Upend js lib build (`files`)
+- Minor entity not yet loaded bug
+- Backlinks, untyped links don't include OFs
+- Unclickable items in detail mode, fixes #57
+- Concurrent image loading
+- Impl display for upmultihash, fix preview debug log
+- Docker improvements
+- (loading) image overflow
+- Appimage webui path
+- Docker-minimal missing libssl3
+- Upgrade vite, get rid of vite build voodoo
+- Audiopreview overflow
+- Never cache index.html, prevent stale assets
+- Don't hide jobs
+- Footer only showable when jobs present
+- Duplicate wasm initialization
+- Don't show type editor for nontypes
+- Entrylist scroll hijack
+- Wasm lint
+- Make `componentsToAddress` usable from JS
+- Webui layout & sizing fixes
+- Add url attributes to url type address
+- Webui, detail doesn't take up the whole screen
+- 3d model preview overflow
+- Don't duplicate columns unless shift is pressed
+- Hide browse add column after blur
+- Accessibility & lints
+- Audio annotations not being saved properly
+- Entitylist entry add
+- Selector overflow in entitylist
+- [JSLIB]: :sparkles: allow initialization of wasm via wasm modules
+- [JSLIB]: :wrench: moved wasm from dependencies to dev dependencies
+- [WEBUI]: :bug: add placeholder to indicate url pasting in entitylist
+- [JSLIB]: :rotating_light: fix lint fail due to missing type-only imports
+- [DB]: :bug: fix join behavior
+- [JSLIB]: :technologist: better error messages for api/query
+- [DB]: :bug: actually fix join behavior, improve performance as well
+- [WEBUI]: :ambulance: fix upend wasm import
+- [JSLIB]: :wrench: fix gitignore
+- [WEBUI]: Properly center banner select highlight
+- [WEBUI]: Make non-inspect columns play nice with index context
+- [CLI]: Proper version in vault info
+
+### Features
+
+- [WEBEXT]: Add link to instance
+- Add `get` cli command, cli commands don't panic
+- [CLI]: Request the whole obj listing for `get`
+- Limit concurrent image loading
+- Upend.js `attr` includes backlinks
+- Provenance, vault stats
+- Add endpoint to aid with db migration
+- Extractors append types
+- Add link to typed entry views
+- Rudimentary type editor
+- Add download button to UpObject
+- Concurrent image loading indication
+- Add debug logging for external command extractors
+- Use `audiowaveform` for audio preview generation
+- Allow specifying vault name as env
+- Add basic group section to home
+- Add group count
+- Property adding in entrylist
+- Modeless group operations
+- Modeless entrylist editing
+- Always show members in inspect
+- Show URL types in non-banner upobjects
+- :package: upend jslib + wasm can be used from node
+- [JSLIB]: :sparkles: add basic query builder
+- [JSLIB]: :recycle: eav helper getters for uplisting
+- [JSLIB]: :sparkles: getRaw() just returns URL, fetchRaw() fetches the actual content
+- [WEBUI]: :construction: selection via ctrl+drag
+- [WEBUI]: :construction: generic `BrowseColumn`, EntryView accepts `entities`
+- [WEBUI]: :construction: base of select all
+- [WEBUI]: :construction: allow selection removal
+- [WEBUI]: :sparkles: batch adding/removing groups
+- Add selection & batch operations
+- [WEBUI]: :sparkles: rudimentary combine column
+- [WEBUI]: All "combined" can now be selected
+
+### Miscellaneous
+
+- [WEBEXT]: More descriptive message for visiting upend
+- [WEBEXT]: Version bump
+- Add `debug`
+- Don't print header if result is empty in cli
+- [CLI]: Gracefull failback if API format changes
+- [WEBEXT]: Version bump
+- [CI]: Include web-ext artifacts in (pre)releases
+- Remove unused dependencies
+- Fix tests on mac
+- EntryList default columns
+- Include versions of all packages in /info
+- Deprecate get_all_attributes (#38)
+- Migrate from yarn to pnpm
+- Fix taskfile (pnpm --frozen-lockfile)
+- Lock update
+- Rename photo extractor to EXIF extractor
+- Remove unnecessary std::, reformat
+- Reformat webui w/ prettier
+- Add VS Code recommended extensions
+- Add .editorconfig
+- Rename build dockerfiles
+- Add prettier for webui
+- Add deploy:docker task
+- Change db/store traces to trace level
+- Log level to trace
+- Dev:frontend relies on build:jslib
+- Pnpm lock update
+- Reformat?
+- Remove prod tasks from Taskfile
+- Update cargo & webui deps
+- Rename Gallery to EntityList
+- Logging for swr fetch
+- Update upend logo
+- Fix stories errors
+- Update git cliff config
+- Change wording on "Create object", i18n
+- [JSLIB]: :recycle: tidy up tsconfig.json
+- :technologist: add earthly to recommended extensions
+- [JSLIB]: :wrench: tidy up gitignore
+- [JSLIB]: :recycle: use wasmlib from npm
+- [JSLIB]: :bookmark: version bump to 0.0.5
+
+### Operations & Development
+
+- Fix publish api key (?)
+- Fix woodpecker path check
+- Prerelease every push to main
+- Verbose build of upend.js
+- Move from using global `rust` image to local `rust-upend`
+- Also use local node docker image
+- Also cache target for incremental builds
+- Only upload nightlies from main
+- Upload packages to minio
+- Fix docker tasks
+- Add `gpg-agent` to upend-deploy docker
+- Also build a minimal docker image
+- Only publish dockers from main
+- Add an audit target
+- Add logging to Inspect
+- Add earthly target to update changelog
+- Add `--push` to deploy target
+- [JSLIB]: :rocket: publish jslib on tag
+- [JSLIB]: :white_check_mark: test jslib in CI
+- [JSLIB]: :sparkles: publish jslib whenever version is bumped
+- [JSLIB]: :rocket: publish wasmlib to repo
+- [JSLIB]: :bug: fix earthly publish target
+- :construction_worker: sequential js publish
+- [JSLIB]: :ambulance: do not attempt to publish jslib unless we're on `main`
+
+### Refactor
+
+- Move actix app creation into separate module
+- [**breaking**] Unify groups, tags, types (on the backend)
+- Split inspect groups into its own widget
+- InspectGroups more self-sufficient
+- Get_resource_path, looks in /usr/share
+- Add `DEBUG:IMAGEHALT` localstorage variable that halts concurrent image loading
+- Add global mock/debug switches
+- Generic magic for addressable/asmultihash
+- Unify debug logs in webui
+- Provenance api log
+- EntryList uses CSS grid instead of tables
+- [JSLIB]: Reexport UpEndApi in index
+- :truck: rename jslib to use `@upnd` scope
+- [JSLIB]: :recycle: config obj instead of positional args in api
+
+### Styling
+
+- Smaller iconbutton text
+- Don't use detail layout under 1600px width
+- Referred to after members
+- No more labelborder, more conventional table view
+- [WEBUI]: Transition select state in EntityList
+- [WEBUI]: Slightly reduce empty space in selectedcolumn
+
+### Testing
+
+- Rudimentary route test
+- Add /api/hier test
+- [SERVER]: Add test for /api/obj/ entity info
+- Improve db open tests
+- [BASE]: :bug: `in` actually tested
+
+### Release
+
+- V0.0.72
+
 ## [0.0.71] - 2023-06-03
 
 ### Bug Fixes
@@ -70,7 +524,7 @@ All notable changes to this project will be documented in this file.
 
 - Add more buttony upend icon
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Clippy lints
 - [WEBUI]: Fix eslint errors

@@ -113,6 +567,15 @@ All notable changes to this project will be documented in this file.
 - Bump webext version
 - Fancify readme
 - Links in readme
+- Switch to using git cliff for changelogs
+- Release
+
+### Operations & Development
+
+- Update clean task
+- Fix deps
+- Switch from Gitlab CI to Woodpecker, Taskfile fixes
+- Conditions on lints
 
 ### Refactor
 

@@ -131,13 +594,6 @@ All notable changes to this project will be documented in this file.
 - Add text to iconbuttons
 - Also show attr in type
 
-### Ci
-
-- Update clean task
-- Fix deps
-- Switch from Gitlab CI to Woodpecker, Taskfile fixes
-- Conditions on lints
-
 ## [0.0.70] - 2023-03-08
 
 ### Bug Fixes

@@ -165,7 +621,7 @@ All notable changes to this project will be documented in this file.
 - Shift+click to add on right
 - Resizable columns
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Rename /media to /assets
 - Add example vault with 1 video

@@ -224,7 +680,7 @@ All notable changes to this project will be documented in this file.
 - [UI]: Reverse surface Y scale, add loading state
 - Add current position display to Surface view
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - [UI]: Adjust OFT features on videoviewer timecode
 - [UI]: Footer is hidden by default

@@ -241,10 +697,6 @@ All notable changes to this project will be documented in this file.
 - [UI]: Switch Inter for IBM Plex
 - [UI]: Switched root font size from 15px to 16px
 
-### Wip
-
-- Multiple modes of display for surface
-
 ## [0.0.68] - 2022-12-22
 
 ### Bug Fixes

@@ -274,25 +726,24 @@ All notable changes to this project will be documented in this file.
 - Supported format detection in videoviewer
 - Loading state in videoviewer preview
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Log instrumenting
 - Don't package by default
 - Log -> tracing
-- ...
 - Update web deps
 - Css fix
 
+### Operations & Development
+
+- Make makefile more command-y
+
 ### Performance
 
 - Only resort once initial query has finished
 - Only show items in gallery once sorted
 - Enable lazy loading of images (?)
 
-### Ci
-
-- Make makefile more command-y
-
 ### Ui
 
 - Replace spinner

@@ -314,7 +765,7 @@ All notable changes to this project will be documented in this file.
 - Add media (duration) extractor
 - Add duration display for audio preview
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Unused css rule
 - Shut up svelte check

@@ -332,7 +783,7 @@ All notable changes to this project will be documented in this file.
 - Confirm before generating audio peaks in browser, avoid lock-ups in Chrome
 - Remove BlobViewer duplicity in Inspect
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - --ui-enabled actually does something
 - 32 max port retries

@@ -358,7 +809,7 @@ All notable changes to this project will be documented in this file.
 - Recurse up to 3 levels resolving group previews
 - On group preview, prefer objects with previews
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Put config into its own struct
 - Update address constants (fix file detection, group adding)

@@ -406,7 +857,7 @@ All notable changes to this project will be documented in this file.
 - Add options to previews
 - If `audiowaveform` is present, generate & cache peaks on backend
 
-### Miscellaneous Tasks
+### Miscellaneous
 
 - Add logging to fs tests
 - Fix frontend lint

@@ -425,6 +876,10 @@ All notable changes to this project will be documented in this file.
 - Add logging
 - Fix typo
 
+### Operations & Development
+
+- Update Makefile for new webui build also
+
 ### Performance
 
 - First check for files in /raw/

@@ -443,10 +898,6 @@ All notable changes to this project will be documented in this file.
 
 - Switch from Rollup to Vite, upgrade Svelte
 
-### Ci
-
-- Update Makefile for new webui build also
-
 ### Hotfix
 
 - Disable transactions for now
@@ -283,6 +283,56 @@ dependencies = [
  "syn 1.0.109",
 ]
 
+[[package]]
+name = "actix-web-lab"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7675c1a84eec1b179c844cdea8488e3e409d8e4984026e92fa96c87dd86f33c6"
+dependencies = [
+ "actix-files",
+ "actix-http",
+ "actix-router",
+ "actix-service",
+ "actix-utils",
+ "actix-web",
+ "actix-web-lab-derive",
+ "ahash 0.8.3",
+ "arc-swap",
+ "async-trait",
+ "bytes",
+ "bytestring",
+ "csv",
+ "derive_more",
+ "futures-core",
+ "futures-util",
+ "http",
+ "impl-more",
+ "itertools",
+ "local-channel",
+ "mediatype",
+ "mime",
+ "once_cell",
+ "pin-project-lite",
+ "regex",
+ "serde",
+ "serde_html_form",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+]
+
+[[package]]
+name = "actix-web-lab-derive"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9aa0b287c8de4a76b691f29dbb5451e8dd5b79d777eaf87350c9b0cbfdb5e968"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.29",
+]
+
 [[package]]
 name = "actix_derive"
 version = "0.6.0"

@@ -431,6 +481,24 @@ version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
 
+[[package]]
+name = "arc-swap"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
+
+[[package]]
+name = "argon2"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
+dependencies = [
+ "base64ct",
+ "blake2",
+ "cpufeatures",
+ "password-hash",
+]
+
 [[package]]
 name = "arrayref"
 version = "0.3.7"

@@ -449,6 +517,17 @@ version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
 
+[[package]]
+name = "async-trait"
+version = "0.1.76"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "531b97fb4cd3dfdce92c35dedbfdc1f0b9d8091c8ca943d6dae340ef5012d514"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.29",
+]
+
 [[package]]
 name = "autocfg"
 version = "1.1.0"

@@ -488,6 +567,12 @@ version = "0.21.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "414dcefbc63d77c526a76b3afcf6fbb9b5e2791c19c3aa2297733208750c6e53"
 
+[[package]]
+name = "base64ct"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"

@@ -500,6 +585,15 @@ version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
 
+[[package]]
+name = "blake2"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
+dependencies = [
+ "digest",
+]
+
 [[package]]
 name = "blake2b_simd"
 version = "1.0.1"

@@ -796,9 +890,9 @@ dependencies = [
 
 [[package]]
 name = "cpufeatures"
-version = "0.2.9"
+version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1"
+checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
 dependencies = [
  "libc",
 ]

@@ -812,6 +906,20 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
+[[package]]
+name = "crossbeam"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.8"

@@ -846,6 +954,16 @@ dependencies = [
  "scopeguard",
 ]
 
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-utils"
 version = "0.8.16"

@@ -865,6 +983,27 @@ dependencies = [
  "typenum",
 ]
 
+[[package]]
+name = "csv"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe"
|
||||||
|
dependencies = [
|
||||||
|
"csv-core",
|
||||||
|
"itoa",
|
||||||
|
"ryu",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "csv-core"
|
||||||
|
version = "0.1.11"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "darling"
|
name = "darling"
|
||||||
version = "0.14.4"
|
version = "0.14.4"
|
||||||
|
@ -998,6 +1137,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"block-buffer",
|
"block-buffer",
|
||||||
"crypto-common",
|
"crypto-common",
|
||||||
|
"subtle",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1467,6 +1607,12 @@ dependencies = [
|
||||||
"num-traits",
|
"num-traits",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "impl-more"
|
||||||
|
version = "0.1.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "indexmap"
|
name = "indexmap"
|
||||||
version = "1.9.3"
|
version = "1.9.3"
|
||||||
|
@ -1508,6 +1654,15 @@ dependencies = [
|
||||||
"winapi",
|
"winapi",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.12.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
version = "1.0.9"
|
version = "1.0.9"
|
||||||
|
@ -1555,6 +1710,16 @@ dependencies = [
|
||||||
"simple_asn1",
|
"simple_asn1",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jwalk"
|
||||||
|
version = "0.8.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56"
|
||||||
|
dependencies = [
|
||||||
|
"crossbeam",
|
||||||
|
"rayon",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kamadak-exif"
|
name = "kamadak-exif"
|
||||||
version = "0.5.5"
|
version = "0.5.5"
|
||||||
|
@ -1608,9 +1773,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.147"
|
version = "0.2.153"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
|
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libsqlite3-sys"
|
name = "libsqlite3-sys"
|
||||||
|
@ -1723,6 +1888,12 @@ dependencies = [
|
||||||
"regex-automata 0.1.10",
|
"regex-automata 0.1.10",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mediatype"
|
||||||
|
version = "0.19.17"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "83a018c36a54f4e12c30464bbc59311f85d3f6f4d6c1b4fa4ea9db2b174ddefc"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
version = "2.6.0"
|
version = "2.6.0"
|
||||||
|
@ -2111,6 +2282,17 @@ version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "944553dd59c802559559161f9816429058b869003836120e262e8caec061b7ae"
|
checksum = "944553dd59c802559559161f9816429058b869003836120e262e8caec061b7ae"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "password-hash"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
|
||||||
|
dependencies = [
|
||||||
|
"base64ct",
|
||||||
|
"rand_core",
|
||||||
|
"subtle",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "paste"
|
name = "paste"
|
||||||
version = "1.0.14"
|
version = "1.0.14"
|
||||||
|
@ -2564,6 +2746,19 @@ dependencies = [
|
||||||
"syn 2.0.29",
|
"syn 2.0.29",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_html_form"
|
||||||
|
version = "0.2.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "224e6a14f315852940f3ec103125aa6482f0e224732ed91ed3330ed633077c34"
|
||||||
|
dependencies = [
|
||||||
|
"form_urlencoded",
|
||||||
|
"indexmap 2.0.0",
|
||||||
|
"itoa",
|
||||||
|
"ryu",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.105"
|
version = "1.0.105"
|
||||||
|
@ -2758,6 +2953,12 @@ version = "0.10.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
|
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "subtle"
|
||||||
|
version = "2.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "1.0.109"
|
version = "1.0.109"
|
||||||
|
@ -2928,9 +3129,21 @@ dependencies = [
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
"signal-hook-registry",
|
"signal-hook-registry",
|
||||||
"socket2 0.5.3",
|
"socket2 0.5.3",
|
||||||
|
"tokio-macros",
|
||||||
"windows-sys",
|
"windows-sys",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tokio-macros"
|
||||||
|
version = "2.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio-native-tls"
|
name = "tokio-native-tls"
|
||||||
version = "0.3.1"
|
version = "0.3.1"
|
||||||
|
@ -2941,6 +3154,17 @@ dependencies = [
|
||||||
"tokio",
|
"tokio",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tokio-stream"
|
||||||
|
version = "0.1.14"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"
|
||||||
|
dependencies = [
|
||||||
|
"futures-core",
|
||||||
|
"pin-project-lite",
|
||||||
|
"tokio",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio-util"
|
name = "tokio-util"
|
||||||
version = "0.7.8"
|
version = "0.7.8"
|
||||||
|
@ -3144,6 +3368,7 @@ dependencies = [
|
||||||
"actix-multipart",
|
"actix-multipart",
|
||||||
"actix-rt",
|
"actix-rt",
|
||||||
"actix-web",
|
"actix-web",
|
||||||
|
"actix-web-lab",
|
||||||
"actix_derive",
|
"actix_derive",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"bytes",
|
"bytes",
|
||||||
|
@ -3196,13 +3421,15 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "upend-db"
|
name = "upend-db"
|
||||||
version = "0.0.1"
|
version = "0.0.2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
|
"argon2",
|
||||||
"chrono",
|
"chrono",
|
||||||
"diesel",
|
"diesel",
|
||||||
"diesel_migrations",
|
"diesel_migrations",
|
||||||
"filebuffer",
|
"filebuffer",
|
||||||
|
"jwalk",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"lexpr",
|
"lexpr",
|
||||||
"libsqlite3-sys",
|
"libsqlite3-sys",
|
||||||
|
@ -3213,6 +3440,7 @@ dependencies = [
|
||||||
"nonempty",
|
"nonempty",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
|
"password-hash",
|
||||||
"rayon",
|
"rayon",
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
|
@ -3225,7 +3453,6 @@ dependencies = [
|
||||||
"upend-base",
|
"upend-base",
|
||||||
"url",
|
"url",
|
||||||
"uuid",
|
"uuid",
|
||||||
"walkdir",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
|
Cargo.toml

@@ -1,2 +1,3 @@
 [workspace]
-members = ["base", "db", "cli", "tools/upend_wasm"]
+members = ["base", "db", "cli", "wasm"]
+resolver = "2"

281 Earthfile

@@ -1,7 +1,89 @@
-VERSION 0.7
+VERSION 0.8
+
+# Base targets
+
+base-rust:
+    FROM rust:bookworm
+    RUN rustup component add clippy
+    RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
+    RUN cargo install wasm-pack wasm-bindgen-cli && rustup target add wasm32-unknown-unknown
+    RUN cargo install cargo-audit
+    WORKDIR /upend
+    CACHE $HOME/.cargo
+    COPY Cargo.toml Cargo.lock .
+    COPY base/Cargo.toml base/Cargo.toml
+    COPY cli/Cargo.toml cli/Cargo.toml
+    COPY db/Cargo.toml db/Cargo.toml
+    COPY wasm/Cargo.toml wasm/Cargo.toml
+    RUN cargo fetch --locked
+
+base-backend:
+    FROM +base-rust
+    COPY --dir base cli db wasm .
+
+base-node:
+    FROM node:lts-iron
+    RUN npm install -g pnpm
+    WORKDIR /upend
+    CACHE $HOME/.local/share/pnpm
+    COPY +wasmlib/pkg-web wasm/pkg-web
+    COPY +wasmlib/pkg-node wasm/pkg-node
+    COPY sdks/js/package.json sdks/js/pnpm-lock.yaml sdks/js/
+    RUN cd sdks/js && rm -rf node_modules && pnpm install --frozen-lockfile
+    COPY webui/package.json webui/pnpm-lock.yaml webui/
+    RUN cd webui && rm -rf node_modules && pnpm install --frozen-lockfile
+    COPY --dir webui webext .
+    COPY --dir sdks/js sdks/
+
+base-frontend:
+    FROM +base-node
+    COPY +jslib/dist sdks/js/dist
+    WORKDIR webui
+    RUN rm -rf node_modules && pnpm install --frozen-lockfile
+
+# Intermediate targets
+
+upend-bin:
+    FROM +base-backend
+    CACHE --id=rust-target target
+    COPY +git-version/version.txt .
+    RUN UPEND_VERSION=$(cat version.txt) cargo build --release
+    RUN cp target/release/upend upend.bin
+    SAVE ARTIFACT upend.bin upend
+
+webui:
+    FROM +base-frontend
+    RUN pnpm build
+    SAVE ARTIFACT dist
+
+wasmlib:
+    FROM --platform=linux/amd64 +base-rust
+    COPY --dir base wasm .
+    WORKDIR wasm
+    CACHE target
+    RUN wasm-pack build --target web --out-dir pkg-web && \
+        wasm-pack build --target nodejs --out-dir pkg-node
+    RUN sed -e 's%"name": "upend_wasm"%"name": "@upnd/wasm-web"%' -i pkg-web/package.json && \
+        sed -e 's%"name": "upend_wasm"%"name": "@upnd/wasm-node"%' -i pkg-node/package.json
+    SAVE ARTIFACT pkg-web
+    SAVE ARTIFACT pkg-node
+
+jslib:
+    FROM +base-node
+    WORKDIR sdks/js
+    RUN pnpm build
+    SAVE ARTIFACT dist
+
+webext:
+    FROM +base-node
+    WORKDIR webext
+    RUN pnpm build
+    SAVE ARTIFACT web-ext-artifacts/*.zip
+
+# Final targets
+
 appimage:
-    FROM --platform=linux/amd64 +base-rust
+    FROM debian:bookworm
     RUN apt-get update && \
         apt-get -y install wget pipx binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf python3-pip python3-setuptools squashfs-tools strace util-linux zsync && \
         pipx ensurepath && \

@@ -12,7 +94,16 @@ appimage:
     COPY build/AppImageBuilder.yml .
     RUN sed -e "s/latest/$(./AppDir/usr/bin/upend --version | cut -d ' ' -f 2)/" -i AppImageBuilder.yml
     RUN pipx run appimage-builder
-    SAVE ARTIFACT UpEnd* AS LOCAL dist/
+    SAVE ARTIFACT UpEnd*
+
+appimage-signed:
+    FROM alpine
+    RUN apk add gpg gpg-agent
+    RUN --secret GPG_SIGN_KEY echo "$GPG_SIGN_KEY" | gpg --import
+    COPY +appimage/*.AppImage .
+    RUN gpg --detach-sign --sign --armor *.AppImage
+    SAVE ARTIFACT *.AppImage
+    SAVE ARTIFACT *.asc

 docker-minimal:
     FROM debian:bookworm

@@ -39,7 +130,7 @@ docker:
     SAVE IMAGE --push upend/upend:$tag

 DOCKER_COMMON:
-    COMMAND
+    FUNCTION
     COPY +upend-bin/upend /usr/bin/upend
     COPY --dir +webui/dist /usr/share/upend/webui
     ENTRYPOINT ["/usr/bin/upend"]

@@ -48,37 +139,7 @@ DOCKER_COMMON:
     ENV UPEND_NO_DESKTOP=true
     ENV UPEND_ALLOW_HOST='*'

-upend-bin:
-    FROM +base-rust
-    RUN cargo build --release
-    COPY +git-version/version.txt .
-    RUN UPEND_VERSION=$(cat version.txt) cargo build --release
-    SAVE ARTIFACT target/release/upend upend
-
-webui:
-    FROM +base-node
-    COPY +jslib/jslib tools/upend_js
-    WORKDIR webui
-    RUN pnpm install --frozen-lockfile
-    RUN pnpm build
-    SAVE ARTIFACT dist
-
-wasmlib:
-    FROM --platform=linux/amd64 +base-rust
-    WORKDIR tools/upend_wasm
-    RUN wasm-pack build --target web
-    SAVE ARTIFACT pkg
-
-jslib:
-    FROM +base-node
-    WORKDIR tools/upend_js
-    RUN pnpm build
-    SAVE ARTIFACT . jslib
-
-webext:
-    FROM +base-node
-    WORKDIR webext
-    RUN pnpm build
-
+# CI targets

 lint:
     WAIT

@@ -88,19 +149,17 @@ lint:
     END

 lint-backend:
-    FROM +base-rust
+    FROM +base-backend
+    CACHE --id=rust-target target
     RUN cargo clippy --workspace

 lint-frontend:
-    FROM +base-node
-    COPY +jslib/jslib tools/upend_js
-    WORKDIR webui
-    RUN pnpm install --frozen-lockfile
+    FROM +base-frontend
     RUN pnpm check && pnpm lint

 lint-jslib:
     FROM +base-node
-    WORKDIR tools/upend_js
+    WORKDIR sdks/js
     RUN pnpm lint

 audit:

@@ -110,19 +169,18 @@ audit:
     END

 audit-backend:
-    FROM +base-rust
+    FROM +base-backend
+    CACHE --id=rust-target target
     RUN cargo audit --workspace

 audit-frontend:
-    FROM +base-node
-    COPY +jslib/jslib tools/upend_js
-    WORKDIR webui
-    RUN pnpm install --frozen-lockfile
+    FROM +base-frontend
     RUN pnpm audit

 test:
     WAIT
         BUILD +test-backend
+        BUILD +test-jslib
     END

 test-backend:

@@ -141,20 +199,94 @@ appimage-signed:
 deploy-appimage-nightly:
     FROM alpine
     RUN apk add openssh-client
-    COPY +appimage-signed/* .
     RUN --secret SSH_CONFIG --secret SSH_UPLOAD_KEY --secret SSH_KNOWN_HOSTS \
         mkdir -p $HOME/.ssh && \
         echo "$SSH_CONFIG" > $HOME/.ssh/config && \
         echo "$SSH_UPLOAD_KEY" > $HOME/.ssh/id_rsa && \
         echo "$SSH_KNOWN_HOSTS" > $HOME/.ssh/known_hosts && \
         chmod 600 $HOME/.ssh/*
-    RUN scp -v *.AppImage *.asc mainsite:releases/nightly
+    COPY +appimage-signed/* .
+    RUN --push scp -v *.AppImage *.asc mainsite:releases/nightly
+
+publish-js-all:
+    WAIT
+        BUILD +publish-js-wasm
+        BUILD +publish-js-lib
+    END
+
+publish-js-lib:
+    FROM +base-npm-publish
+    WORKDIR /upend/sdks/js
+    DO +NPM_PUBLISH --pkg_name=@upnd/upend
+
+publish-js-wasm:
+    FROM +base-npm-publish
+    WORKDIR /upend/wasm/pkg-web
+    DO +NPM_PUBLISH --pkg_name=@upnd/wasm-web
+    WORKDIR /upend/wasm/pkg-node
+    DO +NPM_PUBLISH --pkg_name=@upnd/wasm-node
+
+base-npm-publish:
+    FROM +base-node
+    RUN --secret NPM_TOKEN echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > $HOME/.npmrc
+    COPY +jslib/dist sdks/js/dist
+
+NPM_PUBLISH:
+    FUNCTION
+    ARG pkg_name
+    IF --no-cache [ "`npm view $pkg_name version`" != "`node -p \"require('./package.json').version\"`" ]
+        RUN echo "Publishing $pkg_name to npm..."
+        RUN --push npm publish --access public
+    ELSE
+        RUN echo "Nothing to do for $pkg_name."
+    END
+
+# Extensions (WIP)
+
+extensions:
+    WAIT
+        BUILD +extensions-dummy
+    END
+
+extensions-dummy:
+    FROM +base-extensions
+    WORKDIR dummy
+    RUN cargo build --release --target wasm32-unknown-unknown
+    SAVE ARTIFACT ../target/wasm32-unknown-unknown/release/upend_plugin_dummy.wasm
+
+extensions-dummy-signed:
+    FROM +base-sign
+    COPY +extensions-dummy/upend_plugin_dummy.wasm .
+    RUN gpg --detach-sign --sign --armor upend_plugin_dummy.wasm
+    SAVE ARTIFACT upend_plugin_dummy.wasm
+    SAVE ARTIFACT upend_plugin_dummy.wasm.asc
+
+base-extensions:
+    FROM rust:bookworm
+    RUN rustup component add clippy
+    RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
+    RUN cargo install wasm-pack wasm-bindgen-cli && rustup target add wasm32-unknown-unknown
+    WORKDIR /upend/extensions
+    COPY extensions/Cargo.toml Cargo.toml
+    COPY extensions/Cargo.lock Cargo.lock
+    COPY extensions/base/Cargo.toml base/Cargo.toml
+    COPY extensions/dummy/Cargo.toml dummy/Cargo.toml
+    RUN cargo fetch --locked
+    COPY --dir extensions/base ./
+    COPY --dir extensions/dummy ./
+
+# Utility targets

 git-version:
-    LOCALLY
-    RUN ./build/get_version.sh | tee /tmp/upend_version.txt
+    FROM debian:bookworm
+    RUN apt-get update && \
+        apt-get -y install git && \
+        apt-get clean && \
+        rm -rf /var/lib/apt/lists/*
+    COPY build/get_version.sh build/get_version.sh
+    COPY .git .git
+    RUN ./build/get_version.sh > /tmp/upend_version.txt && cat /tmp/upend_version.txt
     SAVE ARTIFACT /tmp/upend_version.txt version.txt
-    RUN rm /tmp/upend_version.txt

 base-rust:
     FROM rust:bookworm

@@ -172,15 +304,30 @@ base-rust:
     COPY --dir base cli db Cargo.toml Cargo.lock .
     COPY --dir tools/upend_wasm tools/

-base-node:
-    FROM node:lts
-    RUN npm install -g pnpm
-    WORKDIR /upend
-    COPY +wasmlib/pkg tools/upend_wasm/pkg
-    COPY tools/upend_js/package.json tools/upend_js/pnpm-lock.yaml tools/upend_js/
-    RUN cd tools/upend_js && pnpm install --frozen-lockfile
-    COPY webui/package.json webui/pnpm-lock.yaml webui/
-    RUN cd webui && pnpm install --frozen-lockfile
-    COPY --dir webui webext .
-    COPY --dir tools/upend_js tools/
+current-changelog:
+    FROM orhunp/git-cliff
+    COPY .git .git
+    RUN git-cliff --current -o CHANGELOG_CURRENT.md
+    SAVE ARTIFACT CHANGELOG_CURRENT.md
+
+update-changelog:
+    LOCALLY
+    COPY +changelog/CHANGELOG.md .
+    RUN git add CHANGELOG.md && git commit -m "release: Update CHANGELOG"
+    RUN --push git push
+
+dev-local:
+    FROM debian:bookworm
+    COPY +jslib/dist /js-dist
+    COPY +wasmlib/pkg-web /wasm-web
+    COPY +wasmlib/pkg-node /wasm-node
+    SAVE ARTIFACT /js-dist AS LOCAL sdks/js/dist
+    SAVE ARTIFACT /wasm-web AS LOCAL wasm/pkg-web
+    SAVE ARTIFACT /wasm-node AS LOCAL wasm/pkg-node
+
+dev-update-sdk:
+    LOCALLY
+    WORKDIR sdks/js
+    RUN pnpm build
+    WORKDIR webui
+    RUN pnpm install

176 Taskfile.yml

@@ -1,176 +0,0 @@
-# https://taskfile.dev
-
-version: "3"
-
-tasks:
-  default:
-    cmds:
-      - task: lint
-      - task: test
-      - task: build
-
-  lint:
-    deps: [lint:frontend, lint:backend, lint:jslib, lint:webext]
-
-  lint:frontend:
-    dir: webui
-    deps: [setup:frontend]
-    sources:
-      - ./**/*.ts
-      - ./**/*.svelte
-    cmds:
-      - pnpm check
-      - pnpm lint
-
-  lint:backend:
-    sources:
-      - ./**/Cargo.toml
-      - ./**/*.rs
-    cmds:
-      - cargo clippy --workspace
-
-  lint:jslib:
-    deps: [build:jslib]
-    dir: tools/upend_js
-    sources:
-      - ./*.ts
-      - ./package.lock
-    cmds:
-      - pnpm lint
-
-  lint:webext:
-    dir: webext
-    deps: [setup:webext]
-    sources:
-      - ./**/*.ts
-      - ./**/*.svelte
-    cmds:
-      - pnpm lint
-
-  test:
-    deps: [test:backend]
-
-  test:backend:
-    sources:
-      - ./**/Cargo.toml
-      - ./**/*.rs
-    cmds:
-      - cargo nextest run --workspace
-
-  build:
-    deps: [build:frontend, build:backend, build:webext]
-
-  build:backend:
-    dir: cli
-    sources:
-      - ./**/Cargo.toml
-      - ./**/*.rs
-    cmds:
-      - cargo build --release
-
-  build:frontend:
-    dir: webui
-    deps: [setup:frontend]
-    sources:
-      - ./**/*.ts
-      - ./**/*.svelte
-    cmds:
-      - pnpm build
-
-  build:jslib:
-    deps: [build:wasmlib]
-    dir: tools/upend_js
-    cmds:
-      - pnpm install --frozen-lockfile
-      - pnpm build
-
-  build:wasmlib:
-    dir: tools/upend_wasm
-    cmds:
-      - wasm-pack build --target web
-
-  setup:frontend:
-    deps: [build:jslib]
-    dir: webui
-    sources:
-      - ../tools/upend_js/*.js
-      - package.lock
-    cmds:
-      - pnpm install --frozen-lockfile
-
-  build:webext:
-    deps: [setup:webext]
-    dir: webext
-    sources:
-      - ./**/*.ts
-      - ./**/*.svelte
-    cmds:
-      - pnpm build
-
-  build:webext:package-sources:
-    deps: [build:webext]
-    cmds:
-      - mkdir -p webext/web-ext-artifacts/tmp/upend/
-      - git ls-files -z tools/upend_js | xargs -0 cp --parents -t webext/web-ext-artifacts/tmp/upend
-      - git ls-files -z webext | xargs -0 cp --parents -t webext/web-ext-artifacts/tmp/upend
-      - cd webext/web-ext-artifacts/tmp/ && zip -vr ../upend-webext-sources.zip upend
-
-  setup:webext:
-    deps: [build:jslib]
-    dir: webext
-    sources:
-      - ../tools/upend_js/*.js
-      - package.lock
-    cmds:
-      - pnpm install --frozen-lockfile
-
-  dev:update_tool_deps:
-    cmds:
-      - task: build:jslib
-      - rm -vrf webui/node_modules/.vite/deps
-
-  dev:
-    deps: [dev:backend, dev:frontend]
-
-  dev:backend:
-    dir: cli
-    cmds:
-      - cargo run --release -- serve ../example_vault --clean --no-browser --reinitialize
-
-  dev:frontend:
-    deps: [build:jslib]
-    dir: webui
-    cmds:
-      - pnpm dev --open
-
-  clean:
-    deps:
-      [clean:backend, clean:frontend, clean:webext, clean:tools, clean:vault]
-
-  clean:backend:
-    cmds:
-      - cargo clean
-
-  clean:frontend:
-    cmds:
-      - rm -rf webui/node_modules
-      - rm -vrf webui/dist webui/public/vendor
-
-  clean:webext:
-    cmds:
-      - rm -rf webext/node_modules
-      - rm -vrf webext/web-ext-artifacts webext/dist
-
-  clean:tools:
-    cmds:
-      - rm -vrf tools/upend_js/*.js
-
-  clean:vault:
-    cmds:
-      - rm -vrf example_vault/.upend
-
-  update-schema:
-    cmds:
-      - rm -f upend.sqlite3
-      - diesel migration run --migration-dir migrations/upend/
-      - diesel print-schema > src/database/inner/schema.rs

BIN assets/upend.png
Binary file not shown.
Before: 2.4 KiB | After: 8.1 KiB

@@ -1,18 +1,54 @@
-<svg style="fill:none" width="255" height="255" xmlns="http://www.w3.org/2000/svg">
-  <style>
-    path {
-      fill:none;
-      stroke:#0a0a0a;
-      stroke-width:15px;
-      stroke-linecap:round;
-      stroke-linejoin:round
-    }
-
-    @media (prefers-color-scheme: dark) {
-      path {
-        stroke: white;
-      }
-    }
-  </style>
-  <path d="M7.5 7.5v0h240m-120 0v0l-120 120m240 0v0l-120-120m0 240v0-240" />
-</svg>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+  style="fill:none"
+  width="256"
+  height="256"
+  version="1.1"
+  id="svg48"
+  sodipodi:docname="upend_b.svg"
+  inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
+  inkscape:export-filename="../webext/icon.png"
+  inkscape:export-xdpi="24.094118"
+  inkscape:export-ydpi="24.094118"
+  xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+  xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+  xmlns="http://www.w3.org/2000/svg"
+  xmlns:svg="http://www.w3.org/2000/svg">
+  <defs
+    id="defs52" />
+  <sodipodi:namedview
+    id="namedview50"
+    pagecolor="#ffffff"
+    bordercolor="#666666"
+    borderopacity="1.0"
+    inkscape:showpageshadow="2"
+    inkscape:pageopacity="0.0"
+    inkscape:pagecheckerboard="0"
+    inkscape:deskcolor="#d1d1d1"
+    showgrid="false"
+    inkscape:zoom="1.2810146"
+    inkscape:cx="-133.87826"
+    inkscape:cy="98.749853"
+    inkscape:window-width="2329"
+    inkscape:window-height="1397"
+    inkscape:window-x="0"
+    inkscape:window-y="260"
+    inkscape:window-maximized="1"
+    inkscape:current-layer="svg48"
+    showguides="true" />
+  <style
+    id="style44"> path { fill:none; stroke:#0a0a0a; stroke-width:15px; stroke-linecap:round; stroke-linejoin:round } @media (prefers-color-scheme: dark) { path { stroke: white; } } </style>
+  <rect
+    style="display:inline;fill:#002b36;fill-opacity:1;stroke:none;stroke-width:26.2477;stroke-dasharray:none;stroke-opacity:1"
+    id="rect941"
+    width="256"
+    height="256"
+    x="-256"
+    y="0"
+    ry="23.239944"
+    transform="scale(-1,1)" />
+  <path
+    style="color:#000000;fill:#ffffff;stroke:none;stroke-linecap:round;stroke-linejoin:round;-inkscape-stroke:none"
+    d="M 48.587891,53 A 10.5882,10.5882 0 0 0 38,63.587891 10.5882,10.5882 0 0 0 48.587891,74.175781 H 102.43945 L 41.101562,135.51367 a 10.5882,10.5882 0 0 0 0,14.97266 10.5882,10.5882 0 0 0 14.97461,0 L 117.41211,89.148437 V 222.41211 A 10.5882,10.5882 0 0 0 128,233 10.5882,10.5882 0 0 0 138.58789,222.41211 V 89.148437 l 61.33594,61.337893 a 10.5882,10.5882 0 0 0 14.97461,0 10.5882,10.5882 0 0 0 0,-14.97266 L 153.56055,74.175781 h 53.85156 A 10.5882,10.5882 0 0 0 218,63.587891 10.5882,10.5882 0 0 0 207.41211,53 H 128 Z"
+    id="path46" />
+</svg>
Before: 490 B | After: 2.5 KiB

@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<svg
-  style="fill:none"
-  width="255"
-  height="255"
-  version="1.1"
-  id="svg48"
-  sodipodi:docname="upend_b.svg"
-  inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
-  inkscape:export-filename="../webext/icon.png"
-  inkscape:export-xdpi="24.094118"
-  inkscape:export-ydpi="24.094118"
-  xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-  xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-  xmlns="http://www.w3.org/2000/svg"
-  xmlns:svg="http://www.w3.org/2000/svg">
-  <defs
-    id="defs52" />
-  <sodipodi:namedview
-    id="namedview50"
-    pagecolor="#ffffff"
-    bordercolor="#666666"
-    borderopacity="1.0"
-    inkscape:showpageshadow="2"
-    inkscape:pageopacity="0.0"
-    inkscape:pagecheckerboard="0"
-    inkscape:deskcolor="#d1d1d1"
-    showgrid="false"
-    inkscape:zoom="2.1915705"
-    inkscape:cx="240.23868"
-    inkscape:cy="109.9668"
-    inkscape:window-width="3436"
-    inkscape:window-height="1397"
-    inkscape:window-x="0"
-    inkscape:window-y="0"
-    inkscape:window-maximized="1"
-    inkscape:current-layer="svg48" />
-  <style
-    id="style44">
-    path {
-      fill:none;
-      stroke:#0a0a0a;
-      stroke-width:15px;
-      stroke-linecap:round;
-      stroke-linejoin:round
-    }
-
-    @media (prefers-color-scheme: dark) {
-      path {
-        stroke: white;
-      }
-    }
-  </style>
-  <rect
-    style="fill:#002b36;fill-opacity:1;stroke:none;stroke-width:26.2477;stroke-dasharray:none;stroke-opacity:1"
-    id="rect941"
-    width="256"
-    height="256"
-    x="-0.5"
-    y="-0.5"
-    ry="23.239944" />
-  <path
-    d="m 48.088212,52.5882 v 0 H 206.91179 m -79.41179,0 v 0 l -79.411788,79.41179 m 158.823578,0 v 0 L 127.5,52.5882 m 0,158.82358 v 0 V 52.5882"
-    id="path46"
-    style="stroke:#ffffff;stroke-width:21.1764;stroke-dasharray:none;stroke-opacity:1" />
-</svg>
Before: 1.9 KiB

@@ -1,3 +1,4 @@
+use crate::entry::Attribute;
 use crate::error::{AddressComponentsDecodeError, UpEndError};
 use crate::hash::{
     b58_decode, b58_encode, AsMultihash, AsMultihashError, LargeMultihash, UpMultihash, IDENTITY,

@@ -18,7 +19,7 @@ use wasm_bindgen::prelude::*;
 pub enum Address {
     Hash(UpMultihash),
     Uuid(Uuid),
-    Attribute(String),
+    Attribute(Attribute),
     Url(Url),
 }

@@ -62,7 +63,7 @@ impl Address {
             ),
             Self::Attribute(attribute) => (
                 UP_ATTRIBUTE,
-                LargeMultihash::wrap(IDENTITY, attribute.as_bytes())
+                LargeMultihash::wrap(IDENTITY, attribute.to_string().as_bytes())
                     .map_err(UpEndError::from_any)?,
             ),
             Self::Url(url) => (

@@ -102,9 +103,14 @@ impl Address {
             UP_UUID => Ok(Address::Uuid(
                 Uuid::from_slice(digest.as_slice()).map_err(UpEndError::from_any)?,
             )),
-            UP_ATTRIBUTE => Ok(Address::Attribute(
-                String::from_utf8(digest).map_err(UpEndError::from_any)?,
-            )),
+            UP_ATTRIBUTE => {
+                let attribute = String::from_utf8(digest).map_err(UpEndError::from_any)?;
+                if attribute.is_empty() {
+                    Ok(Address::Attribute(Attribute::null()))
+                } else {
+                    Ok(Address::Attribute(attribute.parse()?))
+                }
+            }
             UP_URL => Ok(Address::Url(
                 Url::parse(&String::from_utf8(digest).map_err(UpEndError::from_any)?)
                     .map_err(UpEndError::from_any)?,

@@ -120,7 +126,7 @@ impl Address {
         let (entity_type, entity_content) = match self {
             Address::Hash(uphash) => ("Hash", Some(b58_encode(uphash.to_bytes()))),
             Address::Uuid(uuid) => ("Uuid", Some(uuid.to_string())),
-            Address::Attribute(attribute) => ("Attribute", Some(attribute.clone())),
+            Address::Attribute(attribute) => ("Attribute", Some(attribute.to_string())),
             Address::Url(url) => ("Url", Some(url.to_string())),
         };

@@ -133,11 +139,12 @@ impl Address {
     pub fn from_components(components: AddressComponents) -> Result<Self, UpEndError> {
         // TODO: make this automatically derive from `Address` definition
         let address = match components {
-            AddressComponents { t, c } if t == "Attribute" => {
-                Address::Attribute(c.ok_or(UpEndError::AddressComponentsDecodeError(
-                    AddressComponentsDecodeError::MissingValue,
-                ))?)
-            }
+            AddressComponents { t, c } if t == "Attribute" => Address::Attribute(
+                c.ok_or(UpEndError::AddressComponentsDecodeError(
+                    AddressComponentsDecodeError::MissingValue,
+                ))?
+                .parse()?,
+            ),
             AddressComponents { t, c } if t == "Url" => Address::Url(if let Some(string) = c {
                 Url::parse(&string).map_err(|e| {
                     UpEndError::AddressComponentsDecodeError(

@@ -242,12 +249,17 @@ pub trait Addressable: AsMultihash {
     }
 }

+impl<T> Addressable for T where T: AsMultihash {}
+
 #[cfg(test)]
 mod tests {
     use url::Url;
     use uuid::Uuid;

     use crate::addressing::{Address, IDENTITY};
+    use crate::constants::{
+        TYPE_ATTRIBUTE_ADDRESS, TYPE_HASH_ADDRESS, TYPE_URL_ADDRESS, TYPE_UUID_ADDRESS,
+    };
     use crate::hash::{LargeMultihash, UpMultihash};

     use super::UpEndError;

@@ -260,6 +272,11 @@ mod tests {
         let encoded = addr.encode()?;
         let decoded = Address::decode(&encoded)?;
         assert_eq!(addr, decoded);
+
+        let addr = &*TYPE_HASH_ADDRESS;
+        let encoded = addr.encode()?;
+        let decoded = Address::decode(&encoded)?;
+        assert_eq!(addr, &decoded);
         Ok(())
     }

@@ -269,15 +286,25 @@ mod tests {
         let encoded = addr.encode()?;
         let decoded = Address::decode(&encoded)?;
         assert_eq!(addr, decoded);
+
+        let addr = &*TYPE_UUID_ADDRESS;
+        let encoded = addr.encode()?;
+        let decoded = Address::decode(&encoded)?;
+        assert_eq!(addr, &decoded);
         Ok(())
     }

     #[test]
     fn test_attribute_codec() -> Result<(), UpEndError> {
-        let addr = Address::Attribute(String::from("ATTRIBUTE"));
+        let addr = Address::Attribute("ATTRIBUTE".parse().unwrap());
         let encoded = addr.encode()?;
         let decoded = Address::decode(&encoded)?;
         assert_eq!(addr, decoded);
+
+        let addr = &*TYPE_ATTRIBUTE_ADDRESS;
+        let encoded = addr.encode()?;
+        let decoded = Address::decode(&encoded)?;
+        assert_eq!(addr, &decoded);
         Ok(())
     }

@@ -287,6 +314,11 @@ mod tests {
         let encoded = addr.encode()?;
         let decoded = Address::decode(&encoded)?;
         assert_eq!(addr, decoded);
+
+        let addr = &*TYPE_URL_ADDRESS;
+        let encoded = addr.encode()?;
+        let decoded = Address::decode(&encoded)?;
+        assert_eq!(addr, &decoded);
         Ok(())
     }
 }

@@ -1,4 +1,5 @@
 use crate::addressing::Address;
+use crate::entry::Attribute;
 use crate::entry::InvariantEntry;
 use crate::hash::{LargeMultihash, UpMultihash};

@@ -19,13 +20,13 @@ pub const ATTR_KEY: &str = "KEY";

 lazy_static! {
     pub static ref HIER_ROOT_INVARIANT: InvariantEntry = InvariantEntry {
-        attribute: String::from(ATTR_KEY),
+        attribute: ATTR_KEY.parse().unwrap(),
         value: "HIER_ROOT".into(),
     };
     pub static ref HIER_ROOT_ADDR: Address = HIER_ROOT_INVARIANT.entity().unwrap();
     pub static ref TYPE_HASH_ADDRESS: Address =
         Address::Hash(UpMultihash::from(LargeMultihash::default()));
     pub static ref TYPE_UUID_ADDRESS: Address = Address::Uuid(uuid::Uuid::nil());
-    pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute("".to_string());
+    pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute(Attribute::null());
     pub static ref TYPE_URL_ADDRESS: Address = Address::Url(url::Url::parse("up:").unwrap());
 }

@@ -1,18 +1,55 @@
-use crate::addressing::{Address, Addressable};
+use crate::addressing::Address;
 use crate::error::UpEndError;
 use crate::hash::{b58_decode, sha256hash, AsMultihash, AsMultihashError, UpMultihash};
 use chrono::NaiveDateTime;
 use serde::{Deserialize, Serialize};
 use std::convert::TryFrom;
 use std::io::{Cursor, Write};
+use std::str::FromStr;
 use url::Url;

+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
+pub struct Attribute(String);
+
+impl Attribute {
+    pub fn null() -> Self {
+        Self("".to_string())
+    }
+}
+
+impl std::fmt::Display for Attribute {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
+impl FromStr for Attribute {
+    type Err = UpEndError;
+
+    fn from_str(value: &str) -> Result<Self, Self::Err> {
+        if value.is_empty() {
+            Err(UpEndError::EmptyAttribute)
+        } else {
+            Ok(Self(value.to_uppercase()))
+        }
+    }
+}
+
+impl<S> PartialEq<S> for Attribute
+where
+    S: AsRef<str>,
+{
+    fn eq(&self, other: &S) -> bool {
+        self.0.eq_ignore_ascii_case(other.as_ref())
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Entry {
     pub entity: Address,
-    pub attribute: String,
+    pub attribute: Attribute,
     pub value: EntryValue,
     pub provenance: String,
+    pub user: Option<String>,
     pub timestamp: NaiveDateTime,
 }

@@ -21,10 +58,11 @@ pub struct ImmutableEntry(pub Entry);

 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct InvariantEntry {
-    pub attribute: String,
+    pub attribute: Attribute,
     pub value: EntryValue,
 }

+#[allow(clippy::large_enum_variant)]
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 #[serde(tag = "t", content = "c")]
 pub enum EntryValue {

@@ -35,18 +73,6 @@ pub enum EntryValue {
     Invalid,
 }

-impl Default for Entry {
-    fn default() -> Self {
-        Self {
-            entity: Address::Uuid(uuid::Uuid::nil()),
-            attribute: Default::default(),
-            value: EntryValue::Null,
-            provenance: "SYSTEM".into(),
-            timestamp: NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
-        }
-    }
-}
-
 impl TryFrom<&InvariantEntry> for Entry {
     type Error = UpEndError;

@@ -56,7 +82,8 @@ impl TryFrom<&InvariantEntry> for Entry {
             attribute: invariant.attribute.clone(),
             value: invariant.value.clone(),
             provenance: "INVARIANT".to_string(),
-            ..Default::default()
+            user: None,
+            timestamp: NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
         })
     }
 }

@@ -65,7 +92,7 @@ impl InvariantEntry {
     pub fn entity(&self) -> Result<Address, UpEndError> {
         let mut entity = Cursor::new(vec![0u8; 0]);
         entity
-            .write_all(self.attribute.as_bytes())
+            .write_all(self.attribute.0.as_bytes())
             .map_err(UpEndError::from_any)?;
         entity
             .write_all(self.value.to_string()?.as_bytes())

@@ -91,7 +118,7 @@ impl AsMultihash for Entry {
                 .map_err(|e| AsMultihashError(e.to_string()))?
                 .as_slice(),
         )?;
-        result.write_all(self.attribute.as_bytes())?;
+        result.write_all(self.attribute.0.as_bytes())?;
         result.write_all(
             self.value
                 .to_string()

@@ -110,9 +137,6 @@ impl AsMultihash for InvariantEntry {
     }
 }

-impl Addressable for Entry {}
-impl Addressable for InvariantEntry {}
-
 impl EntryValue {
     pub fn to_string(&self) -> Result<String, UpEndError> {
         let (type_char, content) = match self {

@@ -218,6 +242,14 @@ impl From<Address> for EntryValue {
     }
 }

+pub enum EntryPart {
+    Entity(Address),
+    Attribute(Attribute),
+    Value(EntryValue),
+    Provenance(String),
+    Timestamp(NaiveDateTime),
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
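
The `Attribute` newtype introduced above replaces the bare `String` attribute used previously. A minimal usage sketch, based only on the impls shown in this diff (the `upend_base::entry` and `upend_base::error` import paths are assumptions for illustration):

    use std::str::FromStr;

    use upend_base::entry::Attribute;
    use upend_base::error::UpEndError;

    fn attribute_demo() -> Result<(), UpEndError> {
        // FromStr normalizes to uppercase and rejects the empty string.
        let attr = Attribute::from_str("label")?;
        assert_eq!(attr.to_string(), "LABEL");

        // PartialEq against anything AsRef<str> compares case-insensitively.
        assert!(attr == "Label");

        // The empty attribute is only reachable through the explicit constructor.
        let _null = Attribute::null();
        assert!(Attribute::from_str("").is_err());
        Ok(())
    }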

@@ -3,6 +3,7 @@ pub enum UpEndError {
     HashDecodeError(String),
     AddressParseError(String),
     AddressComponentsDecodeError(AddressComponentsDecodeError),
+    EmptyAttribute,
     CannotSerializeInvalid,
     QueryParseError(String),
     Other(String),

@@ -35,6 +36,7 @@ impl std::fmt::Display for UpEndError {
                     String::from("Invalid EntryValues cannot be serialized."),
                 UpEndError::QueryParseError(err) => format!("Error parsing query: {err}"),
                 UpEndError::Other(err) => format!("Unknown error: {err}"),
+                UpEndError::EmptyAttribute => String::from("Attribute cannot be empty."),
             }
         )
     }

@@ -154,6 +154,15 @@ pub trait AsMultihash {
     fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError>;
 }

+impl<T> AsMultihash for T
+where
+    T: AsRef<[u8]>,
+{
+    fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError> {
+        sha256hash(self)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::hash::{b58_decode, b58_encode};
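
The blanket impl added above means any `AsRef<[u8]>` value gets `as_multihash()` for free, delegating to `sha256hash`. A rough sketch under the same assumption about the crate's import paths:

    use upend_base::hash::{AsMultihash, AsMultihashError};

    fn hash_demo() -> Result<(), AsMultihashError> {
        // &str and Vec<u8> both implement AsRef<[u8]>, so both can be hashed directly.
        let a = "hello".as_multihash()?;
        let b = b"hello".to_vec().as_multihash()?;
        assert_eq!(a.to_bytes(), b.to_bytes());
        Ok(())
    }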

@@ -1,4 +1,5 @@
 use crate::addressing::Address;
+use crate::entry::Attribute;
 use crate::entry::EntryValue;
 use crate::error::UpEndError;
 use nonempty::NonEmpty;

@@ -6,15 +7,6 @@ use std::borrow::Borrow;
 use std::convert::TryFrom;
 use std::str::FromStr;

-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Attribute(pub String);
-
-impl From<&str> for Attribute {
-    fn from(str: &str) -> Self {
-        Self(str.to_string())
-    }
-}
-
 #[derive(Debug, Clone, PartialEq)]
 pub enum QueryComponent<T>
 where

@@ -79,7 +71,7 @@ impl TryFrom<lexpr::Value> for Attribute {

     fn try_from(value: lexpr::Value) -> Result<Self, Self::Error> {
         match value {
-            lexpr::Value::String(str) => Ok(Attribute(str.to_string())),
+            lexpr::Value::String(str) => str.parse(),
             _ => Err(UpEndError::QueryParseError(
                 "Can only convert to attribute from string.".into(),
             )),

@@ -87,6 +79,7 @@ impl TryFrom<lexpr::Value> for Attribute {
     }
 }

+#[allow(clippy::large_enum_variant)]
 #[derive(Debug, Clone, PartialEq)]
 pub enum QueryPart {
     Matches(PatternQuery),

@@ -331,7 +324,7 @@ mod test {
             query,
             Query::SingleQuery(QueryPart::Matches(PatternQuery {
                 entity: QueryComponent::Variable(None),
-                attribute: QueryComponent::Exact("FOO".into()),
+                attribute: QueryComponent::Exact("FOO".parse().unwrap()),
                 value: QueryComponent::Variable(None)
             }))
         );

@@ -372,7 +365,7 @@ mod test {
             query,
             Query::SingleQuery(QueryPart::Matches(PatternQuery {
                 entity: QueryComponent::Variable(None),
-                attribute: QueryComponent::In(vec!("FOO".parse().unwrap(), "BAR".parse().unwrap())),
+                attribute: QueryComponent::In(vec!("FOO".parse().unwrap(), "BAR".parse().unwrap())),
                 value: QueryComponent::Variable(None)
            }))
         );

@@ -1,5 +1,9 @@
 #!/bin/sh

+which git > /dev/null || {
+    echo "git not found"
+    exit 1
+}
 git_tag=$(git describe --tags --exact-match HEAD 2>/dev/null)

 if [ -z "$git_tag" ]; then

@@ -29,10 +29,10 @@ once_cell = "1.7.2"
 lru = "0.7.0"

 diesel = { version = "1.4", features = [
     "sqlite",
     "r2d2",
     "chrono",
     "serde_json",
 ] }
 diesel_migrations = "1.4"
 libsqlite3-sys = { version = "^0", features = ["bundled"] }

@@ -54,10 +54,10 @@ regex = "1"

 multibase = "0.9"
 multihash = { version = "*", default-features = false, features = [
     "alloc",
     "multihash-impl",
     "sha2",
     "identity",
 ] }
 uuid = { version = "1.4", features = ["v4"] }

@@ -89,19 +89,23 @@ url = "2"

 bytes = "1.4.0"
 signal-hook = "0.3.15"
+actix-web-lab = { version = "0.20.2", features = ["spa"] }
+
+extism = "1.2.0"
+upend-extension-base = { path = "../extensions/base" }

 [build-dependencies]
 shadow-rs = { version = "0.23", default-features = false }

 [features]
 default = [
     "desktop",
     "previews",
     "previews-image",
     "extractors-web",
     "extractors-audio",
     "extractors-exif",
     "extractors-media",
 ]
 desktop = ["webbrowser", "opener", "is_executable"]
 previews = []
@@ -1,31 +1,23 @@
 use std::env::current_exe;
+use std::path::PathBuf;
 
-use anyhow::{anyhow, Result};
 use lazy_static::lazy_static;
 use shadow_rs::{is_debug, shadow};
 
 shadow!(build);
 
-pub fn get_resource_path<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
-    let base_path = if is_debug() {
-        let cwd = std::env::current_exe()?.parent().unwrap().to_path_buf();
-        cwd.join("../../tmp/resources")
+lazy_static! {
+    pub static ref RESOURCE_PATH: PathBuf = if is_debug() {
+        let project_root = build::CARGO_MANIFEST_DIR.parse::<PathBuf>().unwrap();
+        project_root.join("./tmp/resources")
     } else {
-        current_exe()?
+        current_exe()
+            .unwrap()
             .parent()
-            .ok_or(anyhow!("couldn't locate resource path, binary in root"))?
+            .unwrap()
             .join("../share/upend")
     };
+    pub static ref WEBUI_PATH: PathBuf = RESOURCE_PATH.join("webui");
-
-    let result = base_path.join(dir.as_ref());
-    if result.exists() {
-        Ok(result)
-    } else {
-        Err(anyhow!("Path {result:?} doesn't exist."))
-    }
-}
-
-lazy_static! {
     static ref APP_USER_AGENT: String = format!("upend / {}", build::PKG_VERSION);
     pub static ref REQWEST_CLIENT: reqwest::blocking::Client = reqwest::blocking::Client::builder()
         .user_agent(APP_USER_AGENT.as_str())
@@ -36,3 +28,7 @@ lazy_static! {
         .build()
         .unwrap();
 }
+
+pub fn get_version() -> &'static str {
+    option_env!("UPEND_VERSION").unwrap_or("unknown")
+}
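Above, the fallible get_resource_path() helper gives way to lazily initialized RESOURCE_PATH and WEBUI_PATH statics, plus a get_version() helper. A self-contained sketch of the same lazy_static pattern; the concrete paths and the debug check here are illustrative stand-ins, not the values from the diff:

    // Minimal sketch, assuming the lazy_static crate is available.
    use lazy_static::lazy_static;
    use std::path::PathBuf;

    lazy_static! {
        static ref RESOURCE_PATH: PathBuf = if cfg!(debug_assertions) {
            PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tmp/resources")
        } else {
            PathBuf::from("/usr/share/upend")
        };
        static ref WEBUI_PATH: PathBuf = RESOURCE_PATH.join("webui");
    }

    fn main() {
        // The statics are initialized on first access, so a missing directory only
        // surfaces when a caller actually checks for it (as the server setup now does).
        println!("webui at {:?}, exists: {}", *WEBUI_PATH, WEBUI_PATH.exists());
    }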
@@ -4,5 +4,4 @@ pub struct UpEndConfig {
     pub desktop_enabled: bool,
     pub trust_executables: bool,
     pub secret: String,
-    pub key: Option<String>,
 }
@@ -1,3 +1,4 @@
+use std::io::Write;
 use std::sync::Arc;
 
 use super::Extractor;
@@ -8,22 +9,25 @@ use upend_base::{
     constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
     entry::{Entry, EntryValue, InvariantEntry},
 };
+use upend_db::stores::Blob;
 use upend_db::{
     jobs::{JobContainer, JobState},
     stores::{fs::FILE_MIME_KEY, UpStore},
-    UpEndConnection,
+    BlobMode, OperationContext, UpEndConnection,
 };
 
 lazy_static! {
     pub static ref ID3_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
-        attribute: String::from(ATTR_KEY),
+        attribute: ATTR_KEY.parse().unwrap(),
         value: "TYPE_ID3".into(),
     };
     pub static ref ID3_TYPE_LABEL: Entry = Entry {
         entity: ID3_TYPE_INVARIANT.entity().unwrap(),
-        attribute: ATTR_LABEL.into(),
+        attribute: ATTR_LABEL.parse().unwrap(),
         value: "ID3".into(),
-        ..Default::default()
+        provenance: "INVARIANT".to_string(),
+        user: None,
+        timestamp: chrono::Utc::now().naive_utc(),
     };
 }
@@ -33,14 +37,15 @@ impl Extractor for ID3Extractor {
     fn get(
         &self,
         address: &Address,
-        _connection: &UpEndConnection,
+        connection: &UpEndConnection,
         store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {
             let files = store.retrieve(hash)?;
 
-            if let Some(file) = files.get(0) {
+            if let Some(file) = files.first() {
                 let file_path = file.get_file_path();
                 let mut job_handle = job_container.add_job(
                     None,
@@ -57,31 +62,67 @@ impl Extractor for ID3Extractor {
 
                 let tags = id3::Tag::read_from_path(file_path)?;
 
-                let mut result: Vec<Entry> = tags
-                    .frames()
-                    .flat_map(|frame| match frame.content() {
-                        id3::Content::Text(text) => vec![
+                let mut result: Vec<Entry> = vec![];
+
+                for frame in tags.frames() {
+                    if let id3::Content::Text(text) = frame.content() {
+                        result.extend(vec![
                             Entry {
                                 entity: address.clone(),
-                                attribute: format!("ID3_{}", frame.id()),
+                                attribute: format!("ID3_{}", frame.id()).parse()?,
                                 value: match frame.id() {
                                     "TYER" | "TBPM" => EntryValue::guess_from(text),
                                     _ => text.clone().into(),
                                 },
-                                provenance: "SYSTEM EXTRACTOR".to_string(),
+                                provenance: context.provenance.clone() + "EXTRACTOR",
+                                user: context.user.clone(),
                                 timestamp: chrono::Utc::now().naive_utc(),
                             },
                             Entry {
-                                entity: Address::Attribute(format!("ID3_{}", frame.id())),
-                                attribute: ATTR_LABEL.into(),
+                                entity: Address::Attribute(format!("ID3_{}", frame.id()).parse()?),
+                                attribute: ATTR_LABEL.parse().unwrap(),
                                 value: format!("ID3: {}", frame.name()).into(),
-                                provenance: "SYSTEM EXTRACTOR".to_string(),
+                                provenance: context.provenance.clone() + "EXTRACTOR",
+                                user: context.user.clone(),
                                 timestamp: chrono::Utc::now().naive_utc(),
                             },
-                        ],
-                        _ => vec![],
+                        ]);
+                    }
+                }
+
+                let mut has_pictures = false;
+                for (idx, picture) in tags.pictures().enumerate() {
+                    let tmp_dir = tempfile::tempdir()?;
+                    let tmp_path = tmp_dir.path().join(format!("img-{}", idx));
+                    let mut file = std::fs::File::create(&tmp_path)?;
+                    file.write_all(&picture.data)?;
+                    let hash = store.store(
+                        connection,
+                        Blob::from_filepath(&tmp_path),
+                        None,
+                        Some(BlobMode::StoreOnly),
+                        context.clone(),
+                    )?;
+                    result.push(Entry {
+                        entity: address.clone(),
+                        attribute: "ID3_PICTURE".parse()?,
+                        value: EntryValue::Address(Address::Hash(hash)),
+                        provenance: context.provenance.clone() + "EXTRACTOR",
+                        user: context.user.clone(),
+                        timestamp: chrono::Utc::now().naive_utc(),
+                    });
+                    has_pictures = true;
+                }
+                if has_pictures {
+                    result.push(Entry {
+                        entity: Address::Attribute("ID3_PICTURE".parse()?),
+                        attribute: ATTR_LABEL.parse().unwrap(),
+                        value: "ID3 Embedded Image".into(),
+                        provenance: context.provenance.clone() + "EXTRACTOR",
+                        user: context.user.clone(),
+                        timestamp: chrono::Utc::now().naive_utc(),
                     })
-                    .collect();
+                }
 
                 if !result.is_empty() {
                     result.extend(
@@ -90,9 +131,11 @@ impl Extractor for ID3Extractor {
                             .filter(|e| e.attribute != ATTR_LABEL)
                             .map(|e| Entry {
                                 entity: Address::Attribute(e.attribute.clone()),
-                                attribute: ATTR_OF.into(),
+                                attribute: ATTR_OF.parse().unwrap(),
                                 value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
-                                ..Default::default()
+                                provenance: context.provenance.clone() + "EXTRACTOR",
+                                user: context.user.clone(),
+                                timestamp: chrono::Utc::now().naive_utc(),
                             })
                             .collect::<Vec<Entry>>(),
                     );
@@ -101,9 +144,11 @@ impl Extractor for ID3Extractor {
                         ID3_TYPE_LABEL.clone(),
                         Entry {
                             entity: address.clone(),
-                            attribute: ATTR_IN.into(),
+                            attribute: ATTR_IN.parse().unwrap(),
                             value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
-                            ..Default::default()
+                            provenance: context.provenance.clone() + "EXTRACTOR",
+                            user: context.user.clone(),
+                            timestamp: chrono::Utc::now().naive_utc(),
                         },
                     ]);
                 }
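The ID3 extractor above is rewritten from a flat_map pipeline into an imperative loop so it can also write each embedded picture to a temporary file and hand it to the blob store under the new operation context. A minimal sketch of that temp-file hand-off, assuming the tempfile crate; store_blob() and the byte vectors are stand-ins for the project's UpStore and picture data:

    use std::io::Write;

    // Stand-in for UpStore::store(): here we just report the size of the file.
    fn store_blob(path: &std::path::Path) -> std::io::Result<String> {
        Ok(format!("blob({} bytes)", std::fs::metadata(path)?.len()))
    }

    fn main() -> std::io::Result<()> {
        let pictures: Vec<Vec<u8>> = vec![vec![0xFF, 0xD8, 0xFF], vec![0x89, b'P', b'N', b'G']];
        for (idx, data) in pictures.iter().enumerate() {
            let tmp_dir = tempfile::tempdir()?;
            let tmp_path = tmp_dir.path().join(format!("img-{}", idx));
            let mut file = std::fs::File::create(&tmp_path)?;
            file.write_all(data)?;
            println!("picture {} -> {}", idx, store_blob(&tmp_path)?);
            // tmp_dir is dropped at the end of each iteration, so the temporary
            // file must already have been handed to the store by this point.
        }
        Ok(())
    }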
@@ -3,6 +3,7 @@ use std::sync::Arc;
 use super::Extractor;
 use anyhow::{anyhow, Result};
 use lazy_static::lazy_static;
+use upend_base::entry::Attribute;
 use upend_base::{
     addressing::Address,
     constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
@@ -11,7 +12,7 @@ use upend_base::{
 use upend_db::{
     jobs::{JobContainer, JobState},
     stores::{fs::FILE_MIME_KEY, UpStore},
-    UpEndConnection,
+    OperationContext, UpEndConnection,
 };
 
 pub struct ExifExtractor;
@@ -21,14 +22,16 @@ pub struct ExifExtractor;
 
 lazy_static! {
     pub static ref EXIF_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
-        attribute: String::from(ATTR_KEY),
+        attribute: ATTR_KEY.parse().unwrap(),
         value: "TYPE_EXIF".into(),
     };
     pub static ref EXIF_TYPE_LABEL: Entry = Entry {
         entity: EXIF_TYPE_INVARIANT.entity().unwrap(),
-        attribute: ATTR_LABEL.into(),
+        attribute: ATTR_LABEL.parse().unwrap(),
         value: "EXIF".into(),
-        ..Default::default()
+        provenance: "INVARIANT".to_string(),
+        timestamp: chrono::Utc::now().naive_utc(),
+        user: None
     };
 }
@@ -39,11 +42,12 @@ impl Extractor for ExifExtractor {
         _connection: &UpEndConnection,
         store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {
             let files = store.retrieve(hash)?;
 
-            if let Some(file) = files.get(0) {
+            if let Some(file) = files.first() {
                 let file_path = file.get_file_path();
                 let mut job_handle = job_container.add_job(
                     None,
@@ -63,42 +67,42 @@ impl Extractor for ExifExtractor {
                 let exifreader = exif::Reader::new();
                 let exif = exifreader.read_from_container(&mut bufreader)?;
 
-                let mut result: Vec<Entry> = exif
+                let mut result: Vec<Entry> = vec![];
+
+                for field in exif
                     .fields()
                     .filter(|field| !matches!(field.value, exif::Value::Undefined(..)))
-                    .flat_map(|field| {
+                {
                     if let Some(tag_description) = field.tag.description() {
-                        let attribute = format!("EXIF_{}", field.tag.1);
+                        let attribute: Attribute = format!("EXIF_{}", field.tag.1).parse()?;
 
-                        vec![
+                        result.extend(vec![
                             Entry {
                                 entity: address.clone(),
                                 attribute: attribute.clone(),
                                 value: match field.tag {
                                     exif::Tag::ExifVersion => {
                                         EntryValue::String(format!("{}", field.display_value()))
                                     }
-                                    _ => EntryValue::guess_from(format!(
-                                        "{}",
-                                        field.display_value()
-                                    )),
-                                },
-                                provenance: "SYSTEM EXTRACTOR".to_string(),
-                                timestamp: chrono::Utc::now().naive_utc(),
-                            },
-                            Entry {
-                                entity: Address::Attribute(attribute),
-                                attribute: ATTR_LABEL.into(),
-                                value: format!("EXIF: {}", tag_description).into(),
-                                provenance: "SYSTEM EXTRACTOR".to_string(),
-                                timestamp: chrono::Utc::now().naive_utc(),
-                            },
-                        ]
-                    } else {
-                        vec![]
-                    }
-                })
-                .collect();
+                                    _ => {
+                                        EntryValue::guess_from(format!("{}", field.display_value()))
+                                    }
+                                },
+                                provenance: context.provenance.clone() + "EXTRACTOR",
+                                user: context.user.clone(),
+                                timestamp: chrono::Utc::now().naive_utc(),
+                            },
+                            Entry {
+                                entity: Address::Attribute(attribute),
+                                attribute: ATTR_LABEL.parse().unwrap(),
+                                value: format!("EXIF: {}", tag_description).into(),
+                                provenance: context.provenance.clone() + "EXTRACTOR",
+                                user: context.user.clone(),
+                                timestamp: chrono::Utc::now().naive_utc(),
+                            },
+                        ]);
+                    }
+                }
 
                 if !result.is_empty() {
                     result.extend(
@@ -107,9 +111,11 @@ impl Extractor for ExifExtractor {
                         .filter(|e| e.attribute != ATTR_LABEL)
                         .map(|e| Entry {
                             entity: Address::Attribute(e.attribute.clone()),
-                            attribute: ATTR_OF.into(),
+                            attribute: ATTR_OF.parse().unwrap(),
                             value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
-                            ..Default::default()
+                            provenance: context.provenance.clone() + "EXTRACTOR",
+                            user: context.user.clone(),
+                            timestamp: chrono::Utc::now().naive_utc(),
                         })
                         .collect::<Vec<Entry>>(),
                 );
@@ -120,9 +126,11 @@ impl Extractor for ExifExtractor {
                     EXIF_TYPE_LABEL.clone(),
                     Entry {
                         entity: address.clone(),
-                        attribute: ATTR_IN.into(),
+                        attribute: ATTR_IN.parse().unwrap(),
                         value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
-                        ..Default::default()
+                        provenance: context.provenance.clone() + "EXTRACTOR",
+                        user: context.user.clone(),
+                        timestamp: chrono::Utc::now().naive_utc(),
                     },
                 ]);
             }
@@ -3,6 +3,7 @@ use std::{process::Command, sync::Arc};
 use super::Extractor;
 use anyhow::{anyhow, Result};
 use lazy_static::lazy_static;
+use tracing::{debug, trace};
 use upend_base::{
     addressing::Address,
     constants::{ATTR_IN, ATTR_KEY, ATTR_LABEL, ATTR_OF},
@@ -11,27 +12,31 @@ use upend_base::{
 use upend_db::{
     jobs::{JobContainer, JobState},
     stores::{fs::FILE_MIME_KEY, UpStore},
-    UpEndConnection,
+    OperationContext, UpEndConnection,
 };
 
 const DURATION_KEY: &str = "MEDIA_DURATION";
 
 lazy_static! {
     pub static ref MEDIA_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
-        attribute: String::from(ATTR_KEY),
+        attribute: ATTR_KEY.parse().unwrap(),
         value: "TYPE_MEDIA".into(),
     };
     pub static ref MEDIA_TYPE_LABEL: Entry = Entry {
         entity: MEDIA_TYPE_INVARIANT.entity().unwrap(),
-        attribute: ATTR_LABEL.into(),
+        attribute: ATTR_LABEL.parse().unwrap(),
         value: "Multimedia".into(),
-        ..Default::default()
+        provenance: "INVARIANT".to_string(),
+        timestamp: chrono::Utc::now().naive_utc(),
+        user: None,
     };
     pub static ref DURATION_OF_MEDIA: Entry = Entry {
-        entity: Address::Attribute(DURATION_KEY.to_string()),
-        attribute: ATTR_OF.into(),
+        entity: Address::Attribute(DURATION_KEY.parse().unwrap()),
+        attribute: ATTR_OF.parse().unwrap(),
         value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
-        ..Default::default()
+        provenance: "INVARIANT".to_string(),
+        timestamp: chrono::Utc::now().naive_utc(),
+        user: None,
     };
 }
@@ -44,11 +49,12 @@ impl Extractor for MediaExtractor {
         _connection: &UpEndConnection,
         store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {
             let files = store.retrieve(hash)?;
 
-            if let Some(file) = files.get(0) {
+            if let Some(file) = files.first() {
                 let file_path = file.get_file_path();
                 let mut job_handle = job_container.add_job(
                     None,
@@ -90,9 +96,10 @@ impl Extractor for MediaExtractor {
             let result = vec![
                 Entry {
                     entity: address.clone(),
-                    attribute: DURATION_KEY.to_string(),
+                    attribute: DURATION_KEY.parse().unwrap(),
                     value: EntryValue::Number(duration),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 },
                 (&MEDIA_TYPE_INVARIANT as &InvariantEntry)
@@ -102,9 +109,11 @@ impl Extractor for MediaExtractor {
                 DURATION_OF_MEDIA.clone(),
                 Entry {
                     entity: address.clone(),
-                    attribute: ATTR_IN.into(),
+                    attribute: ATTR_IN.parse().unwrap(),
                     value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
-                    ..Default::default()
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
+                    timestamp: chrono::Utc::now().naive_utc(),
                 },
             ];
 
@@ -6,7 +6,9 @@ use std::{
 };
 use tracing::{debug, info, trace};
 use upend_base::{addressing::Address, entry::Entry};
-use upend_db::{jobs::JobContainer, stores::UpStore, UpEndConnection, UpEndDatabase};
+use upend_db::{
+    jobs::JobContainer, stores::UpStore, OperationContext, UpEndConnection, UpEndDatabase,
+};
 
 #[cfg(feature = "extractors-web")]
 pub mod web;
@@ -27,6 +29,7 @@ pub trait Extractor {
         connection: &UpEndConnection,
         store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<Vec<Entry>>;
 
     fn is_needed(&self, _address: &Address, _connection: &UpEndConnection) -> Result<bool> {
@@ -39,9 +42,10 @@ pub trait Extractor {
         connection: &UpEndConnection,
         store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<usize> {
         if self.is_needed(address, connection)? {
-            let entries = self.get(address, connection, store, job_container)?;
+            let entries = self.get(address, connection, store, job_container, context)?;
             trace!("For \"{address}\", got: {entries:?}");
 
             connection.transaction(|| {
@@ -62,6 +66,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
     db: D,
     store: Arc<Box<dyn UpStore + Send + Sync>>,
     mut job_container: JobContainer,
+    context: OperationContext,
 ) -> Result<usize> {
     info!("Extracting metadata for all addresses.");
 
@@ -77,7 +82,13 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
         .par_iter()
         .map(|address| {
             let connection = db.connection()?;
-            let entry_count = extract(address, &connection, store.clone(), job_container.clone());
+            let entry_count = extract(
+                address,
+                &connection,
+                store.clone(),
+                job_container.clone(),
+                context.clone(),
+            );
 
             let mut cnt = count.write().unwrap();
             *cnt += 1;
@@ -107,6 +118,7 @@ pub fn extract(
     connection: &UpEndConnection,
     store: Arc<Box<dyn UpStore + Send + Sync>>,
     job_container: JobContainer,
+    context: OperationContext,
 ) -> usize {
     let mut entry_count = 0;
     trace!("Extracting metadata for {address:?}");
@@ -118,6 +130,7 @@ pub fn extract(
             connection,
             store.clone(),
             job_container.clone(),
+            context.clone(),
         );
 
         match extract_result {
@@ -133,6 +146,7 @@ pub fn extract(
             connection,
             store.clone(),
             job_container.clone(),
+            context.clone(),
         );
 
         match extract_result {
@@ -148,6 +162,7 @@ pub fn extract(
             connection,
             store.clone(),
            job_container.clone(),
+            context.clone(),
         );
 
         match extract_result {
@@ -158,8 +173,13 @@ pub fn extract(
 
     #[cfg(feature = "extractors-media")]
     {
-        let extract_result =
-            media::MediaExtractor.insert_info(address, connection, store.clone(), job_container);
+        let extract_result = media::MediaExtractor.insert_info(
+            address,
+            connection,
+            store.clone(),
+            job_container,
+            context.clone(),
+        );
 
         match extract_result {
             Ok(count) => entry_count += count,
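The trait and free functions above now thread an OperationContext (user plus provenance) through every extractor call. A self-contained sketch of that pattern with simplified stand-in types; the real OperationContext, Entry and Extractor live in upend_db/upend_base, and the "SYSTEM " provenance value here is only an assumption for illustration:

    #[derive(Clone, Debug, Default)]
    struct OperationContext {
        user: Option<String>,
        provenance: String,
    }

    trait Extractor {
        fn get(&self, address: &str, context: OperationContext) -> Vec<String>;
    }

    struct DummyExtractor;

    impl Extractor for DummyExtractor {
        fn get(&self, address: &str, context: OperationContext) -> Vec<String> {
            // Mirrors `context.provenance.clone() + "EXTRACTOR"` in the diff above.
            vec![format!(
                "{address}: by {:?}, provenance {}EXTRACTOR",
                context.user, context.provenance
            )]
        }
    }

    fn main() {
        let ctx = OperationContext { user: Some("alice".into()), provenance: "SYSTEM ".into() };
        for entry in DummyExtractor.get("upend://example", ctx.clone()) {
            println!("{entry}");
        }
    }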
@@ -7,11 +7,14 @@ use anyhow::Result;
 
 use upend_base::addressing::Address;
 use upend_base::constants::ATTR_LABEL;
+use upend_base::constants::ATTR_OF;
+use upend_base::constants::TYPE_URL_ADDRESS;
 use upend_base::entry::Entry;
+use upend_base::entry::EntryValue;
 use upend_db::jobs::JobContainer;
 use upend_db::jobs::JobState;
 use upend_db::stores::UpStore;
-use upend_db::UpEndConnection;
+use upend_db::{OperationContext, UpEndConnection};
 use webpage::HTML;
 
 pub struct WebExtractor;
@@ -23,6 +26,7 @@ impl Extractor for WebExtractor {
         _connection: &UpEndConnection,
         _store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
+        context: OperationContext,
     ) -> Result<Vec<Entry>> {
         if let Address::Url(url) = address {
             let mut job_handle =
@@ -37,23 +41,26 @@ impl Extractor for WebExtractor {
             let mut entries = vec![
                 html.title.as_ref().map(|html_title| Entry {
                     entity: address.clone(),
-                    attribute: "HTML_TITLE".to_string(),
+                    attribute: "HTML_TITLE".parse().unwrap(),
                     value: html_title.clone().into(),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 }),
                 html.title.map(|html_title| Entry {
                     entity: address.clone(),
-                    attribute: ATTR_LABEL.to_string(),
+                    attribute: ATTR_LABEL.parse().unwrap(),
                     value: html_title.into(),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 }),
                 html.description.map(|html_desc| Entry {
                     entity: address.clone(),
-                    attribute: "HTML_DESCRIPTION".to_string(),
+                    attribute: "HTML_DESCRIPTION".parse().unwrap(),
                     value: html_desc.into(),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 }),
             ];
@@ -62,34 +69,53 @@ impl Extractor for WebExtractor {
                 if attribute == "OG_TITLE" {
                     entries.push(Some(Entry {
                         entity: address.clone(),
-                        attribute: ATTR_LABEL.to_string(),
+                        attribute: ATTR_LABEL.parse()?,
                         value: value.clone().into(),
-                        provenance: "SYSTEM EXTRACTOR".to_string(),
+                        provenance: context.provenance.clone() + "EXTRACTOR",
+                        user: context.user.clone(),
                         timestamp: chrono::Utc::now().naive_utc(),
                     }));
                 }
 
                 entries.push(Some(Entry {
                     entity: address.clone(),
-                    attribute,
+                    attribute: attribute.parse()?,
                     value: value.into(),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 }));
             }
             for image in html.opengraph.images {
                 entries.push(Some(Entry {
                     entity: address.clone(),
-                    attribute: "OG_IMAGE".to_string(),
+                    attribute: "OG_IMAGE".parse()?,
                     value: image.url.into(),
-                    provenance: "SYSTEM EXTRACTOR".to_string(),
+                    provenance: context.provenance.clone() + "EXTRACTOR",
+                    user: context.user.clone(),
                     timestamp: chrono::Utc::now().naive_utc(),
                 }))
             }
 
             let _ = job_handle.update_state(JobState::Done);
 
-            return Ok(entries.into_iter().flatten().collect());
+            return Ok(entries
+                .into_iter()
+                .flatten()
+                .flat_map(|e| {
+                    vec![
+                        Entry {
+                            entity: Address::Attribute(e.attribute.clone()),
+                            attribute: ATTR_OF.parse().unwrap(),
+                            value: EntryValue::Address(TYPE_URL_ADDRESS.clone()),
+                            provenance: context.provenance.clone() + "EXTRACTOR",
+                            user: context.user.clone(),
+                            timestamp: chrono::Utc::now().naive_utc(),
+                        },
+                        e,
+                    ]
+                })
+                .collect());
         }
         Err(anyhow!("Failed for unknown reason."))
     } else {
@@ -131,7 +157,13 @@ mod test {
         let address = Address::Url(Url::parse("https://upend.dev").unwrap());
         assert!(WebExtractor.is_needed(&address, &connection)?);
 
-        WebExtractor.insert_info(&address, &connection, store, job_container)?;
+        WebExtractor.insert_info(
+            &address,
+            &connection,
+            store,
+            job_container,
+            OperationContext::default(),
+        )?;
 
         assert!(!WebExtractor.is_needed(&address, &connection)?);
 
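The web extractor above now wraps its result in a flat_map so that every produced entry is accompanied by a companion record tying its attribute to TYPE_URL_ADDRESS. A tiny standalone sketch of that pairing pattern, with strings standing in for the Entry and Address types:

    fn main() {
        let entries = vec![("HTML_TITLE", "Example"), ("OG_IMAGE", "https://example.com/a.png")];
        // Each extracted item yields two records: a type link for its attribute and the item itself.
        let with_companions: Vec<String> = entries
            .into_iter()
            .flat_map(|(attribute, value)| {
                vec![
                    format!("{attribute} OF TYPE_URL_ADDRESS"),
                    format!("{attribute} = {value}"),
                ]
            })
            .collect();
        for line in &with_companions {
            println!("{line}");
        }
    }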
@@ -1,7 +1,7 @@
 #[macro_use]
 extern crate upend_db;
 
-use crate::common::{get_resource_path, REQWEST_ASYNC_CLIENT};
+use crate::common::{REQWEST_ASYNC_CLIENT, WEBUI_PATH};
 use crate::config::UpEndConfig;
 use actix_web::HttpServer;
 use anyhow::Result;
@@ -16,7 +16,7 @@ use std::collections::HashMap;
 use std::net::SocketAddr;
 use std::path::Path;
 use std::path::PathBuf;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
 use tracing::trace;
 use tracing::{debug, error, info, warn};
 use tracing_subscriber::filter::{EnvFilter, LevelFilter};
@@ -26,7 +26,7 @@ use upend_base::hash::{sha256hash, UpMultihash};
 use upend_db::jobs::JobContainer;
 use upend_db::stores::fs::FsStore;
 use upend_db::stores::UpStore;
-use upend_db::UpEndDatabase;
+use upend_db::{BlobMode, OperationContext, UpEndDatabase};
 
 use crate::util::exec::block_background;
 
@@ -36,8 +36,9 @@ mod routes;
 mod serve;
 mod util;
 
-mod extractors;
-mod previews;
+mod extractors; // TODO REMOVE
+mod plugins;
+mod previews; // TODO REMOVE
 
 #[derive(Debug, Parser)]
 #[command(name = "upend", author)]
@@ -80,7 +81,7 @@ enum Commands {
         entity: String,
         /// The attribute of the entry.
         attribute: String,
-        /// The value; its type will be heurestically determined.
+        /// The value; its type will be heuristically determined.
         value: String,
         /// Output format
         #[arg(short, long, default_value = "tsv")]
@@ -152,6 +153,10 @@ struct ServeArgs {
     #[arg(long)]
     no_initial_update: bool,
 
+    /// Which mode to use for rescanning the vault.
+    #[arg(long)]
+    rescan_mode: Option<BlobMode>,
+
     /// Clean up temporary files (e.g. previews) on start.
     #[arg(long)]
     clean: bool,
@@ -168,10 +173,6 @@ struct ServeArgs {
     #[arg(long, env = "UPEND_SECRET")]
     secret: Option<String>,
 
-    /// Authentication key users must supply.
-    #[arg(long, env = "UPEND_KEY")]
-    key: Option<String>,
-
     /// Allowed host/domain name the API can serve.
     #[arg(long, env = "UPEND_ALLOW_HOST")]
     allow_host: Vec<String>,
@@ -179,7 +180,7 @@ struct ServeArgs {
 
 #[actix_web::main]
 async fn main() -> Result<()> {
-    let command = Cli::command().version(option_env!("UPEND_VERSION").unwrap_or("unknown"));
+    let command = Cli::command().version(crate::common::get_version());
     let args = Cli::from_arg_matches(&command.get_matches())?;
 
     tracing_subscriber::fmt()
@@ -338,20 +339,20 @@ async fn main() -> Result<()> {
         FsStore::from_path(args.store_path.unwrap_or_else(|| vault_path.clone())).unwrap(),
     ) as Box<dyn UpStore + Send + Sync>);
 
-    let ui_path = if args.no_ui {
-        None
+    let webui_enabled = if args.no_ui {
+        false
     } else {
-        let ui_path = get_resource_path("webui");
-        if ui_path.is_err() {
+        let exists = WEBUI_PATH.exists();
+        if !exists {
             warn!(
                 "Couldn't locate Web UI directory ({:?}), disabling...",
-                ui_path
+                *WEBUI_PATH
             );
         }
-        ui_path.ok()
+        exists
     };
 
-    let browser_enabled = !args.no_desktop && ui_path.is_some() && !args.no_browser;
+    let browser_enabled = !args.no_desktop && webui_enabled && !args.no_browser;
 
     let preview_path = upend.path.join("previews");
     #[cfg(feature = "previews")]
@@ -394,12 +395,15 @@ async fn main() -> Result<()> {
             .collect()
     });
 
+    let plugins = crate::plugins::Plugins::init(&get_resource_path("plugins")?)?;
+
     let state = routes::State {
         upend: upend.clone(),
         store,
         job_container: job_container.clone(),
         preview_store,
         preview_thread_pool,
+        plugins: plugins.into(),
         config: UpEndConfig {
             vault_name: Some(args.vault_name.unwrap_or_else(|| {
                 vault_path
@@ -411,9 +415,9 @@ async fn main() -> Result<()> {
             })),
             desktop_enabled: !args.no_desktop,
             trust_executables: args.trust_executables,
-            key: args.key,
             secret,
         },
+        public: Arc::new(Mutex::new(upend.connection()?.get_users()?.is_empty())),
     };
 
     // Start HTTP server
@@ -421,11 +425,10 @@ async fn main() -> Result<()> {
     let mut cnt = 0;
     let server = loop {
         let state = state.clone();
-        let ui_path = ui_path.clone();
         let allowed_origins = args.allow_host.clone();
 
         let server = HttpServer::new(move || {
-            serve::get_app(ui_path.clone(), allowed_origins.clone(), state.clone())
+            serve::get_app(webui_enabled, allowed_origins.clone(), state.clone())
         });
 
         let bind_result = server.bind(&bind);
@@ -443,19 +446,46 @@ async fn main() -> Result<()> {
         }
     };
 
-    if !args.no_initial_update {
-        info!("Running initial update...");
-        let initial = open_result.new;
+    if !args.no_initial_update && (!open_result.new || args.rescan_mode.is_some()) {
+        info!("Running update...");
         block_background::<_, _, anyhow::Error>(move || {
-            let _ = state.store.update(&upend, job_container.clone(), initial);
-            let _ = extractors::extract_all(upend, state.store, job_container);
+            let connection: upend_db::UpEndConnection = upend.connection()?;
+
+            let tree_mode = if let Some(rescan_mode) = args.rescan_mode {
+                connection.set_vault_options(upend_db::VaultOptions {
+                    blob_mode: Some(rescan_mode.clone()),
+                })?;
+                rescan_mode
+            } else {
+                connection
+                    .get_vault_options()
+                    .unwrap()
+                    .blob_mode
+                    .unwrap_or_default()
+            };
+
+            let _ = state.store.update(
+                &upend,
+                job_container.clone(),
+                upend_db::stores::UpdateOptions {
+                    initial: false,
+                    tree_mode,
+                },
+                OperationContext::default(),
+            );
+            let _ = extractors::extract_all(
+                upend,
+                state.store,
+                job_container,
+                OperationContext::default(),
+            );
             Ok(())
         });
     }
 
     #[cfg(feature = "desktop")]
     {
-        if browser_enabled && ui_path.is_some() {
+        if browser_enabled {
             let ui_result = webbrowser::open(&format!("http://localhost:{}", bind.port()));
             if ui_result.is_err() {
                 warn!("Could not open UI in browser!");
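The serve path above gains a --rescan-mode flag: an explicit value is persisted via set_vault_options() and used for the update, otherwise the stored vault option (or its default) applies. A sketch of that selection logic with simplified stand-ins; only the StoreOnly variant of BlobMode is visible in the diff, the other variant and the options struct are assumed here:

    #[derive(Clone, Debug, Default, PartialEq)]
    enum BlobMode {
        #[default]
        Flat, // assumed variant, for illustration only
        StoreOnly,
    }

    #[derive(Default)]
    struct VaultOptions {
        blob_mode: Option<BlobMode>,
    }

    fn choose_tree_mode(cli_rescan: Option<BlobMode>, stored: &mut VaultOptions) -> BlobMode {
        if let Some(mode) = cli_rescan {
            stored.blob_mode = Some(mode.clone()); // persist the explicit choice
            mode
        } else {
            stored.blob_mode.clone().unwrap_or_default()
        }
    }

    fn main() {
        let mut opts = VaultOptions::default();
        assert_eq!(choose_tree_mode(Some(BlobMode::StoreOnly), &mut opts), BlobMode::StoreOnly);
        // A later run without the flag reads the persisted option back.
        assert_eq!(choose_tree_mode(None, &mut opts), BlobMode::StoreOnly);
        println!("ok");
    }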
@@ -0,0 +1,13 @@
+use extism::{CurrentPlugin, UserData, Val};
+use upend_base::error::UpEndError;
+
+fn hello_world(
+    _plugin: &mut CurrentPlugin,
+    inputs: &[Val],
+    outputs: &mut [Val],
+    _user_data: UserData,
+) -> Result<(), UpEndError> {
+    println!("Hello from Rust!");
+    outputs[0] = inputs[0].clone();
+    Ok(())
+}
@@ -0,0 +1,175 @@
+use anyhow::Result;
+use extism::{manifest::Wasm, *};
+use std::{
+    path::{Path, PathBuf},
+    process::Command,
+};
+use upend_extension_base::PluginInfo;
+
+mod host;
+pub struct Plugins<'a> {
+    pub plugins: Vec<UpEndPlugin<'a>>,
+}
+
+#[derive(Debug)]
+pub enum UpEndPlugin<'a> {
+    Initialized(UpEndPluginInitialized<'a>),
+    Failed(UpEndPluginFailed),
+}
+pub struct UpEndPluginInitialized<'a> {
+    pub path: PathBuf,
+    pub info: PluginInfo,
+    pub plugin: Box<extism::Plugin<'a>>,
+    pub verified: Result<(), String>,
+}
+
+impl std::fmt::Debug for UpEndPluginInitialized<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("UpEndPluginInitialized")
+            .field("path", &self.path)
+            .field("info", &self.info)
+            .field("verified", &self.verified)
+            .finish()
+    }
+}
+
+#[derive(Debug)]
+pub struct UpEndPluginFailed {
+    pub path: PathBuf,
+    pub error: String,
+}
+
+impl Plugins<'_> {
+    pub fn init(plugin_path: &Path) -> Result<Self> {
+        let plugin_files = plugin_path
+            .read_dir()?
+            .filter_map(|p| p.ok().and_then(|p| Some(p.path())))
+            .filter(|p| p.is_file() && p.extension().unwrap_or_default() == "wasm");
+
+        let mut plugins = vec![];
+
+        for plugin_path in plugin_files {
+            debug!("Attempting to load plugin: {:?}", plugin_path);
+            let file = Wasm::file(plugin_path.clone());
+            let manifest = Manifest::new([file]);
+            let plugin = Plugin::create_with_manifest(&manifest, [], true);
+            match plugin {
+                Ok(mut plugin) => {
+                    debug!("Plugin loaded: {:?}", plugin_path);
+                    let info = plugin.call("info", []).and_then(|v| {
+                        serde_json::from_slice::<PluginInfo>(&v).map_err(|e| anyhow::anyhow!(e))
+                    });
+                    match info {
+                        Ok(info) => {
+                            debug!("Plugin info: {:?}", info);
+
+                            let mut gpg_cmd = Command::new("gpg");
+                            let verify_cmd = gpg_cmd
+                                .arg("--verify")
+                                .arg(&plugin_path.with_extension("wasm.asc"));
+                            let verify_result = verify_cmd
+                                .output()
+                                .map_err(|e| format!("Failed to run gpg: {:?}", e))
+                                .and_then(|output| {
+                                    if output.status.success() {
+                                        Ok(())
+                                    } else {
+                                        Err(format!(
+                                            "Failed to verify plugin: {:?}",
+                                            String::from_utf8_lossy(&output.stderr)
+                                        ))
+                                    }
+                                });
+                            let verified = verify_result.and_then(|_| Ok(()));
+
+                            plugins.push(UpEndPlugin::Initialized(UpEndPluginInitialized {
+                                path: plugin_path.clone(),
+                                info,
+                                plugin: Box::new(plugin),
+                                verified,
+                            }));
+                        }
+                        Err(e) => {
+                            error!("Failed to get plugin info: {:?}", e);
+                            plugins.push(UpEndPlugin::Failed(UpEndPluginFailed {
+                                path: plugin_path.clone(),
+                                error: format!("Failed to get plugin info: {:?}", e),
+                            }));
+                        }
+                    }
+                }
+                Err(e) => {
+                    error!("Failed to create plugin: {:?}", e);
+                    plugins.push(UpEndPlugin::Failed(UpEndPluginFailed {
+                        path: plugin_path.clone(),
+                        error: format!("Failed to create plugin: {:?}", e),
+                    }));
+                }
+            }
+        }
+
+        Ok(Self { plugins })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use core::panic;
+
+    use super::*;
+    use crate::common::get_resource_path;
+
+    #[test]
+    fn test_plugins_init() {
+        let plugins = Plugins::init(&get_resource_path("plugins").unwrap()).unwrap();
+        assert!(plugins.plugins.len() > 0);
+        for plugin in plugins.plugins {
+            assert!(
+                match plugin {
+                    UpEndPlugin::Initialized(_) => true,
+                    _ => false,
+                },
+                "{:?}",
+                plugin
+            );
+        }
+    }
+
+    #[test]
+    fn test_plugins_verify() {
+        let plugins = Plugins::init(&get_resource_path("plugins").unwrap()).unwrap();
+        let verified_plugin = plugins
+            .plugins
+            .iter()
+            .find(|p| match p {
+                UpEndPlugin::Initialized(p) => p.path.to_string_lossy().contains("verified"),
+                _ => false,
+            })
+            .unwrap();
+        assert!(
+            match verified_plugin {
+                UpEndPlugin::Initialized(p) => p.verified.is_ok(),
+                _ => false,
+            },
+            "{:?}",
+            verified_plugin
+        );
+
+        let unverified_plugin = plugins
+            .plugins
+            .iter()
+            .find(|p| match p {
+                UpEndPlugin::Initialized(p) => !p.path.to_string_lossy().contains("verified"),
+                _ => false,
+            })
+            .unwrap();
+        assert!(
+            match unverified_plugin {
+                UpEndPlugin::Initialized(p) => p.verified.is_err(),
+                _ => false,
+            },
+            "{:?}",
+            unverified_plugin
+        );
+    }
+}
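The new plugin loader above scans for .wasm files, asks each plugin for its info export, and checks a detached GPG signature next to the file. A standalone sketch of just the signature check, shelling out to gpg the same way; the plugin path is illustrative:

    use std::path::Path;
    use std::process::Command;

    // Verify a detached signature expected next to the plugin (example.wasm -> example.wasm.asc).
    fn verify_signature(wasm: &Path) -> Result<(), String> {
        Command::new("gpg")
            .arg("--verify")
            .arg(wasm.with_extension("wasm.asc"))
            .output()
            .map_err(|e| format!("Failed to run gpg: {:?}", e))
            .and_then(|output| {
                if output.status.success() {
                    Ok(())
                } else {
                    Err(format!(
                        "Failed to verify plugin: {:?}",
                        String::from_utf8_lossy(&output.stderr)
                    ))
                }
            })
    }

    fn main() {
        println!("{:?}", verify_signature(Path::new("plugins/example.wasm")));
    }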
@@ -1,10 +1,10 @@
 use anyhow::anyhow;
+use anyhow::Result;
 use std::collections::HashMap;
 use std::io::Read;
 use std::path::Path;
 use std::process::Command;
+use tracing::{debug, trace};
-use anyhow::Result;
 
 use super::Previewable;
 
@@ -29,7 +29,7 @@ impl<'a> Previewable for ImagePath<'a> {
             })
             .and_then(|shorts| shorts.first().cloned());
 
-        let image = ImageReader::open(self.0)?.decode()?;
+        let image = ImageReader::open(self.0)?.with_guessed_format()?.decode()?;
         let image = match orientation {
             Some(3) => image.rotate180(),
             Some(6) => image.rotate90(),
@@ -90,7 +90,7 @@ impl PreviewStore {
         } else {
             trace!("Calculating preview for {hash:?}...");
             let files = self.store.retrieve(&hash)?;
-            if let Some(file) = files.get(0) {
+            if let Some(file) = files.first() {
                 let file_path = file.get_file_path();
                 let mut job_handle = job_container.add_job(
                     None,
@@ -1,10 +1,10 @@
 use anyhow::anyhow;
+use anyhow::Result;
 use std::collections::HashMap;
 use std::io::Read;
 use std::path::Path;
 use std::process::Command;
+use tracing::{debug, trace};
-use anyhow::Result;
 
 use super::Previewable;
 
@ -2,6 +2,7 @@ use crate::common::build;
|
||||||
use crate::common::REQWEST_CLIENT;
|
use crate::common::REQWEST_CLIENT;
|
||||||
use crate::config::UpEndConfig;
|
use crate::config::UpEndConfig;
|
 use crate::extractors;
+use crate::plugins::Plugins;
 use crate::previews::PreviewStore;
 use crate::util::exec::block_background;
 use actix_files::NamedFile;
@@ -11,7 +12,7 @@ use actix_web::error::{
 };
 use actix_web::http::header::ContentDisposition;
 use actix_web::{
-    delete, error, get, post, put, routes, web, Either, Error, HttpResponse, ResponseError,
+    delete, error, get, head, post, put, routes, web, Either, Error, HttpResponse, ResponseError,
 };
 use actix_web::{http, Responder};
 use actix_web::{
@@ -26,7 +27,7 @@ use serde_json::json;
 use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::io::Write;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
 use std::time::{SystemTime, UNIX_EPOCH};
 use tempfile::NamedTempFile;
 use tracing::{debug, info, trace};
@@ -38,84 +39,168 @@ use upend_base::hash::{b58_decode, b58_encode, sha256hash};
 use upend_base::lang::Query;
 use upend_db::hierarchies::{list_roots, resolve_path, UHierPath};
 use upend_db::jobs;
+use upend_db::stores::UpdateOptions;
 use upend_db::stores::{Blob, UpStore};
+use upend_db::BlobMode;
+use upend_db::OperationContext;
 use upend_db::UpEndDatabase;
+use upend_extension_base::PluginInfo;
+use upend_db::VaultOptions;
 use url::Url;

 #[cfg(feature = "desktop")]
 use is_executable::IsExecutable;
+use upend_base::error::UpEndError;

 #[derive(Clone)]
 pub struct State {
     pub upend: Arc<UpEndDatabase>,
     pub store: Arc<Box<dyn UpStore + Sync + Send>>,
     pub config: UpEndConfig,
+    pub plugins: Arc<Plugins<'static>>,
     pub job_container: jobs::JobContainer,
     pub preview_store: Option<Arc<PreviewStore>>,
     pub preview_thread_pool: Option<Arc<rayon::ThreadPool>>,
+    pub public: Arc<Mutex<bool>>,
 }

 #[derive(Debug, Serialize, Deserialize)]
 struct JwtClaims {
+    user: String,
     exp: usize,
 }

 #[derive(Deserialize)]
-pub struct LoginRequest {
-    key: String,
+pub struct UserPayload {
+    username: String,
+    password: String,
+}
+
+#[derive(Deserialize)]
+pub struct LoginQueryParams {
+    via: Option<String>,
 }

 #[post("/api/auth/login")]
 pub async fn login(
     state: web::Data<State>,
-    payload: web::Json<LoginRequest>,
+    payload: web::Json<UserPayload>,
+    query: web::Query<LoginQueryParams>,
 ) -> Result<HttpResponse, Error> {
-    if state.config.key.is_none() || Some(&payload.key) == state.config.key.as_ref() {
-        let claims = JwtClaims {
-            exp: (SystemTime::now()
-                .duration_since(UNIX_EPOCH)
-                .map_err(ErrorInternalServerError)?
-                .as_secs()
-                + 7 * 24 * 60 * 60) as usize,
-        };
-
-        let token = jsonwebtoken::encode(
-            &jsonwebtoken::Header::default(),
-            &claims,
-            &jsonwebtoken::EncodingKey::from_secret(state.config.secret.as_ref()),
-        )
-        .map_err(ErrorInternalServerError)?;
-
-        Ok(HttpResponse::Ok().json(json!({ "token": token })))
-    } else {
-        Err(ErrorUnauthorized("Incorrect token."))
+    let conn = state.upend.connection().map_err(ErrorInternalServerError)?;
+
+    match conn.authenticate_user(&payload.username, &payload.password) {
+        Ok(()) => {
+            let token = create_token(&payload.username, &state.config.secret)?;
+            match query.via.as_deref() {
+                Some("cookie") => Ok(HttpResponse::NoContent()
+                    .append_header((http::header::SET_COOKIE, format!("key={}; Path=/", token)))
+                    .finish()),
+                _ => Ok(HttpResponse::Ok().json(json!({ "key": token }))),
+            }
+        }
+        Err(_) => Err(ErrorUnauthorized("Invalid credentials.")),
     }
 }

-fn check_auth(req: &HttpRequest, state: &State) -> Result<(), actix_web::Error> {
-    if let Some(key) = &state.config.key {
-        if let Some(auth_header) = req.headers().get("Authorization") {
-            let auth_header = auth_header.to_str().map_err(|err| {
-                ErrorBadRequest(format!("Invalid value in Authorization header: {err:?}"))
-            })?;
+#[post("/api/auth/logout")]
+pub async fn logout() -> Result<HttpResponse, Error> {
+    Ok(HttpResponse::NoContent()
+        .append_header((http::header::SET_COOKIE, "key=; Path=/; Max-Age=0"))
+        .finish())
+}

-            let token = jsonwebtoken::decode::<JwtClaims>(
-                auth_header,
-                &jsonwebtoken::DecodingKey::from_secret(key.as_ref()),
-                &jsonwebtoken::Validation::default(),
-            );
-            token
-                .map(|_| ())
-                .map_err(|err| ErrorUnauthorized(format!("Invalid token: {err:?}")))
-        } else {
-            Err(ErrorUnauthorized("Authorization required."))
+#[post("/api/auth/register")]
+pub async fn register(
+    req: HttpRequest,
+    state: web::Data<State>,
+    payload: web::Json<UserPayload>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
+    let conn = state.upend.connection().map_err(ErrorInternalServerError)?;
+
+    match conn.set_user(&payload.username, &payload.password) {
+        Ok(_) => {
+            *state.public.lock().unwrap() = false;
+            let token = create_token(&payload.username, &state.config.secret)?;
+            Ok(HttpResponse::Ok().json(json!({ "token": token })))
+        }
+        Err(e) => Err(ErrorInternalServerError(e)),
+    }
+}
+
+#[get("/api/auth/whoami")]
+pub async fn whoami(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
+    let user = check_auth(&req, &state)?;
+    Ok(HttpResponse::Ok().json(json!({ "user": user })))
+}
+
+fn check_auth(req: &HttpRequest, state: &State) -> Result<Option<String>, actix_web::Error> {
+    if *state.public.lock().unwrap() {
+        return Ok(None);
+    }
+
+    let header_key = req.headers().get("Authorization").and_then(|value| {
+        value.to_str().ok().and_then(|value| {
+            if value.starts_with("Bearer ") {
+                Some(value.trim_start_matches("Bearer ").to_string())
+            } else {
+                None
+            }
+        })
+    });
+
+    let cookie_key = req.cookies().ok().and_then(|cookies| {
+        cookies
+            .iter()
+            .find(|c| c.name() == "key")
+            .map(|cookie| cookie.value().to_string())
+    });
+
+    let query_key = req.query_string().split('&').find_map(|pair| {
+        let parts = pair.split('=').collect::<Vec<&str>>();
+        match parts[..] {
+            ["auth_key", value] => Some(value.to_string()),
+            _ => None,
+        }
+    });
+
+    let key = header_key.or(cookie_key).or(query_key);
+
+    if let Some(key) = key {
+        let token = jsonwebtoken::decode::<JwtClaims>(
+            &key,
+            &jsonwebtoken::DecodingKey::from_secret(state.config.secret.as_ref()),
+            &jsonwebtoken::Validation::default(),
+        );
+        match token {
+            Ok(token) => Ok(Some(token.claims.user)),
+            Err(err) => Err(ErrorUnauthorized(format!("Invalid token: {err:?}"))),
         }
     } else {
-        Ok(())
+        Err(ErrorUnauthorized("Authorization required."))
     }
 }
+
+fn create_token(username: &str, secret: &str) -> Result<String, Error> {
+    let claims = JwtClaims {
+        user: username.to_string(),
+        exp: (SystemTime::now()
+            .duration_since(UNIX_EPOCH)
+            .map_err(ErrorInternalServerError)?
+            .as_secs()
+            + 7 * 24 * 60 * 60) as usize,
+    };
+
+    jsonwebtoken::encode(
+        &jsonwebtoken::Header::default(),
+        &claims,
+        &jsonwebtoken::EncodingKey::from_secret(secret.as_ref()),
+    )
+    .map_err(ErrorInternalServerError)
+}

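A minimal sketch, not part of the diff above: the token handling in `create_token` and `check_auth` is plain `jsonwebtoken` usage, and the encode/decode round trip looks roughly like this (the secret and username here are placeholders):

// Sketch only; mirrors the jsonwebtoken calls used by the handlers above.
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct JwtClaims {
    user: String,
    exp: usize,
}

fn roundtrip() -> Result<(), jsonwebtoken::errors::Error> {
    let secret = "secret"; // placeholder
    let claims = JwtClaims {
        user: "alice".to_string(),
        exp: 2_000_000_000, // far-future expiry so Validation::default() passes
    };
    let token = encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(secret.as_ref()),
    )?;
    let decoded = decode::<JwtClaims>(
        &token,
        &DecodingKey::from_secret(secret.as_ref()),
        &Validation::default(),
    )?;
    assert_eq!(decoded.claims.user, "alice");
    Ok(())
}
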
 #[derive(Deserialize)]
 pub struct RawRequest {
     native: Option<String>,
@@ -124,10 +209,13 @@ pub struct RawRequest {

 #[get("/api/raw/{hash}")]
 pub async fn get_raw(
+    req: HttpRequest,
     state: web::Data<State>,
     web::Query(query): web::Query<RawRequest>,
     hash: web::Path<String>,
 ) -> Result<impl Responder, Error> {
+    check_auth(&req, &state)?;
+
     let address =
         Address::decode(&b58_decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
             .map_err(ErrorInternalServerError)?;
@@ -139,7 +227,7 @@ pub async fn get_raw(
     let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
         .await?
         .map_err(ErrorInternalServerError)?;
-    if let Some(blob) = blobs.get(0) {
+    if let Some(blob) = blobs.first() {
         let file_path = blob.get_file_path();

         if query.native.is_none() {
@@ -212,12 +300,54 @@ pub async fn get_raw(
     }
 }

+#[head("/api/raw/{hash}")]
+pub async fn head_raw(
+    req: HttpRequest,
+    state: web::Data<State>,
+    hash: web::Path<String>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
+    let address =
+        Address::decode(&b58_decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
+            .map_err(ErrorInternalServerError)?;
+    if let Address::Hash(hash) = address {
+        let hash = Arc::new(hash);
+
+        let _hash = hash.clone();
+        let _store = state.store.clone();
+        let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
+            .await?
+            .map_err(ErrorInternalServerError)?;
+        if let Some(blob) = blobs.first() {
+            let file_path = blob.get_file_path();
+
+            let mut response = HttpResponse::NoContent();
+            if let Some(mime_type) = tree_magic_mini::from_filepath(file_path) {
+                if let Ok(mime) = mime_type.parse::<mime::Mime>() {
+                    return Ok(response.content_type(mime).finish());
+                }
+            }
+            return Ok(response.into());
+        }
+
+        Err(error::ErrorNotFound("NOT FOUND"))
+    } else {
+        Err(ErrorBadRequest(
+            "Address does not refer to a rawable object.",
+        ))
+    }
+}

 #[get("/api/thumb/{hash}")]
 pub async fn get_thumbnail(
+    req: HttpRequest,
     state: web::Data<State>,
     hash: web::Path<String>,
     web::Query(query): web::Query<HashMap<String, String>>,
 ) -> Result<Either<NamedFile, HttpResponse>, Error> {
+    check_auth(&req, &state)?;
+
     #[cfg(feature = "previews")]
     if let Some(preview_store) = &state.preview_store {
         let hash = hash.into_inner();
@@ -259,7 +389,13 @@ pub async fn get_thumbnail(
 }

 #[post("/api/query")]
-pub async fn get_query(state: web::Data<State>, query: String) -> Result<HttpResponse, Error> {
+pub async fn get_query(
+    req: HttpRequest,
+    state: web::Data<State>,
+    query: String,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;

     let in_query: Query = query.parse().map_err(ErrorBadRequest)?;
@@ -301,9 +437,12 @@ impl EntriesAsHash for Vec<Entry> {

 #[get("/api/obj/{address_str}")]
 pub async fn get_object(
+    req: HttpRequest,
     state: web::Data<State>,
     address: web::Path<Address>,
 ) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     let address = address.into_inner();

@@ -345,6 +484,7 @@ pub struct InEntry {
     pub value: EntryValue,
 }

+#[allow(clippy::large_enum_variant)]
 #[derive(Debug, Clone, Deserialize)]
 #[serde(untagged, deny_unknown_fields)]
 pub enum PutInput {
@@ -365,7 +505,7 @@ pub async fn put_object(
     payload: web::Json<PutInput>,
     web::Query(query): web::Query<UpdateQuery>,
 ) -> Result<HttpResponse, Error> {
-    check_auth(&req, &state)?;
+    let user = check_auth(&req, &state)?;

     let (entry_address, entity_address) = {
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
@@ -374,11 +514,12 @@ pub async fn put_object(
         debug!("PUTting {in_entry:?}");

         let provenance = query.provenance.clone();
+        let _user = user.clone();
         let process_inentry = move |in_entry: InEntry| -> Result<Entry> {
             if let Some(entity) = in_entry.entity {
                 Ok(Entry {
                     entity: entity.try_into()?,
-                    attribute: in_entry.attribute,
+                    attribute: in_entry.attribute.parse()?,
                     value: in_entry.value,
                     provenance: (match &provenance {
                         Some(s) => format!("API {}", s),
@@ -387,10 +528,11 @@ pub async fn put_object(
                     })
                     .trim()
                     .to_string(),
                     timestamp: chrono::Utc::now().naive_utc(),
+                    user: _user.clone(),
                 })
             } else {
                 Ok(Entry::try_from(&InvariantEntry {
-                    attribute: in_entry.attribute,
+                    attribute: in_entry.attribute.parse()?,
                     value: in_entry.value,
                 })?)
             }
@@ -428,21 +570,31 @@ pub async fn put_object(
         let _address = address.clone();
         let _job_container = state.job_container.clone();
         let _store = state.store.clone();
+        let _user = user.clone();
         block_background::<_, _, anyhow::Error>(move || {
-            let entry_count =
-                extractors::extract(&_address, &connection, _store, _job_container);
+            let entry_count = extractors::extract(
+                &_address,
+                &connection,
+                _store,
+                _job_container,
+                OperationContext {
+                    user: _user,
+                    provenance: "API".to_string(),
+                },
+            );
+
             debug!("Added {entry_count} extracted entries for {_address:?}");
             Ok(())
         });

         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
+        let _user = user.clone();
         web::block(move || {
             connection.transaction::<_, anyhow::Error, _>(|| {
                 if connection.retrieve_object(&address)?.is_empty() {
                     connection.insert_entry(Entry {
                         entity: address.clone(),
-                        attribute: ATTR_ADDED.to_string(),
+                        attribute: ATTR_ADDED.parse().unwrap(),
                         value: EntryValue::Number(
                             SystemTime::now()
                                 .duration_since(UNIX_EPOCH)
@@ -455,6 +607,7 @@ pub async fn put_object(
                         })
                         .trim()
                         .to_string(),
+                        user: _user,
                         timestamp: chrono::Utc::now().naive_utc(),
                     })?;
                 }
@@ -477,7 +630,7 @@ pub async fn put_blob(
     state: web::Data<State>,
     mut payload: Multipart,
 ) -> Result<HttpResponse, Error> {
-    check_auth(&req, &state)?;
+    let user = check_auth(&req, &state)?;

     if let Some(mut field) = payload.try_next().await? {
         let mut file = NamedTempFile::new()?;
@@ -516,8 +669,21 @@ pub async fn put_blob(
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
         let _store = state.store.clone();
         let _filename = filename.clone();
+        let _user = user.clone();
         let hash = web::block(move || {
-            _store.store(connection, Blob::from_filepath(file.path()), _filename)
+            let options = connection.get_vault_options()?;
+            _store
+                .store(
+                    &connection,
+                    Blob::from_filepath(file.path()),
+                    _filename,
+                    options.blob_mode,
+                    OperationContext {
+                        user: _user,
+                        provenance: "API".to_string(),
+                    },
+                )
+                .map_err(anyhow::Error::from)
         })
         .await?
         .map_err(ErrorInternalServerError)?;
@@ -541,8 +707,18 @@ pub async fn put_blob(
         let _job_container = state.job_container.clone();
         let _store = state.store.clone();
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
+        let _user = user.clone();
         block_background::<_, _, anyhow::Error>(move || {
-            let entry_count = extractors::extract(&_address, &connection, _store, _job_container);
+            let entry_count = extractors::extract(
+                &_address,
+                &connection,
+                _store,
+                _job_container,
+                OperationContext {
+                    user: _user,
+                    provenance: "API".to_string(),
+                },
+            );
             debug!("Added {entry_count} extracted entries for {_address:?}");
             Ok(())
         });
@@ -560,7 +736,7 @@ pub async fn put_object_attribute(
     value: web::Json<EntryValue>,
     web::Query(query): web::Query<UpdateQuery>,
 ) -> Result<HttpResponse, Error> {
-    check_auth(&req, &state)?;
+    let user = check_auth(&req, &state)?;
     let (address, attribute) = path.into_inner();
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;

@@ -575,7 +751,7 @@ pub async fn put_object_attribute(

     let new_attr_entry = Entry {
         entity: address,
-        attribute,
+        attribute: attribute.parse()?,
         value: value.into_inner(),
         provenance: (match &query.provenance {
             Some(s) => format!("API {}", s),
@@ -583,6 +759,7 @@ pub async fn put_object_attribute(
         })
         .trim()
        .to_string(),
+        user: user.clone(),
         timestamp: chrono::Utc::now().naive_utc(),
     };

@@ -641,7 +818,14 @@ pub async fn get_address(
     web::Query(query): web::Query<HashMap<String, String>>,
 ) -> Result<HttpResponse, Error> {
     let (address, immutable) = if let Some(attribute) = query.get("attribute") {
-        (Address::Attribute(attribute.into()), true)
+        (
+            Address::Attribute(
+                attribute
+                    .parse()
+                    .map_err(|e: UpEndError| ErrorBadRequest(e.to_string()))?,
+            ),
+            true,
+        )
     } else if let Some(url) = query.get("url") {
         (
             Address::Url(Url::parse(url).map_err(ErrorBadRequest)?),
@@ -680,7 +864,12 @@ pub async fn get_address(
 }

 #[get("/api/all/attributes")]
-pub async fn get_all_attributes(state: web::Data<State>) -> Result<HttpResponse, Error> {
+pub async fn get_all_attributes(
+    req: HttpRequest,
+    state: web::Data<State>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     let attributes = web::block(move || connection.get_all_attributes())
         .await?
@@ -723,6 +912,8 @@ pub async fn list_hier(
     path: web::Path<String>,
     req: HttpRequest,
 ) -> Result<HttpResponse, Error> {
+    let user = check_auth(&req, &state)?;
+
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     if path.is_empty() {
         Ok(HttpResponse::MovedPermanently()
@@ -733,9 +924,19 @@ pub async fn list_hier(
         trace!(r#"Listing path "{}""#, upath);

         let create = !req.method().is_safe();
-        let path = web::block(move || resolve_path(&connection, &upath, create))
-            .await?
-            .map_err(ErrorNotFound)?;
+        let path = web::block(move || {
+            resolve_path(
+                &connection,
+                &upath,
+                create,
+                OperationContext {
+                    user,
+                    provenance: "API".to_string(),
+                },
+            )
+        })
+        .await?
+        .map_err(ErrorNotFound)?;
         match path.last() {
             Some(addr) => Ok(HttpResponse::Found()
                 .append_header((http::header::LOCATION, format!("../../api/obj/{}", addr)))
@@ -746,7 +947,11 @@ pub async fn list_hier(
 }

 #[get("/api/hier_roots")]
-pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Error> {
+pub async fn list_hier_roots(
+    req: HttpRequest,
+    state: web::Data<State>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;

     let result = web::block(move || {
@@ -762,27 +967,48 @@ pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Er
     Ok(HttpResponse::Ok().json(result.as_hash().map_err(ErrorInternalServerError)?))
 }

-// #[derive(Deserialize)]
-// pub struct RescanRequest {
-//     full: Option<String>,
-// }
+#[derive(Deserialize)]
+pub struct RescanRequest {
+    initial: Option<bool>,
+    tree_mode: Option<BlobMode>,
+}

 #[post("/api/refresh")]
 pub async fn api_refresh(
     req: HttpRequest,
     state: web::Data<State>,
-    // web::Query(query): web::Query<RescanRequest>,
+    web::Query(query): web::Query<RescanRequest>,
 ) -> Result<HttpResponse, Error> {
-    check_auth(&req, &state)?;
+    let user = check_auth(&req, &state)?;

+    let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
+
     block_background::<_, _, anyhow::Error>(move || {
-        let _ = state
-            .store
-            .update(&state.upend, state.job_container.clone(), false);
+        let _ = state.store.update(
+            &state.upend,
+            state.job_container.clone(),
+            UpdateOptions {
+                initial: query.initial.unwrap_or(false),
+                tree_mode: query.tree_mode.unwrap_or(
+                    connection
+                        .get_vault_options()?
+                        .blob_mode
+                        .unwrap_or_default(),
+                ),
+            },
+            OperationContext {
+                user: user.clone(),
+                provenance: "API".to_string(),
+            },
+        );
         let _ = crate::extractors::extract_all(
             state.upend.clone(),
             state.store.clone(),
             state.job_container.clone(),
+            OperationContext {
+                user: user.clone(),
+                provenance: "API".to_string(),
+            },
         );
         Ok(())
     });
@@ -790,13 +1016,15 @@ pub async fn api_refresh(
 }

 #[get("/api/stats/vault")]
-pub async fn vault_stats(state: web::Data<State>) -> Result<HttpResponse, Error> {
+pub async fn vault_stats(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     Ok(HttpResponse::Ok().json(connection.get_stats().map_err(ErrorInternalServerError)?))
 }

 #[get("/api/stats/store")]
-pub async fn store_stats(state: web::Data<State>) -> Result<HttpResponse, Error> {
+pub async fn store_stats(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
     Ok(HttpResponse::Ok().json(json!({
         "main": state.store.stats().map_err(ErrorInternalServerError)?
     })))
@@ -809,9 +1037,11 @@ pub struct JobsRequest {

 #[get("/api/jobs")]
 pub async fn get_jobs(
+    req: HttpRequest,
     state: web::Data<State>,
     web::Query(query): web::Query<JobsRequest>,
 ) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
     let jobs = state
         .job_container
         .get_jobs()
@@ -832,17 +1062,52 @@ pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
         "name": state.config.vault_name,
         // "location": &*state.store.path,
         "version": format!(
-            "{} / {} / {}",
+            "{} (base: {}, db: {}, cli: {})",
+            crate::common::get_version(),
             upend_base::common::build::PKG_VERSION,
             upend_db::common::build::PKG_VERSION,
             build::PKG_VERSION
         ),
-        "desktop": state.config.desktop_enabled
+        "desktop": state.config.desktop_enabled,
+        "public": *state.public.lock().unwrap(),
     })))
 }

+#[get("/api/options")]
+pub async fn get_options(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+    let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
+    Ok(HttpResponse::Ok().json(
+        connection
+            .get_vault_options()
+            .map_err(ErrorInternalServerError)?,
+    ))
+}
+
+#[put("/api/options")]
+pub async fn put_options(
+    req: HttpRequest,
+    state: web::Data<State>,
+    payload: web::Json<VaultOptions>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
+
+    let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
+    let options = payload.into_inner();
+    web::block(move || connection.set_vault_options(options))
+        .await
+        .map_err(ErrorInternalServerError)?
+        .map_err(ErrorInternalServerError)?;
+
+    Ok(HttpResponse::Ok().finish())
+}

 #[get("/api/migration/user-entries")]
-pub async fn get_user_entries(state: web::Data<State>) -> Result<HttpResponse, Error> {
+pub async fn get_user_entries(
+    req: HttpRequest,
+    state: web::Data<State>,
+) -> Result<HttpResponse, Error> {
+    check_auth(&req, &state)?;
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;

     let result = web::block(move || connection.get_explicit_entries())
@@ -852,6 +1117,42 @@ pub async fn get_user_entries(state: web::Data<State>) -> Result<HttpResponse, E
     Ok(HttpResponse::Ok().json(result.as_hash().map_err(ErrorInternalServerError)?))
 }

+#[derive(Serialize)]
+pub struct Plugin {
+    name: String,
+    info: Option<PluginInfo>,
+    error: Option<String>,
+}
+
+#[get("/api/plugins")]
+pub async fn get_plugins(state: web::Data<State>) -> Result<HttpResponse, Error> {
+    let plugins: Vec<Plugin> = state
+        .plugins
+        .clone()
+        .plugins
+        .iter()
+        .map(|p| match p {
+            crate::plugins::UpEndPlugin::Initialized(p) => Plugin {
+                name: p.info.name.clone(),
+                info: Some(p.info.clone()),
+                error: None,
+            },
+            crate::plugins::UpEndPlugin::Failed(p) => Plugin {
+                name: p
+                    .path
+                    .components()
+                    .last()
+                    .map(|p| p.as_os_str().to_string_lossy().into_owned())
+                    .unwrap_or_else(|| "???".into()),
+                info: None,
+                error: Some(p.error.clone()),
+            },
+        })
+        .collect();
+
+    Ok(HttpResponse::Ok().json(plugins))
+}

 #[derive(Debug)]
 enum ExternalFetchError {
     Status(anyhow::Error),
@@ -889,7 +1190,8 @@ impl ResponseError for ExternalFetchError {
 }

 const MAX_EXTERNAL_SIZE: usize = 128_000_000;
-#[tracing::instrument(skip(url), fields(url=%url))]
+
+#[tracing::instrument(skip(url), fields(url = % url))]
 fn fetch_external(url: Url) -> Result<(bytes::Bytes, Option<String>), ExternalFetchError> {
     debug!("Fetching...");

@@ -958,10 +1260,11 @@ mod tests {

     #[actix_web::test]
     async fn test_get_info() {
-        let app = actix_web::test::init_service(crate::serve::get_app::<
-            std::path::PathBuf,
-            Vec<String>,
-        >(None, vec![], get_state()))
+        let app = actix_web::test::init_service(crate::serve::get_app::<Vec<String>>(
+            false,
+            vec![],
+            get_state(),
+        ))
         .await;
         let req = actix_web::test::TestRequest::get()
             .uri("/api/info")
@@ -970,30 +1273,21 @@ mod tests {
         #[derive(Deserialize)]
         struct VaultInfo {
             name: Option<String>,
-            version: String,
             desktop: bool,
         }
         let info: VaultInfo = actix_web::test::call_and_read_body_json(&app, req).await;

         assert_eq!(info.name, Some("TEST VAULT".to_string()));
-        assert_eq!(
-            info.version,
-            format!(
-                "{} / {} / {}",
-                upend_base::common::build::PKG_VERSION,
-                upend_db::common::build::PKG_VERSION,
-                build::PKG_VERSION
-            )
-        );
         assert!(!info.desktop);
     }

     #[actix_web::test]
     async fn test_get_hier() {
-        let app = actix_web::test::init_service(crate::serve::get_app::<
-            std::path::PathBuf,
-            Vec<String>,
-        >(None, vec![], get_state()))
+        let app = actix_web::test::init_service(crate::serve::get_app::<Vec<String>>(
+            false,
+            vec![],
+            get_state(),
+        ))
         .await;

         let req = actix_web::test::TestRequest::get()
@@ -1026,7 +1320,12 @@ mod tests {
             .uri("/api/hier/NATIVE/hello-world.txt")
             .to_request();
         let result = actix_web::test::call_service(&app, req).await;
-        assert_eq!(result.status(), http::StatusCode::FOUND);
+        assert_eq!(
+            result.status(),
+            http::StatusCode::FOUND,
+            "expected redirect, got {:}",
+            result.status()
+        );
         assert_eq!(
             result
                 .headers()
@@ -1040,10 +1339,11 @@ mod tests {

     #[actix_web::test]
     async fn test_obj_entity_info() {
-        let app = actix_web::test::init_service(crate::serve::get_app::<
-            std::path::PathBuf,
-            Vec<String>,
-        >(None, vec![], get_state()))
+        let app = actix_web::test::init_service(crate::serve::get_app::<Vec<String>>(
+            false,
+            vec![],
+            get_state(),
+        ))
         .await;

         let digest = UpMultihash::from_sha256([1, 2, 3, 4, 5]).unwrap();
@@ -1057,7 +1357,7 @@ mod tests {
         assert_eq!(result["entity"]["t"], "Hash");
         assert_eq!(result["entity"]["c"], digest_str);

-        let address = Address::Attribute("TEST".to_string());
+        let address = Address::Attribute("TEST".parse().unwrap());
         let req = actix_web::test::TestRequest::get()
             .uri(&format!("/api/obj/{}", address))
             .to_request();
@@ -1110,8 +1410,19 @@ mod tests {
         ) as Box<dyn UpStore + Send + Sync>);
         let job_container = jobs::JobContainer::new();

-        store.update(&upend, job_container.clone(), true).unwrap();
+        store
+            .update(
+                &upend,
+                job_container.clone(),
+                UpdateOptions {
+                    initial: true,
+                    tree_mode: upend_db::BlobMode::default(),
+                },
+                OperationContext::default(),
+            )
+            .unwrap();
+
+        let plugins = Plugins { plugins: vec![] };
         State {
             upend,
             store,
@@ -1120,11 +1431,12 @@ mod tests {
                 desktop_enabled: false,
                 trust_executables: false,
                 secret: "secret".to_string(),
-                key: None,
             },
+            plugins: plugins.into(),
             job_container,
             preview_store: None,
             preview_thread_pool: None,
+            public: Arc::new(Mutex::new(true)),
         }
     }
 }

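A sketch only, not part of the diff: exercising the new login endpoint in the same style as the tests above. It assumes a user "alice" was registered first; the field names follow `UserPayload`, and the response shape follows the handler (`{ "key": <token> }`).

// Hypothetical test fragment; `app` is an initialized test service as in the tests above.
let req = actix_web::test::TestRequest::post()
    .uri("/api/auth/login")
    .set_json(serde_json::json!({ "username": "alice", "password": "password" }))
    .to_request();
let resp: serde_json::Value = actix_web::test::call_and_read_body_json(&app, req).await;
assert!(resp.get("key").is_some()); // the returned JWT, usable as a Bearer token
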
@@ -1,8 +1,8 @@
 use crate::routes;
-use std::path::Path;
+use actix_web_lab::web::spa;

-pub fn get_app<P, S>(
-    ui_path: Option<P>,
+pub fn get_app<S>(
+    ui_enabled: bool,
     allowed_origins: S,
     state: crate::routes::State,
 ) -> actix_web::App<
@@ -15,7 +15,6 @@ pub fn get_app<P, S>(
     >,
 >
 where
-    P: AsRef<Path> + Clone,
     S: IntoIterator<Item = String> + Clone,
 {
     let allowed_origins: Vec<String> = allowed_origins.into_iter().collect();
@@ -47,7 +46,11 @@ where
         .app_data(actix_web::web::Data::new(state))
         .wrap(actix_web::middleware::Logger::default().exclude("/api/jobs"))
         .service(routes::login)
+        .service(routes::register)
+        .service(routes::logout)
+        .service(routes::whoami)
         .service(routes::get_raw)
+        .service(routes::head_raw)
         .service(routes::get_thumbnail)
         .service(routes::get_query)
         .service(routes::get_object)
@@ -64,11 +67,18 @@ where
         .service(routes::store_stats)
         .service(routes::get_jobs)
         .service(routes::get_info)
-        .service(routes::get_user_entries);
+        .service(routes::get_options)
+        .service(routes::put_options)
+        .service(routes::get_user_entries)
+        .service(routes::get_plugins);

-    if let Some(ui_path) = ui_path {
-        return app
-            .service(actix_files::Files::new("/", ui_path.as_ref()).index_file("index.html"));
+    if ui_enabled {
+        return app.service(
+            spa()
+                .index_file(crate::common::WEBUI_PATH.to_str().unwrap().to_owned() + "/index.html")
+                .static_resources_location(crate::common::WEBUI_PATH.to_str().unwrap())
+                .finish(),
+        );
     }

 #[actix_web::get("/")]

cliff.toml (32 lines changed)

@@ -41,25 +41,27 @@ filter_unconventional = true
 # process each line of a commit as an individual commit
 split_commits = false
 # regex for preprocessing the commit messages
+commit_parsers = [
+    {message = "[\\.]{3}", group = "Ignore", skip = true},
+    {message = "^feat", group = "Features"},
+    {message = "^fix", group = "Bug Fixes"},
+    {message = "^doc", group = "Documentation"},
+    {message = "^perf", group = "Performance"},
+    {message = "^refactor", group = "Refactor"},
+    {message = "^style", group = "Styling"},
+    {message = "^test", group = "Testing"},
+    {message = "^media", group = "Media"},
+    {message = "^chore\\(release\\): prepare for", skip = true},
+    {message = "^chore", group = "Miscellaneous"},
+    {message = "wip", group = "Work in Progress", skip = true},
+    {message = "^(ci|dev)", group = "Operations & Development"},
+    {body = ".*security", group = "Security"},
+]
 commit_preprocessors = [
     # { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](https://github.com/orhun/git-cliff/issues/${2}))"}, # replace issue numbers
 ]
-# regex for parsing and grouping commits
-commit_parsers = [
-    { message = "^feat", group = "Features" },
-    { message = "^fix", group = "Bug Fixes" },
-    { message = "^doc", group = "Documentation" },
-    { message = "^perf", group = "Performance" },
-    { message = "^refactor", group = "Refactor" },
-    { message = "^style", group = "Styling" },
-    { message = "^test", group = "Testing" },
-    { message = "^media", group = "Media" },
-    { message = "^chore\\(release\\): prepare for", skip = true },
-    { message = "^chore", group = "Miscellaneous Tasks" },
-    { body = ".*security", group = "Security" },
-]
 # protect breaking changes from being skipped due to matching a skipping commit_parser
-protect_breaking_commits = false
+protect_breaking_commits = true
 # filter out the commits that are not matched by commit parsers
 filter_commits = false
 # glob pattern for matching git tags

@@ -1,6 +1,6 @@
 [package]
 name = "upend-db"
-version = "0.0.1"
+version = "0.0.2"
 homepage = "https://upend.dev/"
 repository = "https://git.thm.place/thm/upend"
 authors = ["Tomáš Mládek <t@mldk.cz>"]
@@ -26,13 +26,16 @@ once_cell = "1.7.2"
 lru = "0.7.0"

 diesel = { version = "1.4", features = [
     "sqlite",
     "r2d2",
     "chrono",
     "serde_json",
 ] }
 diesel_migrations = "1.4"
 libsqlite3-sys = { version = "^0", features = ["bundled"] }
+password-hash = "0.5.0"
+argon2 = "0.5.3"

 chrono = { version = "0.4", features = ["serde"] }
 serde = { version = "1.0", features = ["derive"] }
@@ -42,17 +45,17 @@ regex = "1"

 multibase = "0.9"
 multihash = { version = "*", default-features = false, features = [
     "alloc",
     "multihash-impl",
     "sha2",
     "identity",
 ] }
 uuid = { version = "1.4", features = ["v4"] }
 url = { version = "2", features = ["serde"] }

 filebuffer = "0.4.0"
 tempfile = "^3.2.0"
-walkdir = "2"
+jwalk = "0.8.1"

 tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }
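The `password-hash` and `argon2` dependencies added above suggest that credentials for the new `users` table are stored as Argon2 hashes. A minimal sketch of hashing and verifying with those crates follows; this is an assumption about usage, and none of the identifiers below come from the actual UpEnd code.

// Sketch only, using the documented argon2 0.5 / password-hash 0.5 API.
use argon2::password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
use argon2::Argon2;

fn hash_and_verify(password: &str) -> Result<bool, argon2::password_hash::Error> {
    // Hash with a freshly generated salt, as done at registration time.
    let salt = SaltString::generate(&mut OsRng);
    let hashed = Argon2::default()
        .hash_password(password.as_bytes(), &salt)?
        .to_string();
    // Verify against the stored PHC-format string, as done at login time.
    let parsed = PasswordHash::new(&hashed)?;
    Ok(Argon2::default()
        .verify_password(password.as_bytes(), &parsed)
        .is_ok())
}
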

@@ -0,0 +1 @@
+DROP TABLE users;

@@ -0,0 +1,7 @@
+CREATE TABLE users
+(
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    username VARCHAR NOT NULL,
+    password VARCHAR NOT NULL,
+    UNIQUE (username)
+);

@@ -0,0 +1,2 @@
+ALTER TABLE data
+    DROP COLUMN user;

@@ -0,0 +1,2 @@
+ALTER TABLE data
+    ADD COLUMN user VARCHAR;

@@ -1,14 +0,0 @@
-use crate::addressing::Address;
-use crate::entry::InvariantEntry;
-
-lazy_static! {
-    pub static ref HIER_ROOT_INVARIANT: InvariantEntry = InvariantEntry {
-        attribute: String::from(ATTR_KEY),
-        value: "HIER_ROOT".into(),
-    };
-    pub static ref HIER_ROOT_ADDR: Address = HIER_ROOT_INVARIANT.entity().unwrap();
-    pub static ref TYPE_HASH_ADDRESS: Address = Address::Hash(crate::util::hash::Hash(vec![]));
-    pub static ref TYPE_UUID_ADDRESS: Address = Address::Uuid(uuid::Uuid::nil());
-    pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute("".to_string());
-    pub static ref TYPE_URL_ADDRESS: Address = Address::Url(url::Url::parse("up:").unwrap());
-}

db/src/engine.rs (265 lines changed)

@@ -1,5 +1,4 @@
 use std::collections::HashMap;
-use std::iter::zip;

 use super::inner::models::Entry;
 use super::inner::schema::data;
@@ -17,8 +16,10 @@ use diesel::{
 };
 use diesel::{BoxableExpression, QueryDsl};
 use diesel::{ExpressionMethods, TextExpressionMethods};
-use upend_base::entry::EntryValue;
-use upend_base::lang::{PatternQuery, Query, QueryComponent, QueryPart, QueryQualifier};
+use upend_base::addressing::Address;
+use upend_base::entry::{EntryPart, EntryValue};
+use upend_base::error::UpEndError;
+use upend_base::lang::{Query, QueryComponent, QueryPart, QueryQualifier};

 #[derive(Debug, Clone)]
 pub struct QueryExecutionError(String);
@@ -31,6 +32,12 @@ impl std::fmt::Display for QueryExecutionError {

 impl std::error::Error for QueryExecutionError {}

+impl From<UpEndError> for QueryExecutionError {
+    fn from(e: UpEndError) -> Self {
+        QueryExecutionError(e.to_string())
+    }
+}
+
 pub fn execute(
     connection: &PooledConnection<ConnectionManager<SqliteConnection>>,
     query: Query,
@@ -54,66 +61,176 @@ pub fn execute(
                 .into(),
             )),
             _ => {
-                let subquery_results = mq
-                    .queries
-                    .iter()
-                    .map(|q| execute(connection, *q.clone()))
-                    .collect::<Result<Vec<Vec<Entry>>, QueryExecutionError>>()?;
-                match mq.qualifier {
-                    QueryQualifier::Not => unreachable!(),
-                    QueryQualifier::And => Ok(subquery_results
-                        .into_iter()
-                        .reduce(|acc, cur| {
-                            acc.into_iter()
-                                .filter(|e| {
-                                    cur.iter().map(|e| &e.identity).any(|x| x == &e.identity)
-                                })
-                                .collect()
-                        })
-                        .unwrap()), // TODO
-                    QueryQualifier::Or => Ok(subquery_results.into_iter().flatten().collect()),
-                    QueryQualifier::Join => {
-                        let pattern_queries = mq
-                            .queries
-                            .into_iter()
-                            .map(|q| match *q {
-                                Query::SingleQuery(QueryPart::Matches(pq)) => Some(pq),
-                                _ => None,
-                            })
-                            .collect::<Option<Vec<_>>>();
-
-                        if let Some(pattern_queries) = pattern_queries {
-                            let entries = zip(pattern_queries, subquery_results).map(
-                                |(query, results)| {
-                                    results
-                                        .into_iter()
-                                        .map(|e| EntryWithVars::new(&query, e))
-                                        .collect::<Vec<EntryWithVars>>()
-                                },
-                            );
-
-                            let joined = entries
-                                .reduce(|acc, cur| {
-                                    acc.into_iter()
-                                        .filter(|tested_entry| {
-                                            tested_entry.vars.iter().any(|(k1, v1)| {
-                                                cur.iter().any(|other_entry| {
-                                                    other_entry
-                                                        .vars
-                                                        .iter()
-                                                        .any(|(k2, v2)| k1 == k2 && v1 == v2)
-                                                })
-                                            })
-                                        })
-                                        .collect()
-                                })
-                                .unwrap(); // TODO
-
-                            Ok(joined.into_iter().map(|ev| ev.entry).collect())
-                        } else {
-                            Err(QueryExecutionError(
-                                "Cannot join on non-atomic queries.".into(),
-                            ))
-                        }
+                if let QueryQualifier::Join = mq.qualifier {
+                    let pattern_queries = mq
+                        .queries
+                        .into_iter()
+                        .map(|q| match *q {
+                            Query::SingleQuery(QueryPart::Matches(pq)) => Some(pq),
+                            _ => None,
+                        })
+                        .collect::<Option<Vec<_>>>()
+                        .ok_or(QueryExecutionError(
+                            "Cannot join on non-atomic queries.".into(),
+                        ))?;
+
+                    let mut vars: HashMap<String, Vec<EntryPart>> = HashMap::new();
+                    let mut subquery_results: Vec<Entry> = vec![];
+
+                    for query in pattern_queries {
+                        let mut final_query = query.clone();
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.entity {
+                            if let Some(entities) = vars.get(var_name) {
+                                final_query.entity = QueryComponent::In(
+                                    entities
+                                        .iter()
+                                        .filter_map(|e| match e {
+                                            EntryPart::Entity(a) => Some(a.clone()),
+                                            EntryPart::Value(EntryValue::Address(a)) => {
+                                                Some(a.clone())
+                                            }
+                                            _ => None,
+                                        })
+                                        .collect(),
+                                );
+
+                                if final_query.entity == QueryComponent::In(vec![]) {
+                                    return Ok(vec![]);
+                                }
+                            }
+                        }
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.attribute {
+                            if let Some(attributes) = vars.get(var_name) {
+                                final_query.attribute = QueryComponent::In(
+                                    attributes
+                                        .iter()
+                                        .filter_map(|e| {
+                                            if let EntryPart::Attribute(a) = e {
+                                                Some(a.clone())
+                                            } else {
+                                                None
+                                            }
+                                        })
+                                        .collect(),
+                                );
+
+                                if final_query.attribute == QueryComponent::In(vec![]) {
+                                    return Ok(vec![]);
+                                }
+                            }
+                        }
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.value {
+                            if let Some(values) = vars.get(var_name) {
+                                final_query.value = QueryComponent::In(
+                                    values
+                                        .iter()
+                                        .filter_map(|e| match e {
+                                            EntryPart::Entity(a) => {
+                                                Some(EntryValue::Address(a.clone()))
+                                            }
+                                            EntryPart::Attribute(a) => {
+                                                Some(EntryValue::Address(Address::Attribute(
+                                                    a.clone(),
+                                                )))
+                                            }
+                                            EntryPart::Value(v) => Some(v.clone()),
+                                            _ => None,
+                                        })
+                                        .collect(),
+                                );
+
+                                if final_query.value == QueryComponent::In(vec![]) {
+                                    return Ok(vec![]);
+                                }
+                            }
+                        }
+
+                        subquery_results = execute(
+                            connection,
+                            Query::SingleQuery(QueryPart::Matches(final_query)),
+                        )?;
+
+                        if subquery_results.is_empty() {
+                            return Ok(vec![]);
+                        }
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.entity {
+                            vars.insert(
+                                var_name.clone(),
+                                subquery_results
+                                    .iter()
+                                    .map(|e| {
+                                        EntryPart::Entity(
+                                            Address::decode(&e.entity)
+                                                .map_err(|e| QueryExecutionError(e.to_string()))
+                                                .unwrap(),
+                                        )
+                                    })
+                                    .collect(),
+                            );
+                        }
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.attribute {
+                            vars.insert(
+                                var_name.clone(),
+                                subquery_results
+                                    .iter()
+                                    .map(|e| e.attribute.parse().map(EntryPart::Attribute))
+                                    .collect::<Result<Vec<EntryPart>, _>>()?,
+                            );
+                        }
+
+                        if let QueryComponent::Variable(Some(var_name)) = &query.value {
+                            vars.insert(
+                                var_name.clone(),
+                                subquery_results
+                                    .iter()
+                                    .map(|e| {
+                                        if let Some(value_string) = &e.value_str {
+                                            if let Ok(value) = value_string.parse() {
+                                                return Ok(EntryPart::Value(value));
+                                            }
+                                        }
+                                        if let Some(value_number) = e.value_num {
+                                            return Ok(EntryPart::Value(EntryValue::Number(
+                                                value_number,
+                                            )));
+                                        }
+                                        Err(QueryExecutionError(
+                                            "value-less entries cannot be joined on".into(),
+                                        ))
+                                    })
+                                    .collect::<Result<Vec<EntryPart>, _>>()?,
+                            );
+                        }
+                    }
+
+                    Ok(subquery_results)
+                } else {
+                    let subquery_results = mq
+                        .queries
+                        .iter()
+                        .map(|q| execute(connection, *q.clone()))
+                        .collect::<Result<Vec<Vec<Entry>>, QueryExecutionError>>()?;
+                    match mq.qualifier {
+                        QueryQualifier::Join | QueryQualifier::Not => unreachable!(),
+                        QueryQualifier::And => Ok(subquery_results
+                            .into_iter()
+                            .reduce(|acc, cur| {
+                                acc.into_iter()
+                                    .filter(|e| {
+                                        cur.iter()
+                                            .map(|e| &e.identity)
+                                            .any(|x| x == &e.identity)
+                                    })
+                                    .collect()
+                            })
+                            .unwrap()), // TODO
+                        QueryQualifier::Or => {
+                            Ok(subquery_results.into_iter().flatten().collect())
+                        }
                     }
                 }
             }
@@ -123,36 +240,6 @@ pub fn execute(
         }
     }

-struct EntryWithVars {
-    entry: Entry,
-    vars: HashMap<String, String>,
-}
-
-impl EntryWithVars {
-    pub fn new(query: &PatternQuery, entry: Entry) -> Self {
-        let mut vars = HashMap::new();
-
-        if let QueryComponent::Variable(Some(var_name)) = &query.entity {
-            vars.insert(
-                var_name.clone(),
-                upend_base::hash::b58_encode(&entry.entity),
-            );
-        }
-
-        if let QueryComponent::Variable(Some(var_name)) = &query.attribute {
-            vars.insert(var_name.clone(), entry.attribute.clone());
-        }
-
-        if let QueryComponent::Variable(Some(var_name)) = &query.value {
-            if let Some(value_str) = &entry.value_str {
-                vars.insert(var_name.clone(), value_str.clone());
-            }
-        }
-
-        EntryWithVars { entry, vars }
-    }
-}
-
 type SqlPredicate = dyn BoxableExpression<data::table, Sqlite, SqlType = Bool>;

 type SqlResult = Option<Box<SqlPredicate>>;
@@ -184,10 +271,10 @@ fn to_sqlite_predicates(query: Query) -> Result<SqlResult, QueryExecutionError>

             match &eq.attribute {
                 QueryComponent::Exact(q_attribute) => {
-                    subqueries.push(Box::new(data::attribute.eq(q_attribute.0.clone())))
+                    subqueries.push(Box::new(data::attribute.eq(q_attribute.to_string())))
                 }
                 QueryComponent::In(q_attributes) => subqueries.push(Box::new(
-                    data::attribute.eq_any(q_attributes.iter().map(|a| &a.0).cloned()),
+                    data::attribute.eq_any(q_attributes.iter().map(|a| a.to_string())),
                 )),
                 QueryComponent::Contains(q_attribute) => subqueries
                     .push(Box::new(data::attribute.like(format!("%{}%", q_attribute)))),
|
|
|
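For orientation: the rewritten execution path above binds pattern variables (the `?a`-style components) by collecting each subquery's entities, attributes, or values into `vars` and constraining the next pattern with them. A minimal usage sketch, modelled on the query syntax used by the tests later in this diff; `parent_entity` and the surrounding `format!` bindings are stand-ins, not part of this hunk:

// Hypothetical caller: find entries whose parent (via ATTR_IN) is itself
// contained in `parent_entity` - a two-step join on the shared variable ?a.
let query = format!(
    r#"(join
        (matches ?a "{ATTR_IN}" @{parent_entity})
        (matches ? "{ATTR_IN}" ?a)
    )"#
)
.parse()
.unwrap();
let results = connection.query(query)?;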
@@ -1,35 +1,38 @@
use crate::inner::models;
-use anyhow::{anyhow, Result};
use std::convert::TryFrom;
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
+use upend_base::error::UpEndError;

impl TryFrom<&models::Entry> for Entry {
-   type Error = anyhow::Error;
+   type Error = UpEndError;

    fn try_from(e: &models::Entry) -> Result<Self, Self::Error> {
        if let Some(value_str) = &e.value_str {
            Ok(Entry {
                entity: Address::decode(&e.entity)?,
-               attribute: e.attribute.clone(),
+               attribute: e.attribute.parse()?,
-               value: value_str.parse()?,
+               value: value_str.parse().unwrap(),
                provenance: e.provenance.clone(),
+               user: e.user.clone(),
                timestamp: e.timestamp,
            })
        } else if let Some(value_num) = e.value_num {
            Ok(Entry {
                entity: Address::decode(&e.entity)?,
-               attribute: e.attribute.clone(),
+               attribute: e.attribute.parse()?,
                value: EntryValue::Number(value_num),
                provenance: e.provenance.clone(),
+               user: e.user.clone(),
                timestamp: e.timestamp,
            })
        } else {
            Ok(Entry {
                entity: Address::decode(&e.entity)?,
-               attribute: e.attribute.clone(),
+               attribute: e.attribute.parse()?,
                value: EntryValue::Number(f64::NAN),
                provenance: e.provenance.clone(),
+               user: e.user.clone(),
                timestamp: e.timestamp,
            })
        }

@@ -40,22 +43,20 @@ impl TryFrom<&Entry> for models::Entry {
    type Error = anyhow::Error;

    fn try_from(e: &Entry) -> Result<Self, Self::Error> {
-       if e.attribute.is_empty() {
-           return Err(anyhow!("Attribute cannot be empty."));
-       }
        let base_entry = models::Entry {
            identity: e.address()?.encode()?,
            entity_searchable: match &e.entity {
-               Address::Attribute(attr) => Some(attr.clone()),
+               Address::Attribute(attr) => Some(attr.to_string()),
                Address::Url(url) => Some(url.to_string()),
                _ => None,
            },
            entity: e.entity.encode()?,
-           attribute: e.attribute.clone(),
+           attribute: e.attribute.to_string(),
            value_str: None,
            value_num: None,
            immutable: false,
            provenance: e.provenance.clone(),
+           user: e.user.clone(),
            timestamp: e.timestamp,
        };
@@ -6,6 +6,7 @@ use lru::LruCache;
use tracing::trace;
use uuid::Uuid;

+use crate::OperationContext;
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::constants::{ATTR_IN, HIER_ROOT_ADDR, HIER_ROOT_INVARIANT};

@@ -17,21 +18,21 @@ use super::UpEndConnection;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct UNode(String);

-impl UNode {
-    pub fn new<T: Into<String>>(s: T) -> Result<Self> {
-        let s = s.into();
-
-        if s.is_empty() {
-            return Err(anyhow!("UNode can not be empty."));
+impl std::str::FromStr for UNode {
+    type Err = anyhow::Error;
+
+    fn from_str(string: &str) -> Result<Self, Self::Err> {
+        if string.is_empty() {
+            Err(anyhow!("UNode can not be empty."))
+        } else {
+            Ok(Self(string.to_string()))
        }
-
-        Ok(Self(s))
    }
}

-impl From<UNode> for String {
-    fn from(value: UNode) -> Self {
-        value.0
+impl std::fmt::Display for UNode {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
    }
}

@@ -48,7 +49,7 @@ impl std::str::FromStr for UHierPath {
        let result: Result<Vec<UNode>> = string
            .trim_end_matches('/')
            .split('/')
-           .map(|part| UNode::new(String::from(part)))
+           .map(UNode::from_str)
            .collect();

        Ok(UHierPath(result?))

@@ -56,12 +57,6 @@ impl std::str::FromStr for UHierPath {
    }
}

-impl std::fmt::Display for UNode {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.0)
-    }
-}
-
impl std::fmt::Display for UHierPath {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(

@@ -80,7 +75,7 @@ pub fn list_roots(connection: &UpEndConnection) -> Result<Vec<Address>> {
    Ok(connection
        .query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
            entity: QueryComponent::Variable(None),
-           attribute: QueryComponent::Exact(ATTR_IN.into()),
+           attribute: QueryComponent::Exact(ATTR_IN.parse().unwrap()),
            value: QueryComponent::Exact((*HIER_ROOT_ADDR).clone().into()),
        })))?
        .into_iter()

@@ -97,6 +92,7 @@ pub fn fetch_or_create_dir(
    parent: Option<Address>,
    directory: UNode,
    create: bool,
+   context: OperationContext,
) -> Result<Address> {
    match parent.clone() {
        Some(address) => trace!("FETCHING/CREATING {}/{:#}", address, directory),

@@ -111,8 +107,8 @@ pub fn fetch_or_create_dir(
    let matching_directories = connection
        .query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
            entity: QueryComponent::Variable(None),
-           attribute: QueryComponent::Exact(ATTR_LABEL.into()),
-           value: QueryComponent::Exact(String::from(directory.clone()).into()),
+           attribute: QueryComponent::Exact(ATTR_LABEL.parse().unwrap()),
+           value: QueryComponent::Exact(directory.to_string().into()),
        })))?
        .into_iter()
        .map(|e: Entry| e.entity);

@@ -121,7 +117,7 @@ pub fn fetch_or_create_dir(
        Some(parent) => connection
            .query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
                entity: QueryComponent::Variable(None),
-               attribute: QueryComponent::Exact(ATTR_IN.into()),
+               attribute: QueryComponent::Exact(ATTR_IN.parse().unwrap()),
                value: QueryComponent::Exact(parent.into()),
            })))?
            .into_iter()

@@ -141,9 +137,10 @@ pub fn fetch_or_create_dir(

    let directory_entry = Entry {
        entity: new_directory_address.clone(),
-       attribute: String::from(ATTR_LABEL),
-       value: String::from(directory).into(),
-       provenance: "SYSTEM FS".to_string(),
+       attribute: ATTR_LABEL.parse().unwrap(),
+       value: directory.to_string().into(),
+       provenance: context.provenance.clone() + "HIER",
+       user: context.user.clone(),
        timestamp: chrono::Utc::now().naive_utc(),
    };
    connection.insert_entry(directory_entry)?;

@@ -151,17 +148,19 @@ pub fn fetch_or_create_dir(
    connection.insert_entry(if let Some(parent) = parent {
        Entry {
            entity: new_directory_address.clone(),
-           attribute: String::from(ATTR_IN),
+           attribute: ATTR_IN.parse().unwrap(),
            value: parent.into(),
-           provenance: "SYSTEM FS".to_string(),
+           provenance: context.provenance.clone() + "HIER",
+           user: context.user.clone(),
            timestamp: chrono::Utc::now().naive_utc(),
        }
    } else {
        Entry {
            entity: new_directory_address.clone(),
-           attribute: String::from(ATTR_IN),
+           attribute: ATTR_IN.parse().unwrap(),
            value: HIER_ROOT_ADDR.clone().into(),
-           provenance: "SYSTEM FS".to_string(),
+           provenance: context.provenance.clone() + "HIER",
+           user: context.user.clone(),
            timestamp: chrono::Utc::now().naive_utc(),
        }
    })?;

@@ -183,6 +182,7 @@ pub fn resolve_path(
    connection: &UpEndConnection,
    path: &UHierPath,
    create: bool,
+   context: OperationContext,
) -> Result<Vec<Address>> {
    let mut result: Vec<Address> = vec![];
    let mut path_stack = path.0.to_vec();

@@ -194,6 +194,7 @@ pub fn resolve_path(
            result.last().cloned(),
            path_stack.pop().unwrap(),
            create,
+           context.clone(),
        )?;
        result.push(dir_address);
    }

@@ -207,14 +208,14 @@ pub fn resolve_path_cached(
    connection: &UpEndConnection,
    path: &UHierPath,
    create: bool,
+   context: OperationContext,
    cache: &Arc<Mutex<ResolveCache>>,
) -> Result<Vec<Address>> {
    let mut result: Vec<Address> = vec![];
    let mut path_stack = path.0.to_vec();

    path_stack.reverse();
-   while !path_stack.is_empty() {
-       let node = path_stack.pop().unwrap();
+   while let Some(node) = path_stack.pop() {
        let parent = result.last().cloned();
        let key = (parent.clone(), node.clone());
        let mut cache_lock = cache.lock().unwrap();

@@ -223,7 +224,7 @@ pub fn resolve_path_cached(
            result.push(address.clone());
        } else {
            drop(cache_lock);
-           let address = fetch_or_create_dir(connection, parent, node, create)?;
+           let address = fetch_or_create_dir(connection, parent, node, create, context.clone())?;
            result.push(address.clone());
            cache.lock().unwrap().put(key, address);
        }

@@ -249,10 +250,10 @@ mod tests {

    #[test]
    fn test_unode_nonempty() {
-       let node = UNode::new("foobar");
+       let node = "foobar".parse::<UNode>();
        assert!(node.is_ok());

-       let node = UNode::new("");
+       let node = "".parse::<UNode>();
        assert!(node.is_err());
    }

@@ -293,11 +294,23 @@ mod tests {
        let open_result = UpEndDatabase::open(&temp_dir, true).unwrap();
        let connection = open_result.db.connection().unwrap();

-       let foo_result = fetch_or_create_dir(&connection, None, UNode("foo".to_string()), true);
+       let foo_result = fetch_or_create_dir(
+           &connection,
+           None,
+           UNode("foo".to_string()),
+           true,
+           OperationContext::default(),
+       );
        assert!(foo_result.is_ok());
        let foo_result = foo_result.unwrap();

-       let bar_result = fetch_or_create_dir(&connection, None, UNode("bar".to_string()), true);
+       let bar_result = fetch_or_create_dir(
+           &connection,
+           None,
+           UNode("bar".to_string()),
+           true,
+           OperationContext::default(),
+       );
        assert!(bar_result.is_ok());
        let bar_result = bar_result.unwrap();

@@ -306,6 +319,7 @@ mod tests {
            Some(bar_result.clone()),
            UNode("baz".to_string()),
            true,
+           OperationContext::default(),
        );
        assert!(baz_result.is_ok());
        let baz_result = baz_result.unwrap();

@@ -313,7 +327,12 @@ mod tests {
        let roots = list_roots(&connection);
        assert_eq!(roots.unwrap(), [foo_result, bar_result.clone()]);

-       let resolve_result = resolve_path(&connection, &"bar/baz".parse().unwrap(), false);
+       let resolve_result = resolve_path(
+           &connection,
+           &"bar/baz".parse().unwrap(),
+           false,
+           OperationContext::default(),
+       );

        assert!(resolve_result.is_ok());
        assert_eq!(

@@ -321,10 +340,20 @@ mod tests {
            vec![bar_result.clone(), baz_result.clone()]
        );

-       let resolve_result = resolve_path(&connection, &"bar/baz/bax".parse().unwrap(), false);
+       let resolve_result = resolve_path(
+           &connection,
+           &"bar/baz/bax".parse().unwrap(),
+           false,
+           OperationContext::default(),
+       );
        assert!(resolve_result.is_err());

-       let resolve_result = resolve_path(&connection, &"bar/baz/bax".parse().unwrap(), true);
+       let resolve_result = resolve_path(
+           &connection,
+           &"bar/baz/bax".parse().unwrap(),
+           true,
+           OperationContext::default(),
+       );
        assert!(resolve_result.is_ok());

        let bax_result = fetch_or_create_dir(

@@ -332,6 +361,7 @@ mod tests {
            Some(baz_result.clone()),
            UNode("bax".to_string()),
            false,
+           OperationContext::default(),
        );
        assert!(bax_result.is_ok());
        let bax_result = bax_result.unwrap();
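Since UNode now goes through the standard FromStr and Display traits instead of UNode::new and From<UNode> for String, call sites construct and render nodes the same way as UHierPath. A minimal sketch (inside a function returning anyhow::Result; the node names are arbitrary examples):

// Construct a node and a path via FromStr; empty segments are rejected.
let node: UNode = "documents".parse()?;
assert_eq!(node.to_string(), "documents"); // Display
let path: UHierPath = "documents/2023".parse()?;
assert!("".parse::<UNode>().is_err());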
@@ -1,8 +1,8 @@
-use super::schema::{data, meta};
+use super::schema::{data, meta, users};
use chrono::NaiveDateTime;
use serde::Serialize;

-#[derive(Queryable, Insertable, Serialize, Debug)]
+#[derive(Queryable, Insertable, Serialize, Debug, Clone)]
#[table_name = "data"]
pub struct Entry {
    pub identity: Vec<u8>,

@@ -13,6 +13,7 @@ pub struct Entry {
    pub value_num: Option<f64>,
    pub immutable: bool,
    pub provenance: String,
+   pub user: Option<String>,
    pub timestamp: NaiveDateTime,
}

@@ -23,3 +24,11 @@ pub struct MetaValue {
    pub key: String,
    pub value: String,
}

+#[derive(Queryable, Insertable, Serialize, Clone, Debug)]
+#[table_name = "users"]
+pub struct UserValue {
+    pub id: i32,
+    pub username: String,
+    pub password: String,
+}
@@ -8,6 +8,7 @@ table! {
        value_num -> Nullable<Double>,
        immutable -> Bool,
        provenance -> Text,
+       user -> Nullable<Text>,
        timestamp -> Timestamp,
    }
}

@@ -20,4 +21,10 @@ table! {
    }
}

-allow_tables_to_appear_in_same_query!(data, meta,);
+table! {
+    users (id) {
+        id -> Integer,
+        username -> Text,
+        password -> Text,
+    }
+}

db/src/lib.rs
@@ -26,11 +26,13 @@ use crate::inner::models;
use crate::inner::schema::data;
use crate::util::LoggerSink;
use anyhow::{anyhow, Result};
+use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::result::{DatabaseErrorKind, Error};
use diesel::sqlite::SqliteConnection;
use hierarchies::initialize_hier;
+use serde::{Deserialize, Serialize};
use shadow_rs::is_release;
use std::convert::TryFrom;
use std::fs;

@@ -39,7 +41,7 @@ use std::sync::{Arc, Mutex, RwLock};
use std::time::Duration;
use tracing::{debug, error, trace, warn};
use upend_base::addressing::{Address, Addressable};
-use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
+use upend_base::entry::{Attribute, Entry, EntryValue, ImmutableEntry};
use upend_base::error::UpEndError;
use upend_base::hash::UpMultihash;
use upend_base::lang::Query;

@@ -115,7 +117,7 @@ pub const DATABASE_FILENAME: &str = "upend.sqlite3";

impl UpEndDatabase {
    pub fn open<P: AsRef<Path>>(dirpath: P, reinitialize: bool) -> Result<OpenResult> {
-       embed_migrations!("./migrations/upend/");
+       embed_migrations!("./migrations/upend");

        let upend_path = dirpath.as_ref().join(UPEND_SUBDIR);

@@ -152,7 +154,10 @@ impl UpEndDatabase {
        let connection = db.connection().unwrap();

        if !new {
-           let db_major: u64 = connection.get_meta("VERSION")?.parse()?;
+           let db_major: u64 = connection
+               .get_meta("VERSION")?
+               .ok_or(anyhow!("Database version not found!"))?
+               .parse()?;
            if db_major > build::PKG_VERSION_MAJOR.parse().unwrap() {
                return Err(anyhow!("Incompatible database! Found version "));
            }

@@ -201,7 +206,7 @@ impl UpEndConnection {
        f()
    }

-   pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<String> {
+   pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<Option<String>> {
        use crate::inner::schema::meta::dsl;
        let key = key.as_ref();
@@ -210,12 +215,120 @@ impl UpEndConnection {
        let _lock = self.lock.read().unwrap();
        let conn = self.pool.get()?;

-       dsl::meta
+       let result = dsl::meta
            .filter(dsl::key.eq(key))
-           .load::<models::MetaValue>(&conn)?
-           .first()
-           .ok_or(anyhow!(r#"No META "{key}" value found."#))
-           .map(|mv| mv.value.clone())
+           .load::<models::MetaValue>(&conn)?;
+       let result = result.first();
+       Ok(result.map(|v| v.value.clone()))
    }
+
+   pub fn set_meta<S: AsRef<str>, T: AsRef<str>>(&self, key: S, value: T) -> Result<()> {
+       use crate::inner::schema::meta::dsl;
+       let key = key.as_ref();
+       let value = value.as_ref();
+
+       trace!("Setting META:{key} to {value}");
+
+       let _lock = self.lock.write().unwrap();
+       let conn = self.pool.get()?;
+
+       diesel::replace_into(dsl::meta)
+           .values((dsl::key.eq(key), dsl::value.eq(value)))
+           .execute(&conn)?;
+
+       Ok(())
+   }
+
+   pub fn set_vault_options(&self, options: VaultOptions) -> Result<()> {
+       if let Some(blob_mode) = options.blob_mode {
+           let tree_mode = match blob_mode {
+               BlobMode::Flat => "FLAT".to_string(),
+               BlobMode::Mirror => "MIRROR".to_string(),
+               BlobMode::Incoming(None) => "INCOMING".to_string(),
+               BlobMode::Incoming(Some(group)) => format!("INCOMING:{}", group),
+               BlobMode::StoreOnly => "STORE_ONLY".to_string(),
+           };
+           self.set_meta("VAULT_BLOB_MODE", tree_mode)?;
+       }
+       Ok(())
+   }
+
+   pub fn get_vault_options(&self) -> Result<VaultOptions> {
+       let blob_mode = match self.get_meta("VAULT_BLOB_MODE")? {
+           Some(mode) => match mode.as_str() {
+               "FLAT" => Some(BlobMode::Flat),
+               "MIRROR" => Some(BlobMode::Mirror),
+               "INCOMING" => Some(BlobMode::Incoming(None)),
+               "STORE_ONLY" => Some(BlobMode::StoreOnly),
+               mode if mode.starts_with("INCOMING:") => {
+                   Some(BlobMode::Incoming(Some(mode[9..].to_string())))
+               }
+               _ => {
+                   warn!("Unknown vault tree mode: {}", mode);
+                   None
+               }
+           },
+           None => None,
+       };
+
+       Ok(VaultOptions { blob_mode })
+   }
+
+   pub fn get_users(&self) -> Result<Vec<String>> {
+       use crate::inner::schema::users::dsl;
+
+       let _lock = self.lock.read().unwrap();
+       let conn = self.pool.get()?;
+
+       let result = dsl::users.select(dsl::username).load::<String>(&conn)?;
+
+       Ok(result)
+   }
+
+   pub fn set_user(&self, username: &str, password: &str) -> Result<bool> {
+       use crate::inner::schema::users::dsl;
+
+       let salt = password_hash::SaltString::generate(&mut password_hash::rand_core::OsRng);
+       let argon2 = Argon2::default();
+       let hashed_password = argon2
+           .hash_password(password.as_ref(), &salt)
+           .map_err(|e| anyhow!(e))?
+           .to_string();
+
+       let _lock = self.lock.write().unwrap();
+       let conn = self.pool.get()?;
+       let result = diesel::replace_into(dsl::users)
+           .values((
+               dsl::username.eq(username),
+               dsl::password.eq(hashed_password),
+           ))
+           .execute(&conn)?;
+       Ok(result > 0)
+   }
+
+   pub fn authenticate_user(&self, username: &str, password: &str) -> Result<()> {
+       use crate::inner::schema::users::dsl;
+
+       let conn = self.pool.get()?;
+       let user_result = dsl::users
+           .filter(dsl::username.eq(username))
+           .load::<models::UserValue>(&conn)?;
+
+       match user_result.first() {
+           Some(user) => {
+               let parsed_hash = PasswordHash::new(&user.password).map_err(|e| anyhow!(e))?;
+               let argon2 = Argon2::default();
+               argon2
+                   .verify_password(password.as_ref(), &parsed_hash)
+                   .map_err(|e| anyhow!(e))
+           }
+           None => {
+               let argon2 = Argon2::default();
+               let _ = argon2
+                   .verify_password(password.as_ref(), &PasswordHash::new(&DUMMY_HASH).unwrap());
+               Err(anyhow!("user not found"))
+           }
+       }
    }

    pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result<Option<Entry>> {
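The new user-management methods hash passwords with Argon2 on set_user and verify them in authenticate_user; for unknown users a pre-computed dummy hash is still verified so the failure path takes roughly the same time. A minimal usage sketch mirroring the test_users test further down (the credentials are placeholders):

// Sketch: creating a user and checking credentials on an open connection.
let connection = db.connection()?;
connection.set_user("alice", "correct horse battery staple")?;
connection.authenticate_user("alice", "correct horse battery staple")?; // Ok(())
assert!(connection.authenticate_user("alice", "wrong password").is_err());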
@@ -230,7 +343,7 @@ impl UpEndConnection {

        match entry.len() {
            0 => Ok(None),
-           1 => Ok(Some(Entry::try_from(entry.get(0).unwrap())?)),
+           1 => Ok(Some(Entry::try_from(entry.first().unwrap())?)),
            _ => {
                unreachable!(
                    "Multiple entries returned with the same hash - this should be impossible!"

@@ -253,7 +366,7 @@ impl UpEndConnection {
        let entries = primary
            .iter()
            .map(Entry::try_from)
-           .collect::<Result<Vec<Entry>>>()?;
+           .collect::<Result<Vec<Entry>, UpEndError>>()?;

        let secondary = data
            .filter(

@@ -271,7 +384,7 @@ impl UpEndConnection {
        let secondary_entries = secondary
            .iter()
            .map(Entry::try_from)
-           .collect::<Result<Vec<Entry>>>()?;
+           .collect::<Result<Vec<Entry>, UpEndError>>()?;

        Ok([entries, secondary_entries].concat())
    }

@@ -358,8 +471,8 @@ impl UpEndConnection {
        Ok(result)
    }

-   #[deprecated]
+   // #[deprecated]
-   pub fn get_all_attributes(&self) -> Result<Vec<String>> {
+   pub fn get_all_attributes(&self) -> Result<Vec<Attribute>> {
        use crate::inner::schema::data::dsl::*;

        let _lock = self.lock.read().unwrap();

@@ -371,7 +484,10 @@ impl UpEndConnection {
            .order_by(attribute)
            .load::<String>(&conn)?;

-       Ok(result)
+       Ok(result
+           .into_iter()
+           .map(|a| a.parse())
+           .collect::<Result<Vec<Attribute>, UpEndError>>()?)
    }

    pub fn get_stats(&self) -> Result<serde_json::Value> {

@@ -409,7 +525,7 @@ impl UpEndConnection {
        }))
    }

-   #[deprecated]
+   // #[deprecated]
    pub fn get_explicit_entries(&self) -> Result<Vec<Entry>> {
        use crate::inner::schema::data::dsl::*;
        let _lock = self.lock.read().unwrap();

@@ -426,13 +542,23 @@ impl UpEndConnection {
        Ok(result
            .iter()
            .map(Entry::try_from)
-           .collect::<Result<Vec<Entry>>>()?)
+           .collect::<Result<Vec<Entry>, UpEndError>>()?)
    }
}

+lazy_static! {
+   static ref DUMMY_HASH: String = Argon2::default()
+       .hash_password(
+           "password".as_ref(),
+           &password_hash::SaltString::generate(&mut password_hash::rand_core::OsRng)
+       )
+       .unwrap()
+       .to_string();
+}

#[cfg(test)]
mod test {
-   use upend_base::constants::ATTR_LABEL;
+   use upend_base::constants::{ATTR_IN, ATTR_LABEL};

    use super::*;
    use tempfile::TempDir;

@@ -475,8 +601,8 @@ mod test {
        assert_eq!(result.len(), 2);

        let other_entity = Address::Uuid(uuid::Uuid::new_v4());
-       upend_insert_val!(connection, random_entity, ATTR_LABEL, "BAZQUX").unwrap();
+       upend_insert_val!(connection, other_entity, ATTR_LABEL, "BAZQUX").unwrap();
-       upend_insert_val!(connection, random_entity, "CHARGE", "POSITIVE").unwrap();
+       upend_insert_val!(connection, other_entity, "CHARGE", "POSITIVE").unwrap();

        let query = format!(r#"(matches (in @{random_entity} @{other_entity}) ? ?)"#)
            .parse()

@@ -527,16 +653,86 @@ mod test {
        let result = connection.query(query).unwrap();
        assert_eq!(result.len(), 1);

+       let edge_entity = Address::Uuid(uuid::Uuid::new_v4());
+       upend_insert_addr!(connection, random_entity, ATTR_IN, other_entity).unwrap();
+       upend_insert_addr!(connection, edge_entity, ATTR_IN, random_entity).unwrap();

        let query = format!(
            r#"(join
-               (matches ?a "FLAVOUR" ?)
-               (matches ?a "{ATTR_LABEL}" "FOOBAR")
+               (matches ?a "{ATTR_IN}" @{other_entity})
+               (matches ? "{ATTR_IN}" ?a)
            )"#
        )
        .parse()
        .unwrap();
        let result = connection.query(query).unwrap();
        assert_eq!(result.len(), 1);
-       assert_eq!(result[0].value, "STRANGE".into());
+       assert_eq!(result[0].entity, edge_entity);
+       assert_eq!(result[0].value, EntryValue::Address(random_entity));
+   }
+
+   #[test]
+   fn test_users() {
+       let tempdir = TempDir::new().unwrap();
+       let result = UpEndDatabase::open(&tempdir, false).unwrap();
+       let db = result.db;
+
+       let connection = db.connection().unwrap();
+
+       assert!(connection.authenticate_user("thm", "hunter2").is_err());
+       connection.set_user("thm", "hunter2").unwrap();
+       connection.authenticate_user("thm", "hunter2").unwrap();
+       assert!(connection.authenticate_user("thm", "password").is_err());
+       connection.set_user("thm", "password").unwrap();
+       connection.authenticate_user("thm", "password").unwrap();
+   }
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct VaultOptions {
+   pub blob_mode: Option<BlobMode>,
+}
+
+/// Specifies how to store new blobs
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+pub enum BlobMode {
+   #[default]
+   /// Mirror the original tree
+   Mirror,
+   /// Use only the last level of the tree as a group
+   Flat,
+   /// Place all files in a single group
+   Incoming(Option<String>),
+   /// Only store files, don't place them anywhere
+   StoreOnly,
+}
+
+impl std::str::FromStr for BlobMode {
+   type Err = anyhow::Error;
+
+   fn from_str(s: &str) -> Result<Self, Self::Err> {
+       match s {
+           "flat" => Ok(BlobMode::Flat),
+           "mirror" => Ok(BlobMode::Mirror),
+           "incoming" => Ok(BlobMode::Incoming(None)),
+           s if s.starts_with("incoming:") => Ok(BlobMode::Incoming(Some(s[9..].to_string()))),
+           "store_only" => Ok(BlobMode::StoreOnly),
+           _ => Err(anyhow!("Unknown blob mode: {}", s)),
+       }
+   }
+}
+
+#[derive(Debug, Clone)]
+pub struct OperationContext {
+   pub user: Option<String>,
+   pub provenance: String,
+}
+
+impl Default for OperationContext {
+   fn default() -> Self {
+       Self {
+           user: None,
+           provenance: "SYSTEM".to_string(),
+       }
    }
}
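BlobMode doubles as a user-facing setting: FromStr accepts the lowercase forms used as configuration input, while set_vault_options/get_vault_options persist the uppercase forms in the META table. A small sketch of the accepted strings (the group name is an arbitrary example, inside a function returning anyhow::Result):

// Sketch: parsing the configuration strings accepted by BlobMode::from_str.
let a: BlobMode = "mirror".parse()?;          // BlobMode::Mirror
let b: BlobMode = "incoming:Photos".parse()?; // BlobMode::Incoming(Some("Photos".to_string()))
assert!("something_else".parse::<BlobMode>().is_err());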
@@ -3,9 +3,10 @@ macro_rules! upend_insert_val {
    ($db_connection:expr, $entity:expr, $attribute:expr, $value:expr) => {{
        $db_connection.insert_entry(Entry {
            entity: $entity.clone(),
-           attribute: String::from($attribute),
+           attribute: $attribute.parse().unwrap(),
            value: upend_base::entry::EntryValue::String(String::from($value)),
            provenance: "SYSTEM INIT".to_string(),
+           user: None,
            timestamp: chrono::Utc::now().naive_utc(),
        })
    }};

@@ -16,9 +17,10 @@ macro_rules! upend_insert_addr {
    ($db_connection:expr, $entity:expr, $attribute:expr, $addr:expr) => {{
        $db_connection.insert_entry(Entry {
            entity: $entity.clone(),
-           attribute: String::from($attribute),
+           attribute: $attribute.parse().unwrap(),
            value: upend_base::entry::EntryValue::Address($addr.clone()),
            provenance: "SYSTEM INIT".to_string(),
+           user: None,
            timestamp: chrono::Utc::now().naive_utc(),
        })
    }};
@@ -1,15 +1,19 @@
use self::db::files;

-use super::{Blob, StoreError, UpStore, UpdatePathOutcome};
+use super::{Blob, StoreError, UpStore, UpdateOptions, UpdatePathOutcome};
use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode};
use crate::jobs::{JobContainer, JobHandle};
use crate::util::hash_at_path;
-use crate::{ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR};
-use anyhow::{anyhow, Error, Result};
+use crate::{
+    BlobMode, ConnectionOptions, LoggingHandler, OperationContext, UpEndConnection, UpEndDatabase,
+    UPEND_SUBDIR,
+};
+use anyhow::{anyhow, Result};
use chrono::prelude::*;
use diesel::r2d2::{self, ConnectionManager, ManageConnection};
use diesel::ExpressionMethods;
use diesel::{Connection, QueryDsl, RunQueryDsl, SqliteConnection};
+use jwalk::WalkDir;
use lru::LruCache;
use rayon::prelude::*;
use serde_json::json;

@@ -17,6 +21,7 @@ use std::borrow::Borrow;
use std::convert::TryInto;
use std::path::PathBuf;
use std::path::{Component, Path};
+use std::str::FromStr;
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
use std::{fs, iter};

@@ -25,7 +30,6 @@ use upend_base::addressing::Address;
use upend_base::constants::{ATTR_ADDED, ATTR_BY, ATTR_IN, ATTR_LABEL, ATTR_OF, TYPE_HASH_ADDRESS};
use upend_base::entry::Entry;
use upend_base::hash::{b58_encode, UpMultihash};
-use walkdir::WalkDir;

mod db;

@@ -53,7 +57,7 @@ impl FsStore {
        // while diesel doesn't support multiple embedded migrations...
        let connection = manager.connect()?;
        connection.execute(
            r#"
    CREATE TABLE IF NOT EXISTS files
    (
        id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,

@@ -69,7 +73,7 @@ impl FsStore {

    PRAGMA journal_mode = WAL; PRAGMA wal_autocheckpoint = 1000; PRAGMA wal_checkpoint(TRUNCATE);
    "#,
        )?;

        let pool = r2d2::Pool::builder()
            .connection_customizer(Box::new(ConnectionOptions {

@@ -91,12 +95,14 @@ impl FsStore {
        &self,
        db: D,
        job_handle: JobHandle,
-       quick_check: bool,
-       _disable_synchronous: bool,
+       options: UpdateOptions,
+       context: OperationContext,
    ) -> Result<Vec<UpdatePathOutcome>> {
        let start = Instant::now();
        info!("Vault rescan started.");

+       let quick_check = options.initial;

        let db = db.borrow();
        let upconnection = db.connection()?;

@@ -104,13 +110,13 @@ impl FsStore {
        trace!("Initializing DB types.");
        upend_insert_addr!(
            upconnection,
-           Address::Attribute(FILE_SIZE_KEY.to_string()),
+           Address::Attribute(FILE_SIZE_KEY.parse().unwrap()),
            ATTR_OF,
            TYPE_HASH_ADDRESS
        )?;
        upend_insert_addr!(
            upconnection,
-           Address::Attribute(FILE_MIME_KEY.to_string()),
+           Address::Attribute(FILE_MIME_KEY.parse().unwrap()),
            ATTR_OF,
            TYPE_HASH_ADDRESS
        )?;

@@ -118,11 +124,11 @@ impl FsStore {
        // Walk through the vault, find all paths
        trace!("Traversing vault directory");
        let absolute_dir_path = fs::canonicalize(&*self.path)?;
-       let path_entries: Vec<PathBuf> = WalkDir::new(&*self.path)
+       let paths: Vec<PathBuf> = WalkDir::new(&*self.path)
            .follow_links(true)
            .into_iter()
            .filter_map(|e| e.ok())
-           .filter_map(|e| fs::canonicalize(e.into_path()).ok())
+           .filter_map(|e| fs::canonicalize(e.path()).ok())
            .filter(|e| e.is_file())
            .filter(|e| !e.starts_with(absolute_dir_path.join(UPEND_SUBDIR)))
            .collect();
@@ -132,18 +138,24 @@ impl FsStore {

        // Actual processing
        let count = RwLock::new(0_usize);
-       let resolve_cache = Arc::new(Mutex::new(LruCache::new(256)));
-       let total = path_entries.len() as f32;
+       #[allow(clippy::type_complexity)]
+       let resolve_cache: Arc<Mutex<LruCache<(Option<Address>, UNode), Address>>> =
+           Arc::new(Mutex::new(LruCache::new(256)));
+       let total = paths.len() as f32;
        let shared_job_handle = Arc::new(Mutex::new(job_handle));
-       let path_outcomes: Vec<UpdatePathOutcome> = path_entries
+
+       let path_outcomes: Vec<UpdatePathOutcome> = paths
            .into_par_iter()
            .map(|path| {
                let result = self.process_directory_entry(
                    db,
-                   &resolve_cache,
                    path.clone(),
+                   options.tree_mode.clone(),
+                   options.initial,
                    &existing_files,
+                   &resolve_cache,
                    quick_check,
+                   context.clone(),
                );

                let mut cnt = count.write().unwrap();

@@ -169,11 +181,7 @@ impl FsStore {
        let existing_files = existing_files.read().unwrap();

        let cleanup_results = existing_files.iter().filter(|f| f.valid).map(|file| {
-           let trans_result = upconnection.transaction::<_, Error, _>(|| {
-               self.file_set_valid(file.id, false)?;
-               upconnection.remove_object(Address::from(file.clone()))?;
-               Ok(())
-           });
+           let trans_result = self.file_set_valid(file.id, false);

            match trans_result {
                Ok(_) => {

@@ -234,13 +242,17 @@ impl FsStore {
        Ok(all_outcomes)
    }

+   #[allow(clippy::too_many_arguments)]
    fn process_directory_entry<D: Borrow<UpEndDatabase>>(
        &self,
        db: D,
-       resolve_cache: &Arc<Mutex<ResolveCache>>,
        path: PathBuf,
+       mode: BlobMode,
+       initial: bool,
        existing_files: &Arc<RwLock<Vec<db::File>>>,
+       resolve_cache: &Arc<Mutex<ResolveCache>>,
        quick_check: bool,
+       context: OperationContext,
    ) -> Result<UpdatePathOutcome> {
        trace!("Processing: {:?}", path);

@@ -311,8 +323,13 @@ impl FsStore {

        if let Some(idx) = maybe_existing_file {
            existing_files_write.swap_remove(idx);
-           trace!("Unchanged: {:?}", path);
-           return Ok(UpdatePathOutcome::Unchanged(path));
+           return if existing_file.valid {
+               trace!("Unchanged: {:?}", path);
+               Ok(UpdatePathOutcome::Unchanged(path))
+           } else {
+               trace!("Re-added: {:?}", path);
+               Ok(UpdatePathOutcome::Added(path.clone()))
+           };
        }
    }
}

@@ -320,21 +337,40 @@ impl FsStore {
            drop(existing_files_read);
        }

-       // If not, add it!
+       // If not, hash it.
        if file_hash.is_none() {
            file_hash = Some(hash_at_path(&path)?);
        }
-       let mime_type = tree_magic_mini::from_filepath(&path).map(|s| s.to_string());
+       let file_hash = file_hash.unwrap();
+
+       let connection: UpEndConnection = db.borrow().connection()?;
+       let file_is_known = !connection
+           .query(
+               format!(
+                   "(matches @{} \"{}\" ?)",
+                   Address::Hash(file_hash.clone()),
+                   ATTR_IN
+               )
+               .parse()?,
+           )?
+           .is_empty();
+
+       let upath = if !file_is_known || initial {
+           self.path_to_upath(&path, mode)?
+       } else {
+           None
+       };

        self.insert_file_with_metadata(
            &db.borrow().connection()?,
-           &normalized_path,
-           file_hash.unwrap(),
+           &path,
+           upath,
+           file_hash,
            None,
            size,
            mtime,
-           mime_type,
            Some(resolve_cache),
+           context,
        )
        .map(|_| {
            info!("Added: {:?}", path);
@ -342,52 +378,58 @@ impl FsStore {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_file(
|
fn path_to_upath(&self, path: &Path, mode: BlobMode) -> Result<Option<UHierPath>> {
|
||||||
&self,
|
match mode {
|
||||||
connection: &UpEndConnection,
|
BlobMode::Flat => {
|
||||||
path: &Path,
|
let normalized_path = self.normalize_path(path).unwrap();
|
||||||
hash: UpMultihash,
|
let dirname = normalized_path.parent().and_then(|p| p.components().last());
|
||||||
name_hint: Option<String>,
|
|
||||||
) -> Result<Address> {
|
|
||||||
let normalized_path = self.normalize_path(path)?;
|
|
||||||
let metadata = fs::metadata(path)?;
|
|
||||||
let size = metadata.len() as i64;
|
|
||||||
let mtime = metadata
|
|
||||||
.modified()
|
|
||||||
.map(|t| {
|
|
||||||
NaiveDateTime::from_timestamp_opt(
|
|
||||||
t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.ok()
|
|
||||||
.flatten();
|
|
||||||
let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
|
|
||||||
|
|
||||||
self.insert_file_with_metadata(
|
let upath = UHierPath(if let Some(dirname) = dirname {
|
||||||
connection,
|
vec![
|
||||||
&normalized_path,
|
"NATIVE".parse().unwrap(),
|
||||||
hash,
|
UNode::from_str(&dirname.as_os_str().to_string_lossy()).unwrap(),
|
||||||
name_hint,
|
]
|
||||||
size,
|
} else {
|
||||||
mtime,
|
vec!["NATIVE".parse().unwrap()]
|
||||||
mime_type,
|
});
|
||||||
None,
|
|
||||||
)
|
Ok(Some(upath))
|
||||||
|
}
|
||||||
|
BlobMode::Mirror => {
|
||||||
|
let normalized_path = self.normalize_path(path).unwrap();
|
||||||
|
let path = normalized_path.parent().unwrap();
|
||||||
|
|
||||||
|
let upath =
|
||||||
|
iter::once("NATIVE".parse().unwrap())
|
||||||
|
.chain(path.iter().map(|component| {
|
||||||
|
UNode::from_str(&component.to_string_lossy()).unwrap()
|
||||||
|
}))
|
||||||
|
.collect::<Vec<UNode>>();
|
||||||
|
|
||||||
|
Ok(Some(UHierPath(upath)))
|
||||||
|
}
|
||||||
|
BlobMode::Incoming(group) => {
|
||||||
|
let upath = UHierPath(vec![group.unwrap_or("INCOMING".to_string()).parse()?]);
|
||||||
|
Ok(Some(upath))
|
||||||
|
}
|
||||||
|
BlobMode::StoreOnly => Ok(None),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn insert_file_with_metadata(
|
fn insert_file_with_metadata(
|
||||||
&self,
|
&self,
|
||||||
connection: &UpEndConnection,
|
connection: &UpEndConnection,
|
||||||
normalized_path: &Path,
|
path: &Path,
|
||||||
|
upath: Option<UHierPath>,
|
||||||
hash: UpMultihash,
|
hash: UpMultihash,
|
||||||
name: Option<String>,
|
name: Option<String>,
|
||||||
size: i64,
|
size: i64,
|
||||||
mtime: Option<NaiveDateTime>,
|
mtime: Option<NaiveDateTime>,
|
||||||
mime_type: Option<String>,
|
|
||||||
resolve_cache: Option<&Arc<Mutex<ResolveCache>>>,
|
resolve_cache: Option<&Arc<Mutex<ResolveCache>>>,
|
||||||
|
context: OperationContext,
|
||||||
) -> Result<Address> {
|
) -> Result<Address> {
|
||||||
|
let normalized_path = self.normalize_path(path)?;
|
||||||
let new_file = db::NewFile {
|
let new_file = db::NewFile {
|
||||||
path: normalized_path
|
path: normalized_path
|
||||||
.to_str()
|
.to_str()
|
||||||
|
@ -404,23 +446,26 @@ impl FsStore {
|
||||||
// Metadata
|
// Metadata
|
||||||
let size_entry = Entry {
|
let size_entry = Entry {
|
||||||
entity: blob_address.clone(),
|
entity: blob_address.clone(),
|
||||||
attribute: FILE_SIZE_KEY.to_string(),
|
attribute: FILE_SIZE_KEY.parse().unwrap(),
|
||||||
value: (size as f64).into(),
|
value: (size as f64).into(),
|
||||||
provenance: "SYSTEM INIT".to_string(),
|
provenance: "SYSTEM INIT".to_string(),
|
||||||
timestamp: chrono::Utc::now().naive_utc(),
|
timestamp: chrono::Utc::now().naive_utc(),
|
||||||
|
user: context.user.clone(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
|
||||||
let mime_entry = mime_type.map(|mime_type| Entry {
|
let mime_entry = mime_type.map(|mime_type| Entry {
|
||||||
entity: blob_address.clone(),
|
entity: blob_address.clone(),
|
||||||
attribute: FILE_MIME_KEY.to_string(),
|
attribute: FILE_MIME_KEY.parse().unwrap(),
|
||||||
value: mime_type.into(),
|
value: mime_type.into(),
|
||||||
provenance: "SYSTEM INIT".to_string(),
|
provenance: "SYSTEM INIT".to_string(),
|
||||||
timestamp: chrono::Utc::now().naive_utc(),
|
timestamp: chrono::Utc::now().naive_utc(),
|
||||||
|
user: context.user.clone(),
|
||||||
});
|
});
|
||||||
|
|
||||||
let added_entry = Entry {
|
let added_entry = Entry {
|
||||||
entity: blob_address.clone(),
|
entity: blob_address.clone(),
|
||||||
attribute: ATTR_ADDED.to_string(),
|
attribute: ATTR_ADDED.parse().unwrap(),
|
||||||
value: (SystemTime::now()
|
value: (SystemTime::now()
|
||||||
.duration_since(UNIX_EPOCH)
|
.duration_since(UNIX_EPOCH)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -428,27 +473,13 @@ impl FsStore {
|
||||||
.into(),
|
.into(),
|
||||||
provenance: "SYSTEM INIT".to_string(),
|
provenance: "SYSTEM INIT".to_string(),
|
||||||
timestamp: chrono::Utc::now().naive_utc(),
|
timestamp: chrono::Utc::now().naive_utc(),
|
||||||
|
user: context.user.clone(),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Add the appropriate entries w/r/t virtual filesystem location
|
|
||||||
let components = normalized_path.components().collect::<Vec<Component>>();
|
let components = normalized_path.components().collect::<Vec<Component>>();
|
||||||
let (filename, dir_path) = components.split_last().unwrap();
|
let filename = components.last().unwrap();
|
||||||
|
|
||||||
let upath = UHierPath(
|
let file_count = self.insert_file_record(new_file)?;
|
||||||
iter::once(UNode::new("NATIVE").unwrap())
|
|
||||||
.chain(dir_path.iter().map(|component| {
|
|
||||||
UNode::new(component.as_os_str().to_string_lossy().to_string()).unwrap()
|
|
||||||
}))
|
|
||||||
.collect(),
|
|
||||||
);
|
|
||||||
let resolved_path = match resolve_cache {
|
|
||||||
Some(cache) => resolve_path_cached(connection, &upath, true, cache)?,
|
|
||||||
None => resolve_path(connection, &upath, true)?,
|
|
||||||
};
|
|
||||||
let parent_dir = resolved_path.last().unwrap();
|
|
||||||
|
|
||||||
// Insert all
|
|
||||||
let file_count = self.insert_file(new_file)?;
|
|
||||||
|
|
||||||
connection.insert_entry_immutable(size_entry)?;
|
connection.insert_entry_immutable(size_entry)?;
|
||||||
if file_count == 1 {
|
if file_count == 1 {
|
||||||
|
@@ -458,39 +489,52 @@ impl FsStore {
             connection.insert_entry(mime_entry)?;
         }

-        let dir_has_entry = Entry {
-            entity: blob_address.clone(),
-            attribute: ATTR_IN.to_string(),
-            value: parent_dir.clone().into(),
-            provenance: "SYSTEM INIT".to_string(),
-            timestamp: chrono::Utc::now().naive_utc(),
-        };
-        let dir_has_entry_addr = connection.insert_entry(dir_has_entry)?;
-
         let label_entry = Entry {
             entity: blob_address.clone(),
-            attribute: ATTR_LABEL.to_string(),
+            attribute: ATTR_LABEL.parse().unwrap(),
             value: name
                 .unwrap_or_else(|| filename.as_os_str().to_string_lossy().to_string())
                 .into(),
             provenance: "SYSTEM INIT".to_string(),
             timestamp: chrono::Utc::now().naive_utc(),
+            user: context.user.clone(),
         };
         let label_entry_addr = connection.insert_entry(label_entry)?;

-        let alias_entry = Entry {
-            entity: dir_has_entry_addr,
-            attribute: ATTR_BY.to_string(),
-            value: label_entry_addr.into(),
-            provenance: "SYSTEM INIT".to_string(),
-            timestamp: chrono::Utc::now().naive_utc(),
-        };
-        connection.insert_entry(alias_entry)?;
+        if let Some(upath) = upath {
+            let resolved_path = match resolve_cache {
+                Some(cache) => {
+                    resolve_path_cached(connection, &upath, true, context.clone(), cache)?
+                }
+                None => resolve_path(connection, &upath, true, context.clone())?,
+            };
+            let parent_dir = resolved_path.last().unwrap();
+
+            let dir_has_entry = Entry {
+                entity: blob_address.clone(),
+                attribute: ATTR_IN.parse().unwrap(),
+                value: parent_dir.clone().into(),
+                provenance: "SYSTEM INIT".to_string(),
+                timestamp: chrono::Utc::now().naive_utc(),
+                user: context.user.clone(),
+            };
+            let dir_has_entry_addr = connection.insert_entry(dir_has_entry)?;
+
+            let alias_entry = Entry {
+                entity: dir_has_entry_addr,
+                attribute: ATTR_BY.parse().unwrap(),
+                value: label_entry_addr.into(),
+                provenance: "SYSTEM INIT".to_string(),
+                timestamp: chrono::Utc::now().naive_utc(),
+                user: context.user.clone(),
+            };
+            connection.insert_entry(alias_entry)?;
+        }

         Ok(blob_address)
     }

-    pub fn insert_file(&self, file: db::NewFile) -> Result<u32> {
+    fn insert_file_record(&self, file: db::NewFile) -> Result<u32> {
         trace!(
             "Inserting {} ({})...",
             &file.path,
@@ -617,9 +661,11 @@ impl UpStore for FsStore {

     fn store(
         &self,
-        connection: UpEndConnection,
+        connection: &UpEndConnection,
         blob: Blob,
         name_hint: Option<String>,
+        blob_mode: Option<BlobMode>,
+        context: OperationContext,
     ) -> Result<UpMultihash, super::StoreError> {
         let file_path = blob.get_file_path();
         let hash = hash_at_path(file_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
@@ -641,11 +687,41 @@ impl UpStore for FsStore {
             };

             let final_path = self.path.join(final_name);

             fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;

-            self.add_file(&connection, &final_path, hash.clone(), name_hint)
-                .map_err(|e| StoreError::Unknown(e.to_string()))?;
+            let upath = if let Some(bm) = blob_mode {
+                self.path_to_upath(&final_path, bm)
+                    .map_err(|e| StoreError::Unknown(e.to_string()))?
+            } else {
+                None
+            };
+
+            let metadata =
+                fs::metadata(&final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
+            let size = metadata.len() as i64;
+            let mtime = metadata
+                .modified()
+                .map(|t| {
+                    NaiveDateTime::from_timestamp_opt(
+                        t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
+                        0,
+                    )
+                })
+                .ok()
+                .flatten();
+
+            self.insert_file_with_metadata(
+                connection,
+                &final_path,
+                upath,
+                hash.clone(),
+                name_hint,
+                size,
+                mtime,
+                None,
+                context,
+            )
+            .map_err(|e| StoreError::Unknown(e.to_string()))?;
         }

         Ok(hash)
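Note: a minimal sketch of what a call site looks like against the revised `store` signature above; `store`, `connection`, and `blob` are assumed to already exist in the caller, and the name hint is purely illustrative.

    // Sketch only, not part of the diff: calling UpStore::store after this change.
    let hash = store.store(
        &connection,                     // now borrowed rather than consumed
        blob,
        Some("example.txt".to_string()), // optional name hint (illustrative)
        Some(BlobMode::default()),       // how the blob is placed in the virtual tree
        OperationContext::default(),
    )?;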
@@ -655,18 +731,19 @@ impl UpStore for FsStore {
         &self,
         db: &UpEndDatabase,
         mut job_container: JobContainer,
-        initial: bool,
+        options: UpdateOptions,
+        context: OperationContext,
     ) -> Result<Vec<UpdatePathOutcome>, StoreError> {
         trace!(
-            "Running a vault update of {:?}, initial = {}.",
+            "Running a vault update of {:?}, options = {:?}.",
             self.path,
-            initial
+            options
         );
         let job_result = job_container.add_job("REIMPORT", "Scaning vault directory...");

         match job_result {
             Ok(job_handle) => {
-                let result = self.rescan_vault(db, job_handle, !initial, initial);
+                let result = self.rescan_vault(db, job_handle, options, context);

                 if let Err(err) = &result {
                     error!("Update did not succeed! {:?}", err);
@@ -769,7 +846,15 @@ mod test {
         let job_container = JobContainer::new();

         // Store scan
-        let rescan_result = store.update(&open_result.db, job_container, false);
+        let rescan_result = store.update(
+            &open_result.db,
+            job_container,
+            UpdateOptions {
+                initial: true,
+                tree_mode: BlobMode::default(),
+            },
+            OperationContext::default(),
+        );
         assert!(rescan_result.is_ok());
     }

@@ -808,7 +893,15 @@ mod test {

         // Initial scan
         let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
-        let rescan_result = store.rescan_vault(&open_result.db, job, quick, true);
+        let rescan_result = store.rescan_vault(
+            &open_result.db,
+            job,
+            UpdateOptions {
+                initial: quick,
+                tree_mode: BlobMode::default(),
+            },
+            OperationContext::default(),
+        );

         assert!(rescan_result.is_ok());
         let rescan_result = rescan_result.unwrap();
@@ -821,7 +914,15 @@ mod test {

         // Modification-less rescan
         let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
-        let rescan_result = store.rescan_vault(&open_result.db, job, quick, false);
+        let rescan_result = store.rescan_vault(
+            &open_result.db,
+            job,
+            UpdateOptions {
+                initial: quick,
+                tree_mode: BlobMode::default(),
+            },
+            OperationContext::default(),
+        );

         assert!(rescan_result.is_ok());
         let rescan_result = rescan_result.unwrap();
@@ -837,7 +938,15 @@ mod test {
         std::fs::remove_file(temp_dir_path.join("hello-world.txt")).unwrap();

         let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
-        let rescan_result = store.rescan_vault(&open_result.db, job, quick, false);
+        let rescan_result = store.rescan_vault(
+            &open_result.db,
+            job,
+            UpdateOptions {
+                initial: quick,
+                tree_mode: BlobMode::default(),
+            },
+            OperationContext::default(),
+        );

         assert!(rescan_result.is_ok());
         let rescan_result = rescan_result.unwrap();
@ -863,5 +972,206 @@ mod test {
|
||||||
.filter(|upo| matches!(upo, UpdatePathOutcome::Removed(_)))
|
.filter(|upo| matches!(upo, UpdatePathOutcome::Removed(_)))
|
||||||
.count()
|
.count()
|
||||||
);
|
);
|
||||||
|
assert!(store
|
||||||
|
.retrieve_all_files()
|
||||||
|
.unwrap()
|
||||||
|
.iter()
|
||||||
|
.filter(|f| f.path == "hello-world.txt")
|
||||||
|
.all(|f| !f.valid));
|
||||||
|
assert!(store
|
||||||
|
.retrieve_all_files()
|
||||||
|
.unwrap()
|
||||||
|
.iter()
|
||||||
|
.filter(|f| f.path == "hello-world.txt")
|
||||||
|
.all(|f| !f.valid));
|
||||||
|
|
||||||
|
// Re-add the file
|
||||||
|
let file_path = temp_dir_path.join("hello-world.txt");
|
||||||
|
let mut tmp_file = File::create(file_path).unwrap();
|
||||||
|
writeln!(tmp_file, "Hello, World!").unwrap();
|
||||||
|
|
||||||
|
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
|
||||||
|
let rescan_result = store.rescan_vault(
|
||||||
|
&open_result.db,
|
||||||
|
job,
|
||||||
|
UpdateOptions {
|
||||||
|
initial: quick,
|
||||||
|
tree_mode: BlobMode::default(),
|
||||||
|
},
|
||||||
|
OperationContext::default(),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert!(rescan_result.is_ok());
|
||||||
|
let rescan_result = rescan_result.unwrap();
|
||||||
|
assert_eq!(rescan_result.len(), 3);
|
||||||
|
assert_eq!(
|
||||||
|
1,
|
||||||
|
rescan_result
|
||||||
|
.iter()
|
||||||
|
.filter(|upo| matches!(upo, UpdatePathOutcome::Unchanged(_)))
|
||||||
|
.count()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
1,
|
||||||
|
rescan_result
|
||||||
|
.iter()
|
||||||
|
.filter(|upo| matches!(upo, UpdatePathOutcome::Skipped(_)))
|
||||||
|
.count()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
1,
|
||||||
|
rescan_result
|
||||||
|
.iter()
|
||||||
|
.filter(|upo| matches!(upo, UpdatePathOutcome::Added(_)))
|
||||||
|
.count()
|
||||||
|
);
|
||||||
|
assert!(store
|
||||||
|
.retrieve_all_files()
|
||||||
|
.unwrap()
|
||||||
|
.iter()
|
||||||
|
.filter(|f| f.path == "hello-world.txt")
|
||||||
|
.all(|f| f.valid));
|
||||||
|
assert!(store
|
||||||
|
.retrieve_all_files()
|
||||||
|
.unwrap()
|
||||||
|
.iter()
|
||||||
|
.filter(|f| f.path == "hello-world.txt")
|
||||||
|
.all(|f| f.valid));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Prepare a temporary filesystem structure for testing
|
||||||
|
/// Returns the database connection
|
||||||
|
/// The structure is as follows:
|
||||||
|
/// ```text
|
||||||
|
/// NATIVE
|
||||||
|
/// ├── nested_directory
|
||||||
|
/// │ ├── nested_two
|
||||||
|
/// │ │ └── nested_three
|
||||||
|
/// │ │ │ └── foo.txt
|
||||||
|
/// │ │ └── nested_four
|
||||||
|
/// │ │ └── baz.txt
|
||||||
|
/// │ └── nested_three
|
||||||
|
/// │ └── bar.txt
|
||||||
|
/// └── in_root.txt
|
||||||
|
/// ```
|
||||||
|
fn _prepare_hier_vault(tree_mode: BlobMode) -> (UpEndConnection, TempDir) {
|
||||||
|
// Prepare temporary filesystem structure
|
||||||
|
let temp_dir = TempDir::new().unwrap();
|
||||||
|
let temp_dir_path = temp_dir.path().canonicalize().unwrap();
|
||||||
|
|
||||||
|
let nested_directory_path = temp_dir_path.join("nested_directory");
|
||||||
|
fs::create_dir(&nested_directory_path).unwrap();
|
||||||
|
let nested_two_path = nested_directory_path.join("nested_two");
|
||||||
|
fs::create_dir(&nested_two_path).unwrap();
|
||||||
|
let nested_three_first_path = nested_directory_path.join("nested_three");
|
||||||
|
fs::create_dir(&nested_three_first_path).unwrap();
|
||||||
|
let nested_three_second_path = nested_two_path.join("nested_three");
|
||||||
|
fs::create_dir(&nested_three_second_path).unwrap();
|
||||||
|
let nested_four_path = nested_two_path.join("nested_four");
|
||||||
|
fs::create_dir(&nested_four_path).unwrap();
|
||||||
|
|
||||||
|
let file_path = nested_three_second_path.join("foo.txt");
|
||||||
|
let mut tmp_file = File::create(file_path).unwrap();
|
||||||
|
writeln!(tmp_file, "Hello, World! I'm foo, and deep.").unwrap();
|
||||||
|
|
||||||
|
let file_path = nested_three_first_path.join("bar.txt");
|
||||||
|
let mut tmp_file = File::create(file_path).unwrap();
|
||||||
|
writeln!(tmp_file, "Hello, World! I'm bar, and shallower.").unwrap();
|
||||||
|
|
||||||
|
let file_path = nested_four_path.join("baz.txt");
|
||||||
|
let mut tmp_file = File::create(file_path).unwrap();
|
||||||
|
writeln!(tmp_file, "Hello, World! I'm baz.").unwrap();
|
||||||
|
|
||||||
|
let file_path = temp_dir_path.join("in_root.txt");
|
||||||
|
let mut tmp_file = File::create(file_path).unwrap();
|
||||||
|
writeln!(tmp_file, "Hello, World! I'm in root.").unwrap();
|
||||||
|
|
||||||
|
// Initialize database
|
||||||
|
let open_result = UpEndDatabase::open(&temp_dir, true).unwrap();
|
||||||
|
let store = FsStore::from_path(&temp_dir).unwrap();
|
||||||
|
let mut job_container = JobContainer::new();
|
||||||
|
|
||||||
|
// Initial scan
|
||||||
|
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
|
||||||
|
store
|
||||||
|
.rescan_vault(
|
||||||
|
&open_result.db,
|
||||||
|
job,
|
||||||
|
UpdateOptions {
|
||||||
|
initial: true,
|
||||||
|
tree_mode,
|
||||||
|
},
|
||||||
|
OperationContext::default(),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
(open_result.db.connection().unwrap(), temp_dir)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn assert_paths(paths: Vec<&str>, connection: &UpEndConnection) {
|
||||||
|
paths.iter().for_each(|path| {
|
||||||
|
let upath: UHierPath = path.parse().unwrap();
|
||||||
|
assert!(
|
||||||
|
resolve_path(&connection, &upath, false, OperationContext::default()).is_ok(),
|
||||||
|
"Failed: {}",
|
||||||
|
upath
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_initial_scan(mode: BlobMode, expected_paths: Vec<&str>) {
|
||||||
|
let (connection, _vault_dir) = _prepare_hier_vault(mode);
|
||||||
|
assert_paths(expected_paths, &connection);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mirror_mode() {
|
||||||
|
test_initial_scan(
|
||||||
|
BlobMode::Mirror,
|
||||||
|
vec![
|
||||||
|
"NATIVE",
|
||||||
|
"NATIVE/nested_directory/nested_two/nested_three/foo.txt",
|
||||||
|
"NATIVE/nested_directory/nested_two/nested_four/baz.txt",
|
||||||
|
"NATIVE/nested_directory/nested_three/bar.txt",
|
||||||
|
"NATIVE/in_root.txt",
|
||||||
|
],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_flat_mode() {
|
||||||
|
test_initial_scan(
|
||||||
|
BlobMode::Flat,
|
||||||
|
vec![
|
||||||
|
"NATIVE",
|
||||||
|
"NATIVE/nested_three/foo.txt",
|
||||||
|
"NATIVE/nested_four/baz.txt",
|
||||||
|
"NATIVE/nested_three/bar.txt",
|
||||||
|
"NATIVE/in_root.txt",
|
||||||
|
],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_incoming_mode() {
|
||||||
|
test_initial_scan(
|
||||||
|
BlobMode::Incoming(None),
|
||||||
|
vec![
|
||||||
|
"INCOMING/foo.txt",
|
||||||
|
"INCOMING/baz.txt",
|
||||||
|
"INCOMING/bar.txt",
|
||||||
|
"INCOMING/in_root.txt",
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
test_initial_scan(
|
||||||
|
BlobMode::Incoming(Some("new files".to_string())),
|
||||||
|
vec![
|
||||||
|
"new files/foo.txt",
|
||||||
|
"new files/baz.txt",
|
||||||
|
"new files/bar.txt",
|
||||||
|
"new files/in_root.txt",
|
||||||
|
],
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,7 +1,8 @@
 use std::path::{Path, PathBuf};

 use super::{UpEndConnection, UpEndDatabase};
-use crate::jobs::JobContainer;
+use crate::OperationContext;
+use crate::{jobs::JobContainer, BlobMode};
 use upend_base::hash::UpMultihash;

 pub mod fs;
@@ -57,15 +58,24 @@ pub trait UpStore {
     fn retrieve_all(&self) -> Result<Vec<Blob>>;
     fn store(
         &self,
-        connection: UpEndConnection,
+        connection: &UpEndConnection,
         blob: Blob,
         name_hint: Option<String>,
+        blob_mode: Option<BlobMode>,
+        context: OperationContext,
     ) -> Result<UpMultihash>;
     fn update(
         &self,
         database: &UpEndDatabase,
         job_container: JobContainer,
-        initial: bool,
+        options: UpdateOptions,
+        context: OperationContext,
     ) -> Result<Vec<UpdatePathOutcome>>;
     fn stats(&self) -> Result<serde_json::Value>;
 }
+
+#[derive(Debug, Clone)]
+pub struct UpdateOptions {
+    pub initial: bool,
+    pub tree_mode: BlobMode,
+}
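Note: a minimal sketch of how a caller drives a vault rescan through the revised `UpStore::update` signature, mirroring the pattern used in the tests earlier in this diff; `store`, `db`, and `job_container` are assumed from the surrounding context.

    // Sketch only, not part of the diff: the new UpdateOptions-based call.
    let outcomes = store.update(
        &db,
        job_container,
        UpdateOptions {
            initial: true,                  // treat this as a first-time scan
            tree_mode: BlobMode::default(), // how on-disk paths map into the hierarchy
        },
        OperationContext::default(),
    )?;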
@@ -0,0 +1 @@
+target
@ -0,0 +1,299 @@
|
||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyhow"
|
||||||
|
version = "1.0.75"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "autocfg"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "base64"
|
||||||
|
version = "0.21.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "byteorder"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "const_format"
|
||||||
|
version = "0.2.32"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673"
|
||||||
|
dependencies = [
|
||||||
|
"const_format_proc_macros",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "const_format_proc_macros"
|
||||||
|
version = "0.2.32"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"unicode-xid",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "deranged"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
|
||||||
|
dependencies = [
|
||||||
|
"powerfmt",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "extism-manifest"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "22b0e600ec289630715ffdc11aca36a26297c3ab7908f14d5bbf3770d102bce7"
|
||||||
|
dependencies = [
|
||||||
|
"base64",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "extism-pdk"
|
||||||
|
version = "0.3.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "09c20fe9cafa572607e22192bf2040849e7456664895bdc589c89387876e2067"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"base64",
|
||||||
|
"extism-manifest",
|
||||||
|
"extism-pdk-derive",
|
||||||
|
"rmp-serde",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "extism-pdk-derive"
|
||||||
|
version = "0.3.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d2be216330f7304de051e0faf1578880e9e0dc1ecbd2c0fea5765c63a079d0ba"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "is_debug"
|
||||||
|
version = "1.0.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "06d198e9919d9822d5f7083ba8530e04de87841eaf21ead9af8f2304efd57c89"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itoa"
|
||||||
|
version = "1.0.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.150"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num-traits"
|
||||||
|
version = "0.2.17"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
|
||||||
|
dependencies = [
|
||||||
|
"autocfg",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num_threads"
|
||||||
|
version = "0.1.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "paste"
|
||||||
|
version = "1.0.14"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "powerfmt"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc-macro2"
|
||||||
|
version = "1.0.69"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "quote"
|
||||||
|
version = "1.0.33"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rmp"
|
||||||
|
version = "0.8.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"num-traits",
|
||||||
|
"paste",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rmp-serde"
|
||||||
|
version = "1.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"rmp",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ryu"
|
||||||
|
version = "1.0.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde"
|
||||||
|
version = "1.0.192"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
|
||||||
|
dependencies = [
|
||||||
|
"serde_derive",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_derive"
|
||||||
|
version = "1.0.192"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_json"
|
||||||
|
version = "1.0.108"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
|
||||||
|
dependencies = [
|
||||||
|
"itoa",
|
||||||
|
"ryu",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "shadow-rs"
|
||||||
|
version = "0.23.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "970538704756fd0bb4ec8cb89f80674afb661e7c0fe716f9ba5be57717742300"
|
||||||
|
dependencies = [
|
||||||
|
"const_format",
|
||||||
|
"is_debug",
|
||||||
|
"time",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "2.0.39"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time"
|
||||||
|
version = "0.3.30"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
|
||||||
|
dependencies = [
|
||||||
|
"deranged",
|
||||||
|
"itoa",
|
||||||
|
"libc",
|
||||||
|
"num_threads",
|
||||||
|
"powerfmt",
|
||||||
|
"serde",
|
||||||
|
"time-core",
|
||||||
|
"time-macros",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time-core"
|
||||||
|
version = "0.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time-macros"
|
||||||
|
version = "0.2.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
|
||||||
|
dependencies = [
|
||||||
|
"time-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-ident"
|
||||||
|
version = "1.0.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-xid"
|
||||||
|
version = "0.2.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "upend-extension-base"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "upend-plugin-dummy"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"extism-pdk",
|
||||||
|
"serde",
|
||||||
|
"shadow-rs",
|
||||||
|
"upend-extension-base",
|
||||||
|
]
|
|
@@ -0,0 +1,3 @@
+[workspace]
+members = ["base", "dummy"]
+resolver = "2"
@@ -0,0 +1,10 @@
+[package]
+name = "upend-extension-base"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+serde = { version = "1.0", features = ["derive"] }
@@ -0,0 +1,14 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PluginInfo {
+    pub name: String,
+    pub version: String,
+    pub r#type: PluginType,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum PluginType {
+    Extractor,
+    Transformer,
+}
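Note: a minimal sketch of the wire format the serde derives above produce for `PluginInfo`; `serde_json` is assumed here purely for illustration and is not a declared dependency of this crate.

    // Sketch only, not part of the diff.
    use upend_extension_base::{PluginInfo, PluginType};

    fn main() {
        let info = PluginInfo {
            name: "Text".to_string(),
            version: "0.1.0".to_string(),
            r#type: PluginType::Transformer,
        };
        // Prints: {"name":"Text","version":"0.1.0","type":"Transformer"}
        println!("{}", serde_json::to_string(&info).unwrap());
    }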
@@ -0,0 +1,18 @@
+[package]
+edition = "2021"
+name = "upend-plugin-dummy"
+version = "0.0.0"
+build = "build.rs"
+
+[lib]
+path = "src/lib.rs"
+crate_type = ["cdylib"]
+
+[dependencies]
+extism-pdk = "1.1.0"
+serde = "1.0.181"
+upend-extension-base = { path = "../base", version = "0.1.0" }
+shadow-rs = { version = "0.23", default-features = false }
+
+[build-dependencies]
+shadow-rs = { version = "0.23", default-features = false }
@@ -0,0 +1,3 @@
+fn main() -> shadow_rs::SdResult<()> {
+    shadow_rs::new()
+}
@@ -0,0 +1,14 @@
+use extism_pdk::*;
+use shadow_rs::shadow;
+use upend_extension_base::{PluginInfo, PluginType};
+
+shadow!(build);
+
+#[plugin_fn]
+pub fn info(_arg: ()) -> FnResult<Json<PluginInfo>> {
+    Ok(Json(PluginInfo {
+        name: "Text".to_string(),
+        version: build::PKG_VERSION.to_string(),
+        r#type: PluginType::Transformer,
+    }))
+}
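Note: a hypothetical sketch of how a second exported entry point could follow the same `extism-pdk` pattern as `info` above; the `transform` function and its behaviour are illustrative assumptions, not part of this diff.

    // Hypothetical sketch only.
    use extism_pdk::*;

    #[plugin_fn]
    pub fn transform(input: String) -> FnResult<String> {
        // A dummy "transformer": just upper-case whatever the host passes in.
        Ok(input.to_uppercase())
    }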
@@ -0,0 +1,3 @@
+node_modules
+/dist
+tests
@@ -0,0 +1,20 @@
+{
+  "ignorePatterns": ["**/*.js"],
+  "env": {
+    "browser": true,
+    "es2021": true
+  },
+  "extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
+  "parser": "@typescript-eslint/parser",
+  "parserOptions": {
+    "ecmaVersion": "latest",
+    "sourceType": "module",
+    "project": "./tsconfig.json"
+  },
+  "plugins": ["@typescript-eslint", "ava"],
+  "rules": {
+    "@typescript-eslint/consistent-type-exports": "error",
+    "@typescript-eslint/consistent-type-imports": "error",
+    "ava/assertion-arguments": "error"
+  }
+}
@@ -0,0 +1,2 @@
+node_modules
+dist
@@ -0,0 +1,5 @@
+/** @type {import('ts-jest').JestConfigWithTsJest} */
+module.exports = {
+  preset: 'ts-jest',
+  testEnvironment: 'node',
+};
@@ -0,0 +1,40 @@
+{
+  "name": "@upnd/upend",
+  "version": "0.5.4",
+  "description": "Client library to interact with the UpEnd system.",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "files": [
+    "dist/**/*"
+  ],
+  "exports": {
+    ".": "./dist/index.js",
+    "./*": "./dist/*.js",
+    "./wasm": "./dist/wasm/index.js",
+    "./wasm/*": "./dist/wasm/*.js"
+  },
+  "scripts": {
+    "build": "tsc --build --verbose",
+    "test": "jest",
+    "lint": "eslint ."
+  },
+  "author": "Tomáš Mládek <t@mldk.cz>",
+  "license": "AGPL-3.0",
+  "devDependencies": {
+    "@types/debug": "^4.1.8",
+    "@types/jest": "^29.5.12",
+    "@typescript-eslint/eslint-plugin": "latest",
+    "@typescript-eslint/parser": "latest",
+    "eslint": "^8.7.0",
+    "eslint-plugin-ava": "^14.0.0",
+    "jest": "^29.7.0",
+    "ts-jest": "^29.1.2",
+    "typescript": "^4.4.4"
+  },
+  "dependencies": {
+    "@upnd/wasm-node": "^0.1.0",
+    "@upnd/wasm-web": "^0.1.0",
+    "debug": "^4.3.4",
+    "lru-cache": "^7.0.0"
+  }
+}
File diff suppressed because it is too large
@ -0,0 +1,542 @@
|
||||||
|
import LRU from "lru-cache";
|
||||||
|
import type { Query, UpObject } from "./index";
|
||||||
|
import { UpListing } from "./index";
|
||||||
|
import type {
|
||||||
|
Address,
|
||||||
|
ADDRESS_TYPE,
|
||||||
|
AttributeListingResult,
|
||||||
|
EntityListing,
|
||||||
|
IJob,
|
||||||
|
IValue,
|
||||||
|
ListingResult,
|
||||||
|
PutInput,
|
||||||
|
PutResult,
|
||||||
|
StoreInfo,
|
||||||
|
VaultInfo,
|
||||||
|
} from "./types";
|
||||||
|
import type { AddressComponents, UpEndWasmExtensions } from "./wasm";
|
||||||
|
import debug from "debug";
|
||||||
|
import { browser } from "./util";
|
||||||
|
|
||||||
|
const dbg = debug("upend:api");
|
||||||
|
|
||||||
|
export type { AddressComponents };
|
||||||
|
|
||||||
|
export type UpendApiError = {
|
||||||
|
kind: "Unauthorized" | "HttpError" | "FetchError" | "Unknown";
|
||||||
|
message?: string;
|
||||||
|
error?: Error;
|
||||||
|
};
|
||||||
|
|
||||||
|
export class UpEndApi {
|
||||||
|
private instanceUrl = "";
|
||||||
|
private readonly wasmExtensions: UpEndWasmExtensions | undefined = undefined;
|
||||||
|
public readonly timeout: number;
|
||||||
|
|
||||||
|
private queryOnceLRU = new LRU<string, UpListing>({ max: 128 });
|
||||||
|
private inFlightRequests: { [key: string]: Promise<UpListing> | null } = {};
|
||||||
|
private key: string | undefined;
|
||||||
|
private readonly onError: ((error: UpendApiError) => void) | undefined;
|
||||||
|
|
||||||
|
constructor(config?: {
|
||||||
|
instanceUrl?: string;
|
||||||
|
wasmExtensions?: UpEndWasmExtensions;
|
||||||
|
timeout?: number;
|
||||||
|
authKey?: string;
|
||||||
|
onError?: (error: UpendApiError) => void;
|
||||||
|
}) {
|
||||||
|
this.setInstanceUrl(config?.instanceUrl || "http://localhost:8093");
|
||||||
|
this.wasmExtensions = config?.wasmExtensions;
|
||||||
|
this.timeout = config?.timeout || 30_000;
|
||||||
|
this.key = config?.authKey;
|
||||||
|
this.onError = config?.onError;
|
||||||
|
}
|
||||||
|
|
||||||
|
public setInstanceUrl(apiUrl: string) {
|
||||||
|
this.instanceUrl = apiUrl.replace(/\/+$/g, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
public get apiUrl() {
|
||||||
|
return this.instanceUrl + "/api";
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchEntity(
|
||||||
|
address: string,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<UpObject> {
|
||||||
|
dbg("Fetching Entity %s", address);
|
||||||
|
const entityFetch = await this.fetch(
|
||||||
|
`${this.apiUrl}/obj/${address}`,
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
const entityResult = (await entityFetch.json()) as EntityListing;
|
||||||
|
const entityListing = new UpListing(entityResult.entries);
|
||||||
|
return entityListing.getObject(address);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchEntry(address: string, options?: ApiFetchOptions) {
|
||||||
|
dbg("Fetching entry %s", address);
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/raw/${address}`, options);
|
||||||
|
const data = await response.json();
|
||||||
|
const listing = new UpListing({ address: data });
|
||||||
|
return listing.entries[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
public async query(
|
||||||
|
query: string | Query,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<UpListing> {
|
||||||
|
const queryStr = query.toString();
|
||||||
|
|
||||||
|
const cacheResult = this.queryOnceLRU.get(queryStr);
|
||||||
|
if (!cacheResult) {
|
||||||
|
if (!this.inFlightRequests[queryStr]) {
|
||||||
|
dbg(`Querying: ${query}`);
|
||||||
|
this.inFlightRequests[queryStr] = new Promise((resolve, reject) => {
|
||||||
|
this.fetch(`${this.apiUrl}/query`, options, {
|
||||||
|
method: "POST",
|
||||||
|
body: queryStr,
|
||||||
|
keepalive: true,
|
||||||
|
})
|
||||||
|
.then(async (response) => {
|
||||||
|
if (!response.ok) {
|
||||||
|
reject(
|
||||||
|
`Query ${queryStr} failed: ${response.status} ${
|
||||||
|
response.statusText
|
||||||
|
}: ${await response.text()}}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
resolve(new UpListing(await response.json()));
|
||||||
|
this.inFlightRequests[queryStr] = null;
|
||||||
|
})
|
||||||
|
.catch((err) => reject(err));
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
dbg(`Chaining request for ${queryStr}...`);
|
||||||
|
}
|
||||||
|
return await (this.inFlightRequests[queryStr] as Promise<UpListing>); // TODO?
|
||||||
|
} else {
|
||||||
|
dbg(`Returning cached: ${queryStr}`);
|
||||||
|
return cacheResult;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async putEntry(
|
||||||
|
input: PutInput,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<PutResult> {
|
||||||
|
dbg("Putting %O", input);
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/obj`, options, {
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
method: "PUT",
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
});
|
||||||
|
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async putEntityAttribute(
|
||||||
|
entity: Address,
|
||||||
|
attribute: string,
|
||||||
|
value: IValue,
|
||||||
|
provenance?: string,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<Address> {
|
||||||
|
dbg("Putting %s = %o for %s (%s)", attribute, value, entity, provenance);
|
||||||
|
let url = `${this.apiUrl}/obj/${entity}/${attribute}`;
|
||||||
|
if (provenance) {
|
||||||
|
url += `?provenance=${provenance}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await this.fetch(url, options, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify(value),
|
||||||
|
});
|
||||||
|
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async putBlob(
|
||||||
|
fileOrUrl: File | URL,
|
||||||
|
options?: ApiFetchOptions & { onProgress?: (ev: ProgressEvent) => void },
|
||||||
|
): Promise<Address> {
|
||||||
|
dbg("Putting Blob: %O", fileOrUrl);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
if (fileOrUrl instanceof File) {
|
||||||
|
formData.append(fileOrUrl.name, fileOrUrl);
|
||||||
|
} else {
|
||||||
|
formData.append("@url", fileOrUrl.toString());
|
||||||
|
}
|
||||||
|
const signal = this.getAbortSignal(options);
|
||||||
|
|
||||||
|
if (browser && fileOrUrl instanceof File) {
|
||||||
|
dbg("Using XHR for file upload");
|
||||||
|
const xhrdbg = debug("upend:api:xhr");
|
||||||
|
const xhr = new XMLHttpRequest();
|
||||||
|
signal.addEventListener("abort", () => xhr.abort());
|
||||||
|
for (const event of [
|
||||||
|
"loadstart",
|
||||||
|
"load",
|
||||||
|
"loadend",
|
||||||
|
"progress",
|
||||||
|
"abort",
|
||||||
|
"error",
|
||||||
|
] as const) {
|
||||||
|
xhr.addEventListener(event, (ev) => xhrdbg(`XHR ${event}: %O`, ev));
|
||||||
|
xhr.upload.addEventListener(event, (ev) =>
|
||||||
|
xhrdbg(`XHR upload ${event}: %O`, ev),
|
||||||
|
);
|
||||||
|
if (options?.onProgress) {
|
||||||
|
xhr.upload.addEventListener(event, options.onProgress);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
xhr.open("PUT", `${this.apiUrl}/blob`, true);
|
||||||
|
xhr.onload = () => {
|
||||||
|
if (xhr.status >= 200 && xhr.status < 300) {
|
||||||
|
try {
|
||||||
|
resolve(JSON.parse(xhr.responseText));
|
||||||
|
} catch (e) {
|
||||||
|
reject(e);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
reject(xhr.statusText);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
xhr.send(formData);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/blob`, options, {
|
||||||
|
method: "PUT",
|
||||||
|
body: formData,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw Error(await response.text());
|
||||||
|
}
|
||||||
|
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async deleteEntry(
|
||||||
|
address: Address,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<void> {
|
||||||
|
dbg("Deleting entry %s", address);
|
||||||
|
await this.fetch(`${this.apiUrl}/obj/${address}`, options, {
|
||||||
|
method: "DELETE",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public getRaw(
|
||||||
|
address: Address,
|
||||||
|
config?: { preview?: boolean; authenticated?: boolean },
|
||||||
|
) {
|
||||||
|
let result = `${this.apiUrl}/${config?.preview ? "thumb" : "raw"}/${address}`;
|
||||||
|
if (config?.authenticated) {
|
||||||
|
result += `?auth_key=${this.key}`;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchRaw(
|
||||||
|
address: Address,
|
||||||
|
preview = false,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
) {
|
||||||
|
dbg("Getting %s raw (preview = %s)", address, preview);
|
||||||
|
return await this.fetch(this.getRaw(address, { preview }), options);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async refreshVault(options?: ApiFetchOptions) {
|
||||||
|
dbg("Triggering vault refresh");
|
||||||
|
return await this.fetch(`${this.apiUrl}/refresh`, options, {
|
||||||
|
method: "POST",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async nativeOpen(address: Address, options?: ApiFetchOptions) {
|
||||||
|
dbg("Opening %s natively", address);
|
||||||
|
return this.fetch(`${this.apiUrl}/raw/${address}?native=1`, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchRoots(options?: ApiFetchOptions): Promise<ListingResult> {
|
||||||
|
dbg("Fetching hierarchical roots...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/hier_roots`, options);
|
||||||
|
const roots = await response.json();
|
||||||
|
dbg("Hierarchical roots: %O", roots);
|
||||||
|
return roots;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchJobs(options?: ApiFetchOptions): Promise<IJob[]> {
|
||||||
|
// dbg("Fetching jobs...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/jobs`, options);
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchAllAttributes(
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<AttributeListingResult> {
|
||||||
|
dbg("Fetching all attributes...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/all/attributes`, options);
|
||||||
|
const result = await response.json();
|
||||||
|
dbg("All attributes: %O", result);
|
||||||
|
return await result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchInfo(options?: ApiFetchOptions): Promise<VaultInfo> {
|
||||||
|
dbg("Fetching vault info...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/info`, options);
|
||||||
|
const result = await response.json();
|
||||||
|
dbg("Vault info: %O", result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchOptions(options?: ApiFetchOptions): Promise<VaultOptions> {
|
||||||
|
dbg("Fetching vault options...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/options`, options);
|
||||||
|
const result = await response.json();
|
||||||
|
dbg("Vault options: %O", result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fetchStoreInfo(
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<{ [key: string]: StoreInfo }> {
|
||||||
|
dbg("Fetching store info...");
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/stats/store`, options);
|
||||||
|
const result = await response.json();
|
||||||
|
dbg("Store info: %O");
|
||||||
|
return await result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getAddress(
|
||||||
|
input: { urlContent: string } | ADDRESS_TYPE,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<string> {
|
||||||
|
let response: Response;
|
||||||
|
if (typeof input === "string") {
|
||||||
|
if (this.wasmExtensions) {
|
||||||
|
await this.wasmExtensions.init();
|
||||||
|
return this.wasmExtensions.AddressTypeConstants[input];
|
||||||
|
}
|
||||||
|
response = await this.fetch(
|
||||||
|
`${this.apiUrl}/address?type=${input}`,
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
if ("urlContent" in input) {
|
||||||
|
response = await this.fetch(
|
||||||
|
`${this.apiUrl}/address?url_content=${input.urlContent}`,
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
throw new Error("Input cannot be empty.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const result = await response.json();
|
||||||
|
dbg("Address for %o = %s", input, result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async addressToComponents(
|
||||||
|
address: string,
|
||||||
|
): Promise<AddressComponents> {
|
||||||
|
if (!this.wasmExtensions) {
|
||||||
|
throw new Error("WASM extensions not supplied.");
|
||||||
|
}
|
||||||
|
await this.wasmExtensions.init();
|
||||||
|
return this.wasmExtensions.addr_to_components(address);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async componentsToAddress(
|
||||||
|
components: AddressComponents,
|
||||||
|
): Promise<string> {
|
||||||
|
if (!this.wasmExtensions) {
|
||||||
|
throw new Error("WASM extensions not initialized.");
|
||||||
|
}
|
||||||
|
await this.wasmExtensions.init();
|
||||||
|
return this.wasmExtensions.components_to_addr(components);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getVaultOptions(
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<VaultOptions> {
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/options`, options);
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async setVaultOptions(
|
||||||
|
options: VaultOptions,
|
||||||
|
apiOptions?: ApiFetchOptions,
|
||||||
|
): Promise<void> {
|
||||||
|
const payload: Record<string, unknown> = {};
|
||||||
|
|
||||||
|
if (options.blob_mode) {
|
||||||
|
const blob_mode: Record<string, unknown> = {};
|
||||||
|
blob_mode[options.blob_mode] = null;
|
||||||
|
payload["blob_mode"] = blob_mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/options`, apiOptions, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw Error(await response.text());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async authenticate(
|
||||||
|
credentials: {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
},
|
||||||
|
mode: "key",
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<{ key: string }>;
|
||||||
|
public async authenticate(
|
||||||
|
credentials: {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
},
|
||||||
|
mode?: "cookie",
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<void>;
|
||||||
|
public async authenticate(
|
||||||
|
credentials: {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
},
|
||||||
|
mode: "key" | "cookie" | undefined,
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<{ key: string } | void> {
|
||||||
|
const via = mode || "cookie";
|
||||||
|
const response = await this.fetch(
|
||||||
|
`${this.apiUrl}/auth/login?via=${via}`,
|
||||||
|
options,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify(credentials),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw Error(await response.text());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mode === "key") {
|
||||||
|
const data = await response.json();
|
||||||
|
if (!data.key) {
|
||||||
|
throw Error("No key returned from server.");
|
||||||
|
}
|
||||||
|
this.key = data.key;
|
||||||
|
return data.key;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async register(credentials: {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
}): Promise<void> {
|
||||||
|
await this.fetch(`${this.apiUrl}/auth/register`, undefined, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify(credentials),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async authStatus(
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<{ user: string } | undefined> {
|
||||||
|
const response = await this.fetch(`${this.apiUrl}/auth/whoami`, options);
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async resetAuth(mode: "key"): Promise<void>;
|
||||||
|
public async resetAuth(
|
||||||
|
mode?: "cookie",
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<void>;
|
||||||
|
public async resetAuth(
|
||||||
|
mode?: "key" | "cookie",
|
||||||
|
options?: ApiFetchOptions,
|
||||||
|
): Promise<void> {
|
||||||
|
if (mode === "key") {
|
||||||
|
this.key = undefined;
|
||||||
|
} else {
|
||||||
|
await this.fetch(`${this.apiUrl}/auth/logout`, options, {
|
||||||
|
method: "POST",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private getAbortSignal(options: ApiFetchOptions | undefined) {
|
||||||
|
const controller = options?.abortController || new AbortController();
|
||||||
|
const timeout = options?.timeout || this.timeout;
|
||||||
|
if (timeout > 0) {
|
||||||
|
setTimeout(() => controller.abort(), timeout);
|
||||||
|
}
|
||||||
|
return controller.signal;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async fetch(
|
||||||
|
url: string,
|
||||||
|
options: ApiFetchOptions | undefined,
|
||||||
|
requestInit?: RequestInit & { headers?: Record<string, string> },
|
||||||
|
): Promise<Response> {
|
||||||
|
const signal = this.getAbortSignal(options);
|
||||||
|
const headers = requestInit?.headers || {};
|
||||||
|
if (this.key) {
|
||||||
|
headers["Authorization"] = `Bearer ${this.key}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
let result: Response;
|
||||||
|
let error: UpendApiError | undefined;
|
||||||
|
try {
|
||||||
|
result = await fetch(url, {
|
||||||
|
...requestInit,
|
||||||
|
signal,
|
||||||
|
headers,
|
||||||
|
});
|
||||||
|
if (!result.ok) {
|
||||||
|
if (result.status === 401) {
|
||||||
|
error = { kind: "Unauthorized", message: await result.text() };
|
||||||
|
} else {
|
||||||
|
error = {
|
||||||
|
kind: "HttpError",
|
||||||
|
message: `HTTP Error ${result.status}: ${result.statusText}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
error = { kind: "FetchError", error: e as Error };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
if (this.onError) {
|
||||||
|
this.onError(error);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
return result!;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ApiFetchOptions {
|
||||||
|
timeout?: number;
|
||||||
|
abortController?: AbortController;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type VaultBlobMode = "Flat" | "Mirror" | "Incoming";
|
||||||
|
|
||||||
|
export interface VaultOptions {
|
||||||
|
blob_mode: VaultBlobMode;
|
||||||
|
}
|
|
@ -1,16 +1,7 @@
|
||||||
import type { IEntry, IValue, ListingResult } from "./types";
|
import type { IEntry, IValue, ListingResult } from "./types";
|
||||||
|
|
||||||
// export function listingAsOrdered(listing: ListingResult): OrderedListing {
|
export { UpEndApi } from "./api";
|
||||||
// const entries = Object.entries(listing) as [Address, IEntry][];
|
export { Query } from "./query";
|
||||||
// return entries
|
|
||||||
// .sort(([_, a], [__, b]) =>
|
|
||||||
// String(a.value.c).localeCompare(String(b.value.c))
|
|
-  // )
-  //   .sort(([_, a], [__, b]) =>
-  //     String(a.value.t).localeCompare(String(b.value.t))
-  //   )
-  //   .sort(([_, a], [__, b]) => a.attribute.localeCompare(b.attribute));
-  // }
 
 export class UpListing {
   public readonly entries: UpEntry[];
@@ -18,7 +9,7 @@ export class UpListing {
 
   constructor(listing: ListingResult) {
     this.entries = Object.entries(listing).map(
-      (lr) => new UpEntry(...lr, this)
+      (lr) => new UpEntry(...lr, this),
     );
   }
 
@@ -26,7 +17,7 @@ export class UpListing {
     const allEntities = new Set(this.entries.map((e) => e.entity));
     const result: { [key: string]: UpObject } = {};
     Array.from(allEntities).forEach(
-      (entity) => (result[entity] = new UpObject(entity, this))
+      (entity) => (result[entity] = new UpObject(entity, this)),
     );
     return result;
   }
@@ -37,6 +28,18 @@ export class UpListing {
     }
     return this._objects[address];
   }
+
+  public get entities(): string[] {
+    return Array.from(new Set(this.entries.map((e) => `@${e.entity}`)));
+  }
+
+  public get attributes(): string[] {
+    return Array.from(new Set(this.entries.map((e) => e.attribute)));
+  }
+
+  public get values(): IValue[] {
+    return Array.from(new Set(this.entries.map((e) => e.value)));
+  }
 }
 
 export class UpObject {
@@ -54,45 +57,55 @@ export class UpObject {
 
   public get attributes() {
     return (this.listing?.entries || []).filter(
-      (e) => e.entity === this.address
+      (e) => e.entity === this.address,
     );
   }
 
   public get backlinks() {
     return (this.listing?.entries || []).filter(
-      (e) => e.value.c === this.address
+      (e) => e.value.c === this.address,
     );
   }
 
-  public get attr() {
-    const result = {} as { [key: string]: UpEntry[] };
-    this.attributes.forEach((entry) => {
-      if (!result[entry.attribute]) {
-        result[entry.attribute] = [];
-      }
-
-      result[entry.attribute].push(entry);
-    });
-
-    this.backlinks.forEach((entry) => {
-      const attribute = `~${entry.attribute}`;
-      if (!result[attribute]) {
-        result[attribute] = [];
-      }
-
-      result[attribute].push(entry);
-    });
-
-    return result;
+  private _attr: Record<string, UpEntry[] | undefined> | undefined;
+
+  public get attr(): Record<string, UpEntry[] | undefined> {
+    if (!this._attr) {
+      const result = {} as { [key: string]: UpEntry[] };
+      this.attributes.forEach((entry) => {
+        if (!result[entry.attribute]) {
+          result[entry.attribute] = [];
+        }
+
+        result[entry.attribute].push(entry);
+      });
+
+      this.backlinks.forEach((entry) => {
+        const attribute = `~${entry.attribute}`;
+        if (!result[attribute]) {
+          result[attribute] = [];
+        }
+
+        result[attribute].push(entry);
+      });
+
+      this._attr = result;
+    }
+    return this._attr;
   }
 
-  public get(attr: string) {
-    return this.attr[attr] ? this.attr[attr][0].value.c : undefined;
+  public get(attr: string): string | number | null | undefined {
+    return this.attr[attr]?.[0].value.c;
   }
 
   public identify(): string[] {
-    const lblValues = (this.attr["LBL"] || []).map((e) => String(e.value.c));
-    return lblValues;
+    return (this.attr["LBL"] || []).map((e) => String(e.value.c));
   }
 
+  public toString(): string {
+    return [`@${this.address}`, this.identify().join(", ")]
+      .filter(Boolean)
+      .join(" | ");
+  }
+
   public asDict() {
@@ -108,6 +121,7 @@ export class UpEntry extends UpObject implements IEntry {
   attribute: string;
   value: IValue;
   provenance: string;
+  user: string;
   timestamp: string;
 
   constructor(address: string, entry: IEntry, listing: UpListing) {
@@ -117,6 +131,7 @@ export class UpEntry extends UpObject implements IEntry {
     this.attribute = entry.attribute;
     this.value = entry.value;
     this.provenance = entry.provenance;
+    this.user = entry.user;
     this.timestamp = entry.timestamp;
   }
 
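For orientation, a short usage sketch of the entity classes above (not part of the changeset; the import paths and the `listingResult` value are assumptions):

// Sketch only: reading entries through the memoized `attr` getter.
// Module paths are assumed; `listingResult` stands for a ListingResult returned by the API.
import { UpListing, UpObject } from "./entity";
import type { ListingResult } from "./types";

declare const listingResult: ListingResult;

const listing = new UpListing(listingResult);
const obj = new UpObject("entity-address", listing);

obj.attr["LBL"];  // forward entries, i.e. entries whose entity is this address
obj.attr["~LBL"]; // backlinks, i.e. entries whose value.c points at this address
obj.get("LBL");   // value of the first LBL entry, or undefined
obj.toString();   // "@<address> | <labels>" (labels omitted when there are none)
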
@@ -0,0 +1,91 @@
+import type { Address } from "./types";
+import { isAddress } from "./types";
+
+export const Any = "?";
+class Var {
+  constructor(public readonly name: string) {}
+}
+export function Variable(name: string): Var {
+  return new Var(name);
+}
+type QueryPart<T> = T | T[] | typeof Any | Var;
+
+export class Query {
+  private _query: string | undefined;
+
+  public static matches(
+    entity: QueryPart<string>,
+    attribute: QueryPart<string>,
+    value: QueryPart<string | number | Address>
+  ): Query {
+    const query = new Query();
+
+    let entityStr;
+    if (entity === Any) {
+      entityStr = "?";
+    } else if (entity instanceof Var) {
+      entityStr = `?${entity.name}`;
+    } else {
+      entityStr = Array.isArray(entity) ? `(in ${entity.join(" ")})` : entity;
+    }
+
+    let attributeStr;
+    if (attribute === Any) {
+      attributeStr = "?";
+    } else if (attribute instanceof Var) {
+      attributeStr = `?${attribute.name}`;
+    } else {
+      attributeStr = Array.isArray(attribute)
+        ? `(in ${attribute.map((a) => `"${a}"`).join(" ")})`
+        : `"${attribute}"`;
+    }
+
+    let valueStr;
+    if (value === Any) {
+      valueStr = "?";
+    } else if (value instanceof Var) {
+      valueStr = `?${value.name}`;
+    } else {
+      valueStr = (Array.isArray(value) ? value : [value])
+        .map((v) => {
+          if (typeof v === "number") return v;
+          if (isAddress(v)) return v;
+          if (typeof v === "string") return `"${v}"`;
+        })
+        .join(" ");
+      valueStr = Array.isArray(value) ? `(in ${valueStr})` : valueStr;
+    }
+
+    query._query = `(matches ${entityStr} ${attributeStr} ${valueStr})`;
+    return query;
+  }
+
+  public static or(...queries: Query[]): Query {
+    const query = new Query();
+    query._query = `(or ${queries.join(" ")})`;
+    return query;
+  }
+
+  public static and(...queries: Query[]): Query {
+    const query = new Query();
+    query._query = `(and ${queries.join(" ")})`;
+    return query;
+  }
+
+  public static not(query: Query): Query {
+    const q = new Query();
+    q._query = `(not ${query})`;
+    return q;
+  }
+
+  public static join(...queries: Query[]): Query {
+    const query = new Query();
+    query._query = `(join ${queries.join(" ")})`;
+    return query;
+  }
+
+  public toString(): string {
+    if (!this._query) throw new Error("Query is not defined");
+    return this._query;
+  }
+}
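A brief sketch of how the Query builder above composes into the textual query format (not part of the changeset; the import path is an assumption, and the expected strings follow the implementation and the tests added further below):

// Sketch only; module path assumed.
import { Any, Query, Variable } from "./query";

const byLabel = Query.matches(Any, "LBL", ["Foo", "Bar"]);
// byLabel.toString() === '(matches ? "LBL" (in "Foo" "Bar"))'

const combined = Query.and(byLabel, Query.matches(Variable("e"), "TYPE", Any));
// combined.toString() ===
//   '(and (matches ? "LBL" (in "Foo" "Bar")) (matches ?e "TYPE" ?))'
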
@@ -2,6 +2,10 @@ export type Address = string;
 export type ADDRESS_TYPE = "Hash" | "Uuid" | "Attribute" | "Url";
 export type VALUE_TYPE = "Address" | "String" | "Number" | "Invalid";
 
+export function isAddress(address: string): address is Address {
+  return address.startsWith("@");
+}
+
 /**
  * A single atomic entry in UpEnd.
  */
@@ -14,6 +18,8 @@ export interface IEntry {
   value: IValue;
   /** The origin or provenance of the data entry (e.g. SYSTEM or USER API...) */
   provenance: string;
+  /** The user who created the data entry. */
+  user: string;
   /** The timestamp when the data entry was created in RFC 3339 format. */
   timestamp: string;
 }
@@ -34,10 +40,6 @@ export type IValue =
   | {
       t: "Null";
       c: null;
-    }
-  | {
-      t: "Invalid";
-      c: null;
     };
 
 export interface InvariantEntry {
@@ -97,6 +99,7 @@ export interface VaultInfo {
   location: string;
   version: string;
   desktop: boolean;
+  public: boolean;
 }
 
 export interface StoreInfo {
@@ -0,0 +1 @@
+export const browser = typeof window !== "undefined";
@@ -0,0 +1,34 @@
+import debug from "debug";
+const dbg = debug("upend:wasm");
+
+export abstract class UpEndWasmExtensions {
+  private initialized = false;
+  private initPromise: Promise<void> | undefined = undefined;
+  protected abstract _init(): Promise<void>;
+  public async init(): Promise<void> {
+    if (!this.initialized) {
+      if (!this.initPromise) {
+        this.initPromise = this._init();
+      }
+      await this.initPromise;
+      this.initialized = true;
+      dbg("WASM extensions initialized.");
+    }
+  }
+
+  public abstract addr_to_components(address: string): AddressComponents;
+  public abstract components_to_addr(components: AddressComponents): string;
+  public abstract AddressTypeConstants: AddressTypeConstants;
+}
+
+export interface AddressTypeConstants {
+  Attribute: string;
+  Hash: string;
+  Url: string;
+  Uuid: string;
+}
+
+export interface AddressComponents {
+  t: string;
+  c?: string;
+}
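A minimal sketch of how the abstract WASM wrapper above is meant to be consumed (not part of the changeset; the import path is an assumption, and UpEndWasmExtensionsNode is the concrete implementation in the following file). init() can be awaited repeatedly, while _init() runs only once:

// Sketch only; module path assumed. Requires a module context with top-level await.
import { UpEndWasmExtensionsNode } from "./node";

declare const address: string; // some UpEnd address string

const wasm = new UpEndWasmExtensionsNode();
await wasm.init();                  // idempotent; later calls reuse the same promise
const components = wasm.addr_to_components(address); // { t, c? }
const roundTripped = wasm.components_to_addr(components);
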
@@ -0,0 +1,25 @@
+import type { AddressComponents, AddressTypeConstants } from "./index";
+import { UpEndWasmExtensions } from "./index";
+import {
+  addr_to_components,
+  components_to_addr,
+  AddressComponents as WasmAddresComponents,
+  AddressTypeConstants as WasmAddresTypeConstants,
+} from "@upnd/wasm-node";
+
+export class UpEndWasmExtensionsNode extends UpEndWasmExtensions {
+  protected async _init(): Promise<void> {}
+
+  addr_to_components(address: string): AddressComponents {
+    return addr_to_components(address);
+  }
+
+  components_to_addr(components: AddressComponents): string {
+    const wc = new WasmAddresComponents(components.t, components.c);
+    return components_to_addr(wc);
+  }
+
+  get AddressTypeConstants(): AddressTypeConstants {
+    return new WasmAddresTypeConstants();
+  }
+}
@@ -0,0 +1,36 @@
+import type { AddressComponents, AddressTypeConstants } from "./index";
+import { UpEndWasmExtensions } from "./index";
+import type { InitInput } from "@upnd/wasm-web";
+import {
+  addr_to_components,
+  components_to_addr,
+  AddressComponents as WasmAddresComponents,
+  AddressTypeConstants as WasmAddresTypeConstants,
+} from "@upnd/wasm-web";
+import init_wasm from "@upnd/wasm-web";
+
+export class UpEndWasmExtensionsWeb extends UpEndWasmExtensions {
+  private initInput: InitInput;
+
+  constructor(init: InitInput) {
+    super();
+    this.initInput = init;
+  }
+
+  protected async _init(): Promise<void> {
+    await init_wasm(this.initInput);
+  }
+
+  addr_to_components(address: string): AddressComponents {
+    return addr_to_components(address);
+  }
+
+  components_to_addr(components: AddressComponents): string {
+    const wc = new WasmAddresComponents(components.t, components.c);
+    return components_to_addr(wc);
+  }
+
+  get AddressTypeConstants(): AddressTypeConstants {
+    return new WasmAddresTypeConstants();
+  }
+}
@@ -0,0 +1,79 @@
+import { Any, Query, Variable } from "../src/query";
+
+describe("query matches", () => {
+  test("query matches simple", () => {
+    const query = Query.matches("entity", "attribute", "value");
+    expect(query.toString()).toBe('(matches entity "attribute" "value")');
+  });
+
+  test("query matches anything", () => {
+    const query = Query.matches(Any, Any, Any);
+    expect(query.toString()).toBe("(matches ? ? ?)");
+  });
+
+  test("query matches array", () => {
+    const query = Query.matches("entity", "attribute", ["value1", "value2"]);
+    expect(query.toString()).toBe(
+      '(matches entity "attribute" (in "value1" "value2"))',
+    );
+  });
+
+  test("query matches addresses", () => {
+    const query = Query.matches("entity", "attribute", [
+      "@address1",
+      "@address2",
+    ]);
+    expect(query.toString()).toBe(
+      '(matches entity "attribute" (in @address1 @address2))',
+    );
+  });
+
+  test("query matches numbers", () => {
+    const query = Query.matches("entity", "attribute", [1, 2]);
+    expect(query.toString()).toBe('(matches entity "attribute" (in 1 2))');
+  });
+
+  test("query matches variables", () => {
+    const query = Query.matches("entity", "attribute", Variable("a"));
+    expect(query.toString()).toBe('(matches entity "attribute" ?a)');
+  });
+});
+
+describe("compound queries", () => {
+  test("OR queries", () => {
+    const query = Query.or(
+      Query.matches("entity", "attribute1", "value2"),
+      Query.matches("entity", "attribute2", "value2"),
+    );
+    expect(query.toString()).toBe(
+      '(or (matches entity "attribute1" "value2") (matches entity "attribute2" "value2"))',
+    );
+  });
+
+  test("AND queries", () => {
+    const query = Query.and(
+      Query.matches("entity", "attribute1", "value2"),
+      Query.matches("entity", "attribute2", "value2"),
+    );
+    expect(query.toString()).toBe(
+      '(and (matches entity "attribute1" "value2") (matches entity "attribute2" "value2"))',
+    );
+  });
+
+  test("NOT query", () => {
+    const query = Query.not(Query.matches("entity", "attribute1", "value2"));
+    expect(query.toString()).toBe(
+      '(not (matches entity "attribute1" "value2"))',
+    );
+  });
+
+  test("JOIN queries", () => {
+    const query = Query.join(
+      Query.matches("entity", "attribute1", "value2"),
+      Query.matches("entity", "attribute2", "value2"),
+    );
+    expect(query.toString()).toBe(
+      '(join (matches entity "attribute1" "value2") (matches entity "attribute2" "value2"))',
+    );
+  });
+});
@@ -0,0 +1,19 @@
+{
+  "compilerOptions": {
+    "declaration": true,
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "importsNotUsedAsValues": "error",
+    "lib": [
+      "es2019",
+      "DOM"
+    ],
+    "module": "commonjs",
+    "outDir": "dist",
+    "rootDir": "src",
+    "strict": true,
+    "target": "es5"
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules", "dist"]
+}
@@ -1 +0,0 @@
-*/dist
@@ -1,134 +0,0 @@
-import hashlib
-import logging
-from dataclasses import dataclass
-from operator import add
-
-import click
-import colorama
-import psycopg2
-from tqdm import tqdm
-from upend import UpEnd
-
-
-class LogFormatter(logging.Formatter):
-    format_str = "[%(asctime)s] %(levelname)s - %(message)s"
-
-    FORMATS = {
-        logging.DEBUG: colorama.Fore.LIGHTBLACK_EX + format_str + colorama.Fore.RESET,
-        logging.INFO: format_str,
-        logging.WARNING: colorama.Fore.YELLOW + format_str + colorama.Fore.RESET,
-        logging.ERROR: colorama.Fore.RED + format_str + colorama.Fore.RESET,
-        logging.CRITICAL: colorama.Fore.RED
-        + colorama.Style.BRIGHT
-        + format_str
-        + colorama.Style.RESET_ALL
-        + colorama.Fore.RESET,
-    }
-
-    def format(self, record):
-        log_fmt = self.FORMATS.get(record.levelno)
-        formatter = logging.Formatter(log_fmt)
-        return formatter.format(record)
-
-
-@dataclass
-class KSXTrackFile:
-    file: str
-    sha256sum: str
-    energy: int
-    seriousness: int
-    tint: int
-    materials: int
-
-
-@click.command()
-@click.option("--db-name", required=True)
-@click.option("--db-user", required=True)
-@click.option("--db-password", required=True)
-@click.option("--db-host", default="localhost")
-@click.option("--db-port", default=5432, type=int)
-def main(db_name, db_user, db_password, db_host, db_port):
-    """Load KSX database dump into UpEnd."""
-
-    logger = logging.getLogger("ksx2upend")
-    logger.setLevel(logging.DEBUG)
-    ch = logging.StreamHandler()
-    ch.setLevel(logging.DEBUG)
-    ch.setFormatter(LogFormatter())
-    logger.addHandler(ch)
-
-    logger.debug("Connecting to PostgreSQL...")
-    connection = psycopg2.connect(
-        database=db_name,
-        user=db_user,
-        password=db_password,
-        host=db_host,
-        port=db_port,
-    )
-    cur = connection.cursor()
-
-    logger.debug("Connecting to UpEnd...")
-    upend = UpEnd()
-
-    cur.execute(
-        "SELECT file, sha256sum, energy, seriousness, tint, materials "
-        "FROM ksx_radio_trackfile "
-        "INNER JOIN ksx_radio_moodsregular ON ksx_radio_trackfile.track_id = ksx_radio_moodsregular.track_id"
-    )
-    trackfiles = [KSXTrackFile(*row) for row in cur.fetchall()]
-    logger.info(f"Got {len(trackfiles)} (annotated) trackfiles from database...")
-
-    # TODO: get_invariant() or somesuch?
-    blob_addr = list(upend.query((None, "TYPE", 'J"BLOB"')).values())[0]["entity"]
-
-    all_files = upend.query((None, "IS", f"O{blob_addr}")).values()
-    hashed_files = upend.query((None, "SHA256", None)).values()
-
-    logger.info(
-        f"Got {len(all_files)} files from UpEnd ({len(hashed_files)} of which are hashed)..."
-    )
-
-    if len(hashed_files) < len(all_files):
-        logger.info("Computing SHA256 hashes for UpEnd files...")
-        hashed_entries = [entry["entity"] for entry in hashed_files]
-        unhashed_files = [
-            file for file in all_files if file["entity"] not in hashed_entries
-        ]
-        for entry in tqdm(unhashed_files):
-            sha256_hash = hashlib.sha256()
-            for chunk in upend.get_raw(entry["entity"]):
-                sha256_hash.update(chunk)
-            upend.insert((entry["entity"], "SHA256", sha256_hash.hexdigest()))
-        hashed_files = upend.query((None, "SHA256", None)).values()
-
-    sha256_trackfiles = {tf.sha256sum: tf for tf in trackfiles}
-    sha256_entities = {entry["value"]["c"]: entry["entity"] for entry in hashed_files}
-
-    tf_and_ue = [sum for sum in sha256_trackfiles.keys() if sum in sha256_entities]
-
-    logger.info(
-        f"Out of {len(trackfiles)} trackfiles, and out of {len(hashed_files)} files in UpEnd, {len(tf_and_ue)} are present in both."
-    )
-
-    logger.info("Inserting types...")
-    ksx_type_result = upend.insert((None, "TYPE", "KSX_TRACK_MOODS"))
-    ksx_type_addr = list(ksx_type_result.values())[0]["entity"]
-    upend.insert((ksx_type_addr, "TYPE_REQUIRES", "KSX_ENERGY"))
-    upend.insert((ksx_type_addr, "TYPE_REQUIRES", "KSX_SERIOUSNESS"))
-    upend.insert((ksx_type_addr, "TYPE_REQUIRES", "KSX_TINT"))
-    upend.insert((ksx_type_addr, "TYPE_REQUIRES", "KSX_MATERIALS"))
-
-    logger.info("Inserting mood data...")
-    for sum in tqdm(tf_and_ue):
-        tf = sha256_trackfiles[sum]
-        address = sha256_entities[sum]
-
-        upend.insert((address, "IS", ksx_type_addr), value_type="Address")
-        upend.insert((address, "KSX_ENERGY", tf.energy))
-        upend.insert((address, "KSX_SERIOUSNESS", tf.seriousness))
-        upend.insert((address, "KSX_TINT", tf.tint))
-        upend.insert((address, "KSX_MATERIALS", tf.materials))
-
-
-if __name__ == "__main__":
-    main()
@@ -1,398 +0,0 @@
-[[package]]
-name = "appdirs"
-version = "1.4.4"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "black"
-version = "21.6b0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.6.2"
-
-[package.dependencies]
-appdirs = "*"
-click = ">=7.1.2"
-mypy-extensions = ">=0.4.3"
-pathspec = ">=0.8.1,<1"
-regex = ">=2020.1.8"
-toml = ">=0.10.1"
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""}
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
-python2 = ["typed-ast (>=1.4.2)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "certifi"
-version = "2021.5.30"
-description = "Python package for providing Mozilla's CA Bundle."
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "chardet"
-version = "4.0.0"
-description = "Universal encoding detector for Python 2 and 3"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[[package]]
-name = "click"
-version = "8.0.1"
-description = "Composable command line interface toolkit"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.4"
-description = "Cross-platform colored terminal text."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[[package]]
-name = "idna"
-version = "2.10"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[[package]]
-name = "importlib-metadata"
-version = "4.5.0"
-description = "Read metadata from Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
-
-[[package]]
-name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "pathspec"
-version = "0.8.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[[package]]
-name = "psycopg2-binary"
-version = "2.9.1"
-description = "psycopg2 - Python-PostgreSQL Database Adapter"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "regex"
-version = "2021.4.4"
-description = "Alternative regular expression module, to replace re."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "requests"
-version = "2.25.1"
-description = "Python HTTP for Humans."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-chardet = ">=3.0.2,<5"
-idna = ">=2.5,<3"
-urllib3 = ">=1.21.1,<1.27"
-
-[package.extras]
-security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-
-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-category = "dev"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "tqdm"
-version = "4.61.1"
-description = "Fast, Extensible Progress Meter"
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
-
-[package.extras]
-dev = ["py-make (>=0.1.0)", "twine", "wheel"]
-notebook = ["ipywidgets (>=6)"]
-telegram = ["requests"]
-
-[[package]]
-name = "typed-ast"
-version = "1.4.3"
-description = "a fork of Python 2 and 3 ast modules with type comment support"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "typing-extensions"
-version = "3.10.0.0"
-description = "Backported and Experimental Type Hints for Python 3.5+"
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "upend"
-version = "0.1.0"
-description = ""
-category = "main"
-optional = false
-python-versions = "^3.7"
-develop = true
-
-[package.dependencies]
-requests = "^2.25.1"
-
-[package.source]
-type = "directory"
-url = "../upend"
-
-[[package]]
-name = "urllib3"
-version = "1.26.5"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
-
-[package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
-socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
-
-[[package]]
-name = "zipp"
-version = "3.4.1"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
-
-[metadata]
-lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "ca9615ad2a499b8ab72751257d9b56f012ef4482b56ce205928d4f326e161787"
-
-[metadata.files]
-appdirs = [
-    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
-    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
-]
-black = [
-    {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"},
-    {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"},
-]
-certifi = [
-    {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"},
-    {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"},
-]
-chardet = [
-    {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"},
-    {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
-]
-click = [
-    {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
-    {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
-]
-colorama = [
-    {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
-    {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-idna = [
-    {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
-    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
-]
-importlib-metadata = [
-    {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"},
-    {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"},
-]
-mypy-extensions = [
-    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
-    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
-]
-pathspec = [
-    {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"},
-    {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"},
-]
-psycopg2-binary = [
-    {file = "psycopg2-binary-2.9.1.tar.gz", hash = "sha256:b0221ca5a9837e040ebf61f48899926b5783668b7807419e4adae8175a31f773"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:c250a7ec489b652c892e4f0a5d122cc14c3780f9f643e1a326754aedf82d9a76"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aef9aee84ec78af51107181d02fe8773b100b01c5dfde351184ad9223eab3698"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c3fb684e9abfc47218d3784c7b4c47c8587951ea4dd5bc38b6636ac57f616"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:995fc41ebda5a7a663a254a1dcac52638c3e847f48307b5416ee373da15075d7"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:fbb42a541b1093385a2d8c7eec94d26d30437d0e77c1d25dae1dcc46741a385e"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-win32.whl", hash = "sha256:20f1ab44d8c352074e2d7ca67dc00843067788791be373e67a0911998787ce7d"},
-    {file = "psycopg2_binary-2.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f6fac64a38f6768e7bc7b035b9e10d8a538a9fadce06b983fb3e6fa55ac5f5ce"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:1e3a362790edc0a365385b1ac4cc0acc429a0c0d662d829a50b6ce743ae61b5a"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8559617b1fcf59a9aedba2c9838b5b6aa211ffedecabca412b92a1ff75aac1a"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36c7eb6152ba5467fb264d73844877be8b0847874d4822b7cf2d3c0cb8cdcb0"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:2f62c207d1740b0bde5c4e949f857b044818f734a3d57f1d0d0edc65050532ed"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:cfc523edecddaef56f6740d7de1ce24a2fdf94fd5e704091856a201872e37f9f"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-win32.whl", hash = "sha256:1e85b74cbbb3056e3656f1cc4781294df03383127a8114cbc6531e8b8367bf1e"},
-    {file = "psycopg2_binary-2.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1473c0215b0613dd938db54a653f68251a45a78b05f6fc21af4326f40e8360a2"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:35c4310f8febe41f442d3c65066ca93cccefd75013df3d8c736c5b93ec288140"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c13d72ed6af7fd2c8acbd95661cf9477f94e381fce0792c04981a8283b52917"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14db1752acdd2187d99cb2ca0a1a6dfe57fc65c3281e0f20e597aac8d2a5bd90"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:aed4a9a7e3221b3e252c39d0bf794c438dc5453bc2963e8befe9d4cd324dff72"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:da113b70f6ec40e7d81b43d1b139b9db6a05727ab8be1ee559f3a69854a69d34"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-win32.whl", hash = "sha256:4235f9d5ddcab0b8dbd723dca56ea2922b485ea00e1dafacf33b0c7e840b3d32"},
-    {file = "psycopg2_binary-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:988b47ac70d204aed01589ed342303da7c4d84b56c2f4c4b8b00deda123372bf"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:7360647ea04db2e7dff1648d1da825c8cf68dc5fbd80b8fb5b3ee9f068dcd21a"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca86db5b561b894f9e5f115d6a159fff2a2570a652e07889d8a383b5fae66eb4"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ced67f1e34e1a450cdb48eb53ca73b60aa0af21c46b9b35ac3e581cf9f00e31"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:0f2e04bd2a2ab54fa44ee67fe2d002bb90cee1c0f1cc0ebc3148af7b02034cbd"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:3242b9619de955ab44581a03a64bdd7d5e470cc4183e8fcadd85ab9d3756ce7a"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-win32.whl", hash = "sha256:0b7dae87f0b729922e06f85f667de7bf16455d411971b2043bbd9577af9d1975"},
-    {file = "psycopg2_binary-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4d7679a08fea64573c969f6994a2631908bb2c0e69a7235648642f3d2e39a68"},
-]
-regex = [
-    {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"},
-    {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"},
-    {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"},
-    {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"},
-    {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"},
-    {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"},
-    {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"},
-    {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"},
-    {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"},
-    {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"},
-    {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"},
-    {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"},
-    {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"},
-    {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"},
-    {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"},
-    {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"},
-    {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"},
-]
-requests = [
-    {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"},
-    {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"},
-]
-toml = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-tqdm = [
-    {file = "tqdm-4.61.1-py2.py3-none-any.whl", hash = "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2"},
-    {file = "tqdm-4.61.1.tar.gz", hash = "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd"},
-]
-typed-ast = [
-    {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
-    {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
-    {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
-    {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
-    {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
-    {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
-    {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
-    {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
-    {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
-    {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
-    {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
-    {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
-    {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
-    {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
-    {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
-    {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
-    {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
-    {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
-    {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
-    {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
-]
-typing-extensions = [
-    {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"},
-    {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"},
-    {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"},
-]
-upend = []
-urllib3 = [
-    {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"},
-    {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"},
-]
-zipp = [
-    {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"},
-    {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"},
-]
@@ -1,19 +0,0 @@
-[tool.poetry]
-authors = ["Tomáš Mládek <t@mldk.cz>"]
-description = ""
-name = "fromksx"
-version = "0.1.0"
-
-[tool.poetry.dependencies]
-click = "^8.0.1"
-colorama = "^0.4.4"
-python = "^3.7"
-tqdm = "^4.61.1"
-upend = {path = "../upend", develop = true}
-psycopg2-binary = "^2.9.1"
-
-[tool.poetry.dev-dependencies]
-black = {version = "^21.6b0", allow-prereleases = true}
-[build-system]
-build-backend = "poetry.core.masonry.api"
-requires = ["poetry-core>=1.0.0"]
@@ -1,21 +0,0 @@
-{
-  "ignorePatterns": ["**/*.js"],
-  "env": {
-    "browser": true,
-    "es2021": true
-  },
-  "extends": [
-    "eslint:recommended",
-    "plugin:@typescript-eslint/recommended"
-  ],
-  "parser": "@typescript-eslint/parser",
-  "parserOptions": {
-    "ecmaVersion": "latest",
-    "sourceType": "module"
-  },
-  "plugins": [
-    "@typescript-eslint"
-  ],
-  "rules": {
-  }
-}
Some files were not shown because too many files have changed in this diff.