diff --git a/CHANGELOG.md b/CHANGELOG.md
index 98a9ad143..d2224721e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,9 @@
 - Global
   - Fixed an incredibly annoying bug that made the program panic because of a wrong utxo/address durable state after one or many new datasets were added/changed after a first successful parse of the chain
+  - Fixed a bug that would crash the program when launched for the very first time
+  - Merged the core of the `HeightMap` and `DateMap` structs into `GenericMap`
+  - Added `Height` struct
 - CLI
   - Added an argument parser for improved UX with several options
 - Datasets
diff --git a/app/package.json b/app/package.json
index 4608008b4..90ccf64aa 100644
--- a/app/package.json
+++ b/app/package.json
@@ -26,7 +26,7 @@
   },
   "devDependencies": {
     "@ianvs/prettier-plugin-sort-imports": "^4.3.1",
-    "@iconify-json/tabler": "^1.1.117",
+    "@iconify-json/tabler": "^1.1.118",
     "@tailwindcss/container-queries": "^0.1.1",
     "autoprefixer": "^10.4.19",
     "postcss": "^8.4.39",
@@ -42,6 +42,6 @@
     "vite-plugin-pwa": "^0.20.0",
     "vite-plugin-solid": "^2.10.2",
     "workbox-window": "^7.1.0",
-    "wrangler": "^3.65.0"
+    "wrangler": "^3.65.1"
   }
 }
diff --git a/app/pnpm-lock.yaml b/app/pnpm-lock.yaml
index 34e5008ce..e590d0c13 100644
--- a/app/pnpm-lock.yaml
+++ b/app/pnpm-lock.yaml
@@ -32,8 +32,8 @@ devDependencies:
     specifier: ^4.3.1
     version: 4.3.1(prettier@3.3.3)
   '@iconify-json/tabler':
-    specifier: ^1.1.117
-    version: 1.1.117
+    specifier: ^1.1.118
+    version: 1.1.118
   '@tailwindcss/container-queries':
     specifier: ^0.1.1
     version: 0.1.1(tailwindcss@3.4.6)
@@ -80,8 +80,8 @@ devDependencies:
     specifier: ^7.1.0
     version: 7.1.0
   wrangler:
-    specifier: ^3.65.0
-    version: 3.65.0
+    specifier: ^3.65.1
+    version: 3.65.1

 packages:
@@ -1387,8 +1387,8 @@ packages:
       mime: 3.0.0
     dev: true

-  /@cloudflare/workerd-darwin-64@1.20240712.0:
-    resolution: {integrity: sha512-KB1vbOhr62BCAwVr3VaRcngzPeSCQ7zPA9VGrfwYXIxo0Y4zlW1z0EVtcewFSz5XXKr3BtNnJXxdjDPUNkguQw==}
+  /@cloudflare/workerd-darwin-64@1.20240718.0:
+    resolution: {integrity: sha512-BsPZcSCgoGnufog2GIgdPuiKicYTNyO/Dp++HbpLRH+yQdX3x4aWx83M+a0suTl1xv76dO4g9aw7SIB6OSgIyQ==}
     engines: {node: '>=16'}
     cpu: [x64]
     os: [darwin]
     requiresBuild: true
     dev: true
     optional: true

-  /@cloudflare/workerd-darwin-arm64@1.20240712.0:
-    resolution: {integrity: sha512-UDwFnCfQGFVCNxOeHxKNEc1ANQk/3OIiFWpVsxgZqJqU/22XM88JHxJW+YcBKsaUGUlpLyImaYUn2/rG+i+9UQ==}
+  /@cloudflare/workerd-darwin-arm64@1.20240718.0:
+    resolution: {integrity: sha512-nlr4gaOO5gcJerILJQph3+2rnas/nx/lYsuaot1ntHu4LAPBoQo1q/Pucj2cSIav4UiMzTbDmoDwPlls4Kteog==}
     engines: {node: '>=16'}
     cpu: [arm64]
     os: [darwin]
     requiresBuild: true
     dev: true
     optional: true

-  /@cloudflare/workerd-linux-64@1.20240712.0:
-    resolution: {integrity: sha512-MxpMHSJcZRUL66TO7BEnEim9WgZ8wJEVOB1Rq7a/IF2hI4/8f+N+02PChh62NkBlWxDfTXAtZy0tyQMm0EGjHg==}
+  /@cloudflare/workerd-linux-64@1.20240718.0:
+    resolution: {integrity: sha512-LJ/k3y47pBcjax0ee4K+6ZRrSsqWlfU4lbU8Dn6u5tSC9yzwI4YFNXDrKWInB0vd7RT3w4Yqq1S6ZEbfRrqVUg==}
     engines: {node: '>=16'}
     cpu: [x64]
     os: [linux]
     requiresBuild: true
     dev: true
     optional: true

-  /@cloudflare/workerd-linux-arm64@1.20240712.0:
-    resolution: {integrity: sha512-DtLYZsFFFAMgn+6YCHoQS6nYY4nbdAtcAFa4PhWTjLJDbvQEn3IoK9Bi4ajCL7xG36FeuBdZliSbBiiv7CJjfQ==}
+  /@cloudflare/workerd-linux-arm64@1.20240718.0:
+    resolution: {integrity: sha512-zBEZvy88EcAMGRGfuVtS00Yl7lJdUM9sH7i651OoL+q0Plv9kphlCC0REQPwzxrEYT1qibSYtWcD9IxQGgx2/g==}
     engines: {node: '>=16'}
     cpu: [arm64]
     os: [linux]
     requiresBuild: true
     dev: true
optional: true - /@cloudflare/workerd-windows-64@1.20240712.0: - resolution: {integrity: sha512-u8zoT9PQiiwxuz9npquLBFWrC/RlBWGGZ1aylarZNFlM4sFrRm+bRr6i+KtS+fltHIVXj3teuoKYytA1ppf9Yw==} + /@cloudflare/workerd-windows-64@1.20240718.0: + resolution: {integrity: sha512-YpCRvvT47XanFum7C3SedOZKK6BfVhqmwdAAVAQFyc4gsCdegZo0JkUkdloC/jwuWlbCACOG2HTADHOqyeolzQ==} engines: {node: '>=16'} cpu: [x64] os: [win32] @@ -1882,13 +1882,13 @@ packages: '@babel/traverse': 7.24.8 '@babel/types': 7.24.9 prettier: 3.3.3 - semver: 7.6.2 + semver: 7.6.3 transitivePeerDependencies: - supports-color dev: true - /@iconify-json/tabler@1.1.117: - resolution: {integrity: sha512-RiFbco9Qo0X7jlRFlD2sq9kwLZZ5JFOyacGKi9nzdnao9tV/qFFh1930JDXFay/CTs/jr7+RYPsI+wGFg0jfsA==} + /@iconify-json/tabler@1.1.118: + resolution: {integrity: sha512-nDOjYG75BlagOe4e+V3K4qVFujdPOeYzi6ZB3sYz0xbdIkpSF/4YvKGdLDr4tmkwGUTV8fhlHVfMpRoDoBnnnA==} dependencies: '@iconify/types': 2.0.0 dev: true @@ -2102,128 +2102,128 @@ packages: rollup: 2.79.1 dev: true - /@rollup/rollup-android-arm-eabi@4.18.1: - resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==} + /@rollup/rollup-android-arm-eabi@4.19.0: + resolution: {integrity: sha512-JlPfZ/C7yn5S5p0yKk7uhHTTnFlvTgLetl2VxqE518QgyM7C9bSfFTYvB/Q/ftkq0RIPY4ySxTz+/wKJ/dXC0w==} cpu: [arm] os: [android] requiresBuild: true dev: true optional: true - /@rollup/rollup-android-arm64@4.18.1: - resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==} + /@rollup/rollup-android-arm64@4.19.0: + resolution: {integrity: sha512-RDxUSY8D1tWYfn00DDi5myxKgOk6RvWPxhmWexcICt/MEC6yEMr4HNCu1sXXYLw8iAsg0D44NuU+qNq7zVWCrw==} cpu: [arm64] os: [android] requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-arm64@4.18.1: - resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==} + /@rollup/rollup-darwin-arm64@4.19.0: + resolution: {integrity: sha512-emvKHL4B15x6nlNTBMtIaC9tLPRpeA5jMvRLXVbl/W9Ie7HhkrE7KQjvgS9uxgatL1HmHWDXk5TTS4IaNJxbAA==} cpu: [arm64] os: [darwin] requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-x64@4.18.1: - resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==} + /@rollup/rollup-darwin-x64@4.19.0: + resolution: {integrity: sha512-fO28cWA1dC57qCd+D0rfLC4VPbh6EOJXrreBmFLWPGI9dpMlER2YwSPZzSGfq11XgcEpPukPTfEVFtw2q2nYJg==} cpu: [x64] os: [darwin] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.18.1: - resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==} + /@rollup/rollup-linux-arm-gnueabihf@4.19.0: + resolution: {integrity: sha512-2Rn36Ubxdv32NUcfm0wB1tgKqkQuft00PtM23VqLuCUR4N5jcNWDoV5iBC9jeGdgS38WK66ElncprqgMUOyomw==} cpu: [arm] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm-musleabihf@4.18.1: - resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==} + /@rollup/rollup-linux-arm-musleabihf@4.19.0: + resolution: {integrity: sha512-gJuzIVdq/X1ZA2bHeCGCISe0VWqCoNT8BvkQ+BfsixXwTOndhtLUpOg0A1Fcx/+eA6ei6rMBzlOz4JzmiDw7JQ==} cpu: [arm] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-gnu@4.18.1: - resolution: {integrity: 
sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==} + /@rollup/rollup-linux-arm64-gnu@4.19.0: + resolution: {integrity: sha512-0EkX2HYPkSADo9cfeGFoQ7R0/wTKb7q6DdwI4Yn/ULFE1wuRRCHybxpl2goQrx4c/yzK3I8OlgtBu4xvted0ug==} cpu: [arm64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-musl@4.18.1: - resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==} + /@rollup/rollup-linux-arm64-musl@4.19.0: + resolution: {integrity: sha512-GlIQRj9px52ISomIOEUq/IojLZqzkvRpdP3cLgIE1wUWaiU5Takwlzpz002q0Nxxr1y2ZgxC2obWxjr13lvxNQ==} cpu: [arm64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-powerpc64le-gnu@4.18.1: - resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==} + /@rollup/rollup-linux-powerpc64le-gnu@4.19.0: + resolution: {integrity: sha512-N6cFJzssruDLUOKfEKeovCKiHcdwVYOT1Hs6dovDQ61+Y9n3Ek4zXvtghPPelt6U0AH4aDGnDLb83uiJMkWYzQ==} cpu: [ppc64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-riscv64-gnu@4.18.1: - resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==} + /@rollup/rollup-linux-riscv64-gnu@4.19.0: + resolution: {integrity: sha512-2DnD3mkS2uuam/alF+I7M84koGwvn3ZVD7uG+LEWpyzo/bq8+kKnus2EVCkcvh6PlNB8QPNFOz6fWd5N8o1CYg==} cpu: [riscv64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-s390x-gnu@4.18.1: - resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==} + /@rollup/rollup-linux-s390x-gnu@4.19.0: + resolution: {integrity: sha512-D6pkaF7OpE7lzlTOFCB2m3Ngzu2ykw40Nka9WmKGUOTS3xcIieHe82slQlNq69sVB04ch73thKYIWz/Ian8DUA==} cpu: [s390x] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-gnu@4.18.1: - resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==} + /@rollup/rollup-linux-x64-gnu@4.19.0: + resolution: {integrity: sha512-HBndjQLP8OsdJNSxpNIN0einbDmRFg9+UQeZV1eiYupIRuZsDEoeGU43NQsS34Pp166DtwQOnpcbV/zQxM+rWA==} cpu: [x64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-musl@4.18.1: - resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==} + /@rollup/rollup-linux-x64-musl@4.19.0: + resolution: {integrity: sha512-HxfbvfCKJe/RMYJJn0a12eiOI9OOtAUF4G6ozrFUK95BNyoJaSiBjIOHjZskTUffUrB84IPKkFG9H9nEvJGW6A==} cpu: [x64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-arm64-msvc@4.18.1: - resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==} + /@rollup/rollup-win32-arm64-msvc@4.19.0: + resolution: {integrity: sha512-HxDMKIhmcguGTiP5TsLNolwBUK3nGGUEoV/BO9ldUBoMLBssvh4J0X8pf11i1fTV7WShWItB1bKAKjX4RQeYmg==} cpu: [arm64] os: [win32] requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-ia32-msvc@4.18.1: - resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==} + /@rollup/rollup-win32-ia32-msvc@4.19.0: + resolution: {integrity: sha512-xItlIAZZaiG/u0wooGzRsx11rokP4qyc/79LkAOdznGRAbOFc+SfEdfUOszG1odsHNgwippUJavag/+W/Etc6Q==} cpu: [ia32] os: [win32] requiresBuild: true dev: true optional: true - 
/@rollup/rollup-win32-x64-msvc@4.18.1: - resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==} + /@rollup/rollup-win32-x64-msvc@4.19.0: + resolution: {integrity: sha512-xNo5fV5ycvCCKqiZcpB65VMR11NJB+StnxHz20jdqRAktfdfzhgjTiJ2doTDQE/7dqGaV5I7ZGqKpgph6lCIag==} cpu: [x64] os: [win32] requiresBuild: true @@ -2635,8 +2635,8 @@ packages: hasBin: true dependencies: caniuse-lite: 1.0.30001642 - electron-to-chromium: 1.4.828 - node-releases: 2.0.14 + electron-to-chromium: 1.4.832 + node-releases: 2.0.17 update-browserslist-db: 1.1.0(browserslist@4.23.2) dev: true @@ -3090,7 +3090,7 @@ packages: '@one-ini/wasm': 0.1.1 commander: 10.0.1 minimatch: 9.0.1 - semver: 7.6.2 + semver: 7.6.3 dev: true /ejs@3.1.10: @@ -3098,11 +3098,11 @@ packages: engines: {node: '>=0.10.0'} hasBin: true dependencies: - jake: 10.9.1 + jake: 10.9.2 dev: true - /electron-to-chromium@1.4.828: - resolution: {integrity: sha512-QOIJiWpQJDHAVO4P58pwb133Cwee0nbvy/MV1CwzZVGpkH1RX33N3vsaWRCpR6bF63AAq366neZrRTu7Qlsbbw==} + /electron-to-chromium@1.4.832: + resolution: {integrity: sha512-cTen3SB0H2SGU7x467NRe1eVcQgcuS6jckKfWJHia2eo0cHIGOqHoAxevIYZD4eRHcWjkvFzo93bi3vJ9W+1lA==} dev: true /emoji-regex@8.0.0: @@ -3798,8 +3798,8 @@ packages: engines: {node: '>= 0.4'} dev: true - /is-core-module@2.14.0: - resolution: {integrity: sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==} + /is-core-module@2.15.0: + resolution: {integrity: sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==} engines: {node: '>= 0.4'} dependencies: hasown: 2.0.2 @@ -3963,8 +3963,8 @@ packages: '@pkgjs/parseargs': 0.11.0 dev: true - /jake@10.9.1: - resolution: {integrity: sha512-61btcOHNnLnsOdtLgA5efqQWjnSi/vow5HbI7HMdKKWqvrKR1bLK3BPlJn9gcSaP2ewuamUSMB5XEy76KUIS2w==} + /jake@10.9.2: + resolution: {integrity: sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==} engines: {node: '>=10'} hasBin: true dependencies: @@ -4256,8 +4256,8 @@ packages: engines: {node: '>=4'} dev: true - /miniflare@3.20240712.0: - resolution: {integrity: sha512-zVbsMX2phvJS1uTPmjK6CvVBq4ON2UkmvTw9IMfNPACsWJmHEdsBDxsYEG1vKAduJdI5gULLuJf7qpFxByDhGw==} + /miniflare@3.20240718.0: + resolution: {integrity: sha512-TKgSeyqPBeT8TBLxbDJOKPWlq/wydoJRHjAyDdgxbw59N6wbP8JucK6AU1vXCfu21eKhrEin77ssXOpbfekzPA==} engines: {node: '>=16.13'} hasBin: true dependencies: @@ -4269,7 +4269,7 @@ packages: glob-to-regexp: 0.4.1 stoppable: 1.1.0 undici: 5.28.4 - workerd: 1.20240712.0 + workerd: 1.20240718.0 ws: 8.18.0 youch: 3.3.3 zod: 3.23.8 @@ -4381,8 +4381,8 @@ packages: engines: {node: '>= 6.13.0'} dev: true - /node-releases@2.0.14: - resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + /node-releases@2.0.17: + resolution: {integrity: sha512-Ww6ZlOiEQfPfXM45v17oabk77Z7mg5bOt7AjDyzy7RjK9OrLrLC8dyZQoAPEOtFX9SaNf1Tdvr5gRJWdTJj7GA==} dev: true /nopt@7.2.1: @@ -4407,8 +4407,8 @@ packages: engines: {node: '>=10'} dependencies: hosted-git-info: 4.1.0 - is-core-module: 2.14.0 - semver: 7.6.2 + is-core-module: 2.15.0 + semver: 7.6.3 validate-npm-package-license: 3.0.4 dev: true @@ -4664,8 +4664,8 @@ packages: yaml: 2.4.5 dev: true - /postcss-nested@6.0.1(postcss@8.4.39): - resolution: {integrity: sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==} + /postcss-nested@6.2.0(postcss@8.4.39): + resolution: {integrity: 
sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==} engines: {node: '>=12.0'} peerDependencies: postcss: ^8.2.14 @@ -4985,7 +4985,7 @@ packages: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} hasBin: true dependencies: - is-core-module: 2.14.0 + is-core-module: 2.15.0 path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 dev: true @@ -5049,29 +5049,29 @@ packages: fsevents: 2.3.3 dev: true - /rollup@4.18.1: - resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==} + /rollup@4.19.0: + resolution: {integrity: sha512-5r7EYSQIowHsK4eTZ0Y81qpZuJz+MUuYeqmmYmRMl1nwhdmbiYqt5jwzf6u7wyOzJgYqtCRMtVRKOtHANBz7rA==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true dependencies: '@types/estree': 1.0.5 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.18.1 - '@rollup/rollup-android-arm64': 4.18.1 - '@rollup/rollup-darwin-arm64': 4.18.1 - '@rollup/rollup-darwin-x64': 4.18.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.18.1 - '@rollup/rollup-linux-arm-musleabihf': 4.18.1 - '@rollup/rollup-linux-arm64-gnu': 4.18.1 - '@rollup/rollup-linux-arm64-musl': 4.18.1 - '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1 - '@rollup/rollup-linux-riscv64-gnu': 4.18.1 - '@rollup/rollup-linux-s390x-gnu': 4.18.1 - '@rollup/rollup-linux-x64-gnu': 4.18.1 - '@rollup/rollup-linux-x64-musl': 4.18.1 - '@rollup/rollup-win32-arm64-msvc': 4.18.1 - '@rollup/rollup-win32-ia32-msvc': 4.18.1 - '@rollup/rollup-win32-x64-msvc': 4.18.1 + '@rollup/rollup-android-arm-eabi': 4.19.0 + '@rollup/rollup-android-arm64': 4.19.0 + '@rollup/rollup-darwin-arm64': 4.19.0 + '@rollup/rollup-darwin-x64': 4.19.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.19.0 + '@rollup/rollup-linux-arm-musleabihf': 4.19.0 + '@rollup/rollup-linux-arm64-gnu': 4.19.0 + '@rollup/rollup-linux-arm64-musl': 4.19.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.19.0 + '@rollup/rollup-linux-riscv64-gnu': 4.19.0 + '@rollup/rollup-linux-s390x-gnu': 4.19.0 + '@rollup/rollup-linux-x64-gnu': 4.19.0 + '@rollup/rollup-linux-x64-musl': 4.19.0 + '@rollup/rollup-win32-arm64-msvc': 4.19.0 + '@rollup/rollup-win32-ia32-msvc': 4.19.0 + '@rollup/rollup-win32-x64-msvc': 4.19.0 fsevents: 2.3.3 dev: true @@ -5126,8 +5126,8 @@ packages: hasBin: true dev: true - /semver@7.6.2: - resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} + /semver@7.6.3: + resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} engines: {node: '>=10'} hasBin: true dev: true @@ -5476,7 +5476,7 @@ packages: postcss-import: 15.1.0(postcss@8.4.39) postcss-js: 4.0.1(postcss@8.4.39) postcss-load-config: 4.0.2(postcss@8.4.39) - postcss-nested: 6.0.1(postcss@8.4.39) + postcss-nested: 6.2.0(postcss@8.4.39) postcss-selector-parser: 6.1.1 resolve: 1.22.8 sucrase: 3.35.0 @@ -5722,8 +5722,8 @@ packages: engines: {node: '>=4'} dev: true - /unimport@3.8.0(rollup@2.79.1): - resolution: {integrity: sha512-leq5bfNxyytAer8cYPi0dR0L6p8ZnZ8NxR9TKsSIbJM47TOxC5qURJXQZ8xuBGqLakUqYO6CvVtf3lWKo9k+8A==} + /unimport@3.9.0(rollup@2.79.1): + resolution: {integrity: sha512-H2ftTISja1BonUVdOKRos6HC6dqYDR40dQTZY3zIDJ/5/z4ihncuL0LqLvtxYqUDMib41eAtunQUhXIWTCZ8rA==} dependencies: '@rollup/pluginutils': 5.1.0(rollup@2.79.1) acorn: 8.12.1 @@ -5772,7 +5772,7 @@ packages: local-pkg: 0.5.0 magic-string: 0.30.10 minimatch: 9.0.5 - unimport: 
3.8.0(rollup@2.79.1) + unimport: 3.9.0(rollup@2.79.1) unplugin: 1.11.0 transitivePeerDependencies: - rollup @@ -5924,7 +5924,7 @@ packages: dependencies: esbuild: 0.21.5 postcss: 8.4.39 - rollup: 4.18.1 + rollup: 4.19.0 optionalDependencies: fsevents: 2.3.3 dev: true @@ -6147,25 +6147,25 @@ packages: workbox-core: 7.1.0 dev: true - /workerd@1.20240712.0: - resolution: {integrity: sha512-hdIHZif82hBDy9YnMtcmDGgbLU5f2P2aGpi/X8EKhTSLDppVUGrkY3XB536J4jGjA2D5dS0FUEXCl5bAJEed8Q==} + /workerd@1.20240718.0: + resolution: {integrity: sha512-w7lOLRy0XecQTg/ujTLWBiJJuoQvzB3CdQ6/8Wgex3QxFhV9Pbnh3UbwIuUfMw3OCCPQc4o7y+1P+mISAgp6yg==} engines: {node: '>=16'} hasBin: true requiresBuild: true optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20240712.0 - '@cloudflare/workerd-darwin-arm64': 1.20240712.0 - '@cloudflare/workerd-linux-64': 1.20240712.0 - '@cloudflare/workerd-linux-arm64': 1.20240712.0 - '@cloudflare/workerd-windows-64': 1.20240712.0 + '@cloudflare/workerd-darwin-64': 1.20240718.0 + '@cloudflare/workerd-darwin-arm64': 1.20240718.0 + '@cloudflare/workerd-linux-64': 1.20240718.0 + '@cloudflare/workerd-linux-arm64': 1.20240718.0 + '@cloudflare/workerd-windows-64': 1.20240718.0 dev: true - /wrangler@3.65.0: - resolution: {integrity: sha512-IDy4ttyJZssazAd5CXHw4NWeZFGxngdNF5m2ogltdT3CV7uHfCvPVdMcr4uNMpRZd0toHmAE3LtQeXxDFFp88A==} + /wrangler@3.65.1: + resolution: {integrity: sha512-Z5NyrbpGMQCpim/6VnI1im0/Weh5+CU1sdep1JbfFxHjn/Jt9K+MeUq+kCns5ubkkdRx2EYsusB/JKyX2JdJ4w==} engines: {node: '>=16.17.0'} hasBin: true peerDependencies: - '@cloudflare/workers-types': ^4.20240712.0 + '@cloudflare/workers-types': ^4.20240718.0 peerDependenciesMeta: '@cloudflare/workers-types': optional: true @@ -6177,7 +6177,7 @@ packages: chokidar: 3.6.0 date-fns: 3.6.0 esbuild: 0.17.19 - miniflare: 3.20240712.0 + miniflare: 3.20240718.0 nanoid: 3.3.7 path-to-regexp: 6.2.2 resolve: 1.22.8 diff --git a/app/src/scripts/datasets/base.ts b/app/src/scripts/datasets/base.ts index dd2259ecf..96c125ead 100644 --- a/app/src/scripts/datasets/base.ts +++ b/app/src/scripts/datasets/base.ts @@ -12,22 +12,24 @@ export function createScaleDatasets({ type Key = keyof typeof groupedKeysToURLPath; type ResourceData = ReturnType>; - type ResourceDatasets = Record, ResourceData>; + type ResourceDatasets = Record, ResourceData>; const datasets = groupedKeysToURLPath as any as ResourceDatasets; for (const key in groupedKeysToURLPath) { - if ((key as Key) !== "ohlc") { - datasets[key as unknown as Exclude] = createResourceDataset({ - scale, - path: groupedKeysToURLPath[key as Key] as any, - }); + if ((key as Key) !== "price") { + datasets[key as unknown as Exclude] = createResourceDataset( + { + scale, + path: groupedKeysToURLPath[key as Key] as any, + }, + ); } } const price = createResourceDataset({ scale, - path: `/${scale}-to-ohlc`, + path: `/${scale}-to-price`, }); Object.assign(datasets, { price }); diff --git a/app/src/scripts/datasets/date.ts b/app/src/scripts/datasets/date.ts index fb08071c7..f5f5a8382 100644 --- a/app/src/scripts/datasets/date.ts +++ b/app/src/scripts/datasets/date.ts @@ -10,13 +10,13 @@ export function createDateDatasets({ type Key = keyof typeof groupedKeysToURLPath; type ResourceData = ReturnType>; - type ResourceDatasets = Record, ResourceData>; + type ResourceDatasets = Record, ResourceData>; const datasets = groupedKeysToURLPath as any as ResourceDatasets; for (const key in groupedKeysToURLPath) { - if ((key as Key) !== "ohlc") { - datasets[key as Exclude] = createResourceDataset<"date">({ + if ((key as 
Key) !== "price") { + datasets[key as Exclude] = createResourceDataset<"date">({ scale: "date", path: groupedKeysToURLPath[key as Key], }); @@ -25,7 +25,7 @@ export function createDateDatasets({ const price = createResourceDataset<"date", OHLC>({ scale: "date", - path: "/date-to-ohlc", + path: "/date-to-price", }); Object.assign(datasets, { price }); diff --git a/app/src/scripts/datasets/height.ts b/app/src/scripts/datasets/height.ts index 6d6512d5d..0cc38813b 100644 --- a/app/src/scripts/datasets/height.ts +++ b/app/src/scripts/datasets/height.ts @@ -8,13 +8,13 @@ export function createHeightDatasets({ type Key = keyof typeof groupedKeysToURLPath; type ResourceData = ReturnType>; - type ResourceDatasets = Record, ResourceData>; + type ResourceDatasets = Record, ResourceData>; const datasets = groupedKeysToURLPath as any as ResourceDatasets; for (const key in groupedKeysToURLPath) { - if ((key as Key) !== "ohlc") { - datasets[key as Exclude] = createResourceDataset<"height">({ + if ((key as Key) !== "price") { + datasets[key as Exclude] = createResourceDataset<"height">({ scale: "height", path: groupedKeysToURLPath[key as Key], }); @@ -23,7 +23,7 @@ export function createHeightDatasets({ const price = createResourceDataset<"height", OHLC>({ scale: "height", - path: "/height-to-ohlc", + path: "/height-to-price", }); Object.assign(datasets, { price }); diff --git a/app/src/scripts/datasets/index.ts b/app/src/scripts/datasets/index.ts index 465f526db..abdfc1f52 100644 --- a/app/src/scripts/datasets/index.ts +++ b/app/src/scripts/datasets/index.ts @@ -22,7 +22,7 @@ export function createDatasets() { let dataset: ResourceDataset; - if (path === `/${scale}-to-ohlc`) { + if (path === `/${scale}-to-price`) { dataset = createResourceDataset({ scale, path, diff --git a/app/src/scripts/datasets/resource.ts b/app/src/scripts/datasets/resource.ts index 38941215a..1d906af84 100644 --- a/app/src/scripts/datasets/resource.ts +++ b/app/src/scripts/datasets/resource.ts @@ -4,29 +4,27 @@ import { createRWS } from "/src/solid/rws"; import { HEIGHT_CHUNK_SIZE } from "."; +const USE_LOCAL_URL = true; +const LOCAL_URL = "http://localhost:3111"; +const WEB_URL = "https://api.satonomics.xyz"; +const BACKUP_WEB_URL = "https://api-bkp.satonomics.xyz"; + export function createResourceDataset< Scale extends ResourceScale, Type extends OHLC | number = number, >({ scale, path }: { scale: Scale; path: string }) { - type Dataset = Scale extends "date" - ? FetchedDateDataset - : FetchedHeightDataset; - type Value = DatasetValue< Type extends number ? SingleValueData : CandlestickData >; const baseURL = `${ - location.hostname === "localhost" - ? "http://localhost:3110" - : "https://api.satonomics.xyz" - // "https://api.satonomics.xyz" + USE_LOCAL_URL && location.hostname === "localhost" ? LOCAL_URL : WEB_URL }${path}`; const backupURL = `${ - location.hostname === "localhost" - ? "http://localhost:3110" - : "https://api-bkp.satonomics.xyz" + USE_LOCAL_URL && location.hostname === "localhost" + ? 
LOCAL_URL + : BACKUP_WEB_URL }${path}`; return createRoot((dispose) => { @@ -36,14 +34,14 @@ export function createResourceDataset< ) .fill(null) .map((): FetchedResult => { - const json = createRWS | null>(null); + const json = createRWS | null>(null); return { at: null, json, loading: false, vec: createMemo(() => { - const map = json()?.dataset.map || null; + const map = json()?.dataset.map; if (!map) { return null; @@ -186,7 +184,7 @@ export function createResourceDataset< console.log(`fetch: ${path}?chunk=${id}`); - const previousMap = fetched.json()?.dataset.map; + const previousMap = fetched.json()?.dataset; const newMap = json.dataset.map; const previousLength = Object.keys(previousMap || []).length; diff --git a/app/src/scripts/datasets/types.d.ts b/app/src/scripts/datasets/types.d.ts index afb0a6527..1a944898b 100644 --- a/app/src/scripts/datasets/types.d.ts +++ b/app/src/scripts/datasets/types.d.ts @@ -7,14 +7,6 @@ type DatasetValue = T & Valued; interface ResourceDataset< Scale extends ResourceScale, Type extends OHLC | number = number, - FetchedDataset extends - | FetchedDateDataset - | FetchedHeightDataset = Scale extends "date" - ? FetchedDateDataset - : FetchedHeightDataset, - Value extends SingleValueData | CandlestickData = Type extends number - ? SingleValueData - : CandlestickData, > { scale: Scale; url: string; @@ -26,33 +18,20 @@ interface ResourceDataset< interface FetchedResult< Scale extends ResourceScale, Type extends number | OHLC, - Dataset extends - | FetchedDateDataset - | FetchedHeightDataset = Scale extends "date" - ? FetchedDateDataset - : FetchedHeightDataset, Value extends DatasetValue = DatasetValue< Type extends number ? SingleValueData : CandlestickData >, > { at: Date | null; - json: RWS | null>; + json: RWS | null>; vec: Accessor; loading: boolean; } -interface FetchedJSON< - Scale extends ResourceScale, - Type extends number | OHLC, - Dataset extends - | FetchedDateDataset - | FetchedHeightDataset = Scale extends "date" - ? FetchedDateDataset - : FetchedHeightDataset, -> { +interface FetchedJSON { source: FetchedSource; chunk: FetchedChunk; - dataset: FetchedDataset; + dataset: FetchedDataset; } type FetchedSource = string; @@ -63,21 +42,24 @@ interface FetchedChunk { next: string | null; } -interface FetchedDataset< +type FetchedDataset< Scale extends ResourceScale, Type extends number | OHLC, - Dataset extends - | FetchedDateDataset - | FetchedHeightDataset = Scale extends "date" - ? FetchedDateDataset - : FetchedHeightDataset, -> { +> = Scale extends "date" + ? 
FetchedDateDataset + : FetchedHeightDataset; + +interface Versioned { version: number; - map: Dataset; } -type FetchedDateDataset = Record; -type FetchedHeightDataset = T[]; +interface FetchedDateDataset extends Versioned { + map: Record; +} + +interface FetchedHeightDataset extends Versioned { + map: Type[]; +} interface OHLC { open: number; diff --git a/app/src/scripts/presets/apply.ts b/app/src/scripts/presets/apply.ts index 999700e48..35288472f 100644 --- a/app/src/scripts/presets/apply.ts +++ b/app/src/scripts/presets/apply.ts @@ -215,7 +215,7 @@ export function applySeriesList({ if (chartIndex === 0) { const datasetPath = - priceDataset || (`/${scale}-to-ohlc` satisfies AnyDatasetPath); + priceDataset || (`/${scale}-to-price` satisfies AnyDatasetPath); const dataset = datasets.getOrImport(scale, datasetPath); diff --git a/parser/.gitignore b/parser/.gitignore index 0ee279362..4199ebbb3 100644 --- a/parser/.gitignore +++ b/parser/.gitignore @@ -16,4 +16,4 @@ benches parser.log config.toml -*\ copy.rs +*\ copy diff --git a/parser/src/actions/export.rs b/parser/src/actions/export.rs index 3a91cce8d..8cda80977 100644 --- a/parser/src/actions/export.rs +++ b/parser/src/actions/export.rs @@ -4,15 +4,15 @@ use crate::{ databases::Databases, datasets::AllDatasets, states::States, - structs::WNaiveDate, + structs::{Date, Height}, utils::{log, time}, }; pub struct ExportedData<'a> { pub databases: Option<&'a mut Databases>, pub datasets: &'a mut AllDatasets, - pub date: WNaiveDate, - pub height: usize, + pub date: Date, + pub height: Height, pub states: Option<&'a States>, } diff --git a/parser/src/actions/iter_blocks.rs b/parser/src/actions/iter_blocks.rs index 61a3f6c31..41279f207 100644 --- a/parser/src/actions/iter_blocks.rs +++ b/parser/src/actions/iter_blocks.rs @@ -8,11 +8,11 @@ use parse::ParseData; use crate::{ actions::{export, find_first_inserted_unsafe_height, parse}, - bitcoin::{check_if_height_safe, BitcoinDB, NUMBER_OF_UNSAFE_BLOCKS}, + bitcoin::BitcoinDB, databases::Databases, datasets::{AllDatasets, ComputeData}, states::{AddressCohortsDurableStates, States, UTXOCohortsDurableStates}, - structs::{DateData, WNaiveDate}, + structs::{Date, DateData, MapKey}, utils::{generate_allocation_files, log, time}, }; @@ -44,7 +44,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re log(&format!("Starting parsing at height: {height}")); - let mut block_iter = bitcoin_db.iter_block(height, block_count); + let mut block_iter = bitcoin_db.iter_block(height.to_usize(), block_count); let mut next_block_opt = None; let mut blocks_loop_date = None; @@ -70,7 +70,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re if let Some(current_block) = current_block_opt { let timestamp = current_block.header.time; - let current_block_date = WNaiveDate::from_timestamp(timestamp); + let current_block_date = Date::from_timestamp(timestamp); let current_block_height = height + blocks_loop_i; if states.address_cohorts_durable_states.is_none() @@ -95,10 +95,14 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re let next_block_date = next_block_opt .as_ref() - .map(|next_block| WNaiveDate::from_timestamp(next_block.header.time)); + .map(|next_block| Date::from_timestamp(next_block.header.time)); // Always run for the first block of the loop if blocks_loop_date.is_none() { + log(&format!( + "Processing {current_block_date} (height: {height})..." 
+ )); + blocks_loop_date.replace(current_block_date); if states @@ -112,9 +116,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re .push(DateData::new(current_block_date, vec![])); } - log(&format!( - "Processing {current_block_date} (height: {height})..." - )); + processed_dates.insert(current_block_date); } let blocks_loop_date = blocks_loop_date.unwrap(); @@ -154,17 +156,12 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re blocks_loop_i += 1; if is_date_last_block { - processed_dates.insert(blocks_loop_date); - height += blocks_loop_i; let is_new_month = next_block_date .map_or(true, |next_block_date| next_block_date.day() == 1); - let is_close_to_the_end = - height > (block_count - (NUMBER_OF_UNSAFE_BLOCKS * 3)); - - if is_new_month || is_close_to_the_end { + if is_new_month || height.is_close_to_end(block_count) { break 'days; } @@ -177,7 +174,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re } // Don't remember why -1 - let last_height = height - 1; + let last_height = height - 1_u32; log(&format!( "Parsing month took {} seconds (last height: {last_height})\n", @@ -186,15 +183,19 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re if first_unsafe_heights.computed <= last_height { time("Computing datasets", || { + let dates = processed_dates.into_iter().collect_vec(); + + let heights = processed_heights.into_iter().collect_vec(); + datasets.compute(ComputeData { - dates: &processed_dates.into_iter().collect_vec(), - heights: &processed_heights.into_iter().collect_vec(), + dates: &dates, + heights: &heights, }) }); } if should_export { - let is_safe = check_if_height_safe(height, block_count); + let is_safe = height.is_safe(block_count); export(ExportedData { databases: is_safe.then_some(&mut databases), diff --git a/parser/src/actions/min_height.rs b/parser/src/actions/min_height.rs index 4a3e403ae..8db483230 100644 --- a/parser/src/actions/min_height.rs +++ b/parser/src/actions/min_height.rs @@ -2,17 +2,18 @@ use crate::{ databases::Databases, datasets::{AllDatasets, AnyDatasets}, states::States, + structs::Height, utils::log, }; #[derive(Default, Debug)] pub struct Heights { - pub inserted: usize, - pub computed: usize, + pub inserted: Height, + pub computed: Height, } impl Heights { - pub fn min(&self) -> usize { + pub fn min(&self) -> Height { self.inserted.min(self.computed) } } @@ -93,13 +94,13 @@ pub fn find_first_inserted_unsafe_height( }) { None } else { - Some(last_date_height + 1) + Some(last_date_height + 1_u32) } }) ).unwrap_or_default(); Some(Heights { - inserted: last_safe_height + 1, + inserted: last_safe_height + 1_u32, computed, }) } diff --git a/parser/src/actions/parse.rs b/parser/src/actions/parse.rs index d81c3d32f..ded87a1df 100644 --- a/parser/src/actions/parse.rs +++ b/parser/src/actions/parse.rs @@ -17,8 +17,8 @@ use crate::{ States, UTXOCohortsOneShotStates, UTXOCohortsSentStates, }, structs::{ - Address, AddressData, AddressRealizedData, BlockData, BlockPath, Counter, EmptyAddressData, - PartialTxoutData, Price, SentData, TxData, TxoutIndex, WAmount, WNaiveDate, + Address, AddressData, AddressRealizedData, Amount, BlockData, BlockPath, Counter, Date, + EmptyAddressData, Height, PartialTxoutData, Price, SentData, TxData, TxoutIndex, }, }; @@ -29,9 +29,9 @@ pub struct ParseData<'a> { pub compute_addresses: bool, pub databases: &'a mut Databases, pub datasets: &'a mut AllDatasets, - pub date: WNaiveDate, - pub 
first_date_height: usize, - pub height: usize, + pub date: Date, + pub first_date_height: Height, + pub height: Height, pub is_date_last_block: bool, pub states: &'a mut States, pub timestamp: u32, @@ -61,13 +61,11 @@ pub fn parse( let date_index = states.date_data_vec.len() - 1; - let previous_timestamp = if height > 0 { - Some( - datasets - .block_metadata - .timestamp - .get_or_import(&(height - 1)), - ) + let previous_timestamp = if let Some(previous_height) = height.checked_sub(1) { + datasets + .block_metadata + .timestamp + .get_or_import(&Height::new(previous_height)) } else { None }; @@ -105,20 +103,20 @@ pub fn parse( .last_mut() .unwrap() .blocks - .push(BlockData::new(height as u32, block_price, timestamp)); + .push(BlockData::new(height, block_price, timestamp)); let mut block_path_to_sent_data: BTreeMap = BTreeMap::default(); // let mut received_data: ReceivedData = ReceivedData::default(); let mut address_index_to_address_realized_data: BTreeMap = BTreeMap::default(); - let mut coinbase = WAmount::ZERO; - let mut satblocks_destroyed = WAmount::ZERO; - let mut satdays_destroyed = WAmount::ZERO; - let mut amount_sent = WAmount::ZERO; + let mut coinbase = Amount::ZERO; + let mut satblocks_destroyed = Amount::ZERO; + let mut satdays_destroyed = Amount::ZERO; + let mut amount_sent = Amount::ZERO; let mut transaction_count = 0; let mut fees = vec![]; - let mut fees_total = WAmount::ZERO; + let mut fees_total = Amount::ZERO; let ( TxoutsParsingResults { @@ -183,7 +181,7 @@ pub fn parse( // --- let mut utxos = BTreeMap::new(); - let mut spendable_amount = WAmount::ZERO; + let mut spendable_amount = Amount::ZERO; let is_coinbase = tx.is_coinbase(); @@ -191,8 +189,8 @@ pub fn parse( unreachable!(); } - let mut inputs_sum = WAmount::ZERO; - let mut outputs_sum = WAmount::ZERO; + let mut inputs_sum = Amount::ZERO; + let mut outputs_sum = Amount::ZERO; let last_block = states.date_data_vec.last_mut_block().unwrap(); @@ -205,7 +203,7 @@ pub fn parse( panic!("vout can indeed be bigger than u16::MAX !"); } - let amount = WAmount::wrap(tx_out.value); + let amount = Amount::wrap(tx_out.value); if is_coinbase { coinbase += amount; @@ -440,8 +438,7 @@ pub fn parse( .or_default() .send(input_amount); - satblocks_destroyed += - input_amount * (height as u64 - input_block_data.height as u64); + satblocks_destroyed += input_amount * (height - input_block_data.height); satdays_destroyed += input_amount * date.signed_duration_since(*input_date_data.date).num_days() as u64; @@ -569,7 +566,7 @@ pub fn parse( let block_data = states.date_data_vec.get_block_data(block_path).unwrap(); - if block_data.height != height as u32 { + if block_data.height != height { states .utxo_cohorts_durable_states .as_mut() @@ -585,7 +582,7 @@ pub fn parse( let last_block_data = states.date_data_vec.last_block().unwrap(); - if last_block_data.height != height as u32 { + if last_block_data.height != height { unreachable!() } @@ -744,7 +741,7 @@ pub fn parse( compute_addresses, databases, date, - date_blocks_range: &(first_date_height..=height), + date_blocks_range: &(*first_date_height..=*height), date_first_height: first_date_height, difficulty, fees: &fees, @@ -763,7 +760,7 @@ pub fn parse( pub struct TxoutsParsingResults { partial_txout_data_vec: Vec>, - provably_unspendable: WAmount, + provably_unspendable: Amount, op_returns: usize, } @@ -776,7 +773,7 @@ fn pre_process_outputs( empty_addresses: &mut Counter, address_to_address_index: &mut AddressToAddressIndex, ) -> TxoutsParsingResults { - let mut provably_unspendable 
= WAmount::ZERO; + let mut provably_unspendable = Amount::ZERO; let mut op_returns = 0; let mut partial_txout_data_vec = block @@ -785,12 +782,12 @@ fn pre_process_outputs( .flat_map(|tx| &tx.output) .map(|txout| { let script = &txout.script_pubkey; - let amount = WAmount::wrap(txout.value); + let amount = Amount::wrap(txout.value); // 0 sats outputs are possible and allowed ! // https://mempool.space/tx/2f2442f68e38b980a6c4cec21e71851b0d8a5847d85208331a27321a9967bbd6 // https://bitcoin.stackexchange.com/questions/104937/transaction-outputs-with-value-0 - if amount == WAmount::ZERO { + if amount == Amount::ZERO { return None; } @@ -859,7 +856,7 @@ fn pre_process_inputs<'a>( compute_addresses: bool, ) -> ( BTreeMap<&'a Txid, Option>, - BTreeMap)>, + BTreeMap)>, ) { let mut txid_to_tx_data: BTreeMap<&Txid, Option> = block .txdata @@ -937,7 +934,7 @@ fn compute_address_index_to_address_data( address_index_to_address_data_db: &mut AddressIndexToAddressData, address_index_to_empty_address_data_db: &mut AddressIndexToEmptyAddressData, partial_txout_data_vec: &[Option], - txout_index_to_amount_and_address_index: &BTreeMap)>, + txout_index_to_amount_and_address_index: &BTreeMap)>, compute_addresses: bool, ) -> BTreeMap { if !compute_addresses { diff --git a/parser/src/bitcoin/daemon.rs b/parser/src/bitcoin/daemon.rs index e728c660e..45f58e52b 100644 --- a/parser/src/bitcoin/daemon.rs +++ b/parser/src/bitcoin/daemon.rs @@ -4,6 +4,7 @@ use color_eyre::eyre::eyre; use serde_json::Value; use crate::{ + structs::Height, utils::{log, log_output, retry}, Config, }; @@ -71,10 +72,10 @@ impl BitcoinDaemon { } } - pub fn wait_for_new_block(&self, last_block_height: usize) { + pub fn wait_for_new_block(&self, last_block_height: Height) { log("Waiting for new block..."); - while self.get_blockchain_info().headers as usize == last_block_height { + while last_block_height == self.get_blockchain_info().headers { sleep(Duration::from_secs(5)) } } diff --git a/parser/src/bitcoin/height.rs b/parser/src/bitcoin/height.rs deleted file mode 100644 index 372cbbe2d..000000000 --- a/parser/src/bitcoin/height.rs +++ /dev/null @@ -1,5 +0,0 @@ -use super::NUMBER_OF_UNSAFE_BLOCKS; - -pub fn check_if_height_safe(height: usize, block_count: usize) -> bool { - height < block_count - NUMBER_OF_UNSAFE_BLOCKS -} diff --git a/parser/src/bitcoin/mod.rs b/parser/src/bitcoin/mod.rs index 22fbde05d..1f3d18a9e 100644 --- a/parser/src/bitcoin/mod.rs +++ b/parser/src/bitcoin/mod.rs @@ -2,10 +2,8 @@ mod addresses; mod consts; mod daemon; mod db; -mod height; pub use addresses::*; pub use consts::*; pub use daemon::*; pub use db::*; -pub use height::*; diff --git a/parser/src/databases/_trait.rs b/parser/src/databases/_trait.rs index 55f92f733..a7c632979 100644 --- a/parser/src/databases/_trait.rs +++ b/parser/src/databases/_trait.rs @@ -1,6 +1,9 @@ use std::{fs, io}; -use crate::{structs::WNaiveDate, utils::log}; +use crate::{ + structs::{Date, Height}, + utils::log, +}; use super::databases_folder_path; @@ -10,7 +13,7 @@ where { fn import() -> Self; - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()>; + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()>; fn folder<'a>() -> &'a str; diff --git a/parser/src/databases/address_index_to_address_data.rs b/parser/src/databases/address_index_to_address_data.rs index 58a2495b4..a516f3246 100644 --- a/parser/src/databases/address_index_to_address_data.rs +++ b/parser/src/databases/address_index_to_address_data.rs @@ -8,7 +8,7 @@ use 
allocative::Allocative; use rayon::prelude::*; use crate::{ - structs::{AddressData, WNaiveDate}, + structs::{AddressData, Date, Height}, utils::time, }; @@ -97,7 +97,11 @@ impl AddressIndexToAddressData { } fn open_all(&mut self) { - fs::read_dir(databases_folder_path(Self::folder())) + let path = Self::full_path(); + + fs::create_dir_all(&path).unwrap(); + + fs::read_dir(path) .unwrap() .map(|entry| { entry @@ -128,7 +132,7 @@ impl AnyDatabaseGroup for AddressIndexToAddressData { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { mem::take(&mut self.map) .into_par_iter() .try_for_each(|(_, db)| db.export())?; diff --git a/parser/src/databases/address_index_to_empty_address_data.rs b/parser/src/databases/address_index_to_empty_address_data.rs index d85e73d0b..9e2e89c64 100644 --- a/parser/src/databases/address_index_to_empty_address_data.rs +++ b/parser/src/databases/address_index_to_empty_address_data.rs @@ -7,7 +7,7 @@ use std::{ use allocative::Allocative; use rayon::prelude::*; -use crate::structs::{EmptyAddressData, WNaiveDate}; +use crate::structs::{Date, EmptyAddressData, Height}; use super::{AnyDatabaseGroup, Metadata, SizedDatabase}; @@ -103,7 +103,7 @@ impl AnyDatabaseGroup for AddressIndexToEmptyAddressData { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { mem::take(&mut self.map) .into_par_iter() .try_for_each(|(_, db)| db.export())?; diff --git a/parser/src/databases/address_to_address_index.rs b/parser/src/databases/address_to_address_index.rs index 68409a0bb..d63170701 100644 --- a/parser/src/databases/address_to_address_index.rs +++ b/parser/src/databases/address_to_address_index.rs @@ -3,7 +3,7 @@ use std::{collections::BTreeMap, mem, thread}; use allocative::Allocative; use rayon::prelude::*; -use crate::structs::{Address, WNaiveDate}; +use crate::structs::{Address, Date, Height}; use super::{ AnyDatabaseGroup, Database, Metadata, SizedDatabase, U8x19, U8x31, @@ -261,7 +261,7 @@ impl AnyDatabaseGroup for AddressToAddressIndex { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { thread::scope(|s| { s.spawn(|| { mem::take(&mut self.p2pk) diff --git a/parser/src/databases/metadata.rs b/parser/src/databases/metadata.rs index 5d424a6b2..f1d41de5e 100644 --- a/parser/src/databases/metadata.rs +++ b/parser/src/databases/metadata.rs @@ -8,7 +8,7 @@ use std::{ use crate::{ io::Binary, - structs::{Counter, WNaiveDate}, + structs::{Counter, Date, Height}, }; #[derive(Default, Debug, Encode, Decode, Allocative)] @@ -39,7 +39,7 @@ impl Metadata { } } - pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { if self.last_height.unwrap_or_default() < height { self.last_height.replace(height); } @@ -77,8 +77,8 @@ impl Metadata { pub struct MetadataData { pub serial: usize, pub len: Counter, - pub last_height: Option, - pub last_date: Option, + pub last_height: Option, + pub last_date: Option, } impl MetadataData { diff --git a/parser/src/databases/mod.rs b/parser/src/databases/mod.rs index e8edad1db..e436b8fd3 100644 --- a/parser/src/databases/mod.rs +++ b/parser/src/databases/mod.rs @@ -22,7 +22,10 @@ pub use txid_to_tx_data::*; 
pub use txout_index_to_address_index::*; pub use txout_index_to_amount::*; -use crate::{structs::WNaiveDate, utils::time}; +use crate::{ + structs::{Date, Height}, + utils::time, +}; #[derive(Allocative)] pub struct Databases { @@ -58,7 +61,7 @@ impl Databases { } } - pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { thread::scope(|s| { s.spawn(|| { time("> Database txid_to_tx_data", || { @@ -115,13 +118,13 @@ impl Databases { let _ = self.txout_index_to_amount.reset(); } - pub fn check_if_needs_to_compute_addresses(&self, height: usize, date: WNaiveDate) -> bool { - let check_height = |last_height: Option| { + pub fn check_if_needs_to_compute_addresses(&self, height: Height, date: Date) -> bool { + let check_height = |last_height: Option| { last_height.map_or(true, |last_height| last_height < height) }; let check_date = - |last_date: Option| last_date.map_or(true, |last_date| last_date < date); + |last_date: Option| last_date.map_or(true, |last_date| last_date < date); let check_metadata = |metadata: &Metadata| { check_height(metadata.last_height) || check_date(metadata.last_date) @@ -133,8 +136,8 @@ impl Databases { pub fn check_if_usable( &self, - min_initial_last_address_height: Option, - min_initial_last_address_date: Option, + min_initial_last_address_height: Option, + min_initial_last_address_date: Option, ) -> bool { let are_tx_databases_in_sync = self .txout_index_to_amount diff --git a/parser/src/databases/txid_to_tx_data.rs b/parser/src/databases/txid_to_tx_data.rs index eaa1ce23c..3e7ac320b 100644 --- a/parser/src/databases/txid_to_tx_data.rs +++ b/parser/src/databases/txid_to_tx_data.rs @@ -8,7 +8,7 @@ use allocative::Allocative; use bitcoin::Txid; use rayon::prelude::*; -use crate::structs::{TxData, WNaiveDate}; +use crate::structs::{Date, Height, TxData}; use super::{AnyDatabaseGroup, Metadata, SizedDatabase, U8x31}; @@ -127,7 +127,7 @@ impl AnyDatabaseGroup for TxidToTxData { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { mem::take(&mut self.map) .into_par_iter() .try_for_each(|(_, db)| db.export())?; diff --git a/parser/src/databases/txout_index_to_address_index.rs b/parser/src/databases/txout_index_to_address_index.rs index 1c2da8a89..d5bd35911 100644 --- a/parser/src/databases/txout_index_to_address_index.rs +++ b/parser/src/databases/txout_index_to_address_index.rs @@ -7,7 +7,7 @@ use std::{ use allocative::Allocative; use rayon::prelude::*; -use crate::structs::{TxoutIndex, WNaiveDate}; +use crate::structs::{Date, Height, TxoutIndex}; use super::{AnyDatabaseGroup, Metadata, SizedDatabase}; @@ -94,7 +94,7 @@ impl AnyDatabaseGroup for TxoutIndexToAddressIndex { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { mem::take(&mut self.map) .into_par_iter() .try_for_each(|(_, db)| db.export())?; diff --git a/parser/src/databases/txout_index_to_amount.rs b/parser/src/databases/txout_index_to_amount.rs index aefc70adb..473045ad8 100644 --- a/parser/src/databases/txout_index_to_amount.rs +++ b/parser/src/databases/txout_index_to_amount.rs @@ -7,12 +7,12 @@ use std::{ use allocative::Allocative; use rayon::prelude::*; -use crate::structs::{TxoutIndex, WAmount, WNaiveDate}; +use crate::structs::{Amount, Date, Height, TxoutIndex}; 
use super::{AnyDatabaseGroup, Metadata, SizedDatabase}; type Key = TxoutIndex; -type Value = WAmount; +type Value = Amount; type Database = SizedDatabase; #[derive(Allocative)] @@ -94,7 +94,7 @@ impl AnyDatabaseGroup for TxoutIndexToAmount { } } - fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> { + fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { mem::take(&mut self.map) .into_par_iter() .try_for_each(|(_, db)| db.export())?; diff --git a/parser/src/datasets/_traits/any_dataset.rs b/parser/src/datasets/_traits/any_dataset.rs index f08292267..cfe8a0653 100644 --- a/parser/src/datasets/_traits/any_dataset.rs +++ b/parser/src/datasets/_traits/any_dataset.rs @@ -3,7 +3,7 @@ use rayon::prelude::*; use crate::{ datasets::ComputeData, - structs::{AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, WNaiveDate}, + structs::{AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, Date, Height}, }; use super::MinInitialStates; @@ -11,23 +11,23 @@ use super::MinInitialStates; pub trait AnyDataset { fn get_min_initial_states(&self) -> &MinInitialStates; - fn needs_insert(&self, height: usize, date: WNaiveDate) -> bool { + fn needs_insert(&self, height: Height, date: Date) -> bool { self.needs_insert_height(height) || self.needs_insert_date(date) } #[inline(always)] - fn needs_insert_height(&self, height: usize) -> bool { + fn needs_insert_height(&self, height: Height) -> bool { !self.to_all_inserted_height_map_vec().is_empty() && self .get_min_initial_states() .inserted .first_unsafe_height - .unwrap_or(0) + .unwrap_or(Height::ZERO) <= height } #[inline(always)] - fn needs_insert_date(&self, date: WNaiveDate) -> bool { + fn needs_insert_date(&self, date: Date) -> bool { !self.to_all_inserted_date_map_vec().is_empty() && self .get_min_initial_states() @@ -117,18 +117,18 @@ pub trait AnyDataset { } #[inline(always)] - fn should_compute_height(&self, height: usize) -> bool { + fn should_compute_height(&self, height: Height) -> bool { !self.to_all_computed_height_map_vec().is_empty() && self .get_min_initial_states() .computed .first_unsafe_height - .unwrap_or(0) + .unwrap_or(Height::ZERO) <= height } #[inline(always)] - fn should_compute_date(&self, date: WNaiveDate) -> bool { + fn should_compute_date(&self, date: Date) -> bool { !self.to_all_computed_date_map_vec().is_empty() && self .get_min_initial_states() diff --git a/parser/src/datasets/_traits/min_initial_state.rs b/parser/src/datasets/_traits/min_initial_state.rs index 64e786f60..d0d26e4b4 100644 --- a/parser/src/datasets/_traits/min_initial_state.rs +++ b/parser/src/datasets/_traits/min_initial_state.rs @@ -1,6 +1,6 @@ use allocative::Allocative; -use crate::structs::{AnyDateMap, AnyHeightMap, WNaiveDate}; +use crate::structs::{AnyDateMap, AnyHeightMap, Date, Height}; use super::{AnyDataset, AnyDatasets}; @@ -33,10 +33,10 @@ impl MinInitialStates { #[derive(Default, Debug, Clone, Copy, Allocative)] pub struct MinInitialState { - pub first_unsafe_date: Option, - pub first_unsafe_height: Option, - pub last_date: Option, - pub last_height: Option, + pub first_unsafe_date: Option, + pub first_unsafe_height: Option, + pub last_date: Option, + pub last_height: Option, } enum Mode { @@ -172,8 +172,8 @@ impl MinInitialState { fn min_datasets_date( datasets: &dyn AnyDatasets, is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool, - map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option, - ) -> Option { + map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option, + ) -> Option { Self::min_date( datasets 
.to_any_dataset_vec() @@ -186,8 +186,8 @@ impl MinInitialState { fn min_datasets_height( datasets: &dyn AnyDatasets, is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool, - map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option, - ) -> Option { + map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option, + ) -> Option { Self::min_height( datasets .to_any_dataset_vec() @@ -235,38 +235,38 @@ impl MinInitialState { #[inline(always)] fn compute_min_initial_last_date_from_dataset( arr: &[&(dyn AnyDateMap + Sync + Send)], - ) -> Option { + ) -> Option { Self::min_date(arr.iter().map(|map| map.get_initial_last_date())) } #[inline(always)] fn compute_min_initial_last_height_from_dataset( arr: &[&(dyn AnyHeightMap + Sync + Send)], - ) -> Option { + ) -> Option { Self::min_height(arr.iter().map(|map| map.get_initial_last_height())) } #[inline(always)] fn compute_min_initial_first_unsafe_date_from_dataset( arr: &[&(dyn AnyDateMap + Sync + Send)], - ) -> Option { + ) -> Option { Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date())) } #[inline(always)] fn compute_min_initial_first_unsafe_height_from_dataset( arr: &[&(dyn AnyHeightMap + Sync + Send)], - ) -> Option { + ) -> Option { Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height())) } #[inline(always)] - fn min_date(iter: impl Iterator>) -> Option { + fn min_date(iter: impl Iterator>) -> Option { iter.min().and_then(|opt| opt) } #[inline(always)] - fn min_height(iter: impl Iterator>) -> Option { + fn min_height(iter: impl Iterator>) -> Option { iter.min().and_then(|opt| opt) } } diff --git a/parser/src/datasets/address/all_metadata.rs b/parser/src/datasets/address/all_metadata.rs index d32649eff..d20ef0708 100644 --- a/parser/src/datasets/address/all_metadata.rs +++ b/parser/src/datasets/address/all_metadata.rs @@ -62,7 +62,7 @@ impl AllAddressesMetadataDataset { } } - pub fn compute(&mut self, &ComputeData { heights, dates }: &ComputeData) { + pub fn compute(&mut self, &ComputeData { heights, dates, .. 
}: &ComputeData) { self.new_addresses .multi_insert_net_change(heights, dates, &mut self.created_addreses, 1) } diff --git a/parser/src/datasets/address/cohort.rs b/parser/src/datasets/address/cohort.rs index f0815c524..72690284f 100644 --- a/parser/src/datasets/address/cohort.rs +++ b/parser/src/datasets/address/cohort.rs @@ -6,7 +6,7 @@ use crate::{ AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset, }, states::{AddressCohortDurableStates, AddressCohortId}, - structs::{AddressSplit, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate}, + structs::{AddressSplit, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, Date, Height}, }; use super::cohort_metadata::MetadataDataset; @@ -60,47 +60,47 @@ impl CohortDataset { vec![&self.all, &self.illiquid, &self.liquid, &self.highly_liquid] } - pub fn needs_insert_metadata(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_metadata(&self, height: Height, date: Date) -> bool { self.metadata.needs_insert(height, date) } - pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.utxo.needs_insert(height, date)) } - pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.capitalization.needs_insert(height, date)) } - pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.supply.needs_insert(height, date)) } - pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.price_paid.needs_insert(height, date)) } - pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.realized.needs_insert(height, date)) } - pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.unrealized.needs_insert(height, date)) } - pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_input(&self, height: Height, date: Date) -> bool { self.sub_datasets_vec() .iter() .any(|sub| sub.input.needs_insert(height, date)) diff --git a/parser/src/datasets/address/mod.rs b/parser/src/datasets/address/mod.rs index 238b79ded..8326ec6b9 100644 --- a/parser/src/datasets/address/mod.rs +++ b/parser/src/datasets/address/mod.rs @@ -6,7 +6,11 @@ use allocative::Allocative; use itertools::Itertools; use rayon::prelude::*; -use crate::{states::SplitByAddressCohort, structs::BiMap, WNaiveDate}; +use crate::{ + states::SplitByAddressCohort, + structs::{BiMap, Height}, + Date, +}; use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset}; @@ -59,7 +63,7 @@ impl AddressDatasets { .for_each(|(cohort, _)| cohort.insert(insert_data)) } - pub fn needs_durable_states(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_durable_states(&self, height: Height, date: Date) -> bool { let needs_insert_utxo = 
self.needs_insert_utxo(height, date); let needs_insert_capitalization = self.needs_insert_capitalization(height, date); let needs_insert_supply = self.needs_insert_supply(height, date); @@ -71,57 +75,57 @@ impl AddressDatasets { || needs_one_shot_states } - pub fn needs_one_shot_states(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool { self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date) } - // pub fn needs_sent_states(&self, height: usize, date: WNaiveDate) -> bool { + // pub fn needs_sent_states(&self, height: Height, date: WNaiveDate) -> bool { // self.needs_insert_input(height, date) || self.needs_insert_realized(height, date) // } - pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool { self.cohorts .as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_utxo(height, date)) } - pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool { self.cohorts .as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date)) } - pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool { self.cohorts .as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_supply(height, date)) } - pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool { self.cohorts .as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date)) } - // pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool { + // pub fn needs_insert_realized(&self, height: Height, date: WNaiveDate) -> bool { // self.cohorts // .as_vec() // .iter() // .any(|(dataset, _)| dataset.needs_insert_realized(height, date)) // } - pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool { self.cohorts .as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date)) } - // pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool { + // pub fn needs_insert_input(&self, height: Height, date: WNaiveDate) -> bool { // self.cohorts // .as_vec() // .iter() diff --git a/parser/src/datasets/block_metadata.rs b/parser/src/datasets/block_metadata.rs index b1483f38c..0b320bdd2 100644 --- a/parser/src/datasets/block_metadata.rs +++ b/parser/src/datasets/block_metadata.rs @@ -2,7 +2,7 @@ use allocative::Allocative; use crate::{ datasets::AnyDataset, - structs::{AnyHeightMap, HeightMap, WNaiveDate}, + structs::{AnyHeightMap, Date, HeightMap}, }; use super::{InsertData, MinInitialStates}; @@ -12,7 +12,7 @@ pub struct BlockMetadataDataset { min_initial_states: MinInitialStates, // Inserted - pub date: HeightMap, + pub date: HeightMap, pub timestamp: HeightMap, } @@ -41,8 +41,7 @@ impl BlockMetadataDataset { ) { self.timestamp.insert(height, timestamp); - self.date - .insert(height, WNaiveDate::from_timestamp(timestamp)); + self.date.insert(height, Date::from_timestamp(timestamp)); } } diff --git a/parser/src/datasets/cointime.rs b/parser/src/datasets/cointime.rs index 8c52c79c9..11e7e2613 100644 --- a/parser/src/datasets/cointime.rs +++ b/parser/src/datasets/cointime.rs @@ 
-1,7 +1,7 @@ use allocative::Allocative; use crate::{ - structs::{AnyBiMap, BiMap, DateMap}, + structs::{AnyBiMap, BiMap, DateMap, Height}, utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS}, }; @@ -166,8 +166,8 @@ impl CointimeDataset { pub fn compute( &mut self, compute_data: &ComputeData, - first_height: &mut DateMap<usize>, - last_height: &mut DateMap<usize>, + first_height: &mut DateMap<Height>, + last_height: &mut DateMap<Height>, closes: &mut BiMap, circulating_supply: &mut BiMap, realized_cap: &mut BiMap, @@ -176,7 +176,7 @@ impl CointimeDataset { annualized_transaction_volume: &mut BiMap, cumulative_subsidy_in_dollars: &mut BiMap, ) { - let &ComputeData { heights, dates } = compute_data; + let &ComputeData { heights, dates, .. } = compute_data; self.cumulative_coinblocks_destroyed .multi_insert_cumulative(heights, dates, &mut self.coinblocks_destroyed); @@ -403,7 +403,7 @@ impl CointimeDataset { .multi_insert_complex_transform( heights, &mut self.active_cap.height, - |(active_cap, height)| { + |(active_cap, height, ..)| { let investor_cap = self.investor_cap.height.get(height).unwrap(); (active_cap - investor_cap) / active_cap diff --git a/parser/src/datasets/constant.rs b/parser/src/datasets/constant.rs index 03459f0ba..22468eba7 100644 --- a/parser/src/datasets/constant.rs +++ b/parser/src/datasets/constant.rs @@ -34,7 +34,7 @@ impl ConstantDataset { Ok(s) } - pub fn compute(&mut self, &ComputeData { heights, dates }: &ComputeData) { + pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) { self._0.multi_insert_const(heights, dates, 0); self._1.multi_insert_const(heights, dates, 1); self._50.multi_insert_const(heights, dates, 50); diff --git a/parser/src/datasets/date_metadata.rs b/parser/src/datasets/date_metadata.rs index 87ef0804f..db812d9b5 100644 --- a/parser/src/datasets/date_metadata.rs +++ b/parser/src/datasets/date_metadata.rs @@ -2,7 +2,7 @@ use allocative::Allocative; use crate::{ datasets::AnyDataset, - structs::{AnyDateMap, DateMap}, + structs::{AnyDateMap, DateMap, Height}, }; use super::{InsertData, MinInitialStates}; @@ -12,8 +12,8 @@ pub struct DateMetadataDataset { min_initial_states: MinInitialStates, // Inserted - pub first_height: DateMap<usize>, - pub last_height: DateMap<usize>, + pub first_height: DateMap<Height>, + pub last_height: DateMap<Height>, } impl DateMetadataDataset { diff --git a/parser/src/datasets/mining.rs b/parser/src/datasets/mining.rs index 17b217e99..e49203657 100644 --- a/parser/src/datasets/mining.rs +++ b/parser/src/datasets/mining.rs @@ -3,7 +3,9 @@ use allocative::Allocative; use crate::{ bitcoin::TARGET_BLOCKS_PER_DAY, datasets::AnyDataset, - structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, HeightMap, WAmount}, + structs::{ Amount, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, Height, HeightMap, MapKey, }, utils::{BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS}, }; @@ -224,7 +226,7 @@ impl MiningDataset { .height .insert(height, (block_price * coinbase).to_dollar() as f32); - let sumed_fees = WAmount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum()); + let sumed_fees = Amount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum()); self.fees.height.insert(height, sumed_fees.to_btc()); @@ -281,10 +283,10 @@ impl MiningDataset { self.last_fees_in_dollars .insert(date, sumed_fees_in_dollars); - let total_blocks_mined = self.total_blocks_mined.insert(date, height + 1); + let total_blocks_mined = self.total_blocks_mined.insert(date, height.to_usize() + 1);
self.blocks_mined - .insert(date, total_blocks_mined - date_first_height); + .insert(date, total_blocks_mined - date_first_height.to_usize()); self.difficulty.date.insert(date, difficulty); } @@ -292,8 +294,8 @@ pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, - last_height: &mut DateMap<usize>, + &ComputeData { heights, dates, .. }: &ComputeData, + last_height: &mut DateMap<Height>, ) { self.blocks_mined_1w_sum.multi_insert_last_x_sum( dates, diff --git a/parser/src/datasets/mod.rs b/parser/src/datasets/mod.rs index 617736f44..c432b6dc7 100644 --- a/parser/src/datasets/mod.rs +++ b/parser/src/datasets/mod.rs @@ -44,31 +44,31 @@ use crate::{ // UTXOCohortsReceivedStates, UTXOCohortsSentStates, }, - structs::{Price, WAmount, WNaiveDate}, + structs::{Amount, Date, Height, Price}, }; pub struct InsertData<'a> { pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>, pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>, pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>, - pub amount_sent: WAmount, + pub amount_sent: Amount, pub block_interval: u32, pub block_price: Price, pub block_size: usize, pub block_vbytes: u64, pub block_weight: u64, - pub coinbase: WAmount, + pub coinbase: Amount, pub compute_addresses: bool, pub databases: &'a Databases, - pub date: WNaiveDate, - pub date_blocks_range: &'a RangeInclusive<usize>, - pub date_first_height: usize, + pub date: Date, + pub date_blocks_range: &'a RangeInclusive<Height>, + pub date_first_height: Height, pub difficulty: f64, - pub fees: &'a Vec<WAmount>, - pub height: usize, + pub fees: &'a Vec<Amount>, + pub height: Height, pub is_date_last_block: bool, - pub satblocks_destroyed: WAmount, - pub satdays_destroyed: WAmount, + pub satblocks_destroyed: Amount, + pub satdays_destroyed: Amount, pub states: &'a States, pub timestamp: u32, pub transaction_count: usize, @@ -78,8 +78,8 @@ pub struct ComputeData<'a> { - pub heights: &'a [usize], - pub dates: &'a [WNaiveDate], + pub heights: &'a [Height], + pub dates: &'a [Date], } #[derive(Allocative)] diff --git a/parser/src/datasets/price/mod.rs b/parser/src/datasets/price/mod.rs index 6e423b30b..b9f2e0bed 100644 --- a/parser/src/datasets/price/mod.rs +++ b/parser/src/datasets/price/mod.rs @@ -10,7 +10,7 @@ pub use ohlc::*; use crate::{ price::{Binance, Kraken}, - structs::{AnyBiMap, AnyDateMap, BiMap, DateMap, WNaiveDate}, + structs::{AnyBiMap, AnyDateMap, BiMap, Date, DateMap, Height, MapKey}, utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS}, }; @@ -20,7 +20,7 @@ use super::{AnyDataset, ComputeData, MinInitialStates, RatioDataset}; pub struct PriceDatasets { min_initial_states: MinInitialStates, - kraken_daily: Option<BTreeMap<WNaiveDate, OHLC>>, + kraken_daily: Option<BTreeMap<Date, OHLC>>, kraken_1mn: Option<BTreeMap<u32, OHLC>>, binance_1mn: Option<BTreeMap<u32, OHLC>>, binance_har: Option<BTreeMap<u32, OHLC>>, @@ -90,8 +90,8 @@ impl PriceDatasets { kraken_daily: None, satonomics_by_height: BTreeMap::default(), - ohlcs: BiMap::new_json(1, &format!("{price_path}/ohlc")), - closes: BiMap::new_json(1, &f("close")), + ohlcs: BiMap::new_json(1, price_path), + closes: BiMap::new_bin(1, &f("close")), market_cap: BiMap::new_bin(1, &f("market_cap")), price_1w_sma: BiMap::new_bin(1, &f("price_1w_sma")), price_1w_sma_ratio: RatioDataset::import(datasets_path, "price_1w_sma")?, @@ -139,7 +139,7 @@ impl PriceDatasets { } pub fn compute(&mut self, compute_data: &ComputeData, circulating_supply: &mut BiMap) { - let &ComputeData { dates, heights } = compute_data; + let &ComputeData { dates, heights, ..
} = compute_data; self.closes .multi_insert_simple_transform(heights, dates, &mut self.ohlcs, &|ohlc| ohlc.close); @@ -265,7 +265,7 @@ impl PriceDatasets { |(last_value, date, closes)| { let previous_value = date .checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64)) - .and_then(|date| closes.get_or_import(&WNaiveDate::wrap(date))) + .and_then(|date| closes.get_or_import(&Date::wrap(date))) .unwrap_or_default(); (((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0 @@ -300,8 +300,8 @@ impl PriceDatasets { .compute(compute_data, &mut self.closes, &mut self.price_200w_sma); } - pub fn get_date_ohlc(&mut self, date: WNaiveDate) -> color_eyre::Result { - if self.ohlcs.date.is_date_safe(date) { + pub fn get_date_ohlc(&mut self, date: Date) -> color_eyre::Result { + if self.ohlcs.date.is_key_safe(date) { Ok(self.ohlcs.date.get(&date).unwrap().to_owned()) } else { let ohlc = self.get_from_daily_kraken(&date)?; @@ -312,7 +312,7 @@ impl PriceDatasets { } } - fn get_from_daily_kraken(&mut self, date: &WNaiveDate) -> color_eyre::Result { + fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result { if self.kraken_daily.is_none() { self.kraken_daily.replace( Kraken::fetch_daily_prices() @@ -330,7 +330,7 @@ impl PriceDatasets { pub fn get_height_ohlc( &mut self, - height: usize, + height: Height, timestamp: u32, previous_timestamp: Option, ) -> color_eyre::Result { @@ -351,7 +351,7 @@ impl PriceDatasets { let timestamp = clean_timestamp(timestamp); - if previous_timestamp.is_none() && height > 0 { + if previous_timestamp.is_none() && !height.is_first() { panic!("Shouldn't be possible"); } @@ -364,7 +364,7 @@ impl PriceDatasets { .unwrap_or_else(|_| { self.get_from_har_binance(timestamp, previous_timestamp) .unwrap_or_else(|_| { - let date = WNaiveDate::from_timestamp(timestamp); + let date = Date::from_timestamp(timestamp); panic!( "Can't find the price for: height: {height} - date: {date} diff --git a/parser/src/datasets/subs/capitalization.rs b/parser/src/datasets/subs/capitalization.rs index c8f114729..041cb5c2b 100644 --- a/parser/src/datasets/subs/capitalization.rs +++ b/parser/src/datasets/subs/capitalization.rs @@ -79,7 +79,7 @@ impl CapitalizationDataset { closes: &mut BiMap, cohort_supply: &mut BiMap, ) { - let &ComputeData { heights, dates } = compute_data; + let &ComputeData { heights, dates, .. 
} = compute_data; self.realized_price.multi_insert_divide( heights, diff --git a/parser/src/datasets/subs/mod.rs b/parser/src/datasets/subs/mod.rs index f5a62ca2b..3aaa9d16c 100644 --- a/parser/src/datasets/subs/mod.rs +++ b/parser/src/datasets/subs/mod.rs @@ -5,7 +5,7 @@ mod input; mod price_paid; mod ratio; mod realized; -// mod recap; +mod recap; mod supply; mod unrealized; mod utxo; @@ -15,7 +15,7 @@ pub use input::*; pub use price_paid::*; pub use ratio::*; pub use realized::*; -// pub use recap::*; +pub use recap::*; pub use supply::*; pub use unrealized::*; pub use utxo::*; diff --git a/parser/src/datasets/subs/price_paid.rs b/parser/src/datasets/subs/price_paid.rs index 7ec003093..efdf77123 100644 --- a/parser/src/datasets/subs/price_paid.rs +++ b/parser/src/datasets/subs/price_paid.rs @@ -4,7 +4,7 @@ use itertools::Itertools; use crate::{ datasets::{AnyDataset, InsertData, MinInitialStates}, states::PricePaidState, - structs::{AnyBiMap, BiMap, WNaiveDate}, + structs::{AnyBiMap, BiMap, Date, Height}, }; #[derive(Default, Allocative)] @@ -217,13 +217,13 @@ impl PricePaidSubDataset { } } - fn insert_height_default(&mut self, height: usize) { + fn insert_height_default(&mut self, height: Height) { self.inserted_as_mut_vec().into_iter().for_each(|bi| { bi.height.insert_default(height); }) } - fn insert_date_default(&mut self, date: WNaiveDate) { + fn insert_date_default(&mut self, date: Date) { self.inserted_as_mut_vec().into_iter().for_each(|bi| { bi.date.insert_default(date); }) diff --git a/parser/src/datasets/subs/ratio.rs b/parser/src/datasets/subs/ratio.rs index 24a3c9282..e2b6e55c0 100644 --- a/parser/src/datasets/subs/ratio.rs +++ b/parser/src/datasets/subs/ratio.rs @@ -68,7 +68,7 @@ impl RatioDataset { pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. }: &ComputeData, market_price: &mut BiMap, other_price: &mut BiMap, ) { @@ -112,9 +112,13 @@ impl RatioDataset { self.ratio_1y_sma_momentum_oscillator .height - .multi_insert_complex_transform(heights, &mut self.ratio.height, |(ratio, height)| { - (ratio / self.ratio_1y_sma.height.get_or_import(height)) - 1.0 - }); + .multi_insert_complex_transform( + heights, + &mut self.ratio.height, + |(ratio, height, ..)| { + (ratio / self.ratio_1y_sma.height.get_or_import(height).unwrap()) - 1.0 + }, + ); self.ratio_1y_sma_momentum_oscillator .date diff --git a/parser/src/datasets/subs/realized.rs b/parser/src/datasets/subs/realized.rs index 65808c072..a02e57ca2 100644 --- a/parser/src/datasets/subs/realized.rs +++ b/parser/src/datasets/subs/realized.rs @@ -130,7 +130,7 @@ impl RealizedSubDataset { pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. 
}: &ComputeData, market_cap: &mut BiMap, ) { self.negative_realized_loss.multi_insert_simple_transform( diff --git a/parser/src/datasets/subs/recap.rs b/parser/src/datasets/subs/recap.rs index a0d0602d1..1b877574e 100644 --- a/parser/src/datasets/subs/recap.rs +++ b/parser/src/datasets/subs/recap.rs @@ -6,33 +6,40 @@ use crate::{ DateMap, HeightMap, }; -#[derive(Default, Allocative)] +#[derive(Allocative)] +pub enum RecapTime { + Insert, + Compute, +} + +#[derive(Allocative)] pub struct RecapDataset { min_initial_states: MinInitialStates, + time: RecapTime, // Computed - min: Option>, - max: Option>, - median: Option>, average: Option>, sum: Option>, + max: Option>, _90p: Option>, _75p: Option>, + median: Option>, _25p: Option>, _10p: Option>, + min: Option>, } #[derive(Default)] -struct RecapOptions { - min: bool, - max: bool, - median: bool, +pub struct RecapOptions { average: bool, sum: bool, + max: bool, _90p: bool, _75p: bool, + median: bool, _25p: bool, _10p: bool, + min: bool, } impl RecapOptions { @@ -77,11 +84,16 @@ impl RecapDataset where T: MapValue, { - pub fn import(parent_path: &str, options: RecapOptions) -> color_eyre::Result { + pub fn import( + parent_path: &str, + time: RecapTime, + options: RecapOptions, + ) -> color_eyre::Result { let f = |s: &str| format!("{parent_path}/{s}"); let mut s = Self { min_initial_states: MinInitialStates::default(), + time, min: options.min.then(|| DateMap::new_bin(1, &f("min"))), max: options.max.then(|| DateMap::new_bin(1, &f("max"))), @@ -102,44 +114,48 @@ where pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. }: &ComputeData, source: &mut HeightMap, ) { - if let Some(min) = self.min.as_ref() { - // v.push(min); - } + dates.iter().enumerate().for_each(|(index, date)| { + // let heights = heights_by_date.get(index).unwrap(); - if let Some(max) = self.max.as_ref() { - // v.push(max); - } + if let Some(sum) = self.sum.as_ref() { + // v.push(sum); + } - if let Some(median) = self.median.as_ref() { - // v.push(median); - } + if let Some(average) = self.average.as_ref() { + // v.push(average); + } - if let Some(average) = self.average.as_ref() { - // v.push(average); - } + if let Some(max) = self.max.as_ref() { + // v.push(max); + } - if let Some(sum) = self.sum.as_ref() { - // v.push(sum); - } + if let Some(_90p) = self._90p.as_ref() { + // v.push(_90p); + } - if let Some(_90p) = self._90p.as_ref() { - // v.push(_90p); - } + if let Some(_75p) = self._75p.as_ref() { + // v.push(_75p); + } - if let Some(_75p) = self._75p.as_ref() { - // v.push(_75p); - } + if let Some(median) = self.median.as_ref() { + // v.push(median); + } - if let Some(_25p) = self._25p.as_ref() { - // v.push(_25p); - } + if let Some(_25p) = self._25p.as_ref() { + // v.push(_25p); + } - if let Some(_10p) = self._10p.as_ref() { - // v.push(_10p); - } + if let Some(_10p) = self._10p.as_ref() { + // v.push(_10p); + } + + if let Some(min) = self.min.as_ref() { + // v.push(min); + } + }); } } diff --git a/parser/src/datasets/subs/supply.rs b/parser/src/datasets/subs/supply.rs index 2e871d36d..44a430aa9 100644 --- a/parser/src/datasets/subs/supply.rs +++ b/parser/src/datasets/subs/supply.rs @@ -70,7 +70,7 @@ impl SupplySubDataset { #[allow(unused_variables)] pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. 
}: &ComputeData, circulating_supply: &mut BiMap, ) { self.supply_to_circulating_supply_ratio diff --git a/parser/src/datasets/subs/unrealized.rs b/parser/src/datasets/subs/unrealized.rs index 92661e5ac..e3d286fec 100644 --- a/parser/src/datasets/subs/unrealized.rs +++ b/parser/src/datasets/subs/unrealized.rs @@ -115,7 +115,7 @@ impl UnrealizedSubDataset { pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. }: &ComputeData, own_supply: &mut BiMap, circulating_supply: &mut BiMap, market_cap: &mut BiMap, diff --git a/parser/src/datasets/transaction.rs b/parser/src/datasets/transaction.rs index ef5b1f9e7..63ab80a43 100644 --- a/parser/src/datasets/transaction.rs +++ b/parser/src/datasets/transaction.rs @@ -107,7 +107,7 @@ impl TransactionDataset { pub fn compute( &mut self, - &ComputeData { heights, dates }: &ComputeData, + &ComputeData { heights, dates, .. }: &ComputeData, circulating_supply: &mut BiMap, block_interval: &mut HeightMap, ) { diff --git a/parser/src/datasets/utxo/dataset.rs b/parser/src/datasets/utxo/dataset.rs index 3024e3084..b004434a5 100644 --- a/parser/src/datasets/utxo/dataset.rs +++ b/parser/src/datasets/utxo/dataset.rs @@ -6,7 +6,7 @@ use crate::{ AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset, }, states::UTXOCohortId, - structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate}, + structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, Date, Height}, }; #[derive(Default, Allocative)] @@ -122,31 +122,31 @@ impl UTXODataset { // } } - pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool { self.subs.utxo.needs_insert(height, date) } - pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool { self.subs.capitalization.needs_insert(height, date) } - pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool { self.subs.supply.needs_insert(height, date) } - pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool { self.subs.price_paid.needs_insert(height, date) } - pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool { self.subs.realized.needs_insert(height, date) } - pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool { self.subs.unrealized.needs_insert(height, date) } - pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_input(&self, height: Height, date: Date) -> bool { self.subs.input.needs_insert(height, date) } diff --git a/parser/src/datasets/utxo/mod.rs b/parser/src/datasets/utxo/mod.rs index 730185181..41986e57c 100644 --- a/parser/src/datasets/utxo/mod.rs +++ b/parser/src/datasets/utxo/mod.rs @@ -9,7 +9,7 @@ use itertools::Itertools; use crate::{ datasets::AnyDatasets, states::{SplitByUTXOCohort, UTXOCohortId}, - structs::{BiMap, WNaiveDate}, + structs::{BiMap, Date, Height}, }; use super::{AnyDataset, ComputeData, InsertData, MinInitialStates}; @@ -55,7 +55,7 @@ impl UTXODatasets { .for_each(|(cohort, _)| cohort.insert(insert_data)) } - 
pub fn needs_durable_states(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_durable_states(&self, height: Height, date: Date) -> bool { let needs_insert_utxo = self.needs_insert_utxo(height, date); let needs_insert_capitalization = self.needs_insert_capitalization(height, date); let needs_insert_supply = self.needs_insert_supply(height, date); @@ -67,51 +67,51 @@ impl UTXODatasets { || needs_one_shot_states } - pub fn needs_one_shot_states(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool { self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date) } - pub fn needs_sent_states(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_sent_states(&self, height: Height, date: Date) -> bool { self.needs_insert_input(height, date) || self.needs_insert_realized(height, date) } - pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_utxo(height, date)) } - pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date)) } - pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_supply(height, date)) } - pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date)) } - pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_realized(height, date)) } - pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date)) } - pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool { + pub fn needs_insert_input(&self, height: Height, date: Date) -> bool { self.as_vec() .iter() .any(|(dataset, _)| dataset.needs_insert_input(height, date)) diff --git a/parser/src/io/mod.rs b/parser/src/io/mod.rs index a51dac6ee..674784567 100644 --- a/parser/src/io/mod.rs +++ b/parser/src/io/mod.rs @@ -1,11 +1,9 @@ mod binary; mod consts; mod json; -mod path; mod serialization; pub use binary::*; pub use consts::*; pub use json::*; -pub use path::*; pub use serialization::*; diff --git a/parser/src/io/path.rs b/parser/src/io/path.rs deleted file mode 100644 index f62a03fca..000000000 --- a/parser/src/io/path.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub fn format_path(path: &str) -> String { - path.replace(['-', '_', ' '], "/") -} diff --git a/parser/src/lib.rs b/parser/src/lib.rs index 8b297cb25..f313e75a4 100644 --- a/parser/src/lib.rs +++ b/parser/src/lib.rs @@ -14,7 +14,7 @@ pub use crate::{ datasets::OHLC, io::{Binary, Json, Serialization}, structs::{ - Config, DateMap, HeightMap, SerializedDateMap, SerializedHeightMap, WNaiveDate, + Config, Date, DateMap, 
Height, HeightMap, MapChunkId, SerializedBTreeMap, SerializedVec, HEIGHT_MAP_CHUNK_SIZE, }, utils::log, diff --git a/parser/src/main.rs b/parser/src/main.rs index 34a9177b5..605748896 100644 --- a/parser/src/main.rs +++ b/parser/src/main.rs @@ -1,6 +1,6 @@ use std::{path::Path, thread::sleep, time::Duration}; -use parser::{iter_blocks, log, BitcoinDB, BitcoinDaemon, Config}; +use parser::{iter_blocks, log, BitcoinDB, BitcoinDaemon, Config, Height}; fn main() -> color_eyre::Result<()> { color_eyre::install()?; @@ -38,7 +38,7 @@ fn main() -> color_eyre::Result<()> { daemon.start(); if daemon.check_if_fully_synced() { - daemon.wait_for_new_block(block_count - 1); + daemon.wait_for_new_block(Height::new(block_count as u32 - 1)); } else { daemon.wait_sync(); } diff --git a/parser/src/price/binance.rs b/parser/src/price/binance.rs index 864383733..2520d9690 100644 --- a/parser/src/price/binance.rs +++ b/parser/src/price/binance.rs @@ -9,7 +9,7 @@ use serde_json::Value; use crate::{ datasets::OHLC, io::{Json, IMPORTS_FOLDER_PATH}, - structs::WNaiveDate, + structs::Date, utils::{log, retry}, }; @@ -150,7 +150,7 @@ impl Binance { ) } - pub fn fetch_daily_prices() -> color_eyre::Result> { + pub fn fetch_daily_prices() -> color_eyre::Result> { log("binance: fetch 1d"); retry( @@ -168,7 +168,7 @@ impl Binance { // [timestamp, open, high, low, close, volume, ...] let array = value.as_array().unwrap(); - let date = WNaiveDate::from_timestamp( + let date = Date::from_timestamp( array.first().unwrap().as_u64().unwrap() as u32 / 1000, ); diff --git a/parser/src/price/kraken.rs b/parser/src/price/kraken.rs index 8f804b8ca..09c3f896a 100644 --- a/parser/src/price/kraken.rs +++ b/parser/src/price/kraken.rs @@ -5,7 +5,7 @@ use serde_json::Value; use crate::{ datasets::OHLC, - structs::WNaiveDate, + structs::Date, utils::{log, retry}, }; @@ -66,7 +66,7 @@ impl Kraken { ) } - pub fn fetch_daily_prices() -> color_eyre::Result> { + pub fn fetch_daily_prices() -> color_eyre::Result> { log("fetch kraken daily"); retry( @@ -91,9 +91,8 @@ impl Kraken { .map(|value| { let array = value.as_array().unwrap(); - let date = WNaiveDate::from_timestamp( - array.first().unwrap().as_u64().unwrap() as u32, - ); + let date = + Date::from_timestamp(array.first().unwrap().as_u64().unwrap() as u32); let get_f32 = |index: usize| { array diff --git a/parser/src/states/cohorts_states/address/cohort_durable_states.rs b/parser/src/states/cohorts_states/address/cohort_durable_states.rs index 42fbbafed..57301a268 100644 --- a/parser/src/states/cohorts_states/address/cohort_durable_states.rs +++ b/parser/src/states/cohorts_states/address/cohort_durable_states.rs @@ -2,14 +2,14 @@ use allocative::Allocative; use crate::{ states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState}, - structs::{LiquiditySplitResult, Price, SplitByLiquidity, WAmount}, + structs::{Amount, LiquiditySplitResult, Price, SplitByLiquidity}, }; #[derive(Default, Debug, Allocative)] pub struct AddressCohortDurableStates { pub address_count: usize, pub split_durable_states: SplitByLiquidity, - pub price_to_split_amount: PriceToValue>, + pub price_to_split_amount: PriceToValue>, } const ONE_THIRD: f64 = 1.0 / 3.0; @@ -19,7 +19,7 @@ impl AddressCohortDurableStates { #[allow(clippy::too_many_arguments)] pub fn increment( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, realized_cap: Price, mean_price_paid: Price, @@ -44,7 +44,7 @@ impl AddressCohortDurableStates { #[allow(clippy::too_many_arguments)] pub fn decrement( &mut self, - amount: 
WAmount, + amount: Amount, utxo_count: usize, realized_cap: Price, mean_price_paid: Price, @@ -69,7 +69,7 @@ impl AddressCohortDurableStates { #[allow(clippy::too_many_arguments)] pub fn _crement( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, realized_cap: Price, mean_price_paid: Price, @@ -98,7 +98,7 @@ impl AddressCohortDurableStates { let illiquid_amount = split_sat_amount_result.illiquid.trunc(); let illiquid_amount_rest = split_sat_amount_result.illiquid - illiquid_amount; - let mut illiquid_amount = WAmount::from_sat(illiquid_amount as u64); + let mut illiquid_amount = Amount::from_sat(illiquid_amount as u64); let mut illiquid_utxo_count = split_utxo_count_result.illiquid.trunc() as usize; let illiquid_utxo_count_rest = split_utxo_count_result.illiquid.fract(); let mut illiquid_realized_cap = @@ -107,7 +107,7 @@ impl AddressCohortDurableStates { let liquid_amount = split_sat_amount_result.liquid.trunc(); let liquid_amount_rest = split_sat_amount_result.liquid - liquid_amount; - let mut liquid_amount = WAmount::from_sat(liquid_amount as u64); + let mut liquid_amount = Amount::from_sat(liquid_amount as u64); let mut liquid_utxo_count = split_utxo_count_result.liquid.trunc() as usize; let liquid_utxo_count_rest = split_utxo_count_result.liquid.fract(); let mut liquid_realized_cap = @@ -120,7 +120,7 @@ impl AddressCohortDurableStates { realized_cap - illiquid_realized_cap - liquid_realized_cap; let amount_diff = amount - illiquid_amount - liquid_amount - highly_liquid_amount; - if amount_diff > WAmount::ZERO { + if amount_diff > Amount::ZERO { if illiquid_amount_rest >= ONE_THIRD && illiquid_amount_rest > liquid_amount_rest { illiquid_amount += amount_diff; } else if illiquid_amount_rest >= ONE_THIRD { @@ -337,7 +337,7 @@ impl AddressCohortDurableStates { ); } - if split_amount.illiquid > WAmount::ZERO { + if split_amount.illiquid > Amount::ZERO { one_shot_states_ref.illiquid.price_paid_state.iterate( price_paid, split_amount.illiquid, @@ -359,7 +359,7 @@ impl AddressCohortDurableStates { } } - if split_amount.liquid > WAmount::ZERO { + if split_amount.liquid > Amount::ZERO { one_shot_states_ref.liquid.price_paid_state.iterate( price_paid, split_amount.liquid, @@ -381,7 +381,7 @@ impl AddressCohortDurableStates { } } - if split_amount.highly_liquid > WAmount::ZERO { + if split_amount.highly_liquid > Amount::ZERO { one_shot_states_ref.highly_liquid.price_paid_state.iterate( price_paid, split_amount.highly_liquid, diff --git a/parser/src/states/cohorts_states/address/cohorts_input_states.rs b/parser/src/states/cohorts_states/address/cohorts_input_states.rs index 1fd03c415..97cb20517 100644 --- a/parser/src/states/cohorts_states/address/cohorts_input_states.rs +++ b/parser/src/states/cohorts_states/address/cohorts_input_states.rs @@ -2,7 +2,7 @@ use derive_deref::{Deref, DerefMut}; use crate::{ states::InputState, - structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount}, + structs::{AddressRealizedData, Amount, LiquidityClassification, SplitByLiquidity}, }; use super::SplitByAddressCohort; @@ -27,17 +27,17 @@ impl AddressCohortsInputStates { state.illiquid.iterate( split_count.illiquid, - WAmount::from_sat(split_volume.illiquid.round() as u64), + Amount::from_sat(split_volume.illiquid.round() as u64), ); state.liquid.iterate( split_count.liquid, - WAmount::from_sat(split_volume.liquid.round() as u64), + Amount::from_sat(split_volume.liquid.round() as u64), ); state.highly_liquid.iterate( split_count.highly_liquid, - 
WAmount::from_sat(split_volume.highly_liquid.round() as u64), + Amount::from_sat(split_volume.highly_liquid.round() as u64), ); Ok(()) diff --git a/parser/src/states/cohorts_states/address/cohorts_output_states.rs b/parser/src/states/cohorts_states/address/cohorts_output_states.rs index 24675e349..dc8a7aeb3 100644 --- a/parser/src/states/cohorts_states/address/cohorts_output_states.rs +++ b/parser/src/states/cohorts_states/address/cohorts_output_states.rs @@ -2,7 +2,7 @@ use derive_deref::{Deref, DerefMut}; use crate::{ states::OutputState, - structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount}, + structs::{AddressRealizedData, Amount, LiquidityClassification, SplitByLiquidity}, }; use super::SplitByAddressCohort; @@ -27,17 +27,17 @@ impl AddressCohortsOutputStates { state.illiquid.iterate( split_count.illiquid, - WAmount::from_sat(split_volume.illiquid.round() as u64), + Amount::from_sat(split_volume.illiquid.round() as u64), ); state.liquid.iterate( split_count.liquid, - WAmount::from_sat(split_volume.liquid.round() as u64), + Amount::from_sat(split_volume.liquid.round() as u64), ); state.highly_liquid.iterate( split_count.highly_liquid, - WAmount::from_sat(split_volume.highly_liquid.round() as u64), + Amount::from_sat(split_volume.highly_liquid.round() as u64), ); Ok(()) diff --git a/parser/src/states/cohorts_states/any/durable_states.rs b/parser/src/states/cohorts_states/any/durable_states.rs index 49800bd9a..d2ea3af03 100644 --- a/parser/src/states/cohorts_states/any/durable_states.rs +++ b/parser/src/states/cohorts_states/any/durable_states.rs @@ -1,7 +1,7 @@ use allocative::Allocative; use color_eyre::eyre::eyre; -use crate::structs::{Price, WAmount}; +use crate::structs::{Amount, Price}; use super::{CapitalizationState, SupplyState, UTXOState}; @@ -15,11 +15,11 @@ pub struct DurableStates { impl DurableStates { pub fn increment( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, realized_cap: Price, ) -> color_eyre::Result<()> { - if amount == WAmount::ZERO { + if amount == Amount::ZERO { if utxo_count != 0 { dbg!(amount, utxo_count); return Err(eyre!("Shouldn't be possible")); @@ -35,11 +35,11 @@ impl DurableStates { pub fn decrement( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, realized_cap: Price, ) -> color_eyre::Result<()> { - if amount == WAmount::ZERO { + if amount == Amount::ZERO { if utxo_count != 0 { dbg!(amount, utxo_count); unreachable!("Shouldn't be possible") diff --git a/parser/src/states/cohorts_states/any/input_state.rs b/parser/src/states/cohorts_states/any/input_state.rs index c55b7f83d..07d13930d 100644 --- a/parser/src/states/cohorts_states/any/input_state.rs +++ b/parser/src/states/cohorts_states/any/input_state.rs @@ -1,13 +1,13 @@ -use crate::structs::WAmount; +use crate::structs::Amount; #[derive(Debug, Default)] pub struct InputState { pub count: f64, - pub volume: WAmount, + pub volume: Amount, } impl InputState { - pub fn iterate(&mut self, count: f64, volume: WAmount) { + pub fn iterate(&mut self, count: f64, volume: Amount) { self.count += count; self.volume += volume; } diff --git a/parser/src/states/cohorts_states/any/output_state.rs b/parser/src/states/cohorts_states/any/output_state.rs index 910657e4f..33a5dcc47 100644 --- a/parser/src/states/cohorts_states/any/output_state.rs +++ b/parser/src/states/cohorts_states/any/output_state.rs @@ -1,13 +1,13 @@ -use crate::structs::WAmount; +use crate::structs::Amount; #[derive(Debug, Default)] pub struct OutputState { pub count: f64, - pub 
volume: WAmount, + pub volume: Amount, } impl OutputState { - pub fn iterate(&mut self, count: f64, volume: WAmount) { + pub fn iterate(&mut self, count: f64, volume: Amount) { self.count += count; self.volume += volume; } diff --git a/parser/src/states/cohorts_states/any/price_paid_state.rs b/parser/src/states/cohorts_states/any/price_paid_state.rs index a93da4c6d..ca19bee9e 100644 --- a/parser/src/states/cohorts_states/any/price_paid_state.rs +++ b/parser/src/states/cohorts_states/any/price_paid_state.rs @@ -1,4 +1,4 @@ -use crate::structs::{Price, WAmount}; +use crate::structs::{Amount, Price}; #[derive(Default, Debug)] pub struct PricePaidState { @@ -22,11 +22,11 @@ pub struct PricePaidState { pub pp_90p: Option, pub pp_95p: Option, - pub processed_amount: WAmount, + pub processed_amount: Amount, } impl PricePaidState { - pub fn iterate(&mut self, price: Price, amount: WAmount, total_supply: WAmount) { + pub fn iterate(&mut self, price: Price, amount: Amount, total_supply: Amount) { let PricePaidState { processed_amount, pp_05p, diff --git a/parser/src/states/cohorts_states/any/price_to_value.rs b/parser/src/states/cohorts_states/any/price_to_value.rs index cbc94e126..10ba788e5 100644 --- a/parser/src/states/cohorts_states/any/price_to_value.rs +++ b/parser/src/states/cohorts_states/any/price_to_value.rs @@ -8,7 +8,7 @@ use allocative::Allocative; use color_eyre::eyre::eyre; use derive_deref::{Deref, DerefMut}; -use crate::structs::{Price, SplitByLiquidity, WAmount}; +use crate::structs::{Amount, Price, SplitByLiquidity}; #[derive(Deref, DerefMut, Default, Debug, Allocative)] pub struct PriceToValue(BTreeMap); @@ -82,13 +82,13 @@ pub trait CanSubtract { fn can_subtract(&self, other: &Self) -> bool; } -impl CanSubtract for WAmount { +impl CanSubtract for Amount { fn can_subtract(&self, other: &Self) -> bool { self >= other } } -impl CanSubtract for SplitByLiquidity { +impl CanSubtract for SplitByLiquidity { fn can_subtract(&self, other: &Self) -> bool { self.all >= other.all && self.illiquid >= other.illiquid @@ -101,23 +101,23 @@ pub trait IsZero { fn is_zero(&self) -> color_eyre::Result; } -impl IsZero for WAmount { +impl IsZero for Amount { fn is_zero(&self) -> color_eyre::Result { - Ok(*self == WAmount::ZERO) + Ok(*self == Amount::ZERO) } } -impl IsZero for SplitByLiquidity { +impl IsZero for SplitByLiquidity { fn is_zero(&self) -> color_eyre::Result { - if self.all == WAmount::ZERO - && (self.illiquid != WAmount::ZERO - || self.liquid != WAmount::ZERO - || self.highly_liquid != WAmount::ZERO) + if self.all == Amount::ZERO + && (self.illiquid != Amount::ZERO + || self.liquid != Amount::ZERO + || self.highly_liquid != Amount::ZERO) { dbg!(&self); Err(eyre!("Bad split")) } else { - Ok(self.all == WAmount::ZERO) + Ok(self.all == Amount::ZERO) } } } diff --git a/parser/src/states/cohorts_states/any/supply_state.rs b/parser/src/states/cohorts_states/any/supply_state.rs index a727a8e2a..2b36c2655 100644 --- a/parser/src/states/cohorts_states/any/supply_state.rs +++ b/parser/src/states/cohorts_states/any/supply_state.rs @@ -1,19 +1,19 @@ use allocative::Allocative; use color_eyre::eyre::eyre; -use crate::structs::WAmount; +use crate::structs::Amount; #[derive(Debug, Default, Allocative)] pub struct SupplyState { - pub supply: WAmount, + pub supply: Amount, } impl SupplyState { - pub fn increment(&mut self, amount: WAmount) { + pub fn increment(&mut self, amount: Amount) { self.supply += amount; } - pub fn decrement(&mut self, amount: WAmount) -> color_eyre::Result<()> { + pub fn 
decrement(&mut self, amount: Amount) -> color_eyre::Result<()> { if self.supply < amount { dbg!(self.supply, amount); diff --git a/parser/src/states/cohorts_states/any/unrealized_state.rs b/parser/src/states/cohorts_states/any/unrealized_state.rs index 106b3607f..121d85920 100644 --- a/parser/src/states/cohorts_states/any/unrealized_state.rs +++ b/parser/src/states/cohorts_states/any/unrealized_state.rs @@ -1,17 +1,17 @@ use std::{cmp::Ordering, ops::Add}; -use crate::structs::{Price, WAmount}; +use crate::structs::{Amount, Price}; #[derive(Debug, Default)] pub struct UnrealizedState { - pub supply_in_profit: WAmount, + pub supply_in_profit: Amount, pub unrealized_profit: Price, pub unrealized_loss: Price, } impl UnrealizedState { #[inline] - pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: WAmount) { + pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: Amount) { match price_then.cmp(&price_now) { Ordering::Less => { self.unrealized_profit += (price_now - price_then) * amount; diff --git a/parser/src/states/cohorts_states/utxo/cohort_durable_states.rs b/parser/src/states/cohorts_states/utxo/cohort_durable_states.rs index dbc3b977d..41eef3944 100644 --- a/parser/src/states/cohorts_states/utxo/cohort_durable_states.rs +++ b/parser/src/states/cohorts_states/utxo/cohort_durable_states.rs @@ -2,19 +2,19 @@ use allocative::Allocative; use crate::{ states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState}, - structs::{Price, WAmount}, + structs::{Amount, Price}, }; #[derive(Default, Debug, Allocative)] pub struct UTXOCohortDurableStates { pub durable_states: DurableStates, - pub price_to_amount: PriceToValue, + pub price_to_amount: PriceToValue, } impl UTXOCohortDurableStates { pub fn increment( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, price: Price, ) -> color_eyre::Result<()> { @@ -23,7 +23,7 @@ impl UTXOCohortDurableStates { pub fn decrement( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, price: Price, ) -> color_eyre::Result<()> { @@ -32,7 +32,7 @@ impl UTXOCohortDurableStates { pub fn _crement( &mut self, - amount: WAmount, + amount: Amount, utxo_count: usize, price: Price, increment: bool, diff --git a/parser/src/states/cohorts_states/utxo/cohorts_durable_states.rs b/parser/src/states/cohorts_states/utxo/cohorts_durable_states.rs index ebd0751ba..88c2f0196 100644 --- a/parser/src/states/cohorts_states/utxo/cohorts_durable_states.rs +++ b/parser/src/states/cohorts_states/utxo/cohorts_durable_states.rs @@ -5,9 +5,9 @@ use rayon::prelude::*; use crate::{ states::DateDataVec, - structs::{BlockData, Price, SentData, WAmount}, + structs::{Amount, BlockData, Price, SentData}, utils::difference_in_days_between_timestamps, - WNaiveDate, + Date, }; use super::{SplitByUTXOCohort, UTXOCohortDurableStates, UTXOCohortsOneShotStates}; @@ -33,7 +33,7 @@ impl UTXOCohortsDurableStates { let utxo_count = block_data.utxos as usize; // No need to either insert or remove if 0 - if amount == WAmount::ZERO { + if amount == Amount::ZERO { return; } @@ -65,12 +65,12 @@ impl UTXOCohortsDurableStates { let price = block_data.price; // No need to either insert or remove if 0 - if amount == WAmount::ZERO { + if amount == Amount::ZERO { return; } if block_data.height == last_block_data.height { - let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32; + let year = Date::from_timestamp(block_data.timestamp).year() as u32; self.initial_filtered_apply(&0, &year, |state| { state.increment(amount, utxo_count, 
price).unwrap(); @@ -118,7 +118,7 @@ impl UTXOCohortsDurableStates { let utxo_count = sent_data.count as usize; // No need to either insert or remove if 0 - if amount == WAmount::ZERO { + if amount == Amount::ZERO { return; } @@ -127,7 +127,7 @@ impl UTXOCohortsDurableStates { previous_last_block_data.timestamp, ); - let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32; + let year = Date::from_timestamp(block_data.timestamp).year() as u32; self.initial_filtered_apply(&days_old, &year, |state| { state diff --git a/parser/src/structs/address_data.rs b/parser/src/structs/address_data.rs index 788167dd2..36592fcda 100644 --- a/parser/src/structs/address_data.rs +++ b/parser/src/structs/address_data.rs @@ -2,14 +2,14 @@ use allocative::Allocative; use color_eyre::eyre::eyre; use sanakirja::{direct_repr, Storable, UnsizedStorable}; -use super::{AddressType, EmptyAddressData, LiquidityClassification, Price, WAmount}; +use super::{AddressType, Amount, EmptyAddressData, LiquidityClassification, Price}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Allocative)] pub struct AddressData { pub address_type: AddressType, - pub amount: WAmount, - pub sent: WAmount, - pub received: WAmount, + pub amount: Amount, + pub sent: Amount, + pub received: Amount, pub realized_cap: Price, pub outputs_len: u32, } @@ -19,15 +19,15 @@ impl AddressData { pub fn new(address_type: AddressType) -> Self { Self { address_type, - amount: WAmount::ZERO, - sent: WAmount::ZERO, - received: WAmount::ZERO, + amount: Amount::ZERO, + sent: Amount::ZERO, + received: Amount::ZERO, realized_cap: Price::ZERO, outputs_len: 0, } } - pub fn receive(&mut self, amount: WAmount, price: Price) { + pub fn receive(&mut self, amount: Amount, price: Price) { let previous_amount = self.amount; let new_amount = previous_amount + amount; @@ -43,7 +43,7 @@ impl AddressData { self.realized_cap += received_value; } - pub fn send(&mut self, amount: WAmount, previous_price: Price) -> color_eyre::Result<()> { + pub fn send(&mut self, amount: Amount, previous_price: Price) -> color_eyre::Result<()> { let previous_amount = self.amount; if previous_amount < amount { @@ -66,7 +66,7 @@ impl AddressData { #[inline(always)] pub fn is_empty(&self) -> bool { - if self.amount == WAmount::ZERO { + if self.amount == Amount::ZERO { if self.outputs_len != 0 { unreachable!(); } @@ -80,7 +80,7 @@ impl AddressData { pub fn from_empty(empty: &EmptyAddressData) -> Self { Self { address_type: empty.address_type, - amount: WAmount::ZERO, + amount: Amount::ZERO, sent: empty.transfered, received: empty.transfered, realized_cap: Price::ZERO, diff --git a/parser/src/structs/address_realized_data.rs b/parser/src/structs/address_realized_data.rs index 44c348502..a2ce42740 100644 --- a/parser/src/structs/address_realized_data.rs +++ b/parser/src/structs/address_realized_data.rs @@ -1,10 +1,10 @@ -use super::{AddressData, Price, WAmount}; +use super::{AddressData, Amount, Price}; #[derive(Debug)] pub struct AddressRealizedData { pub initial_address_data: AddressData, - pub received: WAmount, - pub sent: WAmount, + pub received: Amount, + pub sent: Amount, pub profit: Price, pub loss: Price, pub value_created: Price, @@ -16,8 +16,8 @@ pub struct AddressRealizedData { impl AddressRealizedData { pub fn default(initial_address_data: &AddressData) -> Self { Self { - received: WAmount::ZERO, - sent: WAmount::ZERO, + received: Amount::ZERO, + sent: Amount::ZERO, profit: Price::ZERO, loss: Price::ZERO, utxos_created: 0, @@ -28,12 +28,12 @@ impl 
AddressRealizedData { } } - pub fn receive(&mut self, amount: WAmount) { + pub fn receive(&mut self, amount: Amount) { self.received += amount; self.utxos_created += 1; } - pub fn send(&mut self, amount: WAmount, current_price: Price, previous_price: Price) { + pub fn send(&mut self, amount: Amount, current_price: Price, previous_price: Price) { self.sent += amount; self.utxos_destroyed += 1; diff --git a/parser/src/structs/address_size.rs b/parser/src/structs/address_size.rs index 59de49429..27d274ceb 100644 --- a/parser/src/structs/address_size.rs +++ b/parser/src/structs/address_size.rs @@ -1,6 +1,6 @@ use allocative::Allocative; -use super::WAmount; +use super::Amount; #[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Allocative)] pub enum AddressSize { @@ -16,7 +16,7 @@ pub enum AddressSize { } impl AddressSize { - pub fn from_amount(amount: WAmount) -> Self { + pub fn from_amount(amount: Amount) -> Self { match amount.to_sat() { 0 => Self::Empty, 1..=9_999_999 => Self::Plankton, diff --git a/parser/src/structs/wamount.rs b/parser/src/structs/amount.rs similarity index 52% rename from parser/src/structs/wamount.rs rename to parser/src/structs/amount.rs index a525a3020..3691f0135 100644 --- a/parser/src/structs/wamount.rs +++ b/parser/src/structs/amount.rs @@ -10,11 +10,13 @@ use bincode::{ error::{DecodeError, EncodeError}, BorrowDecode, Decode, Encode, }; -use bitcoin::Amount; +use bitcoin::Amount as BitcoinAmount; use derive_deref::{Deref, DerefMut}; use sanakirja::{direct_repr, Storable, UnsizedStorable}; use serde::{Deserialize, Serialize}; +use super::Height; + #[derive( Debug, PartialEq, @@ -29,98 +31,106 @@ use serde::{Deserialize, Serialize}; Serialize, Deserialize, )] -pub struct WAmount(Amount); -direct_repr!(WAmount); +pub struct Amount(BitcoinAmount); +direct_repr!(Amount); -impl WAmount { - pub const ZERO: Self = Self(Amount::ZERO); +impl Amount { + pub const ZERO: Self = Self(BitcoinAmount::ZERO); pub const ONE_BTC_F64: f64 = 100_000_000.0; #[inline(always)] - pub fn wrap(amount: Amount) -> Self { + pub fn wrap(amount: BitcoinAmount) -> Self { Self(amount) } #[inline(always)] pub fn from_sat(sats: u64) -> Self { - Self(Amount::from_sat(sats)) + Self(BitcoinAmount::from_sat(sats)) } } -impl Add for WAmount { - type Output = WAmount; +impl Add for Amount { + type Output = Amount; - fn add(self, rhs: WAmount) -> Self::Output { - WAmount::from_sat(self.to_sat() + rhs.to_sat()) + fn add(self, rhs: Amount) -> Self::Output { + Amount::from_sat(self.to_sat() + rhs.to_sat()) } } -impl AddAssign for WAmount { +impl AddAssign for Amount { fn add_assign(&mut self, rhs: Self) { - *self = WAmount::from_sat(self.to_sat() + rhs.to_sat()); + *self = Amount::from_sat(self.to_sat() + rhs.to_sat()); } } -impl Sub for WAmount { - type Output = WAmount; +impl Sub for Amount { + type Output = Amount; - fn sub(self, rhs: WAmount) -> Self::Output { - WAmount::from_sat(self.to_sat() - rhs.to_sat()) + fn sub(self, rhs: Amount) -> Self::Output { + Amount::from_sat(self.to_sat() - rhs.to_sat()) } } -impl SubAssign for WAmount { +impl SubAssign for Amount { fn sub_assign(&mut self, rhs: Self) { - *self = WAmount::from_sat(self.to_sat() - rhs.to_sat()); + *self = Amount::from_sat(self.to_sat() - rhs.to_sat()); } } -impl Mul for WAmount { - type Output = WAmount; +impl Mul for Amount { + type Output = Amount; - fn mul(self, rhs: WAmount) -> Self::Output { - WAmount::from_sat(self.to_sat() * rhs.to_sat()) + fn mul(self, rhs: Amount) -> Self::Output { + Amount::from_sat(self.to_sat() * rhs.to_sat()) } } 
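Aside on the `WAmount` -> `Amount` rename running through the hunks above and below: the type stays a newtype over `bitcoin::Amount`, and every operator is defined the same way, unwrap to satoshis, apply the integer operation, re-wrap. The following is a minimal self-contained sketch of that pattern, not the crate's actual code: it substitutes a bare `u64` for `bitcoin::Amount` so it compiles on its own, and it keeps only `Add`, `AddAssign`, and `Sum`.

use std::iter::Sum;
use std::ops::{Add, AddAssign};

/// Newtype over raw satoshis, mirroring the patch's `Amount(BitcoinAmount)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
struct Amount(u64);

impl Amount {
    fn from_sat(sats: u64) -> Self {
        Self(sats)
    }

    fn to_sat(self) -> u64 {
        self.0
    }
}

// Arithmetic is defined on the satoshi representation, as in the patch.
impl Add for Amount {
    type Output = Amount;

    fn add(self, rhs: Amount) -> Self::Output {
        Amount::from_sat(self.to_sat() + rhs.to_sat())
    }
}

impl AddAssign for Amount {
    fn add_assign(&mut self, rhs: Self) {
        *self = *self + rhs;
    }
}

// `Sum` is what lets call sites total fees with a plain `.sum()` over amounts.
impl Sum for Amount {
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        Amount::from_sat(iter.map(|amt| amt.to_sat()).sum())
    }
}

fn main() {
    let fees = [Amount::from_sat(250), Amount::from_sat(180)];
    let mut total: Amount = fees.iter().copied().sum();
    total += Amount::from_sat(70);
    assert_eq!(total, Amount::from_sat(500));
}

Keeping all arithmetic in integer satoshis and converting to BTC or dollars only at insertion time (as `mining.rs` does with `to_btc()` and `to_dollar()`) avoids accumulating float error across a whole chain scan.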
-impl Mul<u64> for WAmount { - type Output = WAmount; +impl Mul<u64> for Amount { + type Output = Amount; fn mul(self, rhs: u64) -> Self::Output { - WAmount::from_sat(self.to_sat() * rhs) + Amount::from_sat(self.to_sat() * rhs) } } -impl Sum for WAmount { +impl Mul<Height> for Amount { + type Output = Amount; + + fn mul(self, rhs: Height) -> Self::Output { + Amount::from_sat(self.to_sat() * *rhs as u64) + } +} + +impl Sum for Amount { fn sum<I: Iterator<Item = Self>>(iter: I) -> Self { let sats = iter.map(|amt| amt.to_sat()).sum(); - WAmount::from_sat(sats) + Amount::from_sat(sats) } } -impl Encode for WAmount { +impl Encode for Amount { fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> { Encode::encode(&self.to_sat(), encoder) } } -impl Decode for WAmount { +impl Decode for Amount { fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> { let sats: u64 = Decode::decode(decoder)?; - Ok(WAmount::from_sat(sats)) + Ok(Amount::from_sat(sats)) } } -impl<'de> BorrowDecode<'de> for WAmount { +impl<'de> BorrowDecode<'de> for Amount { fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> { let sats: u64 = BorrowDecode::borrow_decode(decoder)?; - Ok(WAmount::from_sat(sats)) + Ok(Amount::from_sat(sats)) } } -impl Allocative for WAmount { +impl Allocative for Amount { fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) { visitor.visit_simple_sized::<Self>(); } diff --git a/parser/src/structs/bi_map.rs b/parser/src/structs/bi_map.rs index 96b1375c9..f9cd9a191 100644 --- a/parser/src/structs/bi_map.rs +++ b/parser/src/structs/bi_map.rs @@ -8,7 +8,7 @@ use ordered_float::FloatCore; use crate::{bitcoin::TARGET_BLOCKS_PER_DAY, utils::LossyFrom}; -use super::{AnyDateMap, AnyHeightMap, AnyMap, DateMap, HeightMap, MapValue, WNaiveDate}; +use super::{AnyDateMap, AnyHeightMap, AnyMap, Date, DateMap, Height, HeightMap, MapValue}; #[derive(Default, Allocative)] pub struct BiMap<T> where @@ -37,11 +37,8 @@ where } } - pub fn date_insert_sum_range( - &mut self, - date: WNaiveDate, - date_blocks_range: &RangeInclusive<usize>, - ) where + pub fn date_insert_sum_range(&mut self, date: Date, date_blocks_range: &RangeInclusive<Height>) + where T: Sum, { self.date @@ -50,22 +47,22 @@ where pub fn multi_date_insert_sum_range( &mut self, - dates: &[WNaiveDate], - first_height: &mut DateMap<usize>, - last_height: &mut DateMap<usize>, + dates: &[Date], + first_height: &mut DateMap<Height>, + last_height: &mut DateMap<Height>, ) where T: Sum, { dates.iter().for_each(|date| { let first_height = first_height.get_or_import(date).unwrap(); let last_height = last_height.get_or_import(date).unwrap(); - let range = first_height..=last_height; + let range = (*first_height)..=(*last_height); self.date.insert(*date, self.height.sum_range(&range)); }) } - pub fn multi_insert_const(&mut self, heights: &[usize], dates: &[WNaiveDate], constant: T) { + pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: T) { self.height.multi_insert_const(heights, constant); self.date.multi_insert_const(dates, constant); @@ -73,8 +70,8 @@ where pub fn multi_insert_simple_transform( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap<K>, transform: &F, ) where @@ -91,8 +88,8 @@ where #[allow(unused)] pub fn multi_insert_add( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], added: &mut BiMap<K>, adder: &mut BiMap<K>, ) where @@ -109,8 +106,8 @@ where pub fn multi_insert_subtract( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], subtracted: &mut BiMap<K>, subtracter: &mut BiMap<K>, )
where @@ -128,8 +125,8 @@ where pub fn multi_insert_multiply( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], multiplied: &mut BiMap, multiplier: &mut BiMap, ) where @@ -146,8 +143,8 @@ where pub fn multi_insert_divide( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], divided: &mut BiMap, divider: &mut BiMap, ) where @@ -164,8 +161,8 @@ where pub fn multi_insert_percentage( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], divided: &mut BiMap, divider: &mut BiMap, ) where @@ -182,8 +179,8 @@ where pub fn multi_insert_cumulative( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap, ) where K: MapValue, @@ -198,8 +195,8 @@ where pub fn multi_insert_last_x_sum( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap, days: usize, ) where @@ -219,8 +216,8 @@ where pub fn multi_insert_simple_average( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap, days: usize, ) where @@ -239,8 +236,8 @@ where pub fn multi_insert_net_change( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap, days: usize, ) where @@ -257,8 +254,8 @@ where pub fn multi_insert_median( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], source: &mut BiMap, days: Option, ) where @@ -275,8 +272,8 @@ where #[allow(unused)] pub fn multi_insert_percentile( &mut self, - heights: &[usize], - dates: &[WNaiveDate], + heights: &[Height], + dates: &[Date], mut map_and_percentiles: Vec<(&mut BiMap, f32)>, days: Option, ) where diff --git a/parser/src/structs/block_data.rs b/parser/src/structs/block_data.rs index 25b8c1fb6..00b96bb3b 100644 --- a/parser/src/structs/block_data.rs +++ b/parser/src/structs/block_data.rs @@ -1,29 +1,29 @@ use allocative::Allocative; use bincode::{Decode, Encode}; -use super::{Price, WAmount}; +use super::{Amount, Height, Price}; #[derive(Debug, Encode, Decode, Allocative)] pub struct BlockData { - pub height: u32, + pub height: Height, pub price: Price, pub timestamp: u32, - pub amount: WAmount, + pub amount: Amount, pub utxos: u32, } impl BlockData { - pub fn new(height: u32, price: Price, timestamp: u32) -> Self { + pub fn new(height: Height, price: Price, timestamp: u32) -> Self { Self { height, price, timestamp, - amount: WAmount::ZERO, + amount: Amount::ZERO, utxos: 0, } } - pub fn send(&mut self, amount: WAmount) { + pub fn send(&mut self, amount: Amount) { self.utxos -= 1; if self.amount < amount { @@ -33,7 +33,7 @@ impl BlockData { self.amount -= amount; } - pub fn receive(&mut self, amount: WAmount) { + pub fn receive(&mut self, amount: Amount) { self.utxos += 1; self.amount += amount; diff --git a/parser/src/structs/date.rs b/parser/src/structs/date.rs new file mode 100644 index 000000000..15858ef64 --- /dev/null +++ b/parser/src/structs/date.rs @@ -0,0 +1,130 @@ +use std::{fmt, str::FromStr}; + +use allocative::{Allocative, Visitor}; +use bincode::{ + de::{BorrowDecoder, Decoder}, + enc::Encoder, + error::{DecodeError, EncodeError}, + BorrowDecode, Decode, Encode, +}; +use chrono::{Datelike, Days, NaiveDate, TimeZone, Utc}; +use derive_deref::{Deref, DerefMut}; +use serde::{Deserialize, Serialize}; + +use super::{DateMapChunkId, MapKey}; + +const NUMBER_OF_UNSAFE_DATES: 
usize = 2; +const MIN_YEAR: i32 = 2009; +const APPROX_MAX_YEAR: i32 = 2100; + +#[derive( + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Deserialize, +)] +pub struct Date(NaiveDate); + +impl Date { + pub fn wrap(date: NaiveDate) -> Self { + Self(date) + } + + pub fn from_timestamp(timestamp: u32) -> Self { + Self( + Utc.timestamp_opt(i64::from(timestamp), 0) + .unwrap() + .date_naive(), + ) + } +} + +impl MapKey for Date { + fn to_chunk_id(&self) -> DateMapChunkId { + DateMapChunkId::new(self) + } + + fn to_first_unsafe(&self) -> Option { + let offset = NUMBER_OF_UNSAFE_DATES - 1; + + self.checked_sub_days(Days::new(offset as u64)) + .map(Date::wrap) + } + + fn to_serialized_key(&self) -> Self { + *self + } + + fn is_out_of_bounds(&self) -> bool { + !(MIN_YEAR..=APPROX_MAX_YEAR).contains(&self.year()) + } + + fn is_first(&self) -> bool { + let day = self.day(); + + if self.year() == 2009 && self.month() == 1 { + day == 3 + } else { + day == 1 + } + } + + fn checked_sub(&self, days: usize) -> Option { + self.checked_sub_days(Days::new(days as u64)) + .map(Self::wrap) + } + + fn min_percentile_key() -> Self { + Self::wrap(NaiveDate::from_ymd_opt(2012, 1, 1).unwrap()) + } + + fn iter_up_to(&self, other: &Self) -> impl Iterator { + self.iter_days().take_while(|d| d <= other).map(Date::wrap) + } + + fn map_name<'a>() -> &'a str { + "date" + } +} + +impl fmt::Display for Date { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.0, f) + } +} + +impl Encode for Date { + fn encode(&self, encoder: &mut E) -> Result<(), EncodeError> { + Encode::encode(&self.to_string(), encoder) + } +} + +impl Decode for Date { + fn decode(decoder: &mut D) -> core::result::Result { + let str: String = Decode::decode(decoder)?; + + Ok(Self(NaiveDate::from_str(&str).unwrap())) + } +} + +impl<'de> BorrowDecode<'de> for Date { + fn borrow_decode>(decoder: &mut D) -> Result { + let str: String = BorrowDecode::borrow_decode(decoder)?; + + Ok(Self(NaiveDate::from_str(&str).unwrap())) + } +} + +impl Allocative for Date { + fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) { + visitor.visit_simple_sized::(); + } +} diff --git a/parser/src/structs/date_data.rs b/parser/src/structs/date_data.rs index 243cd0774..521ff6fe3 100644 --- a/parser/src/structs/date_data.rs +++ b/parser/src/structs/date_data.rs @@ -1,16 +1,16 @@ use allocative::Allocative; use bincode::{Decode, Encode}; -use super::{BlockData, BlockPath, WNaiveDate}; +use super::{BlockData, BlockPath, Date}; #[derive(Debug, Encode, Decode, Allocative)] pub struct DateData { - pub date: WNaiveDate, + pub date: Date, pub blocks: Vec, } impl DateData { - pub fn new(date: WNaiveDate, blocks: Vec) -> Self { + pub fn new(date: Date, blocks: Vec) -> Self { Self { date, blocks } } diff --git a/parser/src/structs/date_map copy.rs b/parser/src/structs/date_map copy.rs new file mode 100644 index 000000000..3428528f9 --- /dev/null +++ b/parser/src/structs/date_map copy.rs @@ -0,0 +1,1287 @@ +use std::{ + collections::{BTreeMap, VecDeque}, + fmt::Debug, + fs, + iter::Sum, + mem, + ops::{Add, ControlFlow, Div, Mul, Sub}, + path::{Path, PathBuf}, +}; + +use allocative::Allocative; +use bincode::{Decode, Encode}; +use chrono::{Datelike, Days}; +use itertools::Itertools; +use ordered_float::{FloatCore, OrderedFloat}; +use serde::{Deserialize, Serialize}; + +use crate::{ + io::{format_path, Serialization}, + utils::{log, LossyFrom}, +}; + +use super::{AnyMap, Date, HeightMap, MapValue}; 
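// Aside: a minimal standalone sketch of what the new `Date` wrapper above
// pins down, using only chrono. The timestamp below is the Bitcoin genesis
// block header time (an external fact, not taken from this diff); the
// `is_first` closure restates the impl above rather than calling it. Note
// also that `Encode`/`Decode` above round-trip dates as their display
// strings (e.g. "2009-01-03") via `NaiveDate::from_str`.
use chrono::{Datelike, NaiveDate, TimeZone, Utc};

fn main() {
    // `Date::from_timestamp` reduces to this conversion:
    let genesis = Utc.timestamp_opt(1_231_006_505, 0).unwrap().date_naive();
    assert_eq!(genesis, NaiveDate::from_ymd_opt(2009, 1, 3).unwrap());

    // `is_first` treats 2009-01-03, not 2009-01-01, as the opening key of
    // the first chunk, because no earlier block exists:
    let is_first = |d: NaiveDate| {
        if d.year() == 2009 && d.month() == 1 {
            d.day() == 3
        } else {
            d.day() == 1
        }
    };
    assert!(is_first(genesis));
    assert!(is_first(NaiveDate::from_ymd_opt(2010, 2, 1).unwrap()));
    assert!(!is_first(NaiveDate::from_ymd_opt(2009, 1, 8).unwrap()));
}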
+ +const NUMBER_OF_UNSAFE_DATES: usize = 2; +const MIN_YEAR: usize = 2009; + +#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)] +pub struct SerializedDateMap { + version: u32, + map: BTreeMap, +} + +#[derive(Default, Allocative)] +pub struct DateMap { + version: u32, + + path_all: String, + path_last: Option, + + chunks_in_memory: usize, + + serialization: Serialization, + + pub initial_last_date: Option, + pub initial_first_unsafe_date: Option, + + imported: BTreeMap>, + to_insert: BTreeMap>, +} + +impl DateMap +where + T: MapValue, +{ + pub fn new_bin(version: u32, path: &str) -> Self { + Self::new(version, path, Serialization::Binary, 1, true) + } + + pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Binary, 1, export_last) + } + + pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Json, usize::MAX, export_last) + } + + fn new( + version: u32, + path: &str, + serialization: Serialization, + chunks_in_memory: usize, + export_last: bool, + ) -> Self { + if chunks_in_memory < 1 { + panic!("Should always have at least the latest chunk in memory"); + } + + let path = format_path(path); + + let path_all = format!("{path}/date"); + + fs::create_dir_all(&path_all).unwrap(); + + let path_last = { + if export_last { + Some(serialization.append_extension(&format!("{path}/last"))) + } else { + None + } + }; + + let mut s = Self { + version, + + path_all, + path_last, + + chunks_in_memory, + + serialization, + + initial_last_date: None, + initial_first_unsafe_date: None, + + to_insert: BTreeMap::default(), + imported: BTreeMap::default(), + }; + + s.read_dir() + .into_iter() + .rev() + .take(chunks_in_memory) + .for_each(|(chunk_start, path)| { + if let Ok(serialized) = s.import(&path) { + if serialized.version == s.version { + s.imported.insert(chunk_start, serialized); + } else { + s.read_dir() + .iter() + .for_each(|(_, path)| fs::remove_file(path).unwrap()) + } + } + }); + + s.initial_last_date = s + .imported + .values() + .last() + .and_then(|serialized| serialized.map.keys().copied().max()); + + s.initial_first_unsafe_date = s.initial_last_date.and_then(|last_date| { + let offset = NUMBER_OF_UNSAFE_DATES - 1; + last_date + .checked_sub_days(Days::new(offset as u64)) + .map(Date::wrap) + }); + + if s.initial_first_unsafe_date.is_none() { + log(&format!("New {path}")); + } + + s + } + + pub fn insert(&mut self, date: Date, value: T) -> T { + if !self.is_date_safe(date) { + self.to_insert + .entry(date.year() as usize) + .or_default() + .insert(date, value); + } + + value + } + + pub fn insert_default(&mut self, date: Date) -> T { + self.insert(date, T::default()) + } + + pub fn get(&self, date: &Date) -> Option { + let year = date.year() as usize; + + self.to_insert + .get(&year) + .and_then(|tree| tree.get(date).cloned()) + .or_else(|| { + self.imported + .get(&year) + .and_then(|serialized| serialized.map.get(date)) + .cloned() + }) + } + + pub fn get_or_import(&mut self, date: &Date) -> Option { + let year = date.year() as usize; + + if year < MIN_YEAR { + return None; + } + + self.to_insert + .get(&year) + .and_then(|tree| tree.get(date).cloned()) + .or_else(|| { + #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() + if !self.imported.contains_key(&year) { + let dir_content = self.read_dir(); + + if let Some(path) = dir_content.get(&year) { + let serialized = self.import(path).unwrap(); + // .unwrap_or(SerializedDateMap { + // 
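// Aside: per `read_dir` and `export` in this file, a `DateMap` keeps one
// chunk file per year under `<dataset>/date/`. The helper and the `price`
// dataset root below are illustrative only, and `bin` assumes the binary
// serializer's extension, which this diff does not show:
fn chunk_path(path_all: &str, year: usize, extension: &str) -> String {
    format!("{path_all}/{year}.{extension}")
}

fn main() {
    assert_eq!(chunk_path("price/date", 2015, "bin"), "price/date/2015.bin");
    // `get_or_import` loads `2015.bin` lazily the first time any 2015 date
    // is requested, then serves later hits from the `imported` cache.
}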
version: self.version, + // map: BTreeMap::default(), + // }); + + self.imported.insert(year, serialized); + } + } + + self.imported + .get(&year) + .and_then(|serialized| serialized.map.get(date)) + .cloned() + }) + } + + #[inline(always)] + pub fn is_date_safe(&self, date: Date) -> bool { + self.initial_first_unsafe_date + .map_or(false, |initial_first_unsafe_date| { + initial_first_unsafe_date > date + }) + } + + fn read_dir(&self) -> BTreeMap { + Self::_read_dir(&self.path_all, &self.serialization) + } + + pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { + fs::read_dir(path) + .unwrap() + .map(|entry| entry.unwrap().path()) + .filter(|path| { + let file_stem = path.file_stem().unwrap().to_str().unwrap(); + let extension = path.extension().unwrap().to_str().unwrap(); + + path.is_file() + && file_stem.len() == 4 + && file_stem.starts_with("20") + && extension == serialization.to_extension() + }) + .map(|path| { + let year = path + .file_stem() + .unwrap() + .to_str() + .unwrap() + .parse::() + .unwrap(); + + (year, path) + }) + .collect() + } + + fn import(&self, path: &Path) -> color_eyre::Result> { + self.serialization + .import::>(path.to_str().unwrap()) + } +} + +impl AnyMap for DateMap +where + T: MapValue, +{ + fn path(&self) -> &str { + &self.path_all + } + + fn path_last(&self) -> &Option { + &self.path_last + } + + fn t_name(&self) -> &str { + std::any::type_name::() + } + + // fn reset(&mut self) -> color_eyre::Result<()> { + // fs::remove_dir(&self.path_all)?; + + // self.initial_last_date = None; + // self.initial_first_unsafe_date = None; + + // self.imported.clear(); + // self.to_insert.clear(); + + // Ok(()) + // } + + fn pre_export(&mut self) { + self.to_insert.iter_mut().for_each(|(chunk_start, map)| { + self.imported + .entry(*chunk_start) + .or_insert(SerializedDateMap { + version: self.version, + map: BTreeMap::default(), + }) + .map + .extend(mem::take(map)); + }); + } + + fn export(&self) -> color_eyre::Result<()> { + let len = self.imported.len(); + + self.to_insert.iter().enumerate().try_for_each( + |(index, (year, map))| -> color_eyre::Result<()> { + if !map.is_empty() { + unreachable!() + } + + let path = self + .serialization + .append_extension(&format!("{}/{}", self.path_all, year)); + + let serialized = self.imported.get(year).unwrap(); + + self.serialization.export(&path, serialized)?; + + if index == len - 1 { + if let Some(path_last) = self.path_last.as_ref() { + self.serialization + .export(path_last, serialized.map.values().last().unwrap())?; + } + } + + Ok(()) + }, + ) + } + + fn post_export(&mut self) { + self.imported + .keys() + .rev() + .enumerate() + .filter(|(index, _)| *index + 1 > self.chunks_in_memory) + .map(|(_, key)| *key) + .collect_vec() + .iter() + .for_each(|key| { + self.imported.remove(key); + }); + + self.to_insert.clear(); + } +} + +pub trait AnyDateMap: AnyMap { + fn get_initial_first_unsafe_date(&self) -> Option; + + fn get_initial_last_date(&self) -> Option; + + fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync); + + fn as_any_mut_map(&mut self) -> &mut dyn AnyMap; +} + +impl AnyDateMap for DateMap +where + T: MapValue, +{ + #[inline(always)] + fn get_initial_first_unsafe_date(&self) -> Option { + self.initial_first_unsafe_date + } + + #[inline(always)] + fn get_initial_last_date(&self) -> Option { + self.initial_last_date + } + + fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) { + self + } + + fn as_any_mut_map(&mut self) -> &mut dyn AnyMap { + self + } +} + +impl DateMap +where + T: MapValue, +{ + 
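// Aside: the `AnyMap` impl above splits persistence into three phases. A
// simplified, I/O-free model of that flow (the struct, f64 values and usize
// ids below are illustrative, not the crate's types):
use std::{collections::BTreeMap, mem};

#[derive(Default)]
struct Chunked {
    imported: BTreeMap<usize, BTreeMap<usize, f64>>,  // chunk id -> loaded chunk
    to_insert: BTreeMap<usize, BTreeMap<usize, f64>>, // chunk id -> pending rows
    chunks_in_memory: usize,
}

impl Chunked {
    // 1) pre_export: drain pending inserts into the in-memory chunks.
    fn pre_export(&mut self) {
        for (chunk_id, map) in self.to_insert.iter_mut() {
            self.imported
                .entry(*chunk_id)
                .or_default()
                .extend(mem::take(map));
        }
    }

    // 2) export: every touched chunk is serialized to disk (file I/O elided).

    // 3) post_export: evict all but the newest `chunks_in_memory` chunks and
    //    clear the (now empty) insert queue.
    fn post_export(&mut self) {
        let keep: Vec<usize> = self
            .imported
            .keys()
            .rev()
            .take(self.chunks_in_memory)
            .copied()
            .collect();
        self.imported.retain(|id, _| keep.contains(id));
        self.to_insert.clear();
    }
}

fn main() {
    let mut m = Chunked { chunks_in_memory: 1, ..Default::default() };
    m.to_insert.entry(2009).or_default().insert(3, 0.0);
    m.pre_export();
    m.post_export();
    assert_eq!(m.imported.len(), 1);
}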
pub fn multi_insert(&mut self, dates: &[Date], mut callback: F) + where + F: FnMut(&Date) -> T, + { + dates.iter().for_each(|date| { + self.insert(*date, callback(date)); + }); + } + + // KEEEEEP + pub fn multi_insert_last( + &mut self, + dates: &[Date], + source: &mut HeightMap, + last_height: &mut DateMap, + ) { + dates.iter().for_each(|date| { + self.insert( + *date, + source.get_or_import(&last_height.get_or_import(date).unwrap()), + ); + }); + } + + pub fn multi_insert_const(&mut self, dates: &[Date], constant: T) { + dates.iter().for_each(|date| { + self.insert(*date, constant); + }); + } + + pub fn multi_insert_simple_transform( + &mut self, + dates: &[Date], + source: &mut DateMap, + transform: F, + ) where + F: Fn(K) -> T, + K: MapValue, + { + dates.iter().for_each(|date| { + self.insert(*date, transform(source.get_or_import(date).unwrap())); + }); + } + + pub fn multi_insert_complex_transform( + &mut self, + dates: &[Date], + source: &mut DateMap, + mut transform: F, + ) where + K: MapValue, + F: FnMut((K, &Date, &mut DateMap)) -> T, + { + dates.iter().for_each(|date| { + self.insert( + *date, + transform((source.get_or_import(date).unwrap(), date, source)), + ); + }); + } + + pub fn multi_insert_add( + &mut self, + dates: &[Date], + added: &mut DateMap, + adder: &mut DateMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Add, + { + dates.iter().for_each(|date| { + self.insert( + *date, + T::lossy_from(added.get_or_import(date).unwrap()) + + T::lossy_from(adder.get_or_import(date).unwrap()), + ); + }); + } + + pub fn multi_insert_subtract( + &mut self, + dates: &[Date], + subtracted: &mut DateMap, + subtracter: &mut DateMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Sub, + { + dates.iter().for_each(|date| { + self.insert( + *date, + T::lossy_from(subtracted.get_or_import(date).unwrap()) + - T::lossy_from(subtracter.get_or_import(date).unwrap()), + ); + }); + } + + pub fn multi_insert_multiply( + &mut self, + dates: &[Date], + multiplied: &mut DateMap, + multiplier: &mut DateMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Mul, + { + dates.iter().for_each(|date| { + self.insert( + *date, + T::lossy_from(multiplied.get_or_import(date).unwrap()) + * T::lossy_from(multiplier.get_or_import(date).unwrap()), + ); + }); + } + + pub fn multi_insert_divide( + &mut self, + dates: &[Date], + divided: &mut DateMap, + divider: &mut DateMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + self._multi_insert_divide(dates, divided, divider, false) + } + + pub fn multi_insert_percentage( + &mut self, + dates: &[Date], + divided: &mut DateMap, + divider: &mut DateMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + self._multi_insert_divide(dates, divided, divider, true) + } + + fn _multi_insert_divide( + &mut self, + dates: &[Date], + divided: &mut DateMap, + divider: &mut DateMap, + as_percentage: bool, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + let multiplier = T::from(if as_percentage { 100 } else { 1 }); + + dates.iter().for_each(|date| { + self.insert( + *date, + T::lossy_from(divided.get_or_import(date).unwrap()) + / T::lossy_from(divider.get_or_import(date).unwrap()) + * multiplier, + ); + }); + } + + pub fn multi_insert_cumulative(&mut self, dates: &[Date], source: &mut DateMap) + where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + 
self._multi_insert_last_x_sum(dates, source, None) + } + + pub fn multi_insert_last_x_sum( + &mut self, + dates: &[Date], + source: &mut DateMap, + days: usize, + ) where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + self._multi_insert_last_x_sum(dates, source, Some(days)) + } + + fn _multi_insert_last_x_sum( + &mut self, + dates: &[Date], + source: &mut DateMap, + days: Option, + ) where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + let mut sum = None; + + dates.iter().for_each(|date| { + let to_subtract = days + .and_then(|x| { + date.checked_sub_days(Days::new(x as u64)) + .and_then(|previous_date| source.get_or_import(&Date::wrap(previous_date))) + }) + .unwrap_or_default(); + + let previous_sum = sum.unwrap_or_else(|| { + date.checked_sub_days(Days::new(1)) + .and_then(|previous_sum_date| { + self.get_or_import(&Date::wrap(previous_sum_date)) + }) + .unwrap_or_default() + }); + + let last_value = source.get_or_import(date).unwrap_or_else(|| { + dbg!(date); + panic!(); + }); + + sum.replace(previous_sum - T::lossy_from(to_subtract) + T::lossy_from(last_value)); + + self.insert(*date, sum.unwrap()); + }); + } + + pub fn multi_insert_simple_average( + &mut self, + dates: &[Date], + source: &mut DateMap, + days: usize, + ) where + T: Into + From, + K: MapValue + Sum, + f32: LossyFrom, + { + if days <= 1 { + panic!("Average of 1 or less is not useful"); + } + + let days = days as f32; + + let mut average = None; + + dates.iter().for_each(|date| { + let previous_average: f32 = average + .unwrap_or_else(|| { + date.checked_sub_days(Days::new(1)) + .and_then(|previous_average_date| { + self.get(&Date::wrap(previous_average_date)) + }) + .unwrap_or_default() + }) + .into(); + + let mut last_value = f32::lossy_from(source.get_or_import(date).unwrap_or_else(|| { + dbg!(date); + panic!() + })); + + if last_value.is_nan() { + last_value = 0.0; + } + + average.replace(((previous_average * (days - 1.0) + last_value) / days).into()); + + self.insert(*date, average.unwrap()); + }); + } + + pub fn multi_insert_net_change(&mut self, dates: &[Date], source: &mut DateMap, days: usize) + where + T: Sub, + { + dates.iter().for_each(|date| { + let previous_value = date + .checked_sub_days(Days::new(days as u64)) + .and_then(|date| source.get_or_import(&Date::wrap(date))) + .unwrap_or_default(); + + let last_value = source.get_or_import(date).unwrap(); + + let net_change = last_value - previous_value; + + self.insert(*date, net_change); + }); + } + + pub fn multi_insert_percentage_change( + &mut self, + dates: &[Date], + source: &mut DateMap, + days: usize, + ) where + T: Sub + FloatCore, + { + let one = T::from(1.0).unwrap(); + let hundred = T::from(100.0).unwrap(); + + dates.iter().for_each(|date| { + let previous_value = date + .checked_sub_days(Days::new(days as u64)) + .and_then(|date| source.get_or_import(&Date::wrap(date))) + .unwrap_or_default(); + + let last_value = source.get_or_import(date).unwrap(); + + let percentage_change = ((last_value / previous_value) - one) * hundred; + + self.insert(*date, percentage_change); + }); + } + + pub fn multi_insert_median( + &mut self, + dates: &[Date], + source: &mut DateMap, + days: Option, + ) where + T: FloatCore, + { + source.multi_insert_percentile(dates, vec![(self, 0.5)], days); + } + + pub fn multi_insert_percentile( + &mut self, + dates: &[Date], + mut map_and_percentiles: Vec<(&mut DateMap, f32)>, + days: Option, + ) where + T: FloatCore, + { + if days.map_or(false, |size| size < 3) { + panic!("Computing a percentile for a size lower 
than 3 is useless"); + } + + let mut ordered_vec = None; + let mut sorted_vec = None; + + let min_percentile_date = chrono::NaiveDate::from_ymd_opt(2012, 1, 1).unwrap(); + let min_percentile_wdate = Date::wrap(min_percentile_date); + + let nan = T::from(f32::NAN).unwrap(); + let two = T::from(2.0).unwrap(); + + dates.iter().cloned().try_for_each(|date| { + if date < min_percentile_wdate { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(date, nan); + }); + return ControlFlow::Continue::<()>(()); + } + + if let Some(start) = days.map_or(Some(min_percentile_date), |size| { + date.checked_sub_days(Days::new(size as u64)) + }) { + if sorted_vec.is_none() { + let mut vec = start + .iter_days() + .take_while(|d| *d <= *date) + .flat_map(|date| self.get_or_import(&Date::wrap(date))) + .filter(|f| !f.is_nan()) + .map(|f| OrderedFloat(f)) + .collect_vec(); + + if days.is_some() { + ordered_vec.replace(VecDeque::from(vec.clone())); + } + + vec.sort_unstable(); + sorted_vec.replace(vec); + } else { + let float_value = self.get_or_import(&date).unwrap(); + + if !float_value.is_nan() { + let float_value = OrderedFloat(float_value); + + if let Some(days) = days { + if let Some(ordered_vec) = ordered_vec.as_mut() { + if ordered_vec.len() == days { + let first = ordered_vec.pop_front().unwrap(); + + let pos = + sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); + + sorted_vec.as_mut().unwrap().remove(pos); + } + + ordered_vec.push_back(float_value); + } + } + + let pos = sorted_vec + .as_ref() + .unwrap() + .binary_search(&float_value) + .unwrap_or_else(|pos| pos); + + sorted_vec.as_mut().unwrap().insert(pos, float_value); + } + } + + let vec = sorted_vec.as_ref().unwrap(); + + let len = vec.len(); + + map_and_percentiles + .iter_mut() + .for_each(|(map, percentile)| { + if !(0.0..=1.0).contains(percentile) { + panic!("The percentile should be between 0.0 and 1.0"); + } + + let value = { + if len < 2 { + nan + } else { + let index = (len - 1) as f32 * *percentile; + + let fract = index.fract(); + + if fract != 0.0 { + (vec.get(index.ceil() as usize) + .unwrap_or_else(|| { + dbg!(vec, index, &self.path_all, &self.path_all, days); + panic!() + }) + .0 + + vec + .get(index.floor() as usize) + .unwrap_or_else(|| { + dbg!( + vec, + index, + &self.path_all, + &self.path_all, + days + ); + panic!() + }) + .0) + / two + } else { + vec.get(index.floor() as usize) + .unwrap_or_else(|| { + dbg!(vec, index); + panic!(); + }) + .0 + } + } + }; + + (*map).insert(date, value); + }); + } else { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(date, nan); + }); + } + + ControlFlow::Continue(()) + }); + } + + // + // pub fn transform(&self, transform: F) -> BTreeMap + // where + // T: Copy + Default, + // F: Fn((&WNaiveDate, &T, &BTreeMap, usize)) -> T, + // { + // Self::_transform(self.imported.lock().as_ref().unwrap(), transform) + // } + + // pub fn _transform(map: &BTreeMap, transform: F) -> BTreeMap + // where + // T: Copy + Default, + // F: Fn((&WNaiveDate, &T, &BTreeMap, usize)) -> T, + // { + // map.iter() + // .enumerate() + // .map(|(index, (date, value))| (date.to_owned(), transform((date, value, map, index)))) + // .collect() + // } + + // + // pub fn add(&self, other: &Self) -> BTreeMap + // where + // T: Add + Copy + Default, + // { + // Self::_add( + // self.imported.lock().as_ref().unwrap(), + // other.imported.lock().as_ref().unwrap(), + // ) + // } + + // pub fn _add( + // map1: &BTreeMap, + // map2: &BTreeMap, + // ) -> BTreeMap + // where + // T: Add 
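// Aside: the percentile lookup above, reduced to its core. When the rank
// (len - 1) * p is fractional it takes the *midpoint* of the two neighbours
// rather than interpolating by the fraction, and yields NaN for windows
// shorter than 2. Standalone sketch over a pre-sorted slice:
fn percentile(sorted: &[f64], p: f32) -> f64 {
    assert!((0.0..=1.0).contains(&p), "percentile must be within 0.0..=1.0");
    if sorted.len() < 2 {
        return f64::NAN;
    }
    let index = (sorted.len() - 1) as f32 * p;
    let (lo, hi) = (index.floor() as usize, index.ceil() as usize);
    if index.fract() != 0.0 {
        (sorted[lo] + sorted[hi]) / 2.0
    } else {
        sorted[lo]
    }
}

fn main() {
    let window = [1.0, 2.0, 4.0, 8.0];
    assert_eq!(percentile(&window, 0.5), 3.0); // even-sized window: midpoint
    assert_eq!(percentile(&window, 1.0), 8.0);
    // In the map code the window itself is maintained incrementally: a
    // VecDeque remembers insertion order for O(1) eviction while
    // `binary_search` keeps a parallel Vec sorted for the rank lookup above.
}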
+ Copy + Default, + // { + // Self::_transform(map1, |(date, value, ..)| { + // map2.get(date) + // .map(|value2| *value + *value2) + // .unwrap_or_default() + // }) + // } + + // + // pub fn subtract(&self, other: &Self) -> BTreeMap + // where + // T: Sub + Copy + Default, + // { + // Self::_subtract( + // self.imported.lock().as_ref().unwrap(), + // other.imported.lock().as_ref().unwrap(), + // ) + // } + + // pub fn _subtract( + // map1: &BTreeMap, + // map2: &BTreeMap, + // ) -> BTreeMap + // where + // T: Sub + Copy + Default, + // { + // if map1.len() != map2.len() { + // panic!("Can't subtract two arrays with a different length"); + // } + + // Self::_transform(map1, |(date, value, ..)| { + // map2.get(date) + // .map(|value2| *value - *value2) + // .unwrap_or_default() + // }) + // } + + // + // pub fn multiply(&self, other: &Self) -> BTreeMap + // where + // T: Mul + Copy + Default, + // { + // Self::_multiply( + // self.imported.lock().as_ref().unwrap(), + // other.imported.lock().as_ref().unwrap(), + // ) + // } + + // + // pub fn _multiply( + // map1: &BTreeMap, + // map2: &BTreeMap, + // ) -> BTreeMap + // where + // T: Mul + Copy + Default, + // { + // Self::_transform(map1, |(date, value, ..)| { + // map2.get(date) + // .map(|value2| *value * *value2) + // .unwrap_or_default() + // }) + // } + + // + // pub fn divide(&self, other: &Self) -> BTreeMap + // where + // T: Div + Copy + Default, + // { + // Self::_divide( + // self.imported.lock().as_ref().unwrap(), + // other.imported.lock().as_ref().unwrap(), + // ) + // } + + // + // pub fn _divide( + // map1: &BTreeMap, + // map2: &BTreeMap, + // ) -> BTreeMap + // where + // T: Div + Copy + Default, + // { + // Self::_transform(map1, |(date, value, ..)| { + // map2.get(date) + // .map(|value2| *value / *value2) + // .unwrap_or_default() + // }) + // } + + // + // pub fn cumulate(&self) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign, + // { + // Self::_cumulate(self.imported.lock().as_ref().unwrap()) + // } + + // + // pub fn _cumulate(map: &BTreeMap) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign, + // { + // let mut sum = T::default(); + + // map.iter() + // .map(|(date, value)| { + // sum += *value; + // (date.to_owned(), sum) + // }) + // .collect() + // } + + // pub fn insert_cumulative(&mut self, date: NaiveDate, source: &DateMap) -> T + // where + // T: Add + Sub, + // { + // let previous_cum = date + // .checked_sub_days(Days::new(1)) + // .map(|previous_date| { + // self.get(previous_date).unwrap_or_else(|| { + // if previous_date.year() == 2009 && previous_date.month() == 1 { + // let day = previous_date.day(); + + // if day == 8 { + // self.get(NaiveDate::from_str("2009-01-03").unwrap()) + // .unwrap() + // } else if day == 2 { + // T::default() + // } else { + // panic!() + // } + // } else { + // dbg!(previous_date, &self.path_all); + // panic!() + // } + // }) + // }) + // .unwrap_or_default(); + + // let last_value = source.get(date).unwrap(); + + // let cum_value = previous_cum + last_value; + + // self.insert(date, cum_value); + + // cum_value + // } + + // + // pub fn insert_last_x_sum(&mut self, date: NaiveDate, source: &DateMap, x: usize) -> T + // where + // T: Add + Sub, + // { + // let to_subtract = date + // .checked_sub_days(Days::new(x as u64 - 1)) + // .and_then(|previous_date| source.get(previous_date)) + // .unwrap_or_default(); + + // let previous_sum = date + // .checked_sub_days(Days::new(1)) + // .and_then(|previous_sum_date| self.get(previous_sum_date)) + // 
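// Aside: the commented-out legacy `insert_last_x_sum` above and the live
// `_multi_insert_last_x_sum` earlier in this file share one O(1)-per-key
// recurrence: sum(k) = sum(k-1) - value(k - window) + value(k). Passing no
// window drops the subtraction, which is exactly how
// `multi_insert_cumulative` reuses the same code path. Standalone sketch
// over a slice:
fn running_sums(values: &[f64], window: Option<usize>) -> Vec<f64> {
    let mut out = Vec::with_capacity(values.len());
    let mut sum = 0.0;
    for (i, v) in values.iter().enumerate() {
        sum += v;
        if let Some(w) = window {
            if let Some(j) = i.checked_sub(w) {
                sum -= values[j]; // value that just left the window
            }
        }
        out.push(sum);
    }
    out
}

fn main() {
    let v = [1.0, 2.0, 3.0, 4.0];
    assert_eq!(running_sums(&v, None), vec![1.0, 3.0, 6.0, 10.0]); // cumulative
    assert_eq!(running_sums(&v, Some(2)), vec![1.0, 3.0, 5.0, 7.0]); // windowed
}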
.unwrap_or_default(); + + // let last_value = source.get(date).unwrap(); + + // let sum = previous_sum - to_subtract + last_value; + + // self.insert(date, sum); + + // sum + // } + + // + // pub fn last_x_sum(&self, x: usize) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign + SubAssign, + // { + // Self::_last_x_sum(self.imported.lock().as_ref().unwrap(), x) + // } + + // pub fn _last_x_sum(map: &BTreeMap, days: usize) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign + SubAssign, + // { + // let mut sum = T::default(); + + // map.iter() + // .enumerate() + // .map(|(index, (date, value))| { + // sum += *value; + + // if index >= days - 1 { + // let previous_index = index + 1 - days; + + // sum -= *map.values().nth(previous_index).unwrap() + // } + + // (date.to_owned(), sum) + // }) + // .collect() + // } + + // + // pub fn simple_moving_average(&self, x: usize) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign + SubAssign + ToF32, + // { + // Self::_simple_moving_average(self.imported.lock().as_ref().unwrap(), x) + // } + + // pub fn insert_simple_average(&mut self, date: NaiveDate, source: &DateMap, x: usize) + // where + // T: Into + From, + // K: Clone + // + Copy + // + Default + // + Debug + // + Serialize + // + DeserializeOwned + // + Sum + // + savefile::Serialize + // + savefile::Deserialize + // + savefile::ReprC + // + ToF32, + // { + // let previous_average: f32 = date + // .checked_sub_days(Days::new(1)) + // .and_then(|previous_average_date| self.get(previous_average_date)) + // .unwrap_or_default() + // .into(); + + // let last_value: f32 = source.get(date).unwrap().to_f32(); + + // let sum = previous_average * x as f32 - 1.0 + last_value; + + // let average: T = (sum / x as f32).into(); + + // self.insert(date, average); + // } + + // + // pub fn _simple_moving_average( + // map: &BTreeMap, + // x: usize, + // ) -> BTreeMap + // where + // T: Sum + Copy + Default + AddAssign + SubAssign + Into, + // { + // let mut sum = T::default(); + + // map.iter() + // .enumerate() + // .map(|(index, (date, value))| { + // sum += *value; + + // if index >= x - 1 { + // sum -= *map.values().nth(index + 1 - x).unwrap() + // } + + // let float_sum: f32 = sum.into(); + + // (date.to_owned(), float_sum / x as f32) + // }) + // .collect() + // } + + // + // pub fn net_change(&self, offset: usize) -> BTreeMap + // where + // T: Copy + Default + Sub, + // { + // Self::_net_change(self.imported.lock().as_ref().unwrap(), offset) + // } + // + // + // pub fn insert_net_change(&mut self, date: NaiveDate, source: &DateMap, offset: usize) -> T + // where + // T: Sub, + // { + // let previous_value = date + // .checked_sub_days(Days::new(offset as u64)) + // .and_then(|date| source.get(date)) + // .unwrap_or_default(); + + // let last_value = source.get(date).unwrap_or_else(|| { + // dbg!(date); + // panic!(); + // }); + + // let net = last_value - previous_value; + + // self.insert(date, net); + + // net + // } + + // + // pub fn _net_change(map: &BTreeMap, offset: usize) -> BTreeMap + // where + // T: Copy + Default + Sub, + // { + // Self::_transform(map, |(_, value, map, index)| { + // let previous = { + // if let Some(previous_index) = index.checked_sub(offset) { + // *map.values().nth(previous_index).unwrap() + // } else { + // T::default() + // } + // }; + + // *value - previous + // }) + // } + + // + // pub fn _median(map: &BTreeMap, size: usize) -> BTreeMap> + // where + // T: FloatCore, + // { + // let even = size % 2 == 0; + // let median_index 
= size / 2; + + // if size < 3 { + // panic!("Computing a median for a size lower than 3 is useless"); + // } + + // map.iter() + // .enumerate() + // .map(|(index, (date, _))| { + // let value = { + // if index >= size - 1 { + // let mut vec = map + // .values() + // .rev() + // .take(size) + // .map(|v| OrderedFloat(*v)) + // .collect_vec(); + + // vec.sort_unstable(); + + // if even { + // Some( + // (**vec.get(median_index).unwrap() + // + **vec.get(median_index - 1).unwrap()) + // / T::from(2.0).unwrap(), + // ) + // } else { + // Some(**vec.get(median_index).unwrap()) + // } + // } else { + // None + // } + // }; + + // (date.to_owned(), value) + // }) + // .collect() + // } + // + // pub fn insert_median(&mut self, date: NaiveDate, source: &DateMap, size: usize) -> T + // where + // T: FloatCore, + // { + // if size < 3 { + // panic!("Computing a median for a size lower than 3 is useless"); + // } + + // let median = { + // if let Some(start) = date.checked_sub_days(Days::new(size as u64 - 1)) { + // let even = size % 2 == 0; + // let median_index = size / 2; + + // let mut vec = start + // .iter_days() + // .take(size) + // .flat_map(|date| source.get(date)) + // .map(|f| OrderedFloat(f)) + // .collect_vec(); + + // if vec.len() != size { + // return T::default(); + // } + + // vec.sort_unstable(); + + // if even { + // (vec.get(median_index).unwrap().0 + vec.get(median_index - 1).unwrap().0) + // / T::from(2.0).unwrap() + // } else { + // vec.get(median_index).unwrap().0 + // } + // } else { + // T::default() + // } + // }; + + // self.insert(date, median); + + // median + // } +} diff --git a/parser/src/structs/date_map.rs b/parser/src/structs/date_map.rs index 5b7f4ef28..b3e754cfc 100644 --- a/parser/src/structs/date_map.rs +++ b/parser/src/structs/date_map.rs @@ -1,348 +1,34 @@ -use std::{ - collections::{BTreeMap, VecDeque}, - fmt::Debug, - fs, - iter::Sum, - mem, - ops::{Add, ControlFlow, Div, Mul, Sub}, - path::{Path, PathBuf}, -}; +use crate::{Date, HeightMap}; -use allocative::Allocative; -use bincode::{Decode, Encode}; -use chrono::{Datelike, Days}; -use itertools::Itertools; -use ordered_float::{FloatCore, OrderedFloat}; -use serde::{Deserialize, Serialize}; +use super::{AnyMap, DateMapChunkId, GenericMap, Height, MapValue, SerializedBTreeMap}; -use crate::{ - io::{format_path, Serialization}, - utils::{log, LossyFrom}, -}; - -use super::{AnyMap, HeightMap, MapValue, WNaiveDate}; - -const NUMBER_OF_UNSAFE_DATES: usize = 2; -const MIN_YEAR: usize = 2009; - -#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)] -pub struct SerializedDateMap { - version: u32, - map: BTreeMap, -} - -#[derive(Default, Allocative)] -pub struct DateMap { - version: u32, - - path_all: String, - path_last: Option, - - chunks_in_memory: usize, - - serialization: Serialization, - - pub initial_last_date: Option, - pub initial_first_unsafe_date: Option, - - imported: BTreeMap>, - to_insert: BTreeMap>, -} +pub type DateMap = GenericMap>; impl DateMap where T: MapValue, { - pub fn new_bin(version: u32, path: &str) -> Self { - Self::new(version, path, Serialization::Binary, 1, true) - } - - pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { - Self::new(version, path, Serialization::Binary, 1, export_last) - } - - pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { - Self::new(version, path, Serialization::Json, usize::MAX, export_last) - } - - fn new( - version: u32, - path: &str, - serialization: Serialization, - chunks_in_memory: usize, - 
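// Aside: with the angle brackets this listing lost, the new one-line body of
// date_map.rs reads as follows, assuming a
// `GenericMap<Key, Value, ChunkId, Serialized>` parameter order (the
// declaration itself appears later in this diff):
//
//     pub type DateMap<T> = GenericMap<Date, T, DateMapChunkId, SerializedBTreeMap<Date, T>>;
//
// The hand-rolled implementation deleted below survives only as this alias
// plus the date-specific `multi_insert_last`; chunking, lazy import/export
// and the safety bookkeeping (`initial_last_key`, `initial_first_unsafe_key`)
// now live once in `GenericMap`, shared with what is presumably a symmetric
// `HeightMap<T>` alias over `Height` keys.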
export_last: bool, - ) -> Self { - if chunks_in_memory < 1 { - panic!("Should always have at least the latest chunk in memory"); - } - - let path = format_path(path); - - let path_all = format!("{path}/date"); - - fs::create_dir_all(&path_all).unwrap(); - - let path_last = { - if export_last { - Some(serialization.append_extension(&format!("{path}/last"))) - } else { - None - } - }; - - let mut s = Self { - version, - - path_all, - path_last, - - chunks_in_memory, - - serialization, - - initial_last_date: None, - initial_first_unsafe_date: None, - - to_insert: BTreeMap::default(), - imported: BTreeMap::default(), - }; - - s.read_dir() - .into_iter() - .rev() - .take(chunks_in_memory) - .for_each(|(chunk_start, path)| { - if let Ok(serialized) = s.import(&path) { - if serialized.version == s.version { - s.imported.insert(chunk_start, serialized); - } else { - s.read_dir() - .iter() - .for_each(|(_, path)| fs::remove_file(path).unwrap()) - } - } - }); - - s.initial_last_date = s - .imported - .values() - .last() - .and_then(|serialized| serialized.map.keys().copied().max()); - - s.initial_first_unsafe_date = s.initial_last_date.and_then(|last_date| { - let offset = NUMBER_OF_UNSAFE_DATES - 1; - last_date - .checked_sub_days(Days::new(offset as u64)) - .map(WNaiveDate::wrap) + pub fn multi_insert_last( + &mut self, + dates: &[Date], + source: &mut HeightMap, + last_height: &mut DateMap, + ) { + dates.iter().for_each(|date| { + self.insert( + *date, + source + .get_or_import(&last_height.get_or_import(date).unwrap()) + .unwrap(), + ); }); - - if s.initial_first_unsafe_date.is_none() { - log(&format!("New {path}")); - } - - s - } - - pub fn insert(&mut self, date: WNaiveDate, value: T) -> T { - if !self.is_date_safe(date) { - self.to_insert - .entry(date.year() as usize) - .or_default() - .insert(date, value); - } - - value - } - - pub fn insert_default(&mut self, date: WNaiveDate) -> T { - self.insert(date, T::default()) - } - - /// Same as get but with &WNaiveDate instead of NaiveDate - pub fn get(&self, date: &WNaiveDate) -> Option { - let year = date.year() as usize; - - self.to_insert - .get(&year) - .and_then(|tree| tree.get(date).cloned()) - .or_else(|| { - self.imported - .get(&year) - .and_then(|serialized| serialized.map.get(date)) - .cloned() - }) - } - - /// Same as get_or_import but with &WNaiveDate instead of NaiveDate - pub fn get_or_import(&mut self, date: &WNaiveDate) -> Option { - let year = date.year() as usize; - - if year < MIN_YEAR { - return None; - } - - self.to_insert - .get(&year) - .and_then(|tree| tree.get(date).cloned()) - .or_else(|| { - #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() - if !self.imported.contains_key(&year) { - let dir_content = self.read_dir(); - - if let Some(path) = dir_content.get(&year) { - let serialized = self.import(path).unwrap(); - // .unwrap_or(SerializedDateMap { - // version: self.version, - // map: BTreeMap::default(), - // }); - - self.imported.insert(year, serialized); - } - } - - self.imported - .get(&year) - .and_then(|serialized| serialized.map.get(date)) - .cloned() - }) - } - - #[inline(always)] - pub fn is_date_safe(&self, date: WNaiveDate) -> bool { - self.initial_first_unsafe_date - .map_or(false, |initial_first_unsafe_date| { - initial_first_unsafe_date > date - }) - } - - fn read_dir(&self) -> BTreeMap { - Self::_read_dir(&self.path_all, &self.serialization) - } - - pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { - fs::read_dir(path) - .unwrap() - .map(|entry| 
entry.unwrap().path()) - .filter(|path| { - let file_stem = path.file_stem().unwrap().to_str().unwrap(); - let extension = path.extension().unwrap().to_str().unwrap(); - - path.is_file() - && file_stem.len() == 4 - && file_stem.starts_with("20") - && extension == serialization.to_extension() - }) - .map(|path| { - let year = path - .file_stem() - .unwrap() - .to_str() - .unwrap() - .parse::() - .unwrap(); - - (year, path) - }) - .collect() - } - - fn import(&self, path: &Path) -> color_eyre::Result> { - self.serialization - .import::>(path.to_str().unwrap()) - } -} - -impl AnyMap for DateMap -where - T: MapValue, -{ - fn path(&self) -> &str { - &self.path_all - } - - fn path_last(&self) -> &Option { - &self.path_last - } - - fn t_name(&self) -> &str { - std::any::type_name::() - } - - // fn reset(&mut self) -> color_eyre::Result<()> { - // fs::remove_dir(&self.path_all)?; - - // self.initial_last_date = None; - // self.initial_first_unsafe_date = None; - - // self.imported.clear(); - // self.to_insert.clear(); - - // Ok(()) - // } - - fn pre_export(&mut self) { - self.to_insert.iter_mut().for_each(|(chunk_start, map)| { - self.imported - .entry(*chunk_start) - .or_insert(SerializedDateMap { - version: self.version, - map: BTreeMap::default(), - }) - .map - .extend(mem::take(map)); - }); - } - - fn export(&self) -> color_eyre::Result<()> { - let len = self.imported.len(); - - self.to_insert.iter().enumerate().try_for_each( - |(index, (year, map))| -> color_eyre::Result<()> { - if !map.is_empty() { - unreachable!() - } - - let path = self - .serialization - .append_extension(&format!("{}/{}", self.path_all, year)); - - let serialized = self.imported.get(year).unwrap(); - - self.serialization.export(&path, serialized)?; - - if index == len - 1 { - if let Some(path_last) = self.path_last.as_ref() { - self.serialization - .export(path_last, serialized.map.values().last().unwrap())?; - } - } - - Ok(()) - }, - ) - } - - fn post_export(&mut self) { - self.imported - .keys() - .rev() - .enumerate() - .filter(|(index, _)| *index + 1 > self.chunks_in_memory) - .map(|(_, key)| *key) - .collect_vec() - .iter() - .for_each(|key| { - self.imported.remove(key); - }); - - self.to_insert.clear(); } } pub trait AnyDateMap: AnyMap { - fn get_initial_first_unsafe_date(&self) -> Option; + fn get_initial_first_unsafe_date(&self) -> Option; - fn get_initial_last_date(&self) -> Option; + fn get_initial_last_date(&self) -> Option; fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync); @@ -354,13 +40,13 @@ where T: MapValue, { #[inline(always)] - fn get_initial_first_unsafe_date(&self) -> Option { - self.initial_first_unsafe_date + fn get_initial_first_unsafe_date(&self) -> Option { + self.initial_first_unsafe_key } #[inline(always)] - fn get_initial_last_date(&self) -> Option { - self.initial_last_date + fn get_initial_last_date(&self) -> Option { + self.initial_last_key } fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) { @@ -371,924 +57,3 @@ where self } } - -impl DateMap -where - T: MapValue, -{ - pub fn multi_insert(&mut self, dates: &[WNaiveDate], mut callback: F) - where - F: FnMut(&WNaiveDate) -> T, - { - dates.iter().for_each(|date| { - self.insert(*date, callback(date)); - }); - } - - pub fn multi_insert_last( - &mut self, - dates: &[WNaiveDate], - source: &mut HeightMap, - last_height: &mut DateMap, - ) { - dates.iter().for_each(|date| { - self.insert( - *date, - source.get_or_import(&last_height.get_or_import(date).unwrap()), - ); - }); - } - - pub fn multi_insert_const(&mut self, dates: &[WNaiveDate], 
constant: T) { - dates.iter().for_each(|date| { - self.insert(*date, constant); - }); - } - - pub fn multi_insert_simple_transform( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - transform: F, - ) where - F: Fn(K) -> T, - K: MapValue, - { - dates.iter().for_each(|date| { - self.insert(*date, transform(source.get_or_import(date).unwrap())); - }); - } - - pub fn multi_insert_complex_transform( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - mut transform: F, - ) where - K: MapValue, - F: FnMut((K, &WNaiveDate, &mut DateMap)) -> T, - { - dates.iter().for_each(|date| { - self.insert( - *date, - transform((source.get_or_import(date).unwrap(), date, source)), - ); - }); - } - - pub fn multi_insert_add( - &mut self, - dates: &[WNaiveDate], - added: &mut DateMap, - adder: &mut DateMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Add, - { - dates.iter().for_each(|date| { - self.insert( - *date, - T::lossy_from(added.get_or_import(date).unwrap()) - + T::lossy_from(adder.get_or_import(date).unwrap()), - ); - }); - } - - pub fn multi_insert_subtract( - &mut self, - dates: &[WNaiveDate], - subtracted: &mut DateMap, - subtracter: &mut DateMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Sub, - { - dates.iter().for_each(|date| { - self.insert( - *date, - T::lossy_from(subtracted.get_or_import(date).unwrap()) - - T::lossy_from(subtracter.get_or_import(date).unwrap()), - ); - }); - } - - pub fn multi_insert_multiply( - &mut self, - dates: &[WNaiveDate], - multiplied: &mut DateMap, - multiplier: &mut DateMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Mul, - { - dates.iter().for_each(|date| { - self.insert( - *date, - T::lossy_from(multiplied.get_or_import(date).unwrap()) - * T::lossy_from(multiplier.get_or_import(date).unwrap()), - ); - }); - } - - pub fn multi_insert_divide( - &mut self, - dates: &[WNaiveDate], - divided: &mut DateMap, - divider: &mut DateMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - self._multi_insert_divide(dates, divided, divider, false) - } - - pub fn multi_insert_percentage( - &mut self, - dates: &[WNaiveDate], - divided: &mut DateMap, - divider: &mut DateMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - self._multi_insert_divide(dates, divided, divider, true) - } - - pub fn _multi_insert_divide( - &mut self, - dates: &[WNaiveDate], - divided: &mut DateMap, - divider: &mut DateMap, - as_percentage: bool, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - let multiplier = T::from(if as_percentage { 100 } else { 1 }); - - dates.iter().for_each(|date| { - self.insert( - *date, - T::lossy_from(divided.get_or_import(date).unwrap()) - / T::lossy_from(divider.get_or_import(date).unwrap()) - * multiplier, - ); - }); - } - - pub fn multi_insert_cumulative(&mut self, dates: &[WNaiveDate], source: &mut DateMap) - where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - self._multi_insert_last_x_sum(dates, source, None) - } - - pub fn multi_insert_last_x_sum( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: usize, - ) where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - self._multi_insert_last_x_sum(dates, source, Some(days)) - } - - fn _multi_insert_last_x_sum( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: Option, - ) where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - 
let mut sum = None; - - dates.iter().for_each(|date| { - let to_subtract = days - .and_then(|x| { - date.checked_sub_days(Days::new(x as u64)) - .and_then(|previous_date| { - source.get_or_import(&WNaiveDate::wrap(previous_date)) - }) - }) - .unwrap_or_default(); - - let previous_sum = sum.unwrap_or_else(|| { - date.checked_sub_days(Days::new(1)) - .and_then(|previous_sum_date| { - self.get_or_import(&WNaiveDate::wrap(previous_sum_date)) - }) - .unwrap_or_default() - }); - - let last_value = source.get_or_import(date).unwrap_or_else(|| { - dbg!(date); - panic!(); - }); - - sum.replace(previous_sum - T::lossy_from(to_subtract) + T::lossy_from(last_value)); - - self.insert(*date, sum.unwrap()); - }); - } - - pub fn multi_insert_simple_average( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: usize, - ) where - T: Into + From, - K: MapValue + Sum, - f32: LossyFrom, - { - if days <= 1 { - panic!("Average of 1 or less is not useful"); - } - - let days = days as f32; - - let mut average = None; - - dates.iter().for_each(|date| { - let previous_average: f32 = average - .unwrap_or_else(|| { - date.checked_sub_days(Days::new(1)) - .and_then(|previous_average_date| { - self.get(&WNaiveDate::wrap(previous_average_date)) - }) - .unwrap_or_default() - }) - .into(); - - let mut last_value = f32::lossy_from(source.get_or_import(date).unwrap_or_else(|| { - dbg!(date); - panic!() - })); - - if last_value.is_nan() { - last_value = 0.0; - } - - average.replace(((previous_average * (days - 1.0) + last_value) / days).into()); - - self.insert(*date, average.unwrap()); - }); - } - - pub fn multi_insert_net_change( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: usize, - ) where - T: Sub, - { - dates.iter().for_each(|date| { - let previous_value = date - .checked_sub_days(Days::new(days as u64)) - .and_then(|date| source.get_or_import(&WNaiveDate::wrap(date))) - .unwrap_or_default(); - - let last_value = source.get_or_import(date).unwrap(); - - let net_change = last_value - previous_value; - - self.insert(*date, net_change); - }); - } - - pub fn multi_insert_percentage_change( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: usize, - ) where - T: Sub + FloatCore, - { - let one = T::from(1.0).unwrap(); - let hundred = T::from(100.0).unwrap(); - - dates.iter().for_each(|date| { - let previous_value = date - .checked_sub_days(Days::new(days as u64)) - .and_then(|date| source.get_or_import(&WNaiveDate::wrap(date))) - .unwrap_or_default(); - - let last_value = source.get_or_import(date).unwrap(); - - let percentage_change = ((last_value / previous_value) - one) * hundred; - - self.insert(*date, percentage_change); - }); - } - - pub fn multi_insert_median( - &mut self, - dates: &[WNaiveDate], - source: &mut DateMap, - days: Option, - ) where - T: FloatCore, - { - source.multi_insert_percentile(dates, vec![(self, 0.5)], days); - } - - pub fn multi_insert_percentile( - &mut self, - dates: &[WNaiveDate], - mut map_and_percentiles: Vec<(&mut DateMap, f32)>, - days: Option, - ) where - T: FloatCore, - { - if days.map_or(false, |size| size < 3) { - panic!("Computing a percentile for a size lower than 3 is useless"); - } - - let mut ordered_vec = None; - let mut sorted_vec = None; - - let min_percentile_date = chrono::NaiveDate::from_ymd_opt(2012, 1, 1).unwrap(); - let min_percentile_wdate = WNaiveDate::wrap(min_percentile_date); - - let nan = T::from(f32::NAN).unwrap(); - let two = T::from(2.0).unwrap(); - - dates.iter().cloned().try_for_each(|date| { - if date < 
min_percentile_wdate { - map_and_percentiles.iter_mut().for_each(|(map, _)| { - (*map).insert(date, nan); - }); - return ControlFlow::Continue::<()>(()); - } - - if let Some(start) = days.map_or(Some(min_percentile_date), |size| { - date.checked_sub_days(Days::new(size as u64)) - }) { - if sorted_vec.is_none() { - let mut vec = start - .iter_days() - .take_while(|d| *d <= *date) - .flat_map(|date| self.get_or_import(&WNaiveDate::wrap(date))) - .filter(|f| !f.is_nan()) - .map(|f| OrderedFloat(f)) - .collect_vec(); - - if days.is_some() { - ordered_vec.replace(VecDeque::from(vec.clone())); - } - - vec.sort_unstable(); - sorted_vec.replace(vec); - } else { - let float_value = self.get_or_import(&date).unwrap(); - - if !float_value.is_nan() { - let float_value = OrderedFloat(float_value); - - if let Some(days) = days { - if let Some(ordered_vec) = ordered_vec.as_mut() { - if ordered_vec.len() == days { - let first = ordered_vec.pop_front().unwrap(); - - let pos = - sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); - - sorted_vec.as_mut().unwrap().remove(pos); - } - - ordered_vec.push_back(float_value); - } - } - - let pos = sorted_vec - .as_ref() - .unwrap() - .binary_search(&float_value) - .unwrap_or_else(|pos| pos); - - sorted_vec.as_mut().unwrap().insert(pos, float_value); - } - } - - let vec = sorted_vec.as_ref().unwrap(); - - let len = vec.len(); - - map_and_percentiles - .iter_mut() - .for_each(|(map, percentile)| { - if !(0.0..=1.0).contains(percentile) { - panic!("The percentile should be between 0.0 and 1.0"); - } - - let value = { - if len < 2 { - nan - } else { - let index = (len - 1) as f32 * *percentile; - - let fract = index.fract(); - - if fract != 0.0 { - (vec.get(index.ceil() as usize) - .unwrap_or_else(|| { - dbg!(vec, index, &self.path_all, &self.path_all, days); - panic!() - }) - .0 - + vec - .get(index.floor() as usize) - .unwrap_or_else(|| { - dbg!( - vec, - index, - &self.path_all, - &self.path_all, - days - ); - panic!() - }) - .0) - / two - } else { - vec.get(index.floor() as usize) - .unwrap_or_else(|| { - dbg!(vec, index); - panic!(); - }) - .0 - } - } - }; - - (*map).insert(date, value); - }); - } else { - map_and_percentiles.iter_mut().for_each(|(map, _)| { - (*map).insert(date, nan); - }); - } - - ControlFlow::Continue(()) - }); - } - - // - // pub fn transform(&self, transform: F) -> BTreeMap - // where - // T: Copy + Default, - // F: Fn((&WNaiveDate, &T, &BTreeMap, usize)) -> T, - // { - // Self::_transform(self.imported.lock().as_ref().unwrap(), transform) - // } - - // pub fn _transform(map: &BTreeMap, transform: F) -> BTreeMap - // where - // T: Copy + Default, - // F: Fn((&WNaiveDate, &T, &BTreeMap, usize)) -> T, - // { - // map.iter() - // .enumerate() - // .map(|(index, (date, value))| (date.to_owned(), transform((date, value, map, index)))) - // .collect() - // } - - // - // pub fn add(&self, other: &Self) -> BTreeMap - // where - // T: Add + Copy + Default, - // { - // Self::_add( - // self.imported.lock().as_ref().unwrap(), - // other.imported.lock().as_ref().unwrap(), - // ) - // } - - // pub fn _add( - // map1: &BTreeMap, - // map2: &BTreeMap, - // ) -> BTreeMap - // where - // T: Add + Copy + Default, - // { - // Self::_transform(map1, |(date, value, ..)| { - // map2.get(date) - // .map(|value2| *value + *value2) - // .unwrap_or_default() - // }) - // } - - // - // pub fn subtract(&self, other: &Self) -> BTreeMap - // where - // T: Sub + Copy + Default, - // { - // Self::_subtract( - // self.imported.lock().as_ref().unwrap(), - // 
other.imported.lock().as_ref().unwrap(), - // ) - // } - - // pub fn _subtract( - // map1: &BTreeMap, - // map2: &BTreeMap, - // ) -> BTreeMap - // where - // T: Sub + Copy + Default, - // { - // if map1.len() != map2.len() { - // panic!("Can't subtract two arrays with a different length"); - // } - - // Self::_transform(map1, |(date, value, ..)| { - // map2.get(date) - // .map(|value2| *value - *value2) - // .unwrap_or_default() - // }) - // } - - // - // pub fn multiply(&self, other: &Self) -> BTreeMap - // where - // T: Mul + Copy + Default, - // { - // Self::_multiply( - // self.imported.lock().as_ref().unwrap(), - // other.imported.lock().as_ref().unwrap(), - // ) - // } - - // - // pub fn _multiply( - // map1: &BTreeMap, - // map2: &BTreeMap, - // ) -> BTreeMap - // where - // T: Mul + Copy + Default, - // { - // Self::_transform(map1, |(date, value, ..)| { - // map2.get(date) - // .map(|value2| *value * *value2) - // .unwrap_or_default() - // }) - // } - - // - // pub fn divide(&self, other: &Self) -> BTreeMap - // where - // T: Div + Copy + Default, - // { - // Self::_divide( - // self.imported.lock().as_ref().unwrap(), - // other.imported.lock().as_ref().unwrap(), - // ) - // } - - // - // pub fn _divide( - // map1: &BTreeMap, - // map2: &BTreeMap, - // ) -> BTreeMap - // where - // T: Div + Copy + Default, - // { - // Self::_transform(map1, |(date, value, ..)| { - // map2.get(date) - // .map(|value2| *value / *value2) - // .unwrap_or_default() - // }) - // } - - // - // pub fn cumulate(&self) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign, - // { - // Self::_cumulate(self.imported.lock().as_ref().unwrap()) - // } - - // - // pub fn _cumulate(map: &BTreeMap) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign, - // { - // let mut sum = T::default(); - - // map.iter() - // .map(|(date, value)| { - // sum += *value; - // (date.to_owned(), sum) - // }) - // .collect() - // } - - // pub fn insert_cumulative(&mut self, date: NaiveDate, source: &DateMap) -> T - // where - // T: Add + Sub, - // { - // let previous_cum = date - // .checked_sub_days(Days::new(1)) - // .map(|previous_date| { - // self.get(previous_date).unwrap_or_else(|| { - // if previous_date.year() == 2009 && previous_date.month() == 1 { - // let day = previous_date.day(); - - // if day == 8 { - // self.get(NaiveDate::from_str("2009-01-03").unwrap()) - // .unwrap() - // } else if day == 2 { - // T::default() - // } else { - // panic!() - // } - // } else { - // dbg!(previous_date, &self.path_all); - // panic!() - // } - // }) - // }) - // .unwrap_or_default(); - - // let last_value = source.get(date).unwrap(); - - // let cum_value = previous_cum + last_value; - - // self.insert(date, cum_value); - - // cum_value - // } - - // - // pub fn insert_last_x_sum(&mut self, date: NaiveDate, source: &DateMap, x: usize) -> T - // where - // T: Add + Sub, - // { - // let to_subtract = date - // .checked_sub_days(Days::new(x as u64 - 1)) - // .and_then(|previous_date| source.get(previous_date)) - // .unwrap_or_default(); - - // let previous_sum = date - // .checked_sub_days(Days::new(1)) - // .and_then(|previous_sum_date| self.get(previous_sum_date)) - // .unwrap_or_default(); - - // let last_value = source.get(date).unwrap(); - - // let sum = previous_sum - to_subtract + last_value; - - // self.insert(date, sum); - - // sum - // } - - // - // pub fn last_x_sum(&self, x: usize) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign + SubAssign, - // { - // 
Self::_last_x_sum(self.imported.lock().as_ref().unwrap(), x) - // } - - // pub fn _last_x_sum(map: &BTreeMap, days: usize) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign + SubAssign, - // { - // let mut sum = T::default(); - - // map.iter() - // .enumerate() - // .map(|(index, (date, value))| { - // sum += *value; - - // if index >= days - 1 { - // let previous_index = index + 1 - days; - - // sum -= *map.values().nth(previous_index).unwrap() - // } - - // (date.to_owned(), sum) - // }) - // .collect() - // } - - // - // pub fn simple_moving_average(&self, x: usize) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign + SubAssign + ToF32, - // { - // Self::_simple_moving_average(self.imported.lock().as_ref().unwrap(), x) - // } - - // pub fn insert_simple_average(&mut self, date: NaiveDate, source: &DateMap, x: usize) - // where - // T: Into + From, - // K: Clone - // + Copy - // + Default - // + Debug - // + Serialize - // + DeserializeOwned - // + Sum - // + savefile::Serialize - // + savefile::Deserialize - // + savefile::ReprC - // + ToF32, - // { - // let previous_average: f32 = date - // .checked_sub_days(Days::new(1)) - // .and_then(|previous_average_date| self.get(previous_average_date)) - // .unwrap_or_default() - // .into(); - - // let last_value: f32 = source.get(date).unwrap().to_f32(); - - // let sum = previous_average * x as f32 - 1.0 + last_value; - - // let average: T = (sum / x as f32).into(); - - // self.insert(date, average); - // } - - // - // pub fn _simple_moving_average( - // map: &BTreeMap, - // x: usize, - // ) -> BTreeMap - // where - // T: Sum + Copy + Default + AddAssign + SubAssign + Into, - // { - // let mut sum = T::default(); - - // map.iter() - // .enumerate() - // .map(|(index, (date, value))| { - // sum += *value; - - // if index >= x - 1 { - // sum -= *map.values().nth(index + 1 - x).unwrap() - // } - - // let float_sum: f32 = sum.into(); - - // (date.to_owned(), float_sum / x as f32) - // }) - // .collect() - // } - - // - // pub fn net_change(&self, offset: usize) -> BTreeMap - // where - // T: Copy + Default + Sub, - // { - // Self::_net_change(self.imported.lock().as_ref().unwrap(), offset) - // } - // - // - // pub fn insert_net_change(&mut self, date: NaiveDate, source: &DateMap, offset: usize) -> T - // where - // T: Sub, - // { - // let previous_value = date - // .checked_sub_days(Days::new(offset as u64)) - // .and_then(|date| source.get(date)) - // .unwrap_or_default(); - - // let last_value = source.get(date).unwrap_or_else(|| { - // dbg!(date); - // panic!(); - // }); - - // let net = last_value - previous_value; - - // self.insert(date, net); - - // net - // } - - // - // pub fn _net_change(map: &BTreeMap, offset: usize) -> BTreeMap - // where - // T: Copy + Default + Sub, - // { - // Self::_transform(map, |(_, value, map, index)| { - // let previous = { - // if let Some(previous_index) = index.checked_sub(offset) { - // *map.values().nth(previous_index).unwrap() - // } else { - // T::default() - // } - // }; - - // *value - previous - // }) - // } - - // - // pub fn _median(map: &BTreeMap, size: usize) -> BTreeMap> - // where - // T: FloatCore, - // { - // let even = size % 2 == 0; - // let median_index = size / 2; - - // if size < 3 { - // panic!("Computing a median for a size lower than 3 is useless"); - // } - - // map.iter() - // .enumerate() - // .map(|(index, (date, _))| { - // let value = { - // if index >= size - 1 { - // let mut vec = map - // .values() - // .rev() - // .take(size) - // .map(|v| 
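// Aside: despite its name, `multi_insert_simple_average` (the live version
// earlier in this diff, like the deleted draft above) uses the recurrence
// avg(k) = (avg(k-1) * (n - 1) + x(k)) / n, i.e. an exponentially smoothed
// (Wilder-style) moving average rather than a true n-day windowed mean.
// Standalone sketch, including the NaN-to-zero guard from the map code:
fn smoothed_averages(values: &[f32], n: f32) -> Vec<f32> {
    let mut avg = 0.0;
    values
        .iter()
        .map(|x| {
            let x = if x.is_nan() { 0.0 } else { *x };
            avg = (avg * (n - 1.0) + x) / n;
            avg
        })
        .collect()
}

fn main() {
    // The output converges toward the input level instead of reproducing a
    // fixed-window mean, and warms up from 0.0 exactly like the map code:
    let out = smoothed_averages(&[10.0; 100], 7.0);
    assert!(out[0] < 2.0);
    assert!((out[99] - 10.0).abs() < 1e-3);
}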
OrderedFloat(*v)) - // .collect_vec(); - - // vec.sort_unstable(); - - // if even { - // Some( - // (**vec.get(median_index).unwrap() - // + **vec.get(median_index - 1).unwrap()) - // / T::from(2.0).unwrap(), - // ) - // } else { - // Some(**vec.get(median_index).unwrap()) - // } - // } else { - // None - // } - // }; - - // (date.to_owned(), value) - // }) - // .collect() - // } - // - // pub fn insert_median(&mut self, date: NaiveDate, source: &DateMap, size: usize) -> T - // where - // T: FloatCore, - // { - // if size < 3 { - // panic!("Computing a median for a size lower than 3 is useless"); - // } - - // let median = { - // if let Some(start) = date.checked_sub_days(Days::new(size as u64 - 1)) { - // let even = size % 2 == 0; - // let median_index = size / 2; - - // let mut vec = start - // .iter_days() - // .take(size) - // .flat_map(|date| source.get(date)) - // .map(|f| OrderedFloat(f)) - // .collect_vec(); - - // if vec.len() != size { - // return T::default(); - // } - - // vec.sort_unstable(); - - // if even { - // (vec.get(median_index).unwrap().0 + vec.get(median_index - 1).unwrap().0) - // / T::from(2.0).unwrap() - // } else { - // vec.get(median_index).unwrap().0 - // } - // } else { - // T::default() - // } - // }; - - // self.insert(date, median); - - // median - // } -} diff --git a/parser/src/structs/date_map_chunk_id.rs b/parser/src/structs/date_map_chunk_id.rs new file mode 100644 index 000000000..7592754a7 --- /dev/null +++ b/parser/src/structs/date_map_chunk_id.rs @@ -0,0 +1,33 @@ +use allocative::Allocative; +use chrono::Datelike; + +use crate::Date; + +use super::MapChunkId; + +#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative)] +pub struct DateMapChunkId(i32); + +impl DateMapChunkId { + pub fn new(date: &Date) -> Self { + Self(date.year()) + } +} + +impl MapChunkId for DateMapChunkId { + fn to_name(&self) -> String { + self.0.to_string() + } + + fn from_name(name: &str) -> Self { + Self(name.parse::().unwrap()) + } + + fn to_usize(self) -> usize { + self.0 as usize + } + + fn from_usize(id: usize) -> Self { + Self(id as i32) + } +} diff --git a/parser/src/structs/empty_address_data.rs b/parser/src/structs/empty_address_data.rs index c396e1bec..e3e244210 100644 --- a/parser/src/structs/empty_address_data.rs +++ b/parser/src/structs/empty_address_data.rs @@ -1,12 +1,12 @@ use allocative::Allocative; use sanakirja::{direct_repr, Storable, UnsizedStorable}; -use super::{AddressData, AddressType, WAmount}; +use super::{AddressData, AddressType, Amount}; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)] pub struct EmptyAddressData { pub address_type: AddressType, - pub transfered: WAmount, + pub transfered: Amount, } direct_repr!(EmptyAddressData); diff --git a/parser/src/structs/generic_map.rs b/parser/src/structs/generic_map.rs new file mode 100644 index 000000000..3f38d0903 --- /dev/null +++ b/parser/src/structs/generic_map.rs @@ -0,0 +1,860 @@ +use std::{ + collections::{BTreeMap, VecDeque}, + fmt::Debug, + fs, + iter::Sum, + mem, + ops::{Add, ControlFlow, Div, Mul, Sub}, + path::{Path, PathBuf}, +}; + +use allocative::Allocative; +use bincode::{Decode, Encode}; +use itertools::Itertools; +use ordered_float::{FloatCore, OrderedFloat}; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{log, utils::LossyFrom, Serialization}; + +use super::{AnyMap, MapValue}; + +pub trait MapKey +where + Self: Sized + PartialOrd + Ord + Clone + Copy + Debug, + ChunkId: MapChunkId, +{ + fn to_chunk_id(&self) -> 
ChunkId; + fn to_first_unsafe(&self) -> Option; + fn to_serialized_key(&self) -> Self; + fn is_out_of_bounds(&self) -> bool; + fn is_first(&self) -> bool; + fn checked_sub(&self, x: usize) -> Option; + fn min_percentile_key() -> Self; + fn iter_up_to(&self, other: &Self) -> impl Iterator; + fn map_name<'a>() -> &'a str; + + fn from_usize(_: usize) -> Self { + unreachable!() + } + fn to_usize(&self) -> usize { + unreachable!() + } +} + +pub trait MapSerialized +where + Self: Debug + Serialize + DeserializeOwned + Encode + Decode, + ChunkId: MapChunkId, +{ + fn new(version: u32) -> Self; + fn get_last_key(&self, last_chunk_id: &ChunkId) -> Option; + fn version(&self) -> u32; + fn get(&self, serialized_key: &Key) -> Option<&Value>; + fn last(&self) -> Option<&Value>; + fn extend(&mut self, map: BTreeMap); +} + +pub trait MapChunkId +where + Self: Ord + Debug + Copy + Clone, +{ + fn to_name(&self) -> String; + fn from_name(name: &str) -> Self; + fn to_usize(self) -> usize; + fn from_usize(id: usize) -> Self; +} + +#[derive(Default, Debug, Allocative)] +pub struct GenericMap { + version: u32, + + path_all: String, + path_last: Option, + + chunks_in_memory: usize, + + serialization: Serialization, + + pub initial_last_key: Option, + pub initial_first_unsafe_key: Option, + + imported: BTreeMap, + to_insert: BTreeMap>, +} + +impl GenericMap +where + Value: MapValue, + ChunkId: MapChunkId, + Key: MapKey, + Serialized: MapSerialized, +{ + pub fn new_bin(version: u32, path: &str) -> Self { + Self::new(version, path, Serialization::Binary, 1, true) + } + + pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Binary, 1, export_last) + } + + pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Json, usize::MAX, export_last) + } + + fn new( + version: u32, + path: &str, + serialization: Serialization, + chunks_in_memory: usize, + export_last: bool, + ) -> Self { + if chunks_in_memory < 1 { + panic!("Should always have at least the latest chunk in memory"); + } + + let path = path.replace(['-', '_', ' '], "/"); + + let path_all = format!("{path}/{}", Key::map_name()); + + fs::create_dir_all(&path_all).unwrap(); + + let path_last = { + if export_last { + Some(serialization.append_extension(&format!("{path}/last"))) + } else { + None + } + }; + + let mut s = Self { + version, + + path_all, + path_last, + + chunks_in_memory, + + serialization, + + initial_last_key: None, + initial_first_unsafe_key: None, + + to_insert: BTreeMap::default(), + imported: BTreeMap::default(), + }; + + s.read_dir() + .into_iter() + .rev() + .take(chunks_in_memory) + .for_each(|(chunk_start, path)| { + if let Ok(serialized) = s.import(&path) { + if serialized.version() == s.version { + s.imported.insert(chunk_start, serialized); + } else { + s.read_dir() + .iter() + .for_each(|(_, path)| fs::remove_file(path).unwrap()) + } + } + }); + + s.initial_last_key = s + .imported + .iter() + .last() + .and_then(|(last_chunk_id, serialized)| serialized.get_last_key(last_chunk_id)); + + s.initial_first_unsafe_key = s + .initial_last_key + .and_then(|last_key| last_key.to_first_unsafe()); + + if s.initial_first_unsafe_key.is_none() { + log(&format!("New {path}/{}", Key::map_name())); + } + + s + } + + fn read_dir(&self) -> BTreeMap { + Self::_read_dir(&self.path_all, &self.serialization) + } + + pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { + fs::read_dir(path) + .unwrap() + .map(|entry| 
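The `MapChunkId` trait above is the contract that lets `GenericMap` name its on-disk chunk files: date maps use the calendar year (see `DateMapChunkId` earlier in this diff), height maps a 10 000-block range. A minimal standalone sketch of the round trip that contract implies, with the supertraits trimmed and the `<i32>` turbofish restored (the rendered diff drops angle-bracket generics throughout):

```rust
/// Trimmed-down version of the MapChunkId contract from generic_map.rs:
/// a chunk id must round-trip through its file-stem name.
trait MapChunkId: Sized {
    fn to_name(&self) -> String;
    fn from_name(name: &str) -> Self;
}

/// Year-based chunk id, as in date_map_chunk_id.rs.
#[derive(Debug, PartialEq, Clone, Copy)]
struct DateMapChunkId(i32);

impl MapChunkId for DateMapChunkId {
    // File stem of the chunk file, e.g. "2024" for "2024.json".
    fn to_name(&self) -> String {
        self.0.to_string()
    }

    fn from_name(name: &str) -> Self {
        Self(name.parse::<i32>().unwrap())
    }
}

fn main() {
    let id = DateMapChunkId(2024);
    // A chunk file stem must parse back to the same id.
    assert_eq!(DateMapChunkId::from_name(&id.to_name()), id);
}
```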
entry.unwrap().path()) + .filter(|path| { + let extension = path.extension().unwrap().to_str().unwrap(); + + path.is_file() && extension == serialization.to_extension() + }) + .map(|path| { + let chunk_id = ChunkId::from_name(path.file_stem().unwrap().to_str().unwrap()); + + (chunk_id, path) + }) + .collect() + } + + fn import(&self, path: &Path) -> color_eyre::Result { + self.serialization + .import::(path.to_str().unwrap()) + } + + pub fn insert(&mut self, key: Key, value: Value) -> Value { + if !self.is_key_safe(key) { + self.to_insert + .entry(key.to_chunk_id()) + .or_default() + .insert(key.to_serialized_key(), value); + } + + value + } + + pub fn insert_default(&mut self, key: Key) -> Value { + self.insert(key, Value::default()) + } + + #[inline(always)] + pub fn is_key_safe(&self, key: Key) -> bool { + self.initial_first_unsafe_key + .map_or(false, |initial_first_unsafe_key| { + initial_first_unsafe_key > key + }) + } + + pub fn get(&self, key: &Key) -> Option { + let chunk_id = key.to_chunk_id(); + + let serialized_key = key.to_serialized_key(); + + self.to_insert + .get(&chunk_id) + .and_then(|tree| tree.get(&serialized_key).cloned()) + .or_else(|| { + self.imported + .get(&chunk_id) + .and_then(|serialized| serialized.get(&serialized_key)) + .cloned() + }) + } + + pub fn get_or_import(&mut self, key: &Key) -> Option { + if key.is_out_of_bounds() { + return None; + } + + let chunk_id = key.to_chunk_id(); + + let serialized_key = key.to_serialized_key(); + + self.to_insert + .get(&chunk_id) + .and_then(|tree| tree.get(&serialized_key).cloned()) + .or_else(|| { + #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() + if !self.imported.contains_key(&chunk_id) { + let dir_content = self.read_dir(); + + if let Some(path) = dir_content.get(&chunk_id) { + let serialized = self.import(path).unwrap(); + + self.imported.insert(chunk_id, serialized); + } + } + + self.imported + .get(&chunk_id) + .and_then(|serialized| serialized.get(&serialized_key)) + .cloned() + }) + } +} + +impl AnyMap for GenericMap +where + Value: MapValue, + ChunkId: MapChunkId, + Key: MapKey, + Serialized: MapSerialized, +{ + fn path(&self) -> &str { + &self.path_all + } + + fn path_last(&self) -> &Option { + &self.path_last + } + + fn t_name(&self) -> &str { + std::any::type_name::() + } + + fn pre_export(&mut self) { + self.to_insert.iter_mut().for_each(|(chunk_id, map)| { + if let Some((key, _)) = map.first_key_value() { + if !key.is_first() && !self.imported.contains_key(chunk_id) { + // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut + + let dir_content = Self::_read_dir(&self.path_all, &self.serialization); + + let path = dir_content.get(chunk_id).unwrap_or_else(|| { + dbg!(&self.path_all, chunk_id, &dir_content); + panic!(); + }); + + let serialized = self + .serialization + .import::(path.to_str().unwrap()) + .unwrap(); + + self.imported.insert(*chunk_id, serialized); + } + } + + self.imported + .entry(*chunk_id) + .or_insert(Serialized::new(self.version)) + .extend(mem::take(map)); + }); + } + + fn export(&self) -> color_eyre::Result<()> { + let len = self.imported.len(); + + self.to_insert.iter().enumerate().try_for_each( + |(index, (chunk_id, map))| -> color_eyre::Result<()> { + if !map.is_empty() { + unreachable!() + } + + let path = self.serialization.append_extension(&format!( + "{}/{}", + self.path_all, + chunk_id.to_name() + )); + + let serialized = self.imported.get(chunk_id).unwrap_or_else(|| { + dbg!(&self.path_all, 
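`get_or_import` checks pending inserts first, then chunks already in memory, and only then scans the directory and deserializes the missing chunk file. A toy sketch of that cache-on-miss pattern; `load_chunk_from_disk` is a hypothetical stand-in for `Serialization::import`:

```rust
use std::collections::BTreeMap;

/// Toy stand-in for one deserialized chunk: values indexed by in-chunk offset.
type Chunk = Vec<f32>;

struct LazyChunks {
    imported: BTreeMap<u32, Chunk>,
}

impl LazyChunks {
    /// Hypothetical loader standing in for `Serialization::import`.
    fn load_chunk_from_disk(chunk_id: u32) -> Chunk {
        // Pretend each chunk file holds 3 values derived from its id.
        (0..3).map(|i| (chunk_id * 10 + i) as f32).collect()
    }

    /// Cache-on-miss: deserialize the chunk only the first time it is needed.
    fn get_or_import(&mut self, chunk_id: u32, offset: usize) -> Option<f32> {
        if !self.imported.contains_key(&chunk_id) {
            let chunk = Self::load_chunk_from_disk(chunk_id);
            self.imported.insert(chunk_id, chunk);
        }
        self.imported.get(&chunk_id).and_then(|c| c.get(offset)).copied()
    }
}

fn main() {
    let mut chunks = LazyChunks { imported: BTreeMap::new() };
    assert_eq!(chunks.get_or_import(2, 1), Some(21.0));
    // Second hit is served from memory, no reload.
    assert_eq!(chunks.get_or_import(2, 2), Some(22.0));
}
```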
chunk_id, &self.imported); + panic!(); + }); + + self.serialization.export(&path, serialized)?; + + if index == len - 1 { + if let Some(path_last) = self.path_last.as_ref() { + self.serialization + .export(path_last, serialized.last().unwrap())?; + } + } + + Ok(()) + }, + ) + } + + fn post_export(&mut self) { + self.imported + .keys() + .rev() + .enumerate() + .filter(|(index, _)| *index + 1 > self.chunks_in_memory) + .map(|(_, key)| *key) + .collect_vec() + .iter() + .for_each(|key| { + self.imported.remove(key); + }); + + self.to_insert.clear(); + } +} + +impl GenericMap +where + Value: MapValue, + ChunkId: MapChunkId, + Key: MapKey, + Serialized: MapSerialized, +{ + pub fn sum_keys(&self, keys: &[Key]) -> Value + where + Value: Sum, + { + keys.iter().flat_map(|key| self.get(key)).sum::() + } + + pub fn average_keys(&self, keys: &[Key]) -> f32 + where + Value: Sum, + f32: LossyFrom, + { + f32::lossy_from(self.sum_keys(keys)) / keys.len() as f32 + } + + pub fn multi_insert(&mut self, keys: &[Key], mut callback: F) + where + F: FnMut(&Key) -> Value, + { + keys.iter().for_each(|key| { + self.insert(*key, callback(key)); + }); + } + + pub fn multi_insert_const(&mut self, keys: &[Key], constant: Value) { + keys.iter().for_each(|key| { + self.insert(*key, constant); + }); + } + + pub fn multi_insert_simple_transform( + &mut self, + keys: &[Key], + source: &mut GenericMap, + transform: F, + ) where + SourceValue: MapValue, + SourceSerialized: MapSerialized, + F: Fn(SourceValue) -> Value, + { + keys.iter().for_each(|key| { + self.insert(*key, transform(source.get_or_import(key).unwrap())); + }); + } + + pub fn multi_insert_complex_transform( + &mut self, + keys: &[Key], + source: &mut GenericMap, + mut transform: F, + ) where + SourceValue: MapValue, + SourceSerialized: MapSerialized, + F: FnMut( + ( + SourceValue, + &Key, + &mut GenericMap, + ), + ) -> Value, + { + keys.iter().for_each(|key| { + self.insert( + *key, + transform((source.get_or_import(key).unwrap(), key, source)), + ); + }); + } + + pub fn multi_insert_add( + &mut self, + keys: &[Key], + added: &mut GenericMap, + adder: &mut GenericMap, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Add, + { + keys.iter().for_each(|key| { + self.insert( + *key, + Value::lossy_from(added.get_or_import(key).unwrap()) + + Value::lossy_from(adder.get_or_import(key).unwrap()), + ); + }); + } + + pub fn multi_insert_subtract( + &mut self, + keys: &[Key], + subtracted: &mut GenericMap, + subtracter: &mut GenericMap, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Sub, + { + keys.iter().for_each(|key| { + self.insert( + *key, + Value::lossy_from(subtracted.get_or_import(key).unwrap()) + - Value::lossy_from(subtracter.get_or_import(key).unwrap()), + ); + }); + } + + pub fn multi_insert_multiply( + &mut self, + keys: &[Key], + multiplied: &mut GenericMap, + multiplier: &mut GenericMap, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Mul, + { + keys.iter().for_each(|key| { + self.insert( + *key, + Value::lossy_from(multiplied.get_or_import(key).unwrap()) + * Value::lossy_from(multiplier.get_or_import(key).unwrap()), + ); + }); + } + + pub fn multi_insert_divide( + &mut self, + keys: &[Key], + divided: &mut GenericMap, + divider: &mut GenericMap, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: 
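`post_export` bounds memory by dropping every imported chunk except the newest `chunks_in_memory` ones once they are safely on disk. The same eviction rule over a plain `BTreeMap`, as a sketch:

```rust
use std::collections::BTreeMap;

/// Keep only the `keep` newest (largest-key) chunks in memory,
/// mirroring what `post_export` does after each export pass.
fn evict_old_chunks<V>(imported: &mut BTreeMap<u32, V>, keep: usize) {
    let stale: Vec<u32> = imported
        .keys()
        .rev()      // newest chunk first
        .skip(keep) // spare the `keep` newest
        .copied()
        .collect();

    for key in stale {
        imported.remove(&key);
    }
}

fn main() {
    let mut imported: BTreeMap<u32, &str> =
        [(0, "a"), (1, "b"), (2, "c"), (3, "d")].into_iter().collect();
    evict_old_chunks(&mut imported, 1);
    // Only the newest chunk (key 3) survives.
    assert_eq!(imported.keys().copied().collect::<Vec<_>>(), vec![3]);
}
```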
MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Div + Mul + From, + { + self._multi_insert_divide(keys, divided, divider, false) + } + + pub fn multi_insert_percentage( + &mut self, + keys: &[Key], + divided: &mut GenericMap, + divider: &mut GenericMap, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Div + Mul + From, + { + self._multi_insert_divide(keys, divided, divider, true) + } + + fn _multi_insert_divide( + &mut self, + keys: &[Key], + divided: &mut GenericMap, + divider: &mut GenericMap, + as_percentage: bool, + ) where + A: MapValue, + ASerialized: MapSerialized, + B: MapValue, + BSerialized: MapSerialized, + Value: LossyFrom + LossyFrom + Div + Mul + From, + { + let multiplier = Value::from(if as_percentage { 100 } else { 1 }); + + keys.iter().for_each(|key| { + self.insert( + *key, + Value::lossy_from(divided.get_or_import(key).unwrap()) + / Value::lossy_from(divider.get_or_import(key).unwrap()) + * multiplier, + ); + }); + } + + pub fn multi_insert_cumulative( + &mut self, + keys: &[Key], + source: &mut GenericMap, + ) where + SourceValue: MapValue, + SourceSerialized: MapSerialized, + Value: LossyFrom + Add + Sub, + { + self._multi_insert_last_x_sum(keys, source, None) + } + + pub fn multi_insert_last_x_sum( + &mut self, + keys: &[Key], + source: &mut GenericMap, + len: usize, + ) where + SourceValue: MapValue, + SourceSerialized: MapSerialized, + Value: LossyFrom + Add + Sub, + { + self._multi_insert_last_x_sum(keys, source, Some(len)) + } + + fn _multi_insert_last_x_sum( + &mut self, + keys: &[Key], + source: &mut GenericMap, + len: Option, + ) where + SourceValue: MapValue, + SourceSerialized: MapSerialized, + Value: LossyFrom + Add + Sub, + { + let mut sum = None; + + keys.iter().for_each(|key| { + let to_subtract = len + .and_then(|x| { + key.checked_sub(x) + .and_then(|previous_key| source.get_or_import(&previous_key)) + }) + .unwrap_or_default(); + + let previous_sum = sum.unwrap_or_else(|| { + key.checked_sub(1) + .and_then(|previous_sum_key| self.get_or_import(&previous_sum_key)) + .unwrap_or_default() + }); + + let last_value = source.get_or_import(key).unwrap_or_else(|| { + dbg!(&source.to_insert, &source.path(), key); + panic!(); + }); + + sum.replace( + previous_sum + Value::lossy_from(last_value) - Value::lossy_from(to_subtract), + ); + + self.insert(*key, sum.unwrap()); + }); + } + + pub fn multi_insert_simple_average( + &mut self, + keys: &[Key], + source: &mut GenericMap, + len: usize, + ) where + SourceValue: MapValue + Sum, + SourceSerialized: MapSerialized, + Value: Into + From, + f32: LossyFrom, + { + if len <= 1 { + panic!("Average of 1 or less is not useful"); + } + + let len = len as f32; + + let mut average = None; + + keys.iter().for_each(|key| { + let previous_average: f32 = average + .unwrap_or_else(|| { + key.checked_sub(1) + .and_then(|previous_average_key| self.get(&previous_average_key)) + .unwrap_or_default() + }) + .into(); + + let mut last_value = f32::lossy_from(source.get_or_import(key).unwrap_or_else(|| { + dbg!(key); + panic!() + })); + + if last_value.is_nan() { + last_value = 0.0; + } + + average.replace(((previous_average * (len - 1.0) + last_value) / len).into()); + + self.insert(*key, average.unwrap()); + }); + } + + pub fn multi_insert_net_change(&mut self, keys: &[Key], source: &mut Self, len: usize) + where + Value: Sub, + { + keys.iter().for_each(|key| { + let previous_value = key + .checked_sub(len) + 
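`_multi_insert_last_x_sum` maintains a running sum: each step adds the incoming value and, when a window length is given, subtracts the value that just left the window instead of re-summing the whole range. `multi_insert_simple_average` uses the recurrence `avg = (avg * (len - 1) + x) / len`, which behaves like an exponentially weighted mean with factor `1/len` rather than a strict window average. Both recurrences sketched over a plain slice, with the key arithmetic simplified to indices:

```rust
/// Rolling window sum: add the incoming value, subtract the one that
/// just left the window (same recurrence as `_multi_insert_last_x_sum`).
fn rolling_sums(values: &[f32], window: usize) -> Vec<f32> {
    let mut sums = Vec::with_capacity(values.len());
    let mut sum = 0.0;
    for (i, v) in values.iter().enumerate() {
        sum += v;
        if i >= window {
            sum -= values[i - window];
        }
        sums.push(sum);
    }
    sums
}

/// Recursive average used by `multi_insert_simple_average`:
/// exponential smoothing with factor 1/len, not a true window mean.
fn running_averages(values: &[f32], len: f32) -> Vec<f32> {
    let mut avg = 0.0;
    values
        .iter()
        .map(|v| {
            avg = (avg * (len - 1.0) + v) / len;
            avg
        })
        .collect()
}

fn main() {
    assert_eq!(rolling_sums(&[1.0, 2.0, 3.0, 4.0], 2), vec![1.0, 3.0, 5.0, 7.0]);
    let avgs = running_averages(&[10.0, 10.0, 10.0], 2.0);
    assert!((avgs[2] - 8.75).abs() < 1e-6); // converging toward 10.0
}
```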
.and_then(|previous_key| source.get_or_import(&previous_key)) + .unwrap_or_default(); + + let last_value = source.get_or_import(key).unwrap(); + + let net_change = last_value - previous_value; + + self.insert(*key, net_change); + }); + } + + pub fn multi_insert_percentage_change(&mut self, keys: &[Key], source: &mut Self, len: usize) + where + Value: Sub + FloatCore, + { + let one = Value::from(1.0).unwrap(); + let hundred = Value::from(100.0).unwrap(); + + keys.iter().for_each(|key| { + let previous_value = key + .checked_sub(len) + .and_then(|previous_key| source.get_or_import(&previous_key)) + .unwrap_or_default(); + + let last_value = source.get_or_import(key).unwrap(); + + let percentage_change = ((last_value / previous_value) - one) * hundred; + + self.insert(*key, percentage_change); + }); + } + + pub fn multi_insert_median(&mut self, keys: &[Key], source: &mut Self, len: Option) + where + Value: FloatCore, + { + source.multi_insert_percentile(keys, vec![(self, 0.5)], len); + } + + pub fn multi_insert_percentile( + &mut self, + keys: &[Key], + mut map_and_percentiles: Vec<(&mut Self, f32)>, + len: Option, + ) where + Value: FloatCore, + { + if len.map_or(false, |size| size < 3) { + panic!("Computing a percentile for a size lower than 3 is useless"); + } + + let mut ordered_vec = None; + let mut sorted_vec = None; + + let min_percentile_key = Key::min_percentile_key(); + + let nan = Value::from(f32::NAN).unwrap(); + let two = Value::from(2.0).unwrap(); + + keys.iter().cloned().try_for_each(|key| { + if key < min_percentile_key { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(key, nan); + }); + return ControlFlow::Continue::<()>(()); + } + + if let Some(start) = len.map_or(Some(min_percentile_key), |size| key.checked_sub(size)) + { + if sorted_vec.is_none() { + let mut vec = start + .iter_up_to(&key) + .flat_map(|key| self.get_or_import(&key)) + .filter(|f| !f.is_nan()) + .map(|f| OrderedFloat(f)) + .collect_vec(); + + if len.is_some() { + ordered_vec.replace(VecDeque::from(vec.clone())); + } + + vec.sort_unstable(); + + sorted_vec.replace(vec); + } else { + let float_value = self.get_or_import(&key).unwrap(); + + if !float_value.is_nan() { + let float_value = OrderedFloat(float_value); + + if let Some(len) = len { + if let Some(ordered_vec) = ordered_vec.as_mut() { + if ordered_vec.len() == len { + let first = ordered_vec.pop_front().unwrap(); + + let pos = + sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); + + sorted_vec.as_mut().unwrap().remove(pos); + } + + ordered_vec.push_back(float_value); + } + } + + let pos = sorted_vec + .as_ref() + .unwrap() + .binary_search(&float_value) + .unwrap_or_else(|pos| pos); + + sorted_vec.as_mut().unwrap().insert(pos, float_value); + } + } + + let vec = sorted_vec.as_ref().unwrap(); + + let len = vec.len(); + + map_and_percentiles + .iter_mut() + .for_each(|(map, percentile)| { + if !(0.0..=1.0).contains(percentile) { + panic!("The percentile should be between 0.0 and 1.0"); + } + + let value = { + if len < 2 { + nan + } else { + let index = (len - 1) as f32 * *percentile; + + let fract = index.fract(); + + if fract != 0.0 { + (vec.get(index.ceil() as usize) + .unwrap_or_else(|| { + dbg!(vec, index, &self.path_all, &self.path_all, len); + panic!() + }) + .0 + + vec + .get(index as usize) + .unwrap_or_else(|| { + dbg!( + vec, + index, + &self.path_all, + &self.path_all, + len + ); + panic!() + }) + .0) + / two + } else { + vec.get(index as usize) + .unwrap_or_else(|| { + dbg!(vec, index); + panic!(); + }) + .0 + 
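`multi_insert_percentile` keeps two views of the trailing window: a `VecDeque` in arrival order, so it knows which value falls out of the window, and a `Vec` kept sorted with binary search, so each step costs an O(log n) lookup plus a shift instead of a full re-sort. When the rank `(len - 1) * percentile` falls between two indices, it returns the midpoint of the two neighbours. A self-contained sketch of the same bookkeeping, using plain `f32` instead of `OrderedFloat` and assuming no NaNs reach the window:

```rust
use std::collections::VecDeque;

/// Trailing-window percentile, mirroring `multi_insert_percentile`:
/// `ordered` remembers arrival order, `sorted` stays sorted.
struct RollingPercentile {
    window: usize,
    ordered: VecDeque<f32>,
    sorted: Vec<f32>,
}

impl RollingPercentile {
    fn new(window: usize) -> Self {
        Self { window, ordered: VecDeque::new(), sorted: Vec::new() }
    }

    fn push(&mut self, value: f32) {
        if self.ordered.len() == self.window {
            // Evict the oldest value from both views.
            let oldest = self.ordered.pop_front().unwrap();
            let pos = self.sorted.partition_point(|v| *v < oldest);
            self.sorted.remove(pos);
        }
        self.ordered.push_back(value);
        let pos = self.sorted.partition_point(|v| *v < value);
        self.sorted.insert(pos, value);
    }

    /// Midpoint of the two ranks around `(len - 1) * p`; assumes a
    /// non-empty window, as the parser's minimum-key guard ensures.
    fn get(&self, percentile: f32) -> f32 {
        let index = (self.sorted.len() - 1) as f32 * percentile;
        if index.fract() != 0.0 {
            (self.sorted[index.floor() as usize] + self.sorted[index.ceil() as usize]) / 2.0
        } else {
            self.sorted[index as usize]
        }
    }
}

fn main() {
    let mut p = RollingPercentile::new(3);
    for v in [5.0, 1.0, 4.0, 2.0] {
        p.push(v); // window now holds the last 3 values
    }
    assert_eq!(p.get(0.5), 2.0); // median of [1, 4, 2]
}
```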
} + } + }; + + (*map).insert(key, value); + }); + } else { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(key, nan); + }); + } + + ControlFlow::Continue(()) + }); + } +} diff --git a/parser/src/structs/height.rs b/parser/src/structs/height.rs new file mode 100644 index 000000000..0c46d2d9a --- /dev/null +++ b/parser/src/structs/height.rs @@ -0,0 +1,154 @@ +use std::{ + fmt, + ops::{Add, AddAssign, Sub}, +}; + +use allocative::Allocative; +use bincode::{Decode, Encode}; +use derive_deref::{Deref, DerefMut}; +use serde::{Deserialize, Serialize}; + +use crate::{bitcoin::NUMBER_OF_UNSAFE_BLOCKS, HEIGHT_MAP_CHUNK_SIZE}; + +use super::{HeightMapChunkId, MapKey}; + +#[derive( + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Deserialize, + Encode, + Decode, + Allocative, +)] +pub struct Height(u32); + +impl Height { + pub const ZERO: Height = Height(0); + + pub fn new(height: u32) -> Self { + Self(height) + } + + pub fn is_close_to_end(&self, block_count: usize) -> bool { + **self > (block_count - (NUMBER_OF_UNSAFE_BLOCKS * 3)) as u32 + } + + pub fn is_safe(&self, block_count: usize) -> bool { + **self < (block_count - NUMBER_OF_UNSAFE_BLOCKS) as u32 + } +} + +impl PartialEq for Height { + fn eq(&self, other: &u64) -> bool { + **self == *other as u32 + } +} + +impl Add for Height { + type Output = Height; + + fn add(self, rhs: u32) -> Self::Output { + Self::new(*self + rhs) + } +} + +impl Add for Height { + type Output = Height; + + fn add(self, rhs: usize) -> Self::Output { + Self::new(*self + rhs as u32) + } +} + +impl Sub for Height { + type Output = Height; + + fn sub(self, rhs: Height) -> Self::Output { + Self::new(*self - *rhs) + } +} + +impl Sub for Height { + type Output = Height; + + fn sub(self, rhs: u32) -> Self::Output { + Self::new(*self - rhs) + } +} + +impl Sub for Height { + type Output = Height; + + fn sub(self, rhs: usize) -> Self::Output { + Self::new(*self - rhs as u32) + } +} + +impl AddAssign for Height { + fn add_assign(&mut self, rhs: usize) { + *self = self.add(rhs); + } +} + +impl fmt::Display for Height { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", **self) + } +} + +impl MapKey for Height { + fn to_chunk_id(&self) -> HeightMapChunkId { + HeightMapChunkId::new(self) + } + + fn to_first_unsafe(&self) -> Option { + let offset = NUMBER_OF_UNSAFE_BLOCKS - 1; + + self.checked_sub(offset) + } + + fn to_serialized_key(&self) -> Self { + Height::new(**self % HEIGHT_MAP_CHUNK_SIZE) + } + + fn is_out_of_bounds(&self) -> bool { + !(0..=2_100_000).contains(&**self) + } + + fn is_first(&self) -> bool { + **self == 0 + } + + fn checked_sub(&self, x: usize) -> Option { + (**self).checked_sub(x as u32).map(Height::new) + } + + fn min_percentile_key() -> Self { + Self(160_000) + } + + fn iter_up_to(&self, other: &Self) -> impl Iterator { + (**self..=**other).map(Height::new) + } + + fn map_name<'a>() -> &'a str { + "height" + } + + fn to_usize(&self) -> usize { + (**self) as usize + } + + fn from_usize(h: usize) -> Self { + Self(h as u32) + } +} diff --git a/parser/src/structs/height_map copy.rs b/parser/src/structs/height_map copy.rs new file mode 100644 index 000000000..d5722168e --- /dev/null +++ b/parser/src/structs/height_map copy.rs @@ -0,0 +1,986 @@ +use std::{ + cmp::Ordering, + collections::{BTreeMap, VecDeque}, + fmt::Debug, + fs, + iter::Sum, + mem, + ops::{Add, ControlFlow, Div, Mul, RangeInclusive, Sub}, + path::{Path, PathBuf}, +}; + +use 
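The new `Height` key wraps a `u32` and implements arithmetic directly against integer types (the rendered diff drops the `<u32>`/`<usize>` parameters on those `Add`/`Sub`/`PartialEq` impls), so call sites keep reading like the old `usize`-keyed code. A std-only sketch of the same newtype pattern, without the `derive_deref` sugar:

```rust
use std::ops::{Add, Sub};

/// Minimal stand-in for the parser's `Height` newtype.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default)]
struct Height(u32);

impl Height {
    fn new(h: u32) -> Self {
        Self(h)
    }
}

// Mixed-type arithmetic so call sites can write `height + 1u32`.
impl Add<u32> for Height {
    type Output = Height;
    fn add(self, rhs: u32) -> Height {
        Height(self.0 + rhs)
    }
}

impl Sub<u32> for Height {
    type Output = Height;
    fn sub(self, rhs: u32) -> Height {
        Height(self.0 - rhs)
    }
}

fn main() {
    const CHUNK: u32 = 10_000;
    let h = Height::new(840_123);
    // In-chunk offset used as the serialized key: height % chunk size.
    assert_eq!(Height(h.0 % CHUNK), Height(123));
    assert_eq!(h + 1, Height(840_124));
}
```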
allocative::Allocative; +use bincode::{Decode, Encode}; +use itertools::Itertools; +use ordered_float::{FloatCore, OrderedFloat}; +use serde::{Deserialize, Serialize}; + +use crate::{ + bitcoin::NUMBER_OF_UNSAFE_BLOCKS, + io::{format_path, Serialization}, + utils::{log, LossyFrom}, +}; + +use super::{AnyMap, MapValue}; + +pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000; + +#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)] +pub struct SerializedHeightMap { + version: u32, + map: Vec, +} + +#[derive(Default, Allocative)] +pub struct HeightMap +where + T: MapValue, +{ + version: u32, + + path_all: String, + path_last: Option, + + chunks_in_memory: usize, + + serialization: Serialization, + + initial_last_height: Option, + initial_first_unsafe_height: Option, + + imported: BTreeMap>, + to_insert: BTreeMap>, +} + +impl HeightMap +where + T: MapValue, +{ + pub fn new_bin(version: u32, path: &str) -> Self { + Self::new(version, path, Serialization::Binary, 1, true) + } + + pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Binary, 1, export_last) + } + + pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { + Self::new(version, path, Serialization::Json, usize::MAX, export_last) + } + + fn new( + version: u32, + path: &str, + serialization: Serialization, + chunks_in_memory: usize, + export_last: bool, + ) -> Self { + if chunks_in_memory < 1 { + panic!("Should always have at least the latest chunk in memory"); + } + + let path = format_path(path); + + let path_all = format!("{path}/height"); + + fs::create_dir_all(&path_all).unwrap(); + + let path_last = { + if export_last { + Some(serialization.append_extension(&format!("{path}/last"))) + } else { + None + } + }; + + let mut s = Self { + version, + + path_all, + path_last, + + chunks_in_memory, + + serialization, + + initial_first_unsafe_height: None, + initial_last_height: None, + + to_insert: BTreeMap::default(), + imported: BTreeMap::default(), + }; + + s.read_dir() + .into_iter() + .rev() + .take(chunks_in_memory) + .for_each(|(chunk_start, path)| { + if let Ok(serialized) = s.import(&path) { + if serialized.version == s.version { + s.imported.insert(chunk_start, serialized); + } else { + s.read_dir() + .iter() + .for_each(|(_, path)| fs::remove_file(path).unwrap()) + } + } + }); + + s.initial_last_height = s + .imported + .iter() + .last() + .map(|(chunk_start, serialized)| chunk_start + serialized.map.len()); + + s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| { + let offset = NUMBER_OF_UNSAFE_BLOCKS - 1; + last_height.checked_sub(offset) + }); + + if s.initial_first_unsafe_height.is_none() { + log(&format!("New {path}")); + } + + s + } + + fn height_to_chunk_name(height: Height) -> String { + let start = Self::height_to_chunk_start(height); + let end = start + HEIGHT_MAP_CHUNK_SIZE; + + format!("{start}..{end}") + } + + fn height_to_chunk_start(height: Height) -> usize { + height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE + } + + pub fn insert(&mut self, height: Height, value: T) -> T { + if !self.is_height_safe(height) { + self.to_insert + .entry(Self::height_to_chunk_start(height)) + .or_default() + .insert(height % HEIGHT_MAP_CHUNK_SIZE, value); + } + + value + } + + pub fn insert_default(&mut self, height: Height) -> T { + self.insert(height, T::default()) + } + + pub fn get(&self, height: &usize) -> Option { + let chunk_start = Self::height_to_chunk_start(*height); + + self.to_insert + .get(&chunk_start) 
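Each height chunk covers a half-open range of `HEIGHT_MAP_CHUNK_SIZE` blocks: the chunk start is the height rounded down to a multiple of the chunk size, the file stem is `start..end`, and the in-chunk `Vec` index is the remainder. The arithmetic in isolation:

```rust
const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000;

/// Height rounded down to its chunk boundary (`height_to_chunk_start`).
fn chunk_start(height: usize) -> usize {
    height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE
}

/// File stem for the chunk holding `height` (`height_to_chunk_name`).
fn chunk_name(height: usize) -> String {
    let start = chunk_start(height);
    format!("{start}..{}", start + HEIGHT_MAP_CHUNK_SIZE)
}

fn main() {
    assert_eq!(chunk_start(840_123), 840_000);
    assert_eq!(chunk_name(840_123), "840000..850000");
    // Index of the value inside the chunk's Vec:
    assert_eq!(840_123 % HEIGHT_MAP_CHUNK_SIZE, 123);
}
```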
+ .and_then(|map| map.get(&(height - chunk_start)).cloned()) + .or_else(|| { + self.imported + .get(&chunk_start) + .and_then(|serialized| serialized.map.get(height - chunk_start)) + .cloned() + }) + } + + pub fn get_or_import(&mut self, height: &usize) -> T { + let chunk_start = Self::height_to_chunk_start(*height); + + self.to_insert + .get(&chunk_start) + .and_then(|map| map.get(&(height - chunk_start)).cloned()) + .or_else(|| { + #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() + if !self.imported.contains_key(&chunk_start) { + let dir_content = self.read_dir(); + + let path = dir_content.get(&chunk_start).unwrap_or_else(|| { + dbg!(self.path(), chunk_start, &dir_content); + panic!(); + }); + + let serialized = self.import(path).unwrap(); + + self.imported.insert(chunk_start, serialized); + } + + self.imported + .get(&chunk_start) + .and_then(|serialized| serialized.map.get(height - chunk_start)) + .cloned() + }) + .unwrap_or_else(|| { + dbg!(height, self.path()); + panic!(); + }) + } + + #[inline(always)] + pub fn is_height_safe(&self, height: Height) -> bool { + self.initial_first_unsafe_height.unwrap_or(0) > height + } + + fn read_dir(&self) -> BTreeMap { + Self::_read_dir(&self.path_all, &self.serialization) + } + + pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { + fs::read_dir(path) + .unwrap() + .map(|entry| entry.unwrap().path()) + .filter(|path| { + let extension = path.extension().unwrap().to_str().unwrap(); + + path.is_file() && extension == serialization.to_extension() + }) + .map(|path| { + ( + path.file_stem() + .unwrap() + .to_str() + .unwrap() + .split("..") + .next() + .unwrap() + .parse::() + .unwrap(), + path, + ) + }) + .collect() + } + + fn import(&self, path: &Path) -> color_eyre::Result> { + self.serialization + .import::>(path.to_str().unwrap()) + } +} + +impl AnyMap for HeightMap +where + T: MapValue, +{ + fn path(&self) -> &str { + &self.path_all + } + + fn path_last(&self) -> &Option { + &self.path_last + } + + fn t_name(&self) -> &str { + std::any::type_name::() + } + + fn pre_export(&mut self) { + let to_insert = &mut self.to_insert; + + to_insert.iter_mut().for_each(|(chunk_start, map)| { + if let Some((key, _)) = map.first_key_value() { + if *key > 0 && !self.imported.contains_key(chunk_start) { + // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut + + let dir_content = Self::_read_dir(&self.path_all, &self.serialization); + + let path = dir_content.get(chunk_start).unwrap_or_else(|| { + dbg!(&self.path_all, chunk_start, &dir_content); + panic!(); + }); + + let serialized = self + .serialization + .import::>(path.to_str().unwrap()) + .unwrap(); + + self.imported.insert(*chunk_start, serialized); + } + } + + let serialized = self + .imported + .entry(*chunk_start) + .or_insert(SerializedHeightMap { + version: self.version, + map: vec![], + }); + + mem::take(map) + .into_iter() + .for_each( + |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) { + Ordering::Greater => serialized.map[chunk_height] = value, + Ordering::Equal => serialized.map.push(value), + Ordering::Less => { + dbg!(&self.path_all, &serialized.map, chunk_height, value); + panic!() + } + }, + ); + }); + } + + fn export(&self) -> color_eyre::Result<()> { + let len = self.imported.len(); + + self.to_insert.iter().enumerate().try_for_each( + |(index, (chunk_start, map))| -> color_eyre::Result<()> { + if !map.is_empty() { + unreachable!() + } + + let chunk_name = 
Self::height_to_chunk_name(*chunk_start); + + let path = self + .serialization + .append_extension(&format!("{}/{}", self.path_all, chunk_name)); + + let serialized = self.imported.get(chunk_start).unwrap_or_else(|| { + dbg!(&self.path_all, chunk_start, &self.imported); + panic!(); + }); + + self.serialization.export(&path, serialized)?; + + if index == len - 1 { + if let Some(path_last) = self.path_last.as_ref() { + self.serialization + .export(path_last, serialized.map.last().unwrap())?; + } + } + + Ok(()) + }, + ) + } + + fn post_export(&mut self) { + self.imported + .keys() + .rev() + .enumerate() + .filter(|(index, _)| *index + 1 > self.chunks_in_memory) + .map(|(_, key)| *key) + .collect_vec() + .iter() + .for_each(|key| { + self.imported.remove(key); + }); + + self.to_insert.clear(); + } +} + +pub trait AnyHeightMap: AnyMap { + fn get_initial_first_unsafe_height(&self) -> Option; + + fn get_initial_last_height(&self) -> Option; + + fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync); + + fn as_any_mut_map(&mut self) -> &mut dyn AnyMap; +} + +impl AnyHeightMap for HeightMap +where + T: MapValue, +{ + #[inline(always)] + fn get_initial_first_unsafe_height(&self) -> Option { + self.initial_first_unsafe_height + } + + #[inline(always)] + fn get_initial_last_height(&self) -> Option { + self.initial_last_height + } + + fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) { + self + } + + fn as_any_mut_map(&mut self) -> &mut dyn AnyMap { + self + } +} + +impl HeightMap +where + T: MapValue, +{ + pub fn sum_range(&self, range: &RangeInclusive) -> T + where + T: Sum, + { + range + .to_owned() + .flat_map(|height| self.get(&height)) + .sum::() + } + + pub fn multi_insert_const(&mut self, heights: &[Height], constant: T) { + heights.iter().for_each(|height| { + let height = *height; + + self.insert(height, constant); + }); + } + + pub fn multi_insert_simple_transform( + &mut self, + heights: &[Height], + source: &mut HeightMap, + transform: F, + ) where + K: MapValue, + F: Fn(K) -> T, + { + heights.iter().for_each(|height| { + self.insert(*height, transform(source.get_or_import(height))); + }); + } + + pub fn multi_insert_complex_transform( + &mut self, + heights: &[Height], + source: &mut HeightMap, + mut transform: F, + ) where + K: MapValue, + F: FnMut((K, &usize)) -> T, + { + heights.iter().for_each(|height| { + self.insert(*height, transform((source.get_or_import(height), height))); + }); + } + + pub fn multi_insert_add( + &mut self, + heights: &[Height], + added: &mut HeightMap, + adder: &mut HeightMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Add, + { + heights.iter().for_each(|height| { + self.insert( + *height, + T::lossy_from(added.get_or_import(height)) + + T::lossy_from(adder.get_or_import(height)), + ); + }); + } + + pub fn multi_insert_subtract( + &mut self, + heights: &[Height], + subtracted: &mut HeightMap, + subtracter: &mut HeightMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Sub, + { + heights.iter().for_each(|height| { + self.insert( + *height, + T::lossy_from(subtracted.get_or_import(height)) + - T::lossy_from(subtracter.get_or_import(height)), + ); + }); + } + + pub fn multi_insert_multiply( + &mut self, + heights: &[Height], + multiplied: &mut HeightMap, + multiplier: &mut HeightMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Mul, + { + heights.iter().for_each(|height| { + self.insert( + *height, + T::lossy_from(multiplied.get_or_import(height)) + * 
T::lossy_from(multiplier.get_or_import(height)), + ); + }); + } + + pub fn multi_insert_divide( + &mut self, + heights: &[Height], + divided: &mut HeightMap, + divider: &mut HeightMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + self._multi_insert_divide(heights, divided, divider, false) + } + + pub fn multi_insert_percentage( + &mut self, + heights: &[Height], + divided: &mut HeightMap, + divider: &mut HeightMap, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + self._multi_insert_divide(heights, divided, divider, true) + } + + pub fn _multi_insert_divide( + &mut self, + heights: &[Height], + divided: &mut HeightMap, + divider: &mut HeightMap, + as_percentage: bool, + ) where + A: MapValue, + B: MapValue, + T: LossyFrom + LossyFrom, + T: Div + Mul + From, + { + let multiplier = T::from(if as_percentage { 100 } else { 1 }); + + heights.iter().for_each(|height| { + self.insert( + *height, + T::lossy_from(divided.get_or_import(height)) + / T::lossy_from(divider.get_or_import(height)) + * multiplier, + ); + }); + } + + pub fn multi_insert_cumulative(&mut self, heights: &[Height], source: &mut HeightMap) + where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + self._multi_insert_last_x_sum(heights, source, None) + } + + pub fn multi_insert_last_x_sum( + &mut self, + heights: &[Height], + source: &mut HeightMap, + block_time: usize, + ) where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + self._multi_insert_last_x_sum(heights, source, Some(block_time)) + } + + fn _multi_insert_last_x_sum( + &mut self, + heights: &[Height], + source: &mut HeightMap, + block_time: Option, + ) where + K: MapValue, + T: LossyFrom, + T: Add + Sub, + { + let mut sum = None; + + heights.iter().for_each(|height| { + let to_subtract = block_time + .and_then(|x| { + (height + 1) + .checked_sub(x) + .map(|previous_height| source.get_or_import(&previous_height)) + }) + .unwrap_or_default(); + + let previous_sum = sum.unwrap_or_else(|| { + height + .checked_sub(1) + .map(|previous_sum_height| self.get_or_import(&previous_sum_height)) + .unwrap_or_default() + }); + + let last_value = source.get_or_import(height); + + sum.replace(previous_sum + T::lossy_from(last_value) - T::lossy_from(to_subtract)); + + self.insert(*height, sum.unwrap()); + }); + } + + pub fn multi_insert_simple_average( + &mut self, + heights: &[Height], + source: &mut HeightMap, + block_time: usize, + ) where + T: Into + From, + K: MapValue + Sum, + f32: LossyFrom, + { + if block_time <= 1 { + panic!("Average of 1 or less is not useful"); + } + + let mut average = None; + + heights.iter().for_each(|height| { + let height = *height; + + let previous_average: f32 = average + .unwrap_or_else(|| { + height + .checked_sub(block_time) + .and_then(|previous_average_height| self.get(&previous_average_height)) + .unwrap_or_default() + }) + .into(); + + let mut last_value = f32::lossy_from(source.get_or_import(&height)); + + if last_value.is_nan() { + last_value = 0.0; + } + + average.replace( + ((previous_average * (block_time as f32 - 1.0) + last_value) / block_time as f32) + .into(), + ); + + self.insert(height, average.unwrap()); + }); + } + + pub fn multi_insert_net_change( + &mut self, + heights: &[Height], + source: &mut HeightMap, + block_time: usize, + ) where + T: Sub, + { + heights.iter().for_each(|height| { + let height = *height; + + let previous_value = height + .checked_sub(block_time) + .map(|height| source.get_or_import(&height)) + 
.unwrap_or_default(); + + let last_value = source.get_or_import(&height); + + let net = last_value - previous_value; + + self.insert(height, net); + }); + } + + pub fn multi_insert_median( + &mut self, + heights: &[Height], + source: &mut HeightMap, + block_time: Option, + ) where + T: FloatCore, + { + source.multi_insert_percentile(heights, vec![(self, 0.5)], block_time); + } + + pub fn multi_insert_percentile( + &mut self, + heights: &[Height], + mut map_and_percentiles: Vec<(&mut HeightMap, f32)>, + block_time: Option, + ) where + T: FloatCore, + { + if block_time.map_or(false, |size| size < 3) { + panic!("Computing a percentile for a size lower than 3 is useless"); + } + + let mut ordered_vec = None; + let mut sorted_vec = None; + + let min_percentile_height = 160_000; + + let nan = T::from(f32::NAN).unwrap(); + let two = T::from(2.0).unwrap(); + + if min_percentile_height % HEIGHT_MAP_CHUNK_SIZE != 0 { + panic!("Should be 0"); + } + + heights.iter().cloned().try_for_each(|height| { + if height < min_percentile_height { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(height, nan); + }); + return ControlFlow::Continue::<()>(()); + } + + if let Some(start) = + block_time.map_or(Some(min_percentile_height), |size| height.checked_sub(size)) + { + if sorted_vec.is_none() { + let mut vec = (start..=height) + .map(|height| self.get_or_import(&height)) + .filter(|f| !f.is_nan()) + .map(|f| OrderedFloat(f)) + .collect_vec(); + + if block_time.is_some() { + ordered_vec.replace(VecDeque::from(vec.clone())); + } + + vec.sort_unstable(); + + sorted_vec.replace(vec); + } else { + let float_value = self.get_or_import(&height); + + if !float_value.is_nan() { + let float_value = OrderedFloat(float_value); + + if block_time.is_some() { + let first = ordered_vec.as_mut().unwrap().pop_front().unwrap(); + let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); + sorted_vec.as_mut().unwrap().remove(pos); + + ordered_vec.as_mut().unwrap().push_back(float_value); + } + + let pos = sorted_vec + .as_ref() + .unwrap() + .binary_search(&float_value) + .unwrap_or_else(|pos| pos); + + sorted_vec.as_mut().unwrap().insert(pos, float_value); + } + } + + let vec = sorted_vec.as_ref().unwrap(); + + let len = vec.len(); + + map_and_percentiles + .iter_mut() + .for_each(|(map, percentile)| { + if !(0.0..=1.0).contains(percentile) { + panic!("The percentile should be between 0.0 and 1.0"); + } + + let value = { + if len < 2 { + nan + } else { + let index = (len - 1) as f32 * *percentile; + + let fract = index.fract(); + + if fract != 0.0 { + (vec.get(index.ceil() as usize) + .unwrap_or_else(|| { + dbg!( + index, + &self.path_all, + &self.path_all, + &self.to_insert, + block_time, + vec + ); + panic!() + }) + .0 + + vec + .get(index.floor() as usize) + .unwrap_or_else(|| { + dbg!( + index, + &self.path_all, + &self.path_all, + block_time + ); + panic!() + }) + .0) + / two + } else { + vec.get(index as usize).unwrap().0 + } + } + }; + + (*map).insert(height, value); + }); + } else { + map_and_percentiles.iter_mut().for_each(|(map, _)| { + (*map).insert(height, nan); + }); + } + + ControlFlow::Continue(()) + }); + } + + // pub fn insert_cumulative(&mut self, height: Height, source: &HeightMap) -> T + // where + // T: Add + Sub, + // { + // let previous_cum = height + // .checked_sub(1) + // .map(|previous_sum_height| { + // self.get(&previous_sum_height).unwrap_or_else(|| { + // dbg!(previous_sum_height); + // panic!() + // }) + // }) + // .unwrap_or_default(); + + // let last_value = 
source.get(&height).unwrap(); + + // let cum_value = previous_cum + last_value; + + // self.insert(height, cum_value); + + // cum_value + // } + + // pub fn insert_last_x_sum(&mut self, height: Height, source: &HeightMap, x: usize) -> T + // where + // T: Add + Sub, + // { + // let to_subtract = (height + 1) + // .checked_sub(x) + // .map(|previous_height| { + // source.get(&previous_height).unwrap_or_else(|| { + // dbg!(&self.path_all, &source.path_all, previous_height); + // panic!() + // }) + // }) + // .unwrap_or_default(); + + // let previous_sum = height + // .checked_sub(1) + // .map(|previous_sum_height| self.get(&previous_sum_height).unwrap()) + // .unwrap_or_default(); + + // let last_value = source.get(&height).unwrap(); + + // let sum = previous_sum + last_value - to_subtract; + + // self.insert(height, sum); + + // sum + // } + + // pub fn insert_simple_average(&mut self, height: Height, source: &HeightMap, block_time: usize) + // where + // T: Into + From, + // { + // let to_subtract: f32 = (height + 1) + // .checked_sub(block_time) + // .map(|previous_height| source.get(&previous_height).unwrap()) + // .unwrap_or_default() + // .into(); + + // let previous_average: f32 = height + // .checked_sub(1) + // .map(|previous_average_height| self.get(&previous_average_height).unwrap()) + // .unwrap_or_default() + // .into(); + + // let last_value: f32 = source.get(&height).unwrap().into(); + + // let sum = previous_average * block_time as f32 - to_subtract + last_value; + + // let average: T = (sum / block_time as f32).into(); + + // self.insert(height, average); + // } + + // pub fn insert_net_change(&mut self, height: Height, source: &HeightMap, offset: usize) -> T + // where + // T: Sub, + // { + // let previous_value = height + // .checked_sub(offset) + // .map(|height| { + // source.get(&height).unwrap_or_else(|| { + // dbg!(&self.path_all, &source.path_all, offset); + // panic!(); + // }) + // }) + // .unwrap_or_default(); + + // let last_value = source.get(&height).unwrap(); + + // let net = last_value - previous_value; + + // self.insert(height, net); + + // net + // } + + // pub fn insert_median(&mut self, height: Height, source: &HeightMap, size: usize) -> T + // where + // T: FloatCore, + // { + // if size < 3 { + // panic!("Computing a median for a size lower than 3 is useless"); + // } + + // let median = { + // if let Some(start) = height.checked_sub(size - 1) { + // let even = size % 2 == 0; + // let median_index = size / 2; + + // let mut vec = (start..=height) + // .map(|height| { + // OrderedFloat(source.get(&height).unwrap_or_else(|| { + // dbg!(height, &source.path_all, size); + // panic!() + // })) + // }) + // .collect_vec(); + + // vec.sort_unstable(); + + // if even { + // (vec.get(median_index) + // .unwrap_or_else(|| { + // dbg!(median_index, &self.path_all, &source.path_all, size); + // panic!() + // }) + // .0 + // + vec.get(median_index - 1).unwrap().0) + // / T::from(2.0).unwrap() + // } else { + // vec.get(median_index).unwrap().0 + // } + // } else { + // T::default() + // } + // }; + + // self.insert(height, median); + + // median + // } +} diff --git a/parser/src/structs/height_map.rs b/parser/src/structs/height_map.rs index ac984b338..965ef847f 100644 --- a/parser/src/structs/height_map.rs +++ b/parser/src/structs/height_map.rs @@ -1,380 +1,32 @@ -use std::{ - cmp::Ordering, - collections::{BTreeMap, VecDeque}, - fmt::Debug, - fs, - iter::Sum, - mem, - ops::{Add, ControlFlow, Div, Mul, RangeInclusive, Sub}, - path::{Path, PathBuf}, -}; +use 
std::{iter::Sum, ops::RangeInclusive}; -use allocative::Allocative; -use bincode::{Decode, Encode}; -use itertools::Itertools; -use ordered_float::{FloatCore, OrderedFloat}; -use serde::{Deserialize, Serialize}; +use crate::SerializedVec; -use crate::{ - bitcoin::NUMBER_OF_UNSAFE_BLOCKS, - io::{format_path, Serialization}, - utils::{log, LossyFrom}, -}; +use super::{AnyMap, GenericMap, Height, HeightMapChunkId, MapValue}; -use super::{AnyMap, MapValue}; +pub const HEIGHT_MAP_CHUNK_SIZE: u32 = 10_000; -pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000; - -#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)] -pub struct SerializedHeightMap { - version: u32, - map: Vec, -} - -#[derive(Default, Allocative)] -pub struct HeightMap -where - T: MapValue, -{ - version: u32, - - path_all: String, - path_last: Option, - - chunks_in_memory: usize, - - serialization: Serialization, - - initial_last_height: Option, - initial_first_unsafe_height: Option, - - imported: BTreeMap>, - to_insert: BTreeMap>, -} +pub type HeightMap = GenericMap>; impl HeightMap where T: MapValue, { - pub fn new_bin(version: u32, path: &str) -> Self { - Self::new(version, path, Serialization::Binary, 1, true) - } - - pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { - Self::new(version, path, Serialization::Binary, 1, export_last) - } - - pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { - Self::new(version, path, Serialization::Json, usize::MAX, export_last) - } - - fn new( - version: u32, - path: &str, - serialization: Serialization, - chunks_in_memory: usize, - export_last: bool, - ) -> Self { - if chunks_in_memory < 1 { - panic!("Should always have at least the latest chunk in memory"); - } - - let path = format_path(path); - - let path_all = format!("{path}/height"); - - fs::create_dir_all(&path_all).unwrap(); - - let path_last = { - if export_last { - Some(serialization.append_extension(&format!("{path}/last"))) - } else { - None - } - }; - - let mut s = Self { - version, - - path_all, - path_last, - - chunks_in_memory, - - serialization, - - initial_first_unsafe_height: None, - initial_last_height: None, - - to_insert: BTreeMap::default(), - imported: BTreeMap::default(), - }; - - s.read_dir() - .into_iter() - .rev() - .take(chunks_in_memory) - .for_each(|(chunk_start, path)| { - if let Ok(serialized) = s.import(&path) { - if serialized.version == s.version { - s.imported.insert(chunk_start, serialized); - } else { - s.read_dir() - .iter() - .for_each(|(_, path)| fs::remove_file(path).unwrap()) - } - } - }); - - s.initial_last_height = s - .imported - .iter() - .last() - .map(|(chunk_start, serialized)| chunk_start + serialized.map.len()); - - s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| { - let offset = NUMBER_OF_UNSAFE_BLOCKS - 1; - last_height.checked_sub(offset) - }); - - if s.initial_first_unsafe_height.is_none() { - log(&format!("New {path}")); - } - - s - } - - fn height_to_chunk_name(height: usize) -> String { - let start = Self::height_to_chunk_start(height); - let end = start + HEIGHT_MAP_CHUNK_SIZE; - - format!("{start}..{end}") - } - - fn height_to_chunk_start(height: usize) -> usize { - height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE - } - - pub fn insert(&mut self, height: usize, value: T) -> T { - if !self.is_height_safe(height) { - self.to_insert - .entry(Self::height_to_chunk_start(height)) - .or_default() - .insert(height % HEIGHT_MAP_CHUNK_SIZE, value); - } - - value - } - - pub fn insert_default(&mut self, 
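This is the heart of the refactor announced in the changelog: `HeightMap<T>` is now a plain type alias of `GenericMap`, specialized with `Height` keys, `HeightMapChunkId` chunks and a `SerializedVec<T>` chunk format (the angle-bracket parameters are dropped by the rendering above), and height-specific helpers like `sum_range` live in a thin impl on the alias. A toy of the same pattern, one generic core shared by two aliased specializations:

```rust
use std::collections::BTreeMap;

/// Stand-in for GenericMap: one core implementation for every key type.
#[derive(Default)]
struct GenericMap<K: Ord, V> {
    inner: BTreeMap<K, V>,
}

impl<K: Ord, V: Copy> GenericMap<K, V> {
    fn insert(&mut self, k: K, v: V) {
        self.inner.insert(k, v);
    }
    fn get(&self, k: &K) -> Option<V> {
        self.inner.get(k).copied()
    }
}

/// Thin aliases, mirroring `pub type HeightMap<T> = GenericMap<Height, T, ...>`.
type HeightMap<V> = GenericMap<u32, V>;
type DateMap<V> = GenericMap<&'static str, V>;

/// Alias-specific convenience methods can still be added on the
/// specialization, like `sum_range` in the new height_map.rs.
impl<V: Copy + std::iter::Sum> HeightMap<V> {
    fn sum_range(&self, range: std::ops::RangeInclusive<u32>) -> V {
        range.flat_map(|h| self.get(&h)).sum()
    }
}

fn main() {
    let mut by_height: HeightMap<f32> = GenericMap::default();
    by_height.insert(0, 50.0);
    by_height.insert(1, 25.0);
    assert_eq!(by_height.sum_range(0..=1), 75.0);

    let mut by_date: DateMap<f32> = GenericMap::default();
    by_date.insert("2009-01-03", 0.0);
    assert_eq!(by_date.get(&"2009-01-03"), Some(0.0));
}
```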
height: usize) -> T { - self.insert(height, T::default()) - } - - pub fn get(&self, height: &usize) -> Option { - let chunk_start = Self::height_to_chunk_start(*height); - - self.to_insert - .get(&chunk_start) - .and_then(|map| map.get(&(height - chunk_start)).cloned()) - .or_else(|| { - self.imported - .get(&chunk_start) - .and_then(|serialized| serialized.map.get(height - chunk_start)) - .cloned() - }) - } - - pub fn get_or_import(&mut self, height: &usize) -> T { - let chunk_start = Self::height_to_chunk_start(*height); - - self.to_insert - .get(&chunk_start) - .and_then(|map| map.get(&(height - chunk_start)).cloned()) - .or_else(|| { - #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() - if !self.imported.contains_key(&chunk_start) { - let dir_content = self.read_dir(); - - let path = dir_content.get(&chunk_start).unwrap_or_else(|| { - dbg!(self.path(), chunk_start, &dir_content); - panic!(); - }); - - let serialized = self.import(path).unwrap(); - - self.imported.insert(chunk_start, serialized); - } - - self.imported - .get(&chunk_start) - .and_then(|serialized| serialized.map.get(height - chunk_start)) - .cloned() - }) - .unwrap_or_else(|| { - dbg!(height, self.path()); - panic!(); - }) - } - - #[inline(always)] - pub fn is_height_safe(&self, height: usize) -> bool { - self.initial_first_unsafe_height.unwrap_or(0) > height - } - - fn read_dir(&self) -> BTreeMap { - Self::_read_dir(&self.path_all, &self.serialization) - } - - pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { - fs::read_dir(path) - .unwrap() - .map(|entry| entry.unwrap().path()) - .filter(|path| { - let extension = path.extension().unwrap().to_str().unwrap(); - - path.is_file() && extension == serialization.to_extension() - }) - .map(|path| { - ( - path.file_stem() - .unwrap() - .to_str() - .unwrap() - .split("..") - .next() - .unwrap() - .parse::() - .unwrap(), - path, - ) - }) - .collect() - } - - fn import(&self, path: &Path) -> color_eyre::Result> { - self.serialization - .import::>(path.to_str().unwrap()) - } -} - -impl AnyMap for HeightMap -where - T: MapValue, -{ - fn path(&self) -> &str { - &self.path_all - } - - fn path_last(&self) -> &Option { - &self.path_last - } - - fn t_name(&self) -> &str { - std::any::type_name::() - } - - fn pre_export(&mut self) { - let to_insert = &mut self.to_insert; - - to_insert.iter_mut().for_each(|(chunk_start, map)| { - if let Some((key, _)) = map.first_key_value() { - if *key > 0 && !self.imported.contains_key(chunk_start) { - // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut - - let dir_content = Self::_read_dir(&self.path_all, &self.serialization); - - let path = dir_content.get(chunk_start).unwrap_or_else(|| { - dbg!(&self.path_all, chunk_start, &dir_content); - panic!(); - }); - - let serialized = self - .serialization - .import::>(path.to_str().unwrap()) - .unwrap(); - - self.imported.insert(*chunk_start, serialized); - } - } - - let serialized = self - .imported - .entry(*chunk_start) - .or_insert(SerializedHeightMap { - version: self.version, - map: vec![], - }); - - mem::take(map) - .into_iter() - .for_each( - |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) { - Ordering::Greater => serialized.map[chunk_height] = value, - Ordering::Equal => serialized.map.push(value), - Ordering::Less => { - dbg!(&self.path_all, &serialized.map, chunk_height, value); - panic!() - } - }, - ); - }); - } - - fn export(&self) -> color_eyre::Result<()> { - let len = 
self.imported.len(); - - self.to_insert.iter().enumerate().try_for_each( - |(index, (chunk_start, map))| -> color_eyre::Result<()> { - if !map.is_empty() { - unreachable!() - } - - let chunk_name = Self::height_to_chunk_name(*chunk_start); - - let path = self - .serialization - .append_extension(&format!("{}/{}", self.path_all, chunk_name)); - - let serialized = self.imported.get(chunk_start).unwrap_or_else(|| { - dbg!(&self.path_all, chunk_start, &self.imported); - panic!(); - }); - - self.serialization.export(&path, serialized)?; - - if index == len - 1 { - if let Some(path_last) = self.path_last.as_ref() { - self.serialization - .export(path_last, serialized.map.last().unwrap())?; - } - } - - Ok(()) - }, - ) - } - - fn post_export(&mut self) { - self.imported - .keys() - .rev() - .enumerate() - .filter(|(index, _)| *index + 1 > self.chunks_in_memory) - .map(|(_, key)| *key) - .collect_vec() - .iter() - .for_each(|key| { - self.imported.remove(key); - }); - - self.to_insert.clear(); + pub fn sum_range(&self, range: &RangeInclusive) -> T + where + T: Sum, + { + range + .to_owned() + .flat_map(|height| self.get(&Height::new(height))) + .sum::() } } pub trait AnyHeightMap: AnyMap { - fn get_initial_first_unsafe_height(&self) -> Option; + fn get_initial_first_unsafe_height(&self) -> Option; - fn get_initial_last_height(&self) -> Option; + fn get_initial_last_height(&self) -> Option; fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync); @@ -386,13 +38,13 @@ where T: MapValue, { #[inline(always)] - fn get_initial_first_unsafe_height(&self) -> Option { - self.initial_first_unsafe_height + fn get_initial_first_unsafe_height(&self) -> Option { + self.initial_first_unsafe_key } #[inline(always)] - fn get_initial_last_height(&self) -> Option { - self.initial_last_height + fn get_initial_last_height(&self) -> Option { + self.initial_last_key } fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) { @@ -404,583 +56,989 @@ where } } -impl HeightMap -where - T: MapValue, -{ - pub fn sum_range(&self, range: &RangeInclusive) -> T - where - T: Sum, - { - range - .to_owned() - .flat_map(|height| self.get(&height)) - .sum::() - } - - pub fn multi_insert_const(&mut self, heights: &[usize], constant: T) { - heights.iter().for_each(|height| { - let height = *height; - - self.insert(height, constant); - }); - } - - pub fn multi_insert_simple_transform( - &mut self, - heights: &[usize], - source: &mut HeightMap, - transform: F, - ) where - K: MapValue, - F: Fn(K) -> T, - { - heights.iter().for_each(|height| { - self.insert(*height, transform(source.get_or_import(height))); - }); - } - - pub fn multi_insert_complex_transform( - &mut self, - heights: &[usize], - source: &mut HeightMap, - mut transform: F, - ) where - K: MapValue, - F: FnMut((K, &usize)) -> T, - { - heights.iter().for_each(|height| { - self.insert(*height, transform((source.get_or_import(height), height))); - }); - } - - pub fn multi_insert_add( - &mut self, - heights: &[usize], - added: &mut HeightMap, - adder: &mut HeightMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Add, - { - heights.iter().for_each(|height| { - self.insert( - *height, - T::lossy_from(added.get_or_import(height)) - + T::lossy_from(adder.get_or_import(height)), - ); - }); - } - - pub fn multi_insert_subtract( - &mut self, - heights: &[usize], - subtracted: &mut HeightMap, - subtracter: &mut HeightMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Sub, - { - heights.iter().for_each(|height| { - self.insert( - *height, - 
T::lossy_from(subtracted.get_or_import(height)) - - T::lossy_from(subtracter.get_or_import(height)), - ); - }); - } - - pub fn multi_insert_multiply( - &mut self, - heights: &[usize], - multiplied: &mut HeightMap, - multiplier: &mut HeightMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Mul, - { - heights.iter().for_each(|height| { - self.insert( - *height, - T::lossy_from(multiplied.get_or_import(height)) - * T::lossy_from(multiplier.get_or_import(height)), - ); - }); - } - - pub fn multi_insert_divide( - &mut self, - heights: &[usize], - divided: &mut HeightMap, - divider: &mut HeightMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - self._multi_insert_divide(heights, divided, divider, false) - } - - pub fn multi_insert_percentage( - &mut self, - heights: &[usize], - divided: &mut HeightMap, - divider: &mut HeightMap, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - self._multi_insert_divide(heights, divided, divider, true) - } - - pub fn _multi_insert_divide( - &mut self, - heights: &[usize], - divided: &mut HeightMap, - divider: &mut HeightMap, - as_percentage: bool, - ) where - A: MapValue, - B: MapValue, - T: LossyFrom + LossyFrom, - T: Div + Mul + From, - { - let multiplier = T::from(if as_percentage { 100 } else { 1 }); - - heights.iter().for_each(|height| { - self.insert( - *height, - T::lossy_from(divided.get_or_import(height)) - / T::lossy_from(divider.get_or_import(height)) - * multiplier, - ); - }); - } - - pub fn multi_insert_cumulative(&mut self, heights: &[usize], source: &mut HeightMap) - where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - self._multi_insert_last_x_sum(heights, source, None) - } - - pub fn multi_insert_last_x_sum( - &mut self, - heights: &[usize], - source: &mut HeightMap, - block_time: usize, - ) where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - self._multi_insert_last_x_sum(heights, source, Some(block_time)) - } - - fn _multi_insert_last_x_sum( - &mut self, - heights: &[usize], - source: &mut HeightMap, - block_time: Option, - ) where - K: MapValue, - T: LossyFrom, - T: Add + Sub, - { - let mut sum = None; - - heights.iter().for_each(|height| { - let to_subtract = block_time - .and_then(|x| { - (height + 1) - .checked_sub(x) - .map(|previous_height| source.get_or_import(&previous_height)) - }) - .unwrap_or_default(); - - let previous_sum = sum.unwrap_or_else(|| { - height - .checked_sub(1) - .map(|previous_sum_height| self.get_or_import(&previous_sum_height)) - .unwrap_or_default() - }); - - let last_value = source.get_or_import(height); - - sum.replace(previous_sum + T::lossy_from(last_value) - T::lossy_from(to_subtract)); - - self.insert(*height, sum.unwrap()); - }); - } - - pub fn multi_insert_simple_average( - &mut self, - heights: &[usize], - source: &mut HeightMap, - block_time: usize, - ) where - T: Into + From, - K: MapValue + Sum, - f32: LossyFrom, - { - if block_time <= 1 { - panic!("Average of 1 or less is not useful"); - } - - let mut average = None; - - heights.iter().for_each(|height| { - let height = *height; - - let previous_average: f32 = average - .unwrap_or_else(|| { - height - .checked_sub(block_time) - .and_then(|previous_average_height| self.get(&previous_average_height)) - .unwrap_or_default() - }) - .into(); - - let mut last_value = f32::lossy_from(source.get_or_import(&height)); - - if last_value.is_nan() { - last_value = 0.0; - } - - average.replace( - ((previous_average * (block_time as 
f32 - 1.0) + last_value) / block_time as f32) - .into(), - ); - - self.insert(height, average.unwrap()); - }); - } - - pub fn multi_insert_net_change( - &mut self, - heights: &[usize], - source: &mut HeightMap, - block_time: usize, - ) where - T: Sub, - { - heights.iter().for_each(|height| { - let height = *height; - - let previous_value = height - .checked_sub(block_time) - .map(|height| source.get_or_import(&height)) - .unwrap_or_default(); - - let last_value = source.get_or_import(&height); - - let net = last_value - previous_value; - - self.insert(height, net); - }); - } - - pub fn multi_insert_median( - &mut self, - heights: &[usize], - source: &mut HeightMap, - block_time: Option, - ) where - T: FloatCore, - { - source.multi_insert_percentile(heights, vec![(self, 0.5)], block_time); - } - - pub fn multi_insert_percentile( - &mut self, - heights: &[usize], - mut map_and_percentiles: Vec<(&mut HeightMap, f32)>, - block_time: Option, - ) where - T: FloatCore, - { - if block_time.map_or(false, |size| size < 3) { - panic!("Computing a percentile for a size lower than 3 is useless"); - } - - let mut ordered_vec = None; - let mut sorted_vec = None; - - let min_percentile_height = 160_000; - - let nan = T::from(f32::NAN).unwrap(); - let two = T::from(2.0).unwrap(); - - if min_percentile_height % HEIGHT_MAP_CHUNK_SIZE != 0 { - panic!("Should be 0"); - } - - heights.iter().cloned().try_for_each(|height| { - if height < min_percentile_height { - map_and_percentiles.iter_mut().for_each(|(map, _)| { - (*map).insert(height, nan); - }); - return ControlFlow::Continue::<()>(()); - } - - if let Some(start) = - block_time.map_or(Some(min_percentile_height), |size| height.checked_sub(size)) - { - if sorted_vec.is_none() { - let mut vec = (start..=height) - .map(|height| self.get_or_import(&height)) - .filter(|f| !f.is_nan()) - .map(|f| OrderedFloat(f)) - .collect_vec(); - - if block_time.is_some() { - ordered_vec.replace(VecDeque::from(vec.clone())); - } - - vec.sort_unstable(); - - sorted_vec.replace(vec); - } else { - let float_value = self.get_or_import(&height); - - if !float_value.is_nan() { - let float_value = OrderedFloat(float_value); - - if block_time.is_some() { - let first = ordered_vec.as_mut().unwrap().pop_front().unwrap(); - let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); - sorted_vec.as_mut().unwrap().remove(pos); - - ordered_vec.as_mut().unwrap().push_back(float_value); - } - - let pos = sorted_vec - .as_ref() - .unwrap() - .binary_search(&float_value) - .unwrap_or_else(|pos| pos); - - sorted_vec.as_mut().unwrap().insert(pos, float_value); - } - } - - let vec = sorted_vec.as_ref().unwrap(); - - let len = vec.len(); - - map_and_percentiles - .iter_mut() - .for_each(|(map, percentile)| { - if !(0.0..=1.0).contains(percentile) { - panic!("The percentile should be between 0.0 and 1.0"); - } - - let value = { - if len < 2 { - nan - } else { - let index = (len - 1) as f32 * *percentile; - - let fract = index.fract(); - - if fract != 0.0 { - (vec.get(index.ceil() as usize) - .unwrap_or_else(|| { - dbg!( - index, - &self.path_all, - &self.path_all, - &self.to_insert, - block_time, - vec - ); - panic!() - }) - .0 - + vec - .get(index.floor() as usize) - .unwrap_or_else(|| { - dbg!( - index, - &self.path_all, - &self.path_all, - block_time - ); - panic!() - }) - .0) - / two - } else { - vec.get(index as usize).unwrap().0 - } - } - }; - - (*map).insert(height, value); - }); - } else { - map_and_percentiles.iter_mut().for_each(|(map, _)| { - (*map).insert(height, nan); - }); 
- } - - ControlFlow::Continue(()) - }); - } - - // pub fn insert_cumulative(&mut self, height: usize, source: &HeightMap) -> T - // where - // T: Add + Sub, - // { - // let previous_cum = height - // .checked_sub(1) - // .map(|previous_sum_height| { - // self.get(&previous_sum_height).unwrap_or_else(|| { - // dbg!(previous_sum_height); - // panic!() - // }) - // }) - // .unwrap_or_default(); - - // let last_value = source.get(&height).unwrap(); - - // let cum_value = previous_cum + last_value; - - // self.insert(height, cum_value); - - // cum_value - // } - - // pub fn insert_last_x_sum(&mut self, height: usize, source: &HeightMap, x: usize) -> T - // where - // T: Add + Sub, - // { - // let to_subtract = (height + 1) - // .checked_sub(x) - // .map(|previous_height| { - // source.get(&previous_height).unwrap_or_else(|| { - // dbg!(&self.path_all, &source.path_all, previous_height); - // panic!() - // }) - // }) - // .unwrap_or_default(); - - // let previous_sum = height - // .checked_sub(1) - // .map(|previous_sum_height| self.get(&previous_sum_height).unwrap()) - // .unwrap_or_default(); - - // let last_value = source.get(&height).unwrap(); - - // let sum = previous_sum + last_value - to_subtract; - - // self.insert(height, sum); - - // sum - // } - - // pub fn insert_simple_average(&mut self, height: usize, source: &HeightMap, block_time: usize) - // where - // T: Into + From, - // { - // let to_subtract: f32 = (height + 1) - // .checked_sub(block_time) - // .map(|previous_height| source.get(&previous_height).unwrap()) - // .unwrap_or_default() - // .into(); - - // let previous_average: f32 = height - // .checked_sub(1) - // .map(|previous_average_height| self.get(&previous_average_height).unwrap()) - // .unwrap_or_default() - // .into(); - - // let last_value: f32 = source.get(&height).unwrap().into(); - - // let sum = previous_average * block_time as f32 - to_subtract + last_value; - - // let average: T = (sum / block_time as f32).into(); - - // self.insert(height, average); - // } - - // pub fn insert_net_change(&mut self, height: usize, source: &HeightMap, offset: usize) -> T - // where - // T: Sub, - // { - // let previous_value = height - // .checked_sub(offset) - // .map(|height| { - // source.get(&height).unwrap_or_else(|| { - // dbg!(&self.path_all, &source.path_all, offset); - // panic!(); - // }) - // }) - // .unwrap_or_default(); - - // let last_value = source.get(&height).unwrap(); - - // let net = last_value - previous_value; - - // self.insert(height, net); - - // net - // } - - // pub fn insert_median(&mut self, height: usize, source: &HeightMap, size: usize) -> T - // where - // T: FloatCore, - // { - // if size < 3 { - // panic!("Computing a median for a size lower than 3 is useless"); - // } - - // let median = { - // if let Some(start) = height.checked_sub(size - 1) { - // let even = size % 2 == 0; - // let median_index = size / 2; - - // let mut vec = (start..=height) - // .map(|height| { - // OrderedFloat(source.get(&height).unwrap_or_else(|| { - // dbg!(height, &source.path_all, size); - // panic!() - // })) - // }) - // .collect_vec(); - - // vec.sort_unstable(); - - // if even { - // (vec.get(median_index) - // .unwrap_or_else(|| { - // dbg!(median_index, &self.path_all, &source.path_all, size); - // panic!() - // }) - // .0 - // + vec.get(median_index - 1).unwrap().0) - // / T::from(2.0).unwrap() - // } else { - // vec.get(median_index).unwrap().0 - // } - // } else { - // T::default() - // } - // }; - - // self.insert(height, median); - - // median - // } -} 
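The `multi_insert_percentile` removed above (and re-added in commented form below, ahead of the move to the new `generic_map` module) maintains two parallel structures per rolling window: a `VecDeque` in insertion order, so the value leaving the window can be found in O(1), and a `Vec<OrderedFloat<T>>` kept sorted, so any percentile reduces to an index computation with binary-search insert/remove. The following is a minimal standalone sketch of that sliding-window technique, not the crate's API; it assumes plain `f32` values with no NaNs (the original filters NaNs out) and hypothetical names throughout:

```rust
use std::collections::VecDeque;

/// Rolling percentile over a fixed window, mirroring the two-structure
/// approach above: `ordered` remembers insertion order for eviction,
/// `sorted` stays sorted so a percentile is a single index computation.
/// Assumes the input contains no NaNs.
fn rolling_percentile(values: &[f32], window: usize, percentile: f32) -> Vec<f32> {
    assert!(window >= 3, "a percentile over fewer than 3 values is useless");
    assert!((0.0..=1.0).contains(&percentile), "percentile must be in 0.0..=1.0");

    let mut ordered: VecDeque<f32> = VecDeque::with_capacity(window);
    let mut sorted: Vec<f32> = Vec::with_capacity(window);
    let mut out = Vec::with_capacity(values.len());

    for &value in values {
        if ordered.len() == window {
            // Evict the oldest value from both structures. Any element equal
            // to `oldest` is interchangeable, so the found position is fine.
            let oldest = ordered.pop_front().unwrap();
            let pos = sorted
                .binary_search_by(|p| p.partial_cmp(&oldest).unwrap())
                .unwrap();
            sorted.remove(pos);
        }

        ordered.push_back(value);
        let pos = sorted
            .binary_search_by(|p| p.partial_cmp(&value).unwrap())
            .unwrap_or_else(|p| p);
        sorted.insert(pos, value);

        // Same midpoint rule as the original: average the two straddling
        // ranks when the index is fractional, otherwise take it directly.
        let index = (sorted.len() - 1) as f32 * percentile;
        let (lo, hi) = (index.floor() as usize, index.ceil() as usize);
        out.push(if lo == hi {
            sorted[lo]
        } else {
            (sorted[lo] + sorted[hi]) / 2.0
        });
    }

    out
}
```

For instance, `rolling_percentile(&fee_rates, 144, 0.5)` would give a rolling one-day median at Bitcoin's roughly 144 blocks per day; `fee_rates` is a hypothetical input, not a dataset from this codebase.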
+// use std::{ +// cmp::Ordering, +// collections::{BTreeMap, VecDeque}, +// fmt::Debug, +// fs, +// iter::Sum, +// mem, +// ops::{Add, ControlFlow, Div, Mul, RangeInclusive, Sub}, +// path::{Path, PathBuf}, +// }; + +// use allocative::Allocative; +// use bincode::{Decode, Encode}; +// use itertools::Itertools; +// use ordered_float::{FloatCore, OrderedFloat}; +// use serde::{Deserialize, Serialize}; + +// use crate::{ +// bitcoin::NUMBER_OF_UNSAFE_BLOCKS, +// io::{format_path, Serialization}, +// utils::{log, LossyFrom}, +// }; + +// use super::{AnyMap, MapValue}; + +// pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000; + +// #[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)] +// pub struct SerializedHeightMap { +// version: u32, +// map: Vec, +// } + +// #[derive(Default, Allocative)] +// pub struct HeightMap +// where +// T: MapValue, +// { +// version: u32, + +// path_all: String, +// path_last: Option, + +// chunks_in_memory: usize, + +// serialization: Serialization, + +// initial_last_height: Option, +// initial_first_unsafe_height: Option, + +// imported: BTreeMap>, +// to_insert: BTreeMap>, +// } + +// impl HeightMap +// where +// T: MapValue, +// { +// pub fn new_bin(version: u32, path: &str) -> Self { +// Self::new(version, path, Serialization::Binary, 1, true) +// } + +// pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self { +// Self::new(version, path, Serialization::Binary, 1, export_last) +// } + +// pub fn new_json(version: u32, path: &str, export_last: bool) -> Self { +// Self::new(version, path, Serialization::Json, usize::MAX, export_last) +// } + +// fn new( +// version: u32, +// path: &str, +// serialization: Serialization, +// chunks_in_memory: usize, +// export_last: bool, +// ) -> Self { +// if chunks_in_memory < 1 { +// panic!("Should always have at least the latest chunk in memory"); +// } + +// let path = format_path(path); + +// let path_all = format!("{path}/height"); + +// fs::create_dir_all(&path_all).unwrap(); + +// let path_last = { +// if export_last { +// Some(serialization.append_extension(&format!("{path}/last"))) +// } else { +// None +// } +// }; + +// let mut s = Self { +// version, + +// path_all, +// path_last, + +// chunks_in_memory, + +// serialization, + +// initial_first_unsafe_height: None, +// initial_last_height: None, + +// to_insert: BTreeMap::default(), +// imported: BTreeMap::default(), +// }; + +// s.read_dir() +// .into_iter() +// .rev() +// .take(chunks_in_memory) +// .for_each(|(chunk_start, path)| { +// if let Ok(serialized) = s.import(&path) { +// if serialized.version == s.version { +// s.imported.insert(chunk_start, serialized); +// } else { +// s.read_dir() +// .iter() +// .for_each(|(_, path)| fs::remove_file(path).unwrap()) +// } +// } +// }); + +// s.initial_last_height = s +// .imported +// .iter() +// .last() +// .map(|(chunk_start, serialized)| chunk_start + serialized.map.len()); + +// s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| { +// let offset = NUMBER_OF_UNSAFE_BLOCKS - 1; +// last_height.checked_sub(offset) +// }); + +// if s.initial_first_unsafe_height.is_none() { +// log(&format!("New {path}")); +// } + +// s +// } + +// fn height_to_chunk_name(height: Height) -> String { +// let start = Self::height_to_chunk_start(height); +// let end = start + HEIGHT_MAP_CHUNK_SIZE; + +// format!("{start}..{end}") +// } + +// fn height_to_chunk_start(height: Height) -> usize { +// height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE +// } + +// pub fn insert(&mut 
self, height: Height, value: T) -> T { +// if !self.is_height_safe(height) { +// self.to_insert +// .entry(Self::height_to_chunk_start(height)) +// .or_default() +// .insert(height % HEIGHT_MAP_CHUNK_SIZE, value); +// } + +// value +// } + +// pub fn insert_default(&mut self, height: Height) -> T { +// self.insert(height, T::default()) +// } + +// pub fn get(&self, height: &usize) -> Option { +// let chunk_start = Self::height_to_chunk_start(*height); + +// self.to_insert +// .get(&chunk_start) +// .and_then(|map| map.get(&(height - chunk_start)).cloned()) +// .or_else(|| { +// self.imported +// .get(&chunk_start) +// .and_then(|serialized| serialized.map.get(height - chunk_start)) +// .cloned() +// }) +// } + +// pub fn get_or_import(&mut self, height: &usize) -> T { +// let chunk_start = Self::height_to_chunk_start(*height); + +// self.to_insert +// .get(&chunk_start) +// .and_then(|map| map.get(&(height - chunk_start)).cloned()) +// .or_else(|| { +// #[allow(clippy::map_entry)] // Can't be mut and then use read_dir() +// if !self.imported.contains_key(&chunk_start) { +// let dir_content = self.read_dir(); + +// let path = dir_content.get(&chunk_start).unwrap_or_else(|| { +// dbg!(self.path(), chunk_start, &dir_content); +// panic!(); +// }); + +// let serialized = self.import(path).unwrap(); + +// self.imported.insert(chunk_start, serialized); +// } + +// self.imported +// .get(&chunk_start) +// .and_then(|serialized| serialized.map.get(height - chunk_start)) +// .cloned() +// }) +// .unwrap_or_else(|| { +// dbg!(height, self.path()); +// panic!(); +// }) +// } + +// #[inline(always)] +// pub fn is_height_safe(&self, height: Height) -> bool { +// self.initial_first_unsafe_height.unwrap_or(0) > height +// } + +// fn read_dir(&self) -> BTreeMap { +// Self::_read_dir(&self.path_all, &self.serialization) +// } + +// pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap { +// fs::read_dir(path) +// .unwrap() +// .map(|entry| entry.unwrap().path()) +// .filter(|path| { +// let extension = path.extension().unwrap().to_str().unwrap(); + +// path.is_file() && extension == serialization.to_extension() +// }) +// .map(|path| { +// ( +// path.file_stem() +// .unwrap() +// .to_str() +// .unwrap() +// .split("..") +// .next() +// .unwrap() +// .parse::() +// .unwrap(), +// path, +// ) +// }) +// .collect() +// } + +// fn import(&self, path: &Path) -> color_eyre::Result> { +// self.serialization +// .import::>(path.to_str().unwrap()) +// } +// } + +// impl AnyMap for HeightMap +// where +// T: MapValue, +// { +// fn path(&self) -> &str { +// &self.path_all +// } + +// fn path_last(&self) -> &Option { +// &self.path_last +// } + +// fn t_name(&self) -> &str { +// std::any::type_name::() +// } + +// fn pre_export(&mut self) { +// let to_insert = &mut self.to_insert; + +// to_insert.iter_mut().for_each(|(chunk_start, map)| { +// if let Some((key, _)) = map.first_key_value() { +// if *key > 0 && !self.imported.contains_key(chunk_start) { +// // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut + +// let dir_content = Self::_read_dir(&self.path_all, &self.serialization); + +// let path = dir_content.get(chunk_start).unwrap_or_else(|| { +// dbg!(&self.path_all, chunk_start, &dir_content); +// panic!(); +// }); + +// let serialized = self +// .serialization +// .import::>(path.to_str().unwrap()) +// .unwrap(); + +// self.imported.insert(*chunk_start, serialized); +// } +// } + +// let serialized = self +// .imported +// 
.entry(*chunk_start) +// .or_insert(SerializedHeightMap { +// version: self.version, +// map: vec![], +// }); + +// mem::take(map) +// .into_iter() +// .for_each( +// |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) { +// Ordering::Greater => serialized.map[chunk_height] = value, +// Ordering::Equal => serialized.map.push(value), +// Ordering::Less => { +// dbg!(&self.path_all, &serialized.map, chunk_height, value); +// panic!() +// } +// }, +// ); +// }); +// } + +// fn export(&self) -> color_eyre::Result<()> { +// let len = self.imported.len(); + +// self.to_insert.iter().enumerate().try_for_each( +// |(index, (chunk_start, map))| -> color_eyre::Result<()> { +// if !map.is_empty() { +// unreachable!() +// } + +// let chunk_name = Self::height_to_chunk_name(*chunk_start); + +// let path = self +// .serialization +// .append_extension(&format!("{}/{}", self.path_all, chunk_name)); + +// let serialized = self.imported.get(chunk_start).unwrap_or_else(|| { +// dbg!(&self.path_all, chunk_start, &self.imported); +// panic!(); +// }); + +// self.serialization.export(&path, serialized)?; + +// if index == len - 1 { +// if let Some(path_last) = self.path_last.as_ref() { +// self.serialization +// .export(path_last, serialized.map.last().unwrap())?; +// } +// } + +// Ok(()) +// }, +// ) +// } + +// fn post_export(&mut self) { +// self.imported +// .keys() +// .rev() +// .enumerate() +// .filter(|(index, _)| *index + 1 > self.chunks_in_memory) +// .map(|(_, key)| *key) +// .collect_vec() +// .iter() +// .for_each(|key| { +// self.imported.remove(key); +// }); + +// self.to_insert.clear(); +// } +// } + +// pub trait AnyHeightMap: AnyMap { +// fn get_initial_first_unsafe_height(&self) -> Option; + +// fn get_initial_last_height(&self) -> Option; + +// fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync); + +// fn as_any_mut_map(&mut self) -> &mut dyn AnyMap; +// } + +// impl AnyHeightMap for HeightMap +// where +// T: MapValue, +// { +// #[inline(always)] +// fn get_initial_first_unsafe_height(&self) -> Option { +// self.initial_first_unsafe_height +// } + +// #[inline(always)] +// fn get_initial_last_height(&self) -> Option { +// self.initial_last_height +// } + +// fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) { +// self +// } + +// fn as_any_mut_map(&mut self) -> &mut dyn AnyMap { +// self +// } +// } + +// impl HeightMap +// where +// T: MapValue, +// { +// pub fn sum_range(&self, range: &RangeInclusive) -> T +// where +// T: Sum, +// { +// range +// .to_owned() +// .flat_map(|height| self.get(&height)) +// .sum::() +// } + +// pub fn multi_insert_const(&mut self, heights: &[Height], constant: T) { +// heights.iter().for_each(|height| { +// let height = *height; + +// self.insert(height, constant); +// }); +// } + +// pub fn multi_insert_simple_transform( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// transform: F, +// ) where +// K: MapValue, +// F: Fn(K) -> T, +// { +// heights.iter().for_each(|height| { +// self.insert(*height, transform(source.get_or_import(height))); +// }); +// } + +// pub fn multi_insert_complex_transform( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// mut transform: F, +// ) where +// K: MapValue, +// F: FnMut((K, &usize)) -> T, +// { +// heights.iter().for_each(|height| { +// self.insert(*height, transform((source.get_or_import(height), height))); +// }); +// } + +// pub fn multi_insert_add( +// &mut self, +// heights: &[Height], +// added: &mut HeightMap, +// adder: &mut HeightMap, +// ) where +// 
A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Add, +// { +// heights.iter().for_each(|height| { +// self.insert( +// *height, +// T::lossy_from(added.get_or_import(height)) +// + T::lossy_from(adder.get_or_import(height)), +// ); +// }); +// } + +// pub fn multi_insert_subtract( +// &mut self, +// heights: &[Height], +// subtracted: &mut HeightMap, +// subtracter: &mut HeightMap, +// ) where +// A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Sub, +// { +// heights.iter().for_each(|height| { +// self.insert( +// *height, +// T::lossy_from(subtracted.get_or_import(height)) +// - T::lossy_from(subtracter.get_or_import(height)), +// ); +// }); +// } + +// pub fn multi_insert_multiply( +// &mut self, +// heights: &[Height], +// multiplied: &mut HeightMap, +// multiplier: &mut HeightMap, +// ) where +// A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Mul, +// { +// heights.iter().for_each(|height| { +// self.insert( +// *height, +// T::lossy_from(multiplied.get_or_import(height)) +// * T::lossy_from(multiplier.get_or_import(height)), +// ); +// }); +// } + +// pub fn multi_insert_divide( +// &mut self, +// heights: &[Height], +// divided: &mut HeightMap, +// divider: &mut HeightMap, +// ) where +// A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Div + Mul + From, +// { +// self._multi_insert_divide(heights, divided, divider, false) +// } + +// pub fn multi_insert_percentage( +// &mut self, +// heights: &[Height], +// divided: &mut HeightMap, +// divider: &mut HeightMap, +// ) where +// A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Div + Mul + From, +// { +// self._multi_insert_divide(heights, divided, divider, true) +// } + +// pub fn _multi_insert_divide( +// &mut self, +// heights: &[Height], +// divided: &mut HeightMap, +// divider: &mut HeightMap, +// as_percentage: bool, +// ) where +// A: MapValue, +// B: MapValue, +// T: LossyFrom + LossyFrom, +// T: Div + Mul + From, +// { +// let multiplier = T::from(if as_percentage { 100 } else { 1 }); + +// heights.iter().for_each(|height| { +// self.insert( +// *height, +// T::lossy_from(divided.get_or_import(height)) +// / T::lossy_from(divider.get_or_import(height)) +// * multiplier, +// ); +// }); +// } + +// pub fn multi_insert_cumulative(&mut self, heights: &[Height], source: &mut HeightMap) +// where +// K: MapValue, +// T: LossyFrom, +// T: Add + Sub, +// { +// self._multi_insert_last_x_sum(heights, source, None) +// } + +// pub fn multi_insert_last_x_sum( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// block_time: usize, +// ) where +// K: MapValue, +// T: LossyFrom, +// T: Add + Sub, +// { +// self._multi_insert_last_x_sum(heights, source, Some(block_time)) +// } + +// fn _multi_insert_last_x_sum( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// block_time: Option, +// ) where +// K: MapValue, +// T: LossyFrom, +// T: Add + Sub, +// { +// let mut sum = None; + +// heights.iter().for_each(|height| { +// let to_subtract = block_time +// .and_then(|x| { +// (height + 1) +// .checked_sub(x) +// .map(|previous_height| source.get_or_import(&previous_height)) +// }) +// .unwrap_or_default(); + +// let previous_sum = sum.unwrap_or_else(|| { +// height +// .checked_sub(1) +// .map(|previous_sum_height| self.get_or_import(&previous_sum_height)) +// .unwrap_or_default() +// }); + +// let last_value = source.get_or_import(height); + +// sum.replace(previous_sum + T::lossy_from(last_value) - 
T::lossy_from(to_subtract)); + +// self.insert(*height, sum.unwrap()); +// }); +// } + +// pub fn multi_insert_simple_average( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// block_time: usize, +// ) where +// T: Into + From, +// K: MapValue + Sum, +// f32: LossyFrom, +// { +// if block_time <= 1 { +// panic!("Average of 1 or less is not useful"); +// } + +// let mut average = None; + +// heights.iter().for_each(|height| { +// let height = *height; + +// let previous_average: f32 = average +// .unwrap_or_else(|| { +// height +// .checked_sub(block_time) +// .and_then(|previous_average_height| self.get(&previous_average_height)) +// .unwrap_or_default() +// }) +// .into(); + +// let mut last_value = f32::lossy_from(source.get_or_import(&height)); + +// if last_value.is_nan() { +// last_value = 0.0; +// } + +// average.replace( +// ((previous_average * (block_time as f32 - 1.0) + last_value) / block_time as f32) +// .into(), +// ); + +// self.insert(height, average.unwrap()); +// }); +// } + +// pub fn multi_insert_net_change( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// block_time: usize, +// ) where +// T: Sub, +// { +// heights.iter().for_each(|height| { +// let height = *height; + +// let previous_value = height +// .checked_sub(block_time) +// .map(|height| source.get_or_import(&height)) +// .unwrap_or_default(); + +// let last_value = source.get_or_import(&height); + +// let net = last_value - previous_value; + +// self.insert(height, net); +// }); +// } + +// pub fn multi_insert_median( +// &mut self, +// heights: &[Height], +// source: &mut HeightMap, +// block_time: Option, +// ) where +// T: FloatCore, +// { +// source.multi_insert_percentile(heights, vec![(self, 0.5)], block_time); +// } + +// pub fn multi_insert_percentile( +// &mut self, +// heights: &[Height], +// mut map_and_percentiles: Vec<(&mut HeightMap, f32)>, +// block_time: Option, +// ) where +// T: FloatCore, +// { +// if block_time.map_or(false, |size| size < 3) { +// panic!("Computing a percentile for a size lower than 3 is useless"); +// } + +// let mut ordered_vec = None; +// let mut sorted_vec = None; + +// let min_percentile_height = 160_000; + +// let nan = T::from(f32::NAN).unwrap(); +// let two = T::from(2.0).unwrap(); + +// if min_percentile_height % HEIGHT_MAP_CHUNK_SIZE != 0 { +// panic!("Should be 0"); +// } + +// heights.iter().cloned().try_for_each(|height| { +// if height < min_percentile_height { +// map_and_percentiles.iter_mut().for_each(|(map, _)| { +// (*map).insert(height, nan); +// }); +// return ControlFlow::Continue::<()>(()); +// } + +// if let Some(start) = +// block_time.map_or(Some(min_percentile_height), |size| height.checked_sub(size)) +// { +// if sorted_vec.is_none() { +// let mut vec = (start..=height) +// .map(|height| self.get_or_import(&height)) +// .filter(|f| !f.is_nan()) +// .map(|f| OrderedFloat(f)) +// .collect_vec(); + +// if block_time.is_some() { +// ordered_vec.replace(VecDeque::from(vec.clone())); +// } + +// vec.sort_unstable(); + +// sorted_vec.replace(vec); +// } else { +// let float_value = self.get_or_import(&height); + +// if !float_value.is_nan() { +// let float_value = OrderedFloat(float_value); + +// if block_time.is_some() { +// let first = ordered_vec.as_mut().unwrap().pop_front().unwrap(); +// let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap(); +// sorted_vec.as_mut().unwrap().remove(pos); + +// ordered_vec.as_mut().unwrap().push_back(float_value); +// } + +// let pos = sorted_vec +// 
.as_ref() +// .unwrap() +// .binary_search(&float_value) +// .unwrap_or_else(|pos| pos); + +// sorted_vec.as_mut().unwrap().insert(pos, float_value); +// } +// } + +// let vec = sorted_vec.as_ref().unwrap(); + +// let len = vec.len(); + +// map_and_percentiles +// .iter_mut() +// .for_each(|(map, percentile)| { +// if !(0.0..=1.0).contains(percentile) { +// panic!("The percentile should be between 0.0 and 1.0"); +// } + +// let value = { +// if len < 2 { +// nan +// } else { +// let index = (len - 1) as f32 * *percentile; + +// let fract = index.fract(); + +// if fract != 0.0 { +// (vec.get(index.ceil() as usize) +// .unwrap_or_else(|| { +// dbg!( +// index, +// &self.path_all, +// &self.path_all, +// &self.to_insert, +// block_time, +// vec +// ); +// panic!() +// }) +// .0 +// + vec +// .get(index.floor() as usize) +// .unwrap_or_else(|| { +// dbg!( +// index, +// &self.path_all, +// &self.path_all, +// block_time +// ); +// panic!() +// }) +// .0) +// / two +// } else { +// vec.get(index as usize).unwrap().0 +// } +// } +// }; + +// (*map).insert(height, value); +// }); +// } else { +// map_and_percentiles.iter_mut().for_each(|(map, _)| { +// (*map).insert(height, nan); +// }); +// } + +// ControlFlow::Continue(()) +// }); +// } + +// // pub fn insert_cumulative(&mut self, height: Height, source: &HeightMap) -> T +// // where +// // T: Add + Sub, +// // { +// // let previous_cum = height +// // .checked_sub(1) +// // .map(|previous_sum_height| { +// // self.get(&previous_sum_height).unwrap_or_else(|| { +// // dbg!(previous_sum_height); +// // panic!() +// // }) +// // }) +// // .unwrap_or_default(); + +// // let last_value = source.get(&height).unwrap(); + +// // let cum_value = previous_cum + last_value; + +// // self.insert(height, cum_value); + +// // cum_value +// // } + +// // pub fn insert_last_x_sum(&mut self, height: Height, source: &HeightMap, x: usize) -> T +// // where +// // T: Add + Sub, +// // { +// // let to_subtract = (height + 1) +// // .checked_sub(x) +// // .map(|previous_height| { +// // source.get(&previous_height).unwrap_or_else(|| { +// // dbg!(&self.path_all, &source.path_all, previous_height); +// // panic!() +// // }) +// // }) +// // .unwrap_or_default(); + +// // let previous_sum = height +// // .checked_sub(1) +// // .map(|previous_sum_height| self.get(&previous_sum_height).unwrap()) +// // .unwrap_or_default(); + +// // let last_value = source.get(&height).unwrap(); + +// // let sum = previous_sum + last_value - to_subtract; + +// // self.insert(height, sum); + +// // sum +// // } + +// // pub fn insert_simple_average(&mut self, height: Height, source: &HeightMap, block_time: usize) +// // where +// // T: Into + From, +// // { +// // let to_subtract: f32 = (height + 1) +// // .checked_sub(block_time) +// // .map(|previous_height| source.get(&previous_height).unwrap()) +// // .unwrap_or_default() +// // .into(); + +// // let previous_average: f32 = height +// // .checked_sub(1) +// // .map(|previous_average_height| self.get(&previous_average_height).unwrap()) +// // .unwrap_or_default() +// // .into(); + +// // let last_value: f32 = source.get(&height).unwrap().into(); + +// // let sum = previous_average * block_time as f32 - to_subtract + last_value; + +// // let average: T = (sum / block_time as f32).into(); + +// // self.insert(height, average); +// // } + +// // pub fn insert_net_change(&mut self, height: Height, source: &HeightMap, offset: usize) -> T +// // where +// // T: Sub, +// // { +// // let previous_value = height +// // .checked_sub(offset) +// 
// .map(|height| { +// // source.get(&height).unwrap_or_else(|| { +// // dbg!(&self.path_all, &source.path_all, offset); +// // panic!(); +// // }) +// // }) +// // .unwrap_or_default(); + +// // let last_value = source.get(&height).unwrap(); + +// // let net = last_value - previous_value; + +// // self.insert(height, net); + +// // net +// // } + +// // pub fn insert_median(&mut self, height: Height, source: &HeightMap, size: usize) -> T +// // where +// // T: FloatCore, +// // { +// // if size < 3 { +// // panic!("Computing a median for a size lower than 3 is useless"); +// // } + +// // let median = { +// // if let Some(start) = height.checked_sub(size - 1) { +// // let even = size % 2 == 0; +// // let median_index = size / 2; + +// // let mut vec = (start..=height) +// // .map(|height| { +// // OrderedFloat(source.get(&height).unwrap_or_else(|| { +// // dbg!(height, &source.path_all, size); +// // panic!() +// // })) +// // }) +// // .collect_vec(); + +// // vec.sort_unstable(); + +// // if even { +// // (vec.get(median_index) +// // .unwrap_or_else(|| { +// // dbg!(median_index, &self.path_all, &source.path_all, size); +// // panic!() +// // }) +// // .0 +// // + vec.get(median_index - 1).unwrap().0) +// // / T::from(2.0).unwrap() +// // } else { +// // vec.get(median_index).unwrap().0 +// // } +// // } else { +// // T::default() +// // } +// // }; + +// // self.insert(height, median); + +// // median +// // } +// } diff --git a/parser/src/structs/height_map_chunk_id.rs b/parser/src/structs/height_map_chunk_id.rs new file mode 100644 index 000000000..c0bb9c536 --- /dev/null +++ b/parser/src/structs/height_map_chunk_id.rs @@ -0,0 +1,42 @@ +use allocative::Allocative; +use derive_deref::{Deref, DerefMut}; + +use crate::HEIGHT_MAP_CHUNK_SIZE; + +use super::{Height, MapChunkId}; + +#[derive( + Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative, Deref, DerefMut, +)] +pub struct HeightMapChunkId(Height); + +impl HeightMapChunkId { + pub fn new(height: &Height) -> Self { + Self(Height::new( + **height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE, + )) + } +} + +impl MapChunkId for HeightMapChunkId { + fn to_name(&self) -> String { + let start = ***self; + let end = start + HEIGHT_MAP_CHUNK_SIZE; + + format!("{start}..{end}") + } + + fn from_name(name: &str) -> Self { + Self(Height::new( + name.split("..").next().unwrap().parse::().unwrap(), + )) + } + + fn to_usize(self) -> usize { + **self as usize + } + + fn from_usize(id: usize) -> Self { + Self(Height::new(id as u32)) + } +} diff --git a/parser/src/structs/liquidity.rs b/parser/src/structs/liquidity.rs index 73ccbeb0c..c1461f2e5 100644 --- a/parser/src/structs/liquidity.rs +++ b/parser/src/structs/liquidity.rs @@ -5,7 +5,7 @@ use std::{ use allocative::Allocative; -use super::WAmount; +use super::Amount; #[derive(Debug)] pub struct LiquidityClassification { @@ -18,8 +18,8 @@ impl LiquidityClassification { /// Following this: /// https://insights.glassnode.com/bitcoin-liquid-supply/ /// https://www.desmos.com/calculator/dutgni5rtj - pub fn new(sent: WAmount, received: WAmount) -> Self { - if received == WAmount::ZERO { + pub fn new(sent: Amount, received: Amount) -> Self { + if received == Amount::ZERO { dbg!(sent, received); panic!() } @@ -29,7 +29,7 @@ impl LiquidityClassification { panic!("Shouldn't be possible"); } - if sent == WAmount::ZERO { + if sent == Amount::ZERO { 0.0 } else { let liquidity = sent.to_sat() as f64 / received.to_sat() as f64; diff --git a/parser/src/structs/map_value.rs 
b/parser/src/structs/map_value.rs index e5029b0e7..b29aa25e4 100644 --- a/parser/src/structs/map_value.rs +++ b/parser/src/structs/map_value.rs @@ -1,14 +1,25 @@ use std::fmt::Debug; +use allocative::Allocative; use bincode::{Decode, Encode}; use serde::{de::DeserializeOwned, Serialize}; use crate::datasets::OHLC; -use super::WNaiveDate; +use super::{Date, Height}; pub trait MapValue: - Clone + Copy + Default + Debug + Serialize + DeserializeOwned + Encode + Decode + Sync + Send + Clone + + Copy + + Default + + Debug + + Serialize + + DeserializeOwned + + Encode + + Decode + + Sync + + Send + + Allocative { } @@ -18,5 +29,6 @@ impl MapValue for u64 {} impl MapValue for usize {} impl MapValue for f32 {} impl MapValue for f64 {} -impl MapValue for WNaiveDate {} +impl MapValue for Date {} impl MapValue for OHLC {} +impl MapValue for Height {} diff --git a/parser/src/structs/mod.rs b/parser/src/structs/mod.rs index 8f8d21403..bbb5bac85 100644 --- a/parser/src/structs/mod.rs +++ b/parser/src/structs/mod.rs @@ -4,25 +4,31 @@ mod address_realized_data; mod address_size; mod address_split; mod address_type; +mod amount; mod any_map; mod bi_map; mod block_data; mod block_path; mod config; mod counter; +mod date; mod date_data; mod date_map; +mod date_map_chunk_id; mod empty_address_data; +mod generic_map; +mod height; mod height_map; +mod height_map_chunk_id; mod liquidity; mod map_value; mod partial_txout_data; mod price; mod sent_data; +mod serialized_btreemap; +mod serialized_vec; mod tx_data; mod txout_index; -mod wamount; -mod wnaivedate; pub use address::*; pub use address_data::*; @@ -30,22 +36,28 @@ pub use address_realized_data::*; pub use address_size::*; pub use address_split::*; pub use address_type::*; +pub use amount::*; pub use any_map::*; pub use bi_map::*; pub use block_data::*; pub use block_path::*; pub use config::*; pub use counter::*; +pub use date::*; pub use date_data::*; pub use date_map::*; +pub use date_map_chunk_id::*; pub use empty_address_data::*; +pub use generic_map::*; +pub use height::*; pub use height_map::*; +pub use height_map_chunk_id::*; pub use liquidity::*; pub use map_value::*; pub use partial_txout_data::*; pub use price::*; pub use sent_data::*; +pub use serialized_btreemap::*; +pub use serialized_vec::*; pub use tx_data::*; pub use txout_index::*; -pub use wamount::*; -pub use wnaivedate::*; diff --git a/parser/src/structs/partial_txout_data.rs b/parser/src/structs/partial_txout_data.rs index cdb5e1755..7a4ed7bcc 100644 --- a/parser/src/structs/partial_txout_data.rs +++ b/parser/src/structs/partial_txout_data.rs @@ -1,14 +1,14 @@ -use super::{Address, WAmount}; +use super::{Address, Amount}; #[derive(Debug)] pub struct PartialTxoutData { - pub amount: WAmount, + pub amount: Amount, pub address: Option
<Address>, pub address_index_opt: Option<u32>, } impl PartialTxoutData { - pub fn new(address: Option<Address>, amount: WAmount, address_index_opt: Option<u32>) -> Self { + pub fn new(address: Option<Address>
, amount: Amount, address_index_opt: Option) -> Self { Self { address, amount, diff --git a/parser/src/structs/price.rs b/parser/src/structs/price.rs index 94a3074e2..6d4e64eec 100644 --- a/parser/src/structs/price.rs +++ b/parser/src/structs/price.rs @@ -3,7 +3,7 @@ use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign}; use allocative::Allocative; use bincode::{Decode, Encode}; -use super::WAmount; +use super::Amount; #[derive( Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Allocative, @@ -76,18 +76,18 @@ impl SubAssign for Price { } } -impl Mul for Price { +impl Mul for Price { type Output = Self; - fn mul(self, rhs: WAmount) -> Self::Output { - Self((self.to_cent() as f64 * rhs.to_sat() as f64 / WAmount::ONE_BTC_F64).round() as u64) + fn mul(self, rhs: Amount) -> Self::Output { + Self((self.to_cent() as f64 * rhs.to_sat() as f64 / Amount::ONE_BTC_F64).round() as u64) } } -impl Div for Price { +impl Div for Price { type Output = Self; - fn div(self, rhs: WAmount) -> Self::Output { - Self((self.to_cent() as f64 * WAmount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64) + fn div(self, rhs: Amount) -> Self::Output { + Self((self.to_cent() as f64 * Amount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64) } } diff --git a/parser/src/structs/sent_data.rs b/parser/src/structs/sent_data.rs index 717ec0ce1..02a55c793 100644 --- a/parser/src/structs/sent_data.rs +++ b/parser/src/structs/sent_data.rs @@ -1,13 +1,13 @@ -use super::WAmount; +use super::Amount; #[derive(Default, Debug)] pub struct SentData { - pub volume: WAmount, + pub volume: Amount, pub count: u32, } impl SentData { - pub fn send(&mut self, amount: WAmount) { + pub fn send(&mut self, amount: Amount) { self.volume += amount; self.count += 1; } diff --git a/parser/src/structs/serialized_btreemap.rs b/parser/src/structs/serialized_btreemap.rs new file mode 100644 index 000000000..64b3b0a95 --- /dev/null +++ b/parser/src/structs/serialized_btreemap.rs @@ -0,0 +1,51 @@ +use std::{collections::BTreeMap, fmt::Debug}; + +use allocative::Allocative; +use bincode::{Decode, Encode}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +use super::{MapChunkId, MapKey, MapSerialized, MapValue}; + +#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)] +pub struct SerializedBTreeMap +where + Key: Ord, +{ + version: u32, + map: BTreeMap, +} + +impl MapSerialized for SerializedBTreeMap +where + Self: Debug + Serialize + DeserializeOwned + Encode + Decode, + ChunkId: MapChunkId, + Key: MapKey, + Value: MapValue, +{ + fn new(version: u32) -> Self { + Self { + version, + map: BTreeMap::default(), + } + } + + fn get_last_key(&self, _: &ChunkId) -> Option { + self.map.last_key_value().map(|(k, _)| k.to_owned()) + } + + fn version(&self) -> u32 { + self.version + } + + fn get(&self, key: &Key) -> Option<&Value> { + self.map.get(key) + } + + fn last(&self) -> Option<&Value> { + self.map.last_key_value().map(|(_, v)| v) + } + + fn extend(&mut self, map: BTreeMap) { + self.map.extend(map) + } +} diff --git a/parser/src/structs/serialized_vec.rs b/parser/src/structs/serialized_vec.rs new file mode 100644 index 000000000..ab9966949 --- /dev/null +++ b/parser/src/structs/serialized_vec.rs @@ -0,0 +1,59 @@ +use std::{cmp::Ordering, collections::BTreeMap, fmt::Debug}; + +use allocative::Allocative; +use bincode::{Decode, Encode}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +use super::{MapChunkId, MapKey, MapSerialized, MapValue}; + +#[derive(Debug, Default, Serialize, 
Deserialize, Encode, Decode, Allocative)] +pub struct SerializedVec { + version: u32, + map: Vec, +} + +impl MapSerialized for SerializedVec +where + Self: Debug + Serialize + DeserializeOwned + Encode + Decode, + ChunkId: MapChunkId, + Key: MapKey, + Value: MapValue, +{ + fn new(version: u32) -> Self { + Self { + version, + map: vec![], + } + } + + fn get_last_key(&self, chunk_id: &ChunkId) -> Option { + Some(Key::from_usize(chunk_id.to_usize() + self.map.len())) + } + + fn version(&self) -> u32 { + self.version + } + + fn get(&self, serialized_key: &Key) -> Option<&Value> { + self.map.get(serialized_key.to_usize()) + } + + fn last(&self) -> Option<&Value> { + self.map.last() + } + + fn extend(&mut self, map: BTreeMap) { + map.into_iter().for_each(|(key, value)| { + let key = key.to_serialized_key().to_usize(); + + match self.map.len().cmp(&key) { + Ordering::Greater => self.map[key] = value, + Ordering::Equal => self.map.push(value), + Ordering::Less => { + dbg!(&self.map, key, value); + panic!() + } + } + }); + } +} diff --git a/parser/src/structs/wnaivedate.rs b/parser/src/structs/wnaivedate.rs deleted file mode 100644 index c480046ad..000000000 --- a/parser/src/structs/wnaivedate.rs +++ /dev/null @@ -1,76 +0,0 @@ -use std::{fmt, str::FromStr}; - -use allocative::{Allocative, Visitor}; -use bincode::{ - de::{BorrowDecoder, Decoder}, - enc::Encoder, - error::{DecodeError, EncodeError}, - BorrowDecode, Decode, Encode, -}; -use chrono::{NaiveDate, TimeZone, Utc}; -use derive_deref::{Deref, DerefMut}; -use serde::{Deserialize, Serialize}; - -#[derive( - Debug, - PartialEq, - Eq, - PartialOrd, - Ord, - Clone, - Copy, - Deref, - DerefMut, - Default, - Serialize, - Deserialize, -)] -pub struct WNaiveDate(NaiveDate); - -impl WNaiveDate { - pub fn wrap(date: NaiveDate) -> Self { - Self(date) - } - - pub fn from_timestamp(timestamp: u32) -> Self { - Self( - Utc.timestamp_opt(i64::from(timestamp), 0) - .unwrap() - .date_naive(), - ) - } -} - -impl fmt::Display for WNaiveDate { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.0, f) - } -} - -impl Encode for WNaiveDate { - fn encode(&self, encoder: &mut E) -> Result<(), EncodeError> { - Encode::encode(&self.to_string(), encoder) - } -} - -impl Decode for WNaiveDate { - fn decode(decoder: &mut D) -> core::result::Result { - let str: String = Decode::decode(decoder)?; - - Ok(Self(NaiveDate::from_str(&str).unwrap())) - } -} - -impl<'de> BorrowDecode<'de> for WNaiveDate { - fn borrow_decode>(decoder: &mut D) -> Result { - let str: String = BorrowDecode::borrow_decode(decoder)?; - - Ok(Self(NaiveDate::from_str(&str).unwrap())) - } -} - -impl Allocative for WNaiveDate { - fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) { - visitor.visit_simple_sized::(); - } -} diff --git a/parser/src/utils/flamegraph.rs b/parser/src/utils/flamegraph.rs index 7aa2641a2..2a40ab398 100644 --- a/parser/src/utils/flamegraph.rs +++ b/parser/src/utils/flamegraph.rs @@ -2,13 +2,13 @@ use std::{fs, path::PathBuf}; use chrono::Local; -use crate::{databases::Databases, datasets::AllDatasets, states::States}; +use crate::{databases::Databases, datasets::AllDatasets, states::States, structs::Height}; pub fn generate_allocation_files( datasets: &AllDatasets, databases: &Databases, states: &States, - last_height: usize, + last_height: Height, ) -> color_eyre::Result<()> { let mut flamegraph = allocative::FlameGraphBuilder::default(); flamegraph.visit_root(datasets); diff --git a/server/src/handler.rs b/server/src/handler.rs index 
56e1150d9..32ad39dfb 100644 --- a/server/src/handler.rs +++ b/server/src/handler.rs @@ -1,3 +1,5 @@ +use std::{collections::BTreeMap, path::PathBuf}; + use axum::{ extract::{Path, Query, State}, http::HeaderMap, @@ -7,11 +9,11 @@ use color_eyre::{eyre::eyre, owo_colors::OwoColorize}; use reqwest::{header::HOST, StatusCode}; use serde::Deserialize; -use parser::{log, DateMap, HeightMap, WNaiveDate, HEIGHT_MAP_CHUNK_SIZE, OHLC}; +use parser::{log, Date, DateMap, Height, HeightMap, MapChunkId, HEIGHT_MAP_CHUNK_SIZE, OHLC}; use crate::{ chunk::Chunk, headers::add_cors_to_headers, kind::Kind, response::typed_value_to_response, - AppState, + routes::Route, AppState, }; #[derive(Deserialize)] @@ -63,19 +65,22 @@ fn _file_handler( let (kind, route) = if path.starts_with(date_prefix) { ( Kind::Date, - routes - .date - .get(&path.strip_prefix(date_prefix).unwrap().replace('-', "_")), + routes.date.get(&replace_dash_by_underscore( + path.strip_prefix(date_prefix).unwrap(), + )), ) } else if path.starts_with(height_prefix) { ( Kind::Height, - routes - .height - .get(&path.strip_prefix(height_prefix).unwrap().replace('-', "_")), + routes.height.get(&replace_dash_by_underscore( + path.strip_prefix(height_prefix).unwrap(), + )), ) } else { - (Kind::Last, routes.last.get(&path.replace('-', "_"))) + ( + Kind::Last, + routes.last.get(&replace_dash_by_underscore(&path)), + ) }; if route.is_none() { @@ -87,48 +92,18 @@ fn _file_handler( let mut chunk = None; if kind != Kind::Last { - let datasets = match kind { - Kind::Date => DateMap::::_read_dir(&route.file_path, &route.serialization), - Kind::Height => HeightMap::::_read_dir(&route.file_path, &route.serialization), + match kind { + Kind::Date => { + let datasets = DateMap::::_read_dir(&route.file_path, &route.serialization); + process_datasets(headers, kind, &mut chunk, &mut route, query, datasets)?; + } + Kind::Height => { + let datasets = + HeightMap::::_read_dir(&route.file_path, &route.serialization); + process_datasets(headers, kind, &mut chunk, &mut route, query, datasets)?; + } _ => panic!(), }; - - let (last_chunk_id, _) = datasets.last_key_value().unwrap(); - - let chunk_id = query.chunk.unwrap_or(*last_chunk_id); - - let path = datasets.get(&chunk_id); - - if path.is_none() { - return Err(eyre!("Couldn't find chunk")); - } - - route.file_path = path.unwrap().to_str().unwrap().to_string(); - - let offset = match kind { - Kind::Date => 1, - Kind::Height => HEIGHT_MAP_CHUNK_SIZE, - _ => panic!(), - }; - - let offsetted_to_url = |offseted| { - datasets.get(&offseted).map(|_| { - let host = headers[HOST].to_str().unwrap(); - let scheme = if host.contains("0.0.0.0") || host.contains("localhost") { - "http" - } else { - "https" - }; - - format!("{scheme}://{host}{}?chunk={offseted}", route.url_path) - }) - }; - - chunk = Some(Chunk { - id: chunk_id, - next: chunk_id.checked_add(offset).and_then(offsetted_to_url), - previous: chunk_id.checked_sub(offset).and_then(offsetted_to_url), - }) } let type_name = route.values_type.split("::").last().unwrap(); @@ -142,9 +117,73 @@ fn _file_handler( "f32" => typed_value_to_response::(kind, &route.file_path, chunk)?, "f64" => typed_value_to_response::(kind, &route.file_path, chunk)?, "OHLC" => typed_value_to_response::(kind, &route.file_path, chunk)?, - "WNaiveDate" => typed_value_to_response::(kind, &route.file_path, chunk)?, + "Date" => typed_value_to_response::(kind, &route.file_path, chunk)?, + "Height" => typed_value_to_response::(kind, &route.file_path, chunk)?, _ => panic!("Incompatible type: {type_name}"), 
}; Ok(value) } + +fn replace_dash_by_underscore(s: &str) -> String { + s.replace('-', "_") +} + +fn process_datasets( + headers: HeaderMap, + kind: Kind, + chunk: &mut Option, + route: &mut Route, + query: Query, + datasets: BTreeMap, +) -> color_eyre::Result<()> +where + ChunkId: MapChunkId, +{ + let (last_chunk_id, _) = datasets.last_key_value().unwrap_or_else(|| { + dbg!(&datasets, &route); + panic!() + }); + + let chunk_id = query + .chunk + .map(|id| ChunkId::from_usize(id)) + .unwrap_or(*last_chunk_id); + + let path = datasets.get(&chunk_id); + + if path.is_none() { + return Err(eyre!("Couldn't find chunk")); + } + + route.file_path = path.unwrap().to_str().unwrap().to_string(); + + let offset = match kind { + Kind::Date => 1, + Kind::Height => HEIGHT_MAP_CHUNK_SIZE as usize, + _ => panic!(), + }; + + let offsetted_to_url = |offseted| { + datasets.get(&ChunkId::from_usize(offseted)).map(|_| { + let host = headers[HOST].to_str().unwrap(); + let scheme = if host.contains("0.0.0.0") || host.contains("localhost") { + "http" + } else { + "https" + }; + + format!("{scheme}://{host}{}?chunk={offseted}", route.url_path) + }) + }; + + let chunk_id = chunk_id.to_usize(); + + chunk.replace(Chunk { + id: chunk_id, + next: chunk_id.checked_add(offset).and_then(offsetted_to_url), + previous: chunk_id.checked_sub(offset).and_then(offsetted_to_url), + }); + + Ok(()) +} diff --git a/server/src/imports.rs b/server/src/imports.rs index 7a273e4ac..f389520ab 100644 --- a/server/src/imports.rs +++ b/server/src/imports.rs @@ -1,17 +1,17 @@ use std::fmt::Debug; use bincode::Decode; -use parser::{Serialization, SerializedDateMap, SerializedHeightMap}; +use parser::{Date, Serialization, SerializedBTreeMap, SerializedVec}; use serde::{de::DeserializeOwned, Serialize}; -pub fn import_map(relative_path: &str) -> color_eyre::Result> +pub fn import_map(relative_path: &str) -> color_eyre::Result> where T: Serialize + Debug + DeserializeOwned + Decode, { Serialization::from_extension(relative_path.split('.').last().unwrap()).import(relative_path) } -pub fn import_vec(relative_path: &str) -> color_eyre::Result> +pub fn import_vec(relative_path: &str) -> color_eyre::Result> where T: Serialize + Debug + DeserializeOwned + Decode, { diff --git a/server/src/kind.rs b/server/src/kind.rs index 2e7f7ad29..93d4ac932 100644 --- a/server/src/kind.rs +++ b/server/src/kind.rs @@ -1,4 +1,4 @@ -#[derive(PartialEq, Eq)] +#[derive(PartialEq, Eq, Clone, Copy)] pub enum Kind { Date, Height, diff --git a/server/src/main.rs b/server/src/main.rs index da6916a19..0fff05623 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -56,7 +56,7 @@ async fn main() -> color_eyre::Result<()> { .with_state(state) .layer(compression_layer); - let port = 3110; + let port = 3111; log(&format!("Starting server on port {port}...")); diff --git a/server/src/routes.rs b/server/src/routes.rs index 39a994e3c..466a46203 100644 --- a/server/src/routes.rs +++ b/server/src/routes.rs @@ -33,10 +33,16 @@ impl Routes { let mut split_key = key.split('/').collect_vec(); let mut split_last = split_key.pop().unwrap().split('.').rev().collect_vec(); + let last = split_last.pop().unwrap().to_owned(); + + let mut skip = 2; + let serialization = split_last.pop().map_or_else( || { if *split_key.get(1).unwrap() == "price" { + skip = 1; + Serialization::Json } else { Serialization::Binary @@ -44,8 +50,11 @@ impl Routes { }, Serialization::from_extension, ); - let split_key = split_key.iter().skip(2).collect_vec(); + + let split_key = 
split_key.iter().skip(skip).collect_vec(); + let map_key = split_key.iter().join("_"); + let url_path = split_key.iter().join("-"); let file_path = key.to_owned();
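The `process_datasets` helper introduced in the handler hunk above turns the chunked on-disk layout into paginated responses: a chunk id is the numeric start of a chunk, and `next`/`previous` links are emitted only when the neighbouring chunk file actually exists in the directory listing. Below is a minimal sketch of that scheme, assuming a hard-coded scheme and host in place of the header-derived ones and illustrative type names rather than the crate's own `Chunk`/`MapChunkId`:

```rust
use std::{collections::BTreeMap, path::PathBuf};

// Stands in for HEIGHT_MAP_CHUNK_SIZE; date-indexed maps would use 1 instead,
// exactly as the `match kind` offset in the hunk above.
const CHUNK_SIZE: usize = 10_000;

struct Chunk {
    id: usize,
    next: Option<String>,
    previous: Option<String>,
}

/// Builds pagination links for `chunk_id` against the chunk files found on
/// disk: a neighbour becomes a link only if its chunk is present in `datasets`.
fn paginate(datasets: &BTreeMap<usize, PathBuf>, chunk_id: usize, url_path: &str) -> Chunk {
    let to_url = |id: usize| {
        datasets
            .get(&id)
            .map(|_| format!("https://example.com{url_path}?chunk={id}"))
    };

    Chunk {
        id: chunk_id,
        // `checked_sub` keeps chunk 0's `previous` as None instead of
        // underflowing; a missing neighbour file also yields None via `to_url`.
        next: chunk_id.checked_add(CHUNK_SIZE).and_then(|id| to_url(id)),
        previous: chunk_id.checked_sub(CHUNK_SIZE).and_then(|id| to_url(id)),
    }
}
```

Keying the links on what is actually on disk means a client can page blindly through `?chunk=` URLs and stop when `next` comes back null, without knowing the chunk size in advance.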