git: removed old files

This commit is contained in:
nym21
2025-02-24 00:53:57 +01:00
parent 92758f3e4e
commit bc7a76755b
161 changed files with 0 additions and 18246 deletions

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 11 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 11 KiB

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
<path d="M 10.874 7.57 L 10.874 6.802 C 10.104 5.809 9.587 4.634 9.397 3.379 C 9.339 3.009 8.877 2.865 8.637 3.152 C 8.059 3.833 7.605 4.632 7.299 5.517 C 8.234 6.565 9.486 7.284 10.874 7.57 Z M 13.937 4.749 C 12.729 4.749 11.751 5.731 11.751 6.939 L 11.751 8.563 C 8.895 8.394 6.474 6.636 5.379 4.142 C 5.229 3.8 4.746 3.78 4.585 4.117 C 4.131 5.077 3.876 6.149 3.876 7.28 C 3.876 9.217 4.808 11.025 6.203 12.364 C 6.562 12.711 6.915 12.998 7.266 13.261 L 3.331 14.245 C 3.038 14.318 2.907 14.658 3.072 14.912 C 3.547 15.648 4.723 16.895 7.261 16.999 C 7.48 17.007 7.698 16.928 7.864 16.783 L 9.648 15.249 L 11.751 15.249 C 14.167 15.249 16.125 13.293 16.125 10.876 L 16.125 6.499 L 17 4.749 L 13.937 4.749 Z M 13.937 7.391 C 13.697 7.391 13.458 7.175 13.458 6.935 C 13.458 6.695 13.697 6.483 13.937 6.483 C 14.177 6.483 14.399 6.703 14.399 6.943 C 14.399 7.183 14.177 7.391 13.937 7.391 Z" style="fill: #12100f;"></path>
</svg>

Before

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
<path d="M 10.874 7.57 L 10.874 6.802 C 10.104 5.809 9.587 4.634 9.397 3.379 C 9.339 3.009 8.877 2.865 8.637 3.152 C 8.059 3.833 7.605 4.632 7.299 5.517 C 8.234 6.565 9.486 7.284 10.874 7.57 Z M 13.937 4.749 C 12.729 4.749 11.751 5.731 11.751 6.939 L 11.751 8.563 C 8.895 8.394 6.474 6.636 5.379 4.142 C 5.229 3.8 4.746 3.78 4.585 4.117 C 4.131 5.077 3.876 6.149 3.876 7.28 C 3.876 9.217 4.808 11.025 6.203 12.364 C 6.562 12.711 6.915 12.998 7.266 13.261 L 3.331 14.245 C 3.038 14.318 2.907 14.658 3.072 14.912 C 3.547 15.648 4.723 16.895 7.261 16.999 C 7.48 17.007 7.698 16.928 7.864 16.783 L 9.648 15.249 L 11.751 15.249 C 14.167 15.249 16.125 13.293 16.125 10.876 L 16.125 6.499 L 17 4.749 L 13.937 4.749 Z M 13.937 7.391 C 13.697 7.391 13.458 7.175 13.458 6.935 C 13.458 6.695 13.697 6.483 13.937 6.483 C 14.177 6.483 14.399 6.703 14.399 6.943 C 14.399 7.183 14.177 7.391 13.937 7.391 Z" style="fill: #fffaf6;"></path>
</svg>

Before

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
<path d="M 10.874 7.57 L 10.874 6.802 C 10.104 5.809 9.587 4.634 9.397 3.379 C 9.339 3.009 8.877 2.865 8.637 3.152 C 8.059 3.833 7.605 4.632 7.299 5.517 C 8.234 6.565 9.486 7.284 10.874 7.57 Z M 13.937 4.749 C 12.729 4.749 11.751 5.731 11.751 6.939 L 11.751 8.563 C 8.895 8.394 6.474 6.636 5.379 4.142 C 5.229 3.8 4.746 3.78 4.585 4.117 C 4.131 5.077 3.876 6.149 3.876 7.28 C 3.876 9.217 4.808 11.025 6.203 12.364 C 6.562 12.711 6.915 12.998 7.266 13.261 L 3.331 14.245 C 3.038 14.318 2.907 14.658 3.072 14.912 C 3.547 15.648 4.723 16.895 7.261 16.999 C 7.48 17.007 7.698 16.928 7.864 16.783 L 9.648 15.249 L 11.751 15.249 C 14.167 15.249 16.125 13.293 16.125 10.876 L 16.125 6.499 L 17 4.749 L 13.937 4.749 Z M 13.937 7.391 C 13.697 7.391 13.458 7.175 13.458 6.935 C 13.458 6.695 13.697 6.483 13.937 6.483 C 14.177 6.483 14.399 6.703 14.399 6.943 C 14.399 7.183 14.177 7.391 13.937 7.391 Z" style="fill: #f26610;"></path>
</svg>

Before

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -1,11 +0,0 @@
<svg viewBox="0 0 720 180" xmlns="http://www.w3.org/2000/svg">
<defs></defs>
<g transform="matrix(7.5, 0, 0, 7.5, -2046.71228, -1592.744873)">
<ellipse style="fill: #f26610;" cx="284.895" cy="224.366" rx="12" ry="12"></ellipse>
<path d="M 285.769 221.936 L 285.769 221.168 C 284.999 220.175 284.482 219 284.292 217.745 C 284.234 217.375 283.772 217.231 283.532 217.518 C 282.954 218.199 282.5 218.998 282.194 219.883 C 283.129 220.931 284.381 221.65 285.769 221.936 Z M 288.832 219.115 C 287.624 219.115 286.646 220.097 286.646 221.305 L 286.646 222.929 C 283.79 222.76 281.369 221.002 280.274 218.508 C 280.124 218.166 279.641 218.146 279.48 218.483 C 279.026 219.443 278.771 220.515 278.771 221.646 C 278.771 223.583 279.703 225.391 281.098 226.73 C 281.457 227.077 281.81 227.364 282.161 227.627 L 278.226 228.611 C 277.933 228.684 277.802 229.024 277.967 229.278 C 278.442 230.014 279.618 231.261 282.156 231.365 C 282.375 231.373 282.593 231.294 282.759 231.149 L 284.543 229.615 L 286.646 229.615 C 289.062 229.615 291.02 227.659 291.02 225.242 L 291.02 220.865 L 291.895 219.115 L 288.832 219.115 Z M 288.832 221.757 C 288.592 221.757 288.353 221.541 288.353 221.301 C 288.353 221.061 288.592 220.849 288.832 220.849 C 289.072 220.849 289.294 221.069 289.294 221.309 C 289.294 221.549 289.072 221.757 288.832 221.757 Z" style="fill: #fffaf6;"></path>
</g>
<g transform="matrix(1, 0, 0, 1, -30, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: #fffaf6;"></path>
<path d="M 589.19 97.802 L 589.19 106.23 L 610.948 106.23 C 605.1 112.938 597.446 119.044 587.986 124.376 L 593.404 131.514 C 597.532 128.934 601.488 126.268 605.186 123.43 L 605.186 146.048 L 614.13 146.048 L 614.13 123.43 L 626.944 123.43 L 626.944 149.402 L 635.974 149.402 L 635.974 123.43 L 649.82 123.43 L 649.82 134.008 C 649.82 136.072 649.046 137.104 647.498 137.104 L 640.36 136.674 L 642.768 145.188 L 650.422 145.188 C 655.926 145.188 658.678 142.092 658.678 135.986 L 658.678 115.174 L 635.974 115.174 L 635.974 108.638 L 626.944 108.638 L 626.944 115.174 L 614.388 115.174 C 617.054 112.336 619.548 109.326 621.784 106.23 L 665.128 106.23 L 665.128 97.802 L 626.858 97.802 C 627.89 95.824 628.836 93.76 629.696 91.61 L 620.838 90.492 C 619.806 92.9 618.516 95.394 617.14 97.802 L 589.19 97.802 Z M 648.1 68.734 C 642.338 72.088 636.232 75.098 629.868 77.678 C 621.612 75.012 612.926 72.518 603.896 70.282 L 599.252 77.248 C 605.272 78.624 611.206 80.258 617.226 82.15 C 610.088 84.386 602.606 86.106 594.78 87.482 L 599.596 95.308 C 612.324 92.04 622.472 89.116 630.04 86.364 C 638.124 89.116 646.122 92.298 654.034 95.824 L 658.936 88.428 C 653.26 86.02 647.412 83.698 641.392 81.548 C 646.208 79.226 651.11 76.56 655.926 73.55 L 648.1 68.734 Z M 675.438 77.85 L 675.438 85.848 L 682.404 85.848 L 682.404 98.92 C 682.404 101.5 681.114 103.22 678.62 104.166 L 680.684 110.874 C 692.036 108.896 701.926 106.66 710.182 104.08 L 708.634 96.426 C 703.474 98.146 697.454 99.608 690.574 100.984 L 690.574 85.848 L 712.332 85.848 L 712.332 77.85 L 698.916 77.85 C 698.4 74.668 697.884 71.744 697.368 69.164 L 688.338 70.712 C 688.94 72.862 689.542 75.27 690.144 77.85 L 675.438 77.85 Z M 724.028 89.632 L 739.25 89.632 L 739.25 93.502 L 723.856 93.502 C 723.942 92.47 724.028 91.352 724.028 90.32 L 724.028 89.632 Z M 739.25 83.096 L 724.028 83.096 L 724.028 79.226 L 739.25 79.226 L 739.25 83.096 Z M 722.652 100.038 L 739.25 100.038 L 739.25 100.898 C 739.25 103.048 738.218 104.166 736.24 
104.166 C 733.918 104.166 731.424 103.994 728.758 103.822 L 730.822 111.562 L 738.734 111.562 C 744.582 111.562 747.506 108.982 747.506 103.908 L 747.506 72.002 L 715.6 72.002 L 715.6 90.922 C 715.428 97.286 713.192 102.532 708.892 106.746 L 715.342 112.594 C 718.782 109.068 721.276 104.854 722.652 100.038 Z M 708.462 121.452 L 708.462 126.784 L 683.608 126.784 L 683.608 134.352 L 708.462 134.352 L 708.462 139.598 L 675.524 139.598 L 675.524 147.51 L 750 147.51 L 750 139.598 L 717.062 139.598 L 717.062 134.352 L 742.174 134.352 L 742.174 126.784 L 717.062 126.784 L 717.062 121.452 L 746.216 121.452 L 746.216 113.712 L 679.308 113.712 L 679.308 121.452 L 708.462 121.452 Z" style="fill: #68625f"></path>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 5.6 KiB

View File

@@ -1,11 +0,0 @@
<svg viewBox="0 0 720 180" xmlns="http://www.w3.org/2000/svg">
<defs></defs>
<g transform="matrix(7.5, 0, 0, 7.5, -2046.71228, -1592.744873)">
<ellipse style="fill: #f26610;" cx="284.895" cy="224.366" rx="12" ry="12"></ellipse>
<path d="M 285.769 221.936 L 285.769 221.168 C 284.999 220.175 284.482 219 284.292 217.745 C 284.234 217.375 283.772 217.231 283.532 217.518 C 282.954 218.199 282.5 218.998 282.194 219.883 C 283.129 220.931 284.381 221.65 285.769 221.936 Z M 288.832 219.115 C 287.624 219.115 286.646 220.097 286.646 221.305 L 286.646 222.929 C 283.79 222.76 281.369 221.002 280.274 218.508 C 280.124 218.166 279.641 218.146 279.48 218.483 C 279.026 219.443 278.771 220.515 278.771 221.646 C 278.771 223.583 279.703 225.391 281.098 226.73 C 281.457 227.077 281.81 227.364 282.161 227.627 L 278.226 228.611 C 277.933 228.684 277.802 229.024 277.967 229.278 C 278.442 230.014 279.618 231.261 282.156 231.365 C 282.375 231.373 282.593 231.294 282.759 231.149 L 284.543 229.615 L 286.646 229.615 C 289.062 229.615 291.02 227.659 291.02 225.242 L 291.02 220.865 L 291.895 219.115 L 288.832 219.115 Z M 288.832 221.757 C 288.592 221.757 288.353 221.541 288.353 221.301 C 288.353 221.061 288.592 220.849 288.832 220.849 C 289.072 220.849 289.294 221.069 289.294 221.309 C 289.294 221.549 289.072 221.757 288.832 221.757 Z" style="fill: #fffaf6;"></path>
</g>
<g transform="matrix(1, 0, 0, 1, -30, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: #12100f;"></path>
<path d="M 589.19 97.802 L 589.19 106.23 L 610.948 106.23 C 605.1 112.938 597.446 119.044 587.986 124.376 L 593.404 131.514 C 597.532 128.934 601.488 126.268 605.186 123.43 L 605.186 146.048 L 614.13 146.048 L 614.13 123.43 L 626.944 123.43 L 626.944 149.402 L 635.974 149.402 L 635.974 123.43 L 649.82 123.43 L 649.82 134.008 C 649.82 136.072 649.046 137.104 647.498 137.104 L 640.36 136.674 L 642.768 145.188 L 650.422 145.188 C 655.926 145.188 658.678 142.092 658.678 135.986 L 658.678 115.174 L 635.974 115.174 L 635.974 108.638 L 626.944 108.638 L 626.944 115.174 L 614.388 115.174 C 617.054 112.336 619.548 109.326 621.784 106.23 L 665.128 106.23 L 665.128 97.802 L 626.858 97.802 C 627.89 95.824 628.836 93.76 629.696 91.61 L 620.838 90.492 C 619.806 92.9 618.516 95.394 617.14 97.802 L 589.19 97.802 Z M 648.1 68.734 C 642.338 72.088 636.232 75.098 629.868 77.678 C 621.612 75.012 612.926 72.518 603.896 70.282 L 599.252 77.248 C 605.272 78.624 611.206 80.258 617.226 82.15 C 610.088 84.386 602.606 86.106 594.78 87.482 L 599.596 95.308 C 612.324 92.04 622.472 89.116 630.04 86.364 C 638.124 89.116 646.122 92.298 654.034 95.824 L 658.936 88.428 C 653.26 86.02 647.412 83.698 641.392 81.548 C 646.208 79.226 651.11 76.56 655.926 73.55 L 648.1 68.734 Z M 675.438 77.85 L 675.438 85.848 L 682.404 85.848 L 682.404 98.92 C 682.404 101.5 681.114 103.22 678.62 104.166 L 680.684 110.874 C 692.036 108.896 701.926 106.66 710.182 104.08 L 708.634 96.426 C 703.474 98.146 697.454 99.608 690.574 100.984 L 690.574 85.848 L 712.332 85.848 L 712.332 77.85 L 698.916 77.85 C 698.4 74.668 697.884 71.744 697.368 69.164 L 688.338 70.712 C 688.94 72.862 689.542 75.27 690.144 77.85 L 675.438 77.85 Z M 724.028 89.632 L 739.25 89.632 L 739.25 93.502 L 723.856 93.502 C 723.942 92.47 724.028 91.352 724.028 90.32 L 724.028 89.632 Z M 739.25 83.096 L 724.028 83.096 L 724.028 79.226 L 739.25 79.226 L 739.25 83.096 Z M 722.652 100.038 L 739.25 100.038 L 739.25 100.898 C 739.25 103.048 738.218 104.166 736.24 
104.166 C 733.918 104.166 731.424 103.994 728.758 103.822 L 730.822 111.562 L 738.734 111.562 C 744.582 111.562 747.506 108.982 747.506 103.908 L 747.506 72.002 L 715.6 72.002 L 715.6 90.922 C 715.428 97.286 713.192 102.532 708.892 106.746 L 715.342 112.594 C 718.782 109.068 721.276 104.854 722.652 100.038 Z M 708.462 121.452 L 708.462 126.784 L 683.608 126.784 L 683.608 134.352 L 708.462 134.352 L 708.462 139.598 L 675.524 139.598 L 675.524 147.51 L 750 147.51 L 750 139.598 L 717.062 139.598 L 717.062 134.352 L 742.174 134.352 L 742.174 126.784 L 717.062 126.784 L 717.062 121.452 L 746.216 121.452 L 746.216 113.712 L 679.308 113.712 L 679.308 121.452 L 708.462 121.452 Z" style="fill: #b4aca9;"></path>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 5.6 KiB

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<ellipse style="fill: #f26610;" cx="12" cy="12" rx="12" ry="12"/>
<path d="M 12.874 9.57 L 12.874 8.802 C 12.104 7.809 11.587 6.634 11.397 5.379 C 11.339 5.009 10.877 4.865 10.637 5.152 C 10.059 5.833 9.605 6.632 9.299 7.517 C 10.234 8.565 11.486 9.284 12.874 9.57 Z M 15.937 6.749 C 14.729 6.749 13.751 7.731 13.751 8.939 L 13.751 10.563 C 10.895 10.394 8.474 8.636 7.379 6.142 C 7.229 5.8 6.746 5.78 6.585 6.117 C 6.131 7.077 5.876 8.149 5.876 9.28 C 5.876 11.217 6.808 13.025 8.203 14.364 C 8.562 14.711 8.915 14.998 9.266 15.261 L 5.331 16.245 C 5.038 16.318 4.907 16.658 5.072 16.912 C 5.547 17.648 6.723 18.895 9.261 18.999 C 9.48 19.007 9.698 18.928 9.864 18.783 L 11.648 17.249 L 13.751 17.249 C 16.167 17.249 18.125 15.293 18.125 12.876 L 18.125 8.499 L 19 6.749 L 15.937 6.749 Z M 15.937 9.391 C 15.697 9.391 15.458 9.175 15.458 8.935 C 15.458 8.695 15.697 8.483 15.937 8.483 C 16.177 8.483 16.399 8.703 16.399 8.943 C 16.399 9.183 16.177 9.391 15.937 9.391 Z" style="fill: #fffaf6;"/>
</svg>

Before

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 500 180" xmlns="http://www.w3.org/2000/svg">
<defs/>
<g transform="matrix(1, 0, 0, 1, -252.158997, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: #fffaf6;"/>
<path d="M 589.19 97.802 L 589.19 106.23 L 610.948 106.23 C 605.1 112.938 597.446 119.044 587.986 124.376 L 593.404 131.514 C 597.532 128.934 601.488 126.268 605.186 123.43 L 605.186 146.048 L 614.13 146.048 L 614.13 123.43 L 626.944 123.43 L 626.944 149.402 L 635.974 149.402 L 635.974 123.43 L 649.82 123.43 L 649.82 134.008 C 649.82 136.072 649.046 137.104 647.498 137.104 L 640.36 136.674 L 642.768 145.188 L 650.422 145.188 C 655.926 145.188 658.678 142.092 658.678 135.986 L 658.678 115.174 L 635.974 115.174 L 635.974 108.638 L 626.944 108.638 L 626.944 115.174 L 614.388 115.174 C 617.054 112.336 619.548 109.326 621.784 106.23 L 665.128 106.23 L 665.128 97.802 L 626.858 97.802 C 627.89 95.824 628.836 93.76 629.696 91.61 L 620.838 90.492 C 619.806 92.9 618.516 95.394 617.14 97.802 L 589.19 97.802 Z M 648.1 68.734 C 642.338 72.088 636.232 75.098 629.868 77.678 C 621.612 75.012 612.926 72.518 603.896 70.282 L 599.252 77.248 C 605.272 78.624 611.206 80.258 617.226 82.15 C 610.088 84.386 602.606 86.106 594.78 87.482 L 599.596 95.308 C 612.324 92.04 622.472 89.116 630.04 86.364 C 638.124 89.116 646.122 92.298 654.034 95.824 L 658.936 88.428 C 653.26 86.02 647.412 83.698 641.392 81.548 C 646.208 79.226 651.11 76.56 655.926 73.55 L 648.1 68.734 Z M 675.438 77.85 L 675.438 85.848 L 682.404 85.848 L 682.404 98.92 C 682.404 101.5 681.114 103.22 678.62 104.166 L 680.684 110.874 C 692.036 108.896 701.926 106.66 710.182 104.08 L 708.634 96.426 C 703.474 98.146 697.454 99.608 690.574 100.984 L 690.574 85.848 L 712.332 85.848 L 712.332 77.85 L 698.916 77.85 C 698.4 74.668 697.884 71.744 697.368 69.164 L 688.338 70.712 C 688.94 72.862 689.542 75.27 690.144 77.85 L 675.438 77.85 Z M 724.028 89.632 L 739.25 89.632 L 739.25 93.502 L 723.856 93.502 C 723.942 92.47 724.028 91.352 724.028 90.32 L 724.028 89.632 Z M 739.25 83.096 L 724.028 83.096 L 724.028 79.226 L 739.25 79.226 L 739.25 83.096 Z M 722.652 100.038 L 739.25 100.038 L 739.25 100.898 C 739.25 103.048 738.218 104.166 736.24 
104.166 C 733.918 104.166 731.424 103.994 728.758 103.822 L 730.822 111.562 L 738.734 111.562 C 744.582 111.562 747.506 108.982 747.506 103.908 L 747.506 72.002 L 715.6 72.002 L 715.6 90.922 C 715.428 97.286 713.192 102.532 708.892 106.746 L 715.342 112.594 C 718.782 109.068 721.276 104.854 722.652 100.038 Z M 708.462 121.452 L 708.462 126.784 L 683.608 126.784 L 683.608 134.352 L 708.462 134.352 L 708.462 139.598 L 675.524 139.598 L 675.524 147.51 L 750 147.51 L 750 139.598 L 717.062 139.598 L 717.062 134.352 L 742.174 134.352 L 742.174 126.784 L 717.062 126.784 L 717.062 121.452 L 746.216 121.452 L 746.216 113.712 L 679.308 113.712 L 679.308 121.452 L 708.462 121.452 Z" style="fill: #867e7b;"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 4.4 KiB

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 500 180" xmlns="http://www.w3.org/2000/svg">
<defs/>
<g transform="matrix(1, 0, 0, 1, -252.158997, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: rgb(16, 16, 14);"/>
<path d="M 589.19 97.802 L 589.19 106.23 L 610.948 106.23 C 605.1 112.938 597.446 119.044 587.986 124.376 L 593.404 131.514 C 597.532 128.934 601.488 126.268 605.186 123.43 L 605.186 146.048 L 614.13 146.048 L 614.13 123.43 L 626.944 123.43 L 626.944 149.402 L 635.974 149.402 L 635.974 123.43 L 649.82 123.43 L 649.82 134.008 C 649.82 136.072 649.046 137.104 647.498 137.104 L 640.36 136.674 L 642.768 145.188 L 650.422 145.188 C 655.926 145.188 658.678 142.092 658.678 135.986 L 658.678 115.174 L 635.974 115.174 L 635.974 108.638 L 626.944 108.638 L 626.944 115.174 L 614.388 115.174 C 617.054 112.336 619.548 109.326 621.784 106.23 L 665.128 106.23 L 665.128 97.802 L 626.858 97.802 C 627.89 95.824 628.836 93.76 629.696 91.61 L 620.838 90.492 C 619.806 92.9 618.516 95.394 617.14 97.802 L 589.19 97.802 Z M 648.1 68.734 C 642.338 72.088 636.232 75.098 629.868 77.678 C 621.612 75.012 612.926 72.518 603.896 70.282 L 599.252 77.248 C 605.272 78.624 611.206 80.258 617.226 82.15 C 610.088 84.386 602.606 86.106 594.78 87.482 L 599.596 95.308 C 612.324 92.04 622.472 89.116 630.04 86.364 C 638.124 89.116 646.122 92.298 654.034 95.824 L 658.936 88.428 C 653.26 86.02 647.412 83.698 641.392 81.548 C 646.208 79.226 651.11 76.56 655.926 73.55 L 648.1 68.734 Z M 675.438 77.85 L 675.438 85.848 L 682.404 85.848 L 682.404 98.92 C 682.404 101.5 681.114 103.22 678.62 104.166 L 680.684 110.874 C 692.036 108.896 701.926 106.66 710.182 104.08 L 708.634 96.426 C 703.474 98.146 697.454 99.608 690.574 100.984 L 690.574 85.848 L 712.332 85.848 L 712.332 77.85 L 698.916 77.85 C 698.4 74.668 697.884 71.744 697.368 69.164 L 688.338 70.712 C 688.94 72.862 689.542 75.27 690.144 77.85 L 675.438 77.85 Z M 724.028 89.632 L 739.25 89.632 L 739.25 93.502 L 723.856 93.502 C 723.942 92.47 724.028 91.352 724.028 90.32 L 724.028 89.632 Z M 739.25 83.096 L 724.028 83.096 L 724.028 79.226 L 739.25 79.226 L 739.25 83.096 Z M 722.652 100.038 L 739.25 100.038 L 739.25 100.898 C 739.25 103.048 738.218 104.166 736.24 
104.166 C 733.918 104.166 731.424 103.994 728.758 103.822 L 730.822 111.562 L 738.734 111.562 C 744.582 111.562 747.506 108.982 747.506 103.908 L 747.506 72.002 L 715.6 72.002 L 715.6 90.922 C 715.428 97.286 713.192 102.532 708.892 106.746 L 715.342 112.594 C 718.782 109.068 721.276 104.854 722.652 100.038 Z M 708.462 121.452 L 708.462 126.784 L 683.608 126.784 L 683.608 134.352 L 708.462 134.352 L 708.462 139.598 L 675.524 139.598 L 675.524 147.51 L 750 147.51 L 750 139.598 L 717.062 139.598 L 717.062 134.352 L 742.174 134.352 L 742.174 126.784 L 717.062 126.784 L 717.062 121.452 L 746.216 121.452 L 746.216 113.712 L 679.308 113.712 L 679.308 121.452 L 708.462 121.452 Z" style="fill: rgb(192, 192, 171);"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 4.4 KiB

View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 310 180" xmlns="http://www.w3.org/2000/svg">
<defs/>
<g transform="matrix(1, 0, 0, 1, -253.876495, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: rgb(16, 16, 14);"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.7 KiB

View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 310 180" xmlns="http://www.w3.org/2000/svg">
<defs/>
<g transform="matrix(1, 0, 0, 1, -253.876495, 0)">
<path d="M 278.049 146.789 L 278.049 127.527 L 287.141 117.972 L 304.4 146.789 L 331.83 146.789 L 303.784 100.251 L 332.755 69.739 L 303.013 69.739 L 278.049 97.477 L 278.049 30.598 L 254.318 30.598 L 254.318 146.789 L 278.049 146.789 Z M 354.169 57.719 C 361.565 57.719 367.575 51.709 367.575 44.158 C 367.575 36.608 361.565 30.752 354.169 30.752 C 346.618 30.752 340.608 36.608 340.608 44.158 C 340.608 51.709 346.618 57.719 354.169 57.719 Z M 342.457 146.789 L 366.188 146.789 L 366.188 69.739 L 342.457 69.739 L 342.457 146.789 Z M 406.407 146.789 L 407.64 136.927 C 411.801 144.015 421.047 148.792 431.834 148.792 C 453.716 148.792 468.972 132.92 468.972 109.035 C 468.972 83.916 455.257 67.119 433.683 67.119 C 422.588 67.119 412.417 71.742 407.794 78.677 L 407.794 30.598 L 384.063 30.598 L 384.063 146.789 L 406.407 146.789 Z M 407.948 107.802 C 407.948 96.244 415.653 88.539 426.749 88.539 C 437.998 88.539 445.087 96.398 445.087 107.802 C 445.087 119.205 437.998 127.064 426.749 127.064 C 415.653 127.064 407.948 119.359 407.948 107.802 Z M 498.713 56.332 L 543.402 56.332 L 543.402 40.306 L 498.713 40.306 L 498.713 56.332 Z M 478.526 108.11 C 478.526 132.458 496.402 148.638 521.058 148.638 C 545.56 148.638 563.435 132.458 563.435 108.11 C 563.435 83.762 545.56 67.428 521.058 67.428 C 496.402 67.428 478.526 83.762 478.526 108.11 Z M 502.412 107.956 C 502.412 96.398 509.963 88.693 521.058 88.693 C 531.999 88.693 539.55 96.398 539.55 107.956 C 539.55 119.667 531.999 127.372 521.058 127.372 C 509.963 127.372 502.412 119.667 502.412 107.956 Z" style="fill: rgb(16, 16, 14);"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.7 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 1.8 MiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 1.8 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 496 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 564 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 592 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 453 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 526 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 208 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 386 KiB

View File

@@ -1,11 +0,0 @@
# Build/runtime image for the kibo Bitcoin dataset parser.
FROM rust:1.81
# Default Bitcoin Core RPC connection settings; override at `docker run`
# time with --env (see the accompanying run script).
ENV rpcconnect=localhost
ENV rpcport=8332
ENV rpcuser=satoshi
ENV rpcpassword=nakamoto
WORKDIR /
COPY . .
# `exec` replaces the shell so cargo (PID 1) receives stop signals directly.
CMD exec cargo run -r --manifest-path /kibo/parser/Cargo.toml -- --datadir=/bitcoin --rpcconnect=${rpcconnect} --rpcport=${rpcport} --rpcuser=${rpcuser} --rpcpassword=${rpcpassword}

View File

@@ -1,6 +0,0 @@
#!/usr/bin/env bash
# Rebuild the kibo-parser Docker image from a fresh clone of the repository.
# Fix: the shebang was written `# !/usr/bin/env bash` — the space after `#`
# turns it into an ordinary comment, so the kernel would not select bash.

# Remove any previous checkout first (files may be root-owned, hence sudo).
[[ -d "./kibo" ]] && sudo rm -r ./kibo
git clone https://github.com/kibo-money/kibo.git
docker build -t kibo-parser .

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env bash
# Launch the kibo parser against a Bitcoin Core node.
# Usage: ./start.sh <rpcconnect> <rpcport> <rpcuser> <rpcpassword>

# Abort if the checkout is missing instead of running run.sh from the
# wrong directory (ShellCheck SC2164).
cd kibo/parser || exit 1
# Quote the positional parameters so passwords with spaces/globs survive.
./run.sh \
    --datadir=/bitcoin \
    --rpcconnect="$1" \
    --rpcport="$2" \
    --rpcuser="$3" \
    --rpcpassword="$4"
# cd ../server
# ./run.sh &

View File

@@ -1,11 +0,0 @@
# Run the kibo-parser image against a local Bitcoin Core node.
# Fix: `--env rpcport` was passed twice; the first occurrence was meant to be
# `rpcconnect=localhost` (matching the ENV names declared in the Dockerfile) —
# as written, rpcconnect was never set and rpcport was first given a hostname.
docker run \
    --env rpcuser=satoshi \
    --env rpcpassword=nakamoto \
    --env rpcconnect=localhost \
    --env rpcport=8332 \
    --volume /tmp/kibo/datasets:/kibo/datasets \
    --volume /tmp/kibo/price:/kibo/price \
    --volume /tmp/kibo/outputs:/kibo/parser/out \
    --volume $HOME/Developer/bitcoin:/bitcoin \
    --net=host \
    kibo-parser

View File

@@ -1,67 +0,0 @@
use brk_parser::bitcoincore_rpc::Client;
use log::info;
use rlimit::{Resource, getrlimit, setrlimit};
mod io;
mod parser;
mod server;
mod structs;
mod utils;
use brk_parser::Datasets;
use server::api::structs::Routes;
use structs::{Config, Exit};
use utils::init_log;
/// Entry point: raises the open-file limit, then runs the parser and/or the
/// API server (as enabled by `Config`) on a dedicated thread with an
/// enlarged stack.
fn main() -> color_eyre::Result<()> {
    color_eyre::install()?;
    init_log();
    // Parsing keeps many dataset files open at once; raise the soft NOFILE
    // limit. NOTE(review): assumes 138_240 <= hard limit — confirm on the
    // target OS, otherwise setrlimit errors out here.
    let (_, nofile_limit) = getrlimit(Resource::NOFILE).unwrap();
    setrlimit(Resource::NOFILE, 138_240, nofile_limit)?;
    // Do the real work on a spawned thread whose stack size is set to the
    // STACK hard limit — presumably the parser needs more stack than the
    // default; TODO confirm.
    std::thread::Builder::new()
        .stack_size(getrlimit(Resource::STACK).unwrap().1 as usize)
        .spawn(|| -> color_eyre::Result<()> {
            let exit = Exit::new();
            let config = Config::import()?;
            info!("Starting...");
            // RPC client and HTTP routes are built before the runtime starts,
            // from the same imported config/datasets.
            let rpc = Client::from(&config);
            let routes = Routes::build(&Datasets::import(&config)?, &config);
            tokio::runtime::Builder::new_multi_thread()
                .enable_all()
                .build()
                .unwrap()
                .block_on(async {
                    let run_parser = config.parser();
                    let run_server = config.server();
                    let config_clone = config.clone();
                    // The server runs as a background task so the parser
                    // below can proceed concurrently on this thread.
                    let handle = tokio::spawn(async move {
                        if run_server {
                            server::main(routes, config_clone).await.unwrap();
                        } else {
                            info!("Skipping server");
                        }
                    });
                    if run_parser {
                        // Blocking call; driven to completion inside block_on
                        // while the spawned server task runs on other workers.
                        parser::main(&config, &rpc, &exit)?;
                    } else {
                        info!("Skipping parser");
                    }
                    // Wait for the server task before tearing the runtime down.
                    handle.await?;
                    Ok(())
                })
        })?
        .join()
        .unwrap()
}

View File

@@ -1,63 +0,0 @@
use log::info;
use crate::{
parser::{databases::Databases, datasets::Datasets, states::States},
structs::{Config, Date, Exit, Height},
utils::time,
};
/// Bundle of everything `export` needs to flush the current state to disk.
pub struct ExportedData<'a> {
    pub config: &'a Config,
    /// `None` skips the database flush (callers pass `None` while the height
    /// is not yet considered safe).
    pub databases: Option<&'a mut Databases>,
    pub datasets: &'a mut Datasets,
    pub date: Date,
    /// When true, databases are also defragmented during the export.
    pub defragment: bool,
    /// Used to skip the export during shutdown and to block shutdown while
    /// writing.
    pub exit: Exit,
    pub height: Height,
    /// `None` skips exporting the chain states.
    pub states: Option<&'a States>,
}
/// Flushes the datasets — and, when provided, the databases and chain states —
/// to disk.
///
/// Returns early (without writing) if a shutdown is already in progress;
/// otherwise shutdown handling is blocked for the duration of the write so the
/// files on disk are never torn.
pub fn export(data: ExportedData) -> color_eyre::Result<()> {
    let ExportedData {
        config,
        databases,
        datasets,
        date,
        defragment,
        exit,
        height,
        states,
    } = data;

    // Never start a new write while a shutdown is underway.
    if exit.active() {
        info!("Exit in progress, skipping export");
        return Ok(());
    }

    // Keep the shutdown handler from interrupting the write below.
    exit.block();

    let text = match defragment {
        true => "Exporting and defragmenting...",
        false => "Exporting...",
    };
    info!("{text}");

    time("Finished export", || -> color_eyre::Result<()> {
        datasets.export(config, height)?;

        match databases {
            Some(databases) => databases.export(height, date, defragment)?,
            None => {}
        }

        match states {
            Some(states) => states.export(config)?,
            None => {}
        }

        Ok(())
    })?;

    exit.unblock();

    Ok(())
}

View File

@@ -1,228 +0,0 @@
use std::{collections::BTreeSet, time::Instant};
use brk_parser::bitcoincore_rpc::Client;
use chrono::Datelike;
use export::ExportedData;
use itertools::Itertools;
use log::info;
use parse::ParseData;
use crate::{
parser::{
actions::{export, find_first_inserted_unsafe_height, parse},
databases::Databases,
datasets::{ComputeData, Datasets},
states::{AddressCohortsDurableStates, States, UTXOCohortsDurableStates},
},
structs::{Config, DateData, DisplayInstant, Exit, Height, MapKey, Timestamp},
utils::{generate_allocation_files, time},
};
/// Streams blocks from the Bitcoin data directory and drives the pipeline:
/// parse each block, compute datasets at window boundaries, then export to
/// disk. Runs until the block iterator is exhausted.
///
/// Loop structure:
/// - `'parsing`: one iteration per export window (several days of blocks);
/// - `'days`: one iteration per calendar day;
/// - `'blocks`: one iteration per block.
pub fn iter_blocks(
    config: &Config,
    rpc: &Client,
    approx_block_count: usize,
    exit: Exit,
    databases: &mut Databases,
    datasets: &mut Datasets,
) -> color_eyre::Result<()> {
    let mut states = States::import(config).unwrap_or_default();

    info!("Imported states");

    // Where each store can safely resume from (raw inserts vs. computed data).
    let first_unsafe_heights =
        find_first_inserted_unsafe_height(&mut states, databases, datasets, config);

    let mut height = first_unsafe_heights.min();

    info!("Starting parsing at height: {height}");

    // One-block lookahead: while processing a block we already know whether
    // the following block belongs to a different date.
    let mut next_block_opt = None;
    let mut blocks_loop_date = None;
    let mut next_date_opt;

    let block_receiver = biter::new(
        &config.path_bitcoindir(),
        Some(height.to_usize()),
        None,
        Client::from(config),
    );

    let mut block_iter = block_receiver.iter();

    'parsing: loop {
        let mut processed_heights = BTreeSet::new();
        let mut processed_dates = BTreeSet::new();

        'days: loop {
            let mut blocks_loop_i = 0;

            // A new day starts: clear the date so the first block below
            // re-initializes it.
            if next_block_opt.is_some() {
                blocks_loop_date.take();
            }

            let instant = Instant::now();

            'blocks: loop {
                // Use the buffered lookahead block if present, then refill it.
                let current_block_opt = next_block_opt.take().or_else(|| block_iter.next());
                next_block_opt = block_iter.next();

                if let Some((_current_block_height, current_block, _current_block_hash)) =
                    current_block_opt
                {
                    let timestamp = Timestamp::from(current_block.header.time);

                    let current_block_date = timestamp.to_date();
                    let current_block_height: Height = height + blocks_loop_i;

                    // Sanity check: the locally tracked height must match the
                    // height reported by the iterator.
                    if current_block_height.to_usize() != _current_block_height {
                        dbg!(current_block_height, _current_block_height);
                        panic!()
                    }

                    next_date_opt = next_block_opt
                        .as_ref()
                        .map(|(_, next_block, _)| Timestamp::from(next_block.header.time).to_date());

                    // Always run for the first block of the loop
                    if blocks_loop_date.is_none() {
                        blocks_loop_date.replace(current_block_date);

                        // Open a new DateData when this date is newer than the
                        // last saved one (or the vec is empty).
                        if states
                            .date_data_vec
                            .last()
                            .map(|date_data| *date_data.date < *current_block_date)
                            .unwrap_or(true)
                        {
                            states
                                .date_data_vec
                                .push(DateData::new(current_block_date, vec![]));
                        }

                        processed_dates.insert(current_block_date);
                    }

                    let blocks_loop_date = blocks_loop_date.unwrap();

                    if current_block_date > blocks_loop_date {
                        panic!("current block should always have the same date as the current blocks loop");
                    }

                    // Last block of the day when the next block has a later
                    // date (or there is no next block at all).
                    let is_date_last_block = next_date_opt
                        // Do NOT change `blocks_loop_date` to `current_block_date` !!!
                        .map_or(true, |next_block_date| blocks_loop_date < next_block_date);

                    processed_heights.insert(current_block_height);

                    // Only parse blocks at or above the resume point.
                    if first_unsafe_heights.inserted <= current_block_height {
                        let compute_addresses =
                            databases.check_if_needs_to_compute_addresses(current_block_height, blocks_loop_date);

                        // Lazily (re)build the durable cohort states the first
                        // time they are actually needed.
                        if states.address_cohorts_durable_states.is_none()
                            && (compute_addresses
                                || datasets
                                    .address
                                    .needs_durable_states(current_block_height, current_block_date))
                        {
                            states.address_cohorts_durable_states = Some(AddressCohortsDurableStates::init(
                                &mut databases.address_index_to_address_data,
                            ));
                        }

                        if states.utxo_cohorts_durable_states.is_none()
                            && datasets
                                .utxo
                                .needs_durable_states(current_block_height, current_block_date)
                        {
                            states.utxo_cohorts_durable_states =
                                Some(UTXOCohortsDurableStates::init(&states.date_data_vec));
                        }

                        parse(ParseData {
                            block: current_block,
                            block_index: blocks_loop_i,
                            compute_addresses,
                            config,
                            databases,
                            datasets,
                            date: blocks_loop_date,
                            first_date_height: height,
                            height: current_block_height,
                            is_date_last_block,
                            rpc,
                            states: &mut states,
                        });
                    }

                    blocks_loop_i += 1;

                    if is_date_last_block {
                        info!(
                            "Processed {current_block_date} ({height} - {current_block_height}) {}",
                            instant.display()
                        );

                        height += blocks_loop_i;

                        // End the export window at month boundaries, or when
                        // close to the chain tip.
                        let is_check_point =
                            next_date_opt.as_ref().map_or(true, |date| date.is_first_of_month());

                        if (is_check_point && instant.elapsed().as_secs() >= 1)
                            || height.is_close_to_end(approx_block_count)
                        {
                            break 'days;
                        }

                        // Otherwise just move on to the next day.
                        break 'blocks;
                    }
                } else {
                    // Iterator exhausted: stop everything.
                    break 'parsing;
                }
            }
        }

        // Don't remember why -1
        let last_height = height - 1_u32;

        if first_unsafe_heights.computed <= last_height {
            info!("Computing datasets...");
            time("Computed datasets", || {
                let dates = processed_dates.into_iter().collect_vec();
                let heights = processed_heights.into_iter().collect_vec();

                datasets.compute(ComputeData {
                    dates: &dates,
                    heights: &heights,
                })
            });
        }

        if !config.dry_run() {
            let is_safe = height.is_safe(approx_block_count);

            // Defragment on selected month boundaries only: every January from
            // 2020 on, plus every July from 2022 on.
            let defragment = is_safe
                && next_date_opt.is_some_and(|date| {
                    (date.year() >= 2020 && date.is_january()
                        || date.year() >= 2022 && date.is_july())
                        && date.is_first_of_month()
                });

            export(ExportedData {
                config,
                databases: is_safe.then_some(databases),
                datasets,
                date: blocks_loop_date.unwrap(),
                defragment,
                height: last_height,
                states: is_safe.then_some(&states),
                exit: exit.clone(),
            })?;

            if config.record_ram_usage() {
                time("Exporting allocation files", || {
                    generate_allocation_files(datasets, databases, &states, last_height)
                })?;
            }
        } else {
            info!("Skipping export");
        }
    }

    Ok(())
}

View File

@@ -1,127 +0,0 @@
use log::info;
use crate::{
parser::{
databases::Databases,
datasets::{AnyDatasets, Datasets},
states::States,
},
structs::{Config, Height},
};
/// Pair of resume heights produced by `find_first_inserted_unsafe_height`:
/// the first unsafe height for raw inserted data and the first unsafe height
/// for computed datasets.
#[derive(Default, Debug)]
pub struct Heights {
    pub inserted: Height,
    pub computed: Height,
}

impl Heights {
    /// Overall resume point: parsing must restart from whichever of the two
    /// checkpoints is lower.
    pub fn min(&self) -> Height {
        if self.computed < self.inserted {
            self.computed
        } else {
            self.inserted
        }
    }
}
/// Works out the heights from which parsing must resume: `inserted` for raw
/// block data and `computed` for derived datasets.
///
/// Falls back to a full restart — resetting states and databases — whenever
/// the on-disk stores are missing, unusable, or out of sync with each other.
pub fn find_first_inserted_unsafe_height(
    states: &mut States,
    databases: &mut Databases,
    datasets: &mut Datasets,
    config: &Config,
) -> Heights {
    let min_initial_inserted_last_address_height = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_height
        .as_ref()
        .cloned();

    let min_initial_inserted_last_address_date = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_date
        .as_ref()
        .cloned();

    let usable_databases = databases.check_if_usable(
        min_initial_inserted_last_address_height,
        min_initial_inserted_last_address_date,
    );

    // The last saved date in the chain state is the anchor; every check below
    // returns None to signal "start over" via the final unwrap_or_else.
    states
        .date_data_vec
        .iter()
        .last()
        .map(|date_data| date_data.date)
        .and_then(|last_safe_date| {
            if !usable_databases {
                info!("Unusable databases");
                return None;
            }

            let datasets_min_initial_states = datasets.get_min_initial_states().to_owned();

            let min_datasets_inserted_last_height = datasets_min_initial_states.inserted.last_height;
            let min_datasets_inserted_last_date = datasets_min_initial_states.inserted.last_date;

            info!("min_datasets_inserted_last_height: {:?}", min_datasets_inserted_last_height);
            info!("min_datasets_inserted_last_date: {:?}", min_datasets_inserted_last_date);

            // The datasets must have caught up with the saved chain state,
            // otherwise resuming from that state would leave a gap.
            let inserted_last_date_is_older_than_saved_state = min_datasets_inserted_last_date
                .map_or(true, |min_datasets_last_date| min_datasets_last_date < last_safe_date);

            if inserted_last_date_is_older_than_saved_state {
                // dbg!(min_datasets_inserted_last_date , *last_safe_date);
                return None;
            }

            datasets
                .date_metadata
                .last_height
                .get_or_import(&last_safe_date)
                .and_then(|last_safe_height| {
                    // Height-indexed datasets must also be at least as far
                    // along as the saved date's last height.
                    let inserted_heights_and_dates_are_out_of_sync = min_datasets_inserted_last_height
                        .map_or(true, |min_datasets_inserted_last_height| {
                            min_datasets_inserted_last_height < last_safe_height
                        });

                    if inserted_heights_and_dates_are_out_of_sync {
                        info!("last_safe_height ({last_safe_height}) > min_datasets_height ({min_datasets_inserted_last_height:?})");

                        None
                    } else {
                        // Derive the computed-resume height from the last
                        // computed date; default (height 0) when unusable.
                        let computed = datasets_min_initial_states
                            .computed
                            .last_date
                            .and_then(|last_date| {
                                datasets
                                    .date_metadata
                                    .last_height
                                    .get_or_import(&last_date)
                                    .and_then(|last_date_height| {
                                        if datasets_min_initial_states
                                            .computed
                                            .last_height
                                            .map_or(true, |last_height| last_height < last_date_height)
                                        {
                                            None
                                        } else {
                                            Some(last_date_height + 1_u32)
                                        }
                                    })
                            })
                            .unwrap_or_default();

                        Some(Heights {
                            inserted: last_safe_height + 1_u32,
                            computed,
                        })
                    }
                })
        })
        .unwrap_or_else(|| {
            info!("Starting over...");

            // Only wipe the address data when it is itself unusable.
            let include_addresses = !usable_databases
                || min_initial_inserted_last_address_date.is_none()
                || min_initial_inserted_last_address_height.is_none();

            states.reset(config, include_addresses);
            databases.reset(include_addresses);

            Heights::default()
        })
}

View File

@@ -1,9 +0,0 @@
//! The individual steps of the parsing pipeline: export, block iteration,
//! resume-height detection, and single-block parsing.
mod export;
mod iter_blocks;
mod min_height;
mod parse;

// Flatten the submodules so callers can import the items directly.
pub use export::*;
pub use iter_blocks::*;
pub use min_height::*;
pub use parse::*;

View File

@@ -1,945 +0,0 @@
use std::{collections::BTreeMap, ops::ControlFlow, thread};
use brk_parser::{
bitcoin::{Block, Txid},
bitcoincore_rpc::RpcApi,
};
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
parser::{
databases::{
AddressIndexToAddressData, AddressIndexToEmptyAddressData, AddressToAddressIndex, Databases, TxidToTxData,
TxoutIndexToAddressIndex, TxoutIndexToAmount,
},
datasets::{Datasets, InsertData},
states::{
AddressCohortsInputStates, AddressCohortsOutputStates, AddressCohortsRealizedStates, States,
UTXOCohortsOneShotStates, UTXOCohortsSentStates,
},
},
structs::{
Address, AddressData, AddressRealizedData, Amount, BlockData, BlockPath, Config, Counter, Date,
EmptyAddressData, Height, PartialTxoutData, Price, SentData, Timestamp, TxData, TxoutIndex,
},
};
/// All inputs required by `parse` to process one block.
pub struct ParseData<'a> {
    // pub bitcoin_cli: &'a BitcoinCli,
    pub block: Block,
    /// Index of the block within its date (0-based).
    pub block_index: usize,
    pub config: &'a Config,
    /// Whether per-address tracking must be performed for this block.
    pub compute_addresses: bool,
    pub databases: &'a mut Databases,
    pub datasets: &'a mut Datasets,
    /// Date the block belongs to.
    pub date: Date,
    /// Height of the first block of this date.
    pub first_date_height: Height,
    pub height: Height,
    /// True when this is the last block of its date.
    pub is_date_last_block: bool,
    /// RPC client, used as a fallback to check whether a missing input refers
    /// to a 0-sat output.
    pub rpc: &'a biter::bitcoincore_rpc::Client,
    pub states: &'a mut States,
}
/// Processes a single block: pre-scans outputs and inputs in parallel, then
/// walks each transaction sequentially (outputs first, then inputs), updating
/// the in-RAM databases and per-address/UTXO cohort states, and finally
/// inserts everything into the datasets.
pub fn parse(
    ParseData {
        block,
        block_index,
        config,
        compute_addresses,
        databases,
        datasets,
        date,
        first_date_height,
        height,
        is_date_last_block,
        rpc,
        states,
    }: ParseData,
) {
    // log(&format!("{height}"));

    let timestamp = Timestamp::from(block.header.time);

    // If false, expect that the code is flawless
    // or create a 0 value txid database
    let enable_check_if_txout_value_is_zero_in_db: bool = true;

    let date_index = states.date_data_vec.len() - 1;

    let previous_timestamp = height
        .checked_sub(1)
        .map(Height::new)
        .and_then(|height| datasets.block_metadata.timestamp.get_or_import(&height));

    let block_price = Price::from_dollar(
        datasets
            .price
            .get_height_ohlc(height, timestamp, previous_timestamp, config)
            .unwrap_or_else(|_| panic!("Expect {height} to have a price"))
            .close as f64,
    );

    let date_price = Price::from_dollar(
        datasets
            .price
            .get_date_ohlc(date)
            .unwrap_or_else(|_| panic!("Expect {date} to have a price"))
            .close as f64,
    );

    let difficulty = block.header.difficulty_float();
    let block_size = block.total_size();
    let block_weight = block.weight().to_wu();
    let block_vbytes = block.weight().to_vbytes_floor();

    // Clamped to zero when timestamps go backwards (miner clocks can drift).
    let block_interval = previous_timestamp.map_or(Timestamp::ZERO, |previous_timestamp| {
        if previous_timestamp >= timestamp {
            Timestamp::ZERO
        } else {
            timestamp - previous_timestamp
        }
    });

    states
        .date_data_vec
        .last_mut()
        .unwrap()
        .blocks
        .push(BlockData::new(height, block_price, timestamp));

    let mut block_path_to_sent_data: BTreeMap<BlockPath, SentData> = BTreeMap::default();
    // let mut received_data: ReceivedData = ReceivedData::default();

    let mut address_index_to_address_realized_data: BTreeMap<u32, AddressRealizedData> =
        BTreeMap::default();

    let mut coinbase = Amount::ZERO;
    let mut satblocks_destroyed = Amount::ZERO;
    let mut satdays_destroyed = Amount::ZERO;
    let mut amount_sent = Amount::ZERO;
    let mut transaction_count = 0;
    let mut fees = vec![];
    // NOTE(review): fees_total is accumulated below but never read afterwards.
    let mut fees_total = Amount::ZERO;

    // Pre-scan outputs and inputs on two scoped threads before the sequential
    // per-transaction walk.
    let (
        TxoutsParsingResults {
            op_returns: _op_returns,
            mut partial_txout_data_vec,
            provably_unspendable: _provably_unspendable,
        },
        (mut txid_to_tx_data, mut txout_index_to_amount_and_address_index),
    ) = thread::scope(|scope| {
        let output_handle = scope.spawn(|| {
            let mut txouts_parsing_results = prepare_outputs(
                &block,
                compute_addresses,
                &mut states.address_counters.multisig_addresses,
                &mut states.address_counters.op_return_addresses,
                &mut states.address_counters.push_only_addresses,
                &mut states.address_counters.unknown_addresses,
                &mut states.address_counters.empty_addresses,
                &mut databases.address_to_address_index,
            );

            // Reverse to get in order via pop later
            txouts_parsing_results.partial_txout_data_vec.reverse();

            txouts_parsing_results
        });

        let input_handle = scope.spawn(|| {
            prepare_inputs(
                &block,
                &mut databases.txid_to_tx_data,
                &mut databases.txout_index_to_amount,
                &mut databases.txout_index_to_address_index,
                compute_addresses,
            )
        });

        (output_handle.join().unwrap(), input_handle.join().unwrap())
    });

    // Only populated when address tracking is on.
    let mut address_index_to_address_data = compute_addresses.then(|| {
        compute_address_index_to_address_data(
            &mut databases.address_index_to_address_data,
            &mut databases.address_index_to_empty_address_data,
            &partial_txout_data_vec,
            &txout_index_to_amount_and_address_index,
            compute_addresses,
        )
    });

    block.txdata.iter().enumerate().try_for_each(|(block_tx_index, tx)| {
        let txid = tx.compute_txid();
        let tx_index = databases.txid_to_tx_data.metadata.serial as u32;

        transaction_count += 1;

        // --
        // outputs
        // ---

        let mut utxos = BTreeMap::new();
        let mut spendable_amount = Amount::ZERO;

        let is_coinbase = tx.is_coinbase();
        // The coinbase must be (exactly) the first transaction of the block.
        if is_coinbase != (block_tx_index == 0) {
            unreachable!();
        }

        let mut inputs_sum = Amount::ZERO;
        let mut outputs_sum = Amount::ZERO;

        let last_block = states.date_data_vec.last_mut_block().unwrap();

        // Before `input` to cover outputs being used in the same block as inputs
        tx.output
            .iter()
            .enumerate()
            .filter_map(|(vout, tx_out)| {
                if vout > (u16::MAX as usize) {
                    panic!("vout can indeed be bigger than u16::MAX !");
                }

                let amount = Amount::wrap(tx_out.value);

                if is_coinbase {
                    coinbase += amount;
                } else {
                    outputs_sum += amount;
                }

                partial_txout_data_vec
                    .pop()
                    .unwrap()
                    // None if not worth parsing (empty/op_return/...)
                    .map(|partial_txout_data| (vout, partial_txout_data))
            })
            .for_each(|(vout, partial_txout_data)| {
                let vout = vout as u16;

                let txout_index = TxoutIndex::new(tx_index, vout);

                let PartialTxoutData {
                    address,
                    address_index_opt,
                    amount,
                } = partial_txout_data;

                spendable_amount += amount;
                last_block.receive(amount);

                utxos.insert(vout, amount);

                databases.txout_index_to_amount.insert_to_ram(txout_index, amount);

                if compute_addresses {
                    let address = address.unwrap();

                    let address_index_to_address_data = address_index_to_address_data.as_mut().unwrap();

                    // Resolve the address to an (AddressData, index) pair,
                    // allocating a fresh index for never-seen addresses.
                    let (address_data, address_index) = {
                        if let Some(address_index) = address_index_opt
                            .or_else(|| databases.address_to_address_index.get_from_ram(&address).cloned())
                        {
                            let address_data = address_index_to_address_data.get_mut(&address_index).unwrap();
                            (address_data, address_index)
                        } else {
                            let address_index = databases.address_to_address_index.metadata.serial as u32;

                            let address_type = address.to_type();

                            if let Some(previous) = databases.address_to_address_index.insert(address, address_index) {
                                dbg!(previous);
                                panic!("address #{address_index} shouldn't be present during put");
                            }

                            // Checked new
                            let address_data = address_index_to_address_data
                                .entry(address_index)
                                .and_modify(|_| {
                                    panic!("Shouldn't exist");
                                })
                                // Will always insert, it's to avoid insert + get
                                .or_insert(AddressData::new(address_type));

                            (address_data, address_index)
                        }
                    };

                    // MUST be before received !
                    let address_realized_data = address_index_to_address_realized_data
                        .entry(address_index)
                        .or_insert_with(|| AddressRealizedData::default(address_data));

                    address_data.receive(amount, block_price);

                    address_realized_data.receive(amount);

                    databases
                        .txout_index_to_address_index
                        .insert_to_ram(txout_index, address_index);
                }
            });

        // Only record transactions that left at least one tracked output.
        if !utxos.is_empty() {
            databases.txid_to_tx_data.insert(
                &txid,
                TxData::new(
                    tx_index,
                    BlockPath::new(date_index as u16, block_index as u16),
                    utxos.len() as u16,
                ),
            );
        }

        // ---
        // inputs
        // ---
        if !is_coinbase {
            tx.input.iter().try_for_each(|txin| {
                let outpoint = txin.previous_output;
                let input_txid = outpoint.txid;
                let input_vout = outpoint.vout;

                // The whole block below evaluates to whether the spent tx's
                // cached (not-yet-persisted) entry became empty and should be
                // dropped from RAM.
                let remove_tx_data_from_cached_puts = {
                    let mut is_tx_data_from_cached_puts = false;

                    // Prefer the batch pre-fetched in prepare_inputs, then the
                    // database's RAM cache.
                    let input_tx_data = txid_to_tx_data.get_mut(&input_txid).unwrap().as_mut().or_else(|| {
                        is_tx_data_from_cached_puts = true;
                        databases.txid_to_tx_data.get_mut_from_ram(&input_txid)
                    });

                    // Can be none because 0 sats inputs happen
                    // https://mempool.space/tx/f329e55c2de9b821356e6f2c4bba923ea7030cad61120f5ced5d4429f5c86fda#vin=27
                    if input_tx_data.is_none() {
                        if !enable_check_if_txout_value_is_zero_in_db
                            || rpc
                                .get_raw_transaction(&input_txid, None)
                                .unwrap()
                                .output
                                .get(input_vout as usize)
                                .unwrap()
                                .value
                                .to_sat()
                                == 0
                        {
                            return ControlFlow::Continue::<()>(());
                        }

                        dbg!((input_txid, txid, tx_index, input_vout));
                        panic!("Txid to be in txid_to_tx_data");
                    }

                    let input_tx_data = input_tx_data.unwrap();

                    let input_tx_index = input_tx_data.index;
                    let input_vout = input_vout as u16;
                    let input_txout_index = TxoutIndex::new(input_tx_index, input_vout);

                    // if input_tx_index == 2516 || input_tx_index == 2490 {
                    // dbg!(input_tx_index, &input_tx_data.utxos);
                    // }

                    // let input_amount = input_tx_data.utxos.remove(&input_vout);
                    let input_amount_and_address_index = databases
                        .txout_index_to_amount
                        .remove(&input_txout_index)
                        .map(|amount| {
                            (
                                amount,
                                databases.txout_index_to_address_index.remove(&input_txout_index),
                            )
                        }) // Remove from cached puts
                        .or_else(|| txout_index_to_amount_and_address_index.remove(&input_txout_index));

                    if input_amount_and_address_index.is_none() {
                        // Same 0-sat escape hatch as above.
                        if !enable_check_if_txout_value_is_zero_in_db
                            || rpc
                                .get_raw_transaction(&input_txid, None)
                                .unwrap()
                                .output
                                .get(input_vout as usize)
                                .unwrap()
                                .value
                                .to_sat()
                                == 0
                        {
                            return ControlFlow::Continue::<()>(());
                        }

                        dbg!((input_txid, tx_index, input_tx_index, input_vout, input_tx_data, txid,));
                        panic!("Txout index to be in txout_index_to_txout_value");
                    }

                    input_tx_data.utxos -= 1;

                    let (input_amount, input_address_index) = input_amount_and_address_index.unwrap();

                    let input_block_path = input_tx_data.block_path;

                    let BlockPath {
                        date_index: input_date_index,
                        block_index: input_block_index,
                    } = input_block_path;

                    let input_date_data =
                        states
                            .date_data_vec
                            .get_mut(input_date_index as usize)
                            .unwrap_or_else(|| {
                                dbg!(height, &input_txid, input_block_path, input_date_index);
                                panic!()
                            });

                    let input_block_data = input_date_data
                        .blocks
                        .get_mut(input_block_index as usize)
                        .unwrap_or_else(|| {
                            dbg!(
                                height,
                                &input_txid,
                                input_block_path,
                                input_date_index,
                                input_block_index,
                            );
                            panic!()
                        });

                    input_block_data.send(input_amount);

                    inputs_sum += input_amount;

                    block_path_to_sent_data
                        .entry(input_block_path)
                        .or_default()
                        .send(input_amount);

                    // Coin-age destruction metrics, weighted by amount.
                    satblocks_destroyed += input_amount * (height - input_block_data.height);

                    satdays_destroyed +=
                        input_amount * date.signed_duration_since(*input_date_data.date).num_days() as u64;

                    if compute_addresses {
                        let input_address_index = input_address_index.unwrap_or_else(|| {
                            dbg!(
                                height,
                                input_amount,
                                &input_tx_data,
                                input_address_index,
                                input_txout_index,
                                txid,
                                input_txid,
                                input_vout
                            );
                            panic!()
                        });

                        let address_index_to_address_data = address_index_to_address_data.as_mut().unwrap();

                        let input_address_data = address_index_to_address_data
                            .get_mut(&input_address_index)
                            .unwrap_or_else(|| {
                                dbg!(input_address_index, input_txout_index, input_txid, input_vout);
                                panic!();
                            });

                        let input_address_realized_data = address_index_to_address_realized_data
                            .entry(input_address_index)
                            .or_insert_with(|| AddressRealizedData::default(input_address_data));

                        let previous_price = input_block_data.price;

                        // MUST be after `or_insert_with`
                        input_address_data
                            .send(input_amount, previous_price)
                            .unwrap_or_else(|_| {
                                dbg!(
                                    input_address_index,
                                    txid,
                                    input_txid,
                                    input_amount,
                                    tx_index,
                                    input_tx_index,
                                    input_vout,
                                    &input_address_data
                                );
                                panic!()
                            });

                        input_address_realized_data.send(
                            input_amount,
                            block_price,
                            previous_price,
                            timestamp,
                            input_block_data.timestamp,
                        );
                    };

                    is_tx_data_from_cached_puts && input_tx_data.is_empty()
                };

                if remove_tx_data_from_cached_puts {
                    // Pre remove tx_datas that are empty and weren't yet added to the database to avoid having it was in there or not (and thus avoid useless operations)
                    databases.txid_to_tx_data.remove_from_ram(&input_txid)
                }

                ControlFlow::Continue(())
            })?;
        }

        amount_sent += inputs_sum;

        let fee = inputs_sum - outputs_sum;
        fees_total += fee;
        fees.push(fee);

        ControlFlow::Continue(())
    });

    // Every output produced by the pre-scan must have been popped above.
    if !partial_txout_data_vec.is_empty() {
        panic!("partial_txout_data_vec should've been fully consumed");
    }

    // Flush the (possibly mutated) pre-fetched tx datas back to the database.
    txid_to_tx_data.into_iter().for_each(|(txid, tx_data)| {
        if let Some(tx_data) = tx_data {
            if tx_data.is_empty() {
                databases.txid_to_tx_data.remove_later_from_disk(txid);
            } else {
                databases.txid_to_tx_data.update(txid, tx_data);
            }
        }
    });

    let mut utxo_cohorts_sent_states = UTXOCohortsSentStates::default();
    let mut utxo_cohorts_one_shot_states = UTXOCohortsOneShotStates::default();
    // let mut utxo_cohorts_received_states = UTXOCohortsReceivedStates::default();
    let mut address_cohorts_input_states = None;
    let mut address_cohorts_one_shot_states = None;
    let mut address_cohorts_output_states = None;
    let mut address_cohorts_realized_states = None;

    // log("Starting heavy work...");

    // Heavy cohort-state computations, parallelized across scoped threads.
    thread::scope(|scope| {
        scope.spawn(|| {
            let previous_last_block_data = states.date_data_vec.second_last_block();

            if datasets.utxo.needs_durable_states(height, date) {
                if let Some(previous_last_block_data) = previous_last_block_data {
                    block_path_to_sent_data.iter().for_each(|(block_path, sent_data)| {
                        let block_data = states.date_data_vec.get_block_data(block_path).unwrap();

                        if block_data.height != height {
                            states.utxo_cohorts_durable_states.as_mut().unwrap().subtract_moved(
                                block_data,
                                sent_data,
                                previous_last_block_data,
                            );
                        }
                    });
                }

                let last_block_data = states.date_data_vec.last_block().unwrap();
                if last_block_data.height != height {
                    unreachable!()
                }

                states
                    .date_data_vec
                    .iter()
                    .flat_map(|date_data| &date_data.blocks)
                    .for_each(|block_data| {
                        states
                            .utxo_cohorts_durable_states
                            .as_mut()
                            .unwrap()
                            // (sic: "udpate" is the helper's actual name)
                            .udpate_age_if_needed(block_data, last_block_data, previous_last_block_data);
                    });
            }

            if datasets.utxo.needs_one_shot_states(height, date) {
                utxo_cohorts_one_shot_states = states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .compute_one_shot_states(block_price, if is_date_last_block { Some(date_price) } else { None });
            }
        });

        // scope.spawn(|| {
        // utxo_cohorts_received_states
        // .compute(&states.date_data_vec, block_path_to_received_data);
        // });

        if datasets.utxo.needs_sent_states(height, date) {
            scope.spawn(|| {
                utxo_cohorts_sent_states.compute(
                    &states.date_data_vec,
                    &block_path_to_sent_data,
                    block_price,
                    timestamp,
                );
            });
        }

        if compute_addresses {
            scope.spawn(|| {
                let address_index_to_address_data = address_index_to_address_data.as_ref().unwrap();

                // TODO: Only compute if needed
                address_cohorts_realized_states.replace(AddressCohortsRealizedStates::default());
                // TODO: Only compute if needed
                address_cohorts_input_states.replace(AddressCohortsInputStates::default());
                // TODO: Only compute if needed
                address_cohorts_output_states.replace(AddressCohortsOutputStates::default());

                address_index_to_address_realized_data
                    .iter()
                    .for_each(|(address_index, address_realized_data)| {
                        let current_address_data = address_index_to_address_data.get(address_index).unwrap();

                        states
                            .address_cohorts_durable_states
                            .as_mut()
                            .unwrap()
                            .iterate(address_realized_data, current_address_data)
                            .unwrap_or_else(|report| {
                                dbg!(report.to_string(), address_index);
                                panic!();
                            });

                        if !address_realized_data.initial_address_data.is_empty() {
                            // Realized == previous amount
                            // If a whale sent all its sats to another address at a loss, it's the whale that realized the loss not the now empty adress
                            let liquidity_classification = address_realized_data
                                .initial_address_data
                                .compute_liquidity_classification();

                            address_cohorts_realized_states
                                .as_mut()
                                .unwrap()
                                .iterate_realized(address_realized_data, &liquidity_classification)
                                .unwrap();

                            address_cohorts_input_states
                                .as_mut()
                                .unwrap()
                                .iterate_input(address_realized_data, &liquidity_classification)
                                .unwrap();
                        }

                        address_cohorts_output_states
                            .as_mut()
                            .unwrap()
                            .iterate_output(
                                address_realized_data,
                                &current_address_data.compute_liquidity_classification(),
                            )
                            .unwrap();
                    });

                address_cohorts_one_shot_states.replace(
                    states
                        .address_cohorts_durable_states
                        .as_ref()
                        .unwrap()
                        .compute_one_shot_states(block_price, if is_date_last_block { Some(date_price) } else { None }),
                );
            });
        }
    });

    // Persist the touched address data back to the appropriate RAM cache
    // (empty vs. non-empty database).
    if compute_addresses {
        address_index_to_address_data
            .unwrap()
            .into_iter()
            .for_each(|(address_index, address_data)| {
                if address_data.is_empty() {
                    databases
                        .address_index_to_empty_address_data
                        .insert_to_ram(address_index, EmptyAddressData::from_non_empty(&address_data));
                } else {
                    databases
                        .address_index_to_address_data
                        .insert_to_ram(address_index, address_data);
                }
            })
    }

    datasets.insert(InsertData {
        address_cohorts_input_states: &address_cohorts_input_states,
        block_size,
        block_vbytes,
        block_weight,
        address_cohorts_one_shot_states: &address_cohorts_one_shot_states,
        address_cohorts_realized_states: &address_cohorts_realized_states,
        block_interval,
        block_price,
        coinbase,
        compute_addresses,
        databases,
        date,
        date_blocks_range: &(*first_date_height..=*height),
        date_first_height: first_date_height,
        difficulty,
        fees: &fees,
        height,
        is_date_last_block,
        satblocks_destroyed,
        satdays_destroyed,
        amount_sent,
        states,
        timestamp,
        transaction_count,
        utxo_cohorts_one_shot_states: &utxo_cohorts_one_shot_states,
        utxo_cohorts_sent_states: &utxo_cohorts_sent_states,
    });
}
/// Aggregates produced by the `prepare_outputs` pre-scan.
pub struct TxoutsParsingResults {
    /// One entry per output of the block, in order; `None` for outputs that
    /// aren't tracked (zero-value outputs).
    partial_txout_data_vec: Vec<Option<PartialTxoutData>>,
    /// Total amount locked in provably unspendable outputs (incl. OP_RETURN).
    provably_unspendable: Amount,
    /// Number of OP_RETURN outputs in the block.
    op_returns: usize,
}
/// First pass over every output of the block: wraps amounts, tallies
/// OP_RETURN / provably-unspendable outputs, and (when `compute_addresses`)
/// extracts each output's address, pre-opening the relevant address DBs and
/// resolving known address indexes in parallel.
#[allow(clippy::too_many_arguments)]
fn prepare_outputs(
    block: &Block,
    compute_addresses: bool,
    multisig_addresses: &mut Counter,
    op_return_addresses: &mut Counter,
    push_only_addresses: &mut Counter,
    unknown_addresses: &mut Counter,
    empty_addresses: &mut Counter,
    address_to_address_index: &mut AddressToAddressIndex,
) -> TxoutsParsingResults {
    let mut provably_unspendable = Amount::ZERO;
    let mut op_returns = 0;

    let mut partial_txout_data_vec = block
        .txdata
        .iter()
        .flat_map(|tx| &tx.output)
        .map(|txout| {
            let script = &txout.script_pubkey;
            let amount = Amount::wrap(txout.value);

            // 0 sats outputs are possible and allowed !
            // https://mempool.space/tx/2f2442f68e38b980a6c4cec21e71851b0d8a5847d85208331a27321a9967bbd6
            // https://bitcoin.stackexchange.com/questions/104937/transaction-outputs-with-value-0
            if amount == Amount::ZERO {
                return None;
            }

            // Op Return
            // https://mempool.space/tx/139c004f477101c468767983536caaeef568613fab9c2ed9237521f5ff530afd
            // Provably unspendable https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
            #[allow(deprecated)]
            if script.is_op_return() {
                // TODO: Count fee paid to write said OP_RETURN, beware of coinbase transactions
                // For coinbase transactions, count miners
                op_returns += 1;
                provably_unspendable += amount;
                // return None;
                // https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
            } else if script.is_provably_unspendable() {
                provably_unspendable += amount;
                // return None;
            }

            let address_opt = compute_addresses.then(|| {
                let address = Address::from(
                    txout,
                    multisig_addresses,
                    op_return_addresses,
                    push_only_addresses,
                    unknown_addresses,
                    empty_addresses,
                );

                // Open now so the parallel lookup below is safe.
                address_to_address_index.open_db(&address);

                address
            });

            Some(PartialTxoutData::new(address_opt, amount, None))
        })
        .collect_vec();

    if compute_addresses {
        // Parallel lookup of already-known address indexes.
        partial_txout_data_vec.par_iter_mut().for_each(|partial_tx_out_data| {
            if let Some(partial_tx_out_data) = partial_tx_out_data {
                let address_index_opt = address_to_address_index
                    .unsafe_get(partial_tx_out_data.address.as_ref().unwrap())
                    .cloned();

                partial_tx_out_data.address_index_opt = address_index_opt;
            }
        });
    }

    TxoutsParsingResults {
        partial_txout_data_vec,
        provably_unspendable,
        op_returns,
    }
}
/// Pre-fetches, for every input of the block (coinbase excluded), the spent
/// transaction's `TxData` and the spent output's amount (plus address index
/// when `compute_addresses`), using parallel database reads.
///
/// Returns the two lookup maps consumed by `parse`; entries are removed there
/// rather than here.
#[allow(clippy::type_complexity)]
fn prepare_inputs<'a>(
    block: &'a Block,
    txid_to_tx_data_db: &mut TxidToTxData,
    txout_index_to_amount_db: &mut TxoutIndexToAmount,
    txout_index_to_address_index_db: &mut TxoutIndexToAddressIndex,
    compute_addresses: bool,
) -> (
    BTreeMap<&'a Txid, Option<TxData>>,
    BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
) {
    // Collect the unique spent txids, opening each DB shard on the way.
    let mut txid_to_tx_data: BTreeMap<&Txid, Option<TxData>> = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .fold(BTreeMap::default(), |mut tree, tx_in| {
            let txid = &tx_in.previous_output.txid;
            txid_to_tx_data_db.open_db(txid);
            tree.entry(txid).or_default();
            tree
        });

    // Parallel fetch; results come back in the map's iteration order.
    let mut tx_datas = txid_to_tx_data
        .par_iter()
        .map(|(txid, _)| txid_to_tx_data_db.get(txid))
        .collect::<Vec<_>>();

    // Pop from the back while walking the map in reverse to line entries up.
    txid_to_tx_data.values_mut().rev().for_each(|tx_data_opt| {
        *tx_data_opt = tx_datas.pop().unwrap().cloned();
    });

    let txout_index_to_amount_and_address_index = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .flat_map(|tx_in| {
            let txid = &tx_in.previous_output.txid;

            if let Some(Some(tx_data)) = txid_to_tx_data.get(txid) {
                let txout_index = TxoutIndex::new(tx_data.index, tx_in.previous_output.vout as u16);

                // Open sequentially so the parallel unsafe_get below is safe.
                txout_index_to_amount_db.open_db(&txout_index);
                if compute_addresses {
                    txout_index_to_address_index_db.open_db(&txout_index);
                }

                Some(txout_index)
            } else {
                None
            }
        })
        .collect_vec()
        .into_par_iter()
        .flat_map(|txout_index| {
            txout_index_to_amount_db
                .unsafe_get(&txout_index)
                // Will be None if value of utxo is 0
                // https://mempool.space/tx/9d8a0d851c9fb2cdf1c6d9406ce97e19e6911ae3503ab2dd5f38640bacdac996
                // which is used later as input
                .map(|amount| {
                    let address_index =
                        compute_addresses.then(|| *txout_index_to_address_index_db.unsafe_get(&txout_index).unwrap());

                    (txout_index, (*amount, address_index))
                })
        })
        .collect::<BTreeMap<_, _>>();

    // No need to call remove, it's being called later in the parse function
    // To more easily support removing cached puts
    (txid_to_tx_data, txout_index_to_amount_and_address_index)
}
/// Pre-loads — and removes from their databases — the `AddressData` of every
/// address touched by this block, whether as the recipient of an output or as
/// the owner of a spent input. Returns an empty map when address tracking is
/// disabled.
fn compute_address_index_to_address_data(
    address_index_to_address_data_db: &mut AddressIndexToAddressData,
    address_index_to_empty_address_data_db: &mut AddressIndexToEmptyAddressData,
    partial_txout_data_vec: &[Option<PartialTxoutData>],
    txout_index_to_amount_and_address_index: &BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
    compute_addresses: bool,
) -> BTreeMap<u32, AddressData> {
    if !compute_addresses {
        return BTreeMap::default();
    }

    // Gather every involved address index; output addresses may live in the
    // empty-address DB, so open it too for those.
    let mut address_index_to_address_data = partial_txout_data_vec
        .iter()
        .flatten()
        .flat_map(|partial_txout_data| partial_txout_data.address_index_opt)
        .map(|address_index| (address_index, true))
        .chain(
            txout_index_to_amount_and_address_index
                .values()
                .map(|(_, address_index)| (*address_index.as_ref().unwrap(), false)), // False because we assume non zero inputs values
        )
        .map(|(address_index, open_empty)| {
            address_index_to_address_data_db.open_db(&address_index);
            if open_empty {
                address_index_to_empty_address_data_db.open_db(&address_index);
            }
            (address_index, AddressData::default())
        })
        .collect::<BTreeMap<_, _>>();

    // Fill each placeholder: check RAM caches first, then disk; every address
    // is either in the non-empty DB or in the empty DB.
    address_index_to_address_data
        .par_iter_mut()
        .for_each(|(address_index, address_data)| {
            if let Some(_address_data) = address_index_to_address_data_db.get_from_ram(address_index) {
                _address_data.clone_into(address_data);
            } else if let Some(empty_address_data) = address_index_to_empty_address_data_db.get_from_ram(address_index)
            {
                *address_data = AddressData::from_empty(empty_address_data);
            } else if let Some(_address_data) = address_index_to_address_data_db.get_from_disk(address_index) {
                _address_data.clone_into(address_data);
            } else {
                let empty_address_data = address_index_to_empty_address_data_db
                    .get_from_disk(address_index)
                    .unwrap();

                *address_data = AddressData::from_empty(empty_address_data);
            }
        });

    // Parallel unsafe_get + Linear remove = Parallel-ish take
    address_index_to_address_data
        .iter()
        .for_each(|(address_index, address_data)| {
            if address_data.is_empty() {
                address_index_to_empty_address_data_db.remove(address_index);
            } else {
                address_index_to_address_data_db.remove(address_index);
            }
        });

    address_index_to_address_data
}

View File

@@ -1,60 +0,0 @@
use std::{fs, io, path::Path};
use log::info;
use snkrj::AnyDatabase;
use crate::structs::{Config, Date, Height};
use super::Metadata;
/// Common behaviour shared by every on-disk database group: a root
/// directory, metadata tracking, and lifecycle helpers (init/reset/export).
pub trait AnyDatabaseGroup: Sized {
    /// Builds the group from its on-disk state without touching the layout.
    fn import(config: &Config) -> Self;
    /// Moves every open database out of the group as boxed trait objects.
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>>;
    /// Opens every database found on disk.
    fn open_all(&mut self);
    /// Mutable access to the group's metadata.
    fn metadata(&mut self) -> &mut Metadata;
    /// Clears the in-memory/persisted metadata.
    fn reset_metadata(&mut self);
    /// Root directory of the group.
    fn path(&self) -> &Path;
    /// Imports the group and makes sure its directory exists.
    fn init(config: &Config) -> Self {
        let group = Self::import(config);
        group.create_dir_all().unwrap();
        group
    }
    /// Persists the metadata stamped with the given chain position.
    fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        self.metadata().export(height, date)
    }
    /// Creates the group's directory (and any missing parents).
    fn create_dir_all(&self) -> color_eyre::Result<(), std::io::Error> {
        fs::create_dir_all(self.path())
    }
    /// Deletes the group's directory and everything inside it.
    fn remove_dir_all(&self) -> color_eyre::Result<(), io::Error> {
        fs::remove_dir_all(self.path())
    }
    /// Wipes the group from disk, resets its metadata and recreates an
    /// empty directory, logging which group was reset.
    fn reset(&mut self) -> color_eyre::Result<(), io::Error> {
        let name = self
            .path()
            .components()
            .next_back()
            .unwrap()
            .as_os_str()
            .to_str()
            .unwrap()
            .to_owned();
        info!("Reset {}", name);
        self.reset_metadata();
        self.remove_dir_all()?;
        self.create_dir_all()?;
        Ok(())
    }
}

View File

@@ -1,173 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use snkrj::{AnyDatabase, Database as _Database};
use crate::{
parser::states::AddressCohortsDurableStates,
structs::{AddressData, Config},
};
use super::{AnyDatabaseGroup, Metadata};
// Keyed by address index, storing the full data of a non-empty address.
type Key = u32;
type Value = AddressData;
type Database = _Database<Key, Value>;
/// Address-index → `AddressData` database group, sharded by key range
/// (`ADDRESS_INDEX_DB_MAX_SIZE` keys per shard).
#[derive(Allocative)]
pub struct AddressIndexToAddressData {
    // Root directory of the shards.
    path: PathBuf,
    pub metadata: Metadata,
    // Shard index (key / ADDRESS_INDEX_DB_MAX_SIZE) → open database.
    #[allocative(skip)]
    pub map: BTreeMap<usize, Database>,
}
// Deref to the shard map so callers can use BTreeMap methods directly.
impl Deref for AddressIndexToAddressData {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}
impl DerefMut for AddressIndexToAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
/// Number of address indexes stored per shard (also used by the
/// empty-address group so both share the same sharding scheme).
pub const ADDRESS_INDEX_DB_MAX_SIZE: usize = 250_000;
impl AddressIndexToAddressData {
    /// Inserts into the RAM layer of the shard owning `key`, opening it if needed.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }
    /// Removes `key` from its shard, opening it if needed.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }
    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    ///
    /// Panics if the shard wasn't opened beforehand.
    pub fn get_from_ram(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get_from_ram(key)
    }
    /// Disk-layer counterpart of `get_from_ram`; same open-first requirement.
    pub fn get_from_disk(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get_from_disk(key)
    }
    /// Opens (or returns the already open) shard owning `key`.
    /// Shards are named "<first_key>..<last_key>".
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        // Cloned up front so the closure below doesn't borrow `self`.
        let path = self.path().to_owned();
        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * ADDRESS_INDEX_DB_MAX_SIZE,
                (db_index + 1) * ADDRESS_INDEX_DB_MAX_SIZE
            );
            let path = path.join(db_name);
            Database::open(path).unwrap()
        })
    }
    /// Folds every address stored on disk into cohort durable states.
    /// NOTE(review): "addres" typo in the name — public API, renaming would
    /// break callers.
    pub fn compute_addres_cohorts_durable_states(&mut self) -> AddressCohortsDurableStates {
        // time("Iter through address_index_to_address_data", || {
        self.open_all();
        // MUST CLEAR MAP, otherwise some weird things are happening later in the export I think
        mem::take(&mut self.map)
            .par_iter()
            .map(|(_, database)| {
                let mut s = AddressCohortsDurableStates::default();
                database
                    .iter_disk()
                    .map(|r| r.unwrap().1)
                    .for_each(|address_data| s.increment(address_data).unwrap());
                s
            })
            .sum()
        // })
    }
    // Which shard a key belongs to.
    fn db_index(key: &Key) -> usize {
        *key as usize / ADDRESS_INDEX_DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToAddressData {
    fn import(config: &Config) -> Self {
        let path = config
            .path_databases()
            .join("address_index_to_address_data");
        let metadata = Metadata::import(&path, 1);
        Self {
            path,
            metadata,
            map: BTreeMap::default(),
        }
    }
    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }
    fn open_all(&mut self) {
        // Shards are named "<first_key>..<last_key>"; re-open each one found
        // on disk by parsing the first key out of its file name.
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        let file_names = entries
            .map(|entry| {
                let entry = entry.unwrap();
                entry.path().file_name().unwrap().to_str().unwrap().to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .collect_vec();
        for file_name in file_names {
            let first_key = file_name.split("..").next().unwrap().parse::<u32>().unwrap();
            self.open_db(&first_key);
        }
    }
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|database| Box::new(database) as Box<dyn AnyDatabase + Send>)
            .collect_vec()
    }
    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }
    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,151 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, EmptyAddressData};
use super::{AnyDatabaseGroup, Metadata, ADDRESS_INDEX_DB_MAX_SIZE};
// Keyed by address index, storing the compact data of an emptied address.
type Key = u32;
type Value = EmptyAddressData;
type Database = _Database<Key, Value>;
/// Address-index → `EmptyAddressData` database group, sharded with the same
/// key ranges as `AddressIndexToAddressData`.
#[derive(Allocative)]
pub struct AddressIndexToEmptyAddressData {
    // Root directory of the shards.
    path: PathBuf,
    pub metadata: Metadata,
    // Shard index (key / ADDRESS_INDEX_DB_MAX_SIZE) → open database.
    #[allocative(skip)]
    map: BTreeMap<usize, Database>,
}
// Deref to the shard map so callers can use BTreeMap methods directly.
impl Deref for AddressIndexToEmptyAddressData {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}
impl DerefMut for AddressIndexToEmptyAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
impl AddressIndexToEmptyAddressData {
    /// Inserts into the RAM layer of the shard owning `key`, opening it if needed.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }
    /// Removes `key` from its shard, opening it if needed.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }
    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn get_from_ram(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).and_then(|db| db.get_from_ram(key))
    }
    /// Reads `key` from the disk layer of its shard.
    ///
    /// # Panics
    ///
    /// Panics (with the shard index, key and open shards) if the shard wasn't
    /// opened beforehand via `open_db`. This used to be a leftover `dbg!`
    /// followed by a bare `panic!()`; the context now lives in the message.
    pub fn get_from_disk(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index)
            .unwrap_or_else(|| {
                panic!(
                    "empty address database {} isn't open for key {} (open: {:?})",
                    db_index,
                    key,
                    self.map.keys()
                )
            })
            .get_from_disk(key)
    }
    /// Opens (or returns the already open) shard owning `key`.
    /// Shards are named "<first_key>..<last_key>".
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        // Cloned up front so the closure below doesn't borrow `self`.
        let path = self.path.to_owned();
        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * ADDRESS_INDEX_DB_MAX_SIZE,
                (db_index + 1) * ADDRESS_INDEX_DB_MAX_SIZE
            );
            let path = path.join(db_name);
            Database::open(path).unwrap()
        })
    }
    // Which shard a key belongs to.
    fn db_index(key: &Key) -> usize {
        *key as usize / ADDRESS_INDEX_DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToEmptyAddressData {
    fn import(config: &Config) -> Self {
        let path = config
            .path_databases()
            .join("address_index_to_empty_address_data");
        let metadata = Metadata::import(&path, 1);
        Self {
            path,
            metadata,
            map: BTreeMap::default(),
        }
    }
    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }
    fn open_all(&mut self) {
        // Shards are named "<first_key>..<last_key>"; re-open each one found
        // on disk by parsing the first key out of its file name.
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        let file_names = entries
            .map(|entry| {
                let entry = entry.unwrap();
                entry.path().file_name().unwrap().to_str().unwrap().to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .collect_vec();
        for file_name in file_names {
            let first_key = file_name.split("..").next().unwrap().parse::<u32>().unwrap();
            self.open_db(&first_key);
        }
    }
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|database| Box::new(database) as Box<dyn AnyDatabase + Send>)
            .collect_vec()
    }
    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }
    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,445 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database};
use crate::structs::{Address, Config, U8x19, U8x31};
use super::{AnyDatabaseGroup, Metadata};
// Every variant maps an address key to its assigned address index.
type Value = u32;
type U8x19Database = Database<U8x19, Value>;
type U8x31Database = Database<U8x31, Value>;
type U32Database = Database<u32, Value>;
// Prefixed address types: keyed by the address bytes minus a 2-byte prefix,
// which selects the shard instead.
type P2PKDatabase = U8x19Database;
type P2PKHDatabase = U8x19Database;
type P2SHDatabase = U8x19Database;
type P2WPKHDatabase = U8x19Database;
type P2WSHDatabase = U8x31Database;
type P2TRDatabase = U8x31Database;
// Script types without a usable address: keyed by a plain u32.
type UnknownDatabase = U32Database;
type OpReturnDatabase = U32Database;
type PushOnlyDatabase = U32Database;
type EmptyDatabase = U32Database;
type MultisigDatabase = U32Database;
/// Address → address-index database group. Prefixed address types are sharded
/// per 2-byte prefix; the remaining script types each get one database,
/// opened lazily (hence the `Option`s).
#[derive(Allocative)]
pub struct AddressToAddressIndex {
    path: PathBuf,
    pub metadata: Metadata,
    #[allocative(skip)]
    p2pk: BTreeMap<u16, P2PKDatabase>,
    #[allocative(skip)]
    p2pkh: BTreeMap<u16, P2PKHDatabase>,
    #[allocative(skip)]
    p2sh: BTreeMap<u16, P2SHDatabase>,
    #[allocative(skip)]
    p2wpkh: BTreeMap<u16, P2WPKHDatabase>,
    #[allocative(skip)]
    p2wsh: BTreeMap<u16, P2WSHDatabase>,
    #[allocative(skip)]
    p2tr: BTreeMap<u16, P2TRDatabase>,
    #[allocative(skip)]
    op_return: Option<OpReturnDatabase>,
    #[allocative(skip)]
    push_only: Option<PushOnlyDatabase>,
    #[allocative(skip)]
    unknown: Option<UnknownDatabase>,
    #[allocative(skip)]
    empty: Option<EmptyDatabase>,
    #[allocative(skip)]
    multisig: Option<MultisigDatabase>,
}
impl AddressToAddressIndex {
pub fn open_db(&mut self, address: &Address) {
match address {
Address::Empty(_) => {
self.open_empty();
}
Address::Unknown(_) => {
self.open_unknown();
}
Address::OpReturn(_) => {
self.open_op_return();
}
Address::PushOnly(_) => {
self.open_push_only();
}
Address::MultiSig(_) => {
self.open_multisig();
}
Address::P2PK((prefix, _)) => {
self.open_p2pk(*prefix);
}
Address::P2PKH((prefix, _)) => {
self.open_p2pkh(*prefix);
}
Address::P2SH((prefix, _)) => {
self.open_p2sh(*prefix);
}
Address::P2WPKH((prefix, _)) => {
self.open_p2wpkh(*prefix);
}
Address::P2WSH((prefix, _)) => {
self.open_p2wsh(*prefix);
}
Address::P2TR((prefix, _)) => {
self.open_p2tr(*prefix);
}
}
}
/// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
/// Though it makes it easy to use with rayon
pub fn unsafe_get(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get(key),
}
}
pub fn get_from_ram(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get_from_ram(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get_from_ram(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get_from_ram(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get_from_ram(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get_from_ram(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get_from_ram(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get_from_ram(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get_from_ram(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get_from_ram(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get_from_ram(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get_from_ram(key),
}
}
pub fn insert(&mut self, address: Address, value: Value) -> Option<Value> {
self.metadata.called_insert();
match address {
Address::Empty(key) => self.open_empty().insert(key, value),
Address::Unknown(key) => self.open_unknown().insert(key, value),
Address::OpReturn(key) => self.open_op_return().insert(key, value),
Address::PushOnly(key) => self.open_push_only().insert(key, value),
Address::MultiSig(key) => self.open_multisig().insert(key, value),
Address::P2PK((prefix, rest)) => self.open_p2pk(prefix).insert(rest, value),
Address::P2PKH((prefix, rest)) => self.open_p2pkh(prefix).insert(rest, value),
Address::P2SH((prefix, rest)) => self.open_p2sh(prefix).insert(rest, value),
Address::P2WPKH((prefix, rest)) => self.open_p2wpkh(prefix).insert(rest, value),
Address::P2WSH((prefix, rest)) => self.open_p2wsh(prefix).insert(rest, value),
Address::P2TR((prefix, rest)) => self.open_p2tr(prefix).insert(rest, value),
}
}
fn path_to_group_prefixes(path: &Path) -> Vec<u16> {
let folder = fs::read_dir(path);
if folder.is_err() {
return vec![];
}
folder
.unwrap()
.map(|entry| {
entry
.unwrap()
.path()
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_owned()
.parse::<u16>()
.unwrap()
})
.collect_vec()
}
fn path_p2pk(&self) -> PathBuf {
self.path().join("p2pk")
}
pub fn open_p2pk(&mut self, prefix: u16) -> &mut P2PKDatabase {
let path = self.path_p2pk();
self.p2pk.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2pk(&mut self) {
let path = self.path_p2pk();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2pk.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2pkh(&self) -> PathBuf {
self.path().join("p2pkh")
}
pub fn open_p2pkh(&mut self, prefix: u16) -> &mut P2PKHDatabase {
let path = self.path_p2pkh();
self.p2pkh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2pkh(&mut self) {
let path = self.path_p2pkh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2pkh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2sh(&self) -> PathBuf {
self.path().join("p2sh")
}
pub fn open_p2sh(&mut self, prefix: u16) -> &mut P2SHDatabase {
let path = self.path_p2sh();
self.p2sh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2sh(&mut self) {
let path = self.path_p2sh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2sh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2wpkh(&self) -> PathBuf {
self.path().join("p2wpkh")
}
pub fn open_p2wpkh(&mut self, prefix: u16) -> &mut P2WPKHDatabase {
let path = self.path_p2wpkh();
self.p2wpkh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2wpkh(&mut self) {
let path = self.path_p2wpkh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2wpkh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2wsh(&self) -> PathBuf {
self.path().join("p2wsh")
}
pub fn open_p2wsh(&mut self, prefix: u16) -> &mut P2WSHDatabase {
let path = self.path_p2wsh();
self.p2wsh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2wsh(&mut self) {
let path = self.path_p2wsh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2wsh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2tr(&self) -> PathBuf {
self.path().join("p2tr")
}
pub fn open_p2tr(&mut self, prefix: u16) -> &mut P2TRDatabase {
let path = self.path_p2tr();
self.p2tr.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2tr(&mut self) {
let path = self.path_p2tr();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2tr.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
pub fn open_unknown(&mut self) -> &mut UnknownDatabase {
self.unknown
.get_or_insert_with(|| Database::open(self.path.join("unknown")).unwrap())
}
pub fn open_op_return(&mut self) -> &mut UnknownDatabase {
self.op_return
.get_or_insert_with(|| Database::open(self.path.join("op_return")).unwrap())
}
pub fn open_push_only(&mut self) -> &mut UnknownDatabase {
self.push_only
.get_or_insert_with(|| Database::open(self.path.join("push_only")).unwrap())
}
pub fn open_empty(&mut self) -> &mut UnknownDatabase {
self.empty
.get_or_insert_with(|| Database::open(self.path.join("empty")).unwrap())
}
pub fn open_multisig(&mut self) -> &mut MultisigDatabase {
self.multisig
.get_or_insert_with(|| Database::open(self.path.join("multisig")).unwrap())
}
}
impl AnyDatabaseGroup for AddressToAddressIndex {
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("address_to_address_index");
        Self {
            metadata: Metadata::import(&path, 1),
            path,
            p2pk: BTreeMap::default(),
            p2pkh: BTreeMap::default(),
            p2sh: BTreeMap::default(),
            p2wpkh: BTreeMap::default(),
            p2wsh: BTreeMap::default(),
            p2tr: BTreeMap::default(),
            op_return: None,
            push_only: None,
            unknown: None,
            empty: None,
            multisig: None,
        }
    }
    /// Creates one sub-directory per prefixed address type.
    ///
    /// The first five calls used to be `unwrap`ed despite the method returning
    /// a `Result`; errors are now propagated to the caller instead of panicking.
    fn create_dir_all(&self) -> color_eyre::Result<(), std::io::Error> {
        fs::create_dir_all(self.path_p2pk())?;
        fs::create_dir_all(self.path_p2pkh())?;
        fs::create_dir_all(self.path_p2sh())?;
        fs::create_dir_all(self.path_p2wpkh())?;
        fs::create_dir_all(self.path_p2wsh())?;
        fs::create_dir_all(self.path_p2tr())
    }
    fn reset_metadata(&mut self) {
        self.metadata.reset()
    }
    /// Moves every open database (sharded and single-file) out of `self`.
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        mem::take(&mut self.p2pk)
            .into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .chain(
                mem::take(&mut self.p2pkh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2sh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2wpkh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2wsh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2tr)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                [
                    self.unknown.take(),
                    self.op_return.take(),
                    self.push_only.take(),
                    self.empty.take(),
                    self.multisig.take(),
                ]
                .into_iter()
                .flatten()
                .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .collect_vec()
    }
    fn open_all(&mut self) {
        // NOTE(review): only the prefixed databases are re-opened here; the
        // single-file ones (unknown, op_return, push_only, empty, multisig)
        // are not — confirm this asymmetry is intentional.
        self.open_all_p2pk();
        self.open_all_p2pkh();
        self.open_all_p2wpkh();
        self.open_all_p2wsh();
        self.open_all_p2sh();
        self.open_all_p2tr();
    }
    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }
    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,127 +0,0 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use color_eyre::eyre::eyre;
use serde::{Deserialize, Serialize};
use std::{
fmt::Debug,
fs, io,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use crate::{
io::Serialization,
structs::{Counter, Date, Height},
};
/// A database group's metadata plus the folder it is persisted in.
#[derive(Default, Debug, Encode, Decode, Allocative)]
pub struct Metadata {
    // Folder whose "metadata" file stores `data`.
    path: PathBuf,
    data: MetadataData,
}
// Deref to the inner data so fields like `last_height` are directly reachable.
impl Deref for Metadata {
    type Target = MetadataData;
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
impl DerefMut for Metadata {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.data
    }
}
impl Metadata {
    /// Loads the metadata stored under `path`, remembering the folder for
    /// later exports.
    pub fn import(path: &Path, version: u16) -> Self {
        let data = MetadataData::import(path, version);
        Self {
            path: path.to_owned(),
            data,
        }
    }
    /// Advances `last_height`/`last_date` (monotonically) and persists.
    pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        if height > self.last_height.unwrap_or_default() {
            self.last_height = Some(height);
        }
        if date > self.last_date.unwrap_or_default() {
            self.last_date = Some(date);
        }
        self.data.export(&self.path)
    }
    /// Clears the metadata in memory and best-effort deletes it from disk.
    pub fn reset(&mut self) {
        let _ = self.data.reset(&self.path);
    }
    /// Bookkeeping hook for every insert into the owning database group.
    pub fn called_insert(&mut self) {
        self.serial += 1;
        self.len.increment();
    }
    /// Bookkeeping hook for every removal from the owning database group.
    pub fn called_remove(&mut self) {
        self.len.decrement();
    }
    /// True when both groups stopped at the same chain position.
    pub fn check_if_in_sync(&self, other: &Self) -> bool {
        self.last_height == other.last_height && self.last_date == other.last_date
    }
    /// True when `self` is at or beyond `other`'s chain position.
    /// ("farer" typo is part of the public name, kept for compatibility.)
    pub fn check_farer_or_in_sync(&self, other: &Self) -> bool {
        self.last_date >= other.last_date && self.last_height >= other.last_height
    }
}
/// Serialized metadata payload of a database group.
#[derive(Default, Debug, Encode, Decode, Serialize, Deserialize, Allocative)]
pub struct MetadataData {
    // Format version; a mismatch on import discards the stored data.
    version: u16,
    // Incremented on every insert.
    pub serial: usize,
    // Live entry count (inserts minus removals).
    pub len: Counter,
    // Last chain position exported, if any.
    pub last_height: Option<Height>,
    pub last_date: Option<Date>,
}
impl MetadataData {
    /// Location of the serialized metadata inside a group's folder.
    fn full_path(folder_path: &Path) -> PathBuf {
        folder_path.join("metadata")
    }
    /// Imports the metadata from `path`, falling back to a fresh default
    /// (stamped with `version`) when missing, unreadable or version-mismatched.
    pub fn import(path: &Path, version: u16) -> Self {
        let mut s = Self::_import(path, version).unwrap_or_default();
        s.version = version;
        s
    }
    fn _import(path: &Path, version: u16) -> color_eyre::Result<Self> {
        fs::create_dir_all(path)?;
        let s: MetadataData = Serialization::Binary.import(&Self::full_path(path))?;
        if s.version != version {
            return Err(eyre!("Bad version"));
        }
        Ok(s)
    }
    /// Serializes the metadata to its file inside `path`.
    pub fn export(&self, path: &Path) -> color_eyre::Result<()> {
        // Dropped a redundant `Path::new(&…)`: `&PathBuf` already coerces to `&Path`.
        Serialization::Binary.export(&Self::full_path(path), self)
    }
    /// Clears the in-memory data and removes the on-disk file.
    pub fn reset(&mut self, path: &Path) -> color_eyre::Result<(), io::Error> {
        self.clear();
        fs::remove_file(Self::full_path(path))
    }
    // Resets every field except `version`.
    fn clear(&mut self) {
        self.serial = 0;
        self.len.reset();
        self.last_height = None;
        self.last_date = None;
    }
}

View File

@@ -1,175 +0,0 @@
use allocative::Allocative;
mod _trait;
mod address_index_to_address_data;
mod address_index_to_empty_address_data;
mod address_to_address_index;
mod metadata;
mod txid_to_tx_data;
mod txout_index_to_address_index;
mod txout_index_to_amount;
use _trait::*;
pub use address_index_to_address_data::*;
pub use address_index_to_empty_address_data::*;
pub use address_to_address_index::*;
use itertools::Itertools;
use log::info;
use metadata::*;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use snkrj::AnyDatabase;
pub use txid_to_tx_data::*;
pub use txout_index_to_address_index::*;
pub use txout_index_to_amount::*;
use crate::structs::{Config, Date, Height};
/// Every on-disk database group used while parsing blocks, owned together so
/// they can be imported, exported, drained and reset as one unit.
#[derive(Allocative)]
pub struct Databases {
    pub address_index_to_address_data: AddressIndexToAddressData,
    pub address_index_to_empty_address_data: AddressIndexToEmptyAddressData,
    pub address_to_address_index: AddressToAddressIndex,
    pub txid_to_tx_data: TxidToTxData,
    pub txout_index_to_address_index: TxoutIndexToAddressIndex,
    pub txout_index_to_amount: TxoutIndexToAmount,
}
impl Databases {
    /// Initializes every database group (creating its directory if missing).
    pub fn import(config: &Config) -> Self {
        let address_index_to_address_data = AddressIndexToAddressData::init(config);
        let address_index_to_empty_address_data = AddressIndexToEmptyAddressData::init(config);
        let address_to_address_index = AddressToAddressIndex::init(config);
        let txid_to_tx_data = TxidToTxData::init(config);
        let txout_index_to_address_index = TxoutIndexToAddressIndex::init(config);
        let txout_index_to_amount = TxoutIndexToAmount::init(config);
        info!("Imported databases");
        Self {
            address_index_to_address_data,
            address_index_to_empty_address_data,
            address_to_address_index,
            txid_to_tx_data,
            txout_index_to_address_index,
            txout_index_to_amount,
        }
    }
    /// Moves every open database out of every group, as boxed trait objects
    /// ready for parallel export.
    pub fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        self.txid_to_tx_data
            .drain_to_vec()
            .into_iter()
            .chain(self.txout_index_to_amount.drain_to_vec())
            .chain(self.address_to_address_index.drain_to_vec())
            .chain(self.address_index_to_address_data.drain_to_vec())
            .chain(self.address_index_to_empty_address_data.drain_to_vec())
            .chain(self.txout_index_to_address_index.drain_to_vec())
            .collect_vec()
    }
    // Stamps and persists each group's metadata for the given chain position.
    fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        self.txid_to_tx_data.export_metadata(height, date)?;
        self.txout_index_to_amount.export_metadata(height, date)?;
        self.address_index_to_address_data
            .export_metadata(height, date)?;
        self.address_index_to_empty_address_data
            .export_metadata(height, date)?;
        self.address_to_address_index
            .export_metadata(height, date)?;
        self.txout_index_to_address_index
            .export_metadata(height, date)?;
        Ok(())
    }
    /// Persists metadata, then exports every drained database in parallel.
    pub fn export(
        &mut self,
        height: Height,
        date: Date,
        defragment: bool,
    ) -> color_eyre::Result<()> {
        self.export_metadata(height, date)?;
        self.drain_to_vec()
            .into_par_iter()
            .try_for_each(|s| AnyDatabase::boxed_export(s, defragment))?;
        Ok(())
    }
    /// Wipes the transaction-related groups, and the address-related ones too
    /// when `include_addresses` is set. Failures are deliberately ignored
    /// (best-effort cleanup).
    pub fn reset(&mut self, include_addresses: bool) {
        if include_addresses {
            let _ = self.address_index_to_address_data.reset();
            let _ = self.address_index_to_empty_address_data.reset();
            let _ = self.address_to_address_index.reset();
            let _ = self.txout_index_to_address_index.reset();
        }
        let _ = self.txid_to_tx_data.reset();
        let _ = self.txout_index_to_amount.reset();
    }
    /// True when the address databases lag behind the given chain position
    /// (a missing last height/date counts as lagging).
    pub fn check_if_needs_to_compute_addresses(&self, height: Height, date: Date) -> bool {
        let check_height = |last_height: Option<Height>| {
            last_height.map_or(true, |last_height| last_height < height)
        };
        let check_date =
            |last_date: Option<Date>| last_date.map_or(true, |last_date| last_date < date);
        let check_metadata = |metadata: &Metadata| {
            check_height(metadata.last_height) || check_date(metadata.last_date)
        };
        // We only need to check one as we previously checked that they're all in sync
        check_metadata(&self.address_to_address_index.metadata)
    }
    /// Verifies the groups are mutually consistent: tx databases in sync with
    /// each other, address databases in sync with each other and at or beyond
    /// the tx databases, and not ahead of the caller's last address position.
    pub fn check_if_usable(
        &self,
        last_address_height: Option<Height>,
        last_address_date: Option<Date>,
    ) -> bool {
        let are_tx_databases_in_sync = self
            .txout_index_to_amount
            .metadata
            .check_if_in_sync(&self.txid_to_tx_data.metadata);
        if !are_tx_databases_in_sync {
            return false;
        }
        let are_address_databases_in_sync = self
            .address_to_address_index
            .metadata
            .check_if_in_sync(&self.address_index_to_empty_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.address_index_to_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.txout_index_to_address_index.metadata);
        if !are_address_databases_in_sync {
            return false;
        }
        let are_address_databases_farer_or_in_sync_with_tx_database = self
            .address_to_address_index
            .metadata
            .check_farer_or_in_sync(&self.txid_to_tx_data.metadata);
        if !are_address_databases_farer_or_in_sync_with_tx_database {
            return false;
        }
        last_address_height >= self.address_to_address_index.metadata.last_height
            && last_address_date >= self.address_to_address_index.metadata.last_date
    }
}

View File

@@ -1,156 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
path::{Path, PathBuf},
};
use allocative::Allocative;
use brk_parser::bitcoin::Txid;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, TxData, U8x31};
use super::{AnyDatabaseGroup, Metadata};
// Keyed by the last 31 bytes of a txid (the first byte is folded into the
// shard index instead — see `db_index`/`txid_to_key`).
type Key = U8x31;
type Value = TxData;
type Database = _Database<Key, Value>;
/// Txid → `TxData` database group, sharded by the leading bits of the txid.
#[derive(Allocative)]
pub struct TxidToTxData {
    // Root directory of the shards.
    path: PathBuf,
    pub metadata: Metadata,
    // Shard index (13 bits of the txid) → open database.
    #[allocative(skip)]
    map: BTreeMap<u16, Database>,
}
impl TxidToTxData {
    /// Inserts the tx data into the shard owning `txid`, opening it if needed.
    pub fn insert(&mut self, txid: &Txid, tx_index: Value) -> Option<Value> {
        self.metadata.called_insert();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).insert(txid_key, tx_index)
    }
    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
    /// Though it makes it easy to use with rayon
    ///
    /// Panics if the shard wasn't opened beforehand.
    pub fn get(&self, txid: &Txid) -> Option<&Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);
        self.map.get(&db_index).unwrap().get(&txid_key)
    }
    /// RAM-layer mutable lookup; same open-first requirement as `get`.
    pub fn get_mut_from_ram(&mut self, txid: &Txid) -> Option<&mut Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);
        self.map.get_mut(&db_index).unwrap().get_mut_from_ram(&txid_key)
    }
    /// Schedules the entry for removal on the next disk export.
    pub fn remove_later_from_disk(&mut self, txid: &Txid) {
        self.metadata.called_remove();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).remove_later_from_disk(&txid_key);
    }
    /// Removes the entry from the RAM layer only.
    pub fn remove_from_ram(&mut self, txid: &Txid) {
        self.metadata.called_remove();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).remove_from_ram(&txid_key);
    }
    /// Overwrites the entry (no metadata counter change).
    pub fn update(&mut self, txid: &Txid, tx_data: TxData) {
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).update(txid_key, tx_data);
    }
    /// Opens (or returns the already open) shard owning `txid`.
    #[inline(always)]
    pub fn open_db(&mut self, txid: &Txid) -> &mut Database {
        let db_index = Self::db_index(txid);
        self._open_db(db_index)
    }
    #[inline(always)]
    fn _open_db(&mut self, db_index: u16) -> &mut Database {
        // Cloned up front so the closure below doesn't borrow `self`.
        let path = self.path.to_owned();
        self.map.entry(db_index).or_insert_with(|| {
            let path = path.join(db_index.to_string());
            Database::open(path).unwrap()
        })
    }
    // Key inside a shard: bytes 1..32 of the txid. Dropping byte 0 is
    // lossless because `db_index` encodes byte 0 in full.
    fn txid_to_key(txid: &Txid) -> U8x31 {
        U8x31::from(&txid[1..])
    }
    // Shard index: byte 0 plus the top 5 bits of byte 1 — 13 bits, i.e. up
    // to 8192 shards.
    fn db_index(txid: &Txid) -> u16 {
        ((txid[0] as u16) << 5) + ((txid[1] as u16) >> 3)
    }
}
impl AnyDatabaseGroup for TxidToTxData {
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txid_to_tx_data");
        Self {
            metadata: Metadata::import(&path, 2),
            path,
            map: BTreeMap::default(),
        }
    }
    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }
    fn open_all(&mut self) {
        // Re-open every shard whose file name parses as a shard index.
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        let db_indexes = entries
            .filter_map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .parse::<u16>()
                    .ok()
            })
            .collect_vec();
        for db_index in db_indexes {
            self._open_db(db_index);
        }
    }
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|database| Box::new(database) as Box<dyn AnyDatabase + Send>)
            .collect_vec()
    }
    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }
    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,148 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata};
type Key = TxoutIndex;
type Value = u32;
type Database = _Database<Key, Value>;
#[derive(Allocative)]
pub struct TxoutIndexToAddressIndex {
path: PathBuf,
pub metadata: Metadata,
#[allocative(skip)]
map: BTreeMap<usize, Database>,
}
impl Deref for TxoutIndexToAddressIndex {
type Target = BTreeMap<usize, Database>;
fn deref(&self) -> &Self::Target {
&self.map
}
}
// Mutable access delegates straight to the inner shard map.
impl DerefMut for TxoutIndexToAddressIndex {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.map
}
}
// Width of one shard: each shard covers a contiguous range of this many
// txout indexes (shard directories are named `lower..upper`).
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAddressIndex {
    /// Buffers `key -> value` into the RAM layer of the owning shard and
    /// records the insert in the metadata counters.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        let db = self.open_db(&key);
        db.insert_to_ram(key, value)
    }

    /// Removes `key` from its shard, recording the removal in the metadata.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        let db = self.open_db(key);
        db.remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        let db = self.get(&db_index).unwrap();
        db.get(key)
    }

    /// Returns the shard owning `key`, opening it on first use. Shard
    /// directories are named after the covered key range, e.g. `0..10000000000`.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        let base = self.path.clone();

        self.entry(db_index).or_insert_with(move || {
            let lower = db_index * DB_MAX_SIZE;
            let upper = lower + DB_MAX_SIZE;
            Database::open(base.join(format!("{}..{}", lower, upper))).unwrap()
        })
    }

    /// Which `DB_MAX_SIZE`-wide bucket the key's u64 value falls into.
    fn db_index(key: &Key) -> usize {
        let raw = key.as_u64() as usize;
        raw / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAddressIndex {
    /// Creates the group rooted at `<databases>/txout_index_to_address_index`
    /// with its metadata (version 1) and no shard opened yet.
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txout_index_to_address_index");
        let metadata = Metadata::import(&path, 1);

        Self {
            metadata,
            path,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Re-opens every shard found on disk, recognizing shards by their
    /// `lower..upper` directory name and deriving the key from the lower bound.
    fn open_all(&mut self) {
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };

        for entry in entries {
            let file_name = entry
                .unwrap()
                .path()
                .file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .to_owned();

            if !file_name.contains("..") {
                continue;
            }

            let lower = file_name.split("..").next().unwrap().parse::<u64>().unwrap();
            self.open_db(&lower.into());
        }
    }

    /// Empties the shard map, handing every open shard out as a boxed
    /// `AnyDatabase`.
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .collect()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,148 +0,0 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Amount, Config, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata};
// Shard group mapping each transaction output (`TxoutIndex`) to its `Amount`.
type Key = TxoutIndex;
type Value = Amount;
type Database = _Database<Key, Value>;
#[derive(Allocative)]
pub struct TxoutIndexToAmount {
// Root directory under which every shard of this group lives.
path: PathBuf,
pub metadata: Metadata,
// Open shards keyed by shard index (see `db_index`); excluded from
// allocative memory accounting.
#[allocative(skip)]
map: BTreeMap<usize, Database>,
}
// Read access delegates straight to the inner shard map.
impl Deref for TxoutIndexToAmount {
type Target = BTreeMap<usize, Database>;
fn deref(&self) -> &Self::Target {
&self.map
}
}
// Mutable access delegates straight to the inner shard map.
impl DerefMut for TxoutIndexToAmount {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.map
}
}
// Width of one shard: each shard covers a contiguous range of this many
// txout indexes (shard directories are named `lower..upper`).
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAmount {
    /// Buffers `key -> value` into the RAM layer of the owning shard and
    /// records the insert in the metadata counters.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        let db = self.open_db(&key);
        db.insert_to_ram(key, value)
    }

    /// Removes `key` from its shard, recording the removal in the metadata.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        let db = self.open_db(key);
        db.remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        let db = self.get(&db_index).unwrap();
        db.get(key)
    }

    /// Returns the shard owning `key`, opening it on first use. Shard
    /// directories are named after the covered key range, e.g. `0..10000000000`.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        let base = self.path.clone();

        self.entry(db_index).or_insert_with(move || {
            let lower = db_index * DB_MAX_SIZE;
            let upper = lower + DB_MAX_SIZE;
            Database::open(base.join(format!("{}..{}", lower, upper))).unwrap()
        })
    }

    /// Which `DB_MAX_SIZE`-wide bucket the key's u64 value falls into.
    fn db_index(key: &Key) -> usize {
        let raw = key.as_u64() as usize;
        raw / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAmount {
    /// Creates the group rooted at `<databases>/txout_index_to_amount`
    /// with its metadata (version 1) and no shard opened yet.
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txout_index_to_amount");
        let metadata = Metadata::import(&path, 1);

        Self {
            metadata,
            path,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Re-opens every shard found on disk, recognizing shards by their
    /// `lower..upper` directory name and deriving the key from the lower bound.
    fn open_all(&mut self) {
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };

        for entry in entries {
            let file_name = entry
                .unwrap()
                .path()
                .file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .to_owned();

            if !file_name.contains("..") {
                continue;
            }

            let lower = file_name.split("..").next().unwrap().parse::<u64>().unwrap();
            self.open_db(&lower.into());
        }
    }

    /// Empties the shard map, handing every open shard out as a boxed
    /// `AnyDatabase`.
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .collect()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -1,887 +0,0 @@
use itertools::Itertools;
use rayon::prelude::*;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{
cohort_metadata::AddressCohortMetadataDataset, ComputeData, DateRecapDataset, RatioDataset,
SubDataset,
},
structs::{
AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, BiMap, Date, DateMap, Height, HeightMap,
MapKind, Timestamp, OHLC,
},
};
use super::{AnyDatasetGroup, MinInitialStates};
/// Shared behaviour of every dataset: reflection-based collection of the
/// height/date/bi maps stored in its fields (filtered by `MapKind`), the
/// insert/compute scheduling predicates, and the export lifecycle hooks.
/// Field discovery goes through `struct_iterable::Iterable` and runtime
/// downcasts over every concrete map value type the project stores.
pub trait AnyDataset: Iterable {
/// Minimum initial state (first unsafe / last saved height and date)
/// established across this dataset's maps at import time.
fn get_min_initial_states(&self) -> &MinInitialStates;
/// True when either axis still has unsafe data to (re)insert.
fn needs_insert(&self, height: Height, date: Date) -> bool {
self.needs_insert_height(height) || self.needs_insert_date(date)
}
// Insert is needed when the dataset has inserted height maps and `height`
// is at or past the first height considered unsafe (default: genesis).
#[inline(always)]
fn needs_insert_height(&self, height: Height) -> bool {
!self.to_all_inserted_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.inserted
.first_unsafe_height
.unwrap_or(Height::ZERO)
<= height
}
// Same check on the date axis; an absent first-unsafe-date means
// everything is unsafe (`map_or(true, ...)`).
#[inline(always)]
fn needs_insert_date(&self, date: Date) -> bool {
!self.to_all_inserted_date_map_vec().is_empty()
&& self
.get_min_initial_states()
.inserted
.first_unsafe_date
.map_or(true, |min_initial_first_unsafe_date| {
min_initial_first_unsafe_date <= date
})
}
/// Collects every `BiMap` field whose kind matches, recursing into the
/// composite datasets (`RatioDataset`, `AddressCohortMetadataDataset`,
/// `SubDataset`). Each field downcasts to at most one concrete type, so
/// the if/else chain is a type dispatch.
fn to_kind_bi_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<BiMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<RatioDataset>() {
// Composite datasets expose pre-filtered lists per kind.
match kind {
MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
MapKind::Computed => dataset.to_computed_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
MapKind::Computed => dataset.to_computed_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
// SubDataset contains nested datasets: recurse.
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_bi_map_vec(kind));
});
}
});
v
}
/// Mutable counterpart of `to_kind_bi_map_vec`. Uses `is::<T>()` match
/// guards so that only one `downcast_mut` borrow per field can escape into
/// the result vector (sequential `if let downcast_mut` chains would not
/// pass the borrow checker here).
fn to_kind_mut_bi_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyBiMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<BiMap<u8>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u16>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u32>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u64>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<usize>>() => {
if let Some(map) = any.downcast_mut::<BiMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<f32>>() => {
if let Some(map) = any.downcast_mut::<BiMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<f64>>() => {
if let Some(map) = any.downcast_mut::<BiMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<BiMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Date>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Height>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<RatioDataset>() => {
if let Some(dataset) = any.downcast_mut::<RatioDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_bi_map_vec(kind));
});
}
}
_ => {}
});
v
}
/// Collects every `DateMap` field whose kind matches. Also unwraps
/// `DateRecapDataset` (only instantiated at u32/u64/f32 in this project)
/// and recurses into `SubDataset` / `AddressCohortMetadataDataset`.
fn to_kind_date_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<DateMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u32>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u64>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<f32>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_date_map_vec(kind));
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_date_map_vec(),
MapKind::Computed => dataset.to_computed_date_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
});
v
}
/// Mutable counterpart of `to_kind_date_map_vec` (match-guard structure
/// for the same borrow-checker reason as `to_kind_mut_bi_map_vec`).
fn to_kind_mut_date_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyDateMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<DateMap<u8>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u16>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u32>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u64>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<usize>>() => {
if let Some(map) = any.downcast_mut::<DateMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<f32>>() => {
if let Some(map) = any.downcast_mut::<DateMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<f64>>() => {
if let Some(map) = any.downcast_mut::<DateMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<DateMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Date>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Height>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateRecapDataset<u32>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u32>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<DateRecapDataset<u64>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u64>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<DateRecapDataset<f32>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<f32>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_date_map_vec(kind));
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_date_map_vec(),
MapKind::Computed => dataset.to_computed_mut_date_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
_ => {}
});
v
}
/// Collects every `HeightMap` field whose kind matches, recursing into
/// `SubDataset` and `AddressCohortMetadataDataset` (no `DateRecapDataset`
/// here: recaps only exist on the date axis).
fn to_kind_height_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<HeightMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_height_map_vec(kind));
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_height_map_vec(),
MapKind::Computed => dataset.to_computed_height_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
});
v
}
/// Mutable counterpart of `to_kind_height_map_vec`.
fn to_kind_mut_height_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyHeightMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<HeightMap<u8>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u16>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u32>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u64>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<usize>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<f32>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<f64>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Date>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Height>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_height_map_vec(kind));
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_height_map_vec(),
MapKind::Computed => dataset.to_computed_mut_height_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
_ => {}
});
v
}
// --- Convenience wrappers over the generic `to_kind_*` collectors. ---
fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
self.to_kind_bi_map_vec(MapKind::Inserted)
}
fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
self.to_kind_height_map_vec(MapKind::Inserted)
}
fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
self.to_kind_date_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
self.to_kind_mut_bi_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
self.to_kind_mut_height_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
self.to_kind_mut_date_map_vec(MapKind::Inserted)
}
/// All inserted height maps, including the height side of every bi map.
fn to_all_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut vec = self.to_inserted_height_map_vec();
vec.append(
&mut self
.to_inserted_bi_map_vec()
.iter()
.map(|bi| bi.get_height())
.collect_vec(),
);
vec
}
/// All inserted date maps, including the date side of every bi map.
fn to_all_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut vec = self.to_inserted_date_map_vec();
vec.append(
&mut self
.to_inserted_bi_map_vec()
.iter()
.map(|bi| bi.get_date())
.collect_vec(),
);
vec
}
/// Every inserted map, both axes, erased to `AnyMap`.
fn to_all_inserted_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let heights = self
.to_all_inserted_height_map_vec()
.into_iter()
.map(|d| d.as_any_map());
let dates = self
.to_all_inserted_date_map_vec()
.into_iter()
.map(|d| d.as_any_map());
heights.chain(dates).collect_vec()
}
/// True when the latest height or date of the batch reaches this dataset's
/// computed-unsafe range.
#[inline(always)]
fn should_compute(&self, compute_data: &ComputeData) -> bool {
compute_data
.heights
.last()
.map_or(false, |height| self.should_compute_height(*height))
|| compute_data
.dates
.last()
.map_or(false, |date| self.should_compute_date(*date))
}
#[inline(always)]
fn should_compute_height(&self, height: Height) -> bool {
!self.to_all_computed_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.computed
.first_unsafe_height
.unwrap_or(Height::ZERO)
<= height
}
#[inline(always)]
fn should_compute_date(&self, date: Date) -> bool {
!self.to_all_computed_date_map_vec().is_empty()
&& self
.get_min_initial_states()
.computed
.first_unsafe_date
.map_or(true, |min_initial_first_unsafe_date| {
min_initial_first_unsafe_date <= date
})
}
fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
self.to_kind_bi_map_vec(MapKind::Computed)
}
fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
self.to_kind_height_map_vec(MapKind::Computed)
}
fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
self.to_kind_date_map_vec(MapKind::Computed)
}
fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
self.to_kind_mut_bi_map_vec(MapKind::Computed)
}
fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
self.to_kind_mut_height_map_vec(MapKind::Computed)
}
fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
self.to_kind_mut_date_map_vec(MapKind::Computed)
}
/// All computed height maps, including the height side of every bi map.
fn to_all_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut vec = self.to_computed_height_map_vec();
vec.append(
&mut self
.to_computed_bi_map_vec()
.iter()
.map(|bi| bi.get_height())
.collect_vec(),
);
vec
}
/// All computed date maps, including the date side of every bi map.
fn to_all_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut vec = self.to_computed_date_map_vec();
vec.append(
&mut self
.to_computed_bi_map_vec()
.iter()
.map(|bi| bi.get_date())
.collect_vec(),
);
vec
}
/// Every computed map, both axes, erased to `AnyMap`.
fn to_all_computed_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let heights = self
.to_all_computed_height_map_vec()
.into_iter()
.map(|d| d.as_any_map());
let dates = self
.to_all_computed_date_map_vec()
.into_iter()
.map(|d| d.as_any_map());
heights.chain(dates).collect_vec()
}
/// Every map this dataset owns, inserted first, then computed.
fn to_all_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let mut inserted = self.to_all_inserted_map_vec();
inserted.append(&mut self.to_all_computed_map_vec());
inserted
}
// #[inline(always)]
// fn is_empty(&self) -> bool {
//     self.to_any_map_vec().is_empty()
// }
/// Runs `pre_export` on every map (inserted and computed, both axes).
fn pre_export(&mut self) {
self.to_inserted_mut_height_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_inserted_mut_date_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.pre_export())
});
self.to_computed_mut_height_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_computed_mut_date_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.pre_export())
});
}
/// Exports every map in parallel (rayon); stops at the first error.
fn export(&self) -> color_eyre::Result<()> {
self.to_all_map_vec()
.into_par_iter()
.try_for_each(|map| -> color_eyre::Result<()> { map.export() })
}
/// Runs `post_export` on every map (inserted and computed, both axes).
fn post_export(&mut self) {
self.to_inserted_mut_height_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_inserted_mut_date_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.post_export())
});
self.to_computed_mut_height_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_computed_mut_date_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.post_export())
});
}
/// Deletes the on-disk files of every computed map so they can be rebuilt.
fn reset_computed(&self) {
self.to_all_computed_date_map_vec()
.iter()
.for_each(|map| map.delete_files());
self.to_all_computed_height_map_vec()
.iter()
.for_each(|map| map.delete_files());
}
}

View File

@@ -1,7 +0,0 @@
use super::AnyDataset;
/// A container of datasets that can expose its members as trait objects.
pub trait AnyDatasetGroup {
/// Shared views of every dataset in the group.
fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
/// Mutable views of every dataset in the group.
fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -1,9 +0,0 @@
use super::{AnyDataset, MinInitialStates};
/// A top-level collection of datasets with an aggregated min initial state.
pub trait AnyDatasets {
/// Aggregated minimal initial state across all contained datasets.
fn get_min_initial_states(&self) -> &MinInitialStates;
/// Shared views of every contained dataset.
fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
/// Mutable views of every contained dataset.
fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -1,279 +0,0 @@
use allocative::Allocative;
use crate::structs::{AnyDateMap, AnyHeightMap, Config, Date, Height};
use super::{AnyDataset, AnyDatasets};
// Minimal initial state split by map kind: one half for inserted maps,
// one for computed maps.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialStates {
pub inserted: MinInitialState,
pub computed: MinInitialState,
}
impl MinInitialStates {
    /// Overwrites both halves of `self` with `other`'s values.
    pub fn consume(&mut self, other: Self) {
        *self = other;
    }

    /// Computes both halves for a single dataset.
    pub fn compute_from_dataset(dataset: &dyn AnyDataset, config: &Config) -> Self {
        let inserted = MinInitialState::compute_from_dataset(dataset, Mode::Inserted, config);
        let computed = MinInitialState::compute_from_dataset(dataset, Mode::Computed, config);

        Self { inserted, computed }
    }

    /// Computes both halves aggregated over a collection of datasets.
    pub fn compute_from_datasets(datasets: &dyn AnyDatasets, config: &Config) -> Self {
        let inserted = MinInitialState::compute_from_datasets(datasets, Mode::Inserted, config);
        let computed = MinInitialState::compute_from_datasets(datasets, Mode::Computed, config);

        Self { inserted, computed }
    }

    /// The smaller of the two last-height markers; `None` wins because
    /// `Option` orders `None` below any `Some`.
    pub fn min_last_height(&self) -> Option<Height> {
        Option::min(self.computed.last_height, self.inserted.last_height)
    }
}
// Progress markers for one map kind; `None` means "no data yet" and, for
// the `first_unsafe_*` fields, "everything is unsafe".
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialState {
pub first_unsafe_date: Option<Date>,
pub first_unsafe_height: Option<Height>,
pub last_date: Option<Date>,
pub last_height: Option<Height>,
}
// Selects which half of the state (inserted or computed maps) a
// computation targets.
enum Mode {
Inserted,
Computed,
}
impl MinInitialState {
/// Aggregates the min initial state across `datasets`: each field is the
/// minimum over all datasets owning at least one map on the relevant axis
/// (filtering out empty datasets keeps their `None` from forcing the
/// overall minimum to `None`). In `Computed` mode, `recompute_computed`
/// short-circuits to the default all-`None` state so everything recomputes.
fn compute_from_datasets(datasets: &dyn AnyDatasets, mode: Mode, config: &Config) -> Self {
match mode {
Mode::Inserted => {
let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_inserted_date_map_vec().is_empty()
};
let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_inserted_height_map_vec().is_empty()
};
Self {
first_unsafe_date: Self::min_datasets_date(
datasets,
contains_date_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.first_unsafe_date
.as_ref()
.cloned()
},
),
first_unsafe_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.first_unsafe_height
.as_ref()
.cloned()
},
),
last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
dataset
.get_min_initial_states()
.inserted
.last_date
.as_ref()
.cloned()
}),
last_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.last_height
.as_ref()
.cloned()
},
),
}
}
Mode::Computed => {
// Forced recompute: report no prior state at all.
if config.recompute_computed() {
// datasets.reset_computed();
return Self::default();
}
let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_computed_date_map_vec().is_empty()
};
let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_computed_height_map_vec().is_empty()
};
Self {
first_unsafe_date: Self::min_datasets_date(
datasets,
contains_date_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.first_unsafe_date
.as_ref()
.cloned()
},
),
first_unsafe_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.first_unsafe_height
.as_ref()
.cloned()
},
),
last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
dataset
.get_min_initial_states()
.computed
.last_date
.as_ref()
.cloned()
}),
last_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.last_height
.as_ref()
.cloned()
},
),
}
}
}
}
/// Minimum date produced by `map` over the datasets selected by
/// `is_not_empty`.
fn min_datasets_date(
    datasets: &dyn AnyDatasets,
    is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
    map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Date>,
) -> Option<Date> {
    let candidates = datasets
        .to_any_dataset_vec()
        .into_iter()
        .filter(is_not_empty)
        .map(map);

    Self::min_date(candidates)
}
/// Minimum height produced by `map` over the datasets selected by
/// `is_not_empty`.
fn min_datasets_height(
    datasets: &dyn AnyDatasets,
    is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
    map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Height>,
) -> Option<Height> {
    let candidates = datasets
        .to_any_dataset_vec()
        .into_iter()
        .filter(is_not_empty)
        .map(map);

    Self::min_height(candidates)
}
/// Derives the min initial state of a single dataset from its own maps.
/// In `Computed` mode with `recompute_computed` set, the computed files
/// are deleted and the default (all-`None`) state is returned so every
/// computed map is rebuilt from scratch.
fn compute_from_dataset(dataset: &dyn AnyDataset, mode: Mode, config: &Config) -> Self {
    let (date_vec, height_vec) = match mode {
        Mode::Inserted => (
            dataset.to_all_inserted_date_map_vec(),
            dataset.to_all_inserted_height_map_vec(),
        ),
        Mode::Computed => {
            if config.recompute_computed() {
                dataset.reset_computed();
                return Self::default();
            }

            (
                dataset.to_all_computed_date_map_vec(),
                dataset.to_all_computed_height_map_vec(),
            )
        }
    };

    Self {
        first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(&date_vec),
        first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
            &height_vec,
        ),
        last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
        last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
    }
}
#[inline(always)]
fn compute_min_initial_last_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<Date> {
Self::min_date(arr.iter().map(|map| map.get_initial_last_date()))
}
#[inline(always)]
fn compute_min_initial_last_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<Height> {
Self::min_height(arr.iter().map(|map| map.get_initial_last_height()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<Date> {
Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<Height> {
Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height()))
}
#[inline(always)]
fn min_date(iter: impl Iterator<Item = Option<Date>>) -> Option<Date> {
iter.min().and_then(|opt| opt)
}
#[inline(always)]
fn min_height(iter: impl Iterator<Item = Option<Height>>) -> Option<Height> {
iter.min().and_then(|opt| opt)
}
}

View File

@@ -1,9 +0,0 @@
mod any_dataset;
mod any_dataset_group;
mod any_datasets;
mod min_initial_state;
pub use any_dataset::*;
pub use any_dataset_group::*;
pub use any_datasets::*;
pub use min_initial_state::*;

View File

@@ -1,74 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Global address-count metadata: created and empty address totals inserted
/// per block, plus a computed net-change ("new addresses") map.
#[derive(Allocative, Iterable)]
pub struct AllAddressesMetadataDataset {
    min_initial_states: MinInitialStates,
    // NOTE(review): identifier is misspelled ("addreses"); the on-disk map
    // name ("created_addresses") is spelled correctly. Renaming is not done
    // here because `Iterable` exposes field names at runtime — confirm no
    // consumer matches on the misspelled name before fixing.
    created_addreses: BiMap<u32>,
    empty_addresses: BiMap<u32>,
    new_addresses: BiMap<u32>,
}
impl AllAddressesMetadataDataset {
    /// Imports the address-metadata maps from `path` and records the
    /// dataset's minimal initial state.
    pub fn import(path: &MapPath, config: &Config) -> color_eyre::Result<Self> {
        let f = |s: &str| path.join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            created_addreses: BiMap::new_bin(1, MapKind::Inserted, &f("created_addresses")),
            empty_addresses: BiMap::new_bin(1, MapKind::Inserted, &f("empty_addresses")),
            // Computed
            new_addresses: BiMap::new_bin(1, MapKind::Computed, &f("new_addresses")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts the running created/empty address counts (taken from the
    /// database metadata lengths) at `height`, and also at `date` when this
    /// is the date's last block.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            databases,
            height,
            date,
            is_date_last_block,
            ..
        } = insert_data;
        let created_addresses = self
            .created_addreses
            .height
            .insert(height, *databases.address_to_address_index.metadata.len);
        let empty_addresses = self.empty_addresses.height.insert(
            height,
            *databases.address_index_to_empty_address_data.metadata.len,
        );
        if is_date_last_block {
            self.created_addreses.date.insert(date, created_addresses);
            self.empty_addresses.date.insert(date, empty_addresses);
        }
    }
    /// Computes `new_addresses` as the 1-step net change of the created
    /// address count.
    pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
        self.new_addresses
            .multi_insert_net_change(heights, dates, &mut self.created_addreses, 1)
    }
}
impl AnyDataset for AllAddressesMetadataDataset {
    /// Exposes the minimal initial state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,381 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::{AddressCohortId, DurableStates},
},
structs::{AddressSplit, BiMap, Config, Date, Height, MapPath},
};
use super::cohort_metadata::AddressCohortMetadataDataset;
/// Dataset for one address cohort: per-cohort metadata plus the shared
/// sub-datasets (supply, utxo, capitalization, realized, unrealized,
/// price-paid, input).
#[derive(Allocative, Iterable)]
pub struct CohortDataset {
    min_initial_states: MinInitialStates,
    // Which slice of the address space this cohort covers.
    split: AddressSplit,
    metadata: AddressCohortMetadataDataset,
    pub subs: SubDataset,
}
impl CohortDataset {
    /// Imports the cohort's metadata dataset and sub-datasets from `path`,
    /// keyed by the cohort id's name and split, then records the dataset's
    /// minimal initial state.
    pub fn import(
        path: &MapPath,
        id: AddressCohortId,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let name = id.as_name().map(|s| s.to_owned());
        let split = id.as_split();
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            split,
            metadata: AddressCohortMetadataDataset::import(path, &name, config)?,
            subs: SubDataset::import(path, &name, config)?,
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    pub fn sub_datasets_vec(&self) -> Vec<&SubDataset> {
        vec![&self.subs]
    }
    pub fn needs_insert_metadata(&self, height: Height, date: Date) -> bool {
        self.metadata.needs_insert(height, date)
    }
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.utxo.needs_insert(height, date))
    }
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.needs_insert(height, date))
    }
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.needs_insert(height, date))
    }
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.price_paid.needs_insert(height, date))
    }
    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.needs_insert(height, date))
    }
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.needs_insert(height, date))
    }
    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.input.needs_insert(height, date))
    }
    // fn needs_insert_output(&self, insert_data: &InsertData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.needs_insert(height, date))
    // }
    /// Inserts the realized state belonging to this cohort's split.
    pub fn insert_realized_data(&mut self, insert_data: &InsertData) {
        let realized_state = insert_data
            .address_cohorts_realized_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();
        self.subs.realized.insert(insert_data, realized_state);
    }
    fn insert_metadata(&mut self, insert_data: &InsertData) {
        let address_count = insert_data
            .states
            .address_cohorts_durable_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap()
            .address_count;
        self.metadata.insert(insert_data, address_count);
    }
    fn insert_supply_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
        self.subs
            .supply
            .insert(insert_data, &durable_states.supply_state);
    }
    fn insert_utxo_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
        self.subs
            .utxo
            .insert(insert_data, &durable_states.utxo_state);
    }
    fn insert_capitalization_data(
        &mut self,
        insert_data: &InsertData,
        durable_states: &DurableStates,
    ) {
        self.subs
            .capitalization
            .insert(insert_data, &durable_states.capitalization_state);
    }
    fn insert_unrealized_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();
        self.subs.unrealized.insert(
            insert_data,
            &states.unrealized_block_state,
            &states.unrealized_date_state,
        );
    }
    fn insert_price_paid_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();
        self.subs
            .price_paid
            .insert(insert_data, &states.price_paid_state);
    }
    fn insert_input_data(&mut self, insert_data: &InsertData) {
        let state = insert_data
            .address_cohorts_input_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();
        self.subs.input.insert(insert_data, state);
    }
    // fn insert_output_data(&mut self, insert_data: &InsertData) {
    //     let state = insert_data
    //         .address_cohorts_output_states
    //         .as_ref()
    //         .unwrap()
    //         .get(&self.split)
    //         .unwrap();
    //     self.output.insert(insert_data, &state.all);
    //     self.illiquid.output.insert(insert_data, &state.illiquid);
    //     self.liquid.output.insert(insert_data, &state.liquid);
    //     self.highly_liquid
    //         .output
    //         .insert(insert_data, &state.highly_liquid);
    // }
    /// Inserts into every sub-dataset that needs data at this height/date,
    /// pulling this cohort's slice out of the shared per-cohort states.
    /// No-op when address computation is disabled for this pass.
    pub fn insert(&mut self, insert_data: &InsertData) {
        if !insert_data.compute_addresses {
            return;
        }
        let address_cohort_durable_states = insert_data
            .states
            .address_cohorts_durable_states
            .as_ref()
            .unwrap()
            .get(&self.split);
        if address_cohort_durable_states.is_none() {
            return; // TODO: Check if should panic instead
        }
        let address_cohort_durable_states = address_cohort_durable_states.unwrap();
        if self.needs_insert_metadata(insert_data.height, insert_data.date) {
            self.insert_metadata(insert_data);
        }
        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.insert_utxo_data(insert_data, &address_cohort_durable_states.durable_states);
        }
        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.insert_capitalization_data(
                insert_data,
                &address_cohort_durable_states.durable_states,
            );
        }
        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.insert_supply_data(insert_data, &address_cohort_durable_states.durable_states);
        }
        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.insert_realized_data(insert_data);
        }
        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.insert_unrealized_data(insert_data);
        }
        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.insert_price_paid_data(insert_data);
        }
        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.insert_input_data(insert_data);
        }
        // if self.needs_insert_output(insert_data) {
        //     self.insert_output_data(insert_data);
        // }
    }
    // pub fn should_compute_metadata(&self, compute_data: &ComputeData) -> bool {
    //     self.metadata.should_compute(compute_data)
    // }
    // pub fn should_compute_utxo(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.utxo.should_compute(compute_data))
    // }
    pub fn should_compute_supply(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.should_compute(compute_data))
    }
    pub fn should_compute_capitalization(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.should_compute(compute_data))
    }
    fn should_compute_realized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.should_compute(compute_data))
    }
    fn should_compute_unrealized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.should_compute(compute_data))
    }
    // fn should_compute_input(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.input.should_compute(compute_data))
    // }
    // fn should_compute_output(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.should_compute(compute_data))
    // }
    fn compute_supply_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        self.subs.supply.compute(compute_data, circulating_supply);
    }
    fn compute_unrealized_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.subs.unrealized.compute(
            compute_data,
            &mut self.subs.supply.supply,
            circulating_supply,
            market_cap,
        );
    }
    fn compute_realized_data(&mut self, compute_data: &ComputeData, market_cap: &mut BiMap<f32>) {
        self.subs.realized.compute(compute_data, market_cap);
    }
    fn compute_capitalization_data(&mut self, compute_data: &ComputeData, closes: &mut BiMap<f32>) {
        self.subs
            .capitalization
            .compute(compute_data, closes, &mut self.subs.supply.supply);
    }
    // fn compute_output_data(&mut self, compute_data: &ComputeData) {
    //     self.all
    //         .output
    //         .compute(compute_data, &mut self.supply.total);
    // }
    /// Runs every computed sub-dataset that is due. Capitalization reads the
    /// supply maps, hence the ordering constraint below.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.should_compute_supply(compute_data) {
            self.compute_supply_data(compute_data, circulating_supply);
        }
        if self.should_compute_unrealized(compute_data) {
            self.compute_unrealized_data(compute_data, circulating_supply, market_cap);
        }
        if self.should_compute_realized(compute_data) {
            self.compute_realized_data(compute_data, market_cap);
        }
        // MUST BE after compute_supply
        if self.should_compute_capitalization(compute_data) {
            self.compute_capitalization_data(compute_data, closes);
        }
        // if self.should_compute_output(compute_data) {
        //     self.compute_output_data(compute_data);
        // }
    }
}
impl AnyDataset for CohortDataset {
    /// Exposes the minimal initial state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,70 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Per-cohort metadata; currently just the cohort's address count.
#[derive(Allocative, Iterable)]
pub struct AddressCohortMetadataDataset {
    min_initial_states: MinInitialStates,
    // Number of addresses in the cohort, keyed by height and by date.
    address_count: BiMap<f64>,
    // pub output: OutputSubDataset,
    // Sending addresses
    // Receiving addresses
    // Active addresses (Unique(Sending + Receiving))
}
impl AddressCohortMetadataDataset {
    /// Imports the cohort's address-count map, nesting it under `name` when
    /// the cohort has one, then records the dataset's minimal initial state.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        // Named cohorts live in a sub-folder; an unnamed cohort sits directly
        // in `path`.
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            address_count: BiMap::new_bin(1, MapKind::Inserted, &f("address_count")),
            // output: OutputSubDataset::import(parent_path)?,
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts the cohort's address count at `height`, and also at `date`
    /// when this is the date's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        address_count: f64,
    ) {
        self.address_count.height.insert(height, address_count);
        if is_date_last_block {
            self.address_count.date.insert(date, address_count);
        }
    }
}
impl AnyDataset for AddressCohortMetadataDataset {
    /// Exposes the minimal initial state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,176 +0,0 @@
mod all_metadata;
mod cohort;
pub mod cohort_metadata;
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
parser::states::SplitByAddressCohort,
structs::{BiMap, Config, Date, Height},
};
use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset};
use super::{AnyDataset, AnyDatasets, ComputeData, InsertData, MinInitialStates};
/// All address-based datasets: the global address metadata plus one
/// [`CohortDataset`] per address cohort.
#[derive(Allocative)]
pub struct AddressDatasets {
    min_initial_states: MinInitialStates,
    metadata: AllAddressesMetadataDataset,
    pub cohorts: SplitByAddressCohort<CohortDataset>,
}
impl AddressDatasets {
    /// Imports every cohort dataset in parallel, then the global metadata
    /// dataset, and records the aggregated minimal initial state.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByAddressCohort::<Option<CohortDataset>>::default();
        let path_dataset = config.path_datasets();
        // Import in parallel, collect the results, then move them back into
        // the split structure sequentially (propagating the first error).
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, CohortDataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                cohorts.get_mut_from_id(&id).replace(dataset?);
                Ok(())
            })?;
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            metadata: AllAddressesMetadataDataset::import(&path_dataset, config)?,
            cohorts: cohorts.unwrap(),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));
        Ok(s)
    }
    /// Inserts the block's data into the metadata dataset and every cohort.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.metadata.insert(insert_data);
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| cohort.insert(insert_data))
    }
    /// True when any cohort needs durable state (utxo, capitalization,
    /// supply) or one-shot state at this height/date.
    pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
        let needs_insert_utxo = self.needs_insert_utxo(height, date);
        let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
        let needs_insert_supply = self.needs_insert_supply(height, date);
        let needs_one_shot_states = self.needs_one_shot_states(height, date);
        needs_insert_utxo
            || needs_insert_capitalization
            || needs_insert_supply
            || needs_one_shot_states
    }
    pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }
    // pub fn needs_sent_states(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    // }
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.cohorts
            .as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
    }
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.cohorts
            .as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
    }
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.cohorts
            .as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_supply(height, date))
    }
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.cohorts
            .as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
    }
    // pub fn needs_insert_realized(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.cohorts
    //         .as_vec()
    //         .iter()
    //         .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
    // }
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.cohorts
            .as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
    }
    // pub fn needs_insert_input(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.cohorts
    //         .as_vec()
    //         .iter()
    //         .any(|(dataset, _)| dataset.needs_insert_input(height, date))
    // }
    /// Runs the computed maps of the metadata dataset and of every cohort.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.metadata.compute(compute_data);
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| {
                cohort.compute(compute_data, closes, circulating_supply, market_cap)
            })
    }
}
impl AnyDatasets for AddressDatasets {
    /// Aggregated minimal initial state over all contained datasets.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    /// All contained datasets as shared trait objects: every cohort first,
    /// then the global metadata dataset.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> = self
            .cohorts
            .as_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &(dyn AnyDataset + Send + Sync))
            .collect();
        datasets.push(&self.metadata);
        datasets
    }
    /// Mutable counterpart of [`Self::to_any_dataset_vec`], same ordering.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> = self
            .cohorts
            .as_mut_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &mut dyn AnyDataset)
            .collect();
        datasets.push(&mut self.metadata);
        datasets
    }
}

View File

@@ -1,51 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, Date, HeightMap, MapKind, Timestamp},
};
use super::{InsertData, MinInitialStates};
/// Per-block metadata: the block's timestamp and the date derived from it.
#[derive(Allocative, Iterable)]
pub struct BlockMetadataDataset {
    min_initial_states: MinInitialStates,
    pub date: HeightMap<Date>,
    pub timestamp: HeightMap<Timestamp>,
}
impl BlockMetadataDataset {
    /// Imports the per-block date and timestamp maps from the datasets
    /// folder, then records the dataset's minimal initial state.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let datasets_path = config.path_datasets();
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            date: HeightMap::new_bin(1, MapKind::Inserted, &datasets_path.join("date")),
            timestamp: HeightMap::new_bin(1, MapKind::Inserted, &datasets_path.join("timestamp")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Records the block's timestamp, and the date derived from it, at
    /// `height`.
    pub fn insert(
        &mut self,
        &InsertData {
            height, timestamp, ..
        }: &InsertData,
    ) {
        self.date.insert(height, timestamp.to_date());
        self.timestamp.insert(height, timestamp);
    }
}
impl AnyDataset for BlockMetadataDataset {
    /// Exposes the minimal initial state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,65 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, HeightMap, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Coindays destroyed per block, plus its one-day (per-date) sum.
#[derive(Allocative, Iterable)]
pub struct CoindaysDataset {
    min_initial_states: MinInitialStates,
    pub coindays_destroyed: HeightMap<f32>,
    pub coindays_destroyed_1d_sum: DateMap<f32>,
}
impl CoindaysDataset {
    /// Imports the coindays maps from the datasets folder, then records the
    /// dataset's minimal initial state.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let datasets_path = config.path_datasets();
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            coindays_destroyed: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &datasets_path.join("coindays_destroyed"),
            ),
            coindays_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &datasets_path.join("coindays_destroyed_1d_sum"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts the coindays destroyed by the block at `height` and, on the
    /// date's last block, the daily sum over that date's block range.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            satdays_destroyed,
            date_blocks_range,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
    ) {
        let destroyed = satdays_destroyed.to_btc() as f32;
        self.coindays_destroyed.insert(height, destroyed);
        if is_date_last_block {
            let daily_sum = self.coindays_destroyed.sum_range(date_blocks_range);
            self.coindays_destroyed_1d_sum.insert(date, daily_sum);
        }
    }
}
impl AnyDataset for CoindaysDataset {
    /// Exposes the minimal initial state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,659 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
structs::{BiMap, Config, DateMap, Height, HeightMap, MapKind},
utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates, RatioDataset};
/// Cointime-economics dataset: coinblocks destroyed are inserted per block,
/// and a large family of cointime metrics (liveliness, vaultedness, active
/// and vaulted supply/prices/caps, cointime value flows, adjusted inflation)
/// is computed from them.
#[derive(Allocative, Iterable)]
pub struct CointimeDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub coinblocks_destroyed: HeightMap<f32>,
    pub coinblocks_destroyed_1d_sum: DateMap<f32>,
    // Computed
    pub active_cap: BiMap<f32>,
    pub active_price: BiMap<f32>,
    pub active_price_ratio: RatioDataset,
    pub active_supply: BiMap<f32>,
    pub active_supply_3m_net_change: BiMap<f32>,
    pub active_supply_net_change: BiMap<f32>,
    pub activity_to_vaultedness_ratio: BiMap<f32>,
    pub coinblocks_created: HeightMap<f32>,
    pub coinblocks_created_1d_sum: DateMap<f32>,
    pub coinblocks_stored: HeightMap<f32>,
    pub coinblocks_stored_1d_sum: DateMap<f32>,
    pub cointime_adjusted_velocity: DateMap<f32>,
    pub cointime_adjusted_inflation_rate: DateMap<f32>,
    pub cointime_adjusted_yearly_inflation_rate: DateMap<f32>,
    pub cointime_cap: BiMap<f32>,
    pub cointime_price: BiMap<f32>,
    pub cointime_price_ratio: RatioDataset,
    pub cointime_value_created: HeightMap<f32>,
    pub cointime_value_created_1d_sum: DateMap<f32>,
    pub cointime_value_destroyed: HeightMap<f32>,
    pub cointime_value_destroyed_1d_sum: DateMap<f32>,
    pub cointime_value_stored: HeightMap<f32>,
    pub cointime_value_stored_1d_sum: DateMap<f32>,
    pub concurrent_liveliness: DateMap<f32>,
    pub concurrent_liveliness_2w_median: DateMap<f32>,
    pub cumulative_coinblocks_created: BiMap<f32>,
    pub cumulative_coinblocks_destroyed: BiMap<f32>,
    pub cumulative_coinblocks_stored: BiMap<f32>,
    pub investor_cap: BiMap<f32>,
    pub investorness: BiMap<f32>,
    pub liveliness: BiMap<f32>,
    pub liveliness_net_change: BiMap<f32>,
    pub liveliness_net_change_2w_median: BiMap<f32>,
    pub producerness: BiMap<f32>,
    pub thermo_cap: BiMap<f32>,
    pub thermo_cap_to_investor_cap_ratio: BiMap<f32>,
    pub total_cointime_value_created: BiMap<f32>,
    pub total_cointime_value_destroyed: BiMap<f32>,
    pub total_cointime_value_stored: BiMap<f32>,
    pub true_market_deviation: BiMap<f32>,
    pub true_market_mean: BiMap<f32>,
    pub true_market_mean_ratio: RatioDataset,
    pub true_market_net_unrealized_profit_and_loss: BiMap<f32>,
    pub vaulted_cap: BiMap<f32>,
    pub vaulted_price: BiMap<f32>,
    pub vaulted_price_ratio: RatioDataset,
    pub vaulted_supply: BiMap<f32>,
    pub vaulted_supply_net_change: BiMap<f32>,
    pub vaulted_supply_3m_net_change: BiMap<f32>,
    pub vaultedness: BiMap<f32>,
    pub vaulting_rate: BiMap<f32>,
}
impl CointimeDataset {
    /// Imports every cointime map from the datasets folder and records the
    /// dataset's minimal initial state.
    ///
    /// NOTE(review): the first integer argument to `new_bin` varies (1/2/3
    /// for some maps); presumably a map version — TODO confirm against
    /// `new_bin`'s definition.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let path_dataset = config.path_datasets();
        let f = |s: &str| path_dataset.join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            coinblocks_destroyed: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("coinblocks_destroyed"),
            ),
            coinblocks_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("coinblocks_destroyed_1d_sum"),
            ),
            // Computed
            active_cap: BiMap::new_bin(1, MapKind::Computed, &f("active_cap")),
            active_price: BiMap::new_bin(1, MapKind::Computed, &f("active_price")),
            active_price_ratio: RatioDataset::import(&path_dataset, "active_price", config)?,
            active_supply: BiMap::new_bin(1, MapKind::Computed, &f("active_supply")),
            active_supply_3m_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("active_supply_3m_net_change"),
            ),
            active_supply_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("active_supply_net_change"),
            ),
            activity_to_vaultedness_ratio: BiMap::new_bin(
                2,
                MapKind::Computed,
                &f("activity_to_vaultedness_ratio"),
            ),
            coinblocks_created: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_created")),
            coinblocks_created_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("coinblocks_created_1d_sum"),
            ),
            coinblocks_stored: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_stored")),
            coinblocks_stored_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("coinblocks_stored_1d_sum"),
            ),
            cointime_adjusted_velocity: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_adjusted_velocity"),
            ),
            cointime_adjusted_inflation_rate: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_adjusted_inflation_rate"),
            ),
            cointime_adjusted_yearly_inflation_rate: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_adjusted_yearly_inflation_rate"),
            ),
            cointime_cap: BiMap::new_bin(1, MapKind::Computed, &f("cointime_cap")),
            cointime_price: BiMap::new_bin(1, MapKind::Computed, &f("cointime_price")),
            cointime_price_ratio: RatioDataset::import(&path_dataset, "cointime_price", config)?,
            cointime_value_created: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_created"),
            ),
            cointime_value_created_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_created_1d_sum"),
            ),
            cointime_value_destroyed: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_destroyed"),
            ),
            cointime_value_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_destroyed_1d_sum"),
            ),
            cointime_value_stored: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_stored"),
            ),
            cointime_value_stored_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("cointime_value_stored_1d_sum"),
            ),
            concurrent_liveliness: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("concurrent_liveliness"),
            ),
            concurrent_liveliness_2w_median: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("concurrent_liveliness_2w_median"),
            ),
            cumulative_coinblocks_created: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_coinblocks_created"),
            ),
            cumulative_coinblocks_destroyed: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_coinblocks_destroyed"),
            ),
            cumulative_coinblocks_stored: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_coinblocks_stored"),
            ),
            investor_cap: BiMap::new_bin(1, MapKind::Computed, &f("investor_cap")),
            investorness: BiMap::new_bin(1, MapKind::Computed, &f("investorness")),
            liveliness: BiMap::new_bin(1, MapKind::Computed, &f("liveliness")),
            liveliness_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("liveliness_net_change"),
            ),
            liveliness_net_change_2w_median: BiMap::new_bin(
                3,
                MapKind::Computed,
                &f("liveliness_net_change_2w_median"),
            ),
            producerness: BiMap::new_bin(1, MapKind::Computed, &f("producerness")),
            thermo_cap: BiMap::new_bin(1, MapKind::Computed, &f("thermo_cap")),
            thermo_cap_to_investor_cap_ratio: BiMap::new_bin(
                2,
                MapKind::Computed,
                &f("thermo_cap_to_investor_cap_ratio"),
            ),
            total_cointime_value_created: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("total_cointime_value_created"),
            ),
            total_cointime_value_destroyed: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("total_cointime_value_destroyed"),
            ),
            total_cointime_value_stored: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("total_cointime_value_stored"),
            ),
            true_market_deviation: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("true_market_deviation"),
            ),
            true_market_mean: BiMap::new_bin(1, MapKind::Computed, &f("true_market_mean")),
            true_market_mean_ratio: RatioDataset::import(
                &path_dataset,
                "true_market_mean",
                config,
            )?,
            true_market_net_unrealized_profit_and_loss: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("true_market_net_unrealized_profit_and_loss"),
            ),
            vaulted_cap: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_cap")),
            vaulted_price: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_price")),
            vaulted_price_ratio: RatioDataset::import(&path_dataset, "vaulted_price", config)?,
            vaulted_supply: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_supply")),
            vaulted_supply_3m_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("vaulted_supply_3m_net_change"),
            ),
            vaulted_supply_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("vaulted_supply_net_change"),
            ),
            vaultedness: BiMap::new_bin(1, MapKind::Computed, &f("vaultedness")),
            vaulting_rate: BiMap::new_bin(1, MapKind::Computed, &f("vaulting_rate")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
pub fn insert(
&mut self,
&InsertData {
height,
date,
satblocks_destroyed,
date_blocks_range,
is_date_last_block,
..
}: &InsertData,
) {
self.coinblocks_destroyed
.insert(height, satblocks_destroyed.to_btc() as f32);
if is_date_last_block {
self.coinblocks_destroyed_1d_sum
.insert(date, self.coinblocks_destroyed.sum_range(date_blocks_range));
}
}
/// Derives every computed cointime metric (liveliness, vaultedness,
/// active/vaulted supplies and caps, cointime prices and ratios, ...)
/// from the inserted coinblock data and from the externally-owned maps
/// passed in as `&mut` parameters.
///
/// NOTE(review): ordering is significant — later computations read maps
/// filled earlier in this function (e.g. `liveliness` feeds
/// `vaultedness`, which feeds `vaulted_supply`). Do not reorder.
#[allow(clippy::too_many_arguments)]
pub fn compute(
    &mut self,
    compute_data: &ComputeData,
    first_height: &mut DateMap<Height>,
    last_height: &mut DateMap<Height>,
    closes: &mut BiMap<f32>,
    circulating_supply: &mut BiMap<f64>,
    realized_cap: &mut BiMap<f32>,
    realized_price: &mut BiMap<f32>,
    inflation_rate: &mut DateMap<f64>,
    yearly_inflation_rate: &mut DateMap<f64>,
    annualized_transaction_volume: &mut DateMap<f32>,
    cumulative_subsidy_in_dollars: &mut BiMap<f32>,
) {
    let &ComputeData { heights, dates, .. } = compute_data;
    // --- Coinblocks: cumulative destroyed / created / stored ---
    self.cumulative_coinblocks_destroyed
        .height
        .multi_insert_cumulative(heights, &mut self.coinblocks_destroyed);
    self.cumulative_coinblocks_destroyed
        .date
        .multi_insert_cumulative(dates, &mut self.coinblocks_destroyed_1d_sum);
    // Coinblocks created per block equal the circulating supply at that
    // block (one coinblock per coin per block), cast down to f32.
    self.coinblocks_created.multi_insert_simple_transform(
        heights,
        &mut circulating_supply.height,
        |circulating_supply, _| circulating_supply as f32,
    );
    self.coinblocks_created_1d_sum.multi_insert_sum_range(
        dates,
        &self.coinblocks_created,
        first_height,
        last_height,
    );
    self.cumulative_coinblocks_created
        .height
        .multi_insert_cumulative(heights, &mut self.coinblocks_created);
    self.cumulative_coinblocks_created
        .date
        .multi_insert_cumulative(dates, &mut self.coinblocks_created_1d_sum);
    // Stored = created - destroyed.
    self.coinblocks_stored.multi_insert_subtract(
        heights,
        &mut self.coinblocks_created,
        &mut self.coinblocks_destroyed,
    );
    self.coinblocks_stored_1d_sum.multi_insert_sum_range(
        dates,
        &self.coinblocks_stored,
        first_height,
        last_height,
    );
    self.cumulative_coinblocks_stored
        .height
        .multi_insert_cumulative(heights, &mut self.coinblocks_stored);
    self.cumulative_coinblocks_stored
        .date
        .multi_insert_cumulative(dates, &mut self.coinblocks_stored_1d_sum);
    // --- Liveliness / vaultedness ---
    // Liveliness = cumulative destroyed / cumulative created.
    self.liveliness.multi_insert_divide(
        heights,
        dates,
        &mut self.cumulative_coinblocks_destroyed,
        &mut self.cumulative_coinblocks_created,
    );
    // Vaultedness is the complement of liveliness.
    self.vaultedness.multi_insert_simple_transform(
        heights,
        dates,
        &mut self.liveliness,
        &|liveliness| 1.0 - liveliness,
    );
    self.activity_to_vaultedness_ratio.multi_insert_divide(
        heights,
        dates,
        &mut self.liveliness,
        &mut self.vaultedness,
    );
    // Concurrent liveliness uses the daily flows instead of cumulatives.
    self.concurrent_liveliness.multi_insert_divide(
        dates,
        &mut self.coinblocks_destroyed_1d_sum,
        &mut self.coinblocks_created_1d_sum,
    );
    self.concurrent_liveliness_2w_median.multi_insert_median(
        dates,
        &mut self.concurrent_liveliness,
        Some(TWO_WEEK_IN_DAYS),
    );
    self.liveliness_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.liveliness,
        ONE_DAY_IN_DAYS,
    );
    // NOTE(review): despite its `_median` name this field stores a 2-week
    // net change — confirm intent against the front end.
    self.liveliness_net_change_2w_median
        .multi_insert_net_change(heights, dates, &mut self.liveliness, TWO_WEEK_IN_DAYS);
    // --- Vaulted / active supply ---
    self.vaulted_supply.multi_insert_multiply(
        heights,
        dates,
        &mut self.vaultedness,
        circulating_supply,
    );
    self.vaulted_supply_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.vaulted_supply,
        ONE_DAY_IN_DAYS,
    );
    self.vaulted_supply_3m_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.vaulted_supply,
        THREE_MONTHS_IN_DAYS,
    );
    self.vaulting_rate.multi_insert_simple_transform(
        heights,
        dates,
        &mut self.vaulted_supply,
        &|vaulted_supply| vaulted_supply * ONE_YEAR_IN_DAYS as f32,
    );
    self.active_supply.multi_insert_multiply(
        heights,
        dates,
        &mut self.liveliness,
        circulating_supply,
    );
    self.active_supply_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.active_supply,
        ONE_DAY_IN_DAYS,
    );
    self.active_supply_3m_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.active_supply,
        THREE_MONTHS_IN_DAYS,
    );
    // TODO: Do these
    // let min_vaulted_supply = ;
    // let max_active_supply = ;
    // --- Cointime-adjusted rates ---
    self.cointime_adjusted_inflation_rate.multi_insert_multiply(
        dates,
        &mut self.activity_to_vaultedness_ratio.date,
        inflation_rate,
    );
    self.cointime_adjusted_yearly_inflation_rate
        .multi_insert_multiply(
            dates,
            &mut self.activity_to_vaultedness_ratio.date,
            yearly_inflation_rate,
        );
    self.cointime_adjusted_velocity.multi_insert_divide(
        dates,
        annualized_transaction_volume,
        &mut self.active_supply.date,
    );
    // TODO:
    // const activeSupplyChangeFromTransactions90dChange =
    //   createNetChangeLazyDataset(activeSupplyChangeFromTransactions, 90);
    // const activeSupplyChangeFromIssuance = createMultipliedLazyDataset(
    //   lastSubsidy,
    //   liveliness,
    // );
    // --- Caps ---
    // Thermo cap is a straight copy of the cumulative subsidy in dollars
    // (identity transform is used only to bridge the map types).
    self.thermo_cap.multi_insert_simple_transform(
        heights,
        dates,
        cumulative_subsidy_in_dollars,
        &|cumulative_subsidy_in_dollars| cumulative_subsidy_in_dollars,
    );
    // Investor cap = realized cap - thermo cap.
    self.investor_cap
        .multi_insert_subtract(heights, dates, realized_cap, &mut self.thermo_cap);
    self.thermo_cap_to_investor_cap_ratio
        .multi_insert_percentage(heights, dates, &mut self.thermo_cap, &mut self.investor_cap);
    // TODO:
    // const activeSupplyChangeFromIssuance90dChange = createNetChangeLazyDataset(
    //   activeSupplyChangeFromIssuance,
    //   90,
    // );
    // --- Prices derived from realized price ---
    self.active_price
        .multi_insert_divide(heights, dates, realized_price, &mut self.liveliness);
    self.active_cap.height.multi_insert_multiply(
        heights,
        &mut self.active_supply.height,
        &mut closes.height,
    );
    self.active_cap.date.multi_insert_multiply(
        dates,
        &mut self.active_supply.date,
        &mut closes.date,
    );
    self.vaulted_price.multi_insert_divide(
        heights,
        dates,
        realized_price,
        &mut self.vaultedness,
    );
    self.vaulted_cap.height.multi_insert_multiply(
        heights,
        &mut self.vaulted_supply.height,
        &mut closes.height,
    );
    self.vaulted_cap.date.multi_insert_multiply(
        dates,
        &mut self.vaulted_supply.date,
        &mut closes.date,
    );
    // --- True market metrics ---
    self.true_market_mean.multi_insert_divide(
        heights,
        dates,
        &mut self.investor_cap,
        &mut self.active_supply,
    );
    self.true_market_deviation.multi_insert_divide(
        heights,
        dates,
        &mut self.active_cap,
        &mut self.investor_cap,
    );
    // NUPL analogue: (active cap - investor cap) / active cap.
    self.true_market_net_unrealized_profit_and_loss
        .height
        .multi_insert_complex_transform(
            heights,
            &mut self.active_cap.height,
            |(active_cap, height, ..)| {
                let investor_cap = self.investor_cap.height.get(height).unwrap();
                (active_cap - investor_cap) / active_cap
            },
        );
    self.true_market_net_unrealized_profit_and_loss
        .date
        .multi_insert_complex_transform(
            dates,
            &mut self.active_cap.date,
            |(active_cap, date, _, _)| {
                let investor_cap = self.investor_cap.date.get(date).unwrap();
                (active_cap - investor_cap) / active_cap
            },
        );
    self.investorness
        .multi_insert_divide(heights, dates, &mut self.investor_cap, realized_cap);
    self.producerness
        .multi_insert_divide(heights, dates, &mut self.thermo_cap, realized_cap);
    // --- Cointime value (coinblocks x close price) ---
    self.cointime_value_destroyed.multi_insert_multiply(
        heights,
        &mut self.coinblocks_destroyed,
        &mut closes.height,
    );
    self.cointime_value_destroyed_1d_sum.multi_insert_multiply(
        dates,
        &mut self.coinblocks_destroyed_1d_sum,
        &mut closes.date,
    );
    self.cointime_value_created.multi_insert_multiply(
        heights,
        &mut self.coinblocks_created,
        &mut closes.height,
    );
    self.cointime_value_created_1d_sum.multi_insert_multiply(
        dates,
        &mut self.coinblocks_created_1d_sum,
        &mut closes.date,
    );
    self.cointime_value_stored.multi_insert_multiply(
        heights,
        &mut self.coinblocks_stored,
        &mut closes.height,
    );
    self.cointime_value_stored_1d_sum.multi_insert_multiply(
        dates,
        &mut self.coinblocks_stored_1d_sum,
        &mut closes.date,
    );
    self.total_cointime_value_created
        .height
        .multi_insert_cumulative(heights, &mut self.cointime_value_created);
    self.total_cointime_value_created
        .date
        .multi_insert_cumulative(dates, &mut self.cointime_value_created_1d_sum);
    self.total_cointime_value_destroyed
        .height
        .multi_insert_cumulative(heights, &mut self.cointime_value_destroyed);
    self.total_cointime_value_destroyed
        .date
        .multi_insert_cumulative(dates, &mut self.cointime_value_destroyed_1d_sum);
    self.total_cointime_value_stored
        .height
        .multi_insert_cumulative(heights, &mut self.cointime_value_stored);
    self.total_cointime_value_stored
        .date
        .multi_insert_cumulative(dates, &mut self.cointime_value_stored_1d_sum);
    // Cointime price = total value destroyed / cumulative coinblocks stored.
    self.cointime_price.multi_insert_divide(
        heights,
        dates,
        &mut self.total_cointime_value_destroyed,
        &mut self.cumulative_coinblocks_stored,
    );
    self.cointime_cap.multi_insert_multiply(
        heights,
        dates,
        &mut self.cointime_price,
        circulating_supply,
    );
    // --- Price-to-close ratio datasets ---
    self.active_price_ratio
        .compute(compute_data, closes, &mut self.active_price);
    self.cointime_price_ratio
        .compute(compute_data, closes, &mut self.cointime_price);
    self.true_market_mean_ratio
        .compute(compute_data, closes, &mut self.true_market_mean);
    self.vaulted_price_ratio
        .compute(compute_data, closes, &mut self.vaulted_price);
}
}
impl AnyDataset for CointimeDataset {
    /// Minimum initial states required before this dataset can resume work.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,50 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::structs::{BiMap, Config, MapKind};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Constant-valued reference lines (0, 1, 50, 100) exposed as regular
/// datasets so they can be charted alongside computed metrics.
#[derive(Allocative, Iterable)]
pub struct ConstantDataset {
    // Minimum states required before this dataset can be (re)computed.
    min_initial_states: MinInitialStates,
    pub _0: BiMap<u16>,
    pub _1: BiMap<u16>,
    pub _50: BiMap<u16>,
    pub _100: BiMap<u16>,
}
impl ConstantDataset {
    /// Builds the constant dataset, binding each map to its on-disk path
    /// under the configured datasets directory.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Computed
            _0: BiMap::new_bin(1, MapKind::Computed, &path("0")),
            _1: BiMap::new_bin(1, MapKind::Computed, &path("1")),
            _50: BiMap::new_bin(1, MapKind::Computed, &path("50")),
            _100: BiMap::new_bin(1, MapKind::Computed, &path("100")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Writes the constant value into every map for all given heights and dates.
    pub fn compute(&mut self, compute_data: &ComputeData) {
        let &ComputeData { heights, dates, .. } = compute_data;

        self._0.multi_insert_const(heights, dates, 0);
        self._1.multi_insert_const(heights, dates, 1);
        self._50.multi_insert_const(heights, dates, 50);
        self._100.multi_insert_const(heights, dates, 100);
    }
}
impl AnyDataset for ConstantDataset {
    /// Minimum initial states required before this dataset can resume work.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,56 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, Height, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Per-date block-height bounds: the first and the last block height
/// recorded for each date.
#[derive(Allocative, Iterable)]
pub struct DateMetadataDataset {
    // Minimum states required before this dataset can be (re)computed.
    min_initial_states: MinInitialStates,
    pub first_height: DateMap<Height>,
    pub last_height: DateMap<Height>,
}
impl DateMetadataDataset {
    /// Builds the date-metadata dataset, binding both maps to their
    /// on-disk paths under the configured datasets directory.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            first_height: DateMap::new_bin(1, MapKind::Inserted, &path("first_height")),
            last_height: DateMap::new_bin(1, MapKind::Inserted, &path("last_height")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the date's first block height and its latest height so far;
    /// the last insert of the day leaves the true last height in place.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.first_height
            .insert(insert_data.date, insert_data.date_first_height);
        self.last_height.insert(insert_data.date, insert_data.height);
    }
}
impl AnyDataset for DateMetadataDataset {
    /// Minimum initial states required before this dataset can resume work.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,760 +0,0 @@
use allocative::Allocative;
use itertools::Itertools;
use ordered_float::OrderedFloat;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Amount, BiMap, Config, DateMap, Height, HeightMap, MapKey, MapKind},
utils::{
BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS,
TARGET_BLOCKS_PER_DAY,
},
};
use super::{
ComputeData, DateRecapDataset, InsertData, MinInitialStates, RecapDataset, RecapOptions,
};
/// Mining-related metrics: rewards (coinbase, subsidy, fees) in BTC and
/// dollars, block statistics (size, weight, interval), hash rate and
/// difficulty derivatives, and issuance/inflation series.
#[derive(Allocative, Iterable)]
pub struct MiningDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub blocks_mined: DateMap<usize>,
    pub total_blocks_mined: DateMap<usize>,
    pub coinbase: HeightMap<f64>,
    pub coinbase_1d_sum: DateMap<f64>,
    pub coinbase_in_dollars: HeightMap<f32>,
    pub coinbase_in_dollars_1d_sum: DateMap<f32>,
    pub fees: HeightMap<f64>,
    pub fees_1d_sum: DateMap<f64>,
    pub fees_in_dollars: HeightMap<f32>,
    pub fees_in_dollars_1d_sum: DateMap<f32>,
    // Raw
    // pub average_fee_paid: BiMap<f32>,
    // pub max_fee_paid: BiMap<f32>,
    // pub _90th_percentile_fee_paid: BiMap<f32>,
    // pub _75th_percentile_fee_paid: BiMap<f32>,
    // pub median_fee_paid: BiMap<f32>,
    // pub _25th_percentile_fee_paid: BiMap<f32>,
    // pub _10th_percentile_fee_paid: BiMap<f32>,
    // pub min_fee_paid: BiMap<f32>,
    // sat/vB
    // pub average_fee_price: BiMap<f32>,
    // pub max_fee_price: BiMap<f32>,
    // pub _90th_percentile_fee_price: BiMap<f32>,
    // pub _75th_percentile_fee_price: BiMap<f32>,
    // pub median_fee_price: BiMap<f32>,
    // pub _25th_percentile_fee_price: BiMap<f32>,
    // pub _10th_percentile_fee_price: BiMap<f32>,
    // pub min_fee_price: BiMap<f32>,
    // -
    pub subsidy: HeightMap<f64>,
    pub subsidy_1d_sum: DateMap<f64>,
    pub subsidy_in_dollars: HeightMap<f32>,
    pub subsidy_in_dollars_1d_sum: DateMap<f32>,
    // "last_*" maps hold the value of the date's final block.
    pub last_coinbase: DateMap<f64>,
    pub last_coinbase_in_dollars: DateMap<f32>,
    pub last_fees: DateMap<f64>,
    pub last_fees_in_dollars: DateMap<f32>,
    pub last_subsidy: DateMap<f64>,
    pub last_subsidy_in_dollars: DateMap<f32>,
    pub difficulty: BiMap<f64>,
    pub block_size: HeightMap<f32>, // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_vbytes: HeightMap<u64>,
    pub block_interval: HeightMap<u32>, // in s
    // Computed
    pub annualized_issuance: DateMap<f64>, // Same as subsidy_1y_sum
    pub blocks_mined_1d_target: DateMap<usize>,
    pub blocks_mined_1m_sma: DateMap<f32>,
    pub blocks_mined_1m_sum: DateMap<usize>,
    pub blocks_mined_1m_target: DateMap<usize>,
    pub blocks_mined_1w_sma: DateMap<f32>,
    pub blocks_mined_1w_sum: DateMap<usize>,
    pub blocks_mined_1w_target: DateMap<usize>,
    pub blocks_mined_1y_sum: DateMap<usize>,
    pub blocks_mined_1y_target: DateMap<usize>,
    pub cumulative_block_size: BiMap<f32>,
    pub cumulative_block_size_gigabytes: BiMap<f32>,
    pub subsidy_1y_sum: DateMap<f64>,
    pub subsidy_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_subsidy: BiMap<f64>,
    pub cumulative_subsidy_in_dollars: BiMap<f32>,
    pub coinbase_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1d_sum_1y_sma: DateMap<f32>,
    pub cumulative_coinbase: BiMap<f64>,
    pub cumulative_coinbase_in_dollars: BiMap<f32>,
    pub fees_1y_sum: DateMap<f64>,
    pub fees_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_fees: BiMap<f64>,
    pub cumulative_fees_in_dollars: BiMap<f32>,
    pub inflation_rate: DateMap<f64>,
    pub yearly_inflation_rate: DateMap<f64>,
    pub subsidy_to_coinbase_ratio: HeightMap<f64>,
    pub subsidy_to_coinbase_1d_ratio: DateMap<f64>,
    pub fees_to_coinbase_ratio: HeightMap<f64>,
    pub fees_to_coinbase_1d_ratio: DateMap<f64>,
    pub hash_rate: DateMap<f64>,
    pub hash_rate_1w_sma: DateMap<f32>,
    pub hash_rate_1m_sma: DateMap<f32>,
    pub hash_rate_2m_sma: DateMap<f32>,
    pub hash_price: DateMap<f64>,
    pub hash_price_min: DateMap<f64>,
    pub hash_price_rebound: DateMap<f64>,
    pub difficulty_adjustment: DateMap<f64>,
    pub block_size_recap: DateRecapDataset<f32>, // in MB
    pub block_weight_recap: DateRecapDataset<f32>, // in MB
    pub block_vbytes_recap: DateRecapDataset<u64>,
    pub block_interval_recap: DateRecapDataset<u32>, // in s
    pub puell_multiple: DateMap<f32>,
    // pub hash_price_in_dollars: DateMap<f64>,
    // pub hash_price_30d_volatility: BiMap<f32>,
    // difficulty_adjustment
    // next_difficulty_adjustment
    // op return fees
    // inscriptions fees
    // until adjustement
    // until halving in days
    // until halving in blocks
}
impl MiningDataset {
/// Builds the mining dataset, binding every map to its on-disk path
/// under the configured datasets directory, then seeds the minimum
/// initial states from what is already persisted.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
    // Shorthand: dataset name -> full path inside the datasets directory.
    let f = |s: &str| config.path_datasets().join(s);
    let mut s = Self {
        min_initial_states: MinInitialStates::default(),
        // ---
        // Inserted
        // ---
        total_blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("total_blocks_mined")),
        blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("blocks_mined")),
        coinbase: HeightMap::new_bin(1, MapKind::Inserted, &f("coinbase")),
        coinbase_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("coinbase_1d_sum")),
        coinbase_in_dollars: HeightMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinbase_in_dollars"),
        ),
        coinbase_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinbase_in_dollars_1d_sum"),
        ),
        fees: HeightMap::new_bin(1, MapKind::Inserted, &f("fees")),
        fees_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("fees_1d_sum")),
        fees_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("fees_in_dollars")),
        fees_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("fees_in_dollars_1d_sum"),
        ),
        subsidy: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy")),
        subsidy_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("subsidy_1d_sum")),
        subsidy_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy_in_dollars")),
        subsidy_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("subsidy_in_dollars_1d_sum"),
        ),
        last_subsidy: DateMap::new_bin(1, MapKind::Inserted, &f("last_subsidy")),
        last_subsidy_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_subsidy_in_dollars"),
        ),
        last_coinbase: DateMap::new_bin(1, MapKind::Inserted, &f("last_coinbase")),
        last_coinbase_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_coinbase_in_dollars"),
        ),
        last_fees: DateMap::new_bin(1, MapKind::Inserted, &f("last_fees")),
        last_fees_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_fees_in_dollars"),
        ),
        difficulty: BiMap::new_bin(1, MapKind::Inserted, &f("difficulty")),
        block_size: HeightMap::new_bin(1, MapKind::Inserted, &f("block_size")),
        block_weight: HeightMap::new_bin(1, MapKind::Inserted, &f("block_weight")),
        block_vbytes: HeightMap::new_bin(1, MapKind::Inserted, &f("block_vbytes")),
        // Version 2: the on-disk format for this map was bumped.
        block_interval: HeightMap::new_bin(2, MapKind::Inserted, &f("block_interval")),
        // ---
        // Computed
        // ---
        coinbase_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("coinbase_1y_sum")),
        coinbase_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinbase_in_dollars_1y_sum"),
        ),
        coinbase_in_dollars_1d_sum_1y_sma: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinbase_in_dollars_1d_sum_1y_sma"),
        ),
        cumulative_coinbase: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_coinbase")),
        cumulative_coinbase_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_coinbase_in_dollars"),
        ),
        fees_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("fees_1y_sum")),
        fees_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_in_dollars_1y_sum"),
        ),
        cumulative_fees: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_fees")),
        cumulative_fees_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_fees_in_dollars"),
        ),
        subsidy_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("subsidy_1y_sum")),
        subsidy_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_in_dollars_1y_sum"),
        ),
        cumulative_subsidy: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_subsidy")),
        cumulative_subsidy_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_subsidy_in_dollars"),
        ),
        subsidy_to_coinbase_ratio: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_to_coinbase_ratio"),
        ),
        subsidy_to_coinbase_1d_ratio: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_to_coinbase_1d_ratio"),
        ),
        fees_to_coinbase_ratio: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_to_coinbase_ratio"),
        ),
        fees_to_coinbase_1d_ratio: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_to_coinbase_1d_ratio"),
        ),
        annualized_issuance: DateMap::new_bin(1, MapKind::Computed, &f("annualized_issuance")),
        // Version 2: the on-disk format for this map was bumped.
        inflation_rate: DateMap::new_bin(2, MapKind::Computed, &f("inflation_rate")),
        yearly_inflation_rate: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("yearly_inflation_rate"),
        ),
        blocks_mined_1d_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1d_target"),
        ),
        blocks_mined_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sma")),
        blocks_mined_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sma")),
        blocks_mined_1w_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sum")),
        blocks_mined_1m_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sum")),
        blocks_mined_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1y_sum")),
        blocks_mined_1w_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1w_target"),
        ),
        blocks_mined_1m_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1m_target"),
        ),
        blocks_mined_1y_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1y_target"),
        ),
        difficulty_adjustment: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("difficulty_adjustment"),
        ),
        // Note: block_size is the only recap that also tracks a daily sum.
        block_size_recap: RecapDataset::import(
            &f("block_size_1d"),
            RecapOptions::default()
                .add_sum()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        cumulative_block_size: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_block_size"),
        ),
        cumulative_block_size_gigabytes: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_block_size_gigabytes"),
        ),
        block_weight_recap: RecapDataset::import(
            &f("block_weight_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        block_vbytes_recap: RecapDataset::import(
            &f("block_vbytes_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        block_interval_recap: RecapDataset::import(
            &f("block_interval_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        hash_rate: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate")),
        hash_rate_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1w_sma")),
        hash_rate_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1m_sma")),
        hash_rate_2m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_2m_sma")),
        hash_price: DateMap::new_bin(1, MapKind::Computed, &f("hash_price")),
        hash_price_min: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_min")),
        hash_price_rebound: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_rebound")),
        puell_multiple: DateMap::new_bin(1, MapKind::Computed, &f("puell_multiple")),
    };
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));
    Ok(s)
}
/// Ingests one block's mining data: coinbase, summed fees, derived
/// subsidy (each in BTC and dollars), difficulty and block statistics.
/// On the date's last block, also writes the per-date aggregates and
/// the date's "last_*" snapshot values.
///
/// Fix: renamed the misspelled locals `sumed_fees` /
/// `sumed_fees_in_dollars` to `summed_fees` / `summed_fees_in_dollars`.
pub fn insert(
    &mut self,
    &InsertData {
        date_first_height,
        height,
        coinbase,
        fees,
        date_blocks_range,
        is_date_last_block,
        block_price,
        date,
        difficulty,
        block_size,
        block_vbytes,
        block_weight,
        block_interval,
        ..
    }: &InsertData,
) {
    self.coinbase.insert(height, coinbase.to_btc());
    let coinbase_in_dollars = self
        .coinbase_in_dollars
        .insert(height, (block_price * coinbase).to_dollar() as f32);
    // Fees arrive per transaction; aggregate them for the whole block.
    let summed_fees = Amount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum());
    self.fees.insert(height, summed_fees.to_btc());
    let summed_fees_in_dollars = self
        .fees_in_dollars
        .insert(height, (block_price * summed_fees).to_dollar() as f32);
    // Subsidy is the part of the coinbase that is not transaction fees.
    let subsidy = coinbase - summed_fees;
    self.subsidy.insert(height, subsidy.to_btc());
    let subsidy_in_dollars = self
        .subsidy_in_dollars
        .insert(height, (block_price * subsidy).to_dollar() as f32);
    self.difficulty.height.insert(height, difficulty);
    // Sizes are stored in MB.
    self.block_size
        .insert(height, block_size as f32 / BYTES_IN_MB as f32);
    self.block_weight
        .insert(height, block_weight as f32 / BYTES_IN_MB as f32);
    self.block_vbytes.insert(height, block_vbytes);
    self.block_interval.insert(height, *block_interval);
    if is_date_last_block {
        // Daily sums over the date's full block range.
        self.coinbase_1d_sum
            .insert(date, self.coinbase.sum_range(date_blocks_range));
        self.coinbase_in_dollars_1d_sum
            .insert(date, self.coinbase_in_dollars.sum_range(date_blocks_range));
        self.fees_1d_sum
            .insert(date, self.fees.sum_range(date_blocks_range));
        self.fees_in_dollars_1d_sum
            .insert(date, self.fees_in_dollars.sum_range(date_blocks_range));
        self.subsidy_1d_sum
            .insert(date, self.subsidy.sum_range(date_blocks_range));
        self.subsidy_in_dollars_1d_sum
            .insert(date, self.subsidy_in_dollars.sum_range(date_blocks_range));
        // Snapshot of the date's final block.
        self.last_coinbase.insert(date, coinbase.to_btc());
        self.last_coinbase_in_dollars
            .insert(date, coinbase_in_dollars);
        self.last_subsidy.insert(date, subsidy.to_btc());
        self.last_subsidy_in_dollars
            .insert(date, subsidy_in_dollars);
        self.last_fees.insert(date, summed_fees.to_btc());
        self.last_fees_in_dollars
            .insert(date, summed_fees_in_dollars);
        // Heights are 0-based, so height + 1 blocks exist in total.
        let total_blocks_mined = self.total_blocks_mined.insert(date, height.to_usize() + 1);
        self.blocks_mined
            .insert(date, total_blocks_mined - date_first_height.to_usize());
        self.difficulty.date.insert(date, difficulty);
    }
}
/// Derives every computed mining metric for the given heights and dates:
/// rolling sums/averages, cumulatives, inflation, hash rate and price,
/// Puell multiple, and per-date block-statistic recaps.
///
/// Fixes: removed two verbatim duplicated computations — a second
/// `cumulative_block_size.height.multi_insert_cumulative(...)` and a
/// second identical `puell_multiple.multi_insert_divide(...)` — which
/// redid the same work for no effect.
pub fn compute(
    &mut self,
    &ComputeData { heights, dates, .. }: &ComputeData,
    first_height: &mut DateMap<Height>,
    last_height: &mut DateMap<Height>,
) {
    // --- Rolling sums ---
    self.blocks_mined_1w_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_WEEK_IN_DAYS,
    );
    self.blocks_mined_1m_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_MONTH_IN_DAYS,
    );
    self.blocks_mined_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_YEAR_IN_DAYS,
    );
    self.subsidy_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.subsidy_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    // --- Cumulatives ---
    self.cumulative_subsidy
        .height
        .multi_insert_cumulative(heights, &mut self.subsidy);
    self.cumulative_subsidy
        .date
        .multi_insert_cumulative(dates, &mut self.subsidy_1d_sum);
    self.cumulative_subsidy_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.subsidy_in_dollars);
    self.cumulative_subsidy_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.subsidy_in_dollars_1d_sum);
    self.fees_1y_sum
        .multi_insert_last_x_sum(dates, &mut self.fees_1d_sum, ONE_YEAR_IN_DAYS);
    self.fees_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.fees_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.cumulative_fees
        .height
        .multi_insert_cumulative(heights, &mut self.fees);
    self.cumulative_fees
        .date
        .multi_insert_cumulative(dates, &mut self.fees_1d_sum);
    self.cumulative_fees_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.fees_in_dollars);
    self.cumulative_fees_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.fees_in_dollars_1d_sum);
    self.coinbase_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.coinbase_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.coinbase_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.coinbase_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.coinbase_in_dollars_1d_sum_1y_sma
        .multi_insert_simple_average(
            dates,
            &mut self.coinbase_in_dollars_1d_sum,
            ONE_YEAR_IN_DAYS,
        );
    self.cumulative_coinbase
        .height
        .multi_insert_cumulative(heights, &mut self.coinbase);
    self.cumulative_coinbase
        .date
        .multi_insert_cumulative(dates, &mut self.coinbase_1d_sum);
    self.cumulative_coinbase_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.coinbase_in_dollars);
    self.cumulative_coinbase_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.coinbase_in_dollars_1d_sum);
    // --- Reward composition ratios ---
    self.subsidy_to_coinbase_ratio.multi_insert_percentage(
        heights,
        &mut self.subsidy,
        &mut self.coinbase,
    );
    self.subsidy_to_coinbase_1d_ratio.multi_insert_percentage(
        dates,
        &mut self.subsidy_1d_sum,
        &mut self.coinbase_1d_sum,
    );
    self.fees_to_coinbase_ratio.multi_insert_percentage(
        heights,
        &mut self.fees,
        &mut self.coinbase,
    );
    self.fees_to_coinbase_1d_ratio.multi_insert_percentage(
        dates,
        &mut self.fees_1d_sum,
        &mut self.coinbase_1d_sum,
    );
    // --- Issuance / inflation ---
    self.annualized_issuance.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    // Instantaneous rate: the day's subsidy annualized over current supply.
    self.inflation_rate.multi_insert_simple_transform(
        dates,
        &mut self.subsidy_1d_sum,
        |subsidy_1d_sum, date| {
            subsidy_1d_sum * ONE_YEAR_IN_DAYS as f64
                / self.cumulative_subsidy.date.get_or_import(date).unwrap()
                * 100.0
        },
    );
    self.yearly_inflation_rate.multi_insert_percentage(
        dates,
        &mut self.annualized_issuance,
        &mut self.cumulative_subsidy.date,
    );
    // --- Block production vs. protocol targets ---
    self.blocks_mined_1d_target
        .multi_insert_const(dates, TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1w_target
        .multi_insert_const(dates, ONE_WEEK_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1m_target
        .multi_insert_const(dates, ONE_MONTH_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1y_target
        .multi_insert_const(dates, ONE_YEAR_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1w_sma.multi_insert_simple_average(
        dates,
        &mut self.blocks_mined,
        ONE_WEEK_IN_DAYS,
    );
    self.blocks_mined_1m_sma.multi_insert_simple_average(
        dates,
        &mut self.blocks_mined,
        ONE_MONTH_IN_DAYS,
    );
    // --- Chain size ---
    self.cumulative_block_size
        .height
        .multi_insert_cumulative(heights, &mut self.block_size);
    self.cumulative_block_size.date.multi_insert_last(
        dates,
        &mut self.cumulative_block_size.height,
        last_height,
    );
    self.cumulative_block_size_gigabytes
        .multi_insert_simple_transform(heights, dates, &mut self.cumulative_block_size, &|v| {
            v / 1000.0
        });
    // --- Hash rate / hash price ---
    // https://hashrateindex.com/blog/what-is-bitcoins-hashrate/
    self.hash_rate.multi_insert(dates, |date| {
        let blocks_mined = self.blocks_mined.get_or_import(date).unwrap();
        let difficulty = self.difficulty.date.get_or_import(date).unwrap();
        (blocks_mined as f64 / (date.get_day_completion() * TARGET_BLOCKS_PER_DAY as f64)
            * difficulty
            * 2.0_f64.powi(32))
            / 600.0
            / 1_000_000_000_000_000_000.0
    });
    self.hash_rate_1w_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        ONE_WEEK_IN_DAYS,
    );
    self.hash_rate_1m_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        ONE_MONTH_IN_DAYS,
    );
    self.hash_rate_2m_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        2 * ONE_MONTH_IN_DAYS,
    );
    self.hash_price.multi_insert(dates, |date| {
        let coinbase_in_dollars = self.coinbase_in_dollars_1d_sum.get_or_import(date).unwrap();
        let hash_rate = self.hash_rate.get_or_import(date).unwrap();
        coinbase_in_dollars as f64 / hash_rate / 1_000.0
    });
    self.hash_price_min
        .multi_insert_min(dates, &mut self.hash_price, 0.0);
    self.hash_price_rebound.multi_insert_percentage(
        dates,
        &mut self.hash_price,
        &mut self.hash_price_min,
    );
    // Puell multiple: daily issuance value over its one-year average.
    self.puell_multiple.multi_insert_divide(
        dates,
        &mut self.coinbase_in_dollars_1d_sum,
        &mut self.coinbase_in_dollars_1d_sum_1y_sma,
    );
    self.difficulty_adjustment.multi_insert_percentage_change(
        dates,
        &mut self.difficulty.date,
        ONE_DAY_IN_DAYS,
    );
    // --- Per-date recaps over the date's block range ---
    dates.iter().for_each(|date| {
        let first = first_height.get_or_import(date).unwrap();
        let last = last_height.get_or_import(date).unwrap();
        // f32 values are wrapped in OrderedFloat so they can be ranked.
        self.block_size_recap.compute(
            *date,
            &mut self
                .block_size
                .get_or_import_range_inclusive(first, last)
                .into_iter()
                .map(OrderedFloat)
                .collect_vec(),
        );
        self.block_weight_recap.compute(
            *date,
            &mut self
                .block_weight
                .get_or_import_range_inclusive(first, last)
                .into_iter()
                .map(OrderedFloat)
                .collect_vec(),
        );
        self.block_vbytes_recap.compute(
            *date,
            &mut self.block_vbytes.get_or_import_range_inclusive(first, last),
        );
        self.block_interval_recap.compute(
            *date,
            &mut self
                .block_interval
                .get_or_import_range_inclusive(first, last),
        );
    })
}
}
impl AnyDataset for MiningDataset {
    /// Minimum initial states required before this dataset can resume work.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,347 +0,0 @@
use std::{collections::BTreeMap, ops::RangeInclusive};
use allocative::Allocative;
use itertools::Itertools;
use log::info;
use rayon::prelude::*;
mod _traits;
mod address;
mod block_metadata;
mod coindays;
mod cointime;
mod constant;
mod date_metadata;
mod mining;
mod price;
mod subs;
mod transaction;
mod utxo;
pub use _traits::*;
pub use address::*;
pub use block_metadata::*;
pub use coindays::*;
pub use cointime::*;
pub use constant::*;
pub use date_metadata::*;
pub use mining::*;
pub use price::*;
use serde_json::Value;
pub use subs::*;
pub use transaction::*;
pub use utxo::*;
use crate::{
io::Json,
parser::{
databases::Databases,
states::{
AddressCohortsInputStates,
AddressCohortsOneShotStates,
AddressCohortsRealizedStates,
States,
UTXOCohortsOneShotStates,
// UTXOCohortsReceivedStates,
UTXOCohortsSentStates,
},
},
structs::{Amount, Config, Date, Height, Price, Timestamp},
};
/// Everything a dataset's `insert` method may need for one block:
/// the block's own data, per-date context, and references to the shared
/// parser states and databases.
pub struct InsertData<'a> {
    pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>,
    pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>,
    pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>,
    pub amount_sent: Amount,
    // Time since the previous block (used as seconds by consumers).
    pub block_interval: Timestamp,
    pub block_price: Price,
    pub block_size: usize,
    pub block_vbytes: u64,
    pub block_weight: u64,
    pub coinbase: Amount,
    // Whether address-level datasets should be fed this block.
    pub compute_addresses: bool,
    pub databases: &'a Databases,
    pub date: Date,
    // Inclusive range of block heights belonging to `date`.
    pub date_blocks_range: &'a RangeInclusive<u32>,
    pub date_first_height: Height,
    pub difficulty: f64,
    // Per-transaction fees of the block.
    pub fees: &'a Vec<Amount>,
    pub height: Height,
    // True only on the final block of `date` — gates daily aggregates.
    pub is_date_last_block: bool,
    pub satblocks_destroyed: Amount,
    pub satdays_destroyed: Amount,
    pub states: &'a States,
    pub timestamp: Timestamp,
    pub transaction_count: usize,
    pub utxo_cohorts_one_shot_states: &'a UTXOCohortsOneShotStates,
    // pub utxo_cohorts_received_states: &'a UTXOCohortsReceivedStates,
    pub utxo_cohorts_sent_states: &'a UTXOCohortsSentStates,
}
/// Key ranges handed to `compute` passes: the heights and dates for which
/// derived maps should be (re)generated.
pub struct ComputeData<'a> {
    pub heights: &'a [Height],
    pub dates: &'a [Date],
}
/// Root collection of every dataset, grouped by domain.
#[derive(Allocative)]
pub struct Datasets {
    // Aggregated minimum initial state across all children; used by
    // `export` and recomputed after each export.
    min_initial_states: MinInitialStates,
    pub constant: ConstantDataset,
    pub address: AddressDatasets,
    pub block_metadata: BlockMetadataDataset,
    pub coindays: CoindaysDataset,
    pub cointime: CointimeDataset,
    pub date_metadata: DateMetadataDataset,
    pub mining: MiningDataset,
    pub price: PriceDatasets,
    pub transaction: TransactionDataset,
    pub utxo: UTXODatasets,
}
impl Datasets {
    /// Imports every dataset from disk (each `import` reads its own files)
    /// and then computes the aggregated minimum initial states.
    ///
    /// # Errors
    /// Propagates the first child import failure.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let price = PriceDatasets::import(config)?;
        let constant = ConstantDataset::import(config)?;
        let date_metadata = DateMetadataDataset::import(config)?;
        let cointime = CointimeDataset::import(config)?;
        let coindays = CoindaysDataset::import(config)?;
        let mining = MiningDataset::import(config)?;
        let block_metadata = BlockMetadataDataset::import(config)?;
        let transaction = TransactionDataset::import(config)?;
        let address = AddressDatasets::import(config)?;
        let utxo = UTXODatasets::import(config)?;
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            address,
            block_metadata,
            cointime,
            coindays,
            constant,
            date_metadata,
            price,
            mining,
            transaction,
            utxo,
        };
        // Must run after all children exist: it aggregates their states.
        s.set_initial_states(config);
        info!("Imported datasets");
        Ok(s)
    }
fn set_initial_states(&mut self, config: &Config) {
self.min_initial_states
.consume(MinInitialStates::compute_from_datasets(self, config));
}
pub fn insert(&mut self, insert_data: InsertData) {
if insert_data.compute_addresses {
self.address.insert(&insert_data);
}
self.utxo.insert(&insert_data);
if self
.block_metadata
.needs_insert(insert_data.height, insert_data.date)
{
self.block_metadata.insert(&insert_data);
}
if self
.date_metadata
.needs_insert(insert_data.height, insert_data.date)
{
self.date_metadata.insert(&insert_data);
}
if self
.coindays
.needs_insert(insert_data.height, insert_data.date)
{
self.coindays.insert(&insert_data);
}
if self
.mining
.needs_insert(insert_data.height, insert_data.date)
{
self.mining.insert(&insert_data);
}
if self
.transaction
.needs_insert(insert_data.height, insert_data.date)
{
self.transaction.insert(&insert_data);
}
if self
.cointime
.needs_insert(insert_data.height, insert_data.date)
{
self.cointime.insert(&insert_data);
}
}
    /// Runs every derived (computed) pass, wiring maps produced by one
    /// dataset into the inputs of the next.
    ///
    /// Statement order is significant: e.g. `price.compute` borrows
    /// `mining.cumulative_subsidy`, and `cointime` runs last because it
    /// consumes maps from almost every other dataset.
    pub fn compute(&mut self, compute_data: ComputeData) {
        if self.constant.should_compute(&compute_data) {
            self.constant.compute(&compute_data);
        }
        if self.mining.should_compute(&compute_data) {
            self.mining.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
            );
        }
        // No compute needed for now
        // NOTE(review): the comment above predates this call — presumably it
        // refers to block/date metadata; price *is* computed right here.
        self.price
            .compute(&compute_data, &mut self.mining.cumulative_subsidy);
        self.address.compute(
            &compute_data,
            &mut self.price.close,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );
        self.utxo.compute(
            &compute_data,
            &mut self.price.close,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );
        if self.transaction.should_compute(&compute_data) {
            self.transaction.compute(
                &compute_data,
                &mut self.mining.cumulative_subsidy,
                &mut self.mining.block_interval,
            );
        }
        if self.cointime.should_compute(&compute_data) {
            self.cointime.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
                &mut self.price.close,
                &mut self.mining.cumulative_subsidy,
                &mut self.address.cohorts.all.subs.capitalization.realized_cap,
                &mut self.address.cohorts.all.subs.capitalization.realized_price,
                &mut self.mining.inflation_rate,
                &mut self.mining.yearly_inflation_rate,
                &mut self.transaction.annualized_volume,
                &mut self.mining.cumulative_subsidy_in_dollars,
            );
        }
    }
    /// Pre-exports, exports (in parallel), and post-exports every dataset,
    /// then refreshes the aggregated minimum initial states.
    ///
    /// When `is_new` holds, the last value of every map that declares a
    /// `path_last` is additionally collected and written as one JSON file.
    ///
    /// # Errors
    /// Propagates the first failing dataset export or the JSON write.
    pub fn export(&mut self, config: &Config, height: Height) -> color_eyre::Result<()> {
        // NOTE(review): `is_new` is true when no minimum last height exists
        // yet, or when it hasn't moved past `height` — presumably "this
        // export contains fresh data"; confirm `min_last_height` semantics.
        let is_new = self
            .min_initial_states
            .min_last_height()
            .map_or(true, |last| last <= height);
        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| dataset.pre_export());
        // The actual disk writes are parallelized with rayon.
        self.to_any_dataset_vec()
            .into_par_iter()
            .try_for_each(|dataset| -> color_eyre::Result<()> { dataset.export() })?;
        let mut path_to_last: BTreeMap<String, Value> = BTreeMap::default();
        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| {
                dataset.post_export();
                if is_new {
                    dataset.to_all_map_vec().iter().for_each(|map| {
                        if map.path_last().is_some() {
                            if let Some(last_value) = map.last_value() {
                                path_to_last.insert(map.id(config), last_value);
                            }
                        }
                    });
                }
            });
        if is_new {
            Json::export(&config.path_datasets_last_values(), &path_to_last)?;
        }
        // Exporting advanced the on-disk state; recompute the aggregate.
        self.set_initial_states(config);
        Ok(())
    }
}
impl AnyDatasets for Datasets {
    /// Aggregated minimum initial states across every child dataset.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Flat, read-only view over every child dataset, preserving the
    /// original grouped ordering.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut all: Vec<&(dyn AnyDataset + Send + Sync)> =
            vec![&self.price, &self.constant];
        all.extend(self.address.to_any_dataset_vec());
        all.extend(self.utxo.to_any_dataset_vec());
        all.push(&self.mining);
        all.push(&self.transaction);
        all.push(&self.block_metadata);
        all.push(&self.date_metadata);
        all.push(&self.cointime);
        all.push(&self.coindays);
        all
    }

    /// Flat, mutable view over every child dataset (same order as above).
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut all: Vec<&mut dyn AnyDataset> =
            vec![&mut self.price, &mut self.constant];
        all.extend(self.address.to_mut_any_dataset_vec());
        all.extend(self.utxo.to_mut_any_dataset_vec());
        all.push(&mut self.mining);
        all.push(&mut self.transaction);
        all.push(&mut self.block_metadata);
        all.push(&mut self.date_metadata);
        all.push(&mut self.cointime);
        all.push(&mut self.coindays);
        all
    }
}

View File

@@ -1,733 +0,0 @@
use std::collections::BTreeMap;
use allocative::Allocative;
use chrono::Days;
use color_eyre::eyre::Error;
use struct_iterable::Iterable;
use crate::{
parser::price::{Binance, Kibo, Kraken},
structs::{
Amount, BiMap, Config, Date, DateMap, DateMapChunkId, Height, HeightMapChunkId, MapKey,
MapKind, Timestamp, OHLC,
},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{AnyDataset, ComputeData, MinInitialStates, RatioDataset};
/// Every price-related map: raw OHLC candles, derived SMAs and their
/// ratios, total/compound returns, and all-time-high statistics.
#[derive(Allocative, Iterable)]
pub struct PriceDatasets {
    min_initial_states: MinInitialStates,
    // In-memory caches of external price sources, fetched lazily and
    // refreshed when a lookup goes past their last entry.
    kraken_daily: Option<BTreeMap<Date, OHLC>>,
    kraken_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_daily: Option<BTreeMap<Date, OHLC>>,
    // Manually exported HAR fallback — see the panic message in
    // `get_height_ohlc` for the export procedure.
    binance_har: Option<BTreeMap<u32, OHLC>>,
    kibo_by_height: BTreeMap<HeightMapChunkId, Vec<OHLC>>,
    kibo_by_date: BTreeMap<DateMapChunkId, BTreeMap<Date, OHLC>>,
    // The only inserted map; everything below is computed from it.
    pub ohlc: BiMap<OHLC>,
    pub open: BiMap<f32>,
    pub high: BiMap<f32>,
    pub low: BiMap<f32>,
    pub close: BiMap<f32>,
    pub market_cap: BiMap<f32>,
    pub price_1w_sma: BiMap<f32>,
    pub price_1w_sma_ratio: RatioDataset,
    pub price_1m_sma: BiMap<f32>,
    pub price_1m_sma_ratio: RatioDataset,
    pub price_1y_sma: BiMap<f32>,
    pub price_1y_sma_ratio: RatioDataset,
    pub price_2y_sma: BiMap<f32>,
    pub price_2y_sma_ratio: RatioDataset,
    pub price_4y_sma: BiMap<f32>,
    pub price_4y_sma_ratio: RatioDataset,
    // 8/13/21/34/55/89/144: Fibonacci-length SMAs.
    pub price_8d_sma: BiMap<f32>,
    pub price_8d_sma_ratio: RatioDataset,
    pub price_13d_sma: BiMap<f32>,
    pub price_13d_sma_ratio: RatioDataset,
    pub price_21d_sma: BiMap<f32>,
    pub price_21d_sma_ratio: RatioDataset,
    pub price_34d_sma: BiMap<f32>,
    pub price_34d_sma_ratio: RatioDataset,
    pub price_55d_sma: BiMap<f32>,
    pub price_55d_sma_ratio: RatioDataset,
    pub price_89d_sma: BiMap<f32>,
    pub price_89d_sma_ratio: RatioDataset,
    pub price_144d_sma: BiMap<f32>,
    pub price_144d_sma_ratio: RatioDataset,
    pub price_200w_sma: BiMap<f32>,
    pub price_200w_sma_ratio: RatioDataset,
    pub price_1d_total_return: DateMap<f32>,
    pub price_1m_total_return: DateMap<f32>,
    pub price_6m_total_return: DateMap<f32>,
    pub price_1y_total_return: DateMap<f32>,
    pub price_2y_total_return: DateMap<f32>,
    pub price_3y_total_return: DateMap<f32>,
    pub price_4y_total_return: DateMap<f32>,
    pub price_6y_total_return: DateMap<f32>,
    pub price_8y_total_return: DateMap<f32>,
    pub price_10y_total_return: DateMap<f32>,
    pub price_4y_compound_return: DateMap<f32>,
    // projection via lowest 4y compound value
    pub all_time_high: BiMap<f32>,
    pub all_time_high_date: DateMap<Date>,
    pub days_since_all_time_high: DateMap<u32>,
    pub max_days_between_all_time_highs: DateMap<u32>,
    pub max_years_between_all_time_highs: DateMap<f32>,
    pub market_price_to_all_time_high_ratio: BiMap<f32>,
    pub drawdown: BiMap<f32>,
    pub sats_per_dollar: BiMap<f32>,
    // volatility
}
impl PriceDatasets {
    /// Opens (or creates) every price map under the configured datasets
    /// path, then computes this dataset's minimum initial states.
    ///
    /// The source caches start empty (`None`/empty maps) and are filled
    /// lazily by the fetch helpers below.
    ///
    /// # Errors
    /// Propagates ratio-dataset import failures.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let path_dataset = config.path_datasets();
        // Shorthand: path of an individual map inside the datasets folder.
        let f = |s: &str| path_dataset.join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            binance_1mn: None,
            binance_daily: None,
            binance_har: None,
            kraken_1mn: None,
            kraken_daily: None,
            kibo_by_height: BTreeMap::default(),
            kibo_by_date: BTreeMap::default(),
            // ---
            // Inserted
            // ---
            ohlc: BiMap::new_json(1, MapKind::Inserted, &config.path_price()),
            // ---
            // Computed
            // ---
            open: BiMap::new_bin(1, MapKind::Computed, &f("open")),
            high: BiMap::new_bin(1, MapKind::Computed, &f("high")),
            low: BiMap::new_bin(1, MapKind::Computed, &f("low")),
            close: BiMap::new_bin(1, MapKind::Computed, &f("close")),
            market_cap: BiMap::new_bin(1, MapKind::Computed, &f("market_cap")),
            price_1w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1w_sma")),
            price_1w_sma_ratio: RatioDataset::import(&path_dataset, "price_1w_sma", config)?,
            price_1m_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1m_sma")),
            price_1m_sma_ratio: RatioDataset::import(&path_dataset, "price_1m_sma", config)?,
            price_1y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1y_sma")),
            price_1y_sma_ratio: RatioDataset::import(&path_dataset, "price_1y_sma", config)?,
            price_2y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_2y_sma")),
            price_2y_sma_ratio: RatioDataset::import(&path_dataset, "price_2y_sma", config)?,
            price_4y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_4y_sma")),
            price_4y_sma_ratio: RatioDataset::import(&path_dataset, "price_4y_sma", config)?,
            price_8d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_8d_sma")),
            price_8d_sma_ratio: RatioDataset::import(&path_dataset, "price_8d_sma", config)?,
            price_13d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_13d_sma")),
            price_13d_sma_ratio: RatioDataset::import(&path_dataset, "price_13d_sma", config)?,
            price_21d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_21d_sma")),
            price_21d_sma_ratio: RatioDataset::import(&path_dataset, "price_21d_sma", config)?,
            price_34d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_34d_sma")),
            price_34d_sma_ratio: RatioDataset::import(&path_dataset, "price_34d_sma", config)?,
            price_55d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_55d_sma")),
            price_55d_sma_ratio: RatioDataset::import(&path_dataset, "price_55d_sma", config)?,
            price_89d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_89d_sma")),
            price_89d_sma_ratio: RatioDataset::import(&path_dataset, "price_89d_sma", config)?,
            price_144d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_144d_sma")),
            price_144d_sma_ratio: RatioDataset::import(&path_dataset, "price_144d_sma", config)?,
            price_200w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_200w_sma")),
            price_200w_sma_ratio: RatioDataset::import(&path_dataset, "price_200w_sma", config)?,
            price_1d_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1d_total_return"),
            ),
            price_1m_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1m_total_return"),
            ),
            price_6m_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_6m_total_return"),
            ),
            price_1y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1y_total_return"),
            ),
            price_2y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_2y_total_return"),
            ),
            price_3y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_3y_total_return"),
            ),
            price_4y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_4y_total_return"),
            ),
            price_6y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_6y_total_return"),
            ),
            price_8y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_8y_total_return"),
            ),
            price_10y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_10y_total_return"),
            ),
            price_4y_compound_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_4y_compound_return"),
            ),
            all_time_high: BiMap::new_bin(1, MapKind::Computed, &f("all_time_high")),
            all_time_high_date: DateMap::new_bin(1, MapKind::Computed, &f("all_time_high_date")),
            days_since_all_time_high: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("days_since_all_time_high"),
            ),
            max_days_between_all_time_highs: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("max_days_between_all_time_highs"),
            ),
            // NOTE(review): version 2 while siblings are 1 — presumably a
            // deliberate format bump for this map; confirm.
            max_years_between_all_time_highs: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("max_years_between_all_time_highs"),
            ),
            market_price_to_all_time_high_ratio: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("market_price_to_all_time_high_ratio"),
            ),
            drawdown: BiMap::new_bin(1, MapKind::Computed, &f("drawdown")),
            sats_per_dollar: BiMap::new_bin(1, MapKind::Computed, &f("sats_per_dollar")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Derives every computed price map from the inserted OHLC candles.
    ///
    /// Order is significant: O/H/L/C are split out of `ohlc` first, then
    /// SMAs/returns/ratios/ATH stats are derived from `close` and `high`.
    pub fn compute(&mut self, compute_data: &ComputeData, circulating_supply: &mut BiMap<f64>) {
        let &ComputeData { dates, heights, .. } = compute_data;
        // Split the OHLC candles into four scalar maps.
        self.open
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.open);
        self.high
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.high);
        self.low
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.low);
        self.close
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.close);
        // market cap = close * circulating supply
        self.market_cap
            .multi_insert_multiply(heights, dates, &mut self.close, circulating_supply);
        // Simple moving averages over various windows (in days).
        self.price_1w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_WEEK_IN_DAYS,
        );
        self.price_1m_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_MONTH_IN_DAYS,
        );
        self.price_1y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_YEAR_IN_DAYS,
        );
        self.price_2y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            2 * ONE_YEAR_IN_DAYS,
        );
        self.price_4y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            4 * ONE_YEAR_IN_DAYS,
        );
        self.price_8d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 8);
        self.price_13d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 13);
        self.price_21d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 21);
        self.price_34d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 34);
        self.price_55d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 55);
        self.price_89d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 89);
        self.price_144d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 144);
        self.price_200w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            200 * ONE_WEEK_IN_DAYS,
        );
        // Total returns: percentage change of close over the given span.
        self.price_1d_total_return
            .multi_insert_percentage_change(dates, &mut self.close.date, 1);
        self.price_1m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            ONE_MONTH_IN_DAYS,
        );
        self.price_6m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            6 * ONE_MONTH_IN_DAYS,
        );
        self.price_1y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            ONE_YEAR_IN_DAYS,
        );
        self.price_2y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            2 * ONE_YEAR_IN_DAYS,
        );
        self.price_3y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            3 * ONE_YEAR_IN_DAYS,
        );
        self.price_4y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            4 * ONE_YEAR_IN_DAYS,
        );
        self.price_6y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            6 * ONE_YEAR_IN_DAYS,
        );
        self.price_8y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            8 * ONE_YEAR_IN_DAYS,
        );
        self.price_10y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            10 * ONE_YEAR_IN_DAYS,
        );
        // Annualized 4-year compound return:
        // ((close_now / close_4y_ago)^(1/4) - 1) * 100.
        self.price_4y_compound_return
            .multi_insert_complex_transform(
                dates,
                &mut self.close.date,
                |(last_value, date, closes, _)| {
                    let previous_value = date
                        .checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64))
                        .and_then(|date| closes.get_or_import(&Date::wrap(date)))
                        .unwrap_or_default();
                    (((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0
                },
            );
        // Ratio datasets of close vs. each SMA.
        self.price_1w_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1w_sma);
        self.price_1m_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1m_sma);
        self.price_1y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1y_sma);
        self.price_2y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_2y_sma);
        self.price_4y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_4y_sma);
        self.price_8d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_8d_sma);
        self.price_13d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_13d_sma);
        self.price_21d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_21d_sma);
        self.price_34d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_34d_sma);
        self.price_55d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_55d_sma);
        self.price_89d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_89d_sma);
        self.price_144d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_144d_sma);
        self.price_200w_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_200w_sma);
        // All-time-high bookkeeping (running max of `high`).
        self.all_time_high
            .multi_insert_max(heights, dates, &mut self.high);
        self.market_price_to_all_time_high_ratio
            .multi_insert_percentage(heights, dates, &mut self.close, &mut self.all_time_high);
        // Date of the most recent ATH: today if today's high *is* the ATH,
        // otherwise carried forward from the previous day.
        self.all_time_high_date.multi_insert_complex_transform(
            dates,
            &mut self.all_time_high.date,
            |(value, date, _, map)| {
                let high = self.high.date.get_or_import(date).unwrap();
                let is_ath = high == value;
                if is_ath {
                    *date
                } else {
                    let previous_date = date.checked_sub(1).unwrap();
                    *map.get_or_import(&previous_date).as_ref().unwrap_or(date)
                }
            },
        );
        self.days_since_all_time_high.multi_insert_simple_transform(
            dates,
            &mut self.all_time_high_date,
            |value, key| key.difference_in_days_between(value),
        );
        self.max_days_between_all_time_highs
            .multi_insert_max(dates, &mut self.days_since_all_time_high);
        self.max_years_between_all_time_highs
            .multi_insert_simple_transform(
                dates,
                &mut self.max_days_between_all_time_highs,
                |days, _| (days as f64 / ONE_YEAR_IN_DAYS as f64) as f32,
            );
        // drawdown = -(100 - close/ATH%), i.e. percent below the ATH.
        self.drawdown.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.market_price_to_all_time_high_ratio,
            &|v| -(100.0 - v),
        );
        self.sats_per_dollar.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.close,
            &|price| Amount::ONE_BTC_F32 / price,
        );
    }
pub fn get_date_ohlc(&mut self, date: Date) -> color_eyre::Result<OHLC> {
if self.ohlc.date.is_key_safe(date) {
Ok(self.ohlc.date.get_or_import(&date).unwrap().to_owned())
} else {
let ohlc = self
.get_from_daily_kraken(&date)
.or_else(|_| self.get_from_daily_binance(&date))
.or_else(|_| self.get_from_date_kibo(&date))?;
self.ohlc.date.insert(date, ohlc);
Ok(ohlc)
}
}
fn get_from_date_kibo(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
let chunk_id = date.to_chunk_id();
#[allow(clippy::map_entry)]
if !self.kibo_by_date.contains_key(&chunk_id)
|| self
.kibo_by_date
.get(&chunk_id)
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.kibo_by_date
.insert(chunk_id, Kibo::fetch_date_prices(chunk_id)?);
}
self.kibo_by_date
.get(&chunk_id)
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date in satonomics"))
}
fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
if self.kraken_daily.is_none()
|| self
.kraken_daily
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.kraken_daily.replace(Kraken::fetch_daily_prices()?);
}
self.kraken_daily
.as_ref()
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date"))
}
fn get_from_daily_binance(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
if self.binance_daily.is_none()
|| self
.binance_daily
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.binance_daily.replace(Binance::fetch_daily_prices()?);
}
self.binance_daily
.as_ref()
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date"))
}
    /// Returns the OHLC of the block at `height`, building it from
    /// 1-minute candles when it isn't already stored.
    ///
    /// Fallback order: Kraken 1mn → Binance 1mn → Binance HAR export →
    /// Kibo by height. Exhausting all sources panics with a how-to-fix
    /// message (the parser can't proceed without a price).
    pub fn get_height_ohlc(
        &mut self,
        height: Height,
        timestamp: Timestamp,
        previous_timestamp: Option<Timestamp>,
        config: &Config,
    ) -> color_eyre::Result<OHLC> {
        // Fast path: already stored/importable.
        if let Some(ohlc) = self.ohlc.height.get_or_import(&height) {
            return Ok(ohlc);
        }
        let timestamp = timestamp.to_floored_seconds();
        // Only the genesis block may lack a predecessor.
        if previous_timestamp.is_none() && !height.is_first() {
            panic!("Shouldn't be possible");
        }
        let previous_timestamp = previous_timestamp.map(|t| t.to_floored_seconds());
        let ohlc = self
            .get_from_1mn_kraken(timestamp, previous_timestamp)
            .unwrap_or_else(|_| {
                self.get_from_1mn_binance(timestamp, previous_timestamp)
                    .unwrap_or_else(|_| {
                        self.get_from_har_binance(timestamp, previous_timestamp, config)
                            .unwrap_or_else(|_| {
                                self.get_from_height_kibo(&height).unwrap_or_else(|_| {
                                    let date = timestamp.to_date();
                                    panic!(
                                        "Can't find the price for: height: {height} - date: {date}
1mn APIs are limited to the last 16 hours for Binance's and the last 10 hours for Kraken's
How to fix this:
1. Go to https://www.binance.com/en/trade/BTC_USDT?type=spot
2. Select 1mn interval
3. Open the inspector/dev tools
4. Go to the Network Tab
5. Filter URLs by 'uiKlines'
6. Go back to the chart and scroll until you pass the date mentioned few lines ago
7. Go back to the dev tools
8. Export to a har file (if there is no explicit button, click on the cog button)
9. Move the file to 'parser/imports/binance.har'
"
                                    )
                                })
                            })
                    })
            });
        self.ohlc.height.insert(height, ohlc);
        Ok(ohlc)
    }
fn get_from_height_kibo(&mut self, height: &Height) -> color_eyre::Result<OHLC> {
let chunk_id = height.to_chunk_id();
#[allow(clippy::map_entry)]
if !self.kibo_by_height.contains_key(&chunk_id)
|| ((chunk_id.to_usize() + self.kibo_by_height.get(&chunk_id).unwrap().len())
<= height.to_usize())
{
self.kibo_by_height
.insert(chunk_id, Kibo::fetch_height_prices(chunk_id)?);
}
self.kibo_by_height
.get(&chunk_id)
.unwrap()
.get(height.to_serialized_key().to_usize())
.cloned()
.ok_or(Error::msg("Couldn't find height in kibo"))
}
fn get_from_1mn_kraken(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
if self.kraken_1mn.is_none()
|| self
.kraken_1mn
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
<= &timestamp
{
self.kraken_1mn.replace(Kraken::fetch_1mn_prices()?);
}
Self::find_height_ohlc(&self.kraken_1mn, timestamp, previous_timestamp, "kraken 1m")
}
fn get_from_1mn_binance(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
if self.binance_1mn.is_none()
|| self
.binance_1mn
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
<= &timestamp
{
self.binance_1mn.replace(Binance::fetch_1mn_prices()?);
}
Self::find_height_ohlc(
&self.binance_1mn,
timestamp,
previous_timestamp,
"binance 1m",
)
}
fn get_from_har_binance(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
config: &Config,
) -> color_eyre::Result<OHLC> {
if self.binance_har.is_none() {
self.binance_har
.replace(Binance::read_har_file(config).unwrap_or_default());
}
Self::find_height_ohlc(
&self.binance_har,
timestamp,
previous_timestamp,
"binance har",
)
}
fn find_height_ohlc(
tree: &Option<BTreeMap<u32, OHLC>>,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
name: &str,
) -> color_eyre::Result<OHLC> {
let tree = tree.as_ref().unwrap();
let err = Error::msg(format!("Couldn't find timestamp in {name}"));
let previous_ohlc = previous_timestamp
.map_or(Some(OHLC::default()), |previous_timestamp| {
tree.get(&previous_timestamp).cloned()
});
let last_ohlc = tree.get(&timestamp);
if previous_ohlc.is_none() || last_ohlc.is_none() {
return Err(err);
}
let previous_ohlc = previous_ohlc.unwrap();
let mut final_ohlc = OHLC {
open: previous_ohlc.close,
high: previous_ohlc.close,
low: previous_ohlc.close,
close: previous_ohlc.close,
};
let start = previous_timestamp.unwrap_or_default();
let end = timestamp;
// Otherwise it's a re-org
if start < end {
tree.range(&*start..=&*end).skip(1).for_each(|(_, ohlc)| {
if ohlc.high > final_ohlc.high {
final_ohlc.high = ohlc.high
}
if ohlc.low < final_ohlc.low {
final_ohlc.low = ohlc.low
}
final_ohlc.close = ohlc.close;
});
}
Ok(final_ohlc)
}
}
// Hooks PriceDatasets into the shared dataset machinery.
impl AnyDataset for PriceDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,123 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::CapitalizationState,
},
structs::{BiMap, Config, MapKind, MapPath},
utils::ONE_MONTH_IN_DAYS,
};
use super::RatioDataset;
/// Per-cohort realized capitalization: inserted realized cap plus its
/// derived price, monthly net change, and price ratio.
#[derive(Allocative, Iterable)]
pub struct CapitalizationDataset {
    min_initial_states: MinInitialStates,
    // Inserted from parser state (USD).
    pub realized_cap: BiMap<f32>,
    // Computed: realized_cap / cohort supply.
    pub realized_price: BiMap<f32>,
    realized_cap_1m_net_change: BiMap<f32>,
    realized_price_ratio: RatioDataset,
}
impl CapitalizationDataset {
    /// Opens (or creates) the capitalization maps under `path`, optionally
    /// namespaced by a cohort `name` (`{name}/{map}` and `{name}-` prefixes).
    ///
    /// # Errors
    /// Propagates the ratio-dataset import failure.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        // Map path helper: nest under the cohort folder when named.
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            realized_cap: BiMap::new_bin(1, MapKind::Inserted, &f("realized_cap")),
            // ---
            // Computed
            // ---
            realized_cap_1m_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("realized_cap_1m_net_change"),
            ),
            realized_price: BiMap::new_bin(1, MapKind::Computed, &f("realized_price")),
            realized_price_ratio: RatioDataset::import(
                path,
                &format!(
                    "{}realized_price",
                    name.as_ref().map_or("".to_owned(), |n| format!("{n}-"))
                ),
                config,
            )?,
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
pub fn insert(
&mut self,
&InsertData {
height,
date,
is_date_last_block,
..
}: &InsertData,
state: &CapitalizationState,
) {
let realized_cap = self
.realized_cap
.height
.insert(height, state.realized_cap().to_dollar() as f32);
if is_date_last_block {
self.realized_cap.date.insert(date, realized_cap);
}
}
    /// Derives the computed maps from the inserted realized cap:
    /// realized price (cap / supply), its 1-month net change, and the
    /// close-vs-realized-price ratio dataset.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        cohort_supply: &mut BiMap<f64>,
    ) {
        let &ComputeData { heights, dates, .. } = compute_data;
        // realized_price must be filled before the ratio below reads it.
        self.realized_price.multi_insert_divide(
            heights,
            dates,
            &mut self.realized_cap,
            cohort_supply,
        );
        self.realized_cap_1m_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.realized_cap,
            ONE_MONTH_IN_DAYS,
        );
        self.realized_price_ratio
            .compute(compute_data, closes, &mut self.realized_price);
    }
}
// Hooks CapitalizationDataset into the shared dataset machinery.
impl AnyDataset for CapitalizationDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,86 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::InputState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath},
};
/// Per-cohort transaction-input statistics (count and volume).
#[derive(Allocative, Iterable)]
pub struct InputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<u64>,
    // Volume in BTC per block.
    pub volume: HeightMap<f64>,
    // Daily sum of `volume` over the date's block range.
    pub volume_1d_sum: DateMap<f64>,
    // Computed
    // add inputs_per_second
}
impl InputSubDataset {
    /// Opens (or creates) the input maps under `path`, optionally
    /// namespaced by a cohort `name`.
    ///
    /// # Errors
    /// Currently infallible in practice; kept fallible for uniformity with
    /// sibling importers.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        // Map path helper: nest under the cohort folder when named.
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            count: BiMap::new_bin(1, MapKind::Inserted, &f("input_count")),
            volume: HeightMap::new_bin(1, MapKind::Inserted, &f("input_volume")),
            volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("input_volume_1d_sum")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
pub fn insert(
&mut self,
&InsertData {
height,
date,
is_date_last_block,
date_blocks_range,
..
}: &InsertData,
state: &InputState,
) {
let count = self
.count
.height
.insert(height, state.count().round() as u64);
self.volume.insert(height, state.volume().to_btc());
if is_date_last_block {
self.count.date.insert(date, count);
self.volume_1d_sum
.insert(date, self.volume.sum_range(date_blocks_range));
}
}
}
// Hooks InputSubDataset into the shared dataset machinery.
impl AnyDataset for InputSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,90 +0,0 @@
use allocative::Allocative;
mod capitalization;
mod input;
mod price_paid;
mod ratio;
mod realized;
mod recap;
mod supply;
mod unrealized;
mod utxo;
pub use capitalization::*;
pub use input::*;
pub use price_paid::*;
pub use ratio::*;
pub use realized::*;
pub use recap::*;
use struct_iterable::Iterable;
pub use supply::*;
pub use unrealized::*;
pub use utxo::*;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, MapPath},
};
use super::AnyDatasetGroup;
/// The bundle of per-cohort sub-datasets shared by address and UTXO
/// cohorts (the `output` member is currently disabled).
#[derive(Allocative, Iterable)]
pub struct SubDataset {
    pub capitalization: CapitalizationDataset,
    pub input: InputSubDataset,
    // pub output: OutputSubDataset,
    pub price_paid: PricePaidSubDataset,
    pub realized: RealizedSubDataset,
    pub supply: SupplySubDataset,
    pub unrealized: UnrealizedSubDataset,
    pub utxo: UTXOSubDataset,
}
impl SubDataset {
    /// Imports every enabled sub-dataset under `parent_path`, optionally
    /// namespaced by the cohort `name`.
    ///
    /// # Errors
    /// Propagates the first failing sub-dataset import.
    pub fn import(
        parent_path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        Ok(Self {
            capitalization: CapitalizationDataset::import(parent_path, name, config)?,
            input: InputSubDataset::import(parent_path, name, config)?,
            // output: OutputSubDataset::import(parent_path)?,
            price_paid: PricePaidSubDataset::import(parent_path, name, config)?,
            realized: RealizedSubDataset::import(parent_path, name, config)?,
            supply: SupplySubDataset::import(parent_path, name, config)?,
            unrealized: UnrealizedSubDataset::import(parent_path, name, config)?,
            utxo: UTXOSubDataset::import(parent_path, name, config)?,
        })
    }
}
impl AnyDatasetGroup for SubDataset {
    /// Shared, read-only view of every enabled sub-dataset.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut group: Vec<&(dyn AnyDataset + Send + Sync)> = Vec::with_capacity(7);
        group.push(&self.capitalization);
        group.push(&self.price_paid);
        group.push(&self.realized);
        group.push(&self.supply);
        group.push(&self.unrealized);
        group.push(&self.utxo);
        group.push(&self.input);
        // group.push(&self.output);
        group
    }

    /// Mutable view of every enabled sub-dataset (same order as `as_vec`).
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut group: Vec<&mut dyn AnyDataset> = Vec::with_capacity(7);
        group.push(&mut self.capitalization);
        group.push(&mut self.price_paid);
        group.push(&mut self.realized);
        group.push(&mut self.supply);
        group.push(&mut self.unrealized);
        group.push(&mut self.utxo);
        group.push(&mut self.input);
        // group.push(&mut self.output);
        group
    }
}

View File

@@ -1,87 +0,0 @@
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::OutputState,
structs::{AnyBiMap, BiMap},
utils::ONE_YEAR_IN_DAYS,
};
// NOTE(review): this type appears stale — its file imports from
// `crate::datasets` while siblings use `crate::parser::datasets`, and
// `SubDataset` keeps its `output` field commented out. Presumably retained
// for a future revival; confirm before relying on it.
/// Per-cohort transaction-output statistics (count, volume, velocity).
pub struct OutputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<f32>,
    pub volume: BiMap<f32>,
    // Computed
    pub annualized_volume: BiMap<f32>,
    pub velocity: BiMap<f32>,
    // add outputs_per_second
}
impl OutputSubDataset {
    /// Opens (or creates) the output maps under `parent_path`.
    ///
    /// NOTE(review): uses an older constructor API than sibling files
    /// (`new_bin` without `MapKind`, `compute_from_dataset` without a
    /// config) — further evidence this module is stale/disabled.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            count: BiMap::new_bin(1, &f("output_count")),
            volume: BiMap::new_bin(1, &f("output_volume")),
            annualized_volume: BiMap::new_bin(1, &f("annualized_output_volume")),
            velocity: BiMap::new_bin(1, &f("output_velocity")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Records output count/volume at `height`; on the day's final block,
    /// mirrors the count and sums the day's volume into the date map.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        state: &OutputState,
    ) {
        let count = self.count.height.insert(height, state.count);
        self.volume.height.insert(height, state.volume);
        if is_date_last_block {
            self.count.date.insert(date, count);
            self.volume.date_insert_sum_range(date, date_blocks_range);
        }
    }
    /// Derives annualized volume (trailing 1-year sum) and velocity
    /// (annualized volume / cohort supply).
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        cohort_supply: &mut BiMap<f32>,
    ) {
        // annualized_volume must be filled before velocity divides by supply.
        self.annualized_volume.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.volume,
            ONE_YEAR_IN_DAYS,
        );
        self.velocity.multi_insert_divide(
            heights,
            dates,
            &mut self.annualized_volume,
            cohort_supply,
        );
    }
}
impl AnyDataset for OutputSubDataset {
    /// Exposes the minimum initial state required before processing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,266 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::PricePaidState,
},
structs::{BiMap, Config, Date, Height, MapKind, MapPath},
};
/// Percentiles of the price paid (cost-basis distribution) for a cohort.
///
/// `pp_median` is the 50th percentile; the other fields cover 5% steps
/// from the 5th to the 95th percentile.
#[derive(Allocative, Iterable)]
pub struct PricePaidSubDataset {
    // Minimum starting state required before this dataset can be processed.
    min_initial_states: MinInitialStates,
    // Inserted — one map per percentile, filled in `insert`.
    pp_median: BiMap<f32>,
    pp_95p: BiMap<f32>,
    pp_90p: BiMap<f32>,
    pp_85p: BiMap<f32>,
    pp_80p: BiMap<f32>,
    pp_75p: BiMap<f32>,
    pp_70p: BiMap<f32>,
    pp_65p: BiMap<f32>,
    pp_60p: BiMap<f32>,
    pp_55p: BiMap<f32>,
    pp_45p: BiMap<f32>,
    pp_40p: BiMap<f32>,
    pp_35p: BiMap<f32>,
    pp_30p: BiMap<f32>,
    pp_25p: BiMap<f32>,
    pp_20p: BiMap<f32>,
    pp_15p: BiMap<f32>,
    pp_10p: BiMap<f32>,
    pp_05p: BiMap<f32>,
}
impl PricePaidSubDataset {
    /// Opens (or creates) one on-disk map per price-paid percentile.
    ///
    /// Named cohorts keep their maps in a `{name}/` subfolder of `path`.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            pp_median: BiMap::new_bin(1, MapKind::Inserted, &f("median_price_paid")),
            pp_95p: BiMap::new_bin(1, MapKind::Inserted, &f("95p_price_paid")),
            pp_90p: BiMap::new_bin(1, MapKind::Inserted, &f("90p_price_paid")),
            pp_85p: BiMap::new_bin(1, MapKind::Inserted, &f("85p_price_paid")),
            pp_80p: BiMap::new_bin(1, MapKind::Inserted, &f("80p_price_paid")),
            pp_75p: BiMap::new_bin(1, MapKind::Inserted, &f("75p_price_paid")),
            pp_70p: BiMap::new_bin(1, MapKind::Inserted, &f("70p_price_paid")),
            pp_65p: BiMap::new_bin(1, MapKind::Inserted, &f("65p_price_paid")),
            pp_60p: BiMap::new_bin(1, MapKind::Inserted, &f("60p_price_paid")),
            pp_55p: BiMap::new_bin(1, MapKind::Inserted, &f("55p_price_paid")),
            pp_45p: BiMap::new_bin(1, MapKind::Inserted, &f("45p_price_paid")),
            pp_40p: BiMap::new_bin(1, MapKind::Inserted, &f("40p_price_paid")),
            pp_35p: BiMap::new_bin(1, MapKind::Inserted, &f("35p_price_paid")),
            pp_30p: BiMap::new_bin(1, MapKind::Inserted, &f("30p_price_paid")),
            pp_25p: BiMap::new_bin(1, MapKind::Inserted, &f("25p_price_paid")),
            pp_20p: BiMap::new_bin(1, MapKind::Inserted, &f("20p_price_paid")),
            pp_15p: BiMap::new_bin(1, MapKind::Inserted, &f("15p_price_paid")),
            pp_10p: BiMap::new_bin(1, MapKind::Inserted, &f("10p_price_paid")),
            pp_05p: BiMap::new_bin(1, MapKind::Inserted, &f("05p_price_paid")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts every percentile at `height`, mirroring the values to the
    /// date series on the day's last block.
    ///
    /// If the state has no percentiles at all (empty source iterator),
    /// default values are inserted instead.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
        state: &PricePaidState,
    ) {
        // Snapshot the percentiles in the EXACT order of
        // `inserted_as_mut_vec` so both sequences can be zipped below.
        let values = vec![
            state.pp_95p(),
            state.pp_90p(),
            state.pp_85p(),
            state.pp_80p(),
            state.pp_75p(),
            state.pp_70p(),
            state.pp_65p(),
            state.pp_60p(),
            state.pp_55p(),
            state.pp_median(),
            state.pp_45p(),
            state.pp_40p(),
            state.pp_35p(),
            state.pp_30p(),
            state.pp_25p(),
            state.pp_20p(),
            state.pp_15p(),
            state.pp_10p(),
            state.pp_05p(),
        ];
        // The percentiles are all-or-nothing: `pp_05p` (last entry) being
        // `None` means the source iterator was empty.
        if values.last().unwrap().is_none() {
            self.insert_height_default(height);
            if is_date_last_block {
                self.insert_date_default(date);
            }
            return;
        }
        self.inserted_as_mut_vec()
            .into_iter()
            .zip(values)
            .for_each(|(map, value)| {
                let value = map
                    .height
                    .insert(height, value.unwrap().to_dollar() as f32);
                if is_date_last_block {
                    map.date.insert(date, value);
                }
            });
    }
    /// Inserts the default value at `height` in every inserted map.
    fn insert_height_default(&mut self, height: Height) {
        self.inserted_as_mut_vec().into_iter().for_each(|bi| {
            bi.height.insert_default(height);
        })
    }
    /// Inserts the default value at `date` in every inserted map.
    fn insert_date_default(&mut self, date: Date) {
        self.inserted_as_mut_vec().into_iter().for_each(|bi| {
            bi.date.insert_default(date);
        })
    }
    /// All inserted percentile maps, ordered from highest (95p) to lowest
    /// (05p) percentile, with the median in the 50p slot.
    pub fn inserted_as_mut_vec(&mut self) -> Vec<&mut BiMap<f32>> {
        vec![
            &mut self.pp_95p,
            &mut self.pp_90p,
            &mut self.pp_85p,
            &mut self.pp_80p,
            &mut self.pp_75p,
            &mut self.pp_70p,
            &mut self.pp_65p,
            &mut self.pp_60p,
            &mut self.pp_55p,
            &mut self.pp_median,
            &mut self.pp_45p,
            &mut self.pp_40p,
            &mut self.pp_35p,
            &mut self.pp_30p,
            &mut self.pp_25p,
            &mut self.pp_20p,
            &mut self.pp_15p,
            &mut self.pp_10p,
            &mut self.pp_05p,
        ]
    }
}
impl AnyDataset for PricePaidSubDataset {
    /// Exposes the minimum initial state required before processing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,171 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
/// The ratio of the market price to another price series (`name`), with
/// moving averages, a momentum oscillator, extreme percentiles of the
/// ratio, and the price bands those percentiles imply.
#[derive(Allocative, Iterable)]
pub struct RatioDataset {
    // Minimum starting state required before this dataset can be processed.
    min_initial_states: MinInitialStates,
    // Computed — market_price / other_price.
    ratio: BiMap<f32>,
    // Simple moving averages of `ratio` over 1 week / 1 month / 1 year.
    ratio_1w_sma: BiMap<f32>,
    ratio_1m_sma: BiMap<f32>,
    ratio_1y_sma: BiMap<f32>,
    // (ratio / ratio_1y_sma) - 1.
    ratio_1y_sma_momentum_oscillator: BiMap<f32>,
    // Extreme percentiles of the ratio distribution.
    ratio_99p: BiMap<f32>,
    ratio_99_5p: BiMap<f32>,
    ratio_99_9p: BiMap<f32>,
    ratio_1p: BiMap<f32>,
    ratio_0_5p: BiMap<f32>,
    ratio_0_1p: BiMap<f32>,
    // Price bands: other_price * corresponding ratio percentile.
    price_99p: BiMap<f32>,
    price_99_5p: BiMap<f32>,
    price_99_9p: BiMap<f32>,
    price_1p: BiMap<f32>,
    price_0_5p: BiMap<f32>,
    price_0_1p: BiMap<f32>,
}
impl RatioDataset {
pub fn import(path: &MapPath, name: &str, config: &Config) -> color_eyre::Result<Self> {
let f_ratio = |s: &str| path.join(&format!("market_price_to_{name}_{s}"));
let f_price = |s: &str| path.join(&format!("{name}_{s}"));
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Computed
// ---
ratio: BiMap::new_bin(1, MapKind::Computed, &f_ratio("ratio")),
ratio_1w_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1w_sma")),
ratio_1m_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1m_sma")),
ratio_1y_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1y_sma")),
ratio_1y_sma_momentum_oscillator: BiMap::new_bin(
2,
MapKind::Computed,
&f_ratio("ratio_1y_sma_momentum_oscillator"),
),
ratio_99p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99p")),
ratio_99_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_5p")),
ratio_99_9p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_9p")),
ratio_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_1p")),
ratio_0_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_5p")),
ratio_0_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_1p")),
price_99p: BiMap::new_bin(4, MapKind::Computed, &f_price("99p")),
price_99_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_5p")),
price_99_9p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_9p")),
price_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("1p")),
price_0_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_5p")),
price_0_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_1p")),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
market_price: &mut BiMap<f32>,
other_price: &mut BiMap<f32>,
) {
self.ratio.height.multi_insert_divide(
heights,
&mut market_price.height,
&mut other_price.height,
);
self.ratio
.date
.multi_insert_divide(dates, &mut market_price.date, &mut other_price.date);
self.ratio_1w_sma.multi_insert_simple_average(
heights,
dates,
&mut self.ratio,
ONE_WEEK_IN_DAYS,
);
self.ratio_1m_sma.multi_insert_simple_average(
heights,
dates,
&mut self.ratio,
ONE_MONTH_IN_DAYS,
);
self.ratio_1m_sma.multi_insert_simple_average(
heights,
dates,
&mut self.ratio,
ONE_MONTH_IN_DAYS,
);
self.ratio_1y_sma.multi_insert_simple_average(
heights,
dates,
&mut self.ratio,
ONE_YEAR_IN_DAYS,
);
self.ratio_1y_sma_momentum_oscillator
.height
.multi_insert_complex_transform(
heights,
&mut self.ratio.height,
|(ratio, height, ..)| {
(ratio / self.ratio_1y_sma.height.get_or_import(height).unwrap()) - 1.0
},
);
self.ratio_1y_sma_momentum_oscillator
.date
.multi_insert_complex_transform(dates, &mut self.ratio.date, |(ratio, date, _, _)| {
(ratio / self.ratio_1y_sma.date.get_or_import(date).unwrap()) - 1.0
});
self.ratio.multi_insert_percentile(
heights,
dates,
vec![
(&mut self.ratio_99p, 0.99),
(&mut self.ratio_99_5p, 0.995),
(&mut self.ratio_99_9p, 0.999),
(&mut self.ratio_1p, 0.1),
(&mut self.ratio_0_5p, 0.005),
(&mut self.ratio_0_1p, 0.001),
],
None,
);
self.price_99p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99p);
self.price_99_5p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_5p);
self.price_99_9p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_9p);
self.price_1p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_1p);
self.price_0_5p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_5p);
self.price_0_1p
.multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_1p);
}
}
impl AnyDataset for RatioDataset {
    /// Exposes the minimum initial state required before processing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,387 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::RealizedState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath, Price},
utils::ONE_MONTH_IN_DAYS,
};
/// Realized profit-and-loss metrics: value moved at a gain vs at a loss,
/// the SOPR family of ratios, and their cumulative/derived series.
#[derive(Allocative, Iterable)]
pub struct RealizedSubDataset {
    // Minimum starting state required before this dataset can be processed.
    min_initial_states: MinInitialStates,
    // Inserted — raw per-block values.
    realized_profit: HeightMap<f32>,
    realized_loss: HeightMap<f32>,
    value_created: HeightMap<f32>,
    adjusted_value_created: HeightMap<f32>,
    value_destroyed: HeightMap<f32>,
    adjusted_value_destroyed: HeightMap<f32>,
    // Inserted — daily sums of the per-block values above.
    realized_profit_1d_sum: DateMap<f32>,
    realized_loss_1d_sum: DateMap<f32>,
    value_created_1d_sum: DateMap<f32>,
    adjusted_value_created_1d_sum: DateMap<f32>,
    value_destroyed_1d_sum: DateMap<f32>,
    adjusted_value_destroyed_1d_sum: DateMap<f32>,
    // Inserted — value_created / value_destroyed (SOPR) and its
    // entity-adjusted variant.
    spent_output_profit_ratio: BiMap<f32>,
    adjusted_spent_output_profit_ratio: BiMap<f32>,
    // Computed — see `compute` for how each map is derived.
    negative_realized_loss: HeightMap<f32>,
    negative_realized_loss_1d_sum: DateMap<f32>,
    net_realized_profit_and_loss: HeightMap<f32>,
    net_realized_profit_and_loss_1d_sum: DateMap<f32>,
    net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap<f32>,
    cumulative_realized_profit: BiMap<f32>,
    cumulative_realized_loss: BiMap<f32>,
    cumulative_net_realized_profit_and_loss: BiMap<f32>,
    cumulative_net_realized_profit_and_loss_1m_net_change: BiMap<f32>,
    realized_value: HeightMap<f32>,
    realized_value_1d_sum: DateMap<f32>,
    sell_side_risk_ratio: DateMap<f32>,
    realized_profit_to_loss_ratio: HeightMap<f32>,
    realized_profit_to_loss_1d_sum_ratio: DateMap<f32>,
}
impl RealizedSubDataset {
    /// Opens (or creates) every realized-P/L map; named cohorts keep their
    /// maps in a `{name}/` subfolder of `path`.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            realized_profit: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_profit")),
            realized_loss: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_loss")),
            value_created: HeightMap::new_bin(1, MapKind::Inserted, &f("value_created")),
            adjusted_value_created: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("adjusted_value_created"),
            ),
            value_destroyed: HeightMap::new_bin(1, MapKind::Inserted, &f("value_destroyed")),
            adjusted_value_destroyed: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("adjusted_value_destroyed"),
            ),
            realized_profit_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("realized_profit_1d_sum"),
            ),
            realized_loss_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("realized_loss_1d_sum"),
            ),
            value_created_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("value_created_1d_sum"),
            ),
            adjusted_value_created_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("adjusted_value_created_1d_sum"),
            ),
            value_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("value_destroyed_1d_sum"),
            ),
            adjusted_value_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("adjusted_value_destroyed_1d_sum"),
            ),
            spent_output_profit_ratio: BiMap::new_bin(
                2,
                MapKind::Inserted,
                &f("spent_output_profit_ratio"),
            ),
            adjusted_spent_output_profit_ratio: BiMap::new_bin(
                2,
                MapKind::Inserted,
                &f("adjusted_spent_output_profit_ratio"),
            ),
            // ---
            // Computed
            // ---
            negative_realized_loss: HeightMap::new_bin(
                2,
                MapKind::Computed,
                &f("negative_realized_loss"),
            ),
            negative_realized_loss_1d_sum: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("negative_realized_loss_1d_sum"),
            ),
            net_realized_profit_and_loss: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("net_realized_profit_and_loss"),
            ),
            net_realized_profit_and_loss_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("net_realized_profit_and_loss_1d_sum"),
            ),
            // NOTE(review): the file name omits `_1d_sum` (unlike the field
            // name) — kept as-is to preserve the on-disk layout.
            net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("net_realized_profit_and_loss_to_market_cap_ratio"),
            ),
            cumulative_realized_profit: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_realized_profit"),
            ),
            cumulative_realized_loss: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_realized_loss"),
            ),
            cumulative_net_realized_profit_and_loss: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_net_realized_profit_and_loss"),
            ),
            cumulative_net_realized_profit_and_loss_1m_net_change: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_net_realized_profit_and_loss_1m_net_change"),
            ),
            realized_value: HeightMap::new_bin(1, MapKind::Computed, &f("realized_value")),
            realized_value_1d_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("realized_value_1d_sum"),
            ),
            sell_side_risk_ratio: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("sell_side_risk_ratio"),
            ),
            realized_profit_to_loss_ratio: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("realized_profit_to_loss_ratio"),
            ),
            realized_profit_to_loss_1d_sum_ratio: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("realized_profit_to_loss_1d_sum_ratio"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts the block's realized values at `height`; on the day's last
    /// block, also inserts the daily sums and the daily SOPR ratios.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        height_state: &RealizedState,
    ) {
        self.realized_profit
            .insert(height, height_state.realized_profit().to_dollar() as f32);
        self.realized_loss
            .insert(height, height_state.realized_loss().to_dollar() as f32);
        self.value_created
            .insert(height, height_state.value_created().to_dollar() as f32);
        self.adjusted_value_created.insert(
            height,
            height_state.adjusted_value_created().to_dollar() as f32,
        );
        self.value_destroyed
            .insert(height, height_state.value_destroyed().to_dollar() as f32);
        self.adjusted_value_destroyed.insert(
            height,
            height_state.adjusted_value_destroyed().to_dollar() as f32,
        );
        // SOPR defaults to 1.0 (break-even) when nothing was destroyed, to
        // avoid a division by zero.
        self.spent_output_profit_ratio.height.insert(height, {
            if height_state.value_destroyed() > Price::ZERO {
                (height_state.value_created().to_cent() as f64
                    / height_state.value_destroyed().to_cent() as f64) as f32
            } else {
                1.0
            }
        });
        self.adjusted_spent_output_profit_ratio
            .height
            .insert(height, {
                if height_state.adjusted_value_destroyed() > Price::ZERO {
                    (height_state.adjusted_value_created().to_cent() as f64
                        / height_state.adjusted_value_destroyed().to_cent() as f64)
                        as f32
                } else {
                    1.0
                }
            });
        if is_date_last_block {
            self.realized_profit_1d_sum
                .insert(date, self.realized_profit.sum_range(date_blocks_range));
            self.realized_loss_1d_sum
                .insert(date, self.realized_loss.sum_range(date_blocks_range));
            let value_created_1d_sum = self
                .value_created_1d_sum
                .insert(date, self.value_created.sum_range(date_blocks_range));
            let adjusted_value_created_1d_sum = self.adjusted_value_created_1d_sum.insert(
                date,
                self.adjusted_value_created.sum_range(date_blocks_range),
            );
            let value_destroyed_1d_sum = self
                .value_destroyed_1d_sum
                .insert(date, self.value_destroyed.sum_range(date_blocks_range));
            let adjusted_value_destroyed_1d_sum = self.adjusted_value_destroyed_1d_sum.insert(
                date,
                self.adjusted_value_destroyed.sum_range(date_blocks_range),
            );
            // Fix: the daily SOPR previously divided without a zero guard
            // (unlike the height-level insert above), writing inf/NaN on
            // days where nothing was destroyed. Mirror the 1.0 fallback.
            self.spent_output_profit_ratio.date.insert(
                date,
                if value_destroyed_1d_sum > 0.0 {
                    value_created_1d_sum / value_destroyed_1d_sum
                } else {
                    1.0
                },
            );
            self.adjusted_spent_output_profit_ratio.date.insert(
                date,
                if adjusted_value_destroyed_1d_sum > 0.0 {
                    adjusted_value_created_1d_sum / adjusted_value_destroyed_1d_sum
                } else {
                    1.0
                },
            );
        }
    }
    /// Derives every computed map (negatives, nets, cumulatives, ratios)
    /// from the inserted values for the given height/date ranges.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        market_cap: &mut BiMap<f32>,
    ) {
        self.negative_realized_loss.multi_insert_simple_transform(
            heights,
            &mut self.realized_loss,
            |v, _| v * -1.0,
        );
        self.negative_realized_loss_1d_sum
            .multi_insert_simple_transform(dates, &mut self.realized_loss_1d_sum, |v, _| v * -1.0);
        self.net_realized_profit_and_loss.multi_insert_subtract(
            heights,
            &mut self.realized_profit,
            &mut self.realized_loss,
        );
        self.net_realized_profit_and_loss_1d_sum
            .multi_insert_subtract(
                dates,
                &mut self.realized_profit_1d_sum,
                &mut self.realized_loss_1d_sum,
            );
        self.net_realized_profit_and_loss_1d_sum_to_market_cap_ratio
            .multi_insert_percentage(
                dates,
                &mut self.net_realized_profit_and_loss_1d_sum,
                &mut market_cap.date,
            );
        self.cumulative_realized_profit
            .height
            .multi_insert_cumulative(heights, &mut self.realized_profit);
        self.cumulative_realized_profit
            .date
            .multi_insert_cumulative(dates, &mut self.realized_profit_1d_sum);
        self.cumulative_realized_loss
            .height
            .multi_insert_cumulative(heights, &mut self.realized_loss);
        self.cumulative_realized_loss
            .date
            .multi_insert_cumulative(dates, &mut self.realized_loss_1d_sum);
        self.cumulative_net_realized_profit_and_loss
            .height
            .multi_insert_cumulative(heights, &mut self.net_realized_profit_and_loss);
        self.cumulative_net_realized_profit_and_loss
            .date
            .multi_insert_cumulative(dates, &mut self.net_realized_profit_and_loss_1d_sum);
        self.cumulative_net_realized_profit_and_loss_1m_net_change
            .multi_insert_net_change(
                heights,
                dates,
                &mut self.cumulative_net_realized_profit_and_loss,
                ONE_MONTH_IN_DAYS,
            );
        // Realized value = profit + loss (both stored as positive
        // magnitudes, so the sum is the total value realized).
        self.realized_value.multi_insert_add(
            heights,
            &mut self.realized_profit,
            &mut self.realized_loss,
        );
        self.realized_value_1d_sum.multi_insert_add(
            dates,
            &mut self.realized_profit_1d_sum,
            &mut self.realized_loss_1d_sum,
        );
        self.sell_side_risk_ratio.multi_insert_percentage(
            dates,
            &mut self.realized_value_1d_sum,
            &mut market_cap.date,
        );
        self.realized_profit_to_loss_ratio.multi_insert_divide(
            heights,
            &mut self.realized_profit,
            &mut self.realized_loss,
        );
        self.realized_profit_to_loss_1d_sum_ratio
            .multi_insert_divide(
                dates,
                &mut self.realized_profit_1d_sum,
                &mut self.realized_loss_1d_sum,
            );
    }
}
impl AnyDataset for RealizedSubDataset {
    /// Exposes the minimum initial state required before processing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,277 +0,0 @@
use std::{iter::Sum, ops::Add};
use allocative::Allocative;
use crate::{
structs::{
Date, DateMapChunkId, GenericMap, MapChunkId, MapKey, MapKind, MapPath, MapSerialized,
MapValue, SerializedDateMap,
},
utils::{get_percentile, LossyFrom},
};
// Convenience alias for the date-keyed specialization of `RecapDataset`.
pub type DateRecapDataset<T> = RecapDataset<Date, T, DateMapChunkId, SerializedDateMap<T>>;
/// Summary statistics (average, sum, extremes, percentiles) over a slice
/// of values, each stat backed by its own on-disk map.
///
/// Each field is `Some` only when the corresponding `RecapOptions` flag
/// was enabled at import time.
#[derive(Allocative)]
pub struct RecapDataset<Key, Value, ChunkId, Serialized> {
    average: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    sum: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    max: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _90p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _75p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    median: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _25p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _10p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    min: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
}
/// Flags selecting which summary statistics a `RecapDataset` tracks.
/// All flags default to off; enable them with the `add_*` builders.
#[derive(Default)]
pub struct RecapOptions {
    average: bool,
    sum: bool,
    max: bool,
    _90p: bool,
    _75p: bool,
    median: bool,
    _25p: bool,
    _10p: bool,
    min: bool,
}
impl RecapOptions {
    /// Enables tracking of the minimum.
    pub fn add_min(self) -> Self {
        Self { min: true, ..self }
    }
    /// Enables tracking of the maximum.
    pub fn add_max(self) -> Self {
        Self { max: true, ..self }
    }
    /// Enables tracking of the median (50th percentile).
    pub fn add_median(self) -> Self {
        Self {
            median: true,
            ..self
        }
    }
    /// Enables tracking of the arithmetic mean.
    pub fn add_average(self) -> Self {
        Self {
            average: true,
            ..self
        }
    }
    /// Enables tracking of the sum.
    #[allow(unused)]
    pub fn add_sum(self) -> Self {
        Self { sum: true, ..self }
    }
    /// Enables tracking of the 90th percentile.
    pub fn add_90p(self) -> Self {
        Self { _90p: true, ..self }
    }
    /// Enables tracking of the 75th percentile.
    pub fn add_75p(self) -> Self {
        Self { _75p: true, ..self }
    }
    /// Enables tracking of the 25th percentile.
    pub fn add_25p(self) -> Self {
        Self { _25p: true, ..self }
    }
    /// Enables tracking of the 10th percentile.
    pub fn add_10p(self) -> Self {
        Self { _10p: true, ..self }
    }
}
impl<Key, Value, ChunkId, Serialized> RecapDataset<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    /// Opens only the maps enabled in `options`; disabled stats stay `None`.
    pub fn import(path: &MapPath, options: RecapOptions) -> color_eyre::Result<Self> {
        let f = |s: &str| path.join(s);
        let s = Self {
            // ---
            // Computed
            // ---
            min: options
                .min
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("min"))),
            max: options
                .max
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("max"))),
            median: options
                .median
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("median"))),
            average: options
                .average
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("average"))),
            sum: options
                .sum
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("sum"))),
            _90p: options
                ._90p
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("90p"))),
            _75p: options
                ._75p
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("75p"))),
            _25p: options
                ._25p
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("25p"))),
            _10p: options
                ._10p
                .then(|| GenericMap::new_bin(1, MapKind::Computed, &f("10p"))),
        };
        Ok(s)
    }
    /// Computes every enabled statistic of `values` and stores it at `key`.
    ///
    /// Sorts `values` in place when any order statistic is enabled.
    /// An empty slice is a no-op: nothing is inserted for `key`.
    pub fn compute<'a, Value2>(&mut self, key: Key, values: &'a mut [Value2])
    where
        Value: LossyFrom<f32> + LossyFrom<Value2>,
        Value2: Sum<&'a Value2> + Ord + Add<Output = Value2> + Clone + Copy + LossyFrom<f32>,
        f32: LossyFrom<Value> + LossyFrom<Value2>,
    {
        // Fix: `first()/last().unwrap()` below panicked on an empty slice,
        // and the average divided by a zero length. Bail out early instead.
        if values.is_empty() {
            return;
        }
        if self.max.is_some()
            || self._90p.is_some()
            || self._75p.is_some()
            || self.median.is_some()
            || self._25p.is_some()
            || self._10p.is_some()
            || self.min.is_some()
        {
            // Order statistics need a sorted slice.
            values.sort_unstable();
            if let Some(max) = self.max.as_mut() {
                max.insert_computed(key, Value::lossy_from(*values.last().unwrap()));
            }
            if let Some(_90p) = self._90p.as_mut() {
                _90p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.90)));
            }
            if let Some(_75p) = self._75p.as_mut() {
                _75p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.75)));
            }
            if let Some(median) = self.median.as_mut() {
                median.insert_computed(key, Value::lossy_from(get_percentile(values, 0.50)));
            }
            if let Some(_25p) = self._25p.as_mut() {
                _25p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.25)));
            }
            if let Some(_10p) = self._10p.as_mut() {
                _10p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.10)));
            }
            if let Some(min) = self.min.as_mut() {
                min.insert_computed(key, Value::lossy_from(*values.first().unwrap()));
            }
        }
        if self.sum.is_some() || self.average.is_some() {
            let sum = Value::lossy_from(values.iter().sum::<Value2>());
            if let Some(sum_map) = self.sum.as_mut() {
                sum_map.insert_computed(key, sum);
            }
            if let Some(average) = self.average.as_mut() {
                // `values` is non-empty here, so `len` is never zero.
                let len = values.len() as f32;
                average.insert_computed(key, Value::lossy_from(f32::lossy_from(sum) / len));
            }
        }
    }
    /// All enabled maps (shared references).
    pub fn as_vec(&self) -> Vec<&GenericMap<Key, Value, ChunkId, Serialized>> {
        let mut v = vec![];
        if let Some(min) = self.min.as_ref() {
            v.push(min);
        }
        if let Some(max) = self.max.as_ref() {
            v.push(max);
        }
        if let Some(median) = self.median.as_ref() {
            v.push(median);
        }
        if let Some(average) = self.average.as_ref() {
            v.push(average);
        }
        if let Some(sum) = self.sum.as_ref() {
            v.push(sum);
        }
        if let Some(_90p) = self._90p.as_ref() {
            v.push(_90p);
        }
        if let Some(_75p) = self._75p.as_ref() {
            v.push(_75p);
        }
        if let Some(_25p) = self._25p.as_ref() {
            v.push(_25p);
        }
        if let Some(_10p) = self._10p.as_ref() {
            v.push(_10p);
        }
        v
    }
    /// All enabled maps (exclusive references).
    pub fn as_mut_vec(&mut self) -> Vec<&mut GenericMap<Key, Value, ChunkId, Serialized>> {
        let mut v = vec![];
        if let Some(min) = self.min.as_mut() {
            v.push(min);
        }
        if let Some(max) = self.max.as_mut() {
            v.push(max);
        }
        if let Some(median) = self.median.as_mut() {
            v.push(median);
        }
        if let Some(average) = self.average.as_mut() {
            v.push(average);
        }
        if let Some(sum) = self.sum.as_mut() {
            v.push(sum);
        }
        if let Some(_90p) = self._90p.as_mut() {
            v.push(_90p);
        }
        if let Some(_75p) = self._75p.as_mut() {
            v.push(_75p);
        }
        if let Some(_25p) = self._25p.as_mut() {
            v.push(_25p);
        }
        if let Some(_10p) = self._10p.as_mut() {
            v.push(_10p);
        }
        v
    }
}

View File

@@ -1,109 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::SupplyState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// A cohort's BTC supply and its ratios to the circulating supply.
#[derive(Allocative, Iterable)]
pub struct SupplySubDataset {
    // Minimum starting state required before this dataset can be processed.
    min_initial_states: MinInitialStates,
    // Inserted — the cohort's supply in BTC.
    pub supply: BiMap<f64>,
    // Computed — percentage of the circulating supply held by this cohort.
    pub supply_to_circulating_supply_ratio: BiMap<f64>,
    // Computed — `supply / 2` and its circulating-supply ratio.
    pub halved_supply: BiMap<f64>,
    pub halved_supply_to_circulating_supply_ratio: BiMap<f64>,
}
impl SupplySubDataset {
    /// Opens (or creates) the cohort's supply maps; named cohorts keep
    /// their maps in a `{name}/` subfolder of `path`.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let f = |s: &str| match name {
            Some(name) => path.join(&format!("{name}/{s}")),
            None => path.join(s),
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            supply: BiMap::new_bin(1, MapKind::Inserted, &f("supply")),
            // ---
            // Computed,
            // ---
            supply_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("supply_to_circulating_supply_ratio"),
            ),
            halved_supply: BiMap::new_bin(1, MapKind::Computed, &f("halved_supply")),
            halved_supply_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("halved_supply_to_circulating_supply_ratio"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Records the cohort's supply (in BTC) at `height`, mirroring it to
    /// the date series on the day's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        state: &SupplyState,
    ) {
        let supply_in_btc = self.supply.height.insert(height, state.supply().to_btc());
        if is_date_last_block {
            self.supply.date.insert(date, supply_in_btc);
        }
    }
    /// Derives the circulating-supply ratios and the halved-supply maps.
    #[allow(unused_variables)]
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        let halve = |v: f64| v / 2.0;
        self.supply_to_circulating_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply, circulating_supply);
        self.halved_supply
            .multi_insert_simple_transform(heights, dates, &mut self.supply, &halve);
        // Halving the ratio equals the halved supply's ratio, so reuse the
        // already-computed ratio map.
        self.halved_supply_to_circulating_supply_ratio
            .multi_insert_simple_transform(
                heights,
                dates,
                &mut self.supply_to_circulating_supply_ratio,
                &halve,
            );
    }
}
impl AnyDataset for SupplySubDataset {
    /// Exposes the minimum initial state required before processing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,199 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::UnrealizedState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Unrealized (paper) profit-and-loss metrics for a cohort.
#[derive(Allocative, Iterable)]
pub struct UnrealizedSubDataset {
    // Minimum starting state required before this dataset can be processed.
    min_initial_states: MinInitialStates,
    // Inserted — written directly from the cohort's unrealized state.
    supply_in_profit: BiMap<f64>,
    unrealized_profit: BiMap<f32>,
    unrealized_loss: BiMap<f32>,
    // Computed — derived in `compute`.
    supply_in_loss: BiMap<f64>,
    negative_unrealized_loss: BiMap<f32>,
    net_unrealized_profit_and_loss: BiMap<f32>,
    net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap<f32>,
    supply_in_profit_to_own_supply_ratio: BiMap<f64>,
    supply_in_profit_to_circulating_supply_ratio: BiMap<f64>,
    supply_in_loss_to_own_supply_ratio: BiMap<f64>,
    supply_in_loss_to_circulating_supply_ratio: BiMap<f64>,
}
impl UnrealizedSubDataset {
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
supply_in_profit: BiMap::new_bin(1, MapKind::Inserted, &f("supply_in_profit")),
unrealized_profit: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_profit")),
unrealized_loss: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_loss")),
// ---
// Inserted
// ---
supply_in_loss: BiMap::new_bin(1, MapKind::Computed, &f("supply_in_loss")),
negative_unrealized_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("negative_unrealized_loss"),
),
net_unrealized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("net_unrealized_profit_and_loss"),
),
net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap::new_bin(
2,
MapKind::Computed,
&f("net_unrealized_profit_and_loss_to_market_cap_ratio"),
),
supply_in_profit_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_own_supply_ratio"),
),
supply_in_profit_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_circulating_supply_ratio"),
),
supply_in_loss_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_own_supply_ratio"),
),
supply_in_loss_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_circulating_supply_ratio"),
),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
date,
is_date_last_block,
..
}: &InsertData,
block_state: &UnrealizedState,
date_state: &Option<UnrealizedState>,
) {
self.supply_in_profit
.height
.insert(height, block_state.supply_in_profit().to_btc());
self.unrealized_profit
.height
.insert(height, block_state.unrealized_profit().to_dollar() as f32);
self.unrealized_loss
.height
.insert(height, block_state.unrealized_loss().to_dollar() as f32);
if is_date_last_block {
let date_state = date_state.as_ref().unwrap();
self.supply_in_profit
.date
.insert(date, date_state.supply_in_profit().to_btc());
self.unrealized_profit
.date
.insert(date, date_state.unrealized_profit().to_dollar() as f32);
self.unrealized_loss
.date
.insert(date, date_state.unrealized_loss().to_dollar() as f32);
}
}
    /// Derives all computed maps from the inserted profit/loss maps over the
    /// given `heights`/`dates` ranges.
    ///
    /// Order matters: `supply_in_loss` and `net_unrealized_profit_and_loss`
    /// are filled first, then the ratio maps that read from them.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        own_supply: &mut BiMap<f64>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        // Presumably own supply minus supply in profit — per
        // `multi_insert_subtract`'s argument order (TODO confirm).
        self.supply_in_loss.multi_insert_subtract(
            heights,
            dates,
            own_supply,
            &mut self.supply_in_profit,
        );
        // Negated loss curve (for plotting below the zero line).
        self.negative_unrealized_loss.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.unrealized_loss,
            &|v| v * -1.0,
        );
        self.net_unrealized_profit_and_loss.multi_insert_subtract(
            heights,
            dates,
            &mut self.unrealized_profit,
            &mut self.unrealized_loss,
        );
        self.net_unrealized_profit_and_loss_to_market_cap_ratio
            .multi_insert_percentage(
                heights,
                dates,
                &mut self.net_unrealized_profit_and_loss,
                market_cap,
            );
        self.supply_in_profit_to_own_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_profit, own_supply);
        self.supply_in_profit_to_circulating_supply_ratio
            .multi_insert_percentage(
                heights,
                dates,
                &mut self.supply_in_profit,
                circulating_supply,
            );
        self.supply_in_loss_to_own_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_loss, own_supply);
        self.supply_in_loss_to_circulating_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_loss, circulating_supply);
    }
}
impl AnyDataset for UnrealizedSubDataset {
    // Generic dataset plumbing: exposes which initial states must exist
    // before this dataset can insert/compute.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,70 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::UTXOState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Per-cohort UTXO count, indexed both by block height and by date.
#[derive(Allocative, Iterable)]
pub struct UTXOSubDataset {
    /// Minimum states required before this dataset can start inserting.
    min_initial_states: MinInitialStates,
    /// Number of UTXOs in the cohort.
    count: BiMap<f64>,
}
impl UTXOSubDataset {
    /// Opens (or creates) the cohort's UTXO-count map under `path`,
    /// optionally namespaced by `name`, then records the minimal initial
    /// states computed from the freshly imported maps.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let to_path = |s: &str| match name {
            Some(name) => path.join(&format!("{name}/{s}")),
            None => path.join(s),
        };

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            count: BiMap::new_bin(1, MapKind::Inserted, &to_path("utxo_count")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the cohort's UTXO count for `height`, duplicating it onto
    /// `date` when this block closes the date.
    pub fn insert(&mut self, insert_data: &InsertData, state: &UTXOState) {
        let &InsertData {
            height,
            is_date_last_block,
            date,
            ..
        } = insert_data;

        let utxo_count = self.count.height.insert(height, state.count());

        if is_date_last_block {
            self.count.date.insert(date, utxo_count);
        }
    }
}
impl AnyDataset for UTXOSubDataset {
    // Generic dataset plumbing: report the minimal initial states required
    // before insertion can proceed.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,325 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::InsertData,
structs::{BiMap, Config, DateMap, HeightMap, MapKind},
utils::{
ONE_DAY_IN_S, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS, TARGET_BLOCKS_PER_DAY,
},
};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Per-block and per-day transaction metrics (counts, volumes, SMAs,
/// annualized sums, velocity, TPS).
#[derive(Allocative, Iterable)]
pub struct TransactionDataset {
    min_initial_states: MinInitialStates,
    // Raw values, inserted per block (HeightMap) and summed per day (DateMap).
    pub count: HeightMap<usize>,
    pub count_1d_sum: DateMap<usize>,
    pub volume: HeightMap<f64>,
    pub volume_1d_sum: DateMap<f64>,
    pub volume_in_dollars: HeightMap<f32>,
    pub volume_in_dollars_1d_sum: DateMap<f32>,
    // Candidate metrics not implemented yet:
    // Average sent
    // Average sent in dollars
    // Median sent
    // Median sent in dollars
    // Min
    // Max
    // 10th 25th 75th 90th percentiles
    // type
    // version
    // Computed rolling averages (1-week / 1-month windows).
    pub count_1w_sma: HeightMap<f32>,
    pub count_1d_sum_1w_sma: DateMap<f32>,
    pub count_1m_sma: HeightMap<f32>,
    pub count_1d_sum_1m_sma: DateMap<f32>,
    pub volume_1w_sma: HeightMap<f32>,
    pub volume_1d_sum_1w_sma: DateMap<f32>,
    pub volume_1m_sma: HeightMap<f32>,
    pub volume_1d_sum_1m_sma: DateMap<f32>,
    pub volume_in_dollars_1w_sma: HeightMap<f32>,
    pub volume_in_dollars_1d_sum_1w_sma: DateMap<f32>,
    pub volume_in_dollars_1m_sma: HeightMap<f32>,
    pub volume_in_dollars_1d_sum_1m_sma: DateMap<f32>,
    // Trailing 1-year aggregates and derived ratios.
    pub annualized_volume: DateMap<f32>,
    pub annualized_volume_in_dollars: DateMap<f32>,
    pub velocity: DateMap<f32>,
    pub transactions_per_second: BiMap<f32>,
    pub transactions_per_second_1w_sma: BiMap<f32>,
    pub transactions_per_second_1m_sma: BiMap<f32>,
}
impl TransactionDataset {
    /// Opens (or creates) every on-disk map under the configured datasets
    /// directory, then records the minimal initial states computed from the
    /// freshly imported maps.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let f = |s: &str| config.path_datasets().join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            count: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_count")),
            count_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_count_1d_sum")),
            volume: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_volume")),
            volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_volume_1d_sum")),
            volume_in_dollars: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("transaction_volume_in_dollars"),
            ),
            volume_in_dollars_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("transaction_volume_in_dollars_1d_sum"),
            ),
            // ---
            // Computed
            // ---
            count_1w_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1w_sma")),
            count_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_count_1d_sum_1w_sma"),
            ),
            count_1m_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1m_sma")),
            count_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_count_1d_sum_1m_sma"),
            ),
            volume_1w_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1w_sma"),
            ),
            volume_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1d_sum_1w_sma"),
            ),
            volume_1m_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1m_sma"),
            ),
            volume_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1d_sum_1m_sma"),
            ),
            volume_in_dollars_1w_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1w_sma"),
            ),
            volume_in_dollars_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1d_sum_1w_sma"),
            ),
            volume_in_dollars_1m_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1m_sma"),
            ),
            volume_in_dollars_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1d_sum_1m_sma"),
            ),
            annualized_volume: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("annualized_transaction_volume"),
            ),
            // Version 2: presumably bumped after a format/semantic change.
            annualized_volume_in_dollars: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("annualized_transaction_volume_in_dollars"),
            ),
            velocity: DateMap::new_bin(1, MapKind::Computed, &f("transaction_velocity")),
            transactions_per_second: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second"),
            ),
            transactions_per_second_1w_sma: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second_1w_sma"),
            ),
            transactions_per_second_1m_sma: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second_1m_sma"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
pub fn insert(
&mut self,
&InsertData {
height,
date,
amount_sent,
transaction_count,
is_date_last_block,
date_blocks_range,
block_price,
..
}: &InsertData,
) {
self.count.insert(height, transaction_count);
self.volume.insert(height, amount_sent.to_btc());
self.volume_in_dollars
.insert(height, (block_price * amount_sent).to_dollar() as f32);
if is_date_last_block {
self.count_1d_sum
.insert(date, self.count.sum_range(date_blocks_range));
self.volume_1d_sum
.insert(date, self.volume.sum_range(date_blocks_range));
self.volume_in_dollars_1d_sum
.insert(date, self.volume_in_dollars.sum_range(date_blocks_range));
}
}
    /// Fills every computed map from the inserted ones over the given
    /// `heights`/`dates` ranges. `annualized_volume` must be computed before
    /// `velocity`, which divides it by the circulating supply.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        block_interval: &mut HeightMap<u32>,
    ) {
        // Rolling averages: height windows are expressed in expected blocks
        // (TARGET_BLOCKS_PER_DAY * days), date windows directly in days.
        self.count_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.count,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.count_1d_sum_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.count_1d_sum,
            ONE_WEEK_IN_DAYS,
        );
        self.count_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.count,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.count_1d_sum_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.count_1d_sum,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.volume,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.volume_1d_sum_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.volume_1d_sum,
            ONE_WEEK_IN_DAYS,
        );
        self.volume_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.volume,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.volume_1d_sum_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.volume_1d_sum,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_in_dollars_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.volume_in_dollars,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.volume_in_dollars_1d_sum_1w_sma
            .multi_insert_simple_average(
                dates,
                &mut self.volume_in_dollars_1d_sum,
                ONE_WEEK_IN_DAYS,
            );
        self.volume_in_dollars_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.volume_in_dollars,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.volume_in_dollars_1d_sum_1m_sma
            .multi_insert_simple_average(
                dates,
                &mut self.volume_in_dollars_1d_sum,
                ONE_MONTH_IN_DAYS,
            );
        // Trailing 1-year sums.
        self.annualized_volume.multi_insert_last_x_sum(
            dates,
            &mut self.volume_1d_sum,
            ONE_YEAR_IN_DAYS,
        );
        self.annualized_volume_in_dollars.multi_insert_last_x_sum(
            dates,
            &mut self.volume_in_dollars_1d_sum,
            ONE_YEAR_IN_DAYS,
        );
        // Velocity = annualized volume / circulating supply.
        self.velocity.multi_insert_divide(
            dates,
            &mut self.annualized_volume,
            &mut circulating_supply.date,
        );
        // Per-block TPS = tx count / block interval (seconds).
        self.transactions_per_second.height.multi_insert_divide(
            heights,
            &mut self.count,
            block_interval,
        );
        // Per-date TPS scales by how much of the day has elapsed —
        // NOTE(review): assumes get_day_completion() is the elapsed fraction
        // of the day; confirm against Date's implementation.
        self.transactions_per_second
            .date
            .multi_insert_simple_transform(dates, &mut self.count_1d_sum, |count, date| {
                count as f32 / (date.get_day_completion() as f32 * ONE_DAY_IN_S as f32)
            });
        self.transactions_per_second_1w_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_WEEK_IN_DAYS,
            );
        self.transactions_per_second_1m_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_MONTH_IN_DAYS,
            );
    }
}
impl AnyDataset for TransactionDataset {
    // Generic dataset plumbing: report the minimal initial states required
    // before insertion can proceed.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,199 +0,0 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::UTXOCohortId,
},
structs::{BiMap, Config, Date, Height, MapPath},
};
/// All sub-datasets (supply, utxo, capitalization, unrealized, realized, …)
/// for one UTXO cohort.
#[derive(Allocative, Iterable)]
pub struct UTXODataset {
    /// Which cohort this dataset tracks.
    id: UTXOCohortId,
    min_initial_states: MinInitialStates,
    pub subs: SubDataset,
}
impl UTXODataset {
pub fn import(
parent_path: &MapPath,
id: UTXOCohortId,
config: &Config,
) -> color_eyre::Result<Self> {
let name = id.name().to_owned();
let mut s = Self {
min_initial_states: MinInitialStates::default(),
id,
subs: SubDataset::import(parent_path, &Some(name), config)?,
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(&mut self, insert_data: &InsertData) {
let &InsertData {
states,
utxo_cohorts_one_shot_states,
// utxo_cohorts_received_states,
utxo_cohorts_sent_states,
..
} = insert_data;
if self.needs_insert_supply(insert_data.height, insert_data.date) {
self.subs.supply.insert(
insert_data,
&states
.utxo_cohorts_durable_states
.as_ref()
.unwrap()
.get(&self.id)
.durable_states
.supply_state,
);
}
if self.needs_insert_utxo(insert_data.height, insert_data.date) {
self.subs.utxo.insert(
insert_data,
&states
.utxo_cohorts_durable_states
.as_ref()
.unwrap()
.get(&self.id)
.durable_states
.utxo_state,
);
}
if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
self.subs.capitalization.insert(
insert_data,
&states
.utxo_cohorts_durable_states
.as_ref()
.unwrap()
.get(&self.id)
.durable_states
.capitalization_state,
);
}
if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
self.subs.unrealized.insert(
insert_data,
&utxo_cohorts_one_shot_states
.get(&self.id)
.unrealized_block_state,
&utxo_cohorts_one_shot_states
.get(&self.id)
.unrealized_date_state,
);
}
if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
self.subs.price_paid.insert(
insert_data,
&utxo_cohorts_one_shot_states.get(&self.id).price_paid_state,
);
}
if self.needs_insert_realized(insert_data.height, insert_data.date) {
self.subs.realized.insert(
insert_data,
&utxo_cohorts_sent_states.get(&self.id).realized,
);
}
if self.needs_insert_input(insert_data.height, insert_data.date) {
self.subs
.input
.insert(insert_data, &utxo_cohorts_sent_states.get(&self.id).input);
}
// TODO: move output from common to address
// if self.subs.output.needs_insert(insert_data) {
// self.subs
// .output
// .insert(insert_data, utxo_cohorts_received_states.get(&self.id));
// }
}
    // Per-sub-dataset guards: each delegates to the sub-dataset's own
    // `needs_insert`, so insertion work is skipped once a (height, date)
    // pair is already covered on disk.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.subs.utxo.needs_insert(height, date)
    }
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.subs.capitalization.needs_insert(height, date)
    }
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.subs.supply.needs_insert(height, date)
    }
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.subs.price_paid.needs_insert(height, date)
    }
    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.subs.realized.needs_insert(height, date)
    }
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.subs.unrealized.needs_insert(height, date)
    }
    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.subs.input.needs_insert(height, date)
    }
    /// Runs the compute phase of each sub-dataset that still needs it.
    /// Supply is computed first since unrealized and capitalization read
    /// from `self.subs.supply.supply`.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.subs.supply.should_compute(compute_data) {
            self.subs.supply.compute(compute_data, circulating_supply);
        }
        if self.subs.unrealized.should_compute(compute_data) {
            self.subs.unrealized.compute(
                compute_data,
                &mut self.subs.supply.supply,
                circulating_supply,
                market_cap,
            );
        }
        if self.subs.realized.should_compute(compute_data) {
            self.subs.realized.compute(compute_data, market_cap);
        }
        if self.subs.capitalization.should_compute(compute_data) {
            self.subs
                .capitalization
                .compute(compute_data, closes, &mut self.subs.supply.supply);
        }
        // if self.subs.output.should_compute(compute_data) {
        //     self.subs
        //         .output
        //         .compute(compute_data, &mut self.subs.supply.total);
        // }
    }
}
impl AnyDataset for UTXODataset {
    // Generic dataset plumbing: report the minimal initial states required
    // before insertion can proceed.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -1,164 +0,0 @@
mod dataset;
use allocative::Allocative;
use dataset::*;
use rayon::prelude::*;
use itertools::Itertools;
use crate::{
parser::datasets::AnyDatasets,
parser::states::{SplitByUTXOCohort, UTXOCohortId},
structs::{BiMap, Config, Date, Height},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
/// The full set of UTXO cohort datasets, one [`UTXODataset`] per cohort.
#[derive(Allocative)]
pub struct UTXODatasets {
    min_initial_states: MinInitialStates,
    cohorts: SplitByUTXOCohort<UTXODataset>,
}
impl UTXODatasets {
    /// Imports every cohort dataset in parallel (via rayon), then records
    /// the minimal initial states across all of them.
    ///
    /// The cohorts are first built into a `SplitByUTXOCohort<Option<_>>`
    /// shell so each parallel import result can be surfaced (and its error
    /// propagated) individually before unwrapping into the final structure.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByUTXOCohort::<Option<UTXODataset>>::default();
        let path_dataset = config.path_datasets();
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, UTXODataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                cohorts.get_mut(&id).replace(dataset?);
                Ok(())
            })?;
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Safe: the loop above has populated every cohort slot.
            cohorts: cohorts.unwrap(),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));
        Ok(s)
    }
pub fn insert(&mut self, insert_data: &InsertData) {
self.cohorts
.as_mut_vec()
.into_iter()
.for_each(|(cohort, _)| cohort.insert(insert_data))
}
    /// True when any cohort still needs data that can only be derived from
    /// the durable (long-lived) states at this (height, date).
    pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
        let needs_insert_utxo = self.needs_insert_utxo(height, date);
        let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
        let needs_insert_supply = self.needs_insert_supply(height, date);
        let needs_one_shot_states = self.needs_one_shot_states(height, date);
        needs_insert_utxo
            || needs_insert_capitalization
            || needs_insert_supply
            || needs_one_shot_states
    }
    /// True when per-block one-shot states (price paid / unrealized) are needed.
    pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }
    /// True when sent-coin states (inputs / realized) are needed.
    pub fn needs_sent_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    }
    // Aggregated guards: true when ANY cohort's corresponding sub-dataset
    // still needs an insert at this (height, date).
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
    }
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
    }
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_supply(height, date))
    }
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
    }
    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
    }
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
    }
    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_input(height, date))
    }
pub fn compute(
&mut self,
compute_data: &ComputeData,
closes: &mut BiMap<f32>,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,
) {
self.cohorts
.as_mut_vec()
.into_iter()
.for_each(|(cohort, _)| {
cohort.compute(compute_data, closes, circulating_supply, market_cap)
})
}
    // Flattened (dataset, cohort-id) views over all cohorts.
    fn as_vec(&self) -> Vec<(&UTXODataset, UTXOCohortId)> {
        self.cohorts.as_vec()
    }
    fn as_mut_vec(&mut self) -> Vec<(&mut UTXODataset, UTXOCohortId)> {
        self.cohorts.as_mut_vec()
    }
}
impl AnyDatasets for UTXODatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // Type-erased views used by the generic dataset machinery.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &(dyn AnyDataset + Send + Sync))
            .collect_vec()
    }
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        self.as_mut_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &mut dyn AnyDataset)
            .collect_vec()
    }
}

View File

@@ -1,42 +0,0 @@
use std::{thread::sleep, time::Duration};
use brk_parser::bitcoincore_rpc::{Client, RpcApi};
mod actions;
mod databases;
mod datasets;
mod price;
mod states;
pub use actions::*;
pub use databases::*;
pub use datasets::*;
use log::info;
pub use states::*;
use crate::structs::{Config, Exit};
/// Parser entry point: processes every block currently known to the node,
/// then waits for a new block before starting over. Loops forever unless an
/// error bubbles up.
pub fn main(config: &Config, rpc: &Client, exit: &Exit) -> color_eyre::Result<()> {
    loop {
        // Propagate RPC failures instead of panicking (was `.unwrap()`);
        // the function already returns a Result, so `?` is free here.
        let block_count = rpc.get_blockchain_info()?.blocks as usize;

        info!("{block_count} blocks found.");

        // Re-import on each pass so fresh on-disk state is picked up.
        let mut databases = Databases::import(config);
        let mut datasets = Datasets::import(config)?;

        iter_blocks(config, rpc, block_count, exit.clone(), &mut databases, &mut datasets)?;

        if let Some(delay) = config.delay() {
            sleep(Duration::from_secs(delay))
        }

        info!("Waiting for a new block...");

        // Poll once per second until the node reports a new block.
        while block_count == rpc.get_blockchain_info()?.blocks as usize {
            sleep(Duration::from_secs(1))
        }
    }
}

View File

@@ -1,213 +0,0 @@
#![allow(dead_code)]
use std::{collections::BTreeMap, fs};
use color_eyre::eyre::ContextCompat;
use itertools::Itertools;
use log::info;
use serde_json::Value;
use crate::{
io::Json,
structs::{Config, Date, Timestamp, OHLC},
utils::retry,
};
/// Namespace for Binance BTC/USDT price-fetching helpers.
pub struct Binance;
impl Binance {
    /// Extracts timestamped OHLC candles from a manually captured
    /// `binance.har` file in the inputs directory (missing file => empty map).
    ///
    /// Only entries whose request URL contains `/uiKlines` are considered;
    /// their response bodies are parsed as Binance kline arrays
    /// `[timestamp_ms, open, high, low, close, ...]`.
    ///
    /// NOTE(review): the inner `unwrap()`s assume a well-formed HAR file and
    /// well-formed kline payloads — a malformed capture will panic rather
    /// than return an error.
    pub fn read_har_file(config: &Config) -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        info!("binance: read har file");
        let path = config.path_inputs();
        fs::create_dir_all(&path)?;
        let path_binance_har = path.join("binance.har");
        let json: BTreeMap<String, Value> = Json::import(&path_binance_har).unwrap_or_default();
        Ok(json
            .get("log")
            .context("Expect object to have log attribute")?
            .as_object()
            .context("Expect to be an object")?
            .get("entries")
            .context("Expect object to have entries")?
            .as_array()
            .context("Expect to be an array")?
            .iter()
            .filter(|entry| {
                entry
                    .as_object()
                    .unwrap()
                    .get("request")
                    .unwrap()
                    .as_object()
                    .unwrap()
                    .get("url")
                    .unwrap()
                    .as_str()
                    .unwrap()
                    .contains("/uiKlines")
            })
            .flat_map(|entry| {
                let response = entry
                    .as_object()
                    .unwrap()
                    .get("response")
                    .unwrap()
                    .as_object()
                    .unwrap();
                let content = response.get("content").unwrap().as_object().unwrap();
                let text = content.get("text");
                // Entries without a captured body are skipped.
                if text.is_none() {
                    return vec![];
                }
                let text = text.unwrap().as_str().unwrap();
                let arrays: Value = serde_json::from_str(text).unwrap();
                arrays
                    .as_array()
                    .unwrap()
                    .iter()
                    .map(|array| {
                        let array = array.as_array().unwrap();
                        // Binance timestamps are in milliseconds.
                        let timestamp = (array.first().unwrap().as_u64().unwrap() / 1000) as u32;
                        // Price fields arrive as strings.
                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };
                        (
                            timestamp,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect_vec()
            })
            .collect::<BTreeMap<_, _>>())
    }
    /// Fetches the latest 1000 one-minute BTC/USDT candles from Binance,
    /// keyed by Unix timestamp (seconds). Retries up to 10 times, waiting
    /// 30s between attempts.
    pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        info!("binance: fetch 1mn");
        retry(
            |_| {
                let body: Value = reqwest::blocking::get(
                    "https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1m&limit=1000",
                )?
                .json()?;
                Ok(body
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| -> color_eyre::Result<_> {
                        // [timestamp, open, high, low, close, volume, ...]
                        let array = value.as_array().context("Expect to be array")?;
                        // Milliseconds -> seconds.
                        let timestamp = (array
                            .first()
                            .context("Expect to have first")?
                            .as_u64()
                            .context("Expect to be convertible to u64")?
                            / 1_000) as u32;
                        // Price fields arrive as strings.
                        let get_f32 = |index: usize| -> color_eyre::Result<f32> {
                            Ok(array
                                .get(index)
                                .context("Expect to have index")?
                                .as_str()
                                .context("Expect to have &str")?
                                .parse::<f32>()?)
                        };
                        Ok((
                            timestamp,
                            OHLC {
                                open: get_f32(1)?,
                                high: get_f32(2)?,
                                low: get_f32(3)?,
                                close: get_f32(4)?,
                            },
                        ))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            10,
        )
    }
    /// Fetches daily BTC/USDT candles from Binance, keyed by calendar date.
    /// Retries up to 10 times, waiting 30s between attempts.
    pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
        info!("binance: fetch 1d");
        retry(
            |_| {
                let body: Value = reqwest::blocking::get(
                    "https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1d",
                )?
                .json()?;
                Ok(body
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| -> color_eyre::Result<_> {
                        // [timestamp, open, high, low, close, volume, ...]
                        let array = value.as_array().context("Expect to be array")?;
                        // Milliseconds -> seconds -> date.
                        let date = Timestamp::from(
                            (array
                                .first()
                                .context("Expect to have first")?
                                .as_u64()
                                .context("Expect to be convertible to u64")?
                                / 1_000) as u32,
                        )
                        .to_date();
                        // Price fields arrive as strings.
                        let get_f32 = |index: usize| -> color_eyre::Result<f32> {
                            Ok(array
                                .get(index)
                                .context("Expect to have index")?
                                .as_str()
                                .context("Expect to have &str")?
                                .parse::<f32>()?)
                        };
                        Ok((
                            date,
                            OHLC {
                                open: get_f32(1)?,
                                high: get_f32(2)?,
                                low: get_f32(3)?,
                                close: get_f32(4)?,
                            },
                        ))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            10,
        )
    }
}

View File

@@ -1,118 +0,0 @@
use std::{collections::BTreeMap, str::FromStr};
use chrono::NaiveDate;
use color_eyre::eyre::ContextCompat;
use log::info;
use serde_json::Value;
use crate::{
structs::{Date, DateMapChunkId, HeightMapChunkId, MapChunkId, OHLC},
utils::retry,
};
/// Namespace for fetching historical prices from the kibo.money API.
pub struct Kibo;
/// Primary API host.
const KIBO_OFFICIAL_URL: &str = "https://kibo.money/api";
/// Fallback host used for the second half of the retry attempts.
const KIBO_OFFICIAL_BACKUP_URL: &str = "https://backup.kibo.money/api";
/// Total retry attempts per request.
const RETRIES: usize = 10;
impl Kibo {
fn get_base_url(try_index: usize) -> &'static str {
if try_index < RETRIES / 2 {
KIBO_OFFICIAL_URL
} else {
KIBO_OFFICIAL_BACKUP_URL
}
}
    /// Fetches one chunk of per-block-height OHLC prices. Retries up to
    /// `RETRIES` times (30s apart), switching to the backup host halfway
    /// through the attempts.
    pub fn fetch_height_prices(chunk_id: HeightMapChunkId) -> color_eyre::Result<Vec<OHLC>> {
        info!("kibo: fetch height prices");
        retry(
            |try_index| {
                let base_url = Self::get_base_url(try_index);
                let body: Value = reqwest::blocking::get(format!(
                    "{base_url}/height-to-price?chunk={}",
                    chunk_id.to_usize()
                ))?
                .json()?;
                // Response shape: { dataset: { map: [ohlc, ...] } }.
                let vec = body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("dataset")
                    .context("Expect object to have dataset")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("map")
                    .context("Expect to have map")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(Self::value_to_ohlc)
                    .collect::<Result<Vec<_>, _>>()?;
                Ok(vec)
            },
            30,
            RETRIES,
        )
    }
    /// Fetches one chunk of per-date OHLC prices, keyed by ISO-formatted
    /// date. Retries like [`Self::fetch_height_prices`].
    pub fn fetch_date_prices(chunk_id: DateMapChunkId) -> color_eyre::Result<BTreeMap<Date, OHLC>> {
        info!("kibo: fetch date prices");
        retry(
            |try_index| {
                let base_url = Self::get_base_url(try_index);
                let body: Value = reqwest::blocking::get(format!(
                    "{base_url}/date-to-price?chunk={}",
                    chunk_id.to_usize()
                ))?
                .json()?;
                // Response shape: { dataset: { map: { "YYYY-MM-DD": ohlc } } }.
                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("dataset")
                    .context("Expect object to have dataset")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("map")
                    .context("Expect to have map")?
                    .as_object()
                    .context("Expect to be an object")?
                    .iter()
                    .map(|(serialized_date, value)| -> color_eyre::Result<_> {
                        let date = Date::wrap(NaiveDate::from_str(serialized_date)?);
                        Ok((date, Self::value_to_ohlc(value)?))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            RETRIES,
        )
    }
fn value_to_ohlc(value: &Value) -> color_eyre::Result<OHLC> {
let ohlc = value.as_object().context("Expect as_object to work")?;
let get_value = |key: &str| -> color_eyre::Result<f32> {
Ok(ohlc
.get(key)
.context("Expect get key to work")?
.as_f64()
.context("Expect as_f64 to work")? as f32)
};
Ok(OHLC {
open: get_value("open")?,
high: get_value("high")?,
low: get_value("low")?,
close: get_value("close")?,
})
}
}

View File

@@ -1,133 +0,0 @@
use std::collections::BTreeMap;
use color_eyre::eyre::ContextCompat;
use log::info;
use serde_json::Value;
use crate::{
structs::{Date, Timestamp, OHLC},
utils::retry,
};
/// Namespace for Kraken XBT/USD price-fetching helpers.
pub struct Kraken;
impl Kraken {
pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
info!("kraken: fetch 1mn");
retry(
|_| {
let body: Value = reqwest::blocking::get(
"https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1",
)?
.json()?;
Ok(body
.as_object()
.context("Expect to be an object")?
.get("result")
.context("Expect object to have result")?
.as_object()
.context("Expect to be an object")?
.get("XXBTZUSD")
.context("Expect to have XXBTZUSD")?
.as_array()
.context("Expect to be an array")?
.iter()
.map(|value| -> color_eyre::Result<_> {
let array = value.as_array().context("Expect as_array to work")?;
let timestamp = array
.first()
.context("Expect first to work")?
.as_u64()
.expect("Expect as_u64 to work")
as u32;
let get_f32 = |index: usize| -> color_eyre::Result<f32> {
Ok(array
.get(index)
.context("Expect get index to work")?
.as_str()
.context("Expect as_str to work")?
.parse::<f32>()?)
};
Ok((
timestamp,
OHLC {
open: get_f32(1)?,
high: get_f32(2)?,
low: get_f32(3)?,
close: get_f32(4)?,
},
))
})
.collect::<Result<BTreeMap<_, _>, _>>()?)
},
30,
10,
)
}
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
info!("fetch kraken daily");
retry(
|_| {
let body: Value = reqwest::blocking::get(
"https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1440",
)?
.json()?;
Ok(body
.as_object()
.context("Expect to be an object")?
.get("result")
.context("Expect object to have result")?
.as_object()
.context("Expect to be an object")?
.get("XXBTZUSD")
.context("Expect to have XXBTZUSD")?
.as_array()
.context("Expect to be an array")?
.iter()
.map(|value| -> color_eyre::Result<_> {
let array = value.as_array().context("Expect as_array to work")?;
let date = Timestamp::from(
array
.first()
.context("Expect first to work")?
.as_u64()
.context("Expect as_u64 to work")?
as u32,
)
.to_date();
let get_f32 = |index: usize| -> color_eyre::Result<f32> {
Ok(array
.get(index)
.context("Expect get index to work")?
.as_str()
.context("Expect as_str to work")?
.parse::<f32>()?)
};
Ok((
date,
OHLC {
open: get_f32(1)?,
high: get_f32(2)?,
low: get_f32(3)?,
close: get_f32(4)?,
},
))
})
.collect::<Result<BTreeMap<_, _>, _>>()?)
},
30,
10,
)
}
}

View File

@@ -1,7 +0,0 @@
mod binance;
mod kibo;
mod kraken;
pub use binance::*;
pub use kibo::*;
pub use kraken::*;

View File

@@ -1,36 +0,0 @@
use std::{
fmt::Debug,
fs, io,
path::{Path, PathBuf},
};
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use crate::{io::Serialization, structs::Config};
/// Persistence contract for parser state snapshots.
///
/// Implementors get binary (de)serialization to a file named after
/// [`Self::name`] inside the configured states directory for free; only
/// `name` and `clear` must be provided.
pub trait AnyState
where
    Self: Debug + Encode + Decode + Serialize + DeserializeOwned,
{
    /// File name (without directory) used to persist this state.
    fn name<'a>() -> &'a str;
    /// Full on-disk path of the persisted state.
    fn path(config: &Config) -> PathBuf {
        config.path_states().join(Self::name())
    }
    /// Clears the in-memory state and deletes its file.
    ///
    /// NOTE(review): if the file does not exist, this returns the
    /// `remove_file` error even though the in-memory state was cleared.
    fn reset(&mut self, config: &Config) -> color_eyre::Result<(), io::Error> {
        self.clear();
        fs::remove_file(Self::path(config))
    }
    /// Loads the state from disk.
    fn import(config: &Config) -> color_eyre::Result<Self> {
        Serialization::Binary.import(&Self::path(config))
    }
    /// Writes the state to disk.
    fn export(&self, config: &Config) -> color_eyre::Result<()> {
        Serialization::Binary.export(Path::new(&Self::path(config)), self)
    }
    /// Resets the in-memory contents without touching the disk.
    fn clear(&mut self);
}

View File

@@ -1,121 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::{
parser::states::{DurableStates, IsZero, OneShotStates, PriceToValue, UnrealizedState},
structs::{Amount, Price},
};
/// Long-lived aggregate state for one address cohort: the running address
/// count, shared durable aggregates, and the amount held per price level.
#[derive(Default, Debug, Allocative)]
pub struct AddressCohortDurableStates {
    pub address_count: f64,
    pub durable_states: DurableStates,
    /// Distribution of held amount across acquisition prices.
    pub price_to_amount: PriceToValue<Amount>,
}
impl AddressCohortDurableStates {
    /// Adds a batch of addresses/amounts to the cohort's running state.
    #[allow(clippy::too_many_arguments)]
    pub fn increment(
        &mut self,
        address_count: f64,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
    ) -> color_eyre::Result<()> {
        self.address_count += address_count;
        self._crement(amount, utxo_count, realized_cap, mean_price_paid, true)
    }
    /// Removes a batch of addresses/amounts from the cohort's running state.
    #[allow(clippy::too_many_arguments)]
    pub fn decrement(
        &mut self,
        address_count: f64,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
    ) -> color_eyre::Result<()> {
        self.address_count -= address_count;
        self._crement(amount, utxo_count, realized_cap, mean_price_paid, false)
    }
    /// Shared body of [`Self::increment`]/[`Self::decrement`]: updates the
    /// durable aggregates and, for non-zero amounts, the per-price
    /// distribution. `increment` selects the direction.
    #[allow(clippy::too_many_arguments)]
    pub fn _crement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
        increment: bool,
    ) -> color_eyre::Result<()> {
        if increment {
            self.durable_states
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.durable_states
                .decrement(amount, utxo_count, realized_cap)
        }
        // Dump the failure report before propagating it.
        .inspect_err(|report| {
            dbg!(report);
        })?;
        // Zero amounts would be a no-op on the price distribution.
        if !amount.is_zero()? {
            if increment {
                self.price_to_amount.increment(mean_price_paid, amount);
            } else {
                self.price_to_amount
                    .decrement(mean_price_paid, amount)
                    .inspect_err(|report| {
                        dbg!(report, "cents_to_split_amount decrement",);
                    })?;
            }
        }
        Ok(())
    }
    /// Walks the per-price distribution once and derives the cohort's
    /// one-shot states: price-paid percentiles and unrealized P/L at the
    /// block price (and at the date price, when one is supplied).
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> OneShotStates {
        let mut one_shot_states = OneShotStates::default();
        // Only allocate a date-level unrealized state when a date price exists.
        if date_price.is_some() {
            one_shot_states
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }
        let one_shot_states_ref = &mut one_shot_states;
        let supply = self.durable_states.supply_state.supply();
        // Single pass over (price_paid, amount) pairs feeds all three states.
        self.price_to_amount.iterate(supply, |price_paid, amount| {
            one_shot_states_ref
                .price_paid_state
                .iterate(price_paid, amount, supply);
            one_shot_states_ref
                .unrealized_block_state
                .iterate(price_paid, block_price, amount);
            if let Some(unrealized_date_state) = one_shot_states_ref.unrealized_date_state.as_mut()
            {
                unrealized_date_state.iterate(price_paid, date_price.unwrap(), amount);
            }
        });
        one_shot_states
    }
}
impl AddAssign for AddressCohortDurableStates {
    // Field-wise merge; used to combine partial cohort states.
    fn add_assign(&mut self, rhs: Self) {
        self.address_count += rhs.address_count;
        self.durable_states += rhs.durable_states;
        self.price_to_amount += rhs.price_to_amount;
    }
}

View File

@@ -1,80 +0,0 @@
use crate::structs::{AddressLiquidity, AddressSize, AddressSplit, AddressType};
/// Identifier for every tracked address cohort: the full set, the three
/// liquidity buckets, the eight balance-size buckets, and the six script
/// (address) types.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub enum AddressCohortId {
    All,
    // Liquidity cohorts
    Illiquid,
    Liquid,
    HighlyLiquid,
    // Size cohorts
    Plankton,
    Shrimp,
    Crab,
    Fish,
    Shark,
    Whale,
    Humpback,
    Megalodon,
    // Address (script) type cohorts
    P2PK,
    P2PKH,
    P2SH,
    P2WPKH,
    P2WSH,
    P2TR,
}
impl AddressCohortId {
pub fn as_name(&self) -> Option<&str> {
match self {
Self::All => None,
Self::Illiquid => Some("illiquid"),
Self::Liquid => Some("liquid"),
Self::HighlyLiquid => Some("highly_liquid"),
Self::Plankton => Some("plankton"),
Self::Shrimp => Some("shrimp"),
Self::Crab => Some("crab"),
Self::Fish => Some("fish"),
Self::Shark => Some("shark"),
Self::Whale => Some("whale"),
Self::Humpback => Some("humpback"),
Self::Megalodon => Some("megalodon"),
Self::P2PK => Some("p2pk"),
Self::P2PKH => Some("p2pkh"),
Self::P2SH => Some("p2sh"),
Self::P2WPKH => Some("p2wpkh"),
Self::P2WSH => Some("p2wsh"),
Self::P2TR => Some("p2tr"),
}
}
pub fn as_split(&self) -> AddressSplit {
match self {
Self::All => AddressSplit::All,
Self::Illiquid => AddressSplit::Liquidity(AddressLiquidity::Illiquid),
Self::Liquid => AddressSplit::Liquidity(AddressLiquidity::Liquid),
Self::HighlyLiquid => AddressSplit::Liquidity(AddressLiquidity::HighlyLiquid),
Self::Plankton => AddressSplit::Size(AddressSize::Plankton),
Self::Shrimp => AddressSplit::Size(AddressSize::Shrimp),
Self::Crab => AddressSplit::Size(AddressSize::Crab),
Self::Fish => AddressSplit::Size(AddressSize::Fish),
Self::Shark => AddressSplit::Size(AddressSize::Shark),
Self::Whale => AddressSplit::Size(AddressSize::Whale),
Self::Humpback => AddressSplit::Size(AddressSize::Humpback),
Self::Megalodon => AddressSplit::Size(AddressSize::Megalodon),
Self::P2PK => AddressSplit::Type(AddressType::P2PK),
Self::P2PKH => AddressSplit::Type(AddressType::P2PKH),
Self::P2SH => AddressSplit::Type(AddressType::P2SH),
Self::P2WPKH => AddressSplit::Type(AddressType::P2WPKH),
Self::P2WSH => AddressSplit::Type(AddressType::P2WSH),
Self::P2TR => AddressSplit::Type(AddressType::P2TR),
}
}
}

View File

@@ -1,156 +0,0 @@
use std::iter::Sum;
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use crate::{
parser::databases::AddressIndexToAddressData,
structs::{AddressData, AddressRealizedData, Amount, Price},
};
use super::{AddressCohortDurableStates, AddressCohortsOneShotStates, SplitByAddressCohort};
/// Durable (carried block-to-block) states for every address cohort, one
/// `AddressCohortDurableStates` per cohort.
#[derive(Default, Deref, DerefMut, Allocative)]
pub struct AddressCohortsDurableStates(SplitByAddressCohort<AddressCohortDurableStates>);
impl AddressCohortsDurableStates {
    /// Builds the cohort states from the address-index database (delegates
    /// to `compute_addres_cohorts_durable_states`).
    pub fn init(address_index_to_address_data: &mut AddressIndexToAddressData) -> Self {
        address_index_to_address_data.compute_addres_cohorts_durable_states()
    }
    /// Applies one address's block transition: removes its pre-block state,
    /// then adds its post-block state.
    pub fn iterate(
        &mut self,
        address_realized_data: &AddressRealizedData,
        current_address_data: &AddressData,
    ) -> color_eyre::Result<()> {
        self.decrement(&address_realized_data.initial_address_data)
            .inspect_err(|report| {
                dbg!(report);
                dbg!(address_realized_data, current_address_data);
                dbg!("decrement initial address_data");
            })?;
        self.increment(current_address_data).inspect_err(|report| {
            dbg!(report);
            dbg!(address_realized_data, current_address_data);
            dbg!("increment address_data");
        })?;
        Ok(())
    }
    /// Should always increment using current address data state
    pub fn increment(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, true)
    }
    /// Should always decrement using initial address data state
    fn decrement(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, false)
    }
    /// Shared body of `increment`/`decrement`: updates the `All`, per-type
    /// and per-size cohorts with the address's full values, and each
    /// liquidity cohort with its liquidity-weighted share.
    fn _crement(&mut self, address_data: &AddressData, increment: bool) -> color_eyre::Result<()> {
        // No need to either insert or remove if empty
        if address_data.is_empty() {
            return Ok(());
        }
        let amount = address_data.amount;
        let utxo_count = address_data.outputs_len as f64;
        let realized_cap = address_data.realized_cap;
        // NOTE(review): relies on `is_empty()` (checked above) implying a
        // non-zero amount, otherwise this division is degenerate — confirm.
        let mean_price_paid = address_data.realized_cap / amount;
        let liquidity_classification = address_data.compute_liquidity_classification();
        // Pre-split every quantity across the three liquidity buckets.
        let split_address_count = liquidity_classification.split(1.0);
        let split_sat_amount = liquidity_classification.split(amount.to_sat() as f64);
        let split_utxo_count = liquidity_classification.split(utxo_count);
        let split_realized_cap = liquidity_classification.split(realized_cap.to_dollar());
        self.0.iterate(
            address_data,
            |state| {
                // Unsplit it must be one
                let address_count = 1.0;
                if increment {
                    state.increment(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                } else {
                    state.decrement(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                }
            },
            |liquidity, state| {
                let address_count = split_address_count.from(liquidity);
                // Liquidity shares are fractional; floor back to whole sats.
                let amount = Amount::from_sat(split_sat_amount.from(liquidity).floor() as u64);
                let utxo_count = split_utxo_count.from(liquidity);
                let realized_cap = Price::from_dollar(split_realized_cap.from(liquidity));
                if increment {
                    state.increment(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                } else {
                    state.decrement(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                }
            },
        )?;
        Ok(())
    }
    /// Computes the one-shot (percentile / unrealized) states for every
    /// cohort in parallel, then stitches the results back by cohort id.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> AddressCohortsOneShotStates {
        let mut one_shot_states = AddressCohortsOneShotStates::default();
        self.as_vec()
            .into_par_iter()
            .map(|(states, address_cohort_id)| {
                (
                    address_cohort_id,
                    states.compute_one_shot_states(block_price, date_price),
                )
            })
            .collect::<Vec<_>>()
            .into_iter()
            .for_each(|(address_cohort_id, states)| {
                *one_shot_states.get_mut_from_id(&address_cohort_id) = states;
            });
        one_shot_states
    }
}
impl Sum for AddressCohortsDurableStates {
    /// Accumulates every item into a fresh default via the inner
    /// `SplitByAddressCohort` `AddAssign`.
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let mut total = Self::default();
        for item in iter {
            total.0 += item.0;
        }
        total
    }
}

View File

@@ -1,45 +0,0 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::InputState,
structs::{AddressRealizedData, Amount, LiquidityClassification},
};
use super::SplitByAddressCohort;
/// Per-cohort input (spend) counters for the current block.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsInputStates(SplitByAddressCohort<InputState>);
impl AddressCohortsInputStates {
    /// Records `realized_data`'s destroyed UTXOs and sent volume in every
    /// cohort the address belongs to; liquidity cohorts receive their
    /// liquidity-weighted share (sats rounded to the nearest whole).
    pub fn iterate_input(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let count = realized_data.utxos_destroyed as f64;
        let sent = realized_data.sent;
        // Full values for the unsplit cohorts (all / type / size).
        let normal_iteration = move |state: &mut InputState| -> color_eyre::Result<()> {
            state.iterate(count, sent);
            Ok(())
        };
        let split_count = liquidity_classification.split(count);
        let split_sent = liquidity_classification.split(sent.to_sat() as f64);
        // Weighted share for each liquidity cohort.
        let liquified_iteration =
            move |liquidity, state: &mut InputState| -> color_eyre::Result<()> {
                state.iterate(
                    split_count.from(liquidity),
                    Amount::from_sat(split_sent.from(liquidity).round() as u64),
                );
                Ok(())
            };
        self.iterate(
            &realized_data.initial_address_data,
            normal_iteration,
            liquified_iteration,
        )
    }
}

View File

@@ -1,8 +0,0 @@
use derive_deref::{Deref, DerefMut};
use crate::parser::states::OneShotStates;
use super::SplitByAddressCohort;
/// One-shot (recomputed every block) states for every address cohort.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOneShotStates(pub SplitByAddressCohort<OneShotStates>);

View File

@@ -1,45 +0,0 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::OutputState,
structs::{AddressRealizedData, Amount, LiquidityClassification},
};
use super::SplitByAddressCohort;
/// Per-cohort output (receive) counters for the current block.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOutputStates(SplitByAddressCohort<OutputState>);
impl AddressCohortsOutputStates {
    /// Records `realized_data`'s created UTXOs and received volume in every
    /// cohort the address belongs to; liquidity cohorts receive their
    /// liquidity-weighted share (sats rounded to the nearest whole).
    pub fn iterate_output(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let count = realized_data.utxos_created as f64;
        let volume = realized_data.received;
        // Full values for the unsplit cohorts (all / type / size).
        let normal_iteration = move |state: &mut OutputState| -> color_eyre::Result<()> {
            state.iterate(count, volume);
            Ok(())
        };
        let split_count = liquidity_classification.split(count);
        let split_volume = liquidity_classification.split(volume.to_sat() as f64);
        // Weighted share for each liquidity cohort.
        let liquified_iteration =
            move |liquidity, state: &mut OutputState| -> color_eyre::Result<()> {
                state.iterate(
                    split_count.from(liquidity),
                    Amount::from_sat(split_volume.from(liquidity).round() as u64),
                );
                Ok(())
            };
        self.iterate(
            &realized_data.initial_address_data,
            normal_iteration,
            liquified_iteration,
        )
    }
}

View File

@@ -1,68 +0,0 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::RealizedState,
structs::{AddressRealizedData, LiquidityClassification, Price},
};
use super::SplitByAddressCohort;
/// Per-cohort realized profit/loss and value-flow counters for the block.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsRealizedStates(SplitByAddressCohort<RealizedState>);
impl AddressCohortsRealizedStates {
    /// Records `realized_data`'s profit, loss and (adjusted) value flows in
    /// every cohort the address belongs to; liquidity cohorts receive their
    /// weighted share (split on cent values, truncated back to whole cents).
    pub fn iterate_realized(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let realized_profit = realized_data.profit;
        let realized_loss = realized_data.loss;
        let value_created = realized_data.value_created;
        let adjusted_value_created = realized_data.adjusted_value_created;
        let value_destroyed = realized_data.value_destroyed;
        let adjusted_value_destroyed = realized_data.adjusted_value_destroyed;
        // Full values for the unsplit cohorts (all / type / size).
        let normal_iteration = move |state: &mut RealizedState| -> color_eyre::Result<()> {
            state.iterate(
                realized_profit,
                realized_loss,
                value_created,
                adjusted_value_created,
                value_destroyed,
                adjusted_value_destroyed,
            );
            Ok(())
        };
        let split_realized_profit =
            liquidity_classification.split(realized_profit.to_cent() as f64);
        let split_realized_loss = liquidity_classification.split(realized_loss.to_cent() as f64);
        let split_value_created = liquidity_classification.split(value_created.to_cent() as f64);
        let split_adjusted_value_created =
            liquidity_classification.split(adjusted_value_created.to_cent() as f64);
        let split_value_destroyed =
            liquidity_classification.split(value_destroyed.to_cent() as f64);
        let split_adjusted_value_destroyed =
            liquidity_classification.split(adjusted_value_destroyed.to_cent() as f64);
        // Weighted share for each liquidity cohort.
        let liquified_iteration =
            move |liquidity, state: &mut RealizedState| -> color_eyre::Result<()> {
                state.iterate(
                    Price::from_cent(split_realized_profit.from(liquidity) as u64),
                    Price::from_cent(split_realized_loss.from(liquidity) as u64),
                    Price::from_cent(split_value_created.from(liquidity) as u64),
                    Price::from_cent(split_adjusted_value_created.from(liquidity) as u64),
                    Price::from_cent(split_value_destroyed.from(liquidity) as u64),
                    Price::from_cent(split_adjusted_value_destroyed.from(liquidity) as u64),
                );
                Ok(())
            };
        self.iterate(
            &realized_data.initial_address_data,
            normal_iteration,
            liquified_iteration,
        )
    }
}

View File

@@ -1,17 +0,0 @@
mod cohort_durable_states;
mod cohort_id;
mod cohorts_durable_states;
mod cohorts_input_states;
mod cohorts_one_shot_states;
mod cohorts_output_states;
mod cohorts_realized_states;
mod split_by_address_cohort;
pub use cohort_durable_states::*;
pub use cohort_id::*;
pub use cohorts_durable_states::*;
pub use cohorts_input_states::*;
pub use cohorts_one_shot_states::*;
pub use cohorts_output_states::*;
pub use cohorts_realized_states::*;
pub use split_by_address_cohort::*;

View File

@@ -1,273 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::{AddressData, AddressLiquidity, AddressSize, AddressSplit, AddressType};
use super::AddressCohortId;
/// One value of type `T` per tracked address cohort: the global `all`
/// bucket, three liquidity buckets, eight balance-size buckets and six
/// script-type buckets.
#[derive(Default, Allocative)]
pub struct SplitByAddressCohort<T> {
    pub all: T,
    // Liquidity cohorts
    pub illiquid: T,
    pub liquid: T,
    pub highly_liquid: T,
    // Size cohorts
    pub plankton: T,
    pub shrimp: T,
    pub crab: T,
    pub fish: T,
    pub shark: T,
    pub whale: T,
    pub humpback: T,
    pub megalodon: T,
    // Address (script) type cohorts
    pub p2pk: T,
    pub p2pkh: T,
    pub p2sh: T,
    pub p2wpkh: T,
    pub p2wsh: T,
    pub p2tr: T,
}
impl<T> SplitByAddressCohort<T> {
    /// Shared access to the state selected by `split`; `None` for address
    /// types and sizes that have no cohort (multisig, unknown, op_return,
    /// push-only, empty).
    pub fn get(&self, split: &AddressSplit) -> Option<&T> {
        match &split {
            AddressSplit::All => Some(&self.all),
            AddressSplit::Liquidity(address_liquidity) => match address_liquidity {
                AddressLiquidity::Illiquid => Some(&self.illiquid),
                AddressLiquidity::Liquid => Some(&self.liquid),
                AddressLiquidity::HighlyLiquid => Some(&self.highly_liquid),
            },
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&self.p2pk),
                AddressType::P2PKH => Some(&self.p2pkh),
                AddressType::P2SH => Some(&self.p2sh),
                AddressType::P2WPKH => Some(&self.p2wpkh),
                AddressType::P2WSH => Some(&self.p2wsh),
                AddressType::P2TR => Some(&self.p2tr),
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&self.plankton),
                AddressSize::Shrimp => Some(&self.shrimp),
                AddressSize::Crab => Some(&self.crab),
                AddressSize::Fish => Some(&self.fish),
                AddressSize::Shark => Some(&self.shark),
                AddressSize::Whale => Some(&self.whale),
                AddressSize::Humpback => Some(&self.humpback),
                AddressSize::Megalodon => Some(&self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }
    /// Applies `normal_iteration` to the `All` cohort and (when tracked) the
    /// address's type and size cohorts, and `liquified_iteration` to each of
    /// the three liquidity cohorts.
    pub fn iterate(
        &mut self,
        address_data: &AddressData,
        normal_iteration: impl Fn(&mut T) -> color_eyre::Result<()>,
        liquified_iteration: impl Fn(AddressLiquidity, &mut T) -> color_eyre::Result<()>,
    ) -> color_eyre::Result<()> {
        normal_iteration(self.get_mut_from_split(&AddressSplit::All).unwrap())?;
        let mut _liquified_iteration = |address_liquidity| {
            liquified_iteration(
                address_liquidity,
                self.get_mut_from_split(&AddressSplit::Liquidity(address_liquidity))
                    .unwrap(),
            )
        };
        _liquified_iteration(AddressLiquidity::Illiquid)?;
        _liquified_iteration(AddressLiquidity::Liquid)?;
        _liquified_iteration(AddressLiquidity::HighlyLiquid)?;
        // Type and size cohorts may be untracked (e.g. multisig, empty).
        if let Some(state) = self.get_mut_from_split(&AddressSplit::Type(address_data.address_type))
        {
            normal_iteration(state)?;
        }
        if let Some(state) = self.get_mut_from_split(&AddressSplit::Size(AddressSize::from_amount(
            address_data.amount,
        ))) {
            normal_iteration(state)?;
        }
        Ok(())
    }
    /// Mutable counterpart of `get`; kept in sync with it by hand.
    fn get_mut_from_split(&mut self, split: &AddressSplit) -> Option<&mut T> {
        match &split {
            AddressSplit::All => Some(&mut self.all),
            AddressSplit::Liquidity(address_liquidity) => match address_liquidity {
                AddressLiquidity::Illiquid => Some(&mut self.illiquid),
                AddressLiquidity::Liquid => Some(&mut self.liquid),
                AddressLiquidity::HighlyLiquid => Some(&mut self.highly_liquid),
            },
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&mut self.p2pk),
                AddressType::P2PKH => Some(&mut self.p2pkh),
                AddressType::P2SH => Some(&mut self.p2sh),
                AddressType::P2WPKH => Some(&mut self.p2wpkh),
                AddressType::P2WSH => Some(&mut self.p2wsh),
                AddressType::P2TR => Some(&mut self.p2tr),
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&mut self.plankton),
                AddressSize::Shrimp => Some(&mut self.shrimp),
                AddressSize::Crab => Some(&mut self.crab),
                AddressSize::Fish => Some(&mut self.fish),
                AddressSize::Shark => Some(&mut self.shark),
                AddressSize::Whale => Some(&mut self.whale),
                AddressSize::Humpback => Some(&mut self.humpback),
                AddressSize::Megalodon => Some(&mut self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }
    /// Mutable access by cohort id; every id maps to a field, so this is
    /// infallible.
    pub fn get_mut_from_id(&mut self, id: &AddressCohortId) -> &mut T {
        match id {
            AddressCohortId::All => &mut self.all,
            AddressCohortId::Illiquid => &mut self.illiquid,
            AddressCohortId::Liquid => &mut self.liquid,
            AddressCohortId::HighlyLiquid => &mut self.highly_liquid,
            AddressCohortId::Plankton => &mut self.plankton,
            AddressCohortId::Shrimp => &mut self.shrimp,
            AddressCohortId::Crab => &mut self.crab,
            AddressCohortId::Fish => &mut self.fish,
            AddressCohortId::Shark => &mut self.shark,
            AddressCohortId::Whale => &mut self.whale,
            AddressCohortId::Humpback => &mut self.humpback,
            AddressCohortId::Megalodon => &mut self.megalodon,
            AddressCohortId::P2PK => &mut self.p2pk,
            AddressCohortId::P2PKH => &mut self.p2pkh,
            AddressCohortId::P2SH => &mut self.p2sh,
            AddressCohortId::P2WPKH => &mut self.p2wpkh,
            AddressCohortId::P2WSH => &mut self.p2wsh,
            AddressCohortId::P2TR => &mut self.p2tr,
        }
    }
    /// Every cohort with its id, in a fixed order.
    pub fn as_vec(&self) -> Vec<(&T, AddressCohortId)> {
        vec![
            (&self.all, AddressCohortId::All),
            (&self.illiquid, AddressCohortId::Illiquid),
            (&self.liquid, AddressCohortId::Liquid),
            (&self.highly_liquid, AddressCohortId::HighlyLiquid),
            (&self.plankton, AddressCohortId::Plankton),
            (&self.shrimp, AddressCohortId::Shrimp),
            (&self.crab, AddressCohortId::Crab),
            (&self.fish, AddressCohortId::Fish),
            (&self.shark, AddressCohortId::Shark),
            (&self.whale, AddressCohortId::Whale),
            (&self.humpback, AddressCohortId::Humpback),
            (&self.megalodon, AddressCohortId::Megalodon),
            (&self.p2pk, AddressCohortId::P2PK),
            (&self.p2pkh, AddressCohortId::P2PKH),
            (&self.p2sh, AddressCohortId::P2SH),
            (&self.p2wpkh, AddressCohortId::P2WPKH),
            (&self.p2wsh, AddressCohortId::P2WSH),
            (&self.p2tr, AddressCohortId::P2TR),
        ]
    }
    /// Mutable variant of `as_vec`, same order.
    pub fn as_mut_vec(&mut self) -> Vec<(&mut T, AddressCohortId)> {
        vec![
            (&mut self.all, AddressCohortId::All),
            (&mut self.illiquid, AddressCohortId::Illiquid),
            (&mut self.liquid, AddressCohortId::Liquid),
            (&mut self.highly_liquid, AddressCohortId::HighlyLiquid),
            (&mut self.plankton, AddressCohortId::Plankton),
            (&mut self.shrimp, AddressCohortId::Shrimp),
            (&mut self.crab, AddressCohortId::Crab),
            (&mut self.fish, AddressCohortId::Fish),
            (&mut self.shark, AddressCohortId::Shark),
            (&mut self.whale, AddressCohortId::Whale),
            (&mut self.humpback, AddressCohortId::Humpback),
            (&mut self.megalodon, AddressCohortId::Megalodon),
            (&mut self.p2pk, AddressCohortId::P2PK),
            (&mut self.p2pkh, AddressCohortId::P2PKH),
            (&mut self.p2sh, AddressCohortId::P2SH),
            (&mut self.p2wpkh, AddressCohortId::P2WPKH),
            (&mut self.p2wsh, AddressCohortId::P2WSH),
            (&mut self.p2tr, AddressCohortId::P2TR),
        ]
    }
}
impl<T> AddAssign for SplitByAddressCohort<T>
where
    T: AddAssign,
{
    /// Field-wise merge of `rhs` into `self`.
    fn add_assign(&mut self, rhs: Self) {
        // Exhaustive destructuring: adding a cohort field without updating
        // this impl becomes a compile error.
        let Self {
            all,
            illiquid,
            liquid,
            highly_liquid,
            plankton,
            shrimp,
            crab,
            fish,
            shark,
            whale,
            humpback,
            megalodon,
            p2pk,
            p2pkh,
            p2sh,
            p2wpkh,
            p2wsh,
            p2tr,
        } = rhs;
        self.all += all;
        self.illiquid += illiquid;
        self.liquid += liquid;
        self.highly_liquid += highly_liquid;
        self.plankton += plankton;
        self.shrimp += shrimp;
        self.crab += crab;
        self.fish += fish;
        self.shark += shark;
        self.whale += whale;
        self.humpback += humpback;
        self.megalodon += megalodon;
        self.p2pk += p2pk;
        self.p2pkh += p2pkh;
        self.p2sh += p2sh;
        self.p2wpkh += p2wpkh;
        self.p2wsh += p2wsh;
        self.p2tr += p2tr;
    }
}
impl<T> SplitByAddressCohort<Option<T>> {
    /// Unwraps every cohort field, panicking if any is `None`.
    pub fn unwrap(self) -> SplitByAddressCohort<T> {
        let Self {
            all,
            illiquid,
            liquid,
            highly_liquid,
            plankton,
            shrimp,
            crab,
            fish,
            shark,
            whale,
            humpback,
            megalodon,
            p2pk,
            p2pkh,
            p2sh,
            p2wpkh,
            p2wsh,
            p2tr,
        } = self;
        SplitByAddressCohort {
            all: all.unwrap(),
            illiquid: illiquid.unwrap(),
            liquid: liquid.unwrap(),
            highly_liquid: highly_liquid.unwrap(),
            plankton: plankton.unwrap(),
            shrimp: shrimp.unwrap(),
            crab: crab.unwrap(),
            fish: fish.unwrap(),
            shark: shark.unwrap(),
            whale: whale.unwrap(),
            humpback: humpback.unwrap(),
            megalodon: megalodon.unwrap(),
            p2pk: p2pk.unwrap(),
            p2pkh: p2pkh.unwrap(),
            p2sh: p2sh.unwrap(),
            p2wpkh: p2wpkh.unwrap(),
            p2wsh: p2wsh.unwrap(),
            p2tr: p2tr.unwrap(),
        }
    }
}

View File

@@ -1,30 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::Price;
/// Running realized capitalization (sum of price paid for all coins held).
#[derive(Debug, Default, Allocative)]
pub struct CapitalizationState {
    realized_cap: Price,
}
impl CapitalizationState {
    /// Current realized capitalization.
    pub fn realized_cap(&self) -> Price {
        self.realized_cap
    }
    /// Adds the realized cap of newly tracked coins.
    pub fn increment(&mut self, realized_cap: Price) {
        self.realized_cap += realized_cap;
    }
    /// Subtracts the realized cap of removed coins.
    pub fn decrement(&mut self, realized_cap: Price) {
        self.realized_cap -= realized_cap;
    }
}
impl AddAssign for CapitalizationState {
    fn add_assign(&mut self, rhs: Self) {
        self.realized_cap += rhs.realized_cap;
    }
}

View File

@@ -1,50 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::{Amount, Price};
use super::{CapitalizationState, SupplyState, UTXOState};
/// The cohort state that persists across blocks: realized cap, supply and
/// UTXO count, updated incrementally.
#[derive(Default, Debug, Allocative)]
pub struct DurableStates {
    pub capitalization_state: CapitalizationState,
    pub supply_state: SupplyState,
    pub utxo_state: UTXOState,
}
impl DurableStates {
    /// Adds `amount`, `utxo_count` and `realized_cap` to the running totals.
    ///
    /// Always returns `Ok`; the `Result` mirrors `decrement`'s signature.
    pub fn increment(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        self.utxo_state.increment(utxo_count);
        self.capitalization_state.increment(realized_cap);
        self.supply_state.increment(amount);
        Ok(())
    }
    /// Subtracts from the running totals; errors when the UTXO count or
    /// supply would go negative.
    ///
    /// NOTE(review): if the supply decrement fails, the UTXO and cap
    /// decrements have already been applied, leaving the state partially
    /// updated — confirm callers treat this error as fatal.
    pub fn decrement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        self.utxo_state.decrement(utxo_count)?;
        self.capitalization_state.decrement(realized_cap);
        self.supply_state.decrement(amount)?;
        Ok(())
    }
}
impl AddAssign for DurableStates {
    fn add_assign(&mut self, rhs: Self) {
        self.capitalization_state += rhs.capitalization_state;
        self.supply_state += rhs.supply_state;
        self.utxo_state += rhs.utxo_state;
    }
}

View File

@@ -1,22 +0,0 @@
use crate::structs::Amount;
/// Spend-side counters: destroyed-UTXO count and sent volume.
#[derive(Debug, Default)]
pub struct InputState {
    // Fractional because liquidity splits contribute weighted shares.
    count: f64,
    volume: Amount,
}
impl InputState {
    /// Number of inputs (destroyed UTXOs) recorded.
    pub fn count(&self) -> f64 {
        self.count
    }
    /// Total volume spent.
    pub fn volume(&self) -> Amount {
        self.volume
    }
    /// Accumulates one batch of inputs.
    pub fn iterate(&mut self, count: f64, volume: Amount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -1,23 +0,0 @@
mod capitalization_state;
mod durable_states;
mod input_state;
mod one_shot_states;
mod output_state;
mod price_paid_state;
mod price_to_value;
mod realized_state;
mod supply_state;
mod unrealized_state;
mod utxo_state;
pub use capitalization_state::*;
pub use durable_states::*;
pub use input_state::*;
pub use one_shot_states::*;
pub use output_state::*;
pub use price_paid_state::*;
pub use price_to_value::*;
pub use realized_state::*;
pub use supply_state::*;
pub use unrealized_state::*;
pub use utxo_state::*;

View File

@@ -1,9 +0,0 @@
use super::{PricePaidState, UnrealizedState};
/// States recomputed from scratch each block (as opposed to durable ones):
/// price-paid percentiles plus unrealized PnL at block price and,
/// optionally, at date price.
#[derive(Default)]
pub struct OneShotStates {
    pub price_paid_state: PricePaidState,
    pub unrealized_block_state: UnrealizedState,
    // `Some` only when a date price was supplied to the computation.
    pub unrealized_date_state: Option<UnrealizedState>,
}

View File

@@ -1,22 +0,0 @@
use crate::structs::Amount;
/// Receive-side counters: created-output count and received volume.
#[derive(Debug, Default)]
pub struct OutputState {
    // Fractional because liquidity splits contribute weighted shares.
    count: f64,
    volume: Amount,
}
impl OutputState {
    /// Accumulates one batch of created outputs and their volume.
    pub fn iterate(&mut self, count: f64, volume: Amount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -1,286 +0,0 @@
use crate::structs::{Amount, Price};
/// Tracks the price paid at each supply percentile (5% steps) while a
/// price-paid distribution is walked in ascending price order.
#[derive(Default, Debug)]
pub struct PricePaidState {
    // Each slot stays `None` until its percentile of supply is crossed.
    pp_05p: Option<Price>,
    pp_10p: Option<Price>,
    pp_15p: Option<Price>,
    pp_20p: Option<Price>,
    pp_25p: Option<Price>,
    pp_30p: Option<Price>,
    pp_35p: Option<Price>,
    pp_40p: Option<Price>,
    pp_45p: Option<Price>,
    pp_median: Option<Price>,
    pp_55p: Option<Price>,
    pp_60p: Option<Price>,
    pp_65p: Option<Price>,
    pp_70p: Option<Price>,
    pp_75p: Option<Price>,
    pp_80p: Option<Price>,
    pp_85p: Option<Price>,
    pp_90p: Option<Price>,
    pp_95p: Option<Price>,
    // Supply processed so far; compared against total-supply thresholds.
    processed_amount: Amount,
}
impl PricePaidState {
pub fn pp_05p(&self) -> Option<Price> {
self.pp_05p
}
pub fn pp_10p(&self) -> Option<Price> {
self.pp_10p
}
pub fn pp_15p(&self) -> Option<Price> {
self.pp_15p
}
pub fn pp_20p(&self) -> Option<Price> {
self.pp_20p
}
pub fn pp_25p(&self) -> Option<Price> {
self.pp_25p
}
pub fn pp_30p(&self) -> Option<Price> {
self.pp_30p
}
pub fn pp_35p(&self) -> Option<Price> {
self.pp_35p
}
pub fn pp_40p(&self) -> Option<Price> {
self.pp_40p
}
pub fn pp_45p(&self) -> Option<Price> {
self.pp_45p
}
pub fn pp_median(&self) -> Option<Price> {
self.pp_median
}
pub fn pp_55p(&self) -> Option<Price> {
self.pp_55p
}
pub fn pp_60p(&self) -> Option<Price> {
self.pp_60p
}
pub fn pp_65p(&self) -> Option<Price> {
self.pp_65p
}
pub fn pp_70p(&self) -> Option<Price> {
self.pp_70p
}
pub fn pp_75p(&self) -> Option<Price> {
self.pp_75p
}
pub fn pp_80p(&self) -> Option<Price> {
self.pp_80p
}
pub fn pp_85p(&self) -> Option<Price> {
self.pp_85p
}
pub fn pp_90p(&self) -> Option<Price> {
self.pp_90p
}
pub fn pp_95p(&self) -> Option<Price> {
self.pp_95p
}
pub fn iterate(&mut self, price: Price, amount: Amount, supply: Amount) {
let PricePaidState {
processed_amount: processed_supply,
pp_05p,
pp_10p,
pp_15p,
pp_20p,
pp_25p,
pp_30p,
pp_35p,
pp_40p,
pp_45p,
pp_median,
pp_55p,
pp_60p,
pp_65p,
pp_70p,
pp_75p,
pp_80p,
pp_85p,
pp_90p,
pp_95p,
} = self;
*processed_supply += amount;
if pp_95p.is_some() {
return;
}
let processed_sat_amount = processed_supply.to_sat();
let total_sat_supply = supply.to_sat();
if processed_sat_amount >= total_sat_supply * 95 / 100 {
pp_95p.replace(price);
}
if pp_90p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 90 / 100 {
pp_90p.replace(price);
}
if pp_85p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 85 / 100 {
pp_85p.replace(price);
}
if pp_80p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 80 / 100 {
pp_80p.replace(price);
}
if pp_75p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 75 / 100 {
pp_75p.replace(price);
}
if pp_70p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 70 / 100 {
pp_70p.replace(price);
}
if pp_65p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 65 / 100 {
pp_65p.replace(price);
}
if pp_60p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 60 / 100 {
pp_60p.replace(price);
}
if pp_55p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 55 / 100 {
pp_55p.replace(price);
}
if pp_median.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 2 {
pp_median.replace(price);
}
if pp_45p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 45 / 100 {
pp_45p.replace(price);
}
if pp_40p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 40 / 100 {
pp_40p.replace(price);
}
if pp_35p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 35 / 100 {
pp_35p.replace(price);
}
if pp_30p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 30 / 100 {
pp_30p.replace(price);
}
if pp_25p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 4 {
pp_25p.replace(price);
}
if pp_20p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 5 {
pp_20p.replace(price);
}
if pp_15p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 15 / 100 {
pp_15p.replace(price);
}
if pp_10p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 10 {
pp_10p.replace(price);
}
if pp_05p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 20 {
pp_05p.replace(price);
}
}
}

View File

@@ -1,113 +0,0 @@
use std::{
collections::BTreeMap,
fmt::Debug,
ops::{AddAssign, SubAssign},
};
use allocative::Allocative;
use color_eyre::eyre::eyre;
use derive_deref::{Deref, DerefMut};
use crate::structs::{Amount, Price};
/// Distribution of a value `T` (e.g. supply `Amount`) keyed by price in
/// whole cents, ordered so it can be walked from cheapest to priciest.
#[derive(Deref, DerefMut, Default, Debug, Allocative)]
pub struct PriceToValue<T>(BTreeMap<u32, T>);
impl<T> PriceToValue<T>
where
    // `Default` was listed twice and `Clone` is implied by `Copy`;
    // redundant bounds removed.
    T: Debug + AddAssign + SubAssign + CanSubtract + Default + Copy + PartialEq + IsZero,
{
    /// Adds `value` to the bucket for `price` (keyed by whole cents),
    /// creating the bucket if needed.
    pub fn increment(&mut self, price: Price, value: T) {
        *self.entry(price.to_cent() as u32).or_default() += value;
    }
    /// Subtracts `value` from the bucket for `price`.
    ///
    /// Errors if the bucket is missing or holds less than `value`; removes
    /// the bucket when it reaches zero so the map stays minimal.
    pub fn decrement(&mut self, price: Price, value: T) -> color_eyre::Result<()> {
        let cent = price.to_cent() as u32;
        let delete = {
            let Some(self_value) = self.get_mut(&cent) else {
                dbg!(&self.0, price, value);
                return Err(eyre!("self_value is none"));
            };
            if !self_value.can_subtract(&value) {
                dbg!(*self_value, &self.0, price, value);
                return Err(eyre!("self value < value"));
            }
            *self_value -= value;
            self_value.is_zero()?
        };
        if delete {
            self.remove(&cent).unwrap();
        }
        Ok(())
    }
    /// Walks buckets in ascending price order, calling `iterate` for each.
    ///
    /// Panics if the accumulated values don't sum to `supply` — a
    /// consistency check against the cohort's tracked supply.
    pub fn iterate(&self, supply: T, mut iterate: impl FnMut(Price, T)) {
        let mut processed = T::default();
        self.iter().for_each(|(cent, value)| {
            let value = *value;
            processed += value;
            iterate(Price::from_cent(*cent as u64), value)
        });
        if processed != supply {
            dbg!(processed, supply);
            panic!("processed_amount isn't equal to supply")
        }
    }
}
impl<T> AddAssign for PriceToValue<T>
where
    T: AddAssign + Copy,
{
    /// Merges `rhs` bucket-by-bucket, adding values on shared price keys.
    fn add_assign(&mut self, rhs: Self) {
        rhs.0.into_iter().for_each(|(key, value)| {
            self.0
                .entry(key)
                .and_modify(|previous| *previous += value)
                .or_insert(value);
        });
    }
}
/// Checked-subtraction predicate: can `other` be subtracted from `self`
/// without underflow?
pub trait CanSubtract {
    fn can_subtract(&self, other: &Self) -> bool;
}
impl CanSubtract for Amount {
    fn can_subtract(&self, other: &Self) -> bool {
        self >= other
    }
}
/// Fallible zero test used to decide whether a bucket can be dropped.
pub trait IsZero {
    fn is_zero(&self) -> color_eyre::Result<bool>;
}
impl IsZero for Amount {
    fn is_zero(&self) -> color_eyre::Result<bool> {
        Ok(*self == Amount::ZERO)
    }
}

View File

@@ -1,54 +0,0 @@
use crate::structs::Price;
/// Realized profit/loss and value-flow totals accumulated over a block.
#[derive(Debug, Default)]
pub struct RealizedState {
    realized_profit: Price,
    realized_loss: Price,
    value_created: Price,
    adjusted_value_created: Price,
    value_destroyed: Price,
    adjusted_value_destroyed: Price,
}
impl RealizedState {
    /// Total profit realized by spends.
    pub fn realized_profit(&self) -> Price {
        self.realized_profit
    }
    /// Total loss realized by spends.
    pub fn realized_loss(&self) -> Price {
        self.realized_loss
    }
    /// Value of outputs created.
    pub fn value_created(&self) -> Price {
        self.value_created
    }
    /// Adjusted variant of `value_created` (as supplied by the caller).
    pub fn adjusted_value_created(&self) -> Price {
        self.adjusted_value_created
    }
    /// Value of outputs destroyed.
    pub fn value_destroyed(&self) -> Price {
        self.value_destroyed
    }
    /// Adjusted variant of `value_destroyed` (as supplied by the caller).
    pub fn adjusted_value_destroyed(&self) -> Price {
        self.adjusted_value_destroyed
    }
    /// Accumulates one batch of realized figures.
    pub fn iterate(
        &mut self,
        realized_profit: Price,
        realized_loss: Price,
        value_created: Price,
        adjusted_value_created: Price,
        value_destroyed: Price,
        adjusted_value_destroyed: Price,
    ) {
        self.realized_profit += realized_profit;
        self.realized_loss += realized_loss;
        self.value_created += value_created;
        self.adjusted_value_created += adjusted_value_created;
        self.value_destroyed += value_destroyed;
        self.adjusted_value_destroyed += adjusted_value_destroyed;
    }
}

View File

@@ -1,39 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::Amount;
/// Running coin supply tracked for a cohort.
#[derive(Debug, Default, Allocative)]
pub struct SupplyState {
    supply: Amount,
}
impl SupplyState {
    /// Current tracked supply.
    pub fn supply(&self) -> Amount {
        self.supply
    }
    /// Adds newly tracked coins.
    pub fn increment(&mut self, amount: Amount) {
        self.supply += amount;
    }
    /// Removes coins, erroring (instead of underflowing) when more is
    /// removed than is tracked.
    pub fn decrement(&mut self, amount: Amount) -> color_eyre::Result<()> {
        if self.supply < amount {
            dbg!(self.supply, amount);
            // Fixed message: previously read "supply smaller than supply".
            return Err(eyre!("supply smaller than amount"));
        }
        self.supply -= amount;
        Ok(())
    }
}
impl AddAssign for SupplyState {
    fn add_assign(&mut self, rhs: Self) {
        self.supply += rhs.supply;
    }
}

View File

@@ -1,50 +0,0 @@
use std::{cmp::Ordering, ops::Add};
use crate::structs::{Amount, Price};
/// Unrealized profit/loss of a supply distribution against a reference
/// price.
#[derive(Debug, Default)]
pub struct UnrealizedState {
    supply_in_profit: Amount,
    unrealized_profit: Price,
    unrealized_loss: Price,
}
impl UnrealizedState {
    /// Amount of supply whose price paid is below the reference price.
    pub fn supply_in_profit(&self) -> Amount {
        self.supply_in_profit
    }
    /// Total paper profit.
    pub fn unrealized_profit(&self) -> Price {
        self.unrealized_profit
    }
    /// Total paper loss.
    pub fn unrealized_loss(&self) -> Price {
        self.unrealized_loss
    }
    /// Folds in one (price paid, amount) bucket against `price_now`.
    ///
    /// Coins exactly at break-even count toward neither profit, loss, nor
    /// `supply_in_profit`.
    #[inline]
    pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: Amount) {
        match price_then.cmp(&price_now) {
            Ordering::Less => {
                self.unrealized_profit += (price_now - price_then) * amount;
                self.supply_in_profit += amount;
            }
            Ordering::Greater => {
                self.unrealized_loss += (price_then - price_now) * amount;
            }
            Ordering::Equal => {}
        }
    }
}
impl Add<UnrealizedState> for UnrealizedState {
    type Output = UnrealizedState;
    /// Component-wise sum of two states.
    fn add(self, other: UnrealizedState) -> UnrealizedState {
        UnrealizedState {
            supply_in_profit: self.supply_in_profit + other.supply_in_profit,
            unrealized_profit: self.unrealized_profit + other.unrealized_profit,
            unrealized_loss: self.unrealized_loss + other.unrealized_loss,
        }
    }
}

View File

@@ -1,37 +0,0 @@
use std::ops::AddAssign;
use allocative::Allocative;
use color_eyre::eyre::eyre;
#[derive(Debug, Default, Allocative)]
pub struct UTXOState {
count: f64,
}
impl UTXOState {
    /// Returns the current UTXO count.
    pub fn count(&self) -> f64 {
        self.count
    }

    /// Adds `utxo_count` to the running count.
    pub fn increment(&mut self, utxo_count: f64) {
        self.count += utxo_count;
    }

    /// Subtracts `utxo_count` from the running count.
    ///
    /// # Errors
    /// Returns an error (without mutating state) when `utxo_count` exceeds
    /// the current count, which would push the count negative.
    pub fn decrement(&mut self, utxo_count: f64) -> color_eyre::Result<()> {
        if self.count < utxo_count {
            // Carry the operand values in the error itself rather than
            // leaving a debug-only `dbg!` in committed code.
            return Err(eyre!(
                "count ({}) smaller than utxo_count ({})",
                self.count,
                utxo_count
            ));
        }
        self.count -= utxo_count;
        Ok(())
    }
}
impl AddAssign for UTXOState {
    /// Merges another `UTXOState` into this one by summing the counts.
    fn add_assign(&mut self, rhs: Self) {
        let Self { count } = rhs;
        self.count += count;
    }
}

View File

@@ -1,7 +0,0 @@
mod address;
mod any;
mod utxo;
pub use address::*;
pub use any::*;
pub use utxo::*;

View File

@@ -1,109 +0,0 @@
use allocative::Allocative;
use crate::{
parser::states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{Amount, Price},
};
/// Durable per-cohort accounting: aggregate totals plus a price → amount index.
#[derive(Default, Debug, Allocative)]
pub struct UTXOCohortDurableStates {
    // Aggregate supply / UTXO-count / realized-cap state for the cohort.
    pub durable_states: DurableStates,
    // Amount held, bucketed by purchase price rounded to significant digits
    // (see `Price::to_significant` usage in `_crement`).
    pub price_to_amount: PriceToValue<Amount>,
}
impl UTXOCohortDurableStates {
    /// Adds `amount` / `utxo_count` acquired at `price` to the cohort.
    pub fn increment(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, true)
    }

    /// Removes `amount` / `utxo_count` acquired at `price` from the cohort.
    pub fn decrement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, false)
    }

    /// Shared body of `increment` / `decrement`: `increment == true` adds,
    /// otherwise subtracts.
    ///
    /// NOTE(review): underscore-named yet `pub` — kept public for caller
    /// compatibility, but it looks like it was meant to be private.
    ///
    /// # Errors
    /// Propagates underflow errors from the durable states or from the
    /// price-to-amount index when decrementing more than is tracked.
    pub fn _crement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
        increment: bool,
    ) -> color_eyre::Result<()> {
        // Realized cap moves by the value of the coins at their price.
        let realized_cap = price * amount;

        if increment {
            self.durable_states
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.durable_states
                .decrement(amount, utxo_count, realized_cap)
        }
        .inspect_err(|report| {
            // Diagnostic on the error path; replaces a committed `dbg!`.
            eprintln!(
                "durable states update failed: {report:?} (amount: {amount:?}, utxo_count: {utxo_count})"
            );
        })?;

        // The index buckets by significant digits, not the exact price.
        let rounded_price = price.to_significant();
        if increment {
            self.price_to_amount.increment(rounded_price, amount);
        } else {
            self.price_to_amount
                .decrement(rounded_price, amount)
                .inspect_err(|report| {
                    // Diagnostic on the error path; replaces a committed `dbg!`.
                    eprintln!(
                        "price_to_amount decrement failed: {report:?} (rounded_price: {rounded_price:?}, price: {price:?}, amount: {amount:?}, utxo_count: {utxo_count})"
                    );
                })?;
        }

        Ok(())
    }

    /// Walks the price → amount index once and derives the one-shot states:
    /// price-paid percentiles plus unrealized P&L versus `block_price` and,
    /// when supplied, `date_price`.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> OneShotStates {
        let mut one_shot_states = OneShotStates::default();

        // Only allocate the date-level state when a date price was supplied.
        if date_price.is_some() {
            one_shot_states
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }

        let supply = self.durable_states.supply_state.supply();

        let one_shot_states_ref = &mut one_shot_states;

        self.price_to_amount.iterate(supply, |price_paid, amount| {
            one_shot_states_ref
                .price_paid_state
                .iterate(price_paid, amount, supply);

            one_shot_states_ref
                .unrealized_block_state
                .iterate(price_paid, block_price, amount);

            // `unrealized_date_state` is Some exactly when `date_price` is:
            // bind both together instead of `unwrap()`ing on every iteration.
            if let (Some(unrealized_date_state), Some(date_price)) = (
                one_shot_states_ref.unrealized_date_state.as_mut(),
                date_price,
            ) {
                unrealized_date_state.iterate(price_paid, date_price, amount);
            }
        });

        one_shot_states
    }
}

Some files were not shown because too many files have changed in this diff Show More