Browse Source

Merge pull request #732 from mnaamani/import-storage-node

Import storage node
shamil-gadelshin 4 years ago
parent
commit
f54bd5bb4d
89 changed files with 8409 additions and 35 deletions
  1. 3 1
      package.json
  2. 290 0
      storage-node/.eslintrc.js
  3. 27 0
      storage-node/.gitignore
  4. 15 0
      storage-node/.travis.yml
  5. 675 0
      storage-node/LICENSE.md
  6. 56 0
      storage-node/README.md
  7. 54 0
      storage-node/docs/json-signing.md
  8. 18 0
      storage-node/license_header.txt
  9. 43 0
      storage-node/package.json
  10. 5 0
      storage-node/packages/cli/README.md
  11. 230 0
      storage-node/packages/cli/bin/cli.js
  12. 48 0
      storage-node/packages/cli/package.json
  13. 1 0
      storage-node/packages/cli/test/index.js
  14. 1 0
      storage-node/packages/colossus/.eslintrc.js
  15. 94 0
      storage-node/packages/colossus/README.md
  16. 33 0
      storage-node/packages/colossus/api-base.yml
  17. 397 0
      storage-node/packages/colossus/bin/cli.js
  18. 78 0
      storage-node/packages/colossus/lib/app.js
  19. 73 0
      storage-node/packages/colossus/lib/discovery.js
  20. 44 0
      storage-node/packages/colossus/lib/middleware/file_uploads.js
  21. 61 0
      storage-node/packages/colossus/lib/middleware/validate_responses.js
  22. 108 0
      storage-node/packages/colossus/lib/sync.js
  23. 67 0
      storage-node/packages/colossus/package.json
  24. 361 0
      storage-node/packages/colossus/paths/asset/v0/{id}.js
  25. 86 0
      storage-node/packages/colossus/paths/discover/v0/{id}.js
  26. 1 0
      storage-node/packages/colossus/test/index.js
  27. 68 0
      storage-node/packages/discovery/IpfsResolver.js
  28. 28 0
      storage-node/packages/discovery/JdsResolver.js
  29. 129 0
      storage-node/packages/discovery/README.md
  30. 48 0
      storage-node/packages/discovery/Resolver.js
  31. 182 0
      storage-node/packages/discovery/discover.js
  32. 34 0
      storage-node/packages/discovery/example.js
  33. 5 0
      storage-node/packages/discovery/index.js
  34. 59 0
      storage-node/packages/discovery/package.json
  35. 53 0
      storage-node/packages/discovery/publish.js
  36. 1 0
      storage-node/packages/discovery/test/index.js
  37. 3 0
      storage-node/packages/helios/.gitignore
  38. 12 0
      storage-node/packages/helios/README.md
  39. 166 0
      storage-node/packages/helios/bin/cli.js
  40. 17 0
      storage-node/packages/helios/package.json
  41. 1 0
      storage-node/packages/helios/test/index.js
  42. 1 0
      storage-node/packages/runtime-api/.eslintrc.js
  43. 3 0
      storage-node/packages/runtime-api/.gitignore
  44. 7 0
      storage-node/packages/runtime-api/README.md
  45. 176 0
      storage-node/packages/runtime-api/assets.js
  46. 90 0
      storage-node/packages/runtime-api/balances.js
  47. 64 0
      storage-node/packages/runtime-api/discovery.js
  48. 235 0
      storage-node/packages/runtime-api/identities.js
  49. 291 0
      storage-node/packages/runtime-api/index.js
  50. 53 0
      storage-node/packages/runtime-api/package.json
  51. 186 0
      storage-node/packages/runtime-api/roles.js
  52. 52 0
      storage-node/packages/runtime-api/test/assets.js
  53. 55 0
      storage-node/packages/runtime-api/test/balances.js
  54. 1 0
      storage-node/packages/runtime-api/test/data/edwards.json
  55. 1 0
      storage-node/packages/runtime-api/test/data/edwards_unlocked.json
  56. 1 0
      storage-node/packages/runtime-api/test/data/schnorr.json
  57. 106 0
      storage-node/packages/runtime-api/test/identities.js
  58. 31 0
      storage-node/packages/runtime-api/test/index.js
  59. 67 0
      storage-node/packages/runtime-api/test/roles.js
  60. 1 0
      storage-node/packages/storage/.eslintrc.js
  61. 23 0
      storage-node/packages/storage/README.md
  62. 132 0
      storage-node/packages/storage/filter.js
  63. 25 0
      storage-node/packages/storage/index.js
  64. 50 0
      storage-node/packages/storage/package.json
  65. 406 0
      storage-node/packages/storage/storage.js
  66. 230 0
      storage-node/packages/storage/test/storage.js
  67. 0 0
      storage-node/packages/storage/test/template/bar
  68. 0 0
      storage-node/packages/storage/test/template/foo/baz
  69. 1 0
      storage-node/packages/storage/test/template/quux
  70. 1 0
      storage-node/packages/util/.eslintrc.js
  71. 12 0
      storage-node/packages/util/README.md
  72. 67 0
      storage-node/packages/util/fs/resolve.js
  73. 148 0
      storage-node/packages/util/fs/walk.js
  74. 126 0
      storage-node/packages/util/lru.js
  75. 48 0
      storage-node/packages/util/package.json
  76. 163 0
      storage-node/packages/util/pagination.js
  77. 492 0
      storage-node/packages/util/ranges.js
  78. 10 0
      storage-node/packages/util/stripEndingSlash.js
  79. 0 0
      storage-node/packages/util/test/data/bar
  80. 0 0
      storage-node/packages/util/test/data/foo/baz
  81. 1 0
      storage-node/packages/util/test/data/quux
  82. 80 0
      storage-node/packages/util/test/fs/resolve.js
  83. 69 0
      storage-node/packages/util/test/fs/walk.js
  84. 164 0
      storage-node/packages/util/test/lru.js
  85. 124 0
      storage-node/packages/util/test/pagination.js
  86. 409 0
      storage-node/packages/util/test/ranges.js
  87. 16 0
      storage-node/scripts/compose/devchain-and-ipfs-node/docker-compose.yaml
  88. 17 0
      storage-node/storage-node_new.svg
  89. 499 34
      yarn.lock

+ 3 - 1
package.json

@@ -14,7 +14,9 @@
 		"cli",
 		"types",
 		"pioneer",
-		"pioneer/packages/*"
+		"pioneer/packages/*",
+		"storage-node/",
+		"storage-node/packages/*"
 	],
 	"resolutions": {
 		"@polkadot/api": "^0.96.1",

+ 290 - 0
storage-node/.eslintrc.js

@@ -0,0 +1,290 @@
+module.exports = {
+    "env": {
+        "es6": true,
+        "node": true
+    },
+    "extends": "eslint:recommended",
+    "parserOptions": {
+        "ecmaVersion": 2018
+    },
+    "rules": {
+        "accessor-pairs": "error",
+        "array-bracket-newline": "off",
+        "array-bracket-spacing": [
+            "error",
+            "never",
+        ],
+        "array-callback-return": "error",
+        "array-element-newline": [
+          "error",
+          "consistent",
+        ],
+        "arrow-body-style": [
+          "warn",
+          "as-needed"
+        ],
+        "arrow-parens": [
+            "error",
+            "always"
+        ],
+        "arrow-spacing": [
+            "error",
+            {
+                "after": true,
+                "before": true
+            }
+        ],
+        "block-scoped-var": "error",
+        "block-spacing": "error",
+        "brace-style": "off",
+        "callback-return": "error",
+        "camelcase": "off",
+        "capitalized-comments": "off",
+        "class-methods-use-this": "error",
+        "comma-dangle": "off",
+        "comma-spacing": "off",
+        "comma-style": [
+            "error",
+            "last"
+        ],
+        "complexity": "error",
+        "computed-property-spacing": [
+            "error",
+            "never"
+        ],
+        "consistent-return": "error",
+        "consistent-this": "error",
+        "curly": "error",
+        "default-case": "error",
+        "dot-location": "error",
+        "dot-notation": "off",
+        "eol-last": "error",
+        "eqeqeq": "off",
+        "func-call-spacing": "error",
+        "func-name-matching": "off",
+        "func-names": "off",
+        "func-style": "off",
+        "function-paren-newline": "off",
+        "generator-star-spacing": "error",
+        "global-require": "off",
+        "guard-for-in": "warn",
+        "handle-callback-err": "error",
+        "id-blacklist": "error",
+        "id-length": "off",
+        "id-match": "error",
+        "implicit-arrow-linebreak": "off",
+        "indent": "off",
+        "indent-legacy": "off",
+        "init-declarations": "off",
+        "jsx-quotes": "error",
+        "key-spacing": "error",
+        "keyword-spacing": [
+            "error",
+            {
+                "after": true,
+                "before": true
+            }
+        ],
+        "line-comment-position": "off",
+        "linebreak-style": [
+            "error",
+            "unix"
+        ],
+        "lines-around-comment": "error",
+        "lines-around-directive": "error",
+        "lines-between-class-members": "error",
+        "max-classes-per-file": "error",
+        "max-depth": "error",
+        "max-len": "off",
+        "max-lines": "off",
+        "max-lines-per-function": "off",
+        "max-nested-callbacks": "error",
+        "max-params": "off",
+        "max-statements": "off",
+        "max-statements-per-line": "error",
+        "multiline-comment-style": "off",
+        "new-cap": "error",
+        "new-parens": "error",
+        "newline-after-var": "off",
+        "newline-before-return": "off",
+        "newline-per-chained-call": "off",
+        "no-alert": "error",
+        "no-array-constructor": "error",
+        "no-async-promise-executor": "error",
+        "no-await-in-loop": "error",
+        "no-bitwise": "error",
+        "no-buffer-constructor": "error",
+        "no-caller": "error",
+        "no-catch-shadow": "error",
+        "no-confusing-arrow": "error",
+        "no-continue": "off",
+        "no-constant-condition": "off",
+        "no-div-regex": "error",
+        "no-duplicate-imports": "error",
+        "no-else-return": "off",
+        "no-empty-function": "error",
+        "no-eq-null": "error",
+        "no-eval": "error",
+        "no-extend-native": "error",
+        "no-extra-bind": "error",
+        "no-extra-label": "error",
+        "no-extra-parens": "off",
+        "no-floating-decimal": "error",
+        "no-implicit-globals": "error",
+        "no-implied-eval": "error",
+        "no-inline-comments": "off",
+        "no-invalid-this": "error",
+        "no-iterator": "error",
+        "no-label-var": "error",
+        "no-labels": "error",
+        "no-lone-blocks": "error",
+        "no-lonely-if": "error",
+        "no-loop-func": "error",
+        "no-magic-numbers": "off",
+        "no-misleading-character-class": "error",
+        "no-mixed-operators": "error",
+        "no-mixed-requires": "error",
+        "no-multi-assign": "error",
+        "no-multi-spaces": "off",
+        "no-multi-str": "error",
+        "no-multiple-empty-lines": "error",
+        "no-native-reassign": "error",
+        "no-negated-condition": "error",
+        "no-negated-in-lhs": "error",
+        "no-nested-ternary": "error",
+        "no-new": "error",
+        "no-new-func": "error",
+        "no-new-object": "error",
+        "no-new-require": "error",
+        "no-new-wrappers": "error",
+        "no-octal-escape": "error",
+        "no-param-reassign": "error",
+        "no-path-concat": "error",
+        "no-plusplus": "off",
+        "no-process-env": "error",
+        "no-process-exit": "error",
+        "no-proto": "error",
+        "no-prototype-builtins": "error",
+        "no-restricted-globals": "error",
+        "no-restricted-imports": "error",
+        "no-restricted-modules": "error",
+        "no-restricted-properties": "error",
+        "no-restricted-syntax": "error",
+        "no-return-assign": "error",
+        "no-return-await": "error",
+        "no-script-url": "error",
+        "no-self-compare": "error",
+        "no-sequences": "error",
+        "no-shadow": "error",
+        "no-shadow-restricted-names": "error",
+        "no-spaced-func": "error",
+        "no-sync": "warn",
+        "no-tabs": "error",
+        "no-template-curly-in-string": "error",
+        "no-ternary": "off",
+        "no-throw-literal": "error",
+        "no-trailing-spaces": "error",
+        "no-undef-init": "error",
+        "no-undefined": "off",
+        "no-underscore-dangle": "off",
+        "no-unmodified-loop-condition": "error",
+        "no-unneeded-ternary": "off",
+        "no-unused-expressions": "error",
+        "no-unused-vars": [
+          "error",
+          {
+            "argsIgnorePattern": "^_",
+          },
+        ],
+        "no-use-before-define": "error",
+        "no-useless-call": "error",
+        "no-useless-catch": "error",
+        "no-useless-computed-key": "error",
+        "no-useless-concat": "error",
+        "no-useless-constructor": "error",
+        "no-useless-rename": "error",
+        "no-useless-return": "error",
+        "no-useless-escape": "off",
+        "no-var": "off",
+        "no-void": "error",
+        "no-warning-comments": "warn",
+        "no-whitespace-before-property": "error",
+        "no-with": "error",
+        "nonblock-statement-body-position": "error",
+        "object-curly-newline": "error",
+        "object-curly-spacing": [
+            "error",
+            "always"
+        ],
+        "object-shorthand": "off",
+        "one-var": "off",
+        "one-var-declaration-per-line": "error",
+        "operator-assignment": "error",
+        "operator-linebreak": "error",
+        "padded-blocks": "off",
+        "padding-line-between-statements": "error",
+        "prefer-arrow-callback": "off",
+        "prefer-const": "error",
+        "prefer-destructuring": "off",
+        "prefer-numeric-literals": "error",
+        "prefer-object-spread": "error",
+        "prefer-promise-reject-errors": "error",
+        "prefer-reflect": "off",
+        "prefer-rest-params": "error",
+        "prefer-spread": "error",
+        "prefer-template": "off",
+        "quote-props": "off",
+        "quotes": "off",
+        "radix": "error",
+        "require-atomic-updates": "error",
+        "require-await": "error",
+        "require-jsdoc": "warn",
+        "require-unicode-regexp": "error",
+        "rest-spread-spacing": [
+            "error",
+            "never"
+        ],
+        "semi": "off",
+        "semi-spacing": "error",
+        "semi-style": [
+            "error",
+            "last"
+        ],
+        "sort-imports": "error",
+        "sort-keys": "off",
+        "sort-vars": "error",
+        "space-before-blocks": "error",
+        "space-before-function-paren": "off",
+        "space-in-parens": [
+            "error",
+            "never"
+        ],
+        "space-infix-ops": "error",
+        "space-unary-ops": "error",
+        "spaced-comment": [
+            "error",
+            "always"
+        ],
+        "strict": "error",
+        "switch-colon-spacing": "error",
+        "symbol-description": "error",
+        "template-curly-spacing": [
+            "error",
+            "never"
+        ],
+        "template-tag-spacing": "error",
+        "unicode-bom": [
+            "error",
+            "never"
+        ],
+        "valid-jsdoc": "error",
+        "vars-on-top": "off",
+        "wrap-iife": "error",
+        "wrap-regex": "error",
+        "yield-star-spacing": "error",
+        "yoda": [
+            "error",
+            "never"
+        ]
+    }
+};

+ 27 - 0
storage-node/.gitignore

@@ -0,0 +1,27 @@
+build/
+coverage/
+dist
+tmp/
+.DS_Store
+
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+.npmrc
+package-lock.json
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# IDEs
+.idea
+.vscode
+.*.sw*
+
+# Node modules
+node_modules/
+
+# Ignore nvm config file
+.nvmrc

+ 15 - 0
storage-node/.travis.yml

@@ -0,0 +1,15 @@
+language: node_js
+
+node_js:
+    - 10
+    - 12
+    - 13
+
+services:
+  - docker
+
+script:
+  - docker-compose -f ./scripts/compose/devchain-and-ipfs-node/docker-compose.yaml up -d
+  - yarn test
+  - docker-compose -f ./scripts/compose/devchain-and-ipfs-node/docker-compose.yaml stop
+

+ 675 - 0
storage-node/LICENSE.md

@@ -0,0 +1,675 @@
+### GNU GENERAL PUBLIC LICENSE
+
+Version 3, 29 June 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc.
+<https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+### Preamble
+
+The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom
+to share and change all versions of a program--to make sure it remains
+free software for all its users. We, the Free Software Foundation, use
+the GNU General Public License for most of our software; it applies
+also to any other work released this way by its authors. You can apply
+it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you
+have certain responsibilities if you distribute copies of the
+software, or if you modify it: responsibilities to respect the freedom
+of others.
+
+For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the
+manufacturer can do so. This is fundamentally incompatible with the
+aim of protecting users' freedom to change the software. The
+systematic pattern of such abuse occurs in the area of products for
+individuals to use, which is precisely where it is most unacceptable.
+Therefore, we have designed this version of the GPL to prohibit the
+practice for those products. If such problems arise substantially in
+other domains, we stand ready to extend this provision to those
+domains in future versions of the GPL, as needed to protect the
+freedom of users.
+
+Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish
+to avoid the special danger that patents applied to a free program
+could make it effectively proprietary. To prevent this, the GPL
+assures that patents cannot be used to render the program non-free.
+
+The precise terms and conditions for copying, distribution and
+modification follow.
+
+### TERMS AND CONDITIONS
+
+#### 0. Definitions.
+
+"This License" refers to version 3 of the GNU General Public License.
+
+"Copyright" also means copyright-like laws that apply to other kinds
+of works, such as semiconductor masks.
+
+"The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of
+an exact copy. The resulting work is called a "modified version" of
+the earlier work or a work "based on" the earlier work.
+
+A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user
+through a computer network, with no transfer of a copy, is not
+conveying.
+
+An interactive user interface displays "Appropriate Legal Notices" to
+the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+#### 1. Source Code.
+
+The "source code" for a work means the preferred form of the work for
+making modifications to it. "Object code" means any non-source form of
+a work.
+
+A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can
+regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same
+work.
+
+#### 2. Basic Permissions.
+
+All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey,
+without conditions so long as your license otherwise remains in force.
+You may convey covered works to others for the sole purpose of having
+them make modifications exclusively for you, or provide you with
+facilities for running those works, provided that you comply with the
+terms of this License in conveying all material for which you do not
+control copyright. Those thus making or running the covered works for
+you must do so exclusively on your behalf, under your direction and
+control, on terms that prohibit them from making any copies of your
+copyrighted material outside their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the
+conditions stated below. Sublicensing is not allowed; section 10 makes
+it unnecessary.
+
+#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such
+circumvention is effected by exercising rights under this License with
+respect to the covered work, and you disclaim any intention to limit
+operation or modification of the work as a means of enforcing, against
+the work's users, your or third parties' legal rights to forbid
+circumvention of technological measures.
+
+#### 4. Conveying Verbatim Copies.
+
+You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+#### 5. Conveying Modified Source Versions.
+
+You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these
+conditions:
+
+-   a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+-   b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under
+    section 7. This requirement modifies the requirement in section 4
+    to "keep intact all notices".
+-   c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy. This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged. This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+-   d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+#### 6. Conveying Non-Source Forms.
+
+You may convey a covered work in object code form under the terms of
+sections 4 and 5, provided that you also convey the machine-readable
+Corresponding Source under the terms of this License, in one of these
+ways:
+
+-   a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+-   b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the Corresponding
+    Source from a network server at no charge.
+-   c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source. This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+-   d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge. You need not require recipients to copy the
+    Corresponding Source along with the object code. If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source. Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+-   e) Convey the object code using peer-to-peer transmission,
+    provided you inform other peers where the object code and
+    Corresponding Source of the work are being offered to the general
+    public at no charge under subsection 6d.
+
+A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal,
+family, or household purposes, or (2) anything designed or sold for
+incorporation into a dwelling. In determining whether a product is a
+consumer product, doubtful cases shall be resolved in favor of
+coverage. For a particular product received by a particular user,
+"normally used" refers to a typical or common use of that class of
+product, regardless of the status of the particular user or of the way
+in which the particular user actually uses, or expects or is expected
+to use, the product. A product is a consumer product regardless of
+whether the product has substantial commercial, industrial or
+non-consumer uses, unless such uses represent the only significant
+mode of use of the product.
+
+"Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to
+install and execute modified versions of a covered work in that User
+Product from a modified version of its Corresponding Source. The
+information must suffice to ensure that the continued functioning of
+the modified object code is in no case prevented or interfered with
+solely because modification has been made.
+
+If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or
+updates for a work that has been modified or installed by the
+recipient, or for the User Product in which it has been modified or
+installed. Access to a network may be denied when the modification
+itself materially and adversely affects the operation of the network
+or violates the rules and protocols for communication across the
+network.
+
+Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+#### 7. Additional Terms.
+
+"Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders
+of that material) supplement the terms of this License with terms:
+
+-   a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+-   b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+-   c) Prohibiting misrepresentation of the origin of that material,
+    or requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+-   d) Limiting the use for publicity purposes of names of licensors
+    or authors of the material; or
+-   e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+-   f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions
+    of it) with contractual assumptions of liability to the recipient,
+    for any liability that these contractual assumptions directly
+    impose on those licensors and authors.
+
+All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions; the
+above requirements apply either way.
+
+#### 8. Termination.
+
+You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+However, if you cease all violation of this License, then your license
+from a particular copyright holder is reinstated (a) provisionally,
+unless and until the copyright holder explicitly and finally
+terminates your license, and (b) permanently, if the copyright holder
+fails to notify you of the violation by some reasonable means prior to
+60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+#### 9. Acceptance Not Required for Having Copies.
+
+You are not required to accept this License in order to receive or run
+a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+#### 10. Automatic Licensing of Downstream Recipients.
+
+Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+#### 11. Patents.
+
+A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+A contributor's "essential patent claims" are all patent claims owned
+or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+A patent license is "discriminatory" if it does not include within the
+scope of its coverage, prohibits the exercise of, or is conditioned on
+the non-exercise of one or more of the rights that are specifically
+granted under this License. You may not convey a covered work if you
+are a party to an arrangement with a third party that is in the
+business of distributing software, under which you make payment to the
+third party based on the extent of your activity of conveying the
+work, and under which the third party grants, to any of the parties
+who would receive the covered work from you, a discriminatory patent
+license (a) in connection with copies of the covered work conveyed by
+you (or copies made from those copies), or (b) primarily for and in
+connection with specific products or compilations that contain the
+covered work, unless you entered into that arrangement, or that patent
+license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+#### 12. No Surrender of Others' Freedom.
+
+If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under
+this License and any other pertinent obligations, then as a
+consequence you may not convey it at all. For example, if you agree to
+terms that obligate you to collect a royalty for further conveying
+from those to whom you convey the Program, the only way you could
+satisfy both those terms and this License would be to refrain entirely
+from conveying the Program.
+
+#### 13. Use with the GNU Affero General Public License.
+
+Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+#### 14. Revised Versions of this License.
+
+The Free Software Foundation may publish revised and/or new versions
+of the GNU General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in
+detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies that a certain numbered version of the GNU General Public
+License "or any later version" applies to it, you have the option of
+following the terms and conditions either of that numbered version or
+of any later version published by the Free Software Foundation. If the
+Program does not specify a version number of the GNU General Public
+License, you may choose any version ever published by the Free
+Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions
+of the GNU General Public License can be used, that proxy's public
+statement of acceptance of a version permanently authorizes you to
+choose that version for the Program.
+
+Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+#### 15. Disclaimer of Warranty.
+
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
+WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
+PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
+DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
+CORRECTION.
+
+#### 16. Limitation of Liability.
+
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
+CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
+ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
+NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
+LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
+TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
+PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+#### 17. Interpretation of Sections 15 and 16.
+
+If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+END OF TERMS AND CONDITIONS
+
+### How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these
+terms.
+
+To do so, attach the following notices to the program. It is safest to
+attach them to the start of each source file to most effectively state
+the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+        <one line to give the program's name and a brief idea of what it does.>
+        Copyright (C) <year>  <name of author>
+
+        This program is free software: you can redistribute it and/or modify
+        it under the terms of the GNU General Public License as published by
+        the Free Software Foundation, either version 3 of the License, or
+        (at your option) any later version.
+
+        This program is distributed in the hope that it will be useful,
+        but WITHOUT ANY WARRANTY; without even the implied warranty of
+        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+        GNU General Public License for more details.
+
+        You should have received a copy of the GNU General Public License
+        along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper
+mail.
+
+If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+        <program>  Copyright (C) <year>  <name of author>
+        This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+        This is free software, and you are welcome to redistribute it
+        under certain conditions; type `show c' for details.
+
+The hypothetical commands \`show w' and \`show c' should show the
+appropriate parts of the General Public License. Of course, your
+program's commands might be different; for a GUI interface, you would
+use an "about box".
+
+You should also get your employer (if you work as a programmer) or
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. For more information on this, and how to apply and follow
+the GNU GPL, see <https://www.gnu.org/licenses/>.
+
+The GNU General Public License does not permit incorporating your
+program into proprietary programs. If your program is a subroutine
+library, you may consider it more useful to permit linking proprietary
+applications with the library. If this is what you want to do, use the
+GNU Lesser General Public License instead of this License. But first,
+please read <https://www.gnu.org/licenses/why-not-lgpl.html>.

+ 56 - 0
storage-node/README.md

@@ -0,0 +1,56 @@
+![Storage Nodes for Joystream](./storage-node_new.svg)
+
+This repository contains several Node packages, located under the `packages/`
+subdirectory. See each individual package for details:
+
+* [colossus](./packages/colossus/README.md) - the main colossus app.
+* [storage](./packages/storage/README.md) - abstraction over the storage backend.
+* [runtime-api](./packages/runtime-api/README.md) - convenience wrappers for the runtime API.
+* [crypto](./packages/crypto/README.md) - cryptographic utility functions.
+* [util](./packages/util/README.md) - general utility functions.
+* [discovery](./packages/discovery/README.md) - service discovery using IPNS.
+
+Installation
+------------
+
+*Requirements*
+
+This project uses [yarn](https://yarnpkg.com/) as Node package manager. It also
+uses some node packages with native components, so make sure to install your
+system's basic build tools.
+
+On Debian-based systems:
+
+```bash
+$ apt install build-essential
+```
+
+On Mac OS (using [homebrew](https://brew.sh/)):
+
+```bash
+$ brew install libtool automake autoconf
+```
+
+*Building*
+
+```bash
+$ yarn install
+```
+
+The command will install dependencies, and make a `colossus` executable available:
+
+```bash
+$ yarn run colossus --help
+```
+
+*Testing*
+
+Running tests from the repository root will run tests from all packages:
+
+```
+$ yarn run test
+```
+
+
+## Detailed Setup and Configuration Guide
+For details on how to setup a storage node on the Joystream network, follow this [step by step guide](https://github.com/Joystream/helpdesk/tree/master/roles/storage-providers).

+ 54 - 0
storage-node/docs/json-signing.md

@@ -0,0 +1,54 @@
+# JSON Data Signing
+
+As serializing and deserializing JSON is not deterministic, but may depend
+on the order in which keys are added or even the system's collation method,
+signing JSON cryptographically is fraught with issues. We circumvent them
+by wrapping any JSON to be signed in another JSON object:
+
+* `version` contains the version of the wrapper JSON, currently always `1`.
+* `serialized` contains the serialized version of the data, currently this
+  will be the base64 encoded, serialized JSON payload.
+* `signature` contains the base64 encoded signature of the `serialized` field
+  value prior to its base64 encoding.
+* `payload` [optional] contains the deserialized JSON object corresponding
+  to the `serialized` payload.
+
+For signing and verification, we'll use polkadot's *ed25519* or *sr25519* keys
+directly.
+
+## Signing Process
+
+Given some structured data:
+
+1. Serialize the structured data into a JSON string.
+1. Create a signature over the serialized JSON string.
+1. Create a new structured data with the appropriate `version` field.
+1. Add a base64 encoded version of the serialized JSON string as the `serialized` field.
+1. Add a base64 encoded version of the signature as the `signature` field.
+1. Optionally add the original structured data as the `payload` field.
+
+## Verification Process
+
+1. Verify data contains a `version`, `serialized` and `signature` field.
+1. Currently, verify that the `version` field's value is `1`.
+1. Try to base64 decode the `serialized` and `signature` fields.
+1. Verify that the decoded `signature` is valid for the decoded `serialized`
+  field.
+1. JSON deserialize the decoded `serialized` field.
+1. Add the resulting structured data as the `payload` field, and return the
+  modified object.
+
+# Alternatives
+
+There are alternative schemes available for signing JSON objects, but they
+have specific issues we'd like to avoid.
+
+* [JOSE](https://jose.readthedocs.io/en/latest/) has no support for the *ed25519*
+  or *sr25519* keys used in polkadot apps, and
+  [appears to be fraught with security issues](https://paragonie.com/blog/2017/03/jwt-json-web-tokens-is-bad-standard-that-everyone-should-avoid).
+  Either makes its use hard to justify.
+* While [PASETO](https://paseto.io/) does use *ed25519* keys and seems to have
+  a reasonably robust JavaScript implementation, it requires its secret keys to
+  be 512 bits long, while polkadot provides 256 bit secret keys. The implication
+  is that we would have to manage 512 bit keys and their corresponding public
+  keys as linked to polkadot's keys, which is cumbersome at the very least.
+ 18 - 0
storage-node/license_header.txt

@@ -0,0 +1,18 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+

+ 43 - 0
storage-node/package.json

@@ -0,0 +1,43 @@
+{
+  "private": true,
+  "name": "@joystream/storage-node",
+  "version": "1.0.0",
+  "engines": {
+    "node": ">=10.15.3",
+    "yarn": "^1.15.2"
+  },
+  "homepage": "https://github.com/Joystream/joystream/",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "workspaces": [
+    "packages/*"
+  ],
+  "scripts": {
+    "test": "wsrun --serial test",
+    "lint": "wsrun --serial lint"
+  },
+  "devDependencies": {
+    "wsrun": "^3.6.5"
+  }
+}

+ 5 - 0
storage-node/packages/cli/README.md

@@ -0,0 +1,5 @@
+# A CLI for the Joystream Runtime & Colossus
+
+- CLI access for some functionality from `@joystream/runtime-api`
+- Colossus/storage node functionality:
+  - File uploads

+ 230 - 0
storage-node/packages/cli/bin/cli.js

@@ -0,0 +1,230 @@
+#!/usr/bin/env node
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const path = require('path');
+const fs = require('fs');
+const assert = require('assert');
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+const meow = require('meow');
+const chalk = require('chalk');
+const _ = require('lodash');
+
+const debug = require('debug')('joystream:cli');
+
+// Project root
+const project_root = path.resolve(__dirname, '..');
+
+// Configuration (default)
+const pkg = require(path.resolve(project_root, 'package.json'));
+
+// Parse CLI
+const FLAG_DEFINITIONS = {
+  // TODO
+};
+
+const cli = meow(`
+  Usage:
+    $ joystream key_file command [options]
+
+  All commands require a key file holding the identity for interacting with the
+  runtime API.
+
+  Commands:
+    upload            Upload a file to a Colossus storage node. Requires a
+                      storage node URL, and a local file name to upload. As
+                      an optional third parameter, you can provide a Data
+                      Object Type ID - this defaults to "1" if not provided.
+    download          Retrieve a file. Requires a storage node URL and a content
+                      ID, as well as an output filename.
+    head              Send a HEAD request for a file, and print headers.
+                      Requires a storage node URL and a content ID.
+  `,
+  { flags: FLAG_DEFINITIONS });
+
+function assert_file(name, filename)
+{
+  assert(filename, `Need a ${name} parameter to proceed!`);
+  assert(fs.statSync(filename).isFile(), `Path "${filename}" is not a file, aborting!`);
+}
+
+const commands = {
+  'upload': async (runtime_api, url, filename, do_type_id) => {
+    // Check parameters
+    assert_file('file', filename);
+
+    const size = fs.statSync(filename).size;
+    console.log(`File "${filename}" is ` + chalk.green(size) + ' Bytes.');
+
+    if (!do_type_id) {
+      do_type_id = 1;
+    }
+    console.log('Data Object Type ID is: ' + chalk.green(do_type_id));
+
+    // Generate content ID
+    // FIXME this require path is like this because of
+    // https://github.com/Joystream/apps/issues/207
+    const { ContentId } = require('@joystream/types/lib/media');
+    var cid = ContentId.generate();
+    cid = cid.encode().toString();
+    console.log('Generated content ID: ' + chalk.green(cid));
+
+    // Create Data Object
+    const data_object = await runtime_api.assets.createDataObject(
+      runtime_api.identities.key.address, cid, do_type_id, size);
+    console.log('Data object created.');
+
+    // TODO in future, optionally contact liaison here?
+    const request = require('request');
+    url = `${url}asset/v0/${cid}`;
+    console.log('Uploading to URL', chalk.green(url));
+
+    const f = fs.createReadStream(filename);
+    const opts = {
+      url: url,
+      headers: {
+        'content-type': '',
+        'content-length': `${size}`,
+      },
+      json: true,
+    };
+    return new Promise((resolve, reject) => {
+      const r = request.put(opts, (error, response, body) => {
+        if (error) {
+          reject(error);
+          return;
+        }
+
+        if (response.statusCode / 100 != 2) {
+          reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`));
+          return;
+        }
+        console.log('Upload successful:', body.message);
+        resolve();
+      });
+      f.pipe(r);
+    });
+  },
+
+  'download': async (runtime_api, url, content_id, filename) => {
+    const request = require('request');
+    url = `${url}asset/v0/${content_id}`;
+    console.log('Downloading URL', chalk.green(url), 'to', chalk.green(filename));
+
+    const f = fs.createWriteStream(filename);
+    const opts = {
+      url: url,
+      json: true,
+    };
+    return new Promise((resolve, reject) => {
+      const r = request.get(opts, (error, response, body) => {
+        if (error) {
+          reject(error);
+          return;
+        }
+
+        console.log('Downloading', chalk.green(response.headers['content-type']), 'of size', chalk.green(response.headers['content-length']), '...');
+
+        f.on('error', (err) => {
+          reject(err);
+        });
+
+        f.on('finish', () => {
+          if (response.statusCode / 100 != 2) {
+            reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`));
+            return;
+          }
+          console.log('Download completed.');
+          resolve();
+        });
+      });
+      r.pipe(f);
+    });
+  },
+
+  'head': async (runtime_api, url, content_id) => {
+    const request = require('request');
+    url = `${url}asset/v0/${content_id}`;
+    console.log('Checking URL', chalk.green(url), '...');
+
+    const opts = {
+      url: url,
+      json: true,
+    };
+    return new Promise((resolve, reject) => {
+      const r = request.head(opts, (error, response, body) => {
+        if (error) {
+          reject(error);
+          return;
+        }
+
+        if (response.statusCode / 100 != 2) {
+          reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`));
+          return;
+        }
+
+        for (var propname in response.headers) {
+          console.log(`  ${chalk.yellow(propname)}: ${response.headers[propname]}`);
+        }
+
+        resolve();
+      });
+    });
+  },
+
+};
+
+
+async function main()
+{
+  // Key file is at the first instance.
+  const key_file = cli.input[0];
+  assert_file('key file', key_file);
+
+  // Create runtime API.
+  const runtime_api = await RuntimeApi.create({ account_file: key_file });
+
+  // Simple CLI commands
+  const command = cli.input[1];
+  if (!command) {
+    throw new Error('Need a command to run!');
+  }
+
+  if (commands.hasOwnProperty(command)) {
+    // Command recognized
+    const args = _.clone(cli.input).slice(2);
+    await commands[command](runtime_api, ...args);
+  }
+  else {
+    throw new Error(`Command "${command}" not recognized, aborting!`);
+  }
+}
+
+main()
+  .then(() => {
+    console.log('Process exiting gracefully.');
+    process.exit(0);
+  })
+  .catch((err) => {
+    console.error(chalk.red(err.stack));
+    process.exit(-1);
+  });

+ 48 - 0
storage-node/packages/cli/package.json

@@ -0,0 +1,48 @@
+{
+  "name": "@joystream/storage-cli",
+  "version": "0.1.0",
+  "description": "Joystream tool for uploading and downloading files to the network",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "scripts": {
+    "test": "mocha 'test/**/*.js'",
+    "lint": "eslint 'paths/**/*.js' 'lib/**/*.js'"
+  },
+  "bin": {
+    "joystream": "bin/cli.js"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "eslint": "^5.13.0",
+    "mocha": "^5.2.0",
+    "temp": "^0.9.0"
+  },
+  "dependencies": {
+    "@joystream/runtime-api": "^0.1.0",
+    "chalk": "^2.4.2",
+    "lodash": "^4.17.11",
+    "meow": "^5.0.0",
+    "request": "^2.88.0"
+  }
+}

+ 1 - 0
storage-node/packages/cli/test/index.js

@@ -0,0 +1 @@
+// Add Tests!

+ 1 - 0
storage-node/packages/colossus/.eslintrc.js

@@ -0,0 +1 @@
+../../.eslintrc.js

+ 94 - 0
storage-node/packages/colossus/README.md

@@ -0,0 +1,94 @@
+![Storage Nodes for Joystream](../../banner.svg)
+
+Development
+-----------
+
+Run a development server:
+
+```bash
+$ yarn run dev --config myconfig.json
+```
+
+Command-Line
+------------
+
+Running a storage server is (almost) as easy as running the bundled `colossus`
+executable:
+
+```bash
+$ colossus --storage=/path/to/storage/directory
+```
+
+Run with `--help` to see a list of available CLI options.
+
+You need to stake as a storage provider to run a storage node.
+
+Configuration
+-------------
+
+Most common configuration options are available as command-line options
+for the CLI.
+
+However, some advanced configuration options are only possible to set
+via the configuration file.
+
+* `filter` is a hash of upload filtering options.
+  * `max_size` sets the maximum permissible file upload size. If unset,
+    this defaults to 100 MiB.
+  * `mime` is a hash of...
+    * `accept` is an Array of mime types that are acceptable for uploads,
+      such as `text/plain`, etc. Mime types can also be specified for
+      wildcard matching, such as `video/*`.
+    * `reject` is an Array of mime types that are unacceptable for uploads.
+
+Upload Filtering
+----------------
+
+The upload filtering logic first tests whether any of the `accept` mime types
+are matched. If none are matched, the upload is rejected. If any is matched,
+then the upload is still rejected if any of the `reject` mime types are
+matched.
+
+This allows inclusive and exclusive filtering.
+
+* `{ accept: ['text/plain', 'text/html'] }` accepts *only* the two given mime types.
+* `{ accept: ['text/*'], reject: ['text/plain'] }` accepts any `text/*` that is not
+  `text/plain`.
+
+More advanced filtering is currently not available.
+
+API Packages
+------------
+
+Since it's not entirely clear yet how APIs will develop in future, the approach
+taken here is to package individual APIs up individually. That is, instead of
+providing an overall API version in `api-base.yml`, it should be part of each
+API package's path.
+
+For example, for a `foo` API in its version `v1`, its definitions should live
+in `./paths/foo/v1.js` and `./paths/foo/v1/*.js` respectively.
+
+*Note:* until a reasonably stable API is reached, this project uses a `v0`
+version prefix.
+
+Interface/implementation
+------------------------
+
+For reusability across API versions, it's best to keep files in the `paths`
+subfolder very thin, and instead inject implementations via the `dependencies`
+configuration value of `express-openapi`.
+
+These implementations live in the `./lib` subfolder. Adjust `server.js` as
+needed to make them available to API packages.
+
+Streaming Notes
+---------------
+
+For streaming content, it is required that stream metadata is located at the
+start of the stream. Most software writes metadata at the end of the stream,
+because it is when the stream is committed to disk that the entirety of the
+metadata is known.
+
+To move metadata to the start of the stream, a CLI tool such as
+[qtfaststart](https://github.com/danielgtaylor/qtfaststart) for MP4 files might
+be used.

+ 33 - 0
storage-node/packages/colossus/api-base.yml

@@ -0,0 +1,33 @@
+openapi: '3.0.0'
+info:
+  title: 'Joystream Storage Node API.'
+  version: '1.0.0'
+paths: {}  # Will be populated by express-openapi
+
+components:
+  # Re-usable parameter definitions
+  parameters: {}
+
+  # Re-usable (response) object definitions
+  schemas:
+    Error:
+      required:
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+
+    ContentDirectoryEntry: # TODO implement
+      required:
+        - name
+      properties:
+        name:
+          type: string
+
+    ContentDirectoryEntries:
+      type: array
+      items:
+        $ref: '#/components/schemas/ContentDirectoryEntry'

+ 397 - 0
storage-node/packages/colossus/bin/cli.js

@@ -0,0 +1,397 @@
+#!/usr/bin/env node
+'use strict';
+
+// Node requires
+const path = require('path');
+
+// npm requires
+const meow = require('meow');
+const configstore = require('configstore');
+const chalk = require('chalk');
+const figlet = require('figlet');
+const _ = require('lodash');
+
+const debug = require('debug')('joystream:cli');
+
+// Project root
+const PROJECT_ROOT = path.resolve(__dirname, '..');
+
+// Configuration (default)
+const pkg = require(path.resolve(PROJECT_ROOT, 'package.json'));
+const default_config = new configstore(pkg.name);
+
+// Parse CLI
// Flag definitions for meow. NOTE: `_default` is deliberately not meow's own
// `default` key -- create_config() reads it so that built-in defaults can be
// overridden by the config file, while explicit CLI flags override both.
const FLAG_DEFINITIONS = {
  port: {
    type: 'integer',
    alias: 'p',
    _default: 3000,
  },
  'syncPeriod': {
    type: 'integer',
    // milliseconds between synchronization runs
    _default: 120000,
  },
  keyFile: {
    type: 'string',
  },
  config: {
    type: 'string',
    alias: 'c',
  },
  'publicUrl': {
    type: 'string',
    alias: 'u'
  },
  'passphrase': {
    type: 'string'
  },
  'wsProvider': {
    type: 'string',
    _default: 'ws://localhost:9944'
  }
};
+
// CLI definition. Fix: the --sync-period help text claimed a 30,000 ms
// default, but FLAG_DEFINITIONS sets _default: 120000.
const cli = meow(`
  Usage:
    $ colossus [command] [options]

  Commands:
    server [default]  Run a server instance with the given configuration.
    signup            Sign up as a storage provider. Requires that you provide
                      a JSON account file of an account that is a member, and has
                      sufficient balance for staking as a storage provider.
                      Writes a new account file that should be used to run the
                      storage node.
    down              Signal to network that all services are down. Running
                      the server will signal that services as online again.
    discovery         Run the discovery service only.

  Options:
    --config=PATH, -c PATH  Configuration file path. Defaults to
                            "${default_config.path}".
    --port=PORT, -p PORT    Port number to listen on, defaults to 3000.
    --sync-period           Number of milliseconds to wait between synchronization
                            runs. Defaults to 120,000 (2 minutes).
    --key-file              JSON key export file to use as the storage provider.
    --passphrase            Optional passphrase to use to decrypt the key-file (if its encrypted).
    --public-url            API Public URL to announce. No URL will be announced if not specified.
    --ws-provider           Joystream Node websocket provider url, eg: "ws://127.0.0.1:9944"
  `,
  { flags: FLAG_DEFINITIONS });
+
+// Create configuration
+function create_config(pkgname, flags)
+{
+  // Create defaults from flag definitions
+  const defaults = {};
+  for (var key in FLAG_DEFINITIONS) {
+    const defs = FLAG_DEFINITIONS[key];
+    if (defs._default) {
+      defaults[key] = defs._default;
+    }
+  }
+
+  // Provide flags as defaults. Anything stored in the config overrides.
+  var config = new configstore(pkgname, defaults, { configPath: flags.config });
+
+  // But we want the flags to also override what's stored in the config, so
+  // set them all.
+  for (var key in flags) {
+    // Skip aliases and self-referential config flag
+    if (key.length == 1 || key === 'config') continue;
+    // Skip sensitive flags
+    if (key == 'passphrase') continue;
+    // Skip unset flags
+    if (!flags[key]) continue;
+    // Otherwise set.
+    config.set(key, flags[key]);
+  }
+
+  debug('Configuration at', config.path, config.all);
+  return config;
+}
+
+// All-important banner!
+function banner()
+{
+  console.log(chalk.blue(figlet.textSync('joystream', 'Speed')));
+}
+
// Wrap an Express app in an HTTP server and start listening.
// The returned promise settles only when the server errors or closes,
// so awaiting it effectively keeps the process alive.
function start_express_app(app, port) {
  const http = require('http');
  const server = http.createServer(app);

  return new Promise((resolve, reject) => {
    server.on('error', reject);
    server.on('listening', () => {
      console.log('API server started.', server.address());
    });
    server.on('close', (...args) => {
      console.log('Server closed, shutting down...');
      resolve(...args);
    });
    console.log('Starting API server...');
    // Bind on '::' so both IPv4 and IPv6 clients can connect.
    server.listen(port, '::');
  });
}
+// Start app
+function start_all_services(store, api, config)
+{
+  const app = require('../lib/app')(PROJECT_ROOT, store, api, config);
+  const port = config.get('port');
+  return start_express_app(app, port);
+}
+
+// Start discovery service app
+function start_discovery_service(api, config)
+{
+  const app = require('../lib/discovery')(PROJECT_ROOT, api, config);
+  const port = config.get('port');
+  return start_express_app(app, port);
+}
+
+// Get an initialized storage instance
+function get_storage(runtime_api, config)
+{
+  // TODO at some point, we can figure out what backend-specific connection
+  // options make sense. For now, just don't use any configuration.
+  const { Storage } = require('@joystream/storage');
+
+  const options = {
+    resolve_content_id: async (content_id) => {
+      // Resolve via API
+      const obj = await runtime_api.assets.getDataObject(content_id);
+      if (!obj || obj.isNone) {
+        return;
+      }
+
+      return obj.unwrap().ipfs_content_id.toString();
+    },
+  };
+
+  return Storage.create(options);
+}
+
// Interactive signup flow for the storage provider role: checks for an open
// slot, verifies the member account can stake, generates and exports a new
// role key, transfers staking funds to it, and submits the role application.
async function run_signup(account_file, provider_url)
{
  if (!account_file) {
    console.log('Cannot proceed without keyfile');
    return
  }

  const { RuntimeApi } = require('@joystream/runtime-api');
  const api = await RuntimeApi.create({account_file, canPromptForPassphrase: true, provider_url});

  if (!api.identities.key) {
    console.log('Cannot proceed without a member account');
    return
  }

  // Check there is an opening
  let availableSlots = await api.roles.availableSlotsForRole(api.roles.ROLE_STORAGE);

  if (availableSlots == 0) {
    console.log(`
      There are no open storage provider slots available at this time.
      Please try again later.
    `);
    return;
  } else {
    console.log(`There are still ${availableSlots} slots available, proceeding`);
  }

  const member_address = api.identities.key.address;

  // Check if account works
  const min = await api.roles.requiredBalanceForRoleStaking(api.roles.ROLE_STORAGE);
  console.log(`Account needs to be a member and have a minimum balance of ${min.toString()}`);
  const check = await api.roles.checkAccountForStaking(member_address);
  // NOTE(review): a falsy `check` does not abort here -- the flow continues
  // regardless; confirm whether checkAccountForStaking throws on failure
  // instead of returning false.
  if (check) {
    console.log('Account is working for staking, proceeding.');
  }

  // Create a role key
  const role_key = await api.identities.createRoleKey(member_address);
  const role_address = role_key.address;
  console.log('Generated', role_address, '- this is going to be exported to a JSON file.\n',
    ' You can provide an empty passphrase to make starting the server easier,\n',
    ' but you must keep the file very safe, then.');
  const filename = await api.identities.writeKeyPairExport(role_address);
  console.log('Identity stored in', filename);

  // Ok, transfer for staking.
  await api.roles.transferForStaking(member_address, role_address, api.roles.ROLE_STORAGE);
  console.log('Funds transferred.');

  // Now apply for the role
  await api.roles.applyForRole(role_address, api.roles.ROLE_STORAGE, member_address);
  console.log('Role application sent.\nNow visit Roles > My Requests in the app.');
}
+
// Load the storage-provider key configured via `keyFile`, connect to the
// runtime, and block until the account is staked in the storage role.
// Returns a [result, api] pair where `result` is truthy once staked.
// NOTE(review): reads the passphrase from the global `cli.flags` rather than
// `config`, because create_config() deliberately never persists it.
async function wait_for_role(config)
{
  // Load key information
  const { RuntimeApi } = require('@joystream/runtime-api');
  const keyFile = config.get('keyFile');
  if (!keyFile) {
    throw new Error("Must specify a key file for running a storage node! Sign up for the role; see `colussus --help' for details.");
  }
  const wsProvider = config.get('wsProvider');

  const api = await RuntimeApi.create({
    account_file: keyFile,
    passphrase: cli.flags.passphrase,
    provider_url: wsProvider,
  });

  if (!api.identities.key) {
    throw new Error('Failed to unlock storage provider account');
  }

  // Wait for the account role to be finalized
  console.log('Waiting for the account to be staked as a storage provider role...');
  const result = await api.roles.waitForRole(api.identities.key.address, api.roles.ROLE_STORAGE);
  return [result, api];
}
+
// Build the service-information record that gets published for discovery.
function get_service_information(config) {
  // For now assume we run all services on the same endpoint
  const endpoint = config.get('publicUrl');
  return {
    asset: {
      version: 1, // spec version
      endpoint: endpoint,
    },
    discover: {
      version: 1, // spec version
      endpoint: endpoint,
    },
  };
}
+
// Publish this node's service information to IPFS and record the key on
// chain, then schedule a re-announcement before the on-chain record expires.
// On failure, retries sooner.
async function announce_public_url(api, config) {
  // re-announce in future
  const reannounce = function (timeoutMs) {
    setTimeout(announce_public_url, timeoutMs, api, config);
  }

  debug('announcing public url')
  const { publish } = require('@joystream/discovery')

  const accountId = api.identities.key.address

  try {
    const serviceInformation = get_service_information(config)

    let keyId = await publish.publish(serviceInformation);

    const expiresInBlocks = 600; // ~ 1 hour (6s block interval)
    await api.discovery.setAccountInfo(accountId, keyId, expiresInBlocks);

    debug('publishing complete, scheduling next update')

    // FIXME: sometimes after the tx is finalized, execution never reaches
    // this point -- the await above appears to hang; needs investigation.

    // Reannounce before expiery
    reannounce(50 * 60 * 1000); // in 50 minutes

  } catch (err) {
    debug(`announcing public url failed: ${err.stack}`)

    // On failure retry sooner
    debug(`announcing failed, retrying in: 2 minutes`)
    reannounce(120 * 1000)
  }
}
+
// Clear this node's on-chain discovery record, signalling it is offline.
function go_offline(api) {
  const role_address = api.identities.key.address;
  return api.discovery.unsetAccountInfo(role_address);
}
+
+// Simple CLI commands
// NOTE(review): this top-level `command` computation is dead code -- main()
// below re-derives the same value and is the only consumer. Kept as-is
// pending cleanup.
var command = cli.input[0];
if (!command) {
  command = 'server';
}
+
// Command handlers, keyed by the first positional CLI argument.
const commands = {
  // Default command: verify staking, then run sync, announcements and the
  // full API server.
  'server': async () => {
    const cfg = create_config(pkg.name, cli.flags);

    // Load key information
    const values = await wait_for_role(cfg);
    const result = values[0]
    const api = values[1];
    if (!result) {
      throw new Error(`Not staked as storage role.`);
    }
    console.log('Staked, proceeding.');

    // Make sure a public URL is configured
    if (!cfg.get('publicUrl')) {
      throw new Error('publicUrl not configured')
    }

    // Continue with server setup
    const store = get_storage(api, cfg);
    banner();

    const { start_syncing } = require('../lib/sync');
    start_syncing(api, cfg, store);

    // Fire-and-forget: the announcement retries and reschedules itself.
    announce_public_url(api, cfg);
    await start_all_services(store, api, cfg);
  },
  // Sign up as a storage provider using the given member account file.
  'signup': async (account_file) => {
    const cfg = create_config(pkg.name, cli.flags);
    await run_signup(account_file, cfg.get('wsProvider'));
  },
  // Signal to the network that this node's services are offline.
  'down': async () => {
    const cfg = create_config(pkg.name, cli.flags);

    const values = await wait_for_role(cfg);
    const result = values[0]
    const api = values[1];
    if (!result) {
      throw new Error(`Not staked as storage role.`);
    }

    await go_offline(api)
  },
  // Run only the discovery service (no staking required).
  'discovery': async () => {
    debug("Starting Joystream Discovery Service")
    const { RuntimeApi } = require('@joystream/runtime-api')
    const cfg = create_config(pkg.name, cli.flags)
    const wsProvider = cfg.get('wsProvider');
    const api = await RuntimeApi.create({ provider_url: wsProvider });
    await start_discovery_service(api, cfg)
  }
};
+
+
// CLI entry point: dispatch to the selected command handler.
async function main()
{
  // Default to running the server when no command was given.
  const command = cli.input[0] || 'server';

  if (!commands.hasOwnProperty(command)) {
    throw new Error(`Command "${command}" not recognized, aborting!`);
  }

  // Forward any remaining positional arguments to the handler.
  // (slice already returns a fresh array; no clone needed.)
  const args = cli.input.slice(1);
  await commands[command](...args);
}
+
// Run the CLI and translate the outcome into a process exit code.
main().then(
  () => {
    console.log('Process exiting gracefully.');
    process.exit(0);
  },
  (err) => {
    console.error(chalk.red(err.stack));
    process.exit(-1);
  }
);

+ 78 - 0
storage-node/packages/colossus/lib/app.js

@@ -0,0 +1,78 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+// Node requires
+const fs = require('fs');
+const path = require('path');
+
+// npm requires
+const express = require('express');
+const openapi = require('express-openapi');
+const bodyParser = require('body-parser');
+const cors = require('cors');
+const yaml = require('js-yaml');
+
+// Project requires
+const validateResponses = require('./middleware/validate_responses');
+const fileUploads = require('./middleware/file_uploads');
+const pagination = require('@joystream/util/pagination');
+const storage = require('@joystream/storage');
+
+// Configure app
+function create_app(project_root, storage, runtime, config)
+{
+  const app = express();
+  app.use(cors());
+  app.use(bodyParser.json());
+  // FIXME app.use(bodyParser.urlencoded({ extended: true }));
+
+  // Load & extend/configure API docs
+  var api = yaml.safeLoad(fs.readFileSync(
+    path.resolve(project_root, 'api-base.yml')));
+  api['x-express-openapi-additional-middleware'] = [validateResponses];
+  api['x-express-openapi-validation-strict'] = true;
+
+  api = pagination.openapi(api);
+
+  openapi.initialize({
+    apiDoc: api,
+    app: app,
+    paths: path.resolve(project_root, 'paths'),
+    docsPath: '/swagger.json',
+    consumesMiddleware: {
+      'multipart/form-data': fileUploads
+    },
+    dependencies: {
+      config: config,
+      storage: storage,
+      runtime: runtime,
+    },
+  });
+
+  // If no other handler gets triggered (errors), respond with the
+  // error serialized to JSON.
+  app.use(function(err, req, res, next) {
+    res.status(err.status).json(err);
+  });
+
+  return app;
+}
+
+module.exports = create_app;

+ 73 - 0
storage-node/packages/colossus/lib/discovery.js

@@ -0,0 +1,73 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+// npm requires
+const express = require('express');
+const openapi = require('express-openapi');
+const bodyParser = require('body-parser');
+const cors = require('cors');
+const yaml = require('js-yaml');
+
+// Node requires
+const fs = require('fs');
+const path = require('path');
+
+// Project requires
+const validateResponses = require('./middleware/validate_responses');
+
+// Configure app
+function create_app(project_root, runtime, config)
+{
+  const app = express();
+  app.use(cors());
+  app.use(bodyParser.json());
+  // FIXME app.use(bodyParser.urlencoded({ extended: true }));
+
+  // Load & extend/configure API docs
+  var api = yaml.safeLoad(fs.readFileSync(
+    path.resolve(project_root, 'api-base.yml')));
+  api['x-express-openapi-additional-middleware'] = [validateResponses];
+  api['x-express-openapi-validation-strict'] = true;
+
+  openapi.initialize({
+    apiDoc: api,
+    app: app,
+    //paths: path.resolve(project_root, 'discovery_app_paths'),
+    paths: {
+      path: '/discover/v0/{id}',
+      module: require('../paths/discover/v0/{id}')
+    },
+    docsPath: '/swagger.json',
+    dependencies: {
+      config: config,
+      runtime: runtime,
+    },
+  });
+
+  // If no other handler gets triggered (errors), respond with the
+  // error serialized to JSON.
+  app.use(function(err, req, res, next) {
+    res.status(err.status).json(err);
+  });
+
+  return app;
+}
+
+module.exports = create_app;

+ 44 - 0
storage-node/packages/colossus/lib/middleware/file_uploads.js

@@ -0,0 +1,44 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const multer = require('multer');
+
+// Taken from express-openapi examples
// Multipart upload middleware (taken from express-openapi examples).
//
// Runs multer over the request, then mirrors each uploaded field into
// req.body with empty-string placeholders so that express-openapi's request
// validation sees the expected string (or string-array) shape; the actual
// file data stays in req.files.
module.exports = function(req, res, next)
{
  multer().any()(req, res, function(err) {
    if (err) {
      return next(err);
    }
    // Handle both single and multiple files
    // Group uploaded files by field name: { fieldname: [file, ...] }.
    const filesMap = req.files.reduce(
      (acc, f) =>
        Object.assign(acc, {
          [f.fieldname]: (acc[f.fieldname] || []).concat(f)
        }),
      {}
    );
    Object.keys(filesMap).forEach((fieldname) => {
      const files = filesMap[fieldname];
      // Placeholder values only -- one '' per file for multi-uploads.
      req.body[fieldname] = files.length > 1 ? files.map(() => '') : '';
    });
    return next();
  });
}

+ 61 - 0
storage-node/packages/colossus/lib/middleware/validate_responses.js

@@ -0,0 +1,61 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:middleware:validate');
+
+// Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
// Response-validation middleware.
// Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
//
// Wraps res.send so every outgoing body is checked against the OpenAPI
// response schema. In strict mode an invalid response is replaced by a 500
// JSON error; otherwise the validation failure is only logged via debug.
module.exports = function(req, res, next)
{
  const strictValidation = req.apiDoc['x-express-openapi-validation-strict'] ? true : false;
  if (typeof res.validateResponse === 'function') {
    const send = res.send;
    res.send = function expressOpenAPISend(...args) {
      const onlyWarn = !strictValidation;
      // These headers mark a response we have already processed; pass it
      // through untouched to avoid validating (and recursing) twice.
      if (res.get('x-express-openapi-validation-error-for') !== undefined) {
        return send.apply(res, args);
      }
      if (res.get('x-express-openapi-validation-for') !== undefined) {
        return send.apply(res, args);
      }

      const body = args[0];
      let validation = res.validateResponse(res.statusCode, body);
      let validationMessage;
      if (validation === undefined) {
        // validateResponse returns undefined on success; normalize.
        validation = { message: undefined, errors: undefined };
      }
      if (validation.errors) {
        const errorList = Array.from(validation.errors).map((_) => _.message).join(',');
        validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`;
        debug(validationMessage);
        // Set to avoid a loop, and to provide the original status code
        res.set('x-express-openapi-validation-error-for', res.statusCode.toString());
      }
      if ((onlyWarn || !validation.errors) && res.statusCode) {
        res.set('x-express-openapi-validation-for', res.statusCode.toString());
        return send.apply(res, args);
      } else {
        res.status(500);
        return res.json({ error: validationMessage });
      }
    }
  }
  next();
}

+ 108 - 0
storage-node/packages/colossus/lib/sync.js

@@ -0,0 +1,108 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:sync');
+
// One synchronization pass: for every known content ID, ensure the content
// is stored locally and that an on-chain storage relationship for this
// provider exists and is marked ready. All content IDs are checked
// concurrently; per-item errors are logged and do not abort the run.
async function sync_callback(api, config, storage)
{
  debug('Starting sync run...');

  // The first step is to gather all data objects from chain.
  // TODO: in future, limit to a configured tranche
  // FIXME this isn't actually on chain yet, so we'll fake it.
  const knownContentIds = await api.assets.getKnownContentIds() || [];

  const role_addr = api.identities.key.address;

  // Iterate over all sync objects, and ensure they're synced.
  const allChecks = knownContentIds.map(async (content_id) => {
    let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(role_addr, content_id);

    let fileLocal;
    try {
      // check if we have content or not
      let stats = await storage.stat(content_id);
      fileLocal = stats.local;
    } catch (err) {
      // on error stating or timeout
      debug(err.message);
      // we don't have content if we can't stat it
      fileLocal = false;
    }

    if (!fileLocal) {
      // Not stored locally yet: kick off a fetch. The relationship will be
      // created on a later pass, once the content is present.
      try {
        await storage.synchronize(content_id);
      } catch (err) {
        debug(err.message)
      }
      return;
    }

    if (!relationship) {
      // create relationship
      debug(`Creating new storage relationship for ${content_id.encode()}`);
      try {
        relationshipId = await api.assets.createAndReturnStorageRelationship(role_addr, content_id);
        await api.assets.toggleStorageRelationshipReady(role_addr, relationshipId, true);
      } catch (err) {
        debug(`Error creating new storage relationship ${content_id.encode()}: ${err.stack}`);
        return;
      }
    } else if (!relationship.ready) {
      debug(`Updating storage relationship to ready for ${content_id.encode()}`);
      // update to ready. (Why would there be a relationship set to ready: false?)
      try {
        await api.assets.toggleStorageRelationshipReady(role_addr, relationshipId, true);
      } catch(err) {
        debug(`Error setting relationship ready ${content_id.encode()}: ${err.stack}`);
      }
    } else {
      // we already have content and a ready relationship set. No need to do anything
      // debug(`content already stored locally ${content_id.encode()}`);
    }
  });


  await Promise.all(allChecks);
  debug('sync run complete');
}
+
+
// Run one sync pass and re-arm the timer for the next, so syncing repeats
// every `syncPeriod` milliseconds. A failed pass is logged and retried on
// the normal schedule.
async function sync_periodic(api, config, storage)
{
  try {
    await sync_callback(api, config, storage);
  } catch (err) {
    debug(`Error in sync_periodic ${err.stack}`);
  } finally {
    const period = config.get('syncPeriod');
    setTimeout(sync_periodic, period, api, config, storage);
  }
}
+
+
+function start_syncing(api, config, storage)
+{
+  sync_periodic(api, config, storage);
+}
+
+module.exports = {
+  start_syncing: start_syncing,
+}

+ 67 - 0
storage-node/packages/colossus/package.json

@@ -0,0 +1,67 @@
+{
+  "name": "@joystream/colossus",
+  "version": "0.1.0",
+  "description": "Colossus - Joystream Storage Node",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "scripts": {
+    "test": "mocha 'test/**/*.js'",
+    "lint": "eslint 'paths/**/*.js' 'lib/**/*.js'",
+    "dev": "nodemon --watch api-base.yml --watch bin/ --watch paths/ --watch lib/ --verbose --ext js --exec node bin/cli.js --"
+  },
+  "bin": {
+    "colossus": "bin/cli.js"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "eslint": "^5.13.0",
+    "express": "^4.16.4",
+    "mocha": "^5.2.0",
+    "node-mocks-http": "^1.7.3",
+    "nodemon": "^1.18.10",
+    "supertest": "^3.4.2",
+    "temp": "^0.9.0"
+  },
+  "dependencies": {
+    "@joystream/runtime-api": "^0.1.0",
+    "@joystream/storage": "^0.1.0",
+    "@joystream/util": "^0.1.0",
+    "body-parser": "^1.19.0",
+    "chalk": "^2.4.2",
+    "configstore": "^4.0.0",
+    "cors": "^2.8.5",
+    "express-openapi": "^4.6.1",
+    "figlet": "^1.2.1",
+    "js-yaml": "^3.13.1",
+    "lodash": "^4.17.11",
+    "meow": "^5.0.0",
+    "multer": "^1.4.1",
+    "si-prefix": "^0.2.0"
+  }
+}

+ 361 - 0
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -0,0 +1,361 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const path = require('path');
+
+const file_type = require('file-type');
+const mime_types = require('mime-types');
+
+const debug = require('debug')('joystream:api:asset');
+
+const util_ranges = require('@joystream/util/ranges');
+const filter = require('@joystream/storage/filter');
+
// Log the error and send a JSON error response. The HTTP status is taken from
// the error itself when available, then the caller-supplied fallback, then 500.
function error_handler(response, err, code) {
  debug(err)
  const status = err.code || code || 500
  response.status(status).send({ message: err.toString() })
}
+
+
+module.exports = function(config, storage, runtime)
+{
+  var doc = {
+    // parameters for all operations in this path
+    parameters: [
+      {
+        name: 'id',
+        in: 'path',
+        required: true,
+        description: 'Joystream Content ID',
+        schema: {
+          type: 'string',
+        },
+      },
+    ],
+
+    // Head: report that ranges are OK
+    head: async function(req, res, _next)
+    {
+      const id = req.params.id;
+
+      // Open file
+      try {
+        const size = await storage.size(id);
+        const stream = await storage.open(id, 'r');
+        const type = stream.file_info.mime_type;
+
+        // Close the stream; we don't need to fetch the file (if we haven't
+        // already). Then return result.
+        stream.destroy();
+
+        res.status(200);
+        res.contentType(type);
+        res.header('Content-Disposition', 'inline');
+        res.header('Content-Transfer-Encoding', 'binary');
+        res.header('Accept-Ranges', 'bytes');
+        if (size > 0) {
+          res.header('Content-Length', size);
+        }
+        res.send();
+      } catch (err) {
+        error_handler(res, err, err.code);
+      }
+    },
+
+    // Put for uploads
+    put: async function(req, res, _next)
+    {
+      const id = req.params.id;
+
+      // First check if we're the liaison for the name, otherwise we can bail
+      // out already.
+      const role_addr = runtime.identities.key.address;
+      let dataObject;
+      try {
+        debug('calling checkLiaisonForDataObject')
+        dataObject = await runtime.assets.checkLiaisonForDataObject(role_addr, id);
+        debug('called checkLiaisonForDataObject')
+      } catch (err) {
+        error_handler(res, err, 403);
+        return;
+      }
+
+      // We'll open a write stream to the backend, but reserve the right to
+      // abort upload if the filters don't smell right.
+      var stream;
+      try {
+        stream = await storage.open(id, 'w');
+
+        // We don't know whether the filtering occurs before or after the
+        // stream was finished, and can only commit if both passed.
+        var finished = false;
+        var accepted = false;
+        const possibly_commit = () => {
+          if (finished && accepted) {
+            debug('Stream is finished and passed filters; committing.');
+            stream.commit();
+          }
+        };
+
+
+        stream.on('file_info', async (info) => {
+          try {
+            debug('Detected file info:', info);
+
+            // Filter
+            const filter_result = filter(config, req.headers, info.mime_type);
+            if (200 != filter_result.code) {
+              debug('Rejecting content', filter_result.message);
+              stream.end();
+              res.status(filter_result.code).send({ message: filter_result.message });
+
+              // Reject the content
+              await runtime.assets.rejectContent(role_addr, id);
+              return;
+            }
+            debug('Content accepted.');
+            accepted = true;
+
+            // We may have to commit the stream.
+            possibly_commit();
+          } catch (err) {
+            error_handler(res, err);
+          }
+        });
+
+        stream.on('finish', () => {
+          try {
+            finished = true;
+            possibly_commit();
+          } catch (err) {
+            error_handler(res, err);
+          }
+        });
+
+        stream.on('committed', async (hash) => {
+          console.log('commited', dataObject)
+          try {
+            if (hash !== dataObject.ipfs_content_id.toString()) {
+              debug('Rejecting content. IPFS hash does not match value in objectId');
+              await runtime.assets.rejectContent(role_addr, id);
+              res.status(400).send({ message: "Uploaded content doesn't match IPFS hash" });
+              return;
+            }
+
+            debug('accepting Content')
+            await runtime.assets.acceptContent(role_addr, id);
+
+            debug('creating storage relationship for newly uploaded content')
+            // Create storage relationship and flip it to ready.
+            const dosr_id = await runtime.assets.createAndReturnStorageRelationship(role_addr, id);
+
+            debug('toggling storage relationship for newly uploaded content')
+            await runtime.assets.toggleStorageRelationshipReady(role_addr, dosr_id, true);
+
+            debug('Sending OK response.');
+            res.status(200).send({ message: 'Asset uploaded.' });
+          } catch (err) {
+            debug(`${err.message}`);
+            error_handler(res, err);
+          }
+        });
+
+        stream.on('error', (err) => error_handler(res, err));
+        req.pipe(stream);
+
+      } catch (err) {
+        error_handler(res, err);
+        return;
+      }
+    },
+
+    // Get content
+    get: async function(req, res, _next)
+    {
+      const id = req.params.id;
+      const download = req.query.download;
+
+      // Parse range header
+      var ranges;
+      if (!download) {
+        try {
+          var range_header = req.headers['range'];
+          ranges = util_ranges.parse(range_header);
+        } catch (err) {
+          // Do nothing; it's ok to ignore malformed ranges and respond with the
+          // full content according to https://www.rfc-editor.org/rfc/rfc7233.txt
+        }
+        if (ranges && ranges.unit != 'bytes') {
+          // Ignore ranges that are not byte units.
+          ranges = undefined;
+        }
+      }
+      debug('Requested range(s) is/are', ranges);
+
+      // Open file
+      try {
+        const size = await storage.size(id);
+        const stream = await storage.open(id, 'r');
+
+        // Add a file extension to download requests if necessary. If the file
+        // already contains an extension, don't add one.
+        var send_name = id;
+        const type = stream.file_info.mime_type;
+        if (download) {
+          var ext = path.extname(send_name);
+          if (!ext) {
+            ext = stream.file_info.ext;
+            if (ext) {
+              send_name = `${send_name}.${ext}`;
+            }
+          }
+        }
+
+        var opts = {
+          name: send_name,
+          type: type,
+          size: size,
+          ranges: ranges,
+          download: download,
+        };
+        util_ranges.send(res, stream, opts);
+
+
+      } catch (err) {
+        error_handler(res, err, err.code);
+      }
+    }
+  };
+
+  // OpenAPI specs
+  doc.get.apiDoc =
+  {
+    description: 'Download an asset.',
+    operationId: 'assetData',
+    tags: ['asset', 'data'],
+    parameters: [
+      {
+        name: 'download',
+        in: 'query',
+        description: 'Download instead of streaming inline.',
+        required: false,
+        allowEmptyValue: true,
+        schema: {
+          type: 'boolean',
+          default: false,
+        },
+      },
+    ],
+    responses: {
+      200: {
+        description: 'Asset download.',
+        content: {
+          default: {
+            schema: {
+              type: 'string',
+              format: 'binary',
+            },
+          },
+        },
+      },
+      default: {
+        description: 'Unexpected error',
+        content: {
+          'application/json': {
+            schema: {
+              '$ref': '#/components/schemas/Error'
+            },
+          },
+        },
+      },
+    },
+  };
+
+  doc.put.apiDoc =
+  {
+    description: 'Asset upload.',
+    operationId: 'assetUpload',
+    tags: ['asset', 'data'],
+    requestBody: {
+      content: {
+        '*/*': {
+          schema: {
+            type: 'string',
+            format: 'binary',
+          },
+        },
+      },
+    },
+    responses: {
+      200: {
+        description: 'Asset upload.',
+        content: {
+          'application/json': {
+            schema: {
+              type: 'object',
+              required: ['message'],
+              properties: {
+                message: {
+                  type: 'string',
+                }
+              },
+            },
+          },
+        },
+      },
+      default: {
+        description: 'Unexpected error',
+        content: {
+          'application/json': {
+            schema: {
+              '$ref': '#/components/schemas/Error'
+            },
+          },
+        },
+      },
+    },
+  };
+
+
+  doc.head.apiDoc =
+  {
+    description: 'Asset download information.',
+    operationId: 'assetInfo',
+    tags: ['asset', 'metadata'],
+    responses: {
+      200: {
+        description: 'Asset info.',
+      },
+      default: {
+        description: 'Unexpected error',
+        content: {
+          'application/json': {
+            schema: {
+              '$ref': '#/components/schemas/Error'
+            },
+          },
+        },
+      },
+    },
+  };
+
+  return doc;
+};

+ 86 - 0
storage-node/packages/colossus/paths/discover/v0/{id}.js

@@ -0,0 +1,86 @@
+const { discover } = require('@joystream/discovery')
+const debug = require('debug')('joystream:api:discovery');
+
+const MAX_CACHE_AGE = 30 * 60 * 1000;
+const USE_CACHE = true;
+
+module.exports = function(config, runtime)
+{
+  var doc = {
+    // parameters for all operations in this path
+    parameters: [
+      {
+        name: 'id',
+        in: 'path',
+        required: true,
+        description: 'Actor accouuntId',
+        schema: {
+          type: 'string',
+        },
+      },
+    ],
+
+    // Resolve Service Information
+    get: async function(req, res)
+    {
+        const id = req.params.id;
+        let cacheMaxAge = req.query.max_age;
+
+        if (cacheMaxAge) {
+          try {
+            cacheMaxAge = parseInt(cacheMaxAge);
+          } catch(err) {
+            cacheMaxAge = MAX_CACHE_AGE
+          }
+        } else {
+          cacheMaxAge = 0
+        }
+
+        // todo - validate id before querying
+
+        try {
+          debug(`resolving ${id}`);
+          const info = await discover.discover(id, runtime, USE_CACHE, cacheMaxAge);
+          if (info == null) {
+            debug('info not found');
+            res.status(404).end();
+          } else {
+            res.status(200).send(info);
+          }
+
+        } catch (err) {
+          debug(`${err}`);
+          res.status(400).end()
+        }
+    }
+  };
+
+    // OpenAPI specs
+    doc.get.apiDoc = {
+        description: 'Resolve Service Information',
+        operationId: 'discover',
+        //tags: ['asset', 'data'],
+        responses: {
+            200: {
+                description: 'Wrapped JSON Service Information',
+                content: {
+                  'application/json': {
+                    schema: {
+                      required: ['serialized'],
+                      properties: {
+                        'serialized': {
+                          type: 'string'
+                        },
+                        'signature': {
+                          type: 'string'
+                        }
+                      },
+                    },
+                  }
+                }
+            }
+        }
+    }
+
+    return doc;
+};

+ 1 - 0
storage-node/packages/colossus/test/index.js

@@ -0,0 +1 @@
+// Add Tests!

+ 68 - 0
storage-node/packages/discovery/IpfsResolver.js

@@ -0,0 +1,68 @@
+const IpfsClient = require('ipfs-http-client')
+const axios = require('axios')
+const { Resolver } = require('./Resolver')
+
+class IpfsResolver extends Resolver {
+    constructor({
+        host = 'localhost',
+        port,
+        mode = 'rpc', // rpc or gateway
+        protocol = 'http', // http or https
+        ipfs,
+        runtime
+    }) {
+        super({runtime})
+
+        if (ipfs) {
+            // use an existing ipfs client instance
+            this.ipfs = ipfs
+        } else if (mode == 'rpc') {
+            port = port || '5001'
+            this.ipfs = IpfsClient(host, port, { protocol })
+        } else if (mode === 'gateway') {
+            port = port || '8080'
+            this.gateway = this.constructUrl(protocol, host, port)
+        } else {
+            throw new Error('Invalid IPFS Resolver options')
+        }
+    }
+
+    async _resolveOverRpc(identity) {
+        const ipnsPath = `/ipns/${identity}/`
+
+        const ipfsName = await this.ipfs.name.resolve(ipnsPath, {
+            recursive: false, // there should only be one indirection to service info file
+            nocache: false,
+        })
+
+        const data = await this.ipfs.get(ipfsName)
+
+        // there should only be one file published under the resolved path
+        const content = data[0].content
+
+        return JSON.parse(content)
+    }
+
+    async _resolveOverGateway(identity) {
+        const url = `${this.gateway}/ipns/${identity}`
+
+        // expected JSON object response
+        const response = await axios.get(url)
+
+        return response.data
+    }
+
+    resolve(accountId) {
+        const identity = this.resolveIdentity(accountId)
+
+        if (this.ipfs) {
+            return this._resolveOverRpc(identity)
+        } else {
+            return this._resolveOverGateway(identity)
+        }
+    }
+}
+
+module.exports = {
+    IpfsResolver
+}

+ 28 - 0
storage-node/packages/discovery/JdsResolver.js

@@ -0,0 +1,28 @@
const axios = require('axios')
const { Resolver } = require('./Resolver')

/**
 * Resolver that queries a Joystream discovery service (JDS) node over HTTP.
 */
class JdsResolver extends Resolver {
    constructor({
        protocol = 'http', // http or https
        host = 'localhost',
        port,
        runtime
    }) {
        super({ runtime })

        // Base URL of the discovery HTTP API this resolver will query.
        this.baseUrl = this.constructUrl(protocol, host, port)
    }

    /**
     * Fetch the published service information for `accountId`.
     * The endpoint is expected to answer with a JSON object.
     */
    async resolve(accountId) {
        const endpoint = `${this.baseUrl}/discover/v0/${accountId}`
        const response = await axios.get(endpoint)
        return response.data
    }
}

module.exports = { JdsResolver }

+ 129 - 0
storage-node/packages/discovery/README.md

@@ -0,0 +1,129 @@
+# Discovery
+
+The `@joystream/discovery` package provides an API for role services to publish
+discovery information about themselves, and for consumers to resolve this
+information.
+
+In the Joystream network, services are provided by having members stake for a
+role. The role is identified by a unique actor key. Resolving service information
+associated with the actor key is the main purpose of this module.
+
+This implementation is based on [IPNS](https://docs.ipfs.io/guides/concepts/ipns/)
+as well as runtime information.
+
+## Discovery Workflow
+
+The discovery workflow provides an actor public key to the `discover()` function, which
+will eventually return structured data.
+
+Clients can verify that the structured data has been signed by the identifying
+actor. This is normally done automatically, unless a `verify: false` option is
+passed to `discover()`. Then, a separate `verify()` call can be used for
+verification.
+
+Under the hood, `discover()` uses any known participating node in the discovery
+network. If no other nodes are known, the bootstrap nodes from the runtime are
+used.
+
+There is a distinction in the discovery workflow:
+
+1. If run in the browser environment, a HTTP request to a participating node
+  is performed to discover nodes.
+2. If run in a node.js process, instead:
+  - A trusted (local) IPFS node must be configured.
+  - The chain is queried to resolve an actor key to an IPNS peer ID.
+  - The trusted IPFS node is used to resolve the IPNS peer ID to an IPFS
+    file.
+  - The IPFS file is fetched; this contains the structured data.
+
+Web services providing the HTTP endpoint used in the first approach will
+themselves use the second approach for fulfilling queries.
+
+## Publishing Workflow
+
+The publishing workflow is a little more involved, and requires more interaction
+with the runtime and the trusted IPFS node.
+
+1. A service information file is created.
+1. The file is signed with the actor key (see below).
+1. The file is published on IPFS.
+1. The IPNS name of the trusted IPFS node is updated to refer to the published
+   file.
+1. The runtime mapping from the actor ID to the IPNS name is updated.
+
+## Published Information
+
+Any JSON data can theoretically be published with this system; however, the
+following structure is currently imposed:
+
+- The JSON must be an Object at the top-level, not an Array.
+- Each key must correspond to a service spec (below).
+
+The data is signed using the [@joystream/json-signing](../json-signing/README.md)
+package.
+
+## Service Info Specifications
+
Service specifications are JSON Objects, not Arrays. All service specifications
come with their own `version` field which should be interpreted by clients making
use of the information.
+
+Additionally, some services may only provide an `endpoint` value, as defined
+here:
+
+* `version`: A numeric version identifier for the service info field.
+* `endpoint`: A publicly accessible base URL for a service API.
+
+The `endpoint` should include a scheme and full authority, such that appending
+`swagger.json` to the path resolves the OpenAPI definition of the API served
+at this endpoint.
+
+The OpenAPI definition must include a top level path component corresponding
+to the service name, followed by an API version component. The remainder of the
+provided paths are dependent on the specific version of the API provided.
+
+For example, for an endpoint value of `https://user:password@host:port/` the
+following must hold:
+
+- `https://user:password@host:port/swagger.json` resolves to the OpenAPI
+  definition of the API(s) provided by this endpoint.
+- The OpenAPI definitions include paths prefixed by
+  `https://user:password@host:port/XXX/vYYY` where
+  - `XXX` is the service name, identical to the field name of the service spec
+    in the published service information.
+  - `YYY` the version identifier for the published service API.
+
+**Note:** The `version` field in the spec indicates the version of the spec.
+The `YYY` path component above indicates the version of the published OpenAPI.
+
+### Discovery Service
+
+Publishes `version` and `endpoint` as above; the `version` field is currently
+always `1`.
+
+### Asset Service
+
+Publishes `version` and `endpoint` as above; the `version` field is currently
+always `1`.
+
+### Example
+
+```json
+{
+  "asset": {
+    "version": 1,
+    "endpoint": "https://foo.bar/"
+  },
  "discovery": {
    "version": 1,
    "endpoint": "http://quux.io/"
  }
}
+```
+
+Here, the following must be true:
+
+- `https://foo.bar/swagger.json` must include paths beginning with `https://foo.bar/asset/vXXX`
+  where `XXX` is the API version of the asset API.
- `http://quux.io/swagger.json` must include paths beginning with `http://quux.io/discovery/vYYY`
  where `YYY` is the API version of the discovery API.

+ 48 - 0
storage-node/packages/discovery/Resolver.js

@@ -0,0 +1,48 @@
/**
 * Base class for service-information resolvers. Concrete subclasses implement
 * resolve(accountId) against a particular transport (IPFS RPC, HTTP gateway,
 * discovery service, ...). `runtime` is the chain API used to map account ids
 * to IPNS identities.
 */
class Resolver {
    constructor ({
        runtime
    }) {
        this.runtime = runtime
    }

    /**
     * Assemble a base URL such as "http://example.com:8080".
     * @param {string} protocol - scheme without trailing colon ('http'/'https')
     * @param {string} host
     * @param {string|number} [port] - omitted from the URL when falsy
     */
    constructUrl (protocol, host, port) {
        port = port ? `:${port}` : ''
        // Fix: single '://' separator — previously ':://' produced invalid
        // URLs like "http:://host".
        return `${protocol}://${host}${port}`
    }

    // Verify the account is an actor, resolve its on-chain identity, then
    // delegate to the subclass transport.
    async resolveServiceInformation(accountId) {
        let isActor = await this.runtime.identities.isActor(accountId)

        if (!isActor) {
            throw new Error('Cannot discover non actor account service info')
        }

        const identity = await this.resolveIdentity(accountId)

        if (identity == null) {
            // dont waste time trying to resolve if no identity was found
            throw new Error('no identity to resolve');
        }

        return this.resolve(accountId)
    }

    // lookup ipns identity from chain corresponding to accountId
    // return null if no identity found or record is expired
    async resolveIdentity(accountId) {
        const info = await this.runtime.discovery.getAccountInfo(accountId)
        return info ? info.identity.toString() : null
    }
}

Resolver.Error = {};
Resolver.Error.UnrecognizedProtocol = class UnrecognizedProtocol extends Error {
    constructor(message) {
        super(message);
        this.name = 'UnrecognizedProtocol';
    }
}
+
+module.exports = {
+    Resolver
+}

+ 182 - 0
storage-node/packages/discovery/discover.js

@@ -0,0 +1,182 @@
+const axios = require('axios')
+const debug = require('debug')('discovery::discover')
+const stripEndingSlash = require('@joystream/util/stripEndingSlash')
+
+const ipfs = require('ipfs-http-client')('localhost', '5001', { protocol: 'http' })
+
function inBrowser() {
    // Browsers expose a global `window`; Node.js does not.
    const globalWindow = typeof window;
    return globalWindow !== 'undefined';
}

// Discoveries currently in flight, keyed by stringified account id — lets
// concurrent callers share a single lookup instead of starting duplicates.
const activeDiscoveries = {};

// Latest known result per account id: { value, updated }, where `updated` is
// a Date.now() timestamp.
const accountInfoCache = {};

// Age (ms) past which a cached entry triggers a background refresh.
const CACHE_TTL = 60 * 60 * 1000;
+
/**
 * Look up the IPNS identity registered on-chain for the given actor account.
 * Resolves to the identity string, or null when the account has no record.
 */
async function getIpnsIdentity (actorAccountId, runtimeApi) {
    const info = await runtimeApi.discovery.getAccountInfo(actorAccountId)
    return info == null ? null : info.identity.toString()
}
+
// Resolve service information through an IPFS HTTP gateway
// (default: a local gateway on port 8080).
async function discover_over_ipfs_http_gateway(actorAccountId, runtimeApi, gateway) {
    let isActor = await runtimeApi.identities.isActor(actorAccountId)

    if (!isActor) {
        throw new Error('Cannot discover non actor account service info')
    }

    const identity = await getIpnsIdentity(actorAccountId, runtimeApi)

    if (identity == null) {
        // Fix: bail out early instead of requesting "/ipns/null" from the
        // gateway (consistent with discover_over_joystream_discovery_service).
        throw new Error('no identity to resolve');
    }

    gateway = gateway || 'http://localhost:8080'

    const url = `${gateway}/ipns/${identity}`

    const response = await axios.get(url)

    return response.data
}
+
// Resolve service information by asking a Joystream discovery service node
// over HTTP. When no endpoint is supplied, the first bootstrap endpoint
// registered on-chain is used.
async function discover_over_joystream_discovery_service(actorAccountId, runtimeApi, discoverApiEndpoint) {
    let isActor = await runtimeApi.identities.isActor(actorAccountId)

    if (!isActor) {
        throw new Error('Cannot discover non actor account service info')
    }

    const identity = await getIpnsIdentity(actorAccountId, runtimeApi)

    if (identity == null) {
        // dont waste time trying to resolve if no identity was found
        throw new Error('no identity to resolve');
    }

    if (!discoverApiEndpoint) {
        // Use bootstrap nodes
        let discoveryBootstrapNodes = await runtimeApi.discovery.getBootstrapEndpoints()

        if (discoveryBootstrapNodes.length) {
            // NOTE(review): only the first bootstrap node is tried; no
            // fallback to the others on failure — confirm this is intended.
            discoverApiEndpoint = stripEndingSlash(discoveryBootstrapNodes[0].toString())
        } else {
            throw new Error('No known discovery bootstrap nodes found on network');
        }
    }

    const url = `${discoverApiEndpoint}/discover/v0/${actorAccountId}`

    // should have parsed if data was json?
    const response = await axios.get(url)

    return response.data
}
+
// Resolve service information through the locally configured IPFS node:
// IPNS name -> IPFS path -> service-info JSON file.
async function discover_over_local_ipfs_node(actorAccountId, runtimeApi) {
    let isActor = await runtimeApi.identities.isActor(actorAccountId)

    if (!isActor) {
        throw new Error('Cannot discover non actor account service info')
    }

    const identity = await getIpnsIdentity(actorAccountId, runtimeApi)
    // NOTE(review): unlike the discovery-service path, a null identity is not
    // rejected here, so the address below would contain the string "null" —
    // confirm whether an explicit guard is wanted.

    const ipns_address = `/ipns/${identity}/`

    debug('resolved ipns to ipfs object')
    let ipfs_name = await ipfs.name.resolve(ipns_address, {
        recursive: false, // there should only be one indirection to service info file
        nocache: false,
    }) // this can hang forever!? can we set a timeout?

    debug('getting ipfs object', ipfs_name)
    let data = await ipfs.get(ipfs_name) // this can sometimes hang forever!?! can we set a timeout?

    // there should only be one file published under the resolved path
    let content = data[0].content

    // verify information and if 'discovery' service found
    // add it to our list of bootstrap nodes

    // TODO cache result or flag
    return JSON.parse(content)
}
+
// Public entry point: resolve service information for an actor account,
// optionally serving a cached value.
// - useCachedValue: allow returning a previously discovered value.
// - maxCacheAge: when > 0 and the cached entry is older than this (ms), the
//   cache is bypassed and a fresh discovery is awaited; when 0, any cached
//   value is acceptable.
async function discover (actorAccountId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
    const id = actorAccountId.toString();
    const cached = accountInfoCache[id];

    if (cached && useCachedValue) {
        if (maxCacheAge > 0) {
            // get latest value
            if (Date.now() > (cached.updated + maxCacheAge)) {
                return _discover(actorAccountId, runtimeApi);
            }
        }
        // refresh if cache is stale, new value returned on next cached query
        // NOTE: the refresh promise is deliberately not awaited here; its
        // outcome (including errors) is handled inside _discover.
        if (Date.now() > (cached.updated + CACHE_TTL)) {
            _discover(actorAccountId, runtimeApi);
        }
        // return best known value
        return cached.value;
    } else {
        return _discover(actorAccountId, runtimeApi);
    }
}
+
// Build a promise whose resolve/reject handles are exposed to the caller, so
// it can be settled from outside the executor (a "deferred").
function createExternallyControlledPromise() {
    let resolve;
    let reject;
    const promise = new Promise((res, rej) => {
        resolve = res;
        reject = rej;
    });
    return { resolve, reject, promise };
}
+
// Perform the actual discovery, de-duplicating concurrent requests: if a
// lookup for this id is already in flight, return its promise instead of
// starting another. On success the serialized result is cached.
async function _discover(actorAccountId, runtimeApi) {
    const id = actorAccountId.toString();

    const discoveryResult = activeDiscoveries[id];
    if (discoveryResult) {
        debug('discovery in progress waiting for result for',id);
        return discoveryResult
    }

    debug('starting new discovery for', id);
    const deferredDiscovery = createExternallyControlledPromise();
    activeDiscoveries[id] = deferredDiscovery.promise;

    let result;
    try {
        // Browsers cannot reach a local IPFS node, so they go through a
        // discovery service; Node.js resolves via the local IPFS node.
        if (inBrowser()) {
            result = await discover_over_joystream_discovery_service(actorAccountId, runtimeApi)
        } else {
            result = await discover_over_local_ipfs_node(actorAccountId, runtimeApi)
        }
        debug(result)
        // Cache and hand out the serialized (JSON string) form.
        result = JSON.stringify(result)
        accountInfoCache[id] = {
            value: result,
            updated: Date.now()
        };

        deferredDiscovery.resolve(result);
        delete activeDiscoveries[id];
        return result;
    } catch (err) {
        // Propagate the failure to every waiter, then clear the in-flight
        // slot so a later call can retry.
        debug(err.message);
        deferredDiscovery.reject(err);
        delete activeDiscoveries[id];
        throw err;
    }
}

module.exports = {
    discover,
    discover_over_joystream_discovery_service,
    discover_over_ipfs_http_gateway,
    discover_over_local_ipfs_node,
}

+ 34 - 0
storage-node/packages/discovery/example.js

@@ -0,0 +1,34 @@
+const { RuntimeApi } = require('@joystream/runtime-api')
+
+const { discover, publish } = require('./')
+
// Example / smoke-test script for the discovery package: publishes service
// info for a hard-coded account and then discovers it again.
// NOTE(review): account_file points at a developer's local path and the
// account id is hard-coded — sample code, not production tooling.
async function main() {
    const runtimeApi = await RuntimeApi.create({
        account_file: "/Users/mokhtar/Downloads/5Gn9n7SDJ7VgHqHQWYzkSA4vX6DCmS5TFWdHxikTXp9b4L32.json"
    })

    // NOTE(review): publish.publish() takes a single service_info argument
    // (see publish.js) — the account-id and runtimeApi arguments passed here
    // appear to be ignored; confirm the intended signature.
    let published = await publish.publish(
        "5Gn9n7SDJ7VgHqHQWYzkSA4vX6DCmS5TFWdHxikTXp9b4L32",
        {
            asset: {
                version: 1,
                endpoint: 'http://endpoint.com'
            }
        },
        runtimeApi
    )

    console.log(published)

    // let serviceInfo = await discover('5Gn9n7SDJ7VgHqHQWYzkSA4vX6DCmS5TFWdHxikTXp9b4L32', { runtimeApi })
    let serviceInfo = await discover.discover(
        '5Gn9n7SDJ7VgHqHQWYzkSA4vX6DCmS5TFWdHxikTXp9b4L32',
        runtimeApi
    )

    console.log(serviceInfo)

    // Disconnect so the process can exit.
    runtimeApi.api.disconnect()
}

main()

+ 5 - 0
storage-node/packages/discovery/index.js

@@ -0,0 +1,5 @@
+
+module.exports = {
+    discover : require('./discover'),
+    publish : require('./publish'),
+}

+ 59 - 0
storage-node/packages/discovery/package.json

@@ -0,0 +1,59 @@
+{
+  "name": "@joystream/discovery",
+  "version": "0.1.0",
+  "description": "Service Discovery - Joystream Storage Node",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "main": "./index.js",
+  "scripts": {
+    "test": "mocha 'test/**/*.js'",
+    "lint": "eslint 'paths/**/*.js' 'lib/**/*.js'"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "eslint": "^5.13.0",
+    "mocha": "^5.2.0",
+    "supertest": "^3.4.2",
+    "temp": "^0.9.0"
+  },
+  "dependencies": {
+    "@joystream/runtime-api": "^0.1.0",
+    "@joystream/util": "^0.1.0",
+    "async-lock": "^1.2.0",
+    "axios": "^0.18.0",
+    "chalk": "^2.4.2",
+    "configstore": "^4.0.0",
+    "figlet": "^1.2.1",
+    "ipfs-http-client": "^32.0.1",
+    "js-yaml": "^3.13.1",
+    "meow": "^5.0.0",
+    "multer": "^1.4.1",
+    "si-prefix": "^0.2.0"
+  }
+}

+ 53 - 0
storage-node/packages/discovery/publish.js

@@ -0,0 +1,53 @@
+const ipfsClient = require('ipfs-http-client')
+const ipfs = ipfsClient('localhost', '5001', { protocol: 'http' })
+
+const debug = require('debug')('discovery::publish')
+
+const PUBLISH_KEY = 'self'; // 'services';
+
// JSON-encode `data` and return the result as a UTF-8 Buffer.
function bufferFrom(data) {
    const json = JSON.stringify(data)
    return Buffer.from(json, 'utf-8')
}
+
// Wrap service info in the published envelope: the info itself is serialized
// to a JSON string so a future signature can cover its exact bytes.
function encodeServiceInfo(info) {
    const envelope = {
        serialized: JSON.stringify(info),
        // signature: ''
    }
    return bufferFrom(envelope)
}
+
// Publish service information: add the encoded info file to the local IPFS
// node and point this node's IPNS name (key PUBLISH_KEY) at it. Returns the
// id of the IPNS key used, which is what gets registered on-chain.
async function publish (service_info) {
    const keys = await ipfs.key.list()
    let services_key = keys.find((key) => key.name === PUBLISH_KEY)

    // generate a new services key if not found
    // (only relevant when PUBLISH_KEY is not the node's built-in 'self' key)
    if (PUBLISH_KEY !== 'self' && !services_key) {
        debug('generating ipns services key')
        services_key = await ipfs.key.gen(PUBLISH_KEY, {
          type: 'rsa',
          size: 2048
        });
    }

    if (!services_key) {
        throw new Error('No IPFS publishing key available!')
    }

    debug('adding service info file to node')
    const files = await ipfs.add(encodeServiceInfo(service_info))

    debug('publishing...')
    const published = await ipfs.name.publish(files[0].hash, {
        key: PUBLISH_KEY,
        resolve: false,
        // lifetime: // string - Time duration of the record. Default: 24h
        // ttl:      // string - Time duration this record should be cached
    })

    debug(published)
    return services_key.id;
}

module.exports = {
    publish
}

+ 1 - 0
storage-node/packages/discovery/test/index.js

@@ -0,0 +1 @@
+// Add Tests!

+ 3 - 0
storage-node/packages/helios/.gitignore

@@ -0,0 +1,3 @@
+node_modules/
+lib/
+

+ 12 - 0
storage-node/packages/helios/README.md

@@ -0,0 +1,12 @@
+# Joystream Helios
+
+A basic tool that scans the Joystream storage network to get a bird's-eye view of the health of the storage providers and the content replication status.
+
+
+## Scanning
+
+```
+yarn
+yarn run helios
+```
+

+ 166 - 0
storage-node/packages/helios/bin/cli.js

@@ -0,0 +1,166 @@
+#!/usr/bin/env node
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+const { encodeAddress } = require('@polkadot/keyring')
+const { discover } = require('@joystream/discovery');
+const axios = require('axios');
+const stripEndingSlash = require('@joystream/util/stripEndingSlash');
+
+// Scan the storage network: classify providers (live/expired/down) by their
+// on-chain discovery info, resolve live providers' endpoints over IPNS,
+// probe the endpoints, and report per-asset replication and availability.
+(async function main () {
+
+  const runtime = await RuntimeApi.create();
+  const api  = runtime.api;
+
+  // get current blockheight
+  const currentHeader = await api.rpc.chain.getHeader();
+  const currentHeight = currentHeader.number.toBn();
+
+  // get all providers (role 0 is assumed to be the storage role — confirm)
+  const storageProviders = await api.query.actors.accountIdsByRole(0);
+
+  // Fetch discovery info and join block for every provider in parallel.
+  const storageProviderAccountInfos = await Promise.all(storageProviders.map(async (account) => {
+    return ({
+      account,
+      info: await runtime.discovery.getAccountInfo(account),
+      joined: (await api.query.actors.actorByAccountId(account)).unwrap().joined_at
+    });
+  }));
+
+  // Providers whose discovery record has not yet expired.
+  const liveProviders = storageProviderAccountInfos.filter(({account, info}) => {
+    return info && info.expires_at.gte(currentHeight)
+  });
+
+  // Providers with no discovery record at all.
+  const downProviders = storageProviderAccountInfos.filter(({account, info}) => {
+    return info == null
+  });
+
+  // Providers whose discovery record exists but has expired.
+  const expiredTtlProviders = storageProviderAccountInfos.filter(({account, info}) => {
+    return info && currentHeight.gte(info.expires_at)
+  });
+
+  let providersStatuses = mapInfoToStatus(liveProviders, currentHeight);
+  console.log('\n== Live Providers\n', providersStatuses);
+
+  let expiredProviderStatuses = mapInfoToStatus(expiredTtlProviders, currentHeight)
+  console.log('\n== Expired Providers\n', expiredProviderStatuses);
+
+  // check when actor account was created consider grace period before removing
+  console.log('\n== Down Providers!\n', downProviders.map(provider => {
+    return ({
+      account: provider.account.toString(),
+      age: currentHeight.sub(provider.joined).toNumber()
+    })
+  }));
+
+  // Resolve IPNS identities of providers
+  console.log('\nResolving live provider API Endpoints...')
+  //providersStatuses = providersStatuses.concat(expiredProviderStatuses);
+  let endpoints = await Promise.all(providersStatuses.map(async (status) => {
+    try {
+      let serviceInfo = await discover.discover_over_joystream_discovery_service(status.address, runtime);
+      let info = JSON.parse(serviceInfo.serialized);
+      console.log(`${status.address} -> ${info.asset.endpoint}`);
+      return { address: status.address, endpoint: info.asset.endpoint};
+    } catch (err) {
+      // Resolution failure is non-fatal; keep the provider with a null endpoint.
+      console.log('resolve failed', status.address, err.message);
+      return { address: status.address, endpoint: null};
+    }
+  }));
+
+  // Probe each resolved endpoint by fetching its swagger.json.
+  console.log('\nChecking API Endpoint is online')
+  await Promise.all(endpoints.map(async (provider) => {
+    if (!provider.endpoint) {
+      console.log('skipping', provider.address);
+      return
+    }
+    const swaggerUrl = `${stripEndingSlash(provider.endpoint)}/swagger.json`;
+    let error;
+    try {
+      await axios.get(swaggerUrl)
+    } catch (err) {error = err}
+    console.log(`${provider.endpoint} - ${error ? error.message : 'OK'}`);
+  }));
+
+  // after resolving for each resolved provider, HTTP HEAD with axios all known content ids
+  // report available/known
+  let knownContentIds = await runtime.assets.getKnownContentIds()
+
+  console.log(`\nContent Directory has ${knownContentIds.length} assets`);
+
+  await Promise.all(knownContentIds.map(async (contentId) => {
+    let [relationships, judgement] = await assetRelationshipState(api, contentId, storageProviders);
+    console.log(`${encodeAddress(contentId)} replication ${relationships}/${storageProviders.length} - ${judgement}`);
+  }));
+
+  console.log('\nChecking available assets on providers...');
+
+  // NOTE(review): these async map callbacks are NOT awaited, so the
+  // disconnect() below may run before (or instead of) the availability
+  // checks completing — confirm whether this should be Promise.all(...).
+  endpoints.map(async ({address, endpoint}) => {
+    if (!endpoint) { return }
+    let { found, content } = await countContentAvailability(knownContentIds, endpoint);
+    console.log(`${address}: has ${found} assets`);
+    return content
+  });
+
+
+  // interesting disconnect doesn't work unless an explicit provider was created
+  // for underlying api instance
+  runtime.api.disconnect();
+})();
+
+// Map provider {account, info, joined} records to plain status objects
+// relative to `currentHeight`: records with discovery info get age,
+// identity, and expiry fields; records without info are marked 'down'.
+function mapInfoToStatus(providers, currentHeight) {
+  return providers.map(({account, info, joined}) => {
+    if (info) {
+      return {
+        address: account.toString(),
+        age: currentHeight.sub(joined).toNumber(),
+        identity: info.identity.toString(),
+        expiresIn: info.expires_at.sub(currentHeight).toNumber(),
+        expired: currentHeight.gte(info.expires_at),
+      }
+    } else {
+      return {
+        address: account.toString(),
+        identity: null,
+        status: 'down'
+      }
+    }
+  })
+}
+
+// HEAD-request every content id against `source` (a provider endpoint),
+// sequentially, and return { found, content } where `content` maps each
+// available (encoded) content id to its content-type and byte size.
+// Failed requests are logged and skipped.
+async function countContentAvailability(contentIds, source) {
+  let content = {}
+  let found = 0;
+  // Sequential on purpose? One request at a time avoids hammering the
+  // provider — confirm before parallelizing.
+  for(let i = 0; i < contentIds.length; i++) {
+    const assetUrl = makeAssetUrl(contentIds[i], source);
+    try {
+      let info = await axios.head(assetUrl)
+      content[encodeAddress(contentIds[i])] = {
+        type: info.headers['content-type'],
+        bytes: info.headers['content-length']
+      }
+      found++
+    } catch(err) { console.log(`${assetUrl} ${err.message}`); continue; }
+  }
+  console.log(content);
+  return { found, content };
+}
+
+// Build the asset download URL for a content id on a provider endpoint.
+function makeAssetUrl(contentId, source) {
+  source = stripEndingSlash(source);
+  return `${source}/asset/v0/${encodeAddress(contentId)}`
+}
+
+// For one content id, return [activeRelationshipCount, liaisonJudgement]:
+// how many of its storage relationships belong to a currently-known
+// provider, plus the data object's liaison judgement.
+async function assetRelationshipState(api, contentId, providers) {
+  let dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId);
+
+  // how many relationships out of active providers?
+  let relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId);
+
+  let activeRelationships = await Promise.all(relationshipIds.map(async (id) => {
+    let relationship = await api.query.dataObjectStorageRegistry.relationships(id);
+    relationship = relationship.unwrap()
+    // undefined when the relationship's provider is not in `providers`
+    return providers.find((provider) => relationship.storage_provider.eq(provider))
+  }));
+
+  return [activeRelationships.filter(active => active).length, dataObject.unwrap().liaison_judgement]
+}

+ 17 - 0
storage-node/packages/helios/package.json

@@ -0,0 +1,17 @@
+{
+  "name": "@joystream/helios",
+  "version": "0.1.0",
+  "bin": {
+    "helios": "bin/cli.js"
+  },
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 0"
+  },
+  "license": "MIT",
+  "dependencies": {
+    "@joystream/runtime-api": "^0.1.0",
+    "@types/bn.js": "^4.11.5",
+    "axios": "^0.19.0",
+    "bn.js": "^4.11.8"
+  }
+}

+ 1 - 0
storage-node/packages/helios/test/index.js

@@ -0,0 +1 @@
+// Add Tests!

+ 1 - 0
storage-node/packages/runtime-api/.eslintrc.js

@@ -0,0 +1 @@
+../../.eslintrc.js

+ 3 - 0
storage-node/packages/runtime-api/.gitignore

@@ -0,0 +1,3 @@
+# Generated JS files
+types/*.js
+!types/index.js

+ 7 - 0
storage-node/packages/runtime-api/README.md

@@ -0,0 +1,7 @@
+Summary
+=======
+
+This package contains convenience functions for the runtime API.
+
+The main entry point creates and initializes a `@polkadot/api` instance, and
+provides more workflow oriented functions than the underlying API exposes.

+ 176 - 0
storage-node/packages/runtime-api/assets.js

@@ -0,0 +1,176 @@
+'use strict';
+
+const debug = require('debug')('joystream:runtime:assets');
+
+const { Null } = require('@polkadot/types/primitive');
+
+const { _ } = require('lodash');
+
+const { decodeAddress, encodeAddress } = require('@polkadot/keyring');
+
+// Decode an address-encoded content id to raw bytes; if decoding fails,
+// return the value unchanged (it is presumably already in raw form —
+// callers pass either representation).
+function parseContentId(contentId) {
+  try {
+    return decodeAddress(contentId)
+  } catch (err) {
+    return contentId
+  }
+}
+
+/*
+ * Add asset related functionality to the substrate API.
+ */
+class AssetsApi
+{
+  // Async factory: bind to the base RuntimeApi and initialize.
+  static async create(base)
+  {
+    const ret = new AssetsApi();
+    ret.base = base;
+    await ret.init();
+    return ret;
+  }
+
+  // No-op initializer (account_file parameter is currently unused).
+  async init(account_file)
+  {
+    debug('Init');
+  }
+
+  /*
+   * Create a data object.
+   */
+  async createDataObject(accountId, contentId, doTypeId, size)
+  {
+    contentId = parseContentId(contentId)
+    const tx = this.base.api.tx.dataDirectory.addContent(contentId, doTypeId, size);
+    await this.base.signAndSend(accountId, tx);
+
+    // If the data object constructed properly, we should now be able to return
+    // the data object from the state.
+    return await this.getDataObject(contentId);
+  }
+
+  /*
+   * Return the Data Object for a CID
+   */
+  async getDataObject(contentId)
+  {
+    contentId = parseContentId(contentId)
+    const obj = await this.base.api.query.dataDirectory.dataObjectByContentId(contentId);
+    return obj;
+  }
+
+  /*
+   * Verify the liaison state for a DO:
+   * - Check the content ID has a DO
+   * - Check the account is the liaison
+   * - Check the liaison state is pending
+   *
+   * Each failure errors out, success returns the data object.
+   */
+  async checkLiaisonForDataObject(accountId, contentId)
+  {
+    contentId = parseContentId(contentId)
+
+    let obj = await this.getDataObject(contentId);
+
+    if (obj.isNone) {
+      throw new Error(`No DataObject created for content ID: ${contentId}`);
+    }
+
+    const encoded = encodeAddress(obj.raw.liaison);
+    if (encoded != accountId) {
+      throw new Error(`This storage node is not liaison for the content ID: ${contentId}`);
+    }
+
+    if (obj.raw.liaison_judgement.type != 'Pending') {
+      throw new Error(`Expected Pending judgement, but found: ${obj.raw.liaison_judgement.type}`);
+    }
+
+    return obj.unwrap();
+  }
+
+  /*
+   * Changes a data object liaison judgement.
+   */
+  async acceptContent(accountId, contentId)
+  {
+    contentId = parseContentId(contentId)
+    const tx = this.base.api.tx.dataDirectory.acceptContent(contentId);
+    return await this.base.signAndSend(accountId, tx);
+  }
+
+  /*
+   * Changes a data object liaison judgement.
+   */
+  async rejectContent(accountId, contentId)
+  {
+    contentId = parseContentId(contentId)
+    const tx = this.base.api.tx.dataDirectory.rejectContent(contentId);
+    return await this.base.signAndSend(accountId, tx);
+  }
+
+  /*
+   * Create storage relationship
+   * `callback` (optional) receives matching DataObjectStorageRelationshipAdded
+   * events via signAndSend's event subscription.
+   */
+  async createStorageRelationship(accountId, contentId, callback)
+  {
+    contentId = parseContentId(contentId)
+    const tx = this.base.api.tx.dataObjectStorageRegistry.addRelationship(contentId);
+
+    const subscribed = [['dataObjectStorageRegistry', 'DataObjectStorageRelationshipAdded']];
+    return await this.base.signAndSend(accountId, tx, 3, subscribed, callback);
+  }
+
+  /*
+   * Get storage relationship for contentId
+   * Returns { relationship, relationshipId } for the first relationship
+   * owned by accountId, or {} if none is found.
+   */
+  async getStorageRelationshipAndId(accountId, contentId) {
+    contentId = parseContentId(contentId)
+    let rids = await this.base.api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId);
+
+    while(rids.length) {
+      const relationshipId = rids.shift();
+      let relationship = await this.base.api.query.dataObjectStorageRegistry.relationships(relationshipId);
+      relationship = relationship.unwrap();
+      if (relationship.storage_provider.eq(decodeAddress(accountId))) {
+        return ({ relationship, relationshipId });
+      }
+    }
+
+    return {};
+  }
+
+  // Create a relationship and resolve with the new relationship id taken
+  // from the first matching event.
+  // NOTE(review): if no matching event ever fires, this promise never
+  // settles — confirm callers can tolerate that.
+  async createAndReturnStorageRelationship(accountId, contentId)
+  {
+    contentId = parseContentId(contentId)
+    return new Promise(async (resolve, reject) => {
+      try {
+        await this.createStorageRelationship(accountId, contentId, (events) => {
+          events.forEach((event) => {
+            resolve(event[1].DataObjectStorageRelationshipId);
+          });
+        });
+      } catch (err) {
+        reject(err);
+      }
+    });
+  }
+
+  /*
+   * Toggle ready state for DOSR.
+   */
+  async toggleStorageRelationshipReady(accountId, dosrId, ready)
+  {
+    var tx = ready
+      ? this.base.api.tx.dataObjectStorageRegistry.setRelationshipReady(dosrId)
+      : this.base.api.tx.dataObjectStorageRegistry.unsetRelationshipReady(dosrId);
+    return await this.base.signAndSend(accountId, tx);
+  }
+
+  // Return all content ids known to the on-chain data directory.
+  async getKnownContentIds() {
+    return this.base.api.query.dataDirectory.knownContentIds();
+  }
+}
+
+module.exports = {
+  AssetsApi: AssetsApi,
+}

+ 90 - 0
storage-node/packages/runtime-api/balances.js

@@ -0,0 +1,90 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:runtime:balances');
+
+const { IdentitiesApi } = require('@joystream/runtime-api/identities');
+
+/*
+ * Bundle API calls related to account balances.
+ */
+class BalancesApi
+{
+  // Async factory: bind to the base RuntimeApi and initialize.
+  static async create(base)
+  {
+    const ret = new BalancesApi();
+    ret.base = base;
+    await ret.init();
+    return ret;
+  }
+
+  // No-op initializer (account_file parameter is currently unused).
+  async init(account_file)
+  {
+    debug('Init');
+  }
+
+  /*
+   * Return true/false if the account has the minimum balance given.
+   * `min` may be a plain number (compared via cmpn) or a BN-like value
+   * (compared via cmp).
+   */
+  async hasMinimumBalanceOf(accountId, min)
+  {
+    const balance = await this.freeBalance(accountId);
+    if (typeof min === 'number') {
+      return balance.cmpn(min) >= 0;
+    }
+    else {
+      return balance.cmp(min) >= 0;
+    }
+  }
+
+  /*
+   * Return the account's current free balance.
+   */
+  async freeBalance(accountId)
+  {
+    // decodeAddress(.., true) tolerates an already-decoded public key
+    const decoded = this.base.identities.keyring.decodeAddress(accountId, true);
+    return this.base.api.query.balances.freeBalance(decoded);
+  }
+
+  /*
+   * Return the base transaction fee.
+   */
+  baseTransactionFee()
+  {
+    return this.base.api.consts.transactionPayment.transactionBaseFee;
+  }
+
+  /*
+   * Transfer amount currency from one address to another. The sending
+   * address must be an unlocked key pair!
+   */
+  async transfer(from, to, amount)
+  {
+    const decode = require('@polkadot/keyring').decodeAddress;
+    const to_decoded = decode(to, true);
+
+    const tx = this.base.api.tx.balances.transfer(to_decoded, amount);
+    return this.base.signAndSend(from, tx);
+  }
+}
+
+module.exports = {
+  BalancesApi: BalancesApi,
+}

+ 64 - 0
storage-node/packages/runtime-api/discovery.js

@@ -0,0 +1,64 @@
+'use strict';
+
+const debug = require('debug')('joystream:runtime:discovery');
+
+/*
+ * Add discovery related functionality to the substrate API.
+ */
+class DiscoveryApi
+{
+  // Async factory: bind to the base RuntimeApi and initialize.
+  static async create(base)
+  {
+    const ret = new DiscoveryApi();
+    ret.base = base;
+    await ret.init();
+    return ret;
+  }
+
+  // No-op initializer (account_file parameter is currently unused).
+  async init(account_file)
+  {
+    debug('Init');
+  }
+
+  /*
+   * Get Bootstrap endpoints
+   */
+  async getBootstrapEndpoints() {
+    return this.base.api.query.discovery.bootstrapEndpoints()
+  }
+
+  /*
+   * Get AccountInfo of an accountId, or null if no record exists.
+   */
+  async getAccountInfo(accountId) {
+    const decoded = this.base.identities.keyring.decodeAddress(accountId, true)
+    const info = await this.base.api.query.discovery.accountInfoByAccountId(decoded)
+    // Not an Option so we use default value check to know if info was found
+    return info.expires_at.eq(0) ? null : info
+  }
+
+  /*
+   * Set AccountInfo of an accountId.
+   * Throws unless the account is a registered actor (role account).
+   */
+  async setAccountInfo(accountId, ipnsId, ttl) {
+    const isActor = await this.base.identities.isActor(accountId)
+    if (isActor) {
+      const tx = this.base.api.tx.discovery.setIpnsId(ipnsId, ttl)
+      return this.base.signAndSend(accountId, tx)
+    } else {
+      throw new Error('Cannot set AccountInfo for non actor account')
+    }
+  }
+
+  /*
+   * Clear AccountInfo of an accountId
+   */
+  async unsetAccountInfo(accountId) {
+    var tx = this.base.api.tx.discovery.unsetIpnsId()
+    return this.base.signAndSend(accountId, tx)
+  }
+}
+
+module.exports = {
+  DiscoveryApi: DiscoveryApi,
+}

+ 235 - 0
storage-node/packages/runtime-api/identities.js

@@ -0,0 +1,235 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const path = require('path');
+const fs = require('fs');
+// const readline = require('readline');
+
+const debug = require('debug')('joystream:runtime:identities');
+
+const { Keyring } = require('@polkadot/keyring');
+// const { Null } = require('@polkadot/types/primitive');
+const util_crypto = require('@polkadot/util-crypto');
+
+// const { _ } = require('lodash');
+
+/*
+ * Add identity management to the substrate API.
+ *
+ * This loosely groups: accounts, key management, and membership.
+ */
+class IdentitiesApi
+{
+  // Async factory: bind to the base RuntimeApi and initialize with the
+  // optional account file / passphrase options.
+  static async create(base, {account_file, passphrase, canPromptForPassphrase})
+  {
+    const ret = new IdentitiesApi();
+    ret.base = base;
+    await ret.init(account_file, passphrase, canPromptForPassphrase);
+    return ret;
+  }
+
+  async init(account_file, passphrase, canPromptForPassphrase)
+  {
+    debug('Init');
+
+    // Create keyring
+    this.keyring = new Keyring();
+
+    this.canPromptForPassphrase = canPromptForPassphrase || false;
+
+    // Load account file, if possible. Failure is non-fatal: this.key
+    // simply stays undefined.
+    try {
+      this.key = await this.loadUnlock(account_file, passphrase);
+    } catch (err) {
+      debug('Error loading account file:', err.message);
+    }
+  }
+
+  /*
+   * Load a key file and unlock it if necessary.
+   */
+  async loadUnlock(account_file, passphrase)
+  {
+    const fullname = path.resolve(account_file);
+    debug('Initializing key from', fullname);
+    const key = this.keyring.addFromJson(require(fullname));
+    await this.tryUnlock(key, passphrase);
+    debug('Successfully initialized with address', key.address);
+    return key;
+  }
+
+  /*
+   * Try to unlock a key if it isn't already unlocked.
+   * passphrase should be supplied as argument.
+   * Tries, in order: empty passphrase, the supplied passphrase, then an
+   * interactive prompt (if enabled). Throws if all fail.
+   */
+  async tryUnlock(key, passphrase)
+  {
+    if (!key.isLocked) {
+      debug('Key is not locked, not attempting to unlock')
+      return;
+    }
+
+    // First try with an empty passphrase - for convenience
+    try {
+      key.decodePkcs8('');
+
+      if (passphrase) {
+        debug('Key was not encrypted, supplied passphrase was ignored');
+      }
+
+      return;
+    } catch (err) {
+      // pass
+    }
+
+    // Then with supplied passphrase
+    try {
+      debug('Decrypting with supplied passphrase');
+      key.decodePkcs8(passphrase);
+      return;
+    } catch (err) {
+      // pass
+    }
+
+    // If that didn't work, ask for a passphrase if appropriate
+    if (this.canPromptForPassphrase) {
+      passphrase = await this.askForPassphrase(key.address);
+      key.decodePkcs8(passphrase);
+      return
+    }
+
+    throw new Error('invalid passphrase supplied');
+  }
+
+  /*
+   * Ask for a passphrase
+   */
+  askForPassphrase(address)
+  {
+    // Query for passphrase
+    const prompt = require('password-prompt');
+    return prompt(`Enter passphrase for ${address}: `, { required: false });
+  }
+
+  /*
+   * Return true if the account is a member
+   */
+  async isMember(accountId)
+  {
+    const memberIds = await this.memberIdsOf(accountId); // return array of member ids
+    return memberIds.length > 0 // true if at least one member id exists for the account
+  }
+
+  /*
+   * Return true if the account is an actor/role account
+   */
+  async isActor(accountId)
+  {
+    const decoded = this.keyring.decodeAddress(accountId);
+    const actor = await this.base.api.query.actors.actorByAccountId(decoded)
+    return actor.isSome
+  }
+
+  /*
+   * Return the member IDs of an account
+   */
+  async memberIdsOf(accountId)
+  {
+    const decoded = this.keyring.decodeAddress(accountId);
+    return await this.base.api.query.members.memberIdsByRootAccountId(decoded);
+  }
+
+  /*
+   * Return the first member ID of an account, or undefined if not a member.
+   */
+  async firstMemberIdOf(accountId)
+  {
+    const decoded = this.keyring.decodeAddress(accountId);
+    let ids = await this.base.api.query.members.memberIdsByRootAccountId(decoded);
+    return ids[0]
+  }
+
+  /*
+   * Create a new key for the given role *name*. If no name is given,
+   * default to 'storage'. Returns the new (unlocked) key pair, which is
+   * also added to this.keyring.
+   */
+  async createRoleKey(accountId, role)
+  {
+    role = role || 'storage';
+
+    // Generate new key pair
+    const keyPair = util_crypto.naclKeypairFromRandom();
+
+    // Encode to an address.
+    const addr = this.keyring.encodeAddress(keyPair.publicKey);
+    debug('Generated new key pair with address', addr);
+
+    // Add to key wring. We set the meta to identify the account as
+    // a role key.
+    const meta = {
+      name: `${role} role account for ${accountId}`,
+    };
+
+    const createPair = require('@polkadot/keyring/pair').default;
+    const pair = createPair('ed25519', keyPair, meta);
+
+    this.keyring.addPair(pair);
+
+    return pair;
+  }
+
+  /*
+   * Export a key pair to JSON. Will ask for a passphrase.
+   */
+  async exportKeyPair(accountId)
+  {
+    const passphrase = await this.askForPassphrase(accountId);
+
+    // Produce JSON output
+    return this.keyring.toJson(accountId, passphrase);
+  }
+
+  /*
+   * Export a key pair and write it to a JSON file with the account ID as the
+   * name. Returns the path of the written file. File mode is 0600 since it
+   * contains key material.
+   */
+  async writeKeyPairExport(accountId, prefix)
+  {
+    // Generate JSON
+    const data = await this.exportKeyPair(accountId);
+
+    // Write JSON
+    var filename = `${data.address}.json`;
+    if (prefix) {
+      const path = require('path');
+      filename = path.resolve(prefix, filename);
+    }
+    fs.writeFileSync(filename, JSON.stringify(data), {
+      encoding: 'utf8',
+      mode: 0o600,
+    });
+
+    return filename;
+  }
+}
+
+module.exports = {
+  IdentitiesApi: IdentitiesApi,
+}

+ 291 - 0
storage-node/packages/runtime-api/index.js

@@ -0,0 +1,291 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:runtime:base');
+
+const { registerJoystreamTypes } = require('@joystream/types');
+const { ApiPromise, WsProvider } = require('@polkadot/api');
+
+const { IdentitiesApi } = require('@joystream/runtime-api/identities');
+const { BalancesApi } = require('@joystream/runtime-api/balances');
+const { RolesApi } = require('@joystream/runtime-api/roles');
+const { AssetsApi } = require('@joystream/runtime-api/assets');
+const { DiscoveryApi } = require('@joystream/runtime-api/discovery');
+const AsyncLock = require('async-lock');
+
+/*
+ * Initialize runtime (substrate) API and keyring.
+ */
+class RuntimeApi
+{
+  // Async factory: construct and initialize a RuntimeApi.
+  static async create(options)
+  {
+    const runtime_api = new RuntimeApi();
+    await runtime_api.init(options || {});
+    return runtime_api;
+  }
+
+  // Connect to the node, register types, and build the sub-APIs
+  // (identities, balances, roles, assets, discovery).
+  async init(options)
+  {
+    debug('Init');
+
+    options = options || {};
+
+    // Register joystream types
+    registerJoystreamTypes();
+
+    const provider = new WsProvider(options.provider_url || 'ws://localhost:9944');
+
+    // Create the API instance
+    this.api = await ApiPromise.create({ provider });
+
+    // Per-account lock used to serialize nonce handling in signAndSend.
+    this.asyncLock = new AsyncLock();
+
+    // Keep track locally of account nonces.
+    this.nonces = {};
+
+    // Ok, create individual APIs
+    this.identities = await IdentitiesApi.create(this, {
+      account_file: options.account_file,
+      passphrase: options.passphrase,
+      canPromptForPassphrase: options.canPromptForPassphrase
+    });
+    this.balances = await BalancesApi.create(this);
+    this.roles = await RolesApi.create(this);
+    this.assets = await AssetsApi.create(this);
+    this.discovery = await DiscoveryApi.create(this);
+  }
+
+  disconnect()
+  {
+    this.api.disconnect();
+  }
+
+  // Run `func` while holding the per-account lock for `account_id`.
+  executeWithAccountLock(account_id, func) {
+    return this.asyncLock.acquire(`${account_id}`, func);
+  }
+
+  /*
+   * Wait for an event. Filters out any events that don't match the module and
+   * event name.
+   *
+   * The result of the Promise is an array containing first the full event
+   * name, and then the event fields as an object.
+   */
+  async waitForEvent(module, name)
+  {
+    return this.waitForEvents([[module, name]]);
+  }
+
+  // Filter `events` down to those matching any [module, method] pair in
+  // `subscribed`, and map each to [full_name, payload] where payload maps
+  // each parameter's type name to its value.
+  _matchingEvents(subscribed, events)
+  {
+    debug(`Number of events: ${events.length}; subscribed to ${subscribed}`);
+
+    const filtered = events.filter((record) => {
+      const { event, phase } = record;
+
+      // Show what we are busy with
+      debug(`\t${event.section}:${event.method}:: (phase=${phase.toString()})`);
+      debug(`\t\t${event.meta.documentation.toString()}`);
+
+      // Skip events we're not interested in.
+      const matching = subscribed.filter((value) => {
+        return event.section == value[0] && event.method == value[1];
+      });
+      return matching.length > 0;
+    });
+    debug(`Filtered: ${filtered.length}`);
+
+    const mapped = filtered.map((record) => {
+      const { event } = record;
+      const types = event.typeDef;
+
+      // Loop through each of the parameters, displaying the type and data
+      const payload = {};
+      event.data.forEach((data, index) => {
+        debug(`\t\t\t${types[index].type}: ${data.toString()}`);
+        payload[types[index].type] = data;
+      });
+
+      const full_name = `${event.section}.${event.method}`;
+      return [full_name, payload];
+    });
+    debug('Mapped', mapped);
+
+    return mapped;
+  }
+
+  /*
+   * Same as waitForEvent, but filter on multiple events. The parameter is an
+   * array of arrays containing module and name. Calling waitForEvent is
+   * identical to calling this with [[module, name]].
+   *
+   * Returns the first matched event *only*.
+   */
+  async waitForEvents(subscribed)
+  {
+    return new Promise((resolve, reject) => {
+      this.api.query.system.events((events) => {
+        const matches = this._matchingEvents(subscribed, events);
+        if (matches && matches.length) {
+          resolve(matches);
+        }
+      });
+    });
+  }
+
+  /*
+   * Nonce-aware signAndSend(). Also allows you to use the accountId instead
+   * of the key, making calls a little simpler. Will lock to prevent concurrent
+   * calls so correct nonce is used.
+   *
+   * If the subscribed events are given, and a callback as well, then the
+   * callback is invoked with matching events.
+   * Resolves/rejects when the transaction finalizes (or errors).
+   */
+  async signAndSend(accountId, tx, attempts, subscribed, callback)
+  {
+    // Prepare key
+    const from_key = this.identities.keyring.getPair(accountId);
+
+    if (from_key.isLocked) {
+      throw new Error('Must unlock key before using it to sign!');
+    }
+
+    const finalizedPromise = newExternallyControlledPromise();
+
+    let unsubscribe = await this.executeWithAccountLock(accountId,  async () => {
+      // Try to get the next nonce to use
+      let nonce = this.nonces[accountId];
+
+      let incrementNonce = () => {
+        // only increment once
+        incrementNonce = () => {}; // turn it into a no-op
+        nonce = nonce.addn(1);
+        this.nonces[accountId] = nonce;
+      }
+
+      // If the nonce isn't available, get it from chain.
+      if (!nonce) {
+        // current nonce
+        nonce = await this.api.query.system.accountNonce(accountId);
+        debug(`Got nonce for ${accountId} from chain: ${nonce}`);
+      }
+
+      return new Promise((resolve, reject) => {
+        debug('Signing and sending tx');
+        // send(statusUpdates) returns a function for unsubscribing from status updates
+        let unsubscribe = tx.sign(from_key, { nonce })
+          .send(({events = [], status}) => {
+            debug(`TX status: ${status.type}`);
+
+            // Whatever events we get, process them if there's someone interested.
+            // It is critical that this event handling doesn't prevent
+            try {
+              if (subscribed && callback) {
+                const matched = this._matchingEvents(subscribed, events);
+                debug('Matching events:', matched);
+                if (matched.length) {
+                  callback(matched);
+                }
+              }
+            } catch(err) {
+              debug(`Error handling events ${err.stack}`)
+            }
+
+            // We want to release lock as early as possible, sometimes Ready status
+            // doesn't occur, so we do it on Broadcast instead
+            if (status.isReady) {
+              debug('TX Ready.');
+              incrementNonce();
+              resolve(unsubscribe); //releases lock
+            } else if (status.isBroadcast) {
+              debug('TX Broadcast.');
+              incrementNonce();
+              resolve(unsubscribe); //releases lock
+            } else if (status.isFinalized) {
+              debug('TX Finalized.');
+              finalizedPromise.resolve(status)
+            } else if (status.isFuture) {
+              // comes before ready.
+              // does that mean it will remain in mempool or in api internal queue?
+              // nonce was set in the future. Treating it as an error for now.
+              debug('TX Future!')
+              // nonce is likely out of sync, delete it so we reload it from chain on next attempt
+              delete this.nonces[accountId];
+              const err = new Error('transaction nonce set in future');
+              finalizedPromise.reject(err);
+              reject(err);
+            }
+
+            /* why don't we see these status updates on local devchain (single node)
+            isUsurped
+            isBroadcast
+            isDropped
+            isInvalid
+            */
+          })
+          .catch((err) => {
+            // 1014 error: Most likely you are sending transaction with the same nonce,
+            // so it assumes you want to replace existing one, but the priority is too low to replace it (priority = fee = len(encoded_transaction) currently)
+            // Remember this can also happen if in the past we sent a tx with a future nonce, and the current nonce
+            // now matches it.
+            if (err) {
+              const errstr = err.toString();
+              // not the best way to check error code.
+              // https://github.com/polkadot-js/api/blob/master/packages/rpc-provider/src/coder/index.ts#L52
+              if (errstr.indexOf('Error: 1014:') < 0 && // low priority
+                  errstr.indexOf('Error: 1010:') < 0) // bad transaction
+              {
+                // Error but not nonce related. (bad arguments maybe)
+                debug('TX error', err);
+              } else {
+                // nonce is likely out of sync, delete it so we reload it from chain on next attempt
+                delete this.nonces[accountId];
+              }
+            }
+
+            finalizedPromise.reject(err);
+            // releases lock
+            reject(err);
+          });
+      });
+    })
+
+    // when does it make sense to manually unsubscribe?
+    // at this point unsubscribe.then and unsubscribe.catch have been deleted
+    // unsubscribe(); // don't unsubscribe if we want to wait for additional status
+    // updates to know when the tx has been finalized
+    return finalizedPromise.promise;
+  }
+}
+
+module.exports = {
+  RuntimeApi: RuntimeApi,
+}
+
+// Build a promise whose resolve/reject handles are exposed to the caller,
+// so it can be settled from outside the executor.
+function newExternallyControlledPromise () {
+  // externally controlled promise
+  let resolve, reject;
+  const promise = new Promise((res, rej) => {
+    resolve = res;
+    reject = rej;
+  });
+  return ({resolve, reject, promise});
+}

+ 53 - 0
storage-node/packages/runtime-api/package.json

@@ -0,0 +1,53 @@
+{
+  "name": "@joystream/runtime-api",
+  "version": "0.1.0",
+  "description": "Runtime API abstraction for Joystream Storage Node",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org/"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node",
+    "runtime"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "scripts": {
+    "test": "mocha 'test/**/*.js' --exit",
+    "lint": "eslint '**/*.js' --ignore-pattern 'test/**/*.js'"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "eslint": "^5.13.0",
+    "mocha": "^5.2.0",
+    "sinon": "^7.3.2",
+    "sinon-chai": "^3.3.0",
+    "temp": "^0.9.0"
+  },
+  "dependencies": {
+    "@joystream/types": "^0.10.0",
+    "@polkadot/api": "^0.96.1",
+    "async-lock": "^1.2.0",
+    "lodash": "^4.17.11",
+    "password-prompt": "^1.1.2"
+  }
+}

+ 186 - 0
storage-node/packages/runtime-api/roles.js

@@ -0,0 +1,186 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:runtime:roles');
+
+const { Null, u64 } = require('@polkadot/types');
+
+const { _ } = require('lodash');
+
+/*
+ * Add role related functionality to the substrate API.
+ */
+class RolesApi
+{
+  /*
+   * Async factory: attaches this API to the shared runtime `base` object.
+   * Use this instead of the constructor so that init() can be awaited.
+   */
+  static async create(base)
+  {
+    const ret = new RolesApi();
+    ret.base = base;
+    await ret.init();
+    return ret;
+  }
+
+  async init()
+  {
+    debug('Init');
+
+    // Constants
+    this.ROLE_STORAGE = 'StorageProvider'; // new u64(0x00);
+  }
+
+  /*
+   * Raises errors if the given account ID is not valid for staking as the given
+   * role. The role should be one of the ROLE_* constants above.
+   * Returns true when the account is a member with sufficient free balance.
+   */
+  async checkAccountForStaking(accountId, role)
+  {
+    // Default to the storage role when none is given.
+    role = role || this.ROLE_STORAGE;
+
+    if (!await this.base.identities.isMember(accountId)) {
+      const msg = `Account with id "${accountId}" is not a member!`;
+      debug(msg);
+      throw new Error(msg);
+    }
+
+    if (!await this.hasBalanceForRoleStaking(accountId, role)) {
+      const msg = `Account with id "${accountId}" does not have sufficient free balance for role staking!`;
+      debug(msg);
+      throw new Error(msg);
+    }
+
+    debug(`Account with id "${accountId}" is a member with sufficient free balance, able to proceed.`);
+    return true;
+  }
+
+  /*
+   * Returns the required balance for staking for a role:
+   * min_stake + entry_request_fee + base transaction fee.
+   * Throws if the role is not defined on chain.
+   */
+  async requiredBalanceForRoleStaking(role)
+  {
+    const params = await this.base.api.query.actors.parameters(role);
+    if (params.isNone) {
+      throw new Error(`Role ${role} is not defined!`);
+    }
+    const result = params.raw.min_stake
+      .add(params.raw.entry_request_fee)
+      .add(await this.base.balances.baseTransactionFee());
+    return result;
+  }
+
+  /*
+   * Returns true/false if the given account has the balance required for
+   * staking for the given role.
+   */
+  async hasBalanceForRoleStaking(accountId, role)
+  {
+    const required = await this.requiredBalanceForRoleStaking(role);
+    return await this.base.balances.hasMinimumBalanceOf(accountId, required);
+  }
+
+  /*
+   * Transfer enough funds to allow the recipient to stake for the given role.
+   */
+  async transferForStaking(from, to, role)
+  {
+    const required = await this.requiredBalanceForRoleStaking(role);
+    return await this.base.balances.transfer(from, to, required);
+  }
+
+  /*
+   * Return current accounts holding a role, as an array of address strings.
+   */
+  async accountIdsByRole(role)
+  {
+    const ids = await this.base.api.query.actors.accountIdsByRole(role);
+    return ids.map(id => id.toString());
+  }
+
+  /*
+   * Returns the number of slots available for a role
+   * (configured maximum minus currently active actors).
+   */
+  async availableSlotsForRole(role)
+  {
+    let params = await this.base.api.query.actors.parameters(role);
+    if (params.isNone) {
+      throw new Error(`Role ${role} is not defined!`);
+    }
+    params = params.unwrap();
+    const slots = params.max_actors;
+    const active = await this.accountIdsByRole(role);
+    return (slots.subn(active.length)).toNumber();
+  }
+
+  /*
+   * Send a role application.
+   * - The role account must not be a member, but have sufficient funds for
+   *   staking.
+   * - The member account must be a member.
+   *
+   * After sending this application, the member account will have role request
+   * in the 'My Requests' tab of the app.
+   */
+  async applyForRole(roleAccountId, role, memberAccountId)
+  {
+    const memberId = await this.base.identities.firstMemberIdOf(memberAccountId);
+    // Loose equality is used deliberately: matches both null and undefined.
+    if (memberId == undefined) {
+      throw new Error('Account is not a member!');
+    }
+
+    const tx = this.base.api.tx.actors.roleEntryRequest(role, memberId);
+    return await this.base.signAndSend(roleAccountId, tx);
+  }
+
+  /*
+   * Check whether the given account is currently occupying a role.
+   * NOTE(review): the `role` parameter is accepted but never used — the
+   * chain query looks up the actor by account ID only. Confirm whether a
+   * per-role check was intended here.
+   */
+  async checkForRole(roleAccountId, role)
+  {
+    const actor = await this.base.api.query.actors.actorByAccountId(roleAccountId);
+    return !_.isEqual(actor.raw, new Null());
+  }
+
+  /*
+   * Same as checkForRole(), but if the account is not currently occupying the
+   * role, wait for the appropriate `actors.Staked` event to be emitted.
+   * NOTE(review): if the first Staked event is for a *different* account,
+   * this promise never settles (the else branch neither resolves, rejects,
+   * nor re-subscribes) — consider handling that case.
+   */
+  async waitForRole(roleAccountId, role)
+  {
+    if (await this.checkForRole(roleAccountId, role)) {
+      return true;
+    }
+
+    return new Promise((resolve, reject) => {
+      this.base.waitForEvent('actors', 'Staked').then((values) => {
+        const name = values[0][0];
+        const payload = values[0][1];
+
+        if (payload.AccountId == roleAccountId) {
+          resolve(true);
+        } else {
+          // reject() ?
+        }
+      });
+    });
+  }
+}
+
+module.exports = {
+  RolesApi: RolesApi,
+}

+ 52 - 0
storage-node/packages/runtime-api/test/assets.js

@@ -0,0 +1,52 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const sinon = require('sinon');
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+describe('Assets', () => {
+  var api;
+  var key;
+  before(async () => {
+    api = await RuntimeApi.create();
+    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+  });
+
+  it('returns DataObjects for a content ID', async () => {
+    const obj = await api.assets.getDataObject('foo');
+    expect(obj.isNone).to.be.true;
+  });
+
+  it('can check the liaison for a DataObject', async () => {
+    expect(async _ => {
+      await api.assets.checkLiaisonForDataObject('foo', 'bar');
+    }).to.throw;
+  });
+
+  // Needs properly staked accounts
+  it('can accept content');
+  it('can reject content');
+  it('can create a storage relationship for content');
+  it('can create a storage relationship for content and return it');
+  it('can toggle a storage relatsionship to ready state');
+});

+ 55 - 0
storage-node/packages/runtime-api/test/balances.js

@@ -0,0 +1,55 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const sinon = require('sinon');
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+describe('Balances', () => {
+  var api;
+  var key;
+  before(async () => {
+    api = await RuntimeApi.create();
+    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+  });
+
+  it('returns free balance for an account', async () => {
+    const balance = await api.balances.freeBalance(key.address);
+    // Should be exactly zero
+    expect(balance.cmpn(0)).to.equal(0);
+  });
+
+  it('checks whether a minimum balance exists', async () => {
+    // A minimum of 0 should exist, but no more.
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 0)).to.be.true;
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 1)).to.be.false;
+  });
+
+  it('returns the base transaction fee of the chain', async () => {
+    const fee = await api.balances.baseTransactionFee();
+    // >= 0 comparison works
+    expect(fee.cmpn(0)).to.be.at.least(0);
+  });
+
+  // TODO implementable only with accounts with balance
+  it('can transfer funds');
+});

+ 1 - 0
storage-node/packages/runtime-api/test/data/edwards.json

@@ -0,0 +1 @@
+{"address":"5HDnLpCjdbUBR6eyuz5geBJWzoZdXmWFXahEYrLg44rvToCK","encoded":"0x475f0c37c7893517f5a93c88b81208346211dfa9b0fd09e08bfd34f6e14da5468f48c6d9b0b4cbfbd7dd03a6f0730f5ee9a01b0cd30265e6b1b9fb652958889d5b174624568f49f3a671b8c330c3920814e938383749aa9046366ae6881281e0d053a9aa913a54ad53bd2f1dcf6c26e6b476495ea058832a36f122d09c18154577f951298ac72e6f471a6dca41e4d5741ed5db966001ae5ffd2b99d4c7","encoding":{"content":["pkcs8","ed25519"],"type":"xsalsa20-poly1305","version":"2"},"meta":{"name":"Edwards keypair for testing","whenCreated":1558974074691}}

+ 1 - 0
storage-node/packages/runtime-api/test/data/edwards_unlocked.json

@@ -0,0 +1 @@
+{"address":"5EZxbX2arChvhYL7cEgSybJL3kzEeuPqqNYyLqRBJxZx7Mao","encoded":"0x3053020101300506032b65700422042071f2096e5857177f03768478d0c006f60d1ee684f14feaede0f9c17e139e65586ec832e5db75112b0a4585b6a9ffe58fa056e5b1228f02663e9e64743e65c9a5a1230321006ec832e5db75112b0a4585b6a9ffe58fa056e5b1228f02663e9e64743e65c9a5","encoding":{"content":["pkcs8","ed25519"],"type":"none","version":"2"},"meta":{"name":"Unlocked keypair for testing","whenCreated":1558975434890}}

+ 1 - 0
storage-node/packages/runtime-api/test/data/schnorr.json

@@ -0,0 +1 @@
+{"address":"5GjxHjq9rtcxsfgcNswLGjYNRu8UmHAnYq7KfACE3yTjfYVk","encoded":"0x3dd5965708bbf4316c431ba8274b885a6017d82bc8bcb8c8b02e00c0c90356fb8a379f4be44bd454c76799d9d09bda7fc03c695340e23818f60cfcf00f3b48f42fb8d362e74f261354e99fff9cb2f91d899a722f0051db74d985602f3e95e49a99c73f77951022f98a99bb90981e3c1f60a5642ed583cd65b0161f8461d30f8b320bcd98cd7fb7ec71886d76825696d6fc11ac14a7391f2cdcb2b721d4","encoding":{"content":["pkcs8","sr25519"],"type":"xsalsa20-poly1305","version":"2"},"meta":{"name":"Schnorr keypair for testing","whenCreated":1558974091206}}

+ 106 - 0
storage-node/packages/runtime-api/test/identities.js

@@ -0,0 +1,106 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const sinon = require('sinon');
+const temp = require('temp').track();
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+describe('Identities', () => {
+  var api;
+  before(async () => {
+    api = await RuntimeApi.create({ canPromptForPassphrase: true });
+  });
+
+  it('creates role keys', async () => {
+    const key = await api.identities.createRoleKey('foo', 'bar');
+    expect(key).to.have.property('type', 'ed25519');
+    expect(key.meta.name).to.include('foo');
+    expect(key.meta.name).to.include('bar');
+  });
+
+  it('imports keys', async () => {
+    // Unlocked keys can be imported without asking for a passphrase
+    await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+
+    // Edwards and schnorr keys should unlock
+    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
+    await api.identities.loadUnlock('test/data/edwards.json');
+    await api.identities.loadUnlock('test/data/schnorr.json');
+    passphrase_stub.restore();
+
+    // Except if the wrong passphrase is given
+    const passphrase_stub_bad = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'bad');
+    expect(async () => {
+      await api.identities.loadUnlock('test/data/edwards.json');
+    }).to.throw;
+    passphrase_stub_bad.restore();
+  });
+
+  it('knows about membership', async () => {
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    const addr = key.address;
+
+    // Without seeding the runtime with data, we can only verify that the API
+    // reacts well in the absence of membership
+    expect(await api.identities.isMember(addr)).to.be.false;
+    const member_id = await api.identities.firstMemberIdOf(addr);
+
+    expect(member_id).to.be.undefined;
+  });
+
+  it('exports keys', async () => {
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+
+    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
+    const exported = await api.identities.exportKeyPair(key.address);
+    passphrase_stub.restore();
+
+    expect(exported).to.have.property('address');
+    expect(exported.address).to.equal(key.address);
+
+    expect(exported).to.have.property('encoding');
+
+    expect(exported.encoding).to.have.property('version', '2');
+
+    expect(exported.encoding).to.have.property('content');
+    expect(exported.encoding.content).to.include('pkcs8');
+    expect(exported.encoding.content).to.include('ed25519');
+
+    expect(exported.encoding).to.have.property('type');
+    expect(exported.encoding.type).to.include('salsa20');
+  });
+
+  it('writes key export files', async () => {
+    const prefix = temp.mkdirSync('joystream-runtime-api-test');
+
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+
+    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
+    const filename = await api.identities.writeKeyPairExport(key.address, prefix);
+    passphrase_stub.restore();
+
+    const fs = require('fs');
+    const stat = fs.statSync(filename);
+    expect(stat.isFile()).to.be.true;
+  });
+});

+ 31 - 0
storage-node/packages/runtime-api/test/index.js

@@ -0,0 +1,31 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+describe('RuntimeApi', () => {
+  it('can be created', async () => {
+    const api = await RuntimeApi.create();
+    api.disconnect();
+  });
+});

+ 67 - 0
storage-node/packages/runtime-api/test/roles.js

@@ -0,0 +1,67 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const sinon = require('sinon');
+
+const { RuntimeApi } = require('@joystream/runtime-api');
+
+describe('Roles', () => {
+  var api;
+  var key;
+  before(async () => {
+    api = await RuntimeApi.create();
+    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+  });
+
+  it('returns the required balance for role staking', async () => {
+    const amount = await api.roles.requiredBalanceForRoleStaking(api.roles.ROLE_STORAGE);
+
+    // Effectively checks that the role is at least defined.
+    expect(amount.cmpn(0)).to.be.above(0);
+  });
+
+  it('returns whether an account has funds for role staking', async () => {
+    expect(await api.roles.hasBalanceForRoleStaking(key.address, api.roles.ROLE_STORAGE)).to.be.false;
+  });
+
+  it('returns accounts for a role', async () => {
+    const accounts = await api.roles.accountIdsByRole(api.roles.ROLE_STORAGE);
+    // The chain may have accounts configured, so go for the bare minimum in
+    // expectations.
+    expect(accounts).to.have.lengthOf.above(-1);
+  });
+
+  it('can check whether an account fulfils requirements for role staking', async () => {
+    // NOTE(review): RolesApi defines checkAccountForStaking (no "Role"), so
+    // this call would raise a TypeError; the synchronous expect(...).to.throw
+    // never awaits the async function, so the mismatch is not detected.
+    expect(async _ => {
+      await api.roles.checkAccountForRoleStaking(key.address, api.roles.ROLE_STORAGE);
+    }).to.throw;
+  });
+
+  it('can check for an account to have a role', async () => {
+    expect(await api.roles.checkForRole(key.address, api.roles.ROLE_STORAGE)).to.be.false;
+  });
+
+  // TODO requires complex setup, and may change in the near future.
+  it('transfers funds for staking');
+  it('can apply for a role');
+  it('can wait for an account to have a role');
+});

+ 1 - 0
storage-node/packages/storage/.eslintrc.js

@@ -0,0 +1 @@
+../../.eslintrc.js

+ 23 - 0
storage-node/packages/storage/README.md

@@ -0,0 +1,23 @@
+# Summary
+
+This package contains an abstraction over the storage backend of colossus.
+
+Its main purpose is to allow testing the storage subsystem without having to
+run a blockchain node.
+
+In the current version, the storage is backed by IPFS. In order to run tests,
+you have to also run an [IPFS node](https://dist.ipfs.io/#go-ipfs).
+
+## Testing
+
+Note also that tests do not finish. This is due to a design flaw in the
+[IPFS HTTP Client](https://github.com/ipfs/js-ipfs-http-client) npm package.
+In that package, requests can seemingly never time out - this client library
+patches over this by using [bluebird's cancellable Promises](http://bluebirdjs.com/docs/api/cancellation.html),
+so that at least this package can provide a timeout. In the client library,
+however, that still leaves some dangling requests, meaning node cannot
+exit cleanly.
+
+For this reason, we're passing the `--exit` flag to `mocha` in the `test`
+script - run `yarn run test` and you should have a well behaving test suite.
+Run `mocha` directly, without this flag, and you may be disappointed.

+ 132 - 0
storage-node/packages/storage/filter.js

@@ -0,0 +1,132 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:storage:filter');
+
+const DEFAULT_MAX_FILE_SIZE = 500 * 1024 * 1024;
+const DEFAULT_ACCEPT_TYPES = [
+  'video/*',
+  'audio/*',
+  'image/*',
+];
+const DEFAULT_REJECT_TYPES = [];
+
+// Configuration defaults
+// Fills in max_size and the mime accept/reject lists on `config.filter`,
+// returning the completed filter object.
+function config_defaults(config)
+{
+  const filter =  config.filter || {};
+
+  // We accept zero as switching this check off.
+  // NOTE(review): `typeof x` never returns 'null' (null yields 'object'),
+  // so the second comparison is dead code; a null max_size falls through
+  // and disables the size check. Confirm whether that is intended.
+  if (typeof filter.max_size == 'undefined' || typeof filter.max_size == 'null') {
+    filter.max_size = DEFAULT_MAX_FILE_SIZE;
+  }
+
+  // Figure out mime types
+  filter.mime = filter.mime || [];
+  filter.mime.accept = filter.mime.accept || DEFAULT_ACCEPT_TYPES;
+  filter.mime.reject = filter.mime.reject || DEFAULT_REJECT_TYPES;
+
+  return filter;
+}
+
+// Mime type matching
+// Matches `provided` against `acceptable`; a trailing '*' makes the match a
+// prefix match (e.g. 'video/*' accepts 'video/mp4'), otherwise exact equality.
+function mime_matches(acceptable, provided)
+{
+  if (acceptable.endsWith('*')) {
+    // Wildcard match
+    const prefix = acceptable.slice(0, acceptable.length - 1);
+    debug('wildcard matching', provided, 'against', acceptable, '/', prefix);
+    return provided.startsWith(prefix);
+  }
+  // Exact match
+  debug('exact matching', provided, 'against', acceptable);
+  return provided == acceptable;
+}
+
+// Returns true iff `provided` matches at least one pattern in `accept`
+// AND matches no pattern in `reject` (reject takes precedence).
+function mime_matches_any(accept, reject, provided)
+{
+  // Pass accept
+  var accepted = false;
+  for (var item of accept) {
+    if (mime_matches(item, provided)) {
+      debug('Content type matches', item, 'which is acceptable.');
+      accepted = true;
+      break;
+    }
+  }
+  if (!accepted) {
+    return false;
+  }
+
+  // Don't pass reject
+  // (re-declaring `var item` is harmless: var is function-scoped)
+  for (var item of reject) {
+    if (mime_matches(item, provided)) {
+      debug('Content type matches', item, 'which is unacceptable.');
+      return false;
+    }
+  }
+
+  return true;
+}
+
+/**
+ * Simple filter function deciding whether or not to accept a content
+ * upload.
+ *
+ * This is a straightforward implementation of
+ * https://github.com/Joystream/storage-node-joystream/issues/14 - but should
+ * most likely be improved on in future.
+ *
+ * Returns an HTTP-style status object: { code: 200 } on acceptance, or
+ * { code, message } with 411/413/415 describing why the upload was refused.
+ **/
+function filter_func(config, headers, mime_type)
+{
+  const filter = config_defaults(config);
+
+  // Enforce maximum file upload size
+  if (filter.max_size) {
+    const size = parseInt(headers['content-length'], 10);
+    // `!size` also covers NaN from a missing or garbled header; note that a
+    // legitimate 'Content-Length: 0' is rejected with 411 as well.
+    if (!size) {
+      return {
+        code: 411,
+        message: 'A Content-Length header is required.',
+      };
+    }
+
+    if (size > filter.max_size) {
+      return {
+        code: 413,
+        message: 'The provided Content-Length is too large.',
+      };
+    }
+  }
+
+  // Enforce mime type based filtering
+  if (!mime_matches_any(filter.mime.accept, filter.mime.reject, mime_type)) {
+    return {
+      code: 415,
+      message: 'Content has an unacceptable MIME type.',
+    };
+  }
+
+  return {
+    code: 200,
+  };
+}
+
+module.exports = filter_func;

+ 25 - 0
storage-node/packages/storage/index.js

@@ -0,0 +1,25 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+// Re-export the Storage class as this package's public API surface.
+const { Storage } = require('./storage');
+
+module.exports = {
+  Storage: Storage,
+};

+ 50 - 0
storage-node/packages/storage/package.json

@@ -0,0 +1,50 @@
+{
+  "name": "@joystream/storage",
+  "version": "0.1.0",
+  "description": "Storage management code for Joystream Storage Node",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node",
+    "storage"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "scripts": {
+    "test": "mocha --exit 'test/**/*.js'",
+    "lint": "eslint '**/*.js' --ignore-pattern 'test/**/*.js'"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "chai-as-promised": "^7.1.1",
+    "eslint": "^5.13.0",
+    "mocha": "^5.2.0"
+  },
+  "dependencies": {
+    "bluebird": "^3.5.5",
+    "file-type": "^11.0.0",
+    "ipfs-http-client": "^32.0.1",
+    "temp": "^0.9.0"
+  }
+}

+ 406 - 0
storage-node/packages/storage/storage.js

@@ -0,0 +1,406 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const { Transform } = require('stream');
+const fs = require('fs');
+
+const debug = require('debug')('joystream:storage:storage');
+
+const Promise = require('bluebird');
+Promise.config({
+  cancellation: true,
+});
+
+const file_type = require('file-type');
+const ipfs_client = require('ipfs-http-client');
+const temp = require('temp').track();
+const _ = require('lodash');
+
// Default request timeout; imposed on top of the IPFS client, because the
// client doesn't seem to care.
const DEFAULT_TIMEOUT = 30 * 1000;

// Default/dummy resolution implementation: passes the Joystream content ID
// through unchanged, i.e. treats it as the IPFS CID itself. Suitable for
// development/testing only.
const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
  debug('Warning: Default resolution returns original CID', original);
  return original;
};

// Default file info if nothing could be detected.
const DEFAULT_FILE_INFO = {
  mime_type: 'application/octet-stream',
  ext: 'bin',
};
+
+
/*
 * Normalize the detection result of the file-type library into our own
 * { mime_type, ext } shape. The library reports its MIME type in a `mime`
 * field, which we rename; "fileType" is a weird name anyway, since we're
 * really looking at MIME types. If detection yielded nothing, the shared
 * DEFAULT_FILE_INFO is returned so downstream code always has a value.
 */
function fix_file_info(info)
{
  if (!info) {
    return DEFAULT_FILE_INFO;
  }

  info.mime_type = info.mime;
  delete info.mime;
  return info;
}

/*
 * Move a stream's `fileType` detection result into a normalized `file_info`
 * property (see fix_file_info) and return the same stream.
 */
function fix_file_info_on_stream(stream)
{
  stream.file_info = fix_file_info(stream.fileType);
  delete stream.fileType;
  return stream;
}
+
+
/*
 * Internal Transform stream that buffers writes into a temporary file,
 * attempts MIME type detection while data streams through, and offers
 * commit() / cleanup() for promoting the temporary file to IPFS or
 * discarding it.
 *
 * Events:
 * - 'file_info'  emitted once MIME detection succeeds; payload is
 *                { mime_type, ext }.
 * - 'committed'  emitted with the IPFS hash once commit() succeeds.
 * - 'error'      emitted when committing to IPFS fails.
 */
class StorageWriteStream extends Transform
{
  // storage: the owning Storage instance (provides the IPFS client).
  // options: standard stream.Transform options.
  constructor(storage, options)
  {
    options = _.clone(options || {});

    super(options);

    this.storage = storage;

    // Create temp target.
    this.temp = temp.createWriteStream();
    // Detection buffer: we accumulate just enough bytes for file_type.
    this.buf = Buffer.alloc(0);
  }

  _transform(chunk, encoding, callback)
  {
    // Deal with buffers only
    if (typeof chunk === 'string') {
      chunk = Buffer.from(chunk);
    }

    // Logging this all the time is too verbose
    // debug('Writing temporary chunk', chunk.length, chunk);
    this.temp.write(chunk);

    // Try to detect file type during streaming. Note: compare the buffer's
    // *length* against minimumBytes — the previous code compared the Buffer
    // object itself to a number, which coerces to NaN once the buffer holds
    // data, so in-stream detection never actually ran.
    if (!this.file_info && this.buf.length < file_type.minimumBytes) {
      this.buf = Buffer.concat([this.buf, chunk]);

      if (this.buf.length >= file_type.minimumBytes) {
        const info = file_type(this.buf);
        // No info? We can try again at the end of the stream.
        if (info) {
          this.file_info = fix_file_info(info);
          this.emit('file_info', this.file_info);
        }
      }
    }

    callback(null);
  }

  _flush(callback)
  {
    debug('Flushing temporary stream:', this.temp.path);
    this.temp.end();

    // Since we're finished, we can try to detect the file type again.
    // NOTE(review): this fallback detection is asynchronous, so the
    // 'file_info' event may fire after 'finish' — confirm that listeners
    // are attached before ending the stream.
    if (!this.file_info) {
      const read = fs.createReadStream(this.temp.path);
      file_type.stream(read)
        .then((stream) => {
          this.file_info = fix_file_info_on_stream(stream).file_info;
          this.emit('file_info', this.file_info);
        })
        .catch((err) => {
          debug('Error trying to detect file type at end-of-stream:', err);
        });
    }

    callback(null);
  }

  /*
   * Commit this stream to the IPFS backend and pin the resulting content.
   * Emits 'committed' with the IPFS hash on success, 'error' on failure.
   */
  commit()
  {
    // Create a read stream from the temp file.
    if (!this.temp) {
      throw new Error('Cannot commit a temporary stream that does not exist. Did you call cleanup()?');
    }

    debug('Committing temporary stream: ', this.temp.path);
    this.storage.ipfs.addFromFs(this.temp.path)
      .then(async (result) => {
        const hash = result[0].hash;
        debug('Stream committed as', hash);
        this.emit('committed', hash);
        await this.storage.ipfs.pin.add(hash);
      })
      .catch((err) => {
        debug('Error committing stream', err);
        this.emit('error', err);
      });
  }

  /*
   * Clean up temporary data.
   */
  cleanup()
  {
    debug('Cleaning up temporary file: ', this.temp.path);
    fs.unlink(this.temp.path, () => {}); // Ignore errors
    delete this.temp;
  }
}
+
+
+
+/*
+ * Manages the storage backend interaction. This provides a Promise-based API.
+ *
+ * Usage:
+ *
+ *   const store = await Storage.create({ ... });
+ *   store.open(...);
+ */
+class Storage
+{
+  /*
+   * Create a Storage instance. Options include:
+   *
+   * - an `ipfs` property, which is itself a hash containing
+   *   - `connect_options` to be passed to the IPFS client library for
+   *     connecting to an IPFS node.
+   * - a `resolve_content_id` function, which translates Joystream
+   *   content IDs to IPFS content IDs or vice versa. The default is to
+   *   not perform any translation, which is not practical for a production
+   *   system, but serves its function during development and testing. The
+   *   function must be asynchronous.
+   * - a `timeout` parameter, defaulting to DEFAULT_TIMEOUT. After this time,
+   *   requests to the IPFS backend time out.
+   *
+   * Functions in this class accept an optional timeout parameter. If the
+   * timeout is given, it is used - otherwise, the `option.timeout` value
+   * above is used.
+   */
+  static create(options)
+  {
+    const storage = new Storage();
+    storage._init(options);
+    return storage;
+  }
+
+  _init(options)
+  {
+    this.options = _.clone(options || {});
+    this.options.ipfs = this.options.ipfs || {};
+
+    this._timeout = this.options.timeout || DEFAULT_TIMEOUT;
+    this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID;
+
+    this.ipfs = ipfs_client(this.options.ipfs.connect_options);
+
+    this.pins = {};
+
+    this.ipfs.id((err, identity) => {
+      if (err) {
+        debug(`Warning IPFS daemon not running: ${err.message}`);
+      } else {
+        debug(`IPFS node is up with identity: ${identity.id}`);
+      }
+    });
+  }
+
+  /*
+   * Uses bluebird's timeout mechanism to return a Promise that times out after
+   * the given timeout interval, and tries to execute the given operation within
+   * that time.
+   */
+  async _with_specified_timeout(timeout, operation)
+  {
+    return new Promise(async (resolve, reject) => {
+      try {
+        resolve(await new Promise(operation));
+      } catch (err) {
+        reject(err);
+      }
+    }).timeout(timeout || this._timeout);
+  }
+
+  /*
+   * Resolve content ID with timeout.
+   */
+  async _resolve_content_id_with_timeout(timeout, content_id)
+  {
+    return await this._with_specified_timeout(timeout, async (resolve, reject) => {
+      try {
+        resolve(await this._resolve_content_id(content_id));
+      } catch (err) {
+        reject(err);
+      }
+    });
+  }
+
+  /*
+   * Stat a content ID.
+   */
+  async stat(content_id, timeout)
+  {
+    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+
+    return await this._with_specified_timeout(timeout, (resolve, reject) => {
+      this.ipfs.files.stat(`/ipfs/${resolved}`, { withLocal: true }, (err, res) => {
+        if (err) {
+          reject(err);
+          return;
+        }
+        resolve(res);
+      });
+    });
+  }
+
+  /*
+   * Return the size of a content ID.
+   */
+  async size(content_id, timeout)
+  {
+    const stat = await this.stat(content_id, timeout);
+    return stat.size;
+  }
+
+  /*
+   * Opens the specified content in read or write mode, and returns a Promise
+   * with the stream.
+   *
+   * Read streams will contain a file_info property, with:
+   *  - a `mime_type` field providing the file's MIME type, or a default.
+   *  - an `ext` property, providing a file extension suggestion, or a default.
+   *
+   * Write streams have a slightly different flow, in order to allow for MIME
+   * type detection and potential filtering. First off, they are written to a
+   * temporary location, and only committed to the backend once their
+   * `commit()` function is called.
+   *
+   * When the commit has finished, a `committed` event is emitted, which
+   * contains the IPFS backend's content ID.
+   *
+   * Write streams also emit a `file_info` event during writing. It is passed
+   * the `file_info` field as described above. Event listeners may now opt to
+   * abort the write or continue and eventually `commit()` the file. There is
+   * an explicit `cleanup()` function that removes temporary files as well,
+   * in case comitting is not desired.
+   */
+  async open(content_id, mode, timeout)
+  {
+    if (mode != 'r' && mode != 'w') {
+      throw Error('The only supported modes are "r", "w" and "a".');
+    }
+
+    // Write stream
+    if (mode === 'w') {
+      return await this._create_write_stream(content_id, timeout);
+    }
+
+    // Read stream - with file type detection
+    return await this._create_read_stream(content_id, timeout);
+  }
+
+  async _create_write_stream(content_id)
+  {
+    // IPFS wants us to just dump a stream into its storage, then returns a
+    // content ID (of its own).
+    // We need to instead return a stream immediately, that we eventually
+    // decorate with the content ID when that's available.
+    return new Promise((resolve, reject) => {
+      const stream = new StorageWriteStream(this);
+      resolve(stream);
+    });
+  }
+
+  async _create_read_stream(content_id, timeout)
+  {
+    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+
+    var found = false;
+    return await this._with_specified_timeout(timeout, (resolve, reject) => {
+      const ls = this.ipfs.getReadableStream(resolved);
+      ls.on('data', async (result) => {
+        if (result.path === resolved) {
+          found = true;
+
+          const ft_stream = await file_type.stream(result.content);
+          resolve(fix_file_info_on_stream(ft_stream));
+        }
+      });
+      ls.on('error', (err) => {
+        ls.end();
+        debug(err);
+        reject(err);
+      });
+      ls.on('end', () => {
+        if (!found) {
+          const err = new Error('No matching content found for', content_id);
+          debug(err);
+          reject(err);
+        }
+      });
+      ls.resume();
+    });
+  }
+
+  /*
+   * Synchronize the given content ID
+   */
+  async synchronize(content_id)
+  {
+    const resolved = await this._resolve_content_id_with_timeout(this._timeout, content_id);
+
+    if (this.pins[resolved]) {
+      return;
+    }
+
+    debug(`Pinning ${resolved}`);
+
+    // This call blocks until file is retreived..
+    this.ipfs.pin.add(resolved, {quiet: true, pin: true}, (err, res) => {
+      if (err) {
+        debug(`Error Pinning: ${resolved}`)
+        delete this.pins[resolved];
+      } else {
+        debug(`Pinned ${resolved}`);
+      }
+    });
+  }
+}
+
+module.exports = {
+  Storage: Storage,
+};

+ 230 - 0
storage-node/packages/storage/test/storage.js

@@ -0,0 +1,230 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const chai = require('chai');
+const chai_as_promised = require('chai-as-promised');
+chai.use(chai_as_promised);
+const expect = chai.expect;
+
+const fs = require('fs');
+
+const { Storage } = require('@joystream/storage');
+
+const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/;
+
/*
 * Helper: write `contents` into `store` under `content_id`, commit the
 * resulting write stream, and invoke `callback` with the backend hash once
 * the commit completes.
 */
function write(store, content_id, contents, callback)
{
  store.open(content_id, 'w')
    .then((stream) => {
      stream.on('finish', () => stream.commit());
      stream.on('committed', callback);

      stream.write(contents);
      stream.end();
    })
    .catch((err) => {
      expect.fail(err);
    });
}
+
/*
 * Drain all currently-buffered chunks from the stream synchronously and
 * return them as a single Buffer. Only data already buffered in the stream
 * is collected; chunks arriving later are not waited for.
 */
function read_all(stream)
{
  const chunks = [];
  for (let chunk = stream.read(); chunk; chunk = stream.read()) {
    chunks.push(chunk);
  }
  return Buffer.concat(chunks);
}
+
+
/*
 * Create an object in a fresh Storage instance whose content ID resolver
 * always maps to that object's own backend hash, then invoke
 * callback(store, hash) once the object has been committed.
 */
function create_known_object(content_id, contents, callback)
{
  let hash;
  const store = Storage.create({
    resolve_content_id: () => hash,
  });

  write(store, content_id, contents, (the_hash) => {
    hash = the_hash;
    callback(store, hash);
  });
}
+
/*
 * Integration test suite for the Storage class. These tests talk to a live
 * IPFS daemon via the default connect options.
 */
describe('storage/storage', () => {
  let storage;
  before(async () => {
    storage = await Storage.create({ timeout: 1900 });
  });

  describe('open()', () => {
    it('can write a stream', (done) => {
      write(storage, 'foobar', 'test-content', (hash) => {
        expect(hash).to.not.be.undefined;
        expect(hash).to.match(IPFS_CID_REGEX);
        done();
      });
    });

    it('detects the MIME type of a write stream', (done) => {
      const contents = fs.readFileSync('../../storage-node_new.svg');

      create_known_object('foobar', contents, (store, hash) => {
        let file_info;
        store.open('mime-test', 'w')
          .then((stream) => {
            stream.on('file_info', (info) => {
              // Could filter & abort here now, but we're just going to set this,
              // and expect it to be set later...
              file_info = info;
            });

            stream.on('finish', () => {
              stream.commit();
            });
            stream.on('committed', () => {
              // ... if file_info is not set here, there's an issue.
              expect(file_info).to.have.property('mime_type', 'application/xml');
              expect(file_info).to.have.property('ext', 'xml');

              done();
            });

            stream.write(contents);
            stream.end();
          })
          .catch((err) => {
            expect.fail(err);
          });
      });
    });

    it('can read a stream', (done) => {
      const contents = 'test-for-reading';
      create_known_object('foobar', contents, (store, hash) => {
        store.open('foobar', 'r')
          .then((stream) => {
            const data = read_all(stream);
            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);
            done();
          })
          .catch((err) => {
            expect.fail(err);
          });
      });
    });

    // Problems with this test: reading the stream is stalling, so we are
    // not always able to read the full stream for the test to make sense.
    // Disabling for now. Look at the read_all() implementation... maybe
    // that is where the fault is?
    xit('detects the MIME type of a read stream', (done) => {
      const contents = fs.readFileSync('../../storage-node_new.svg');
      create_known_object('foobar', contents, (store, hash) => {
        store.open('foobar', 'r')
          .then((stream) => {
            const data = read_all(stream);
            expect(contents.length).to.equal(data.length);
            expect(Buffer.compare(data, contents)).to.equal(0);
            expect(stream).to.have.property('file_info');

            // application/xml+svg would be better, but this is good-ish.
            expect(stream.file_info).to.have.property('mime_type', 'application/xml');
            expect(stream.file_info).to.have.property('ext', 'xml');
            done();
          })
          .catch((err) => {
            expect.fail(err);
          });
      });
    });

    it('provides default MIME type for read streams', (done) => {
      const contents = 'test-for-reading';
      create_known_object('foobar', contents, (store, hash) => {
        store.open('foobar', 'r')
          .then((stream) => {
            const data = read_all(stream);
            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);

            expect(stream.file_info).to.have.property('mime_type', 'application/octet-stream');
            expect(stream.file_info).to.have.property('ext', 'bin');
            done();
          })
          .catch((err) => {
            expect.fail(err);
          });
      });
    });
  });

  describe('stat()', () => {
    it('times out for unknown content', async () => {
      const content = Buffer.from('this-should-not-exist');
      const x = await storage.ipfs.add(content, { onlyHash: true });
      const hash = x[0].hash;

      // Await the chai-as-promised assertion; without `await` the promise
      // is dropped and the test passes even if stat() succeeds.
      await expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out');
    });

    it('returns stats for a known object', (done) => {
      const content = 'stat-test';
      const expected_size = content.length;
      create_known_object('foobar', content, (store, hash) => {
        // notify(done) reports assertion failures to mocha instead of
        // leaving the assertion promise dangling.
        expect(store.stat(hash)).to.eventually.have.property('size', expected_size)
          .notify(done);
      });
    });
  });

  describe('size()', () => {
    it('times out for unknown content', async () => {
      const content = Buffer.from('this-should-not-exist');
      const x = await storage.ipfs.add(content, { onlyHash: true });
      const hash = x[0].hash;

      // See above: the assertion must be awaited to have any effect.
      await expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out');
    });

    it('returns the size of a known object', (done) => {
      create_known_object('foobar', 'stat-test', (store, hash) => {
        // 15 is the size IPFS reports for this object — presumably the 9
        // content bytes plus block framing; confirm against the daemon.
        expect(store.size(hash)).to.eventually.equal(15)
          .notify(done);
      });
    });
  });
});

+ 0 - 0
storage-node/packages/storage/test/template/bar


+ 0 - 0
storage-node/packages/storage/test/template/foo/baz


+ 1 - 0
storage-node/packages/storage/test/template/quux

@@ -0,0 +1 @@
+foo/baz

+ 1 - 0
storage-node/packages/util/.eslintrc.js

@@ -0,0 +1 @@
+../../.eslintrc.js

+ 12 - 0
storage-node/packages/util/README.md

@@ -0,0 +1,12 @@
+Summary
+=======
+
+This package contains general utility functions for running the colossus
+storage node.
+
+* `lru` contains an in-memory least-recently-used cache abstraction.
+* `fs/*` contains helpers for resolving path names and walking file system
+  hierarchies.
+* `pagination` contains utility functions for paginating APIs.
+* `ranges` contains functions for dealing with `Range` headers in download
+  requests.

+ 67 - 0
storage-node/packages/util/fs/resolve.js

@@ -0,0 +1,67 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const path = require('path');
+
+const debug = require('debug')('joystream:util:fs:resolve');
+
/*
 * Resolves name relative to base, throwing an error if the given
 * name wants to break out of the base directory.
 *
 * The problem is, we want to use node's functions so we don't add
 * platform dependent code, but node's path.resolve() function is a little
 * useless for our case because it does not care about breaking out of
 * a base directory.
 */
function resolve(base, name)
{
  debug('Resolving', name);

  // In a first step, we strip leading slashes from the name, because they're
  // just saying "relative to the base" in our use case.
  let res = name.replace(/^\/+/, '');
  debug('Stripped', res);

  // At this point resolving the path should stay within the base we specify.
  // We do specify a base other than the file system root, because otherwise
  // everything is always relative to the file system root.
  const test_base = path.join(path.sep, 'test-base');
  debug('Test base is', test_base);
  res = path.resolve(test_base, res);
  debug('Resolved', res);

  // Ok, we can check for violations now. A plain prefix comparison is not
  // enough: "/test-basefoo" starts with "/test-base" but lies *outside* of
  // it, so we require the resolved path to either equal the test base or
  // continue with a path separator.
  if (res !== test_base && !res.startsWith(test_base + path.sep)) {
    throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`);
  }

  // If we strip the base now, we have the relative name resolved.
  res = res.slice(test_base.length + 1);
  debug('Relative', res);

  // Finally we can join this relative name to the requested base.
  res = path.join(base, res);
  debug('Result', res);
  return res;
}


module.exports = resolve;

+ 148 - 0
storage-node/packages/util/fs/walk.js

@@ -0,0 +1,148 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+
+const debug = require('debug')('joystream:util:fs:walk');
+
/*
 * Walks a file system hierarchy through the given 'archive' (an fs-like
 * object providing readdir/lstat/readlink), reporting every entry through a
 * callback. Completion is tracked with a simple pending counter: every
 * directory read and every directory entry increments it, and it is
 * decremented when that item has been fully reported (and, for directories,
 * recursed into). When the counter drops to zero, cb(null) is invoked once
 * more to signal the end of the walk.
 */
class Walker
{
  // archive: fs-like object (readdir/lstat/readlink).
  // base:    root directory of the walk; reported names are relative to it.
  // cb:      callback(err, relname, lstat, [linktarget]).
  constructor(archive, base, cb)
  {
    this.archive = archive;
    this.base = base;
    // Offset past the base prefix (plus a separator, unless the base already
    // ends in one), used to turn absolute paths into base-relative names.
    this.slice_offset = this.base.length;
    if (this.base[this.slice_offset - 1] != '/') {
      this.slice_offset += 1;
    }
    this.cb = cb;
    // Number of outstanding items; the walk is done when this hits zero.
    this.pending = 0;
  }

  /*
   * Mark the named item as finished by decrementing the pending count, and
   * signal end-of-walk via cb(null) when nothing is outstanding anymore.
   */
  check_pending(name)
  {
    // Decrease pending count again.
    this.pending -= 1;
    debug('Finishing', name, 'decreases pending to', this.pending);
    if (!this.pending) {
      debug('No more pending.');
      this.cb(null);
    }
  }

  /*
   * Helper function for walk; split out because it's used in two places.
   * Reports one entry to the callback, recurses into directories, and then
   * marks the entry as finished.
   */
  report_and_recurse(relname, fname, lstat, linktarget)
  {
    // First report the value
    this.cb(null, relname, lstat, linktarget);

    // Recurse
    if (lstat.isDirectory()) {
      this.walk(fname);
    }

    this.check_pending(fname);
  }


  // Read one directory and process each of its entries asynchronously.
  // NOTE(review): on readdir/lstat/readlink errors, cb(err) is invoked but
  // the pending count is not decremented, so cb may still fire again from
  // later completions — confirm callers tolerate multiple invocations.
  walk(dir)
  {
    // This is a little hacky - since readdir() may take a while, and we don't
    // want the pending count to drop to zero before it's finished, we bump
    // it up and down while readdir() does its job.
    // What this achieves is that when processing a parent directory finishes
    // before walk() on a subdirectory could finish its readdir() call, the
    // pending count still has a value.
    // Note that in order not to hang on empty directories, we need to
    // explicitly check the pending count in cases when there are no files.
    this.pending += 1;
    this.archive.readdir(dir, (err, files) => {
      if (err) {
        this.cb(err);
        return;
      }

      // More pending data.
      this.pending += files.length;
      debug('Reading', dir, 'bumps pending to', this.pending);

      files.forEach((name) => {
        const fname = path.resolve(dir, name);
        this.archive.lstat(fname, (err2, lstat) => {
          if (err2) {
            this.cb(err2);
            return;
          }

          // The base is always prefixed, so a simple string slice should do.
          const relname = fname.slice(this.slice_offset);

          // We have a symbolic link? Resolve it.
          if (lstat.isSymbolicLink()) {
            this.archive.readlink(fname, (err3, linktarget) => {
              if (err3) {
                this.cb(err3);
                return;
              }

              this.report_and_recurse(relname, fname, lstat, linktarget);
            });
          }
          else {
            this.report_and_recurse(relname, fname, lstat);
          }
        });
      });

      this.check_pending(dir);
    });
  }
}
+
+
/*
 * Recursively walk a file system hierarchy (in undefined order), returning all
 * entries via the callback(err, relname, lstat, [linktarget]). The name relative
 * to the base is returned.
 *
 * You can optionally pass an 'archive', i.e. a class or module that responds to
 * file system like functions. If you don't, then the 'fs' module is assumed as
 * default.
 *
 * The callback is invoked one last time without data to signal the end of data.
 */
module.exports = function(base, archive, cb)
{
  // Support the two-argument form walk(base, cb): the archive defaults to
  // the 'fs' module.
  if (!cb) {
    cb = archive;
    archive = fs;
  }

  const root = path.resolve(base);
  new Walker(archive, root, cb).walk(root);
};

+ 126 - 0
storage-node/packages/util/lru.js

@@ -0,0 +1,126 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const DEFAULT_CAPACITY = 100;
+
+const debug = require('debug')('joystream:util:lru');
+
/*
 * Simple least recently used cache.
 *
 * Entries live in two maps: `store` holds the cached values, and `access`
 * holds a last-used timestamp per key, which _prune() sorts by in order to
 * evict the least recently used entries once capacity is exceeded.
 */
class LRUCache
{
  // capacity: maximum number of entries retained after a put().
  constructor(capacity = DEFAULT_CAPACITY)
  {
    this.capacity = capacity;
    this.clear();
  }

  /*
   * Return the entry with the given key, and update its usage.
   */
  get(key)
  {
    // Refresh recency based on key *presence*, not value truthiness; the
    // previous `if (val)` check failed to refresh entries holding falsy
    // values such as 0, '' or false.
    if (this.store.has(key)) {
      this.access.set(key, Date.now());
    }
    return this.store.get(key);
  }

  /*
   * Return true if the key is in the cache, false otherwise.
   */
  has(key)
  {
    return this.store.has(key);
  }

  /*
   * Put a value into the cache, evicting least recently used entries if
   * the capacity is exceeded.
   */
  put(key, value)
  {
    this.store.set(key, value);
    this.access.set(key, Date.now());
    this._prune();
  }

  /*
   * Delete a value from the cache.
   */
  del(key)
  {
    this.store.delete(key);
    this.access.delete(key);
  }

  /*
   * Current size of the cache
   */
  size()
  {
    return this.store.size;
  }

  /*
   * Clear the LRU cache entirely.
   */
  clear()
  {
    this.store = new Map();
    this.access = new Map();
  }

  /*
   * Internal pruning function: evicts the oldest entries until the store is
   * back within capacity.
   *
   * NOTE: access timestamps have millisecond resolution, so the eviction
   * order among entries touched within the same millisecond is arbitrary.
   */
  _prune()
  {
    debug('About to prune; have', this.store.size, 'and capacity is', this.capacity);

    // Sort keys by ascending last-access time (oldest first).
    const sorted = Array.from(this.access.entries());
    sorted.sort((first, second) => {
      if (first[1] == second[1]) {
        return 0;
      }
      return (first[1] < second[1] ? -1 : 1);
    });
    debug('Sorted keys are:', sorted);

    debug('Have to prune', this.store.size - this.capacity, 'items.');
    let idx = 0;
    const to_prune = [];
    while (idx < sorted.length && to_prune.length < (this.store.size - this.capacity)) {
      to_prune.push(sorted[idx][0]);
      ++idx;
    }

    to_prune.forEach((key) => {
      this.store.delete(key);
      this.access.delete(key);
    });
    debug('Size after pruning', this.store.size);
  }
}
+
// Public surface of the module.
module.exports = { LRUCache };

+ 48 - 0
storage-node/packages/util/package.json

@@ -0,0 +1,48 @@
+{
+  "name": "@joystream/util",
+  "version": "0.1.0",
+  "description": "Utility code for Joystream Storage Node",
+  "author": "Joystream",
+  "homepage": "https://github.com/Joystream/joystream",
+  "bugs": {
+    "url": "https://github.com/Joystream/joystream/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream.git"
+  },
+  "license": "GPL-3.0",
+  "contributors": [
+    {
+      "name": "Joystream",
+      "url": "https://joystream.org"
+    }
+  ],
+  "keywords": [
+    "joystream",
+    "storage",
+    "node",
+    "utility"
+  ],
+  "os": [
+    "darwin",
+    "linux"
+  ],
+  "engines": {
+    "node": ">=10.15.3"
+  },
+  "scripts": {
+    "test": "mocha 'test/**/*.js'",
+    "lint": "eslint '**/*.js' --ignore-pattern 'test/**/*.js'"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "eslint": "^5.13.0",
+    "mocha": "^5.2.0",
+    "temp": "^0.9.0"
+  },
+  "dependencies": {
+    "stream-buffers": "^3.0.2",
+    "uuid": "^3.3.2"
+  }
+}

+ 163 - 0
storage-node/packages/util/pagination.js

@@ -0,0 +1,163 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const debug = require('debug')('joystream:middleware:pagination');
+
+// Pagination definitions
const _api_defs = {
  // OpenAPI parameter components; merged into a spec under
  // #/components/parameters by the openapi() export of this module.
  parameters: {
    // ?limit= : page size, constrained to 1..50 with a default of 20.
    paginationLimit: {
      name: 'limit',
      in: 'query',
      description: 'Number of items per page.',
      required: false,
      schema: {
        type: 'integer',
        minimum: 1,
        maximum: 50,
        default: 20,
      },
    },
    // ?offset= : zero-based offset into the result set.
    paginationOffset: {
      name: 'offset',
      in: 'query',
      description: 'Page number (offset)',
      schema: {
        type: 'integer',
        minimum: 0,
      },
    },
  },
  // OpenAPI schema components; merged under #/components/schemas.
  schemas: {
    // Pagination link envelope; only 'self' is guaranteed to be present,
    // the other links are added by paginate() when applicable.
    PaginationInfo: {
      type: 'object',
      required: ['self'],
      properties: {
        'self': {
          type: 'string',
        },
        next: {
          type: 'string',
        },
        prev: {
          type: 'string',
        },
        first: {
          type: 'string',
        },
        last: {
          type: 'string',
        },
      },
    },
  },
};
+
+/**
+ * Silly pagination because it's faster than getting other modules to work.
+ *
+ * Usage:
+ * - apiDoc.parameters = pagination.parameters
+ *   -> Validates pagination parameters
+ * - apiDoc.responses.200.schema.pagination = pagination.response
+ *   -> Generates pagination info on response
+ * - paginate(req, res, [last_offset])
+ *   -> add (valid) pagination fields to response object
+ *      If last_offset is given, create a last link with that offset
+ **/
+module.exports = {
+
+  // Add pagination parameters and pagination info responses.
+  parameters: [
+    { '$ref': '#/components/parameters/paginationLimit' },
+    { '$ref': '#/components/parameters/paginationOffset' },
+
+  ],
+
+  response: {
+    '$ref': '#/components/schema/PaginationInfo'
+  },
+
+  // Update swagger/openapi specs with our own parameters and definitions
+  openapi: function(api)
+  {
+    api.components = api.components || {};
+    api.components.parameters = { ...api.components.parameters || {} , ..._api_defs.parameters };
+    api.components.schemas = { ...api.components.schemas || {}, ..._api_defs.schemas };
+    return api;
+  },
+
+  // Pagination function
+  paginate: function(req, res, last_offset)
+  {
+    // Skip if the response is not an object.
+    if (Object.prototype.toString.call(res) != "[object Object]") {
+      debug('Cannot paginate non-objects.');
+      return res;
+    }
+
+    // Defaults for parameters
+    var offset = req.query.offset || 0;
+    var limit = req.query.limit || 20;
+    debug('Create pagination links from offset=' + offset, 'limit=' + limit);
+
+    // Parse current url
+    const url = require('url');
+    var req_url = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl);
+    var params = new url.URLSearchParams(req_url.query);
+
+    // Pagination object
+    var pagination = {
+      'self': req_url.href,
+    }
+
+    var prev = offset - limit;
+    if (prev >= 0) {
+      params.set('offset', prev);
+      req_url.search = params.toString();
+      pagination['prev'] = url.format(req_url);
+
+    }
+
+    var next = offset + limit;
+    if (next >= 0) {
+      params.set('offset', next);
+      req_url.search = params.toString();
+      pagination['next'] = url.format(req_url);
+    }
+
+    if (last_offset) {
+      params.set('offset', last_offset);
+      req_url.search = params.toString();
+      pagination['last'] = url.format(req_url);
+    }
+
+    // First
+    params.set('offset', 0);
+    req_url.search = params.toString();
+    pagination['first'] = url.format(req_url);
+
+    debug('pagination', pagination);
+
+    // Now set pagination values in response.
+    res.pagination = pagination;
+    return res;
+  },
+};

+ 492 - 0
storage-node/packages/util/ranges.js

@@ -0,0 +1,492 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const uuid = require('uuid');
+const stream_buf = require('stream-buffers');
+
+const debug = require('debug')('joystream:util:ranges');
+
+/*
+ * Range parsing
+ */
+
+/*
+ * Parse a range string, e.g. '0-100' or '-100' or '0-'. Return the values
+ * in an array of int or undefined (if not provided).
+ */
function _parse_range(range)
{
  // Accepts 'start-end', 'start-', '-suffix' or '*'.
  var matches = range.match(/^(\d+-\d+|\d+-|-\d+|\*)$/u);
  if (!matches) {
    throw new Error(`Not a valid range: ${range}`);
  }

  // Split into endpoints; '*' and an omitted endpoint map to undefined.
  var vals = matches[1].split('-').map((v) => {
    return v === '*' || v === '' ? undefined : parseInt(v, 10);
  });

  // Reject only truly inverted ranges. Equal endpoints such as '0-0' select
  // a single byte and are valid per RFC 7233 (last-byte-pos must be >=
  // first-byte-pos), so they must not be refused here.
  if (vals[1] < vals[0]) {
    throw new Error(`Invalid range: start "${vals[0]}" must be before end "${vals[1]}".`);
  }

  return [vals[0], vals[1]];
}
+
+
+/*
+ * Parse a range header value, e.g. unit=ranges, where ranges
 * are a comma separated list of individual ranges, and unit is any
+ * custom unit string. If the unit (and equal sign) are not given, assume
+ * 'bytes'.
+ */
function parse(range_str)
{
  var res = {};
  debug('Parse range header value:', range_str);
  // Overall shape: optional '<unit>=' prefix followed by one or more
  // comma-separated range specs.
  var matches = range_str.match(/^(([^\s]+)=)?((?:(?:\d+-\d+|-\d+|\d+-),?)+)$/u)
  if (!matches) {
    throw new Error(`Not a valid range header: ${range_str}`);
  }

  res.unit = matches[2] || 'bytes';
  res.range_str = matches[3];
  res.ranges = [];

  // Parse individual ranges
  var ranges = []
  res.range_str.split(',').forEach((range) => {
    ranges.push(_parse_range(range));
  });

  // Merge ranges into result. Adjacent or overlapping ranges are collapsed
  // into a single [min(start), max(end)] range.
  // NOTE(review): a new range that overlaps several already-stored ranges is
  // folded into each of them separately, which can leave duplicate or
  // overlapping entries in res.ranges (e.g. '0-5,10-15,4-11') - confirm
  // whether such inputs matter in practice.
  ranges.forEach((new_range) => {
    debug('Found range:', new_range);

    var is_merged = false;
    for (var i in res.ranges) {
      var old_range = res.ranges[i];

      // Skip if the new range is fully separate from the old range.
      if (old_range[1] + 1 < new_range[0] || new_range[1] + 1 < old_range[0]) {
        debug('Range does not overlap with', old_range);
        continue;
      }

      // If we know they're adjacent or overlapping, we construct the
      // merged range from the lower start and the higher end of both
      // ranges.
      var merged = [
        Math.min(old_range[0], new_range[0]),
        Math.max(old_range[1], new_range[1])
      ];
      res.ranges[i] = merged;
      is_merged = true;
      debug('Merged', new_range, 'into', old_range, 'as', merged);
    }

    if (!is_merged) {
      debug('Non-overlapping range!');
      res.ranges.push(new_range);
    }
  });

  // Finally, sort ranges by their start offset; downstream code (e.g.
  // RangeSender) relies on in-order ranges.
  res.ranges.sort((first, second) => {
    if (first[0] === second[0]) {
      // Should not happen due to merging.
      return 0;
    }
    return (first[0] < second[0]) ? -1 : 1;
  });

  debug('Result of parse is', res);
  return res;
}
+
+
+/*
+ * Async version of parse().
+ */
function parseAsync(range_str, cb)
{
  // On success the callback receives the parsed result; on failure it
  // receives (null, error). The callback is invoked inside the try block
  // on purpose: if it throws, the error path is taken as well.
  try {
    const parsed = parse(range_str);
    return cb(parsed);
  } catch (err) {
    return cb(null, err);
  }
}
+
+
+/*
+ * Range streaming
+ */
+
+/*
+ * The class writes parts specified in the options to the response. If no ranges
+ * are specified, the entire stream is written. At the end, the given callback
+ * is invoked - if an error occurred, it is invoked with an error parameter.
+ *
+ * Note that the range implementation can be optimized for streams that support
+ * seeking.
+ *
+ * There's another optimization here for when sizes are given, which is possible
+ * with file system based streams. We'll see how likely that's going to be in
+ * future.
+ */
class RangeSender
{
  /*
   * @param response     HTTP/express response object the data is written to.
   * @param stream       Readable data source. Assumed to emit 'open', 'data',
   *                     'end' and 'error' events (fs.ReadStream-style) -
   *                     TODO confirm for other stream types.
   * @param opts         Options: name, type, size, ranges (output of parse()
   *                     above), download flag.
   * @param end_callback Invoked once streaming finishes; receives an error
   *                     argument on failure.
   */
  constructor(response, stream, opts, end_callback)
  {
    // Options
    this.name = opts.name || 'content.bin';
    this.type = opts.type || 'application/octet-stream';
    this.size = opts.size;
    this.ranges = opts.ranges;
    this.download = opts.download || false;

    // Range handling related state.
    this.read_offset = 0;             // Nothing read so far
    this.range_index = -1;            // No range index yet.
    this.range_boundary = undefined;  // Generate boundary when needed.

    // Event handlers & state
    this.handlers = {};
    this.opened = false;

    debug('RangeSender:', this);
    if (opts.ranges) {
      debug('Parsed ranges:', opts.ranges.ranges);
    }

    // Parameters
    this.response = response;
    this.stream = stream;
    this.opts = opts;
    this.end_callback = end_callback;
  }

  // Stream error handler: send an error status (defaulting to 404) unless
  // headers are already out, then notify the end callback with the error.
  on_error(err)
  {
    // Assume hiding the actual error is best, and default to 404.
    debug('Error:', err);
    if (!this.response.headersSent) {
      this.response.status(err.code || 404).send({
        message: err.message || `File not found: ${this.name}`
      });
    }
    if (this.end_callback) {
      this.end_callback(err);
    }
  }

  // Stream end handler: finish the response and notify the end callback.
  on_end()
  {
    debug('End of stream.');
    this.response.end();
    if (this.end_callback) {
      this.end_callback();
    }
  }


  // **** No ranges
  // Full-content variant of the 'open' handling: set status 200 and the
  // plain download/inline headers.
  on_open_no_range()
  {
    // File got opened, so we can set headers/status
    debug('Open succeeded:', this.name, this.type);
    this.opened = true;

    this.response.status(200);
    this.response.contentType(this.type);
    this.response.header('Accept-Ranges', 'bytes');
    this.response.header('Content-Transfer-Encoding', 'binary');

    if (this.download) {
      this.response.header('Content-Disposition', `attachment; filename="${this.name}"`);
    }
    else {
      this.response.header('Content-Disposition', 'inline');
    }

    if (this.size) {
      this.response.header('Content-Length', this.size);
    }
  }


  on_data_no_range(chunk)
  {
    // Run the 'open' handling lazily on the first chunk in case the stream
    // never emitted an 'open' event.
    if (!this.opened) {
      this.handlers['open']();
    }

    // As simple as it can be.
    this.response.write(Buffer.from(chunk, 'binary'));
  }

  // *** With ranges
  // Advance to the next requested range and compute its part headers.
  // Returns undefined once all ranges are consumed. Mutates the current
  // range in-place to fill in defaults (start 0, end size-1).
  next_range_headers()
  {
    // Next range
    this.range_index += 1;
    if (this.range_index >= this.ranges.ranges.length) {
      debug('Cannot advance range index; we are done.');
      return undefined;
    }

    // Calculate this range's size.
    var range = this.ranges.ranges[this.range_index];
    var total_size;
    if (this.size) {
      total_size = this.size;
    }
    if (typeof range[0] === 'undefined') {
      range[0] = 0;
    }
    if (typeof range[1] === 'undefined') {
      if (this.size) {
        range[1] = total_size - 1;
      }
    }

    var send_size;
    if (typeof range[0] !== 'undefined' && typeof range[1] !== 'undefined') {
      send_size = range[1] - range[0] + 1;
    }

    // Write headers, but since we may be in a multipart situation, write them
    // explicitly to the stream.
    var start = (typeof range[0] === 'undefined') ? '' : `${range[0]}`;
    var end = (typeof range[1] === 'undefined') ? '' : `${range[1]}`;

    // An unknown total length is represented as '*' in Content-Range.
    var size_str;
    if (total_size) {
      size_str = `${total_size}`;
    }
    else {
      size_str = '*';
    }

    var ret = {
      'Content-Range': `bytes ${start}-${end}/${size_str}`,
      'Content-Type': `${this.type}`,
    };
    if (send_size) {
      ret['Content-Length'] = `${send_size}`;
    }
    return ret;
  }


  // Start the next part of a multipart/byteranges response: writes the part
  // boundary and headers, or the closing boundary when no ranges remain.
  // Returns true while there are more ranges to send.
  next_range()
  {
    if (this.ranges.ranges.length == 1) {
      debug('Cannot start new range; only one requested.');
      this.stream.off('data', this.handlers['data']);
      return false;
    }

    var headers = this.next_range_headers();

    if (headers) {
      var header_buf = new stream_buf.WritableStreamBuffer();
      // We start a range with a boundary.
      header_buf.write(`\r\n--${this.range_boundary}\r\n`);

      // Then we write the range headers.
      for (var header in headers) {
        header_buf.write(`${header}: ${headers[header]}\r\n`);
      }
      header_buf.write('\r\n');
      this.response.write(header_buf.getContents());
      debug('New range started.');
      return true;
    }

    // No headers means we're finishing the last range.
    this.response.write(`\r\n--${this.range_boundary}--\r\n`);
    debug('End of ranges sent.');
    this.stream.off('data', this.handlers['data']);
    return false;
  }


  // Ranged variant of the 'open' handling: single ranges answer with a plain
  // 206, multiple ranges switch to multipart/byteranges with a UUID boundary.
  on_open_ranges()
  {
    // File got opened, so we can set headers/status
    debug('Open succeeded:', this.name, this.type);
    this.opened = true;

    this.response.header('Accept-Ranges', 'bytes');
    this.response.header('Content-Transfer-Encoding', 'binary');
    this.response.header('Content-Disposition', 'inline');

    // For single ranges, the content length should be the size of the
    // range. For multiple ranges, we don't send a content length
    // header.
    //
    // Similarly, the type is different whether or not there is more than
    // one range.
    if (this.ranges.ranges.length == 1) {
      this.response.writeHead(206, 'Partial Content', this.next_range_headers());
    }
    else {
      this.range_boundary = uuid.v4();
      var headers = {
        'Content-Type': `multipart/byteranges; boundary=${this.range_boundary}`,
      };
      this.response.writeHead(206, 'Partial Content', headers);
      this.next_range();
    }
  }

  on_data_ranges(chunk)
  {
    // Run the 'open' handling lazily on the first chunk in case the stream
    // never emitted an 'open' event.
    if (!this.opened) {
      this.handlers['open']();
    }
    // Crap, node.js streams are stupid. No guarantee for seek support. Sure,
    // that makes node.js easier to implement, but offloads everything onto the
    // application developer.
    //
    // So, we skip chunks until our read position is within the range we want to
    // send at the moment. We're relying on ranges being in-order, which this
    // file's parser luckily (?) provides.
    //
    // The simplest optimization would be at ever range start to seek() to the
    // start.
    var chunk_range = [this.read_offset, this.read_offset + chunk.length - 1];
    debug('= Got chunk with byte range', chunk_range);
    while (true) {
      var req_range = this.ranges.ranges[this.range_index];
      if (!req_range) {
        break;
      }
      debug('Current requested range is', req_range);
      // NOTE(review): a falsy check also matches an end offset of 0, so a
      // range ending at byte 0 would be treated as open-ended here - confirm
      // that ranges with end 0 cannot reach this point.
      if (!req_range[1]) {
        req_range = [req_range[0], Number.MAX_SAFE_INTEGER];
        debug('Treating as', req_range);
      }

      // No overlap in the chunk and requested range; don't write.
      if (chunk_range[1] < req_range[0] || chunk_range[0] > req_range[1]) {
        debug('Ignoring chunk; it is out of range.');
        break;
      }

      // Since there is overlap, find the segment that's entirely within the
      // chunk.
      var segment = [
        Math.max(chunk_range[0], req_range[0]),
        Math.min(chunk_range[1], req_range[1]),
      ];
      debug('Segment to send within chunk is', segment);

      // Normalize the segment to a chunk offset
      var start = segment[0] - this.read_offset;
      var end = segment[1] - this.read_offset;
      var len = end - start + 1;
      debug('Offsets into buffer are', [start, end], 'with length', len);

      // Write the slice that we want to write. We first create a buffer from the
      // chunk. Then we slice a new buffer from the same underlying ArrayBuffer,
      // starting at the original buffer's offset, further offset by the segment
      // start. The segment length bounds the end of our slice.
      var buf = Buffer.from(chunk, 'binary');
      this.response.write(Buffer.from(buf.buffer, buf.byteOffset + start, len));

      // If the requested range is finished, we should start the next one.
      if (req_range[1] > chunk_range[1]) {
        debug('Chunk is finished, but the requested range is missing bytes.');
        break;
      }

      if (req_range[1] <= chunk_range[1]) {
        debug('Range is finished.');
        // next_range() takes no parameters; the argument is ignored.
        if (!this.next_range(segment)) {
          break;
        }
      }
    }

    // Update read offset when chunk is finished.
    this.read_offset += chunk.length;
  }


  // Wire up the event handlers and kick off streaming. Ranges with an
  // undefined start cause a fallback to a full (un-ranged) response.
  start()
  {
    // Before we start streaming, let's ensure our ranges don't contain any
    // without start - if they do, we nuke them all and treat this as a full
    // request.
    var nuke = false;
    if (this.ranges) {
      for (var i in this.ranges.ranges) {
        if (typeof this.ranges.ranges[i][0] === 'undefined') {
          nuke = true;
          break;
        }
      }
    }
    if (nuke) {
      this.ranges = undefined;
    }

    // Register callbacks. Store them in a handlers object so we can
    // keep the bound version around for stopping to listen to events.
    this.handlers['error'] = this.on_error.bind(this);
    this.handlers['end'] = this.on_end.bind(this);

    if (this.ranges) {
      debug('Preparing to handle ranges.');
      this.handlers['open'] = this.on_open_ranges.bind(this);
      this.handlers['data'] = this.on_data_ranges.bind(this);
    }
    else {
      debug('No ranges, just send the whole file.');
      this.handlers['open'] = this.on_open_no_range.bind(this);
      this.handlers['data'] = this.on_data_no_range.bind(this);
    }

    for (var handler in this.handlers) {
      this.stream.on(handler, this.handlers[handler]);
    }
  }
}
+
+
/*
 * Convenience wrapper: construct a RangeSender for the given response/stream
 * pair and immediately start streaming.
 */
function send(response, stream, opts, end_callback)
{
  new RangeSender(response, stream, opts, end_callback).start();
}
+
+
+/*
+ * Exports
+ */
+
module.exports =
{
  parse: parse,              // Parse a Range header value into merged, sorted ranges.
  parseAsync: parseAsync,    // Callback-style variant of parse().
  RangeSender: RangeSender,  // Streaming single/multipart range writer.
  send: send,                // One-shot helper around RangeSender.
};

+ 10 - 0
storage-node/packages/util/stripEndingSlash.js

@@ -0,0 +1,10 @@
+// return url with last `/` removed
function removeEndingForwardSlash(url) {
    // Coerce to a primitive string - the previous `new String(url)` created
    // a String wrapper object, which is an anti-pattern - then drop a single
    // trailing '/' if present.
    const str = String(url);
    return str.endsWith('/') ? str.substring(0, str.length - 1) : str;
}
+
// Export the helper directly (single-function CommonJS module).
module.exports = removeEndingForwardSlash

+ 0 - 0
storage-node/packages/util/test/data/bar


+ 0 - 0
storage-node/packages/util/test/data/foo/baz


+ 1 - 0
storage-node/packages/util/test/data/quux

@@ -0,0 +1 @@
+foo/baz

+ 80 - 0
storage-node/packages/util/test/fs/resolve.js

@@ -0,0 +1,80 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const path = require('path');
+
+const resolve = require('@joystream/util/fs/resolve');
+
// Shared test body: registers resolve() test cases against the given base
// directory, so the same expectations run for several bases below.
function tests(base)
{
  it('resolves absolute paths relative to the base', function()
  {
    const resolved = resolve(base, '/foo');
    const relative = path.relative(base, resolved);
    expect(relative).to.equal('foo');
  });

  it('allows for relative paths that stay in the base', function()
  {
    const resolved = resolve(base, 'foo/../bar');
    const relative = path.relative(base, resolved);
    expect(relative).to.equal('bar');
  });

  it('prevents relative paths from breaking out of the base', function()
  {
    expect(() => resolve(base, '../foo')).to.throw();
  });

  it('prevents long relative paths from breaking out of the base', function()
  {
    expect(() => resolve(base, '../../../foo')).to.throw();
  });

  it('prevents sneaky relative paths from breaking out of the base', function()
  {
    expect(() => resolve(base, 'foo/../../../bar')).to.throw();
  });
}
+
describe('util/fs/resolve', function()
{
  // Run the shared test body against bases of varying shape and depth.
  describe('slash base', function()
  {
    tests('/');
  });

  describe('empty base', function()
  {
    tests('');
  });

  describe('short base', function()
  {
    tests('/base');
  });

  describe('long base', function()
  {
    tests('/this/base/is/very/long/indeed');
  });
});

+ 69 - 0
storage-node/packages/util/test/fs/walk.js

@@ -0,0 +1,69 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const temp = require('temp').track();
+
+const fs = require('fs');
+const path = require('path');
+
+const fswalk = require('@joystream/util/fs/walk');
+
// Walks `base` with fswalk using the given `archive` filesystem object,
// collects every reported entry, and asserts the expected hierarchy once the
// walk signals completion (callback invoked without a relname).
function walktest(archive, base, done)
{
  var results = new Map();

  fswalk(base, archive, (err, relname, stat, linktarget) => {
    expect(err).to.be.null;

    if (relname) {
      results.set(relname, [stat, linktarget]);
      return;
    }

    // End of data, do testing
    const entries = Array.from(results.keys());
    expect(entries).to.include('foo');
    expect(results.get('foo')[0].isDirectory()).to.be.true;

    expect(entries).to.include('bar');
    expect(results.get('bar')[0].isFile()).to.be.true;

    // Symlinks are only visible when walking the real filesystem.
    if (archive === fs) {
      expect(entries).to.include('quux');
      expect(results.get('quux')[0].isSymbolicLink()).to.be.true;
      expect(results.get('quux')[1]).to.equal('foo/baz');
    }

    expect(entries).to.include('foo/baz');
    expect(results.get('foo/baz')[0].isFile()).to.be.true;

    done();
  });
}
+
describe('util/fs/walk', function()
{
  // Exercise the walker against the checked-in test/data fixture tree.
  it('reports all files in a file system hierarchy', function(done)
  {
    walktest(fs, path.resolve(__dirname, '../data'), done)
  });
});

+ 164 - 0
storage-node/packages/util/test/lru.js

@@ -0,0 +1,164 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+
+const lru = require('@joystream/util/lru');
+
// Default pause, in milliseconds, used to force distinct LRU timestamps.
const DEFAULT_SLEEP = 1;

// Promise-based delay helper; resolves after `ms` milliseconds.
function sleep(ms = DEFAULT_SLEEP)
{
  return new Promise((resolve) => setTimeout(resolve, ms));
}
+
describe('util/lru', function()
{
  describe('simple usage', function()
  {
    it('does not contain keys that were not added', function()
    {
      var cache = new lru.LRUCache();
      expect(cache.size()).to.equal(0);

      var val = cache.get('something');
      expect(val).to.be.undefined;

      expect(cache.has('something')).to.be.false;
    });

    it('contains keys that were added', function()
    {
      var cache = new lru.LRUCache();
      cache.put('something', 'yay!');
      expect(cache.size()).to.equal(1);

      var val = cache.get('something');
      expect(val).to.be.equal('yay!');

      expect(cache.has('something')).to.be.true;
    });

    it('does not contain keys that were deleted', function()
    {
      var cache = new lru.LRUCache();
      cache.put('something', 'yay!');
      expect(cache.size()).to.equal(1);
      var val = cache.get('something');
      expect(val).to.be.equal('yay!');
      expect(cache.has('something')).to.be.true;

      cache.del('something');
      expect(cache.size()).to.equal(0);
      val = cache.get('something');
      expect(val).to.be.undefined;
      expect(cache.has('something')).to.be.false;
    });

    it('can be cleared', function()
    {
      var cache = new lru.LRUCache();
      cache.put('something', 'yay!');
      expect(cache.size()).to.equal(1);

      cache.clear();
      expect(cache.size()).to.equal(0);
    });
  });

  // LRU ordering relies on wall-clock access timestamps, hence the sleep()
  // calls between operations to guarantee distinct times.
  describe('capacity management', function()
  {
    it('does not grow beyond capacity', async function()
    {
      var cache = new lru.LRUCache(2); // Small capacity
      expect(cache.size()).to.equal(0);

      cache.put('foo', '42');
      expect(cache.size()).to.equal(1);

      await sleep();

      cache.put('bar', '42');
      expect(cache.size()).to.equal(2);

      await sleep();

      cache.put('baz', '42');
      expect(cache.size()).to.equal(2); // Capacity exceeded
    });

    it('removes the oldest key when pruning', async function()
    {
      var cache = new lru.LRUCache(2); // Small capacity
      expect(cache.size()).to.equal(0);

      cache.put('foo', '42');
      expect(cache.size()).to.equal(1);
      expect(cache.has('foo')).to.be.true;

      await sleep();

      cache.put('bar', '42');
      expect(cache.size()).to.equal(2);
      expect(cache.has('foo')).to.be.true;
      expect(cache.has('bar')).to.be.true;

      await sleep();

      // 'foo' is the oldest entry, so it should be the one evicted.
      cache.put('baz', '42');
      expect(cache.size()).to.equal(2); // Capacity exceeded
      expect(cache.has('bar')).to.be.true;
      expect(cache.has('baz')).to.be.true;
    });

    it('updates LRU timestamp when reading', async function()
    {
      var cache = new lru.LRUCache(2); // Small capacity
      expect(cache.size()).to.equal(0);

      cache.put('foo', '42');
      expect(cache.size()).to.equal(1);
      expect(cache.has('foo')).to.be.true;

      await sleep();

      cache.put('bar', '42');
      expect(cache.size()).to.equal(2);
      expect(cache.has('foo')).to.be.true;
      expect(cache.has('bar')).to.be.true;

      await sleep();

      // 'foo' is older than 'bar' right now, so should be pruned first. But
      // if we get 'foo', it would be 'bar' that has to go.
      var _ = cache.get('foo');

      // Makes debugging a bit more obvious
      await sleep();

      cache.put('baz', '42');
      expect(cache.size()).to.equal(2); // Capacity exceeded
      expect(cache.has('foo')).to.be.true;
      expect(cache.has('baz')).to.be.true;
    });
  });
});

+ 124 - 0
storage-node/packages/util/test/pagination.js

@@ -0,0 +1,124 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const mock_http = require('node-mocks-http');
+
+const pagination = require('@joystream/util/pagination');
+
describe('util/pagination', function()
{
  describe('openapi()', function()
  {
    it('should add parameters and definitions to an API spec', function()
    {
      var api = pagination.openapi({});

      // Parameters
      expect(api).to.have.property('components');

      expect(api.components).to.have.property('parameters');
      expect(api.components.parameters).to.have.property('paginationLimit');

      expect(api.components.parameters.paginationLimit).to.have.property('name');
      expect(api.components.parameters.paginationLimit.name).to.equal('limit');

      expect(api.components.parameters.paginationLimit).to.have.property('schema');
      expect(api.components.parameters.paginationLimit.schema).to.have.property('type');
      expect(api.components.parameters.paginationLimit.schema.type).to.equal('integer');

      expect(api.components.parameters.paginationOffset).to.have.property('name');
      expect(api.components.parameters.paginationOffset.name).to.equal('offset');

      expect(api.components.parameters.paginationOffset).to.have.property('schema');
      expect(api.components.parameters.paginationOffset.schema).to.have.property('type');
      expect(api.components.parameters.paginationOffset.schema.type).to.equal('integer');


      // Definitions
      expect(api.components).to.have.property('schemas');
      expect(api.components.schemas).to.have.property('PaginationInfo');

      expect(api.components.schemas.PaginationInfo).to.have.property('type');
      expect(api.components.schemas.PaginationInfo.type).to.equal('object');

      expect(api.components.schemas.PaginationInfo).to.have.property('properties');
      expect(api.components.schemas.PaginationInfo.properties)
        .to.be.an('object')
        .that.has.all.keys('self', 'next', 'prev', 'first', 'last');
    });
  });


  describe('paginate()', function()
  {
    it('should add pagination links to a response object', function()
    {
      var req = mock_http.createRequest({
        method: 'GET',
        url: '/foo?limit=10',
        query: {
          limit: 10, // Mock is a little stupid, we have to explicitly set query
        },
        headers: {
          host: 'localhost',
        },
        protocol: 'http',
      });

      var res = pagination.paginate(req, {});

      // Without an offset there is no prev link, and without a known
      // last_offset there is no last link.
      expect(res).to.have.property('pagination')
        .that.has.all.keys('self', 'first', 'next');

      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10');
      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10');
    });

    it('should add a last pagination link when requested', function()
    {
      var req = mock_http.createRequest({
        method: 'GET',
        url: '/foo?limit=10&offset=15',
        query: {
          limit: 10, // Mock is a little stupid, we have to explicitly set query
          offset: 15,
        },
        headers: {
          host: 'localhost',
        },
        protocol: 'http',
      });

      var res = pagination.paginate(req, {}, 35);

      expect(res).to.have.property('pagination')
        .that.has.all.keys('self', 'first', 'next', 'prev', 'last');

      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15');
      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
      expect(res.pagination.last).to.equal('http://localhost/foo?limit=10&offset=35');
      expect(res.pagination.prev).to.equal('http://localhost/foo?limit=10&offset=5');
      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=25');
    });
  });
});

+ 409 - 0
storage-node/packages/util/test/ranges.js

@@ -0,0 +1,409 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const mocha = require('mocha');
+const expect = require('chai').expect;
+const mock_http = require('node-mocks-http');
+const stream_buffers = require('stream-buffers');
+
+const ranges = require('@joystream/util/ranges');
+
+describe('util/ranges', function()
+{
+  describe('parse()', function()
+  {
+    it('should parse a full range', function()
+    {
+      // Range with unit
+      var range = ranges.parse('bytes=0-100');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-100');
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(100);
+
+      // Range without unit
+      var range = ranges.parse('0-100');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-100');
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(100);
+
+      // Range with custom unit
+      //
+      var range = ranges.parse('foo=0-100');
+      expect(range.unit).to.equal('foo');
+      expect(range.range_str).to.equal('0-100');
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(100);
+    });
+
+    it('should error out on malformed strings', function()
+    {
+      expect(() => ranges.parse('foo')).to.throw();
+      expect(() => ranges.parse('foo=bar')).to.throw();
+      expect(() => ranges.parse('foo=100')).to.throw();
+      expect(() => ranges.parse('foo=100-0')).to.throw();
+    });
+
+    it('should parse a range without end', function()
+    {
+      var range = ranges.parse('0-');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-');
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.be.undefined;
+    });
+
+    it('should parse a range without start', function()
+    {
+      var range = ranges.parse('-100');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('-100');
+      expect(range.ranges[0][0]).to.be.undefined;
+      expect(range.ranges[0][1]).to.equal(100);
+    });
+
+    it('should parse multiple ranges', function()
+    {
+      var range = ranges.parse('0-10,30-40,60-80');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-10,30-40,60-80');
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(10);
+      expect(range.ranges[1][0]).to.equal(30);
+      expect(range.ranges[1][1]).to.equal(40);
+      expect(range.ranges[2][0]).to.equal(60);
+      expect(range.ranges[2][1]).to.equal(80);
+    });
+
+    it('should merge overlapping ranges', function()
+    {
+      // Two overlapping ranges
+      var range = ranges.parse('0-20,10-30');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-20,10-30');
+      expect(range.ranges).to.have.lengthOf(1);
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(30);
+
+      // Three overlapping ranges
+      var range = ranges.parse('0-15,10-25,20-30');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-15,10-25,20-30');
+      expect(range.ranges).to.have.lengthOf(1);
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(30);
+
+      // Three overlapping ranges, reverse order
+      var range = ranges.parse('20-30,10-25,0-15');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('20-30,10-25,0-15');
+      expect(range.ranges).to.have.lengthOf(1);
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(30);
+
+      // Adjacent ranges
+      var range = ranges.parse('0-10,11-20');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('0-10,11-20');
+      expect(range.ranges).to.have.lengthOf(1);
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(20);
+    });
+
+    it('should sort ranges', function()
+    {
+      var range = ranges.parse('10-30,0-5');
+      expect(range.unit).to.equal('bytes');
+      expect(range.range_str).to.equal('10-30,0-5');
+      expect(range.ranges).to.have.lengthOf(2);
+      expect(range.ranges[0][0]).to.equal(0);
+      expect(range.ranges[0][1]).to.equal(5);
+      expect(range.ranges[1][0]).to.equal(10);
+      expect(range.ranges[1][1]).to.equal(30);
+    });
+  });
+
+  describe('send()', function()
+  {
+    it('should send full files on request', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(200);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+    });
+
+    it('should send a range spanning the entire file on request', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[0, 12]],
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(206);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.getHeader('content-range')).to.equal('bytes 0-12/*');
+        expect(res.getHeader('content-length')).to.equal('13');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+
+    });
+
+    it('should send a small range on request', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[1, 11]], // Cut off first and last letter
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(206);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*');
+        expect(res.getHeader('content-length')).to.equal('11');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal('ello, world');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+    });
+
+    it('should send ranges crossing buffer boundaries', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({
+        chunkSize: 3, // Setting a chunk size smaller than the range should
+                      // not impact the test.
+      });
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[1, 11]], // Cut off first and last letter
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(206);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*');
+        expect(res.getHeader('content-length')).to.equal('11');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal('ello, world');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+    });
+
+    it('should send multiple ranges', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[1, 3], [5, 7]], // Slice two ranges out
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(206);
+        expect(res.getHeader('content-type')).to.satisfy((str) => str.startsWith('multipart/byteranges'));
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+
+        // The buffer should contain both ranges, but with all the That would be
+        // "ell" and ", w".
+        // It's pretty elaborate having to parse the entire multipart response
+        // body, so we'll restrict ourselves to finding lines within it.
+        var body = res._getBuffer().toString();
+        expect(body).to.contain('\r\nContent-Range: bytes 1-3/*\r\n');
+        expect(body).to.contain('\r\nell\r\n');
+        expect(body).to.contain('\r\nContent-Range: bytes 5-7/*\r\n');
+        expect(body).to.contain('\r\n, w');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+    });
+
+    it('should deal with ranges without end', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[5, undefined]], // Skip the first part, but read until end
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(206);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.getHeader('content-range')).to.equal('bytes 5-/*');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal(', world!');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+    });
+
+    it('should ignore ranges without start', function(done)
+    {
+      var res = mock_http.createResponse({});
+      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+
+      // End-of-stream callback
+      var opts = {
+        name: 'test.file',
+        type: 'application/test',
+        ranges: {
+          ranges: [[undefined, 5]], // Only last five
+        }
+      };
+      ranges.send(res, in_stream, opts, function(err) {
+        expect(err).to.not.exist;
+
+        // HTTP handling
+        expect(res.statusCode).to.equal(200);
+        expect(res.getHeader('content-type')).to.equal('application/test');
+        expect(res.getHeader('content-disposition')).to.equal('inline');
+
+        // Data/stream handling
+        expect(res._isEndCalled()).to.be.true;
+        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+
+        // Notify mocha that we're done.
+        done();
+      });
+
+      // Simulate file stream
+      in_stream.emit('open');
+      in_stream.put('Hello, world!');
+      in_stream.stop();
+
+    });
+  });
+});

+ 16 - 0
storage-node/scripts/compose/devchain-and-ipfs-node/docker-compose.yaml

@@ -0,0 +1,16 @@
+# Local development stack for the storage node: an IPFS daemon for content
+# storage plus a Joystream chain node running a development chain.
+version: '3'
+services:
+  ipfs:
+    image: ipfs/go-ipfs:latest
+    ports:
+      # 5001 is presumably the IPFS API port — confirm against the storage
+      # node's IPFS client configuration.
+      - "5001:5001"
+    volumes:
+      # Persist the IPFS repository in a named volume across restarts.
+      - storage-node-shared-data:/data/ipfs
+  chain:
+    image: joystream/node:2.1.2
+    ports:
+      # 9944: WebSocket RPC endpoint (exposed via --ws-external below).
+      - "9944:9944"
+    # --dev runs a development chain; --ws-external accepts WebSocket
+    # connections from outside the container.
+    command: --dev --ws-external
+volumes:
+  storage-node-shared-data:
+    driver: local

File diff suppressed because it is too large
+ 17 - 0
storage-node/storage-node_new.svg


File diff suppressed because it is too large
+ 499 - 34
yarn.lock


Some files were not shown because too many files changed in this diff