Browse Source

Merge pull request #935 from shamil-gadelshin/restore_storage_node_cli_commands

Restore storage-node CLI commands.
Mokhtar Naamani 4 years ago
parent
commit
c24aaf05bb
45 changed files with 890 additions and 521 deletions
  1. 2 2
      package.json
  2. 4 2
      storage-node/.gitignore
  3. 4 0
      storage-node/package.json
  4. 4 0
      storage-node/packages/cli/.eslintignore
  5. 2 240
      storage-node/packages/cli/bin/cli.js
  6. 9 4
      storage-node/packages/cli/package.json
  7. 123 0
      storage-node/packages/cli/src/cli.ts
  8. 48 0
      storage-node/packages/cli/src/commands/base.ts
  9. 4 2
      storage-node/packages/cli/src/commands/dev.ts
  10. 77 0
      storage-node/packages/cli/src/commands/download.ts
  11. 50 0
      storage-node/packages/cli/src/commands/head.ts
  12. 220 0
      storage-node/packages/cli/src/commands/upload.ts
  13. 0 0
      storage-node/packages/cli/src/test/index.ts
  14. 11 0
      storage-node/packages/cli/tsconfig.json
  15. 8 8
      storage-node/packages/colossus/bin/cli.js
  16. 3 1
      storage-node/packages/colossus/lib/app.js
  17. 1 1
      storage-node/packages/colossus/lib/discovery.js
  18. 3 3
      storage-node/packages/colossus/lib/middleware/file_uploads.js
  19. 2 2
      storage-node/packages/colossus/lib/middleware/validate_responses.js
  20. 1 1
      storage-node/packages/colossus/lib/sync.js
  21. 4 4
      storage-node/packages/colossus/paths/asset/v0/{id}.js
  22. 1 1
      storage-node/packages/colossus/paths/discover/v0/{id}.js
  23. 36 36
      storage-node/packages/discovery/discover.js
  24. 1 1
      storage-node/packages/discovery/publish.js
  25. 72 72
      storage-node/packages/helios/bin/cli.js
  26. 2 2
      storage-node/packages/runtime-api/assets.js
  27. 10 8
      storage-node/packages/runtime-api/index.js
  28. 33 0
      storage-node/packages/runtime-api/system.js
  29. 28 28
      storage-node/packages/runtime-api/workers.js
  30. 2 1
      storage-node/packages/storage/package.json
  31. 9 9
      storage-node/packages/storage/storage.js
  32. 21 21
      storage-node/packages/storage/test/storage.js
  33. 2 0
      storage-node/packages/util/externalPromise.js
  34. 2 2
      storage-node/packages/util/fs/walk.js
  35. 1 1
      storage-node/packages/util/lru.js
  36. 2 0
      storage-node/packages/util/pagination.js
  37. 3 3
      storage-node/packages/util/ranges.js
  38. 10 10
      storage-node/packages/util/test/fs/resolve.js
  39. 2 2
      storage-node/packages/util/test/fs/walk.js
  40. 11 11
      storage-node/packages/util/test/lru.js
  41. 8 12
      storage-node/packages/util/test/pagination.js
  42. 25 25
      storage-node/packages/util/test/ranges.js
  43. 3 3
      storage-node/packages/util/test/stripEndingSlash.js
  44. 23 0
      storage-node/tsconfig.json
  45. 3 3
      yarn.lock

+ 2 - 2
package.json

@@ -6,8 +6,8 @@
   "scripts": {
     "test": "yarn && yarn workspaces run test",
     "test-migration": "yarn && yarn workspaces run test-migration",
-    "postinstall": "yarn workspace @joystream/types build",
-    "cargo-checks": "devops/git-hooks/pre-commit && devops/git-hooks/pre-push",
+    "postinstall": "yarn workspace @joystream/types build && yarn workspace storage-node run build",
+	"cargo-checks": "devops/git-hooks/pre-commit && devops/git-hooks/pre-push",
     "cargo-build": "scripts/cargo-build.sh",
     "lint": "yarn workspaces run lint"
   },

+ 4 - 2
storage-node/.gitignore

@@ -1,6 +1,6 @@
 build/
 coverage/
-dist
+dist/
 tmp/
 .DS_Store
 
@@ -26,4 +26,6 @@ node_modules/
 # Ignore nvm config file
 .nvmrc
 
-yarn.lock
+yarn.lock
+
+*.tsbuildinfo

+ 4 - 0
storage-node/package.json

@@ -33,16 +33,20 @@
   "scripts": {
     "test": "wsrun --serial test",
     "lint": "eslint --ignore-path .gitignore .",
+    "build": "yarn workspace @joystream/storage-cli run build",
     "checks": "yarn lint && prettier . --check",
     "format": "prettier ./ --write"
   },
   "devDependencies": {
+    "@types/chai": "^4.2.11",
+    "@types/mocha": "^7.0.2",
     "eslint": "^5.16.0",
     "eslint-config-esnext": "^4.1.0",
     "eslint-config-prettier": "^6.11.0",
     "eslint-plugin-babel": "^5.3.1",
     "eslint-plugin-prettier": "^3.1.4",
     "prettier": "^2.0.5",
+    "typescript": "^3.9.6",
     "wsrun": "^3.6.5"
   }
 }

+ 4 - 0
storage-node/packages/cli/.eslintignore

@@ -0,0 +1,4 @@
+**/build/*
+**/dist/*
+**/coverage/*
+**/node_modules/*

+ 2 - 240
storage-node/packages/cli/bin/cli.js

@@ -1,251 +1,13 @@
 #!/usr/bin/env node
-/*
- * This file is part of the storage node for the Joystream project.
- * Copyright (C) 2019 Joystream Contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program.  If not, see <https://www.gnu.org/licenses/>.
- */
 
-'use strict'
-
-const fs = require('fs')
-const assert = require('assert')
-const { RuntimeApi } = require('@joystream/storage-runtime-api')
-const meow = require('meow')
 const chalk = require('chalk')
-const _ = require('lodash')
-const debug = require('debug')('joystream:storage-cli')
-const dev = require('./dev')
-
-// Parse CLI
-const FLAG_DEFINITIONS = {
-  // TODO
-}
-
-const cli = meow(
-  `
-  Usage:
-    $ storage-cli command [arguments..] [key_file] [passphrase]
-
-  Some commands require a key file as the last option holding the identity for
-  interacting with the runtime API.
-
-  Commands:
-    upload            Upload a file to a Colossus storage node. Requires a
-                      storage node URL, and a local file name to upload. As
-                      an optional third parameter, you can provide a Data
-                      Object Type ID - this defaults to "1" if not provided.
-    download          Retrieve a file. Requires a storage node URL and a content
-                      ID, as well as an output filename.
-    head              Send a HEAD request for a file, and print headers.
-                      Requires a storage node URL and a content ID.
-
-  Dev Commands:       Commands to run on a development chain.
-    dev-init          Setup chain with Alice as lead and storage provider.
-    dev-check         Check the chain is setup with Alice as lead and storage provider.
-  `,
-  { flags: FLAG_DEFINITIONS }
-)
-
-function assertFile(name, filename) {
-  assert(filename, `Need a ${name} parameter to proceed!`)
-  assert(fs.statSync(filename).isFile(), `Path "${filename}" is not a file, aborting!`)
-}
-
-function loadIdentity(api, filename, passphrase) {
-  if (filename) {
-    assertFile('keyfile', filename)
-    api.identities.loadUnlock(filename, passphrase)
-  } else {
-    debug('Loading Alice as identity')
-    api.identities.useKeyPair(dev.aliceKeyPair(api))
-  }
-}
-
-const commands = {
-  // add Alice well known account as storage provider
-  'dev-init': async api => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
-    const dev = require('./dev')
-    return dev.init(api)
-  },
-  // Checks that the setup done by dev-init command was successful.
-  'dev-check': async api => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
-    const dev = require('./dev')
-    return dev.check(api)
-  },
-  // The upload method is not correctly implemented
-  // needs to get the liaison after creating a data object,
-  // resolve the ipns id to the asset put api url of the storage-node
-  // before uploading..
-  upload: async (api, url, filename, doTypeId, keyfile, passphrase) => {
-    loadIdentity(keyfile, passphrase)
-    // Check parameters
-    assertFile('file', filename)
-
-    const size = fs.statSync(filename).size
-    debug(`File "${filename}" is ${chalk.green(size)} Bytes.`)
-
-    if (!doTypeId) {
-      doTypeId = 1
-    }
-
-    debug('Data Object Type ID is: ' + chalk.green(doTypeId))
-
-    // Generate content ID
-    // FIXME this require path is like this because of
-    // https://github.com/Joystream/apps/issues/207
-    const { ContentId } = require('@joystream/types/media')
-    let cid = ContentId.generate()
-    cid = cid.encode().toString()
-    debug('Generated content ID: ' + chalk.green(cid))
-
-    // Create Data Object
-    await api.assets.createDataObject(api.identities.key.address, cid, doTypeId, size)
-    debug('Data object created.')
-
-    // TODO in future, optionally contact liaison here?
-    const request = require('request')
-    url = `${url}asset/v0/${cid}`
-    debug('Uploading to URL', chalk.green(url))
-
-    const f = fs.createReadStream(filename)
-    const opts = {
-      url,
-      headers: {
-        'content-type': '',
-        'content-length': `${size}`,
-      },
-      json: true,
-    }
-    return new Promise((resolve, reject) => {
-      const r = request.put(opts, (error, response, body) => {
-        if (error) {
-          reject(error)
-          return
-        }
-
-        if (response.statusCode / 100 !== 2) {
-          reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`))
-          return
-        }
-        debug('Upload successful:', body.message)
-        resolve()
-      })
-      f.pipe(r)
-    })
-  },
-  // needs to be updated to take a content id and resolve it a potential set
-  // of providers that has it, and select one (possibly try more than one provider)
-  // to fetch it from the get api url of a provider..
-  download: async (api, url, contentId, filename) => {
-    const request = require('request')
-    url = `${url}asset/v0/${contentId}`
-    debug('Downloading URL', chalk.green(url), 'to', chalk.green(filename))
-
-    const f = fs.createWriteStream(filename)
-    const opts = {
-      url,
-      json: true,
-    }
-    return new Promise((resolve, reject) => {
-      const r = request.get(opts, (error, response, body) => {
-        if (error) {
-          reject(error)
-          return
-        }
-
-        debug(
-          'Downloading',
-          chalk.green(response.headers['content-type']),
-          'of size',
-          chalk.green(response.headers['content-length']),
-          '...'
-        )
-
-        f.on('error', err => {
-          reject(err)
-        })
-
-        f.on('finish', () => {
-          if (response.statusCode / 100 !== 2) {
-            reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`))
-            return
-          }
-          debug('Download completed.')
-          resolve()
-        })
-      })
-      r.pipe(f)
-    })
-  },
-  // similar to 'download' function
-  head: async (api, url, contentId) => {
-    const request = require('request')
-    url = `${url}asset/v0/${contentId}`
-    debug('Checking URL', chalk.green(url), '...')
-
-    const opts = {
-      url,
-      json: true,
-    }
-    return new Promise((resolve, reject) => {
-      request.head(opts, (error, response, body) => {
-        if (error) {
-          reject(error)
-          return
-        }
-
-        if (response.statusCode / 100 !== 2) {
-          reject(new Error(`${response.statusCode}: ${body.message || 'unknown reason'}`))
-          return
-        }
-
-        for (const propname in response.headers) {
-          debug(`  ${chalk.yellow(propname)}: ${response.headers[propname]}`)
-        }
-
-        resolve()
-      })
-    })
-  },
-}
-
-async function main() {
-  const api = await RuntimeApi.create()
-
-  // Simple CLI commands
-  const command = cli.input[0]
-  if (!command) {
-    throw new Error('Need a command to run!')
-  }
-
-  if (Object.prototype.hasOwnProperty.call(commands, command)) {
-    // Command recognized
-    const args = _.clone(cli.input).slice(1)
-    await commands[command](api, ...args)
-  } else {
-    throw new Error(`Command "${command}" not recognized, aborting!`)
-  }
-}
+const { main } = require('../dist/cli')
 
 main()
   .then(() => {
     process.exit(0)
   })
-  .catch(err => {
+  .catch((err) => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 9 - 4
storage-node/packages/cli/package.json

@@ -27,11 +27,12 @@
     "node": ">=10.15.3"
   },
   "scripts": {
-    "test": "mocha 'test/**/*.js'",
-    "lint": "eslint 'paths/**/*.js' 'lib/**/*.js'"
+    "test": "mocha 'dist/test/**/*.js'",
+    "lint": "eslint --ext .ts,.tsx . && tsc --noEmit --pretty",
+    "build": "tsc --build"
   },
   "bin": {
-    "storage-cli": "bin/cli.js"
+    "storage-cli": "./bin/cli.js"
   },
   "devDependencies": {
     "chai": "^4.2.0",
@@ -41,9 +42,13 @@
   },
   "dependencies": {
     "@joystream/storage-runtime-api": "^0.1.0",
+    "@joystream/service-discovery": "^0.1.0",
+    "@joystream/storage-utils": "^0.1.0",
+    "@joystream/types": "^0.11.0",
+    "axios": "^0.19.2",
     "chalk": "^2.4.2",
     "lodash": "^4.17.11",
     "meow": "^5.0.0",
-    "request": "^2.88.0"
+    "ipfs-only-hash": "^1.0.2"
   }
 }

+ 123 - 0
storage-node/packages/cli/src/cli.ts

@@ -0,0 +1,123 @@
+/*
+ * This file is part of the storage node for the Joystream project.
+ * Copyright (C) 2019 Joystream Contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <https://www.gnu.org/licenses/>.
+ */
+
+'use strict'
+
+import { RuntimeApi } from '@joystream/storage-runtime-api'
+import meow from 'meow'
+import _ from 'lodash'
+
+// Commands
+import * as dev from './commands/dev'
+import { HeadCommand } from './commands/head'
+import { DownloadCommand } from './commands/download'
+import { UploadCommand } from './commands/upload'
+
+// Parse CLI
+const FLAG_DEFINITIONS = {
+  // TODO: current version of meow doesn't support subcommands. We should consider a migration to yargs or oclif.
+}
+
+const usage = `
+  Usage:
+    $ storage-cli command [arguments..]
+
+  Commands:
+    upload            Upload a file to the Joystream Network. Requires a
+                      source file path to upload, data object ID, member ID and account key file with
+                      pass phrase to unlock it.
+    download          Retrieve a file. Requires a storage node URL and a content
+                      ID, as well as an output filename.
+    head              Send a HEAD request for a file, and print headers.
+                      Requires a storage node URL and a content ID.
+
+  Dev Commands:       Commands to run on a development chain.
+    dev-init          Setup chain with Alice as lead and storage provider.
+    dev-check         Check the chain is setup with Alice as lead and storage provider.
+    
+  Type 'storage-cli command' for the exact command usage examples.
+  `
+
+const cli = meow(usage, { flags: FLAG_DEFINITIONS })
+
+// Shows a message, CLI general usage and exits.
+function showUsageAndExit(message: string) {
+  console.log(message)
+  console.log(usage)
+  process.exit(1)
+}
+
+const commands = {
+  // add Alice well known account as storage provider
+  'dev-init': async (api) => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    return dev.init(api)
+  },
+  // Checks that the setup done by dev-init command was successful.
+  'dev-check': async (api) => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    return dev.check(api)
+  },
+  // Uploads the file to the system. Registers new data object in the runtime, obtains proper colossus instance URL.
+  upload: async (
+    api: any,
+    filePath: string,
+    dataObjectTypeId: string,
+    keyFile: string,
+    passPhrase: string,
+    memberId: string
+  ) => {
+    const uploadCmd = new UploadCommand(api, filePath, dataObjectTypeId, keyFile, passPhrase, memberId)
+
+    await uploadCmd.run()
+  },
+  // needs to be updated to take a content id and resolve it a potential set
+  // of providers that has it, and select one (possibly try more than one provider)
+  // to fetch it from the get api url of a provider..
+  download: async (api: any, url: string, contentId: string, filePath: string) => {
+    const downloadCmd = new DownloadCommand(api, url, contentId, filePath)
+
+    await downloadCmd.run()
+  },
+  // Shows asset information derived from response headers.
+  // Accepts colossus URL and content ID.
+  head: async (api: any, storageNodeUrl: string, contentId: string) => {
+    const headCmd = new HeadCommand(api, storageNodeUrl, contentId)
+
+    await headCmd.run()
+  },
+}
+
+// Entry point.
+export async function main() {
+  const api = await RuntimeApi.create()
+
+  // Simple CLI commands
+  const command = cli.input[0]
+  if (!command) {
+    showUsageAndExit('Enter the command, please.')
+  }
+
+  if (Object.prototype.hasOwnProperty.call(commands, command)) {
+    // Command recognized
+    const args = _.clone(cli.input).slice(1)
+    await commands[command](api, ...args)
+  } else {
+    showUsageAndExit(`Command "${command}" not recognized.`)
+  }
+}

+ 48 - 0
storage-node/packages/cli/src/commands/base.ts

@@ -0,0 +1,48 @@
+import chalk from 'chalk'
+import removeEndingForwardSlash from '@joystream/storage-utils/stripEndingSlash'
+import { ContentId } from '@joystream/types/media'
+
+// Commands base abstract class. Contains reusable methods.
+export abstract class BaseCommand {
+  // Creates the Colossus asset URL and logs it.
+  protected createAndLogAssetUrl(url: string, contentId: string | ContentId): string {
+    let normalizedContentId: string
+
+    if (typeof contentId === 'string') {
+      normalizedContentId = contentId
+    } else {
+      normalizedContentId = contentId.encode()
+    }
+
+    const normalizedUrl = removeEndingForwardSlash(url)
+    const assetUrl = `${normalizedUrl}/asset/v0/${normalizedContentId}`
+    console.log(chalk.yellow('Generated asset URL:', assetUrl))
+
+    return assetUrl
+  }
+
+  // Abstract method to provide parameter validation.
+  protected abstract validateParameters(): boolean
+
+  // Abstract method to show command usage.
+  protected abstract showUsage()
+
+  // Checks command parameters and shows the usage if necessary.
+  protected assertParameters(): boolean {
+    // Create, validate and show parameters.
+    if (!this.validateParameters()) {
+      console.log(chalk.yellow(`Invalid parameters for the command:`))
+      this.showUsage()
+
+      return false
+    }
+
+    return true
+  }
+
+  // Shows the error message and ends the process with error code.
+  protected fail(message: string) {
+    console.log(chalk.red(message))
+    process.exit(1)
+  }
+}

+ 4 - 2
storage-node/packages/cli/bin/dev.js → storage-node/packages/cli/src/commands/dev.ts

@@ -19,7 +19,7 @@ function developmentPort() {
   return 3001
 }
 
-const check = async api => {
+const check = async (api) => {
   const roleAccountId = roleKeyPair(api).address
   const providerId = await api.workers.findProviderIdByRoleAccount(roleAccountId)
 
@@ -40,7 +40,7 @@ const check = async api => {
 // Setup Alice account on a developement chain as
 // a member, storage lead, and a storage provider using a deterministic
 // development key for the role account
-const init = async api => {
+const init = async (api) => {
   try {
     await check(api)
     return
@@ -123,3 +123,5 @@ module.exports = {
   roleKeyPair,
   developmentPort,
 }
+
+export { init, check, aliceKeyPair, roleKeyPair, developmentPort }

+ 77 - 0
storage-node/packages/cli/src/commands/download.ts

@@ -0,0 +1,77 @@
+import axios from 'axios'
+import chalk from 'chalk'
+import fs from 'fs'
+import { BaseCommand } from './base'
+
+// Download command class. Validates input parameters and execute the logic for asset downloading.
+export class DownloadCommand extends BaseCommand {
+  private readonly api: any
+  private readonly storageNodeUrl: string
+  private readonly contentId: string
+  private readonly outputFilePath: string
+
+  constructor(api: any, storageNodeUrl: string, contentId: string, outputFilePath: string) {
+    super()
+
+    this.api = api
+    this.storageNodeUrl = storageNodeUrl
+    this.contentId = contentId
+    this.outputFilePath = outputFilePath
+  }
+
+  // Provides parameter validation. Overrides the abstract method from the base class.
+  protected validateParameters(): boolean {
+    return (
+      this.storageNodeUrl &&
+      this.storageNodeUrl !== '' &&
+      this.contentId &&
+      this.contentId !== '' &&
+      this.outputFilePath &&
+      this.outputFilePath !== ''
+    )
+  }
+
+  // Shows command usage. Overrides the abstract method from the base class.
+  protected showUsage() {
+    console.log(
+      chalk.yellow(`
+        Usage:   storage-cli download colossusURL contentID filePath
+        Example: storage-cli download http://localhost:3001 0x7a6ba7e9157e5fba190dc146fe1baa8180e29728a5c76779ed99655500cff795 ./movie.mp4
+      `)
+    )
+  }
+
+  // Command executor.
+  async run() {
+    // Checks for input parameters, shows usage if they are invalid.
+    if (!this.assertParameters()) return
+
+    const assetUrl = this.createAndLogAssetUrl(this.storageNodeUrl, this.contentId)
+    console.log(chalk.yellow('File path:', this.outputFilePath))
+
+    // Create file write stream and set error handler.
+    const writer = fs.createWriteStream(this.outputFilePath).on('error', (err) => {
+      this.fail(`File write failed: ${err}`)
+    })
+
+    // Request file download.
+    try {
+      const response = await axios({
+        url: assetUrl,
+        method: 'GET',
+        responseType: 'stream',
+      })
+
+      response.data.pipe(writer)
+
+      return new Promise((resolve) => {
+        writer.on('finish', () => {
+          console.log('File downloaded.')
+          resolve()
+        })
+      })
+    } catch (err) {
+      this.fail(`Colossus request failed: ${err.message}`)
+    }
+  }
+}

+ 50 - 0
storage-node/packages/cli/src/commands/head.ts

@@ -0,0 +1,50 @@
+import axios from 'axios'
+import chalk from 'chalk'
+import { BaseCommand } from './base'
+
+// Head command class. Validates input parameters and obtains the asset headers.
+export class HeadCommand extends BaseCommand {
+  private readonly api: any
+  private readonly storageNodeUrl: string
+  private readonly contentId: string
+
+  constructor(api: any, storageNodeUrl: string, contentId: string) {
+    super()
+
+    this.api = api
+    this.storageNodeUrl = storageNodeUrl
+    this.contentId = contentId
+  }
+
+  // Provides parameter validation. Overrides the abstract method from the base class.
+  protected validateParameters(): boolean {
+    return this.storageNodeUrl && this.storageNodeUrl !== '' && this.contentId && this.contentId !== ''
+  }
+
+  // Shows command usage. Overrides the abstract method from the base class.
+  protected showUsage() {
+    console.log(
+      chalk.yellow(`
+        Usage:   storage-cli head colossusURL contentID
+        Example: storage-cli head http://localhost:3001 0x7a6ba7e9157e5fba190dc146fe1baa8180e29728a5c76779ed99655500cff795
+      `)
+    )
+  }
+
+  // Command executor.
+  async run() {
+    // Checks for input parameters, shows usage if they are invalid.
+    if (!this.assertParameters()) return
+
+    const assetUrl = this.createAndLogAssetUrl(this.storageNodeUrl, this.contentId)
+
+    try {
+      const response = await axios.head(assetUrl)
+
+      console.log(chalk.green(`Content type: ${response.headers['content-type']}`))
+      console.log(chalk.green(`Content length: ${response.headers['content-length']}`))
+    } catch (err) {
+      this.fail(`Colossus request failed: ${err.message}`)
+    }
+  }
+}

+ 220 - 0
storage-node/packages/cli/src/commands/upload.ts

@@ -0,0 +1,220 @@
+import axios, { AxiosRequestConfig } from 'axios'
+import fs from 'fs'
+import ipfsHash from 'ipfs-only-hash'
+import { ContentId, DataObject } from '@joystream/types/media'
+import BN from 'bn.js'
+import { Option } from '@polkadot/types/codec'
+import { BaseCommand } from './base'
+import { discover } from '@joystream/service-discovery/discover'
+import Debug from 'debug'
+import chalk from 'chalk'
+import { aliceKeyPair } from './dev'
+const debug = Debug('joystream:storage-cli:upload')
+
+// Defines maximum content length for the assets (files). Limits the upload.
+const MAX_CONTENT_LENGTH = 500 * 1024 * 1024 // 500Mb
+
+// Defines the necessary parameters for the AddContent runtime tx.
+interface AddContentParams {
+  accountId: string
+  ipfsCid: string
+  contentId: ContentId
+  fileSize: BN
+  dataObjectTypeId: number
+  memberId: number
+}
+
+// Upload command class. Validates input parameters and uploads the asset to the storage node and runtime.
+export class UploadCommand extends BaseCommand {
+  private readonly api: any
+  private readonly mediaSourceFilePath: string
+  private readonly dataObjectTypeId: string
+  private readonly keyFile: string
+  private readonly passPhrase: string
+  private readonly memberId: string
+
+  constructor(
+    api: any,
+    mediaSourceFilePath: string,
+    dataObjectTypeId: string,
+    memberId: string,
+    keyFile: string,
+    passPhrase: string
+  ) {
+    super()
+
+    this.api = api
+    this.mediaSourceFilePath = mediaSourceFilePath
+    this.dataObjectTypeId = dataObjectTypeId
+    this.memberId = memberId
+    this.keyFile = keyFile
+    this.passPhrase = passPhrase
+  }
+
+  // Provides parameter validation. Overrides the abstract method from the base class.
+  protected validateParameters(): boolean {
+    return (
+      this.mediaSourceFilePath &&
+      this.mediaSourceFilePath !== '' &&
+      this.dataObjectTypeId &&
+      this.dataObjectTypeId !== '' &&
+      this.memberId &&
+      this.memberId !== ''
+    )
+  }
+
+  // Reads the file from the filesystem and computes IPFS hash.
+  private async computeIpfsHash(): Promise<string> {
+    const file = fs.createReadStream(this.mediaSourceFilePath).on('error', (err) => {
+      this.fail(`File read failed: ${err}`)
+    })
+
+    return await ipfsHash.of(file)
+  }
+
+  // Read the file size from the file system.
+  private getFileSize(): number {
+    const stats = fs.statSync(this.mediaSourceFilePath)
+    return stats.size
+  }
+
+  // Creates parameters for the AddContent runtime tx.
+  private async getAddContentParams(): Promise<AddContentParams> {
+    const identity = await this.loadIdentity()
+    const accountId = identity.address
+
+    const dataObjectTypeId: number = parseInt(this.dataObjectTypeId)
+    if (isNaN(dataObjectTypeId)) {
+      this.fail(`Cannot parse dataObjectTypeId: ${this.dataObjectTypeId}`)
+    }
+
+    const memberId: number = parseInt(this.memberId)
+    if (isNaN(dataObjectTypeId)) {
+      this.fail(`Cannot parse memberIdString: ${this.memberId}`)
+    }
+
+    return {
+      accountId,
+      ipfsCid: await this.computeIpfsHash(),
+      contentId: ContentId.generate(),
+      fileSize: new BN(this.getFileSize()),
+      dataObjectTypeId,
+      memberId,
+    }
+  }
+
+  // Creates the DataObject in the runtime.
+  private async createContent(p: AddContentParams): Promise<DataObject> {
+    try {
+      const dataObject: Option<DataObject> = await this.api.assets.createDataObject(
+        p.accountId,
+        p.memberId,
+        p.contentId,
+        p.dataObjectTypeId,
+        p.fileSize,
+        p.ipfsCid
+      )
+
+      if (dataObject.isNone) {
+        this.fail('Cannot create data object: got None object')
+      }
+
+      return dataObject.unwrap()
+    } catch (err) {
+      this.fail(`Cannot create data object: ${err}`)
+    }
+  }
+
+  // Uploads file to given asset URL.
+  private async uploadFile(assetUrl: string) {
+    // Create file read stream and set error handler.
+    const file = fs.createReadStream(this.mediaSourceFilePath).on('error', (err) => {
+      this.fail(`File read failed: ${err}`)
+    })
+
+    // Upload file from the stream.
+    try {
+      const fileSize = this.getFileSize()
+      const config: AxiosRequestConfig = {
+        headers: {
+          'Content-Type': '', // https://github.com/Joystream/storage-node-joystream/issues/16
+          'Content-Length': fileSize.toString(),
+        },
+        maxContentLength: MAX_CONTENT_LENGTH,
+      }
+      await axios.put(assetUrl, file, config)
+
+      console.log('File uploaded.')
+    } catch (err) {
+      this.fail(err.toString())
+    }
+  }
+
+  // Requests the runtime and obtains the storage node endpoint URL.
+  private async discoverStorageProviderEndpoint(storageProviderId: string): Promise<string> {
+    try {
+      const serviceInfo = await discover(storageProviderId, this.api)
+
+      if (serviceInfo === null) {
+        this.fail('Storage node discovery failed.')
+      }
+      debug(`Discovered service info object: ${serviceInfo}`)
+
+      const dataWrapper = JSON.parse(serviceInfo)
+      const assetWrapper = JSON.parse(dataWrapper.serialized)
+
+      return assetWrapper.asset.endpoint
+    } catch (err) {
+      this.fail(`Could not get asset endpoint: ${err}`)
+    }
+  }
+
+  // Loads and unlocks the runtime identity using the key file and pass phrase.
+  private async loadIdentity(): Promise<any> {
+    const noKeyFileProvided = !this.keyFile || this.keyFile === ''
+    const useAlice = noKeyFileProvided && (await this.api.system.isDevelopmentChain())
+
+    if (useAlice) {
+      debug("Discovered 'development' chain.")
+      return aliceKeyPair(this.api)
+    }
+
+    try {
+      await fs.promises.access(this.keyFile)
+    } catch (error) {
+      this.fail(`Cannot read file "${this.keyFile}".`)
+    }
+
+    return this.api.identities.loadUnlock(this.keyFile, this.passPhrase)
+  }
+
+  // Shows command usage. Overrides the abstract method from the base class.
+  protected showUsage() {
+    console.log(
+      chalk.yellow(`
+        Usage:       storage-cli upload mediaSourceFilePath dataObjectTypeId memberId [keyFilePath] [passPhrase]
+        Example:     storage-cli upload ./movie.mp4 1 1 ./keyFile.json secretPhrase
+        Development: storage-cli upload ./movie.mp4 1 0
+      `)
+    )
+  }
+
+  // Command executor.
+  async run() {
+    // Checks for input parameters, shows usage if they are invalid.
+    if (!this.assertParameters()) return
+
+    const addContentParams = await this.getAddContentParams()
+    debug(`AddContent Tx params: ${JSON.stringify(addContentParams)}`)
+    debug(`Decoded CID: ${addContentParams.contentId.toString()}`)
+
+    const dataObject = await this.createContent(addContentParams)
+    debug(`Received data object: ${dataObject.toString()}`)
+
+    const colossusEndpoint = await this.discoverStorageProviderEndpoint(dataObject.liaison.toString())
+    debug(`Discovered storage node endpoint: ${colossusEndpoint}`)
+
+    const assetUrl = this.createAndLogAssetUrl(colossusEndpoint, addContentParams.contentId)
+    await this.uploadFile(assetUrl)
+  }
+}

+ 0 - 0
storage-node/packages/cli/test/index.js → storage-node/packages/cli/src/test/index.ts


+ 11 - 0
storage-node/packages/cli/tsconfig.json

@@ -0,0 +1,11 @@
+{
+  "include": [
+    "src"
+  ],
+  "extends": "../../tsconfig.json",
+  "compilerOptions": {
+    "outDir": "dist",
+    "rootDir": "src",
+    "baseUrl": "."
+  }
+}

+ 8 - 8
storage-node/packages/colossus/bin/cli.js

@@ -29,14 +29,14 @@ const FLAG_DEFINITIONS = {
   },
   keyFile: {
     type: 'string',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
   publicUrl: {
     type: 'string',
     alias: 'u',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
@@ -50,7 +50,7 @@ const FLAG_DEFINITIONS = {
   providerId: {
     type: 'number',
     alias: 'i',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
@@ -122,7 +122,7 @@ function getStorage(runtimeApi) {
   const { Storage } = require('@joystream/storage-node-backend')
 
   const options = {
-    resolve_content_id: async contentId => {
+    resolve_content_id: async (contentId) => {
       // Resolve via API
       const obj = await runtimeApi.assets.getDataObject(contentId)
       if (!obj || obj.isNone) {
@@ -176,7 +176,7 @@ async function initApiDevelopment() {
     provider_url: wsProvider,
   })
 
-  const dev = require('../../cli/bin/dev')
+  const dev = require('../../cli/dist/commands/dev')
 
   api.identities.useKeyPair(dev.roleKeyPair(api))
 
@@ -201,7 +201,7 @@ function getServiceInformation(publicUrl) {
 
 async function announcePublicUrl(api, publicUrl) {
   // re-announce in future
-  const reannounce = function(timeoutMs) {
+  const reannounce = function (timeoutMs) {
     setTimeout(announcePublicUrl, timeoutMs, api, publicUrl)
   }
 
@@ -253,7 +253,7 @@ const commands = {
     let publicUrl, port, api
 
     if (cli.flags.dev) {
-      const dev = require('../../cli/bin/dev')
+      const dev = require('../../cli/dist/commands/dev')
       api = await initApiDevelopment()
       port = dev.developmentPort()
       publicUrl = `http://localhost:${port}/`
@@ -295,7 +295,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch(err => {
+  .catch((err) => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 3 - 1
storage-node/packages/colossus/lib/app.js

@@ -64,7 +64,9 @@ function createApp(projectRoot, storage, runtime) {
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res) {
+  // Disable lint because we need such function signature.
+  // eslint-disable-next-line no-unused-vars
+  app.use(function (err, req, res, next) {
     res.status(err.status).json(err)
   })
 

+ 1 - 1
storage-node/packages/colossus/lib/discovery.js

@@ -60,7 +60,7 @@ function createApp(projectRoot, runtime) {
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res) {
+  app.use(function (err, req, res) {
     res.status(err.status).json(err)
   })
 

+ 3 - 3
storage-node/packages/colossus/lib/middleware/file_uploads.js

@@ -21,8 +21,8 @@
 const multer = require('multer')
 
 // Taken from express-openapi examples
-module.exports = function(req, res, next) {
-  multer().any()(req, res, function(err) {
+module.exports = function (req, res, next) {
+  multer().any()(req, res, function (err) {
     if (err) {
       return next(err)
     }
@@ -34,7 +34,7 @@ module.exports = function(req, res, next) {
         }),
       {}
     )
-    Object.keys(filesMap).forEach(fieldname => {
+    Object.keys(filesMap).forEach((fieldname) => {
       const files = filesMap[fieldname]
       req.body[fieldname] = files.length > 1 ? files.map(() => '') : ''
     })

+ 2 - 2
storage-node/packages/colossus/lib/middleware/validate_responses.js

@@ -21,7 +21,7 @@
 const debug = require('debug')('joystream:middleware:validate')
 
 // Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
-module.exports = function(req, res, next) {
+module.exports = function (req, res, next) {
   const strictValidation = !!req.apiDoc['x-express-openapi-validation-strict']
   if (typeof res.validateResponse === 'function') {
     const send = res.send
@@ -42,7 +42,7 @@ module.exports = function(req, res, next) {
       }
       if (validation.errors) {
         const errorList = Array.from(validation.errors)
-          .map(_ => _.message)
+          .map((_) => _.message)
           .join(',')
         validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`
         debug(validationMessage)

+ 1 - 1
storage-node/packages/colossus/lib/sync.js

@@ -30,7 +30,7 @@ async function syncCallback(api, storage) {
   const providerId = api.storageProviderId
 
   // Iterate over all sync objects, and ensure they're synced.
-  const allChecks = knownContentIds.map(async contentId => {
+  const allChecks = knownContentIds.map(async (contentId) => {
     // eslint-disable-next-line prefer-const
     let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, contentId)
 

+ 4 - 4
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -30,7 +30,7 @@ function errorHandler(response, err, code) {
   response.status(err.code || code || 500).send({ message: err.toString() })
 }
 
-module.exports = function(storage, runtime) {
+module.exports = function (storage, runtime) {
   const doc = {
     // parameters for all operations in this path
     parameters: [
@@ -108,7 +108,7 @@ module.exports = function(storage, runtime) {
           }
         }
 
-        stream.on('fileInfo', async info => {
+        stream.on('fileInfo', async (info) => {
           try {
             debug('Detected file info:', info)
 
@@ -142,7 +142,7 @@ module.exports = function(storage, runtime) {
           }
         })
 
-        stream.on('committed', async hash => {
+        stream.on('committed', async (hash) => {
           console.log('commited', dataObject)
           try {
             if (hash !== dataObject.ipfs_content_id.toString()) {
@@ -170,7 +170,7 @@ module.exports = function(storage, runtime) {
           }
         })
 
-        stream.on('error', err => errorHandler(res, err))
+        stream.on('error', (err) => errorHandler(res, err))
         req.pipe(stream)
       } catch (err) {
         errorHandler(res, err)

+ 1 - 1
storage-node/packages/colossus/paths/discover/v0/{id}.js

@@ -4,7 +4,7 @@ const debug = require('debug')('joystream:colossus:api:discovery')
 const MAX_CACHE_AGE = 30 * 60 * 1000
 const USE_CACHE = true
 
-module.exports = function(runtime) {
+module.exports = function (runtime) {
   const doc = {
     // parameters for all operations in this path
     parameters: [

+ 36 - 36
storage-node/packages/discovery/discover.js

@@ -171,42 +171,6 @@ async function discoverOverLocalIpfsNode(storageProviderId, runtimeApi) {
   return JSON.parse(content)
 }
 
-/**
- * Cached discovery of storage provider service information. If useCachedValue is
- * set to true, will always return the cached result if found. New discovery will be triggered
- * if record is found to be stale. If a stale record is not desired (CACHE_TTL old) pass a non zero
- * value for maxCacheAge, which will force a new discovery and return the new resolved value.
- * This method in turn calls _discovery which handles concurrent discoveries and selects the appropriate
- * protocol to perform the query.
- * If the storage provider is not registered it will resolve to null
- * @param {number | BN | u64} storageProviderId - provider to discover
- * @param {RuntimeApi} runtimeApi - api instance to query the chain
- * @param {bool} useCachedValue - optionaly use chached queries
- * @param {number} maxCacheAge - maximum age of a cached query that triggers automatic re-discovery
- * @returns { Promise<object | null> } - the published service information
- */
-async function discover(storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
-  storageProviderId = new BN(storageProviderId)
-  const id = storageProviderId.toNumber()
-  const cached = accountInfoCache[id]
-
-  if (cached && useCachedValue) {
-    if (maxCacheAge > 0) {
-      // get latest value
-      if (Date.now() > cached.updated + maxCacheAge) {
-        return _discover(storageProviderId, runtimeApi)
-      }
-    }
-    // refresh if cache if stale, new value returned on next cached query
-    if (Date.now() > cached.updated + CACHE_TTL) {
-      _discover(storageProviderId, runtimeApi)
-    }
-    // return best known value
-    return cached.value
-  }
-  return _discover(storageProviderId, runtimeApi)
-}
-
 /**
  * Internal method that handles concurrent discoveries and caching of results. Will
  * select the appropriate discovery protocol based on whether we are in a browser environment or not.
@@ -264,6 +228,42 @@ async function _discover(storageProviderId, runtimeApi) {
   }
 }
 
+/**
+ * Cached discovery of storage provider service information. If useCachedValue is
+ * set to true, will always return the cached result if found. New discovery will be triggered
+ * if record is found to be stale. If a stale record is not desired (CACHE_TTL old) pass a non zero
+ * value for maxCacheAge, which will force a new discovery and return the new resolved value.
+ * This method in turn calls _discover which handles concurrent discoveries and selects the appropriate
+ * protocol to perform the query.
+ * If the storage provider is not registered it will resolve to null
+ * @param {number | BN | u64} storageProviderId - provider to discover
+ * @param {RuntimeApi} runtimeApi - api instance to query the chain
+ * @param {bool} useCachedValue - optionally use cached queries
+ * @param {number} maxCacheAge - maximum age of a cached query that triggers automatic re-discovery
+ * @returns { Promise<object | null> } - the published service information
+ */
+async function discover(storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
+  storageProviderId = new BN(storageProviderId)
+  const id = storageProviderId.toNumber()
+  const cached = accountInfoCache[id]
+
+  if (cached && useCachedValue) {
+    if (maxCacheAge > 0) {
+      // get latest value
+      if (Date.now() > cached.updated + maxCacheAge) {
+        return _discover(storageProviderId, runtimeApi)
+      }
+    }
+    // refresh cache if stale; new value returned on next cached query
+    if (Date.now() > cached.updated + CACHE_TTL) {
+      _discover(storageProviderId, runtimeApi)
+    }
+    // return best known value
+    return cached.value
+  }
+  return _discover(storageProviderId, runtimeApi)
+}
+
 module.exports = {
   discover,
   discoverOverJoystreamDiscoveryService,

+ 1 - 1
storage-node/packages/discovery/publish.js

@@ -42,7 +42,7 @@ function encodeServiceInfo(info) {
  */
 async function publish(serviceInfo) {
   const keys = await ipfs.key.list()
-  let servicesKey = keys.find(key => key.name === PUBLISH_KEY)
+  let servicesKey = keys.find((key) => key.name === PUBLISH_KEY)
 
   // An ipfs node will always have the self key.
   // If the publish key is specified as anything else and it doesn't exist

+ 72 - 72
storage-node/packages/helios/bin/cli.js

@@ -6,6 +6,74 @@ const { discover } = require('@joystream/service-discovery')
 const axios = require('axios')
 const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
 
+function mapInfoToStatus(providers, currentHeight) {
+  return providers.map(({ providerId, info }) => {
+    if (info) {
+      return {
+        providerId,
+        identity: info.identity.toString(),
+        expiresIn: info.expires_at.sub(currentHeight).toNumber(),
+        expired: currentHeight.gte(info.expires_at),
+      }
+    }
+    return {
+      providerId,
+      identity: null,
+      status: 'down',
+    }
+  })
+}
+
+function makeAssetUrl(contentId, source) {
+  source = stripEndingSlash(source)
+  return `${source}/asset/v0/${encodeAddress(contentId)}`
+}
+
+async function assetRelationshipState(api, contentId, providers) {
+  const dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
+
+  const relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
+
+  // how many relationships associated with active providers and in ready state
+  const activeRelationships = await Promise.all(
+    relationshipIds.map(async (id) => {
+      let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
+      relationship = relationship.unwrap()
+      // only interested in ready relationships
+      if (!relationship.ready) {
+        return undefined
+      }
+      // Does the relationship belong to an active provider ?
+      return providers.find((provider) => relationship.storage_provider.eq(provider))
+    })
+  )
+
+  return [activeRelationships.filter((active) => active).length, dataObject.unwrap().liaison_judgement]
+}
+
+// HTTP HEAD with axios all known content ids on each provider
+async function countContentAvailability(contentIds, source) {
+  const content = {}
+  let found = 0
+  let missing = 0
+  for (let i = 0; i < contentIds.length; i++) {
+    const assetUrl = makeAssetUrl(contentIds[i], source)
+    try {
+      const info = await axios.head(assetUrl)
+      content[encodeAddress(contentIds[i])] = {
+        type: info.headers['content-type'],
+        bytes: info.headers['content-length'],
+      }
+      // TODO: cross check against dataobject size
+      found++
+    } catch (err) {
+      missing++
+    }
+  }
+
+  return { found, missing, content }
+}
+
 async function main() {
   const runtime = await RuntimeApi.create()
   const { api } = runtime
@@ -19,7 +87,7 @@ async function main() {
   console.log(`Found ${storageProviders.length} staked providers`)
 
   const storageProviderAccountInfos = await Promise.all(
-    storageProviders.map(async providerId => {
+    storageProviders.map(async (providerId) => {
       return {
         providerId,
         info: await runtime.discovery.getAccountInfo(providerId),
@@ -49,7 +117,7 @@ async function main() {
 
   console.log(
     '\n== Down Providers!\n',
-    downProviders.map(provider => {
+    downProviders.map((provider) => {
       return {
         providerId: provider.providerId,
       }
@@ -80,7 +148,7 @@ async function main() {
 
   console.log('\nChecking API Endpoints are online')
   await Promise.all(
-    endpoints.map(async provider => {
+    endpoints.map(async (provider) => {
       if (!provider.endpoint) {
         console.log('skipping', provider.address)
         return
@@ -103,7 +171,7 @@ async function main() {
 
   // Check which providers are reporting a ready relationship for each asset
   await Promise.all(
-    knownContentIds.map(async contentId => {
+    knownContentIds.map(async (contentId) => {
       const [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
       console.log(
         `${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`
@@ -127,72 +195,4 @@ async function main() {
   })
 }
 
-function mapInfoToStatus(providers, currentHeight) {
-  return providers.map(({ providerId, info }) => {
-    if (info) {
-      return {
-        providerId,
-        identity: info.identity.toString(),
-        expiresIn: info.expires_at.sub(currentHeight).toNumber(),
-        expired: currentHeight.gte(info.expires_at),
-      }
-    }
-    return {
-      providerId,
-      identity: null,
-      status: 'down',
-    }
-  })
-}
-
-// HTTP HEAD with axios all known content ids on each provider
-async function countContentAvailability(contentIds, source) {
-  const content = {}
-  let found = 0
-  let missing = 0
-  for (let i = 0; i < contentIds.length; i++) {
-    const assetUrl = makeAssetUrl(contentIds[i], source)
-    try {
-      const info = await axios.head(assetUrl)
-      content[encodeAddress(contentIds[i])] = {
-        type: info.headers['content-type'],
-        bytes: info.headers['content-length'],
-      }
-      // TODO: cross check against dataobject size
-      found++
-    } catch (err) {
-      missing++
-    }
-  }
-
-  return { found, missing, content }
-}
-
-function makeAssetUrl(contentId, source) {
-  source = stripEndingSlash(source)
-  return `${source}/asset/v0/${encodeAddress(contentId)}`
-}
-
-async function assetRelationshipState(api, contentId, providers) {
-  const dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
-
-  const relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
-
-  // how many relationships associated with active providers and in ready state
-  const activeRelationships = await Promise.all(
-    relationshipIds.map(async id => {
-      let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
-      relationship = relationship.unwrap()
-      // only interested in ready relationships
-      if (!relationship.ready) {
-        return undefined
-      }
-      // Does the relationship belong to an active provider ?
-      return providers.find(provider => relationship.storage_provider.eq(provider))
-    })
-  )
-
-  return [activeRelationships.filter(active => active).length, dataObject.unwrap().liaison_judgement]
-}
-
 main()

+ 2 - 2
storage-node/packages/runtime-api/assets.js

@@ -134,8 +134,8 @@ class AssetsApi {
     // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, events => {
-          events.forEach(event => {
+        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, (events) => {
+          events.forEach((event) => {
             resolve(event[1].DataObjectStorageRelationshipId)
           })
         })

+ 10 - 8
storage-node/packages/runtime-api/index.js

@@ -28,6 +28,7 @@ const { BalancesApi } = require('@joystream/storage-runtime-api/balances')
 const { WorkersApi } = require('@joystream/storage-runtime-api/workers')
 const { AssetsApi } = require('@joystream/storage-runtime-api/assets')
 const { DiscoveryApi } = require('@joystream/storage-runtime-api/discovery')
+const { SystemApi } = require('@joystream/storage-runtime-api/system')
 const AsyncLock = require('async-lock')
 const { newExternallyControlledPromise } = require('@joystream/storage-utils/externalPromise')
 
@@ -72,6 +73,7 @@ class RuntimeApi {
     this.workers = await WorkersApi.create(this)
     this.assets = await AssetsApi.create(this)
     this.discovery = await DiscoveryApi.create(this)
+    this.system = await SystemApi.create(this)
   }
 
   disconnect() {
@@ -96,7 +98,7 @@ class RuntimeApi {
   static matchingEvents(subscribed, events) {
     debug(`Number of events: ${events.length} subscribed to ${subscribed}`)
 
-    const filtered = events.filter(record => {
+    const filtered = events.filter((record) => {
       const { event, phase } = record
 
       // Show what we are busy with
@@ -104,14 +106,14 @@ class RuntimeApi {
       debug(`\t\t${event.meta.documentation.toString()}`)
 
       // Skip events we're not interested in.
-      const matching = subscribed.filter(value => {
+      const matching = subscribed.filter((value) => {
         return event.section === value[0] && event.method === value[1]
       })
       return matching.length > 0
     })
     debug(`Filtered: ${filtered.length}`)
 
-    const mapped = filtered.map(record => {
+    const mapped = filtered.map((record) => {
       const { event } = record
       const types = event.typeDef
 
@@ -138,8 +140,8 @@ class RuntimeApi {
    * Returns the first matched event *only*.
    */
   async waitForEvents(subscribed) {
-    return new Promise(resolve => {
-      this.api.query.system.events(events => {
+    return new Promise((resolve) => {
+      this.api.query.system.events((events) => {
         const matches = RuntimeApi.matchingEvents(subscribed, events)
         if (matches && matches.length) {
           resolve(matches)
@@ -243,7 +245,7 @@ class RuntimeApi {
             isInvalid
             */
           })
-          .catch(err => {
+          .catch((err) => {
             // 1014 error: Most likely you are sending transaction with the same nonce,
             // so it assumes you want to replace existing one, but the priority is too low to replace it (priority = fee = len(encoded_transaction) currently)
             // Remember this can also happen if in the past we sent a tx with a future nonce, and the current nonce
@@ -290,8 +292,8 @@ class RuntimeApi {
     // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.signAndSend(senderAccountId, tx, 1, subscribed, events => {
-          events.forEach(event => {
+        await this.signAndSend(senderAccountId, tx, 1, subscribed, (events) => {
+          events.forEach((event) => {
             // fix - we may not necessarily want the first event
             // if there are multiple events emitted,
             resolve(event[1][eventProperty])

+ 33 - 0
storage-node/packages/runtime-api/system.js

@@ -0,0 +1,33 @@
+'use strict'
+
+const debug = require('debug')('joystream:runtime:system')
+
+/*
+ * Add system functionality to the substrate API.
+ */
+class SystemApi {
+  static async create(base) {
+    const ret = new SystemApi()
+    ret.base = base
+    await SystemApi.init()
+    return ret
+  }
+
+  static async init() {
+    debug('Init')
+  }
+
+  /*
+   * Check the running chain for the development setup.
+   */
+  async isDevelopmentChain() {
+    const developmentChainName = 'Development'
+    const runningChainName = await this.base.api.rpc.system.chain()
+
+    return runningChainName.toString() === developmentChainName
+  }
+}
+
+module.exports = {
+  SystemApi,
+}

+ 28 - 28
storage-node/packages/runtime-api/workers.js

@@ -22,6 +22,34 @@ const debug = require('debug')('joystream:runtime:roles')
 const BN = require('bn.js')
 const { Worker } = require('@joystream/types/working-group')
 
+/*
+ * Finds assigned worker id corresponding to the application id from the resulting
+ * ApplicationIdToWorkerIdMap map in the OpeningFilled event. Expects map to
+ * contain at least one entry.
+ */
+function getWorkerIdFromApplicationIdToWorkerIdMap(filledMap, applicationId) {
+  if (filledMap.size === 0) {
+    throw new Error('Expected opening to be filled!')
+  }
+
+  let ourApplicationIdKey
+
+  for (const key of filledMap.keys()) {
+    if (key.eq(applicationId)) {
+      ourApplicationIdKey = key
+      break
+    }
+  }
+
+  if (!ourApplicationIdKey) {
+    throw new Error('Expected application id to have been filled!')
+  }
+
+  const workerId = filledMap.get(ourApplicationIdKey)
+
+  return workerId
+}
+
 /*
  * Add worker related functionality to the substrate API.
  */
@@ -266,34 +294,6 @@ class WorkersApi {
   }
 }
 
-/*
- * Finds assigned worker id corresponding to the application id from the resulting
- * ApplicationIdToWorkerIdMap map in the OpeningFilled event. Expects map to
- * contain at least one entry.
- */
-function getWorkerIdFromApplicationIdToWorkerIdMap(filledMap, applicationId) {
-  if (filledMap.size === 0) {
-    throw new Error('Expected opening to be filled!')
-  }
-
-  let ourApplicationIdKey
-
-  for (const key of filledMap.keys()) {
-    if (key.eq(applicationId)) {
-      ourApplicationIdKey = key
-      break
-    }
-  }
-
-  if (!ourApplicationIdKey) {
-    throw new Error('Expected application id to have been filled!')
-  }
-
-  const workerId = filledMap.get(ourApplicationIdKey)
-
-  return workerId
-}
-
 module.exports = {
   WorkersApi,
 }

+ 2 - 1
storage-node/packages/storage/package.json

@@ -34,7 +34,8 @@
   },
   "scripts": {
     "test": "mocha --exit 'test/**/*.js'",
-    "lint": "eslint '**/*.js' --ignore-pattern 'test/**/*.js'"
+    "lint": "eslint '**/*.js' --ignore-pattern 'test/**/*.js'",
+    "checks": "yarn lint && prettier ./ --check && tsc --noEmit --pretty"
   },
   "devDependencies": {
     "chai": "^4.2.0",

+ 9 - 9
storage-node/packages/storage/storage.js

@@ -39,7 +39,7 @@ const _ = require('lodash')
 const DEFAULT_TIMEOUT = 30 * 1000
 
 // Default/dummy resolution implementation.
-const DEFAULT_RESOLVE_CONTENT_ID = async original => {
+const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
   debug('Warning: Default resolution returns original CID', original)
   return original
 }
@@ -127,11 +127,11 @@ class StorageWriteStream extends Transform {
       const read = fs.createReadStream(this.temp.path)
       fileType
         .stream(read)
-        .then(stream => {
+        .then((stream) => {
           this.fileInfo = fixFileInfoOnStream(stream).fileInfo
           this.emit('fileInfo', this.fileInfo)
         })
-        .catch(err => {
+        .catch((err) => {
           debug('Error trying to detect file type at end-of-stream:', err)
         })
     }
@@ -151,13 +151,13 @@ class StorageWriteStream extends Transform {
     debug('Committing temporary stream: ', this.temp.path)
     this.storage.ipfs
       .addFromFs(this.temp.path)
-      .then(async result => {
+      .then(async (result) => {
         const hash = result[0].hash
         debug('Stream committed as', hash)
         this.emit('committed', hash)
         await this.storage.ipfs.pin.add(hash)
       })
-      .catch(err => {
+      .catch((err) => {
         debug('Error committing stream', err)
         this.emit('error', err)
       })
@@ -324,7 +324,7 @@ class Storage {
     // content ID (of its own).
     // We need to instead return a stream immediately, that we eventually
     // decorate with the content ID when that's available.
-    return new Promise(resolve => {
+    return new Promise((resolve) => {
       const stream = new StorageWriteStream(this)
       resolve(stream)
     })
@@ -336,7 +336,7 @@ class Storage {
     let found = false
     return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
       const ls = this.ipfs.getReadableStream(resolved)
-      ls.on('data', async result => {
+      ls.on('data', async (result) => {
         if (result.path === resolved) {
           found = true
 
@@ -344,7 +344,7 @@ class Storage {
           resolve(fixFileInfoOnStream(ftStream))
         }
       })
-      ls.on('error', err => {
+      ls.on('error', (err) => {
         ls.end()
         debug(err)
         reject(err)
@@ -375,7 +375,7 @@ class Storage {
     debug(`Pinning ${resolved}`)
 
     // This call blocks until file is retrieved..
-    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, err => {
+    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, (err) => {
       if (err) {
         debug(`Error Pinning: ${resolved}`)
         delete this.pins[resolved]

+ 21 - 21
storage-node/packages/storage/test/storage.js

@@ -33,7 +33,7 @@ const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/
 function write(store, contentId, contents, callback) {
   store
     .open(contentId, 'w')
-    .then(stream => {
+    .then((stream) => {
       stream.on('finish', () => {
         stream.commit()
       })
@@ -45,7 +45,7 @@ function write(store, contentId, contents, callback) {
         process.nextTick(() => stream.end())
       }
     })
-    .catch(err => {
+    .catch((err) => {
       expect.fail(err)
     })
 }
@@ -53,9 +53,9 @@ function write(store, contentId, contents, callback) {
 function readAll(stream) {
   return new Promise((resolve, reject) => {
     const chunks = []
-    stream.on('data', chunk => chunks.push(chunk))
+    stream.on('data', (chunk) => chunks.push(chunk))
     stream.on('end', () => resolve(Buffer.concat(chunks)))
-    stream.on('error', err => reject(err))
+    stream.on('error', (err) => reject(err))
     stream.resume()
   })
 }
@@ -68,7 +68,7 @@ function createKnownObject(contentId, contents, callback) {
     },
   })
 
-  write(store, contentId, contents, theHash => {
+  write(store, contentId, contents, (theHash) => {
     hash = theHash
 
     callback(store, hash)
@@ -82,8 +82,8 @@ describe('storage/storage', () => {
   })
 
   describe('open()', () => {
-    it('can write a stream', done => {
-      write(storage, 'foobar', 'test-content', hash => {
+    it('can write a stream', (done) => {
+      write(storage, 'foobar', 'test-content', (hash) => {
         expect(hash).to.not.be.undefined
         expect(hash).to.match(IPFS_CID_REGEX)
         done()
@@ -124,28 +124,28 @@ describe('storage/storage', () => {
     // 		})
     // })
 
-    it('can read a stream', done => {
+    it('can read a stream', (done) => {
       const contents = 'test-for-reading'
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
     })
 
-    it('detects the MIME type of a read stream', done => {
+    it('detects the MIME type of a read stream', (done) => {
       const contents = fs.readFileSync('../../storage-node_new.svg')
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(contents.length).to.equal(data.length)
             expect(Buffer.compare(data, contents)).to.equal(0)
@@ -156,18 +156,18 @@ describe('storage/storage', () => {
             expect(stream.fileInfo).to.have.property('ext', 'xml')
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
     })
 
-    it('provides default MIME type for read streams', done => {
+    it('provides default MIME type for read streams', (done) => {
       const contents = 'test-for-reading'
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
 
@@ -175,7 +175,7 @@ describe('storage/storage', () => {
             expect(stream.fileInfo).to.have.property('ext', 'bin')
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
@@ -192,7 +192,7 @@ describe('storage/storage', () => {
       expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out')
     })
 
-    it('returns stats for a known object', done => {
+    it('returns stats for a known object', (done) => {
       const content = 'stat-test'
       const expectedSize = content.length
       createKnownObject('foobar', content, (store, hash) => {
@@ -212,7 +212,7 @@ describe('storage/storage', () => {
       expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out')
     })
 
-    it('returns the size of a known object', done => {
+    it('returns the size of a known object', (done) => {
       createKnownObject('foobar', 'stat-test', (store, hash) => {
         expect(store.size(hash)).to.eventually.equal(15)
         done()

+ 2 - 0
storage-node/packages/util/externalPromise.js

@@ -7,6 +7,8 @@
 function newExternallyControlledPromise() {
   let resolve, reject
 
+  // Disable lint until the migration to TypeScript.
+  // eslint-disable-next-line promise/param-names
   const promise = new Promise((res, rej) => {
     resolve = res
     reject = rej

+ 2 - 2
storage-node/packages/util/fs/walk.js

@@ -83,7 +83,7 @@ class Walker {
       this.pending += files.length
       debug('Reading', dir, 'bumps pending to', this.pending)
 
-      files.forEach(name => {
+      files.forEach((name) => {
         const fname = path.resolve(dir, name)
         this.archive.lstat(fname, (err2, lstat) => {
           if (err2) {
@@ -126,7 +126,7 @@ class Walker {
  *
  * The callback is invoked one last time without data to signal the end of data.
  */
-module.exports = function(base, archive, cb) {
+module.exports = function (base, archive, cb) {
   // Archive is optional and defaults to fs, but cb is not.
   if (!cb) {
     cb = archive

+ 1 - 1
storage-node/packages/util/lru.js

@@ -104,7 +104,7 @@ class LRUCache {
       ++idx
     }
 
-    toPrune.forEach(key => {
+    toPrune.forEach((key) => {
       this.store.delete(key)
       this.access.delete(key)
     })

+ 2 - 0
storage-node/packages/util/pagination.js

@@ -116,6 +116,8 @@ module.exports = {
 
     // Parse current url
     const url = require('url')
+    // Disable lint because the code (and tests) relied upon obsolete UrlObject. Remove after migration to TypeScript.
+    // eslint-disable-next-line node/no-deprecated-api
     const reqUrl = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl)
     const params = new url.URLSearchParams(reqUrl.query)
 

+ 3 - 3
storage-node/packages/util/ranges.js

@@ -37,7 +37,7 @@ function parseRange(range) {
     throw new Error(`Not a valid range: ${range}`)
   }
 
-  const vals = matches[1].split('-').map(v => {
+  const vals = matches[1].split('-').map((v) => {
     return v === '*' || v === '' ? undefined : parseInt(v, 10)
   })
 
@@ -68,12 +68,12 @@ function parse(rangeStr) {
 
   // Parse individual ranges
   const ranges = []
-  res.rangeStr.split(',').forEach(range => {
+  res.rangeStr.split(',').forEach((range) => {
     ranges.push(parseRange(range))
   })
 
   // Merge ranges into result.
-  ranges.forEach(newRange => {
+  ranges.forEach((newRange) => {
     debug('Found range:', newRange)
 
     let isMerged = false

+ 10 - 10
storage-node/packages/util/test/fs/resolve.js

@@ -24,45 +24,45 @@ const path = require('path')
 const resolve = require('@joystream/storage-utils/fs/resolve')
 
 function tests(base) {
-  it('resolves absolute paths relative to the base', function() {
+  it('resolves absolute paths relative to the base', function () {
     const resolved = resolve(base, '/foo')
     const relative = path.relative(base, resolved)
     expect(relative).to.equal('foo')
   })
 
-  it('allows for relative paths that stay in the base', function() {
+  it('allows for relative paths that stay in the base', function () {
     const resolved = resolve(base, 'foo/../bar')
     const relative = path.relative(base, resolved)
     expect(relative).to.equal('bar')
   })
 
-  it('prevents relative paths from breaking out of the base', function() {
+  it('prevents relative paths from breaking out of the base', function () {
     expect(() => resolve(base, '../foo')).to.throw()
   })
 
-  it('prevents long relative paths from breaking out of the base', function() {
+  it('prevents long relative paths from breaking out of the base', function () {
     expect(() => resolve(base, '../../../foo')).to.throw()
   })
 
-  it('prevents sneaky relative paths from breaking out of the base', function() {
+  it('prevents sneaky relative paths from breaking out of the base', function () {
     expect(() => resolve(base, 'foo/../../../bar')).to.throw()
   })
 }
 
-describe('util/fs/resolve', function() {
-  describe('slash base', function() {
+describe('util/fs/resolve', function () {
+  describe('slash base', function () {
     tests('/')
   })
 
-  describe('empty base', function() {
+  describe('empty base', function () {
     tests('')
   })
 
-  describe('short base', function() {
+  describe('short base', function () {
     tests('/base')
   })
 
-  describe('long base', function() {
+  describe('long base', function () {
     tests('/this/base/is/very/long/indeed')
   })
 })

+ 2 - 2
storage-node/packages/util/test/fs/walk.js

@@ -60,8 +60,8 @@ function walktest(archive, base, done) {
   })
 }
 
-describe('util/fs/walk', function() {
-  it('reports all files in a file system hierarchy', function(done) {
+describe('util/fs/walk', function () {
+  it('reports all files in a file system hierarchy', function (done) {
     walktest(fs, path.resolve(__dirname, '../data'), done)
   })
 })

+ 11 - 11
storage-node/packages/util/test/lru.js

@@ -24,14 +24,14 @@ const lru = require('@joystream/storage-utils/lru')
 
 const DEFAULT_SLEEP = 1
 function sleep(ms = DEFAULT_SLEEP) {
-  return new Promise(resolve => {
+  return new Promise((resolve) => {
     setTimeout(resolve, ms)
   })
 }
 
-describe('util/lru', function() {
-  describe('simple usage', function() {
-    it('does not contain keys that were not added', function() {
+describe('util/lru', function () {
+  describe('simple usage', function () {
+    it('does not contain keys that were not added', function () {
       const cache = new lru.LRUCache()
       expect(cache.size()).to.equal(0)
 
@@ -41,7 +41,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.false
     })
 
-    it('contains keys that were added', function() {
+    it('contains keys that were added', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -52,7 +52,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.true
     })
 
-    it('does not contain keys that were deleted', function() {
+    it('does not contain keys that were deleted', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -67,7 +67,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.false
     })
 
-    it('can be cleared', function() {
+    it('can be cleared', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -77,8 +77,8 @@ describe('util/lru', function() {
     })
   })
 
-  describe('capacity management', function() {
-    it('does not grow beyond capacity', async function() {
+  describe('capacity management', function () {
+    it('does not grow beyond capacity', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 
@@ -96,7 +96,7 @@ describe('util/lru', function() {
       expect(cache.size()).to.equal(2) // Capacity exceeded
     })
 
-    it('removes the oldest key when pruning', async function() {
+    it('removes the oldest key when pruning', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 
@@ -119,7 +119,7 @@ describe('util/lru', function() {
       expect(cache.has('baz')).to.be.true
     })
 
-    it('updates LRU timestamp when reading', async function() {
+    it('updates LRU timestamp when reading', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 

+ 8 - 12
storage-node/packages/util/test/pagination.js

@@ -23,9 +23,9 @@ const mockHttp = require('node-mocks-http')
 
 const pagination = require('@joystream/storage-utils/pagination')
 
-describe('util/pagination', function() {
-  describe('openapi()', function() {
-    it('should add parameters and definitions to an API spec', function() {
+describe('util/pagination', function () {
+  describe('openapi()', function () {
+    it('should add parameters and definitions to an API spec', function () {
       const api = pagination.openapi({})
 
       // Parameters
@@ -62,8 +62,8 @@ describe('util/pagination', function() {
     })
   })
 
-  describe('paginate()', function() {
-    it('should add pagination links to a response object', function() {
+  describe('paginate()', function () {
+    it('should add pagination links to a response object', function () {
       const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10',
@@ -78,16 +78,14 @@ describe('util/pagination', function() {
 
       const res = pagination.paginate(req, {})
 
-      expect(res)
-        .to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next')
+      expect(res).to.have.property('pagination').that.has.all.keys('self', 'first', 'next')
 
       expect(res.pagination.self).to.equal('http://localhost/foo?limit=10')
       expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
       expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10')
     })
 
-    it('should add a last pagination link when requested', function() {
+    it('should add a last pagination link when requested', function () {
       const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10&offset=15',
@@ -103,9 +101,7 @@ describe('util/pagination', function() {
 
       const res = pagination.paginate(req, {}, 35)
 
-      expect(res)
-        .to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next', 'prev', 'last')
+      expect(res).to.have.property('pagination').that.has.all.keys('self', 'first', 'next', 'prev', 'last')
 
       expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15')
       expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')

+ 25 - 25
storage-node/packages/util/test/ranges.js

@@ -24,9 +24,9 @@ const streamBuffers = require('stream-buffers')
 
 const ranges = require('@joystream/storage-utils/ranges')
 
-describe('util/ranges', function() {
-  describe('parse()', function() {
-    it('should parse a full range', function() {
+describe('util/ranges', function () {
+  describe('parse()', function () {
+    it('should parse a full range', function () {
       // Range with unit
       let range = ranges.parse('bytes=0-100')
       expect(range.unit).to.equal('bytes')
@@ -50,14 +50,14 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(100)
     })
 
-    it('should error out on malformed strings', function() {
+    it('should error out on malformed strings', function () {
       expect(() => ranges.parse('foo')).to.throw()
       expect(() => ranges.parse('foo=bar')).to.throw()
       expect(() => ranges.parse('foo=100')).to.throw()
       expect(() => ranges.parse('foo=100-0')).to.throw()
     })
 
-    it('should parse a range without end', function() {
+    it('should parse a range without end', function () {
       const range = ranges.parse('0-')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('0-')
@@ -65,7 +65,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.be.undefined
     })
 
-    it('should parse a range without start', function() {
+    it('should parse a range without start', function () {
       const range = ranges.parse('-100')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('-100')
@@ -73,7 +73,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(100)
     })
 
-    it('should parse multiple ranges', function() {
+    it('should parse multiple ranges', function () {
       const range = ranges.parse('0-10,30-40,60-80')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('0-10,30-40,60-80')
@@ -85,7 +85,7 @@ describe('util/ranges', function() {
       expect(range.ranges[2][1]).to.equal(80)
     })
 
-    it('should merge overlapping ranges', function() {
+    it('should merge overlapping ranges', function () {
       // Two overlapping ranges
       let range = ranges.parse('0-20,10-30')
       expect(range.unit).to.equal('bytes')
@@ -119,7 +119,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(20)
     })
 
-    it('should sort ranges', function() {
+    it('should sort ranges', function () {
       const range = ranges.parse('10-30,0-5')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('10-30,0-5')
@@ -131,8 +131,8 @@ describe('util/ranges', function() {
     })
   })
 
-  describe('send()', function() {
-    it('should send full files on request', function(done) {
+  describe('send()', function () {
+    it('should send full files on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -141,7 +141,7 @@ describe('util/ranges', function() {
         name: 'test.file',
         type: 'application/test',
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -163,7 +163,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send a range spanning the entire file on request', function(done) {
+    it('should send a range spanning the entire file on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -175,7 +175,7 @@ describe('util/ranges', function() {
           ranges: [[0, 12]],
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -199,7 +199,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send a small range on request', function(done) {
+    it('should send a small range on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -211,7 +211,7 @@ describe('util/ranges', function() {
           ranges: [[1, 11]], // Cut off first and last letter
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -235,7 +235,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send ranges crossing buffer boundaries', function(done) {
+    it('should send ranges crossing buffer boundaries', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({
         chunkSize: 3, // Setting a chunk size smaller than the range should
@@ -250,7 +250,7 @@ describe('util/ranges', function() {
           ranges: [[1, 11]], // Cut off first and last letter
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -274,7 +274,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send multiple ranges', function(done) {
+    it('should send multiple ranges', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -289,12 +289,12 @@ describe('util/ranges', function() {
           ], // Slice two ranges out
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
         expect(res.statusCode).to.equal(206)
-        expect(res.getHeader('content-type')).to.satisfy(str => str.startsWith('multipart/byteranges'))
+        expect(res.getHeader('content-type')).to.satisfy((str) => str.startsWith('multipart/byteranges'))
         expect(res.getHeader('content-disposition')).to.equal('inline')
 
         // Data/stream handling
@@ -320,7 +320,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should deal with ranges without end', function(done) {
+    it('should deal with ranges without end', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -332,7 +332,7 @@ describe('util/ranges', function() {
           ranges: [[5, undefined]], // Skip the first part, but read until end
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -355,7 +355,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should ignore ranges without start', function(done) {
+    it('should ignore ranges without start', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -367,7 +367,7 @@ describe('util/ranges', function() {
           ranges: [[undefined, 5]], // Only last five
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling

+ 3 - 3
storage-node/packages/util/test/stripEndingSlash.js

@@ -3,11 +3,11 @@
 const expect = require('chai').expect
 const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
 
-describe('stripEndingSlash', function() {
-  it('stripEndingSlash should keep URL without the slash', function() {
+describe('stripEndingSlash', function () {
+  it('stripEndingSlash should keep URL without the slash', function () {
     expect(stripEndingSlash('http://keep.one')).to.equal('http://keep.one')
   })
-  it('stripEndingSlash should remove ending slash', function() {
+  it('stripEndingSlash should remove ending slash', function () {
     expect(stripEndingSlash('http://strip.one/')).to.equal('http://strip.one')
   })
 })

+ 23 - 0
storage-node/tsconfig.json

@@ -0,0 +1,23 @@
+{
+  "compilerOptions": {
+    "composite": true,
+    "rootDir": "./packages/",
+    "outDir": "./build",
+    "allowJs": true,
+    "target": "es2017",
+    "module": "commonjs",
+    "esModuleInterop": true,
+    "baseUrl": ".",
+    "skipLibCheck": true,
+    "types" : [ "node", "mocha" ]
+  },
+  "files": [],
+  "exclude": [
+    "**/node_modules/*",
+    "build"
+  ],
+  "references": [
+    { "path": "packages/cli" }
+ //   { "path": "packages/storage" }
+  ]
+}

+ 3 - 3
yarn.lock

@@ -3938,7 +3938,7 @@
   dependencies:
     "@types/node" "*"
 
-"@types/mocha@*":
+"@types/mocha@*", "@types/mocha@^7.0.2":
   version "7.0.2"
   resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-7.0.2.tgz#b17f16cf933597e10d6d78eae3251e692ce8b0ce"
   integrity sha512-ZvO2tAcjmMi8V/5Z3JsyofMe3hasRcaw88cto5etSVMwVQfeivGAlEYmaQgceUSVYFofVjT+ioHsATjdWcFt1w==
@@ -5600,7 +5600,7 @@ axios@^0.18.0:
     follow-redirects "1.5.10"
     is-buffer "^2.0.2"
 
-axios@^0.19.0:
+axios@^0.19.0, axios@^0.19.2:
   version "0.19.2"
   resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.2.tgz#3ea36c5d8818d0d5f8a8a97a6d36b86cdc00cb27"
   integrity sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==
@@ -22762,7 +22762,7 @@ typescript-formatter@^7.2.2:
     commandpost "^1.0.0"
     editorconfig "^0.15.0"
 
-typescript@3.7.2, typescript@3.7.x, typescript@^3.0.3, typescript@^3.6.4, typescript@^3.7.2, typescript@^3.8.3:
+typescript@3.7.2, typescript@3.7.x, typescript@^3.0.3, typescript@^3.6.4, typescript@^3.7.2, typescript@^3.8.3, typescript@^3.9.6:
   version "3.7.2"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.7.2.tgz#27e489b95fa5909445e9fef5ee48d81697ad18fb"
   integrity sha512-ml7V7JfiN2Xwvcer+XAf2csGO1bPBdRbFCkYBczNZggrBZ9c7G3riSUeJmqEU5uOtXNPMhE3n+R4FA/3YOAWOQ==