
storage-node: Fix linter warnings.

Shamil Gadelshin 4 years ago
parent
commit
0ce9c88298

+ 7 - 1
storage-node/.eslintrc.js

@@ -22,7 +22,13 @@ module.exports = {
 	},
 	"overrides": [
 		{
-			"files": ["**/test/ranges.js", "**/test/lru.js", "**/test/fs/walk.js"],
+			"files": [
+				"**/test/ranges.js",
+				"**/test/lru.js",
+				"**/test/fs/walk.js",
+				"**/test/storage.js",
+				"**/test/identities.js",
+			],
 			"rules": {
 				// Disabling Rules because of used chai lib:
 				// https://stackoverflow.com/questions/45079454/no-unused-expressions-in-mocha-chai-unit-test-using-standardjs
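
For context on these overrides: chai's property-style assertions such as .to.be.true are bare property accesses with no function call, which is exactly what no-unused-expressions flags. A minimal, hypothetical mocha test showing the warning the override suppresses for the newly listed test files:

const { expect } = require('chai')

describe('example', () => {
	it('reads as an unused expression to ESLint', () => {
		expect(1 + 1).to.equal(2) // a real call: not flagged
		expect(true).to.be.true // property-access assertion: flagged without the override
	})
})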

+ 8 - 8
storage-node/packages/cli/bin/dev.js

@@ -90,10 +90,10 @@ const init = async (api) => {
 
 	// Make alice the storage lead
 	debug('Making Alice the storage Lead')
-	const leadOpeningId = await api.workers.dev_addStorageLeadOpening()
-	const leadApplicationId = await api.workers.dev_applyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
-	api.workers.dev_beginLeadOpeningReview(leadOpeningId)
-	await api.workers.dev_fillLeadOpening(leadOpeningId, leadApplicationId)
+	const leadOpeningId = await api.workers.devAddStorageLeadOpening()
+	const leadApplicationId = await api.workers.devApplyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
+	api.workers.devBeginLeadOpeningReview(leadOpeningId)
+	await api.workers.devFillLeadOpening(leadOpeningId, leadApplicationId)
 
 	const leadAccount = await api.workers.getLeadRoleAccount()
 	if (!leadAccount.eq(alice)) {
@@ -103,16 +103,16 @@ const init = async (api) => {
 	// Create a storage opening, apply, start review, and fill opening
 	debug(`Making ${ROLE_ACCOUNT_URI} account a storage provider`)
 
-	const openingId = await api.workers.dev_addStorageOpening()
+	const openingId = await api.workers.devAddStorageOpening()
 	debug(`created new storage opening: ${openingId}`)
 
-	const applicationId = await api.workers.dev_applyOnOpening(openingId, aliceMemberId, alice, roleAccount)
+	const applicationId = await api.workers.devApplyOnOpening(openingId, aliceMemberId, alice, roleAccount)
 	debug(`applied with application id: ${applicationId}`)
 
-	api.workers.dev_beginStorageOpeningReview(openingId)
+	api.workers.devBeginStorageOpeningReview(openingId)
 
 	debug(`filling storage opening`)
-	const providerId = await api.workers.dev_fillStorageOpening(openingId, applicationId)
+	const providerId = await api.workers.devFillStorageOpening(openingId, applicationId)
 
 	debug(`Assigned storage provider id: ${providerId}`)
 

+ 5 - 5
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -53,7 +53,7 @@ module.exports = function (storage, runtime) {
 			try {
 				const size = await storage.size(id)
 				const stream = await storage.open(id, 'r')
-				const type = stream.file_info.mime_type
+				const type = stream.fileInfo.mimeType
 
 				// Close the stream; we don't need to fetch the file (if we haven't
 				// already). Then return result.
@@ -108,12 +108,12 @@ module.exports = function (storage, runtime) {
 					}
 				}
 
-				stream.on('file_info', async (info) => {
+				stream.on('fileInfo', async (info) => {
 					try {
 						debug('Detected file info:', info)
 
 						// Filter
-						const filter_result = filter({}, req.headers, info.mime_type)
+						const filter_result = filter({}, req.headers, info.mimeType)
 						if (200 != filter_result.code) {
 							debug('Rejecting content', filter_result.message)
 							stream.end()
@@ -212,11 +212,11 @@ module.exports = function (storage, runtime) {
 				// Add a file extension to download requests if necessary. If the file
 				// already contains an extension, don't add one.
 				let send_name = id
-				const type = stream.file_info.mime_type
+				const type = stream.fileInfo.mimeType
 				if (download) {
 					let ext = path.extname(send_name)
 					if (!ext) {
-						ext = stream.file_info.ext
+						ext = stream.fileInfo.ext
 						if (ext) {
 							send_name = `${send_name}.${ext}`
 						}

+ 10 - 11
storage-node/packages/runtime-api/test/identities.js

@@ -18,7 +18,6 @@
 
 'use strict'
 
-const mocha = require('mocha')
 const expect = require('chai').expect
 const sinon = require('sinon')
 const temp = require('temp').track()
@@ -36,17 +35,17 @@ describe('Identities', () => {
 		await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
 		// Edwards and schnorr keys should unlock
-		const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake((_) => 'asdf')
+		const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
 		await api.identities.loadUnlock('test/data/edwards.json')
 		await api.identities.loadUnlock('test/data/schnorr.json')
-		passphrase_stub.restore()
+		passphraseStub.restore()
 
 		// Except if the wrong passphrase is given
-		const passphrase_stub_bad = sinon.stub(api.identities, 'askForPassphrase').callsFake((_) => 'bad')
+		const passphraseStubBad = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'bad')
 		expect(async () => {
 			await api.identities.loadUnlock('test/data/edwards.json')
 		}).to.throw
-		passphrase_stub_bad.restore()
+		passphraseStubBad.restore()
 	})
 
 	it('knows about membership', async () => {
@@ -56,17 +55,17 @@ describe('Identities', () => {
 		// Without seeding the runtime with data, we can only verify that the API
 		// reacts well in the absence of membership
 		expect(await api.identities.isMember(addr)).to.be.false
-		const member_id = await api.identities.firstMemberIdOf(addr)
+		const memberId = await api.identities.firstMemberIdOf(addr)
 
-		expect(member_id).to.be.undefined
+		expect(memberId).to.be.undefined
 	})
 
 	it('exports keys', async () => {
 		const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-		const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake((_) => 'asdf')
+		const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
 		const exported = await api.identities.exportKeyPair(key.address)
-		passphrase_stub.restore()
+		passphraseStub.restore()
 
 		expect(exported).to.have.property('address')
 		expect(exported.address).to.equal(key.address)
@@ -88,9 +87,9 @@ describe('Identities', () => {
 
 		const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-		const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake((_) => 'asdf')
+		const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
 		const filename = await api.identities.writeKeyPairExport(key.address, prefix)
-		passphrase_stub.restore()
+		passphraseStub.restore()
 
 		const fs = require('fs')
 		const stat = fs.statSync(filename)

+ 0 - 3
storage-node/packages/runtime-api/test/index.js

@@ -18,9 +18,6 @@
 
 'use strict'
 
-const mocha = require('mocha')
-const expect = require('chai').expect
-
 const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('RuntimeApi', () => {

+ 22 - 22
storage-node/packages/runtime-api/workers.js

@@ -141,25 +141,25 @@ class WorkersApi {
 	 * Add a new storage group opening using the lead account. Returns the
 	 * new opening id.
 	 */
-	async dev_addStorageOpening() {
-		const openTx = this.dev_makeAddOpeningTx('Worker')
-		return this.dev_submitAddOpeningTx(openTx, await this.getLeadRoleAccount())
+	async devAddStorageOpening() {
+		const openTx = this.devMakeAddOpeningTx('Worker')
+		return this.devSubmitAddOpeningTx(openTx, await this.getLeadRoleAccount())
 	}
 
 	/*
 	 * Add a new storage working group lead opening using sudo account. Returns the
 	 * new opening id.
 	 */
-	async dev_addStorageLeadOpening() {
-		const openTx = this.dev_makeAddOpeningTx('Leader')
+	async devAddStorageLeadOpening() {
+		const openTx = this.devMakeAddOpeningTx('Leader')
 		const sudoTx = this.base.api.tx.sudo.sudo(openTx)
-		return this.dev_submitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
+		return this.devSubmitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
 	}
 
 	/*
 	 * Constructs an addOpening tx of openingType
 	 */
-	dev_makeAddOpeningTx(openingType) {
+	devMakeAddOpeningTx(openingType) {
 		return this.base.api.tx.storageWorkingGroup.addOpening(
 			'CurrentBlock',
 			{
@@ -178,7 +178,7 @@ class WorkersApi {
 	 * Submits a tx (expecting it to dispatch storageWorkingGroup.addOpening) and returns
 	 * the OpeningId from the resulting event.
 	 */
-	async dev_submitAddOpeningTx(tx, senderAccount) {
+	async devSubmitAddOpeningTx(tx, senderAccount) {
 		return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
 			eventModule: 'storageWorkingGroup',
 			eventName: 'OpeningAdded',
@@ -189,7 +189,7 @@ class WorkersApi {
 	/*
 	 * Apply on an opening, returns the application id.
 	 */
-	async dev_applyOnOpening(openingId, memberId, memberAccount, roleAccount) {
+	async devApplyOnOpening(openingId, memberId, memberAccount, roleAccount) {
 		const applyTx = this.base.api.tx.storageWorkingGroup.applyOnOpening(
 			memberId,
 			openingId,
@@ -209,8 +209,8 @@ class WorkersApi {
 	/*
 	 * Move lead opening to review state using sudo account
 	 */
-	async dev_beginLeadOpeningReview(openingId) {
-		const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+	async devBeginLeadOpeningReview(openingId) {
+		const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
 		const sudoTx = this.base.api.tx.sudo.sudo(beginReviewTx)
 		return this.base.signAndSend(await this.base.identities.getSudoAccount(), sudoTx)
 	}
@@ -218,48 +218,48 @@ class WorkersApi {
 	/*
 	 * Move a storage opening to review state using lead account
 	 */
-	async dev_beginStorageOpeningReview(openingId) {
-		const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+	async devBeginStorageOpeningReview(openingId) {
+		const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
 		return this.base.signAndSend(await this.getLeadRoleAccount(), beginReviewTx)
 	}
 
 	/*
 	 * Constructs a beginApplicantReview tx for openingId, which puts an opening into the review state
 	 */
-	dev_makeBeginOpeningReviewTx(openingId) {
+	devMakeBeginOpeningReviewTx(openingId) {
 		return this.base.api.tx.storageWorkingGroup.beginApplicantReview(openingId)
 	}
 
 	/*
 	 * Fill a lead opening, return the assigned worker id, using the sudo account
 	 */
-	async dev_fillLeadOpening(openingId, applicationId) {
-		const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
+	async devFillLeadOpening(openingId, applicationId) {
+		const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
 		const sudoTx = this.base.api.tx.sudo.sudo(fillTx)
-		const filled = await this.dev_submitFillOpeningTx(await this.base.identities.getSudoAccount(), sudoTx)
+		const filled = await this.devSubmitFillOpeningTx(await this.base.identities.getSudoAccount(), sudoTx)
 		return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
 	}
 
 	/*
 	 * Fill a storage opening, return the assigned worker id, using the lead account
 	 */
-	async dev_fillStorageOpening(openingId, applicationId) {
-		const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
-		const filled = await this.dev_submitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
+	async devFillStorageOpening(openingId, applicationId) {
+		const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
+		const filled = await this.devSubmitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
 		return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
 	}
 
 	/*
 	 * Constructs a FillOpening transaction
 	 */
-	dev_makeFillOpeningTx(openingId, applicationId) {
+	devMakeFillOpeningTx(openingId, applicationId) {
 		return this.base.api.tx.storageWorkingGroup.fillOpening(openingId, [applicationId], null)
 	}
 
 	/*
 	 * Dispatches a fill opening tx and returns a map of the application id to their new assigned worker ids.
 	 */
-	async dev_submitFillOpeningTx(senderAccount, tx) {
+	async devSubmitFillOpeningTx(senderAccount, tx) {
 		return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
 			eventModule: 'storageWorkingGroup',
 			eventName: 'OpeningFilled',

+ 14 - 10
storage-node/packages/storage/filter.js

@@ -25,11 +25,11 @@ const DEFAULT_ACCEPT_TYPES = ['video/*', 'audio/*', 'image/*']
 const DEFAULT_REJECT_TYPES = []
 
 // Configuration defaults
-function config_defaults(config) {
+function configDefaults(config) {
 	const filter = config.filter || {}
 
 	// We accept zero as switching this check off.
-	if (typeof filter.max_size === 'undefined' || typeof filter.max_size === 'null') {
+	if (typeof filter.max_size === 'undefined') {
 		filter.max_size = DEFAULT_MAX_FILE_SIZE
 	}
 
@@ -42,7 +42,7 @@ function config_defaults(config) {
 }
 
 // Mime type matching
-function mime_matches(acceptable, provided) {
+function mimeMatches(acceptable, provided) {
 	if (acceptable.endsWith('*')) {
 		// Wildcard match
 		const prefix = acceptable.slice(0, acceptable.length - 1)
@@ -51,14 +51,14 @@ function mime_matches(acceptable, provided) {
 	}
 	// Exact match
 	debug('exact matching', provided, 'against', acceptable)
-	return provided == acceptable
+	return provided === acceptable
 }
 
-function mime_matches_any(accept, reject, provided) {
+function mimeMatchesAny(accept, reject, provided) {
 	// Pass accept
 	let accepted = false
 	for (const item of accept) {
-		if (mime_matches(item, provided)) {
+		if (mimeMatches(item, provided)) {
 			debug('Content type matches', item, 'which is acceptable.')
 			accepted = true
 			break
@@ -70,7 +70,7 @@ function mime_matches_any(accept, reject, provided) {
 
 	// Don't pass reject
 	for (const item of reject) {
-		if (mime_matches(item, provided)) {
+		if (mimeMatches(item, provided)) {
 			debug('Content type matches', item, 'which is unacceptable.')
 			return false
 		}
@@ -86,9 +86,13 @@ function mime_matches_any(accept, reject, provided) {
  * This is a straightforward implementation of
  * https://github.com/Joystream/storage-node-joystream/issues/14 - but should
  * most likely be improved on in future.
+ * @param {object} config - configuration
+ * @param {object} headers - required headers
+ * @param {string} mimeType - expected MIME type
+ * @return {object} HTTP status code and error message.
  **/
-function filter_func(config, headers, mime_type) {
-	const filter = config_defaults(config)
+function filter_func(config, headers, mimeType) {
+	const filter = configDefaults(config)
 
 	// Enforce maximum file upload size
 	if (filter.max_size) {
@@ -109,7 +113,7 @@ function filter_func(config, headers, mime_type) {
 	}
 
 	// Enforce mime type based filtering
-	if (!mime_matches_any(filter.mime.accept, filter.mime.reject, mime_type)) {
+	if (!mimeMatchesAny(filter.mime.accept, filter.mime.reject, mimeType)) {
 		return {
 			code: 415,
 			message: 'Content has an unacceptable MIME type.',
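
A minimal sketch of calling the renamed filter helpers, mirroring how the colossus asset route above uses them. Assumptions: filter_func is what this module exports (colossus calls it simply as filter), and req/res are Express-style stand-ins:

// Sketch only; req/res are illustrative Express-style objects.
function checkUpload(req, res) {
	const result = filter_func({}, req.headers, req.headers['content-type'])
	if (result.code !== 200) {
		// e.g. 415 when the type is not in the accept list (or is in the reject list)
		res.status(result.code).send({ message: result.message })
		return false
	}
	return true
}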

+ 44 - 42
storage-node/packages/storage/storage.js

@@ -29,8 +29,8 @@ Promise.config({
 	cancellation: true,
 })
 
-const file_type = require('file-type')
-const ipfs_client = require('ipfs-http-client')
+const fileType = require('file-type')
+const ipfsClient = require('ipfs-http-client')
 const temp = require('temp').track()
 const _ = require('lodash')
 
@@ -46,31 +46,31 @@ const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
 
 // Default file info if nothing could be detected.
 const DEFAULT_FILE_INFO = {
-	mime_type: 'application/octet-stream',
+	mimeType: 'application/octet-stream',
 	ext: 'bin',
 }
 
 /*
  * fileType is a weird name, because we're really looking at MIME types.
  * Also, the type field includes extension info, so we're going to call
- * it file_info { mime_type, ext } instead.
+ * it fileInfo { mimeType, ext } instead.
  * Nitpicking, but it also means we can add our default type if things
  * go wrong.
  */
-function fix_file_info(info) {
+function fixFileInfo(info) {
 	if (!info) {
 		info = DEFAULT_FILE_INFO
 	} else {
-		info.mime_type = info.mime
+		info.mimeType = info.mime
 		delete info.mime
 	}
 	return info
 }
 
-function fix_file_info_on_stream(stream) {
-	const info = fix_file_info(stream.fileType)
+function fixFileInfoOnStream(stream) {
+	const info = fixFileInfo(stream.fileType)
 	delete stream.fileType
-	stream.file_info = info
+	stream.fileInfo = info
 	return stream
 }
 
@@ -102,15 +102,15 @@ class StorageWriteStream extends Transform {
 		this.temp.write(chunk)
 
 		// Try to detect file type during streaming.
-		if (!this.file_info && this.buf < file_type.minimumBytes) {
+		if (!this.fileInfo && this.buf < fileType.minimumBytes) {
 			this.buf = Buffer.concat([this.buf, chunk])
 
-			if (this.buf >= file_type.minimumBytes) {
-				const info = file_type(this.buf)
+			if (this.buf >= fileType.minimumBytes) {
+				const info = fileType(this.buf)
 				// No info? We can try again at the end of the stream.
 				if (info) {
-					this.file_info = fix_file_info(info)
-					this.emit('file_info', this.file_info)
+					this.fileInfo = fixFileInfo(info)
+					this.emit('fileInfo', this.fileInfo)
 				}
 			}
 		}
@@ -123,13 +123,13 @@ class StorageWriteStream extends Transform {
 		this.temp.end()
 
 		// Since we're finished, we can try to detect the file type again.
-		if (!this.file_info) {
+		if (!this.fileInfo) {
 			const read = fs.createReadStream(this.temp.path)
-			file_type
+			fileType
 				.stream(read)
 				.then((stream) => {
-					this.file_info = fix_file_info_on_stream(stream).file_info
-					this.emit('file_info', this.file_info)
+					this.fileInfo = fixFileInfoOnStream(stream).fileInfo
+					this.emit('fileInfo', this.fileInfo)
 				})
 				.catch((err) => {
 					debug('Error trying to detect file type at end-of-stream:', err)
@@ -168,7 +168,9 @@ class StorageWriteStream extends Transform {
 	 */
 	cleanup() {
 		debug('Cleaning up temporary file: ', this.temp.path)
-		fs.unlink(this.temp.path, () => {}) // Ignore errors
+		fs.unlink(this.temp.path, () => {
+			/* Ignore errors.*/
+		})
 		delete this.temp
 	}
 }
@@ -213,7 +215,7 @@ class Storage {
 		this._timeout = this.options.timeout || DEFAULT_TIMEOUT
 		this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID
 
-		this.ipfs = ipfs_client(this.options.ipfs.connect_options)
+		this.ipfs = ipfsClient(this.options.ipfs.connect_options)
 
 		this.pins = {}
 
@@ -231,7 +233,7 @@ class Storage {
 	 * the given timeout interval, and tries to execute the given operation within
 	 * that time.
 	 */
-	async _with_specified_timeout(timeout, operation) {
+	async withSpecifiedTimeout(timeout, operation) {
 		return new Promise(async (resolve, reject) => {
 			try {
 				resolve(await new Promise(operation))
@@ -244,8 +246,8 @@ class Storage {
 	/*
 	 * Resolve content ID with timeout.
 	 */
-	async _resolve_content_id_with_timeout(timeout, contentId) {
-		return await this._with_specified_timeout(timeout, async (resolve, reject) => {
+	async resolveContentIdWithTimeout(timeout, contentId) {
+		return await this.withSpecifiedTimeout(timeout, async (resolve, reject) => {
 			try {
 				resolve(await this._resolve_content_id(contentId))
 			} catch (err) {
@@ -258,9 +260,9 @@ class Storage {
 	 * Stat a content ID.
 	 */
 	async stat(contentId, timeout) {
-		const resolved = await this._resolve_content_id_with_timeout(timeout, contentId)
+		const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
-		return await this._with_specified_timeout(timeout, (resolve, reject) => {
+		return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
 			this.ipfs.files.stat(`/ipfs/${resolved}`, { withLocal: true }, (err, res) => {
 				if (err) {
 					reject(err)
@@ -283,8 +285,8 @@ class Storage {
 	 * Opens the specified content in read or write mode, and returns a Promise
 	 * with the stream.
 	 *
-	 * Read streams will contain a file_info property, with:
-	 *  - a `mime_type` field providing the file's MIME type, or a default.
+	 * Read streams will contain a fileInfo property, with:
+	 *  - a `mimeType` field providing the file's MIME type, or a default.
 	 *  - an `ext` property, providing a file extension suggestion, or a default.
 	 *
 	 * Write streams have a slightly different flow, in order to allow for MIME
@@ -295,49 +297,49 @@ class Storage {
 	 * When the commit has finished, a `committed` event is emitted, which
 	 * contains the IPFS backend's content ID.
 	 *
-	 * Write streams also emit a `file_info` event during writing. It is passed
-	 * the `file_info` field as described above. Event listeners may now opt to
+	 * Write streams also emit a `fileInfo` event during writing. It is passed
+	 * the `fileInfo` field as described above. Event listeners may now opt to
 	 * abort the write or continue and eventually `commit()` the file. There is
 	 * an explicit `cleanup()` function that removes temporary files as well,
 	 * in case committing is not desired.
 	 */
 	async open(contentId, mode, timeout) {
-		if (mode != 'r' && mode != 'w') {
+		if (mode !== 'r' && mode !== 'w') {
 			throw Error('The only supported modes are "r", "w" and "a".')
 		}
 
 		// Write stream
 		if (mode === 'w') {
-			return await this._create_write_stream(contentId, timeout)
+			return await this.createWriteStream(contentId, timeout)
 		}
 
 		// Read stream - with file type detection
-		return await this._create_read_stream(contentId, timeout)
+		return await this.createReadStream(contentId, timeout)
 	}
 
-	async _create_write_stream(contentId) {
+	async createWriteStream() {
 		// IPFS wants us to just dump a stream into its storage, then returns a
 		// content ID (of its own).
 		// We need to instead return a stream immediately, that we eventually
 		// decorate with the content ID when that's available.
-		return new Promise((resolve, reject) => {
+		return new Promise((resolve) => {
 			const stream = new StorageWriteStream(this)
 			resolve(stream)
 		})
 	}
 
-	async _create_read_stream(contentId, timeout) {
-		const resolved = await this._resolve_content_id_with_timeout(timeout, contentId)
+	async createReadStream(contentId, timeout) {
+		const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
 		let found = false
-		return await this._with_specified_timeout(timeout, (resolve, reject) => {
+		return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
 			const ls = this.ipfs.getReadableStream(resolved)
 			ls.on('data', async (result) => {
 				if (result.path === resolved) {
 					found = true
 
-					const ft_stream = await file_type.stream(result.content)
-					resolve(fix_file_info_on_stream(ft_stream))
+					const ftStream = await fileType.stream(result.content)
+					resolve(fixFileInfoOnStream(ftStream))
 				}
 			})
 			ls.on('error', (err) => {
@@ -360,7 +362,7 @@ class Storage {
 	 * Synchronize the given content ID
 	 */
 	async synchronize(contentId) {
-		const resolved = await this._resolve_content_id_with_timeout(this._timeout, contentId)
+		const resolved = await this.resolveContentIdWithTimeout(this._timeout, contentId)
 
 		// validate resolved id is proper ipfs_cid, not null or empty string
 
@@ -370,8 +372,8 @@ class Storage {
 
 		debug(`Pinning ${resolved}`)
 
-		// This call blocks until file is retreived..
-		this.ipfs.pin.add(resolved, { quiet: true, pin: true }, (err, res) => {
+		// This call blocks until file is retrieved..
+		this.ipfs.pin.add(resolved, { quiet: true, pin: true }, (err) => {
 			if (err) {
 				debug(`Error Pinning: ${resolved}`)
 				delete this.pins[resolved]
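
A short sketch of the renamed write-side flow that the comments above describe (open in 'w' mode, listen for fileInfo, commit on finish, receive the IPFS content ID via committed). The storage instance and the source stream are assumed to exist:

// Sketch, assuming storage is a constructed Storage instance and source is a
// readable stream with the content to store.
async function storeContent(storage, contentId, source) {
	const stream = await storage.open(contentId, 'w')
	stream.on('fileInfo', (info) => console.log('detected', info.mimeType, info.ext))
	stream.on('finish', () => stream.commit())
	stream.on('committed', (ipfsCid) => console.log('stored as', ipfsCid))
	source.pipe(stream)
}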

+ 28 - 29
storage-node/packages/storage/test/storage.js

@@ -18,11 +18,10 @@
 
 'use strict'
 
-const mocha = require('mocha')
 const chai = require('chai')
-const chai_as_promised = require('chai-as-promised')
+const chaiAsPromised = require('chai-as-promised')
 
-chai.use(chai_as_promised)
+chai.use(chaiAsPromised)
 const expect = chai.expect
 
 const fs = require('fs')
@@ -51,7 +50,7 @@ function write(store, contentId, contents, callback) {
 		})
 }
 
-function read_all(stream) {
+function readAll(stream) {
 	return new Promise((resolve, reject) => {
 		const chunks = []
 		stream.on('data', (chunk) => chunks.push(chunk))
@@ -61,7 +60,7 @@ function read_all(stream) {
 	})
 }
 
-function create_known_object(contentId, contents, callback) {
+function createKnownObject(contentId, contents, callback) {
 	let hash
 	const store = Storage.create({
 		resolve_content_id: () => {
@@ -69,8 +68,8 @@ function create_known_object(contentId, contents, callback) {
 		},
 	})
 
-	write(store, contentId, contents, (the_hash) => {
-		hash = the_hash
+	write(store, contentId, contents, (theHash) => {
+		hash = theHash
 
 		callback(store, hash)
 	})
@@ -96,21 +95,21 @@ describe('storage/storage', () => {
 			storage
 				.open('mime-test', 'w')
 				.then((stream) => {
-					let file_info
-					stream.on('file_info', (info) => {
+					let fileInfo
+					stream.on('fileInfo', (info) => {
 						// Could filter & abort here now, but we're just going to set this,
 						// and expect it to be set later...
-						file_info = info
+						fileInfo = info
 					})
 
 					stream.on('finish', () => {
 						stream.commit()
 					})
 
-					stream.on('committed', (hash) => {
-						// ... if file_info is not set here, there's an issue.
-						expect(file_info).to.have.property('mime_type', 'application/xml')
-						expect(file_info).to.have.property('ext', 'xml')
+					stream.on('committed', () => {
+						// ... if fileInfo is not set here, there's an issue.
+						expect(fileInfo).to.have.property('mimeType', 'application/xml')
+						expect(fileInfo).to.have.property('ext', 'xml')
 						done()
 					})
 
@@ -127,11 +126,11 @@ describe('storage/storage', () => {
 
 		it('can read a stream', (done) => {
 			const contents = 'test-for-reading'
-			create_known_object('foobar', contents, (store, hash) => {
+			createKnownObject('foobar', contents, (store) => {
 				store
 					.open('foobar', 'r')
 					.then(async (stream) => {
-						const data = await read_all(stream)
+						const data = await readAll(stream)
 						expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
 						done()
 					})
@@ -143,18 +142,18 @@ describe('storage/storage', () => {
 
 		it('detects the MIME type of a read stream', (done) => {
 			const contents = fs.readFileSync('../../storage-node_new.svg')
-			create_known_object('foobar', contents, (store, hash) => {
+			createKnownObject('foobar', contents, (store) => {
 				store
 					.open('foobar', 'r')
 					.then(async (stream) => {
-						const data = await read_all(stream)
+						const data = await readAll(stream)
 						expect(contents.length).to.equal(data.length)
 						expect(Buffer.compare(data, contents)).to.equal(0)
-						expect(stream).to.have.property('file_info')
+						expect(stream).to.have.property('fileInfo')
 
 						// application/xml+svg would be better, but this is good-ish.
-						expect(stream.file_info).to.have.property('mime_type', 'application/xml')
-						expect(stream.file_info).to.have.property('ext', 'xml')
+						expect(stream.fileInfo).to.have.property('mimeType', 'application/xml')
+						expect(stream.fileInfo).to.have.property('ext', 'xml')
 						done()
 					})
 					.catch((err) => {
@@ -165,15 +164,15 @@ describe('storage/storage', () => {
 
 		it('provides default MIME type for read streams', (done) => {
 			const contents = 'test-for-reading'
-			create_known_object('foobar', contents, (store, hash) => {
+			createKnownObject('foobar', contents, (store) => {
 				store
 					.open('foobar', 'r')
 					.then(async (stream) => {
-						const data = await read_all(stream)
+						const data = await readAll(stream)
 						expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
 
-						expect(stream.file_info).to.have.property('mime_type', 'application/octet-stream')
-						expect(stream.file_info).to.have.property('ext', 'bin')
+						expect(stream.fileInfo).to.have.property('mimeType', 'application/octet-stream')
+						expect(stream.fileInfo).to.have.property('ext', 'bin')
 						done()
 					})
 					.catch((err) => {
@@ -195,9 +194,9 @@ describe('storage/storage', () => {
 
 		it('returns stats for a known object', (done) => {
 			const content = 'stat-test'
-			const expected_size = content.length
-			create_known_object('foobar', content, (store, hash) => {
-				expect(store.stat(hash)).to.eventually.have.property('size', expected_size)
+			const expectedSize = content.length
+			createKnownObject('foobar', content, (store, hash) => {
+				expect(store.stat(hash)).to.eventually.have.property('size', expectedSize)
 				done()
 			})
 		})
@@ -214,7 +213,7 @@ describe('storage/storage', () => {
 		})
 
 		it('returns the size of a known object', (done) => {
-			create_known_object('foobar', 'stat-test', (store, hash) => {
+			createKnownObject('foobar', 'stat-test', (store, hash) => {
 				expect(store.size(hash)).to.eventually.equal(15)
 				done()
 			})

+ 2 - 1
storage-node/packages/util/externalPromise.js

@@ -1,5 +1,6 @@
 /**
- * Returns an object that contains a Promise and exposes its handlers, ie. resolve and reject methods
+ * Creates a new promise.
+ * @return { object} Returns an object that contains a Promise and exposes its handlers, ie. resolve and reject methods
  * so it can be fulfilled 'externally'. This is a bit of a hack, but most useful application is when
  * concurrent async operations are initiated that are all waiting on the same result value.
  */
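
The function body is not part of this diff; for reference, an externally-resolvable promise helper typically looks like the sketch below (names assumed, not the file's actual code):

// Sketch only: returns a Promise together with its resolve/reject handlers so a
// producer can settle it 'externally' while several consumers await it.
function externalPromise() {
	let resolve, reject
	const promise = new Promise((res, rej) => {
		resolve = res
		reject = rej
	})
	return { promise, resolve, reject }
}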

+ 5 - 5
storage-node/packages/util/fs/resolve.js

@@ -42,18 +42,18 @@ function resolve(base, name) {
 	// At this point resolving the path should stay within the base we specify.
 	// We do specify a base other than the file system root, because otherwise
 	// everything is always relative to the file system root.
-	const test_base = path.join(path.sep, 'test-base')
-	debug('Test base is', test_base)
-	res = path.resolve(test_base, res)
+	const testBase = path.join(path.sep, 'test-base')
+	debug('Test base is', testBase)
+	res = path.resolve(testBase, res)
 	debug('Resolved', res)
 
 	// Ok, we can check for violations now.
-	if (res.slice(0, test_base.length) != test_base) {
+	if (res.slice(0, testBase.length) !== testBase) {
 		throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`)
 	}
 
 	// If we strip the base now, we have the relative name resolved.
-	res = res.slice(test_base.length + 1)
+	res = res.slice(testBase.length + 1)
 	debug('Relative', res)
 
 	// Finally we can join this relative name to the requested base.
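
Illustrative behaviour of resolve(base, name) under the sandboxing check above (the paths are made up; the thrown message comes from this hunk):

// Sketch of expected behaviour only.
resolve('/srv/repo', 'images/logo.svg')
// -> a path inside '/srv/repo'

resolve('/srv/repo', '../../etc/passwd')
// -> throws Error: Name "../../etc/passwd" cannot be resolved to a repo relative path, aborting!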

+ 7 - 7
storage-node/packages/util/fs/walk.js

@@ -28,7 +28,7 @@ class Walker {
 		this.archive = archive
 		this.base = base
 		this.slice_offset = this.base.length
-		if (this.base[this.slice_offset - 1] != '/') {
+		if (this.base[this.slice_offset - 1] !== '/') {
 			this.slice_offset += 1
 		}
 		this.cb = cb
@@ -38,7 +38,7 @@ class Walker {
 	/*
 	 * Check pending
 	 */
-	check_pending(name) {
+	checkPending(name) {
 		// Decrease pending count again.
 		this.pending -= 1
 		debug('Finishing', name, 'decreases pending to', this.pending)
@@ -51,7 +51,7 @@ class Walker {
 	/*
 	 * Helper function for walk; split out because it's used in two places.
 	 */
-	report_and_recurse(relname, fname, lstat, linktarget) {
+	reportAndRecurse(relname, fname, lstat, linktarget) {
 		// First report the value
 		this.cb(null, relname, lstat, linktarget)
 
@@ -60,7 +60,7 @@ class Walker {
 			this.walk(fname)
 		}
 
-		this.check_pending(fname)
+		this.checkPending(fname)
 	}
 
 	walk(dir) {
@@ -102,15 +102,15 @@ class Walker {
 								return
 							}
 
-							this.report_and_recurse(relname, fname, lstat, linktarget)
+							this.reportAndRecurse(relname, fname, lstat, linktarget)
 						})
 					} else {
-						this.report_and_recurse(relname, fname, lstat)
+						this.reportAndRecurse(relname, fname, lstat)
 					}
 				})
 			})
 
-			this.check_pending(dir)
+			this.checkPending(dir)
 		})
 	}
 }

+ 4 - 4
storage-node/packages/util/lru.js

@@ -98,13 +98,13 @@ class LRUCache {
 
 		debug('Have to prune', this.store.size - this.capacity, 'items.')
 		let idx = 0
-		const to_prune = []
-		while (idx < sorted.length && to_prune.length < this.store.size - this.capacity) {
-			to_prune.push(sorted[idx][0])
+		const toPrune = []
+		while (idx < sorted.length && toPrune.length < this.store.size - this.capacity) {
+			toPrune.push(sorted[idx][0])
 			++idx
 		}
 
-		to_prune.forEach((key) => {
+		toPrune.forEach((key) => {
 			this.store.delete(key)
 			this.access.delete(key)
 		})

+ 19 - 19
storage-node/packages/util/pagination.js

@@ -21,7 +21,7 @@
 const debug = require('debug')('joystream:middleware:pagination')
 
 // Pagination definitions
-const _api_defs = {
+const apiDefs = {
 	parameters: {
 		paginationLimit: {
 			name: 'limit',
@@ -78,9 +78,9 @@ const _api_defs = {
  *   -> Validates pagination parameters
  * - apiDoc.responses.200.schema.pagination = pagination.response
  *   -> Generates pagination info on response
- * - paginate(req, res, [last_offset])
+ * - paginate(req, res, [lastOffset])
  *   -> add (valid) pagination fields to response object
- *      If last_offset is given, create a last link with that offset
+ *      If lastOffset is given, create a last link with that offset
  **/
 module.exports = {
 	// Add pagination parameters and pagination info responses.
@@ -96,13 +96,13 @@ module.exports = {
 	// Update swagger/openapi specs with our own parameters and definitions
 	openapi(api) {
 		api.components = api.components || {}
-		api.components.parameters = { ...(api.components.parameters || {}), ..._api_defs.parameters }
-		api.components.schemas = { ...(api.components.schemas || {}), ..._api_defs.schemas }
+		api.components.parameters = { ...(api.components.parameters || {}), ...apiDefs.parameters }
+		api.components.schemas = { ...(api.components.schemas || {}), ...apiDefs.schemas }
 		return api
 	},
 
 	// Pagination function
-	paginate(req, res, last_offset) {
+	paginate(req, res, lastOffset) {
 		// Skip if the response is not an object.
 		if (Object.prototype.toString.call(res) !== '[object Object]') {
 			debug('Cannot paginate non-objects.')
@@ -116,38 +116,38 @@ module.exports = {
 
 		// Parse current url
 		const url = require('url')
-		const req_url = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl)
-		const params = new url.URLSearchParams(req_url.query)
+		const reqUrl = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl)
+		const params = new url.URLSearchParams(reqUrl.query)
 
 		// Pagination object
 		const pagination = {
-			self: req_url.href,
+			self: reqUrl.href,
 		}
 
 		const prev = offset - limit
 		if (prev >= 0) {
 			params.set('offset', prev)
-			req_url.search = params.toString()
-			pagination.prev = url.format(req_url)
+			reqUrl.search = params.toString()
+			pagination.prev = url.format(reqUrl)
 		}
 
 		const next = offset + limit
 		if (next >= 0) {
 			params.set('offset', next)
-			req_url.search = params.toString()
-			pagination.next = url.format(req_url)
+			reqUrl.search = params.toString()
+			pagination.next = url.format(reqUrl)
 		}
 
-		if (last_offset) {
-			params.set('offset', last_offset)
-			req_url.search = params.toString()
-			pagination.last = url.format(req_url)
+		if (lastOffset) {
+			params.set('offset', lastOffset)
+			reqUrl.search = params.toString()
+			pagination.last = url.format(reqUrl)
 		}
 
 		// First
 		params.set('offset', 0)
-		req_url.search = params.toString()
-		pagination.first = url.format(req_url)
+		reqUrl.search = params.toString()
+		pagination.first = url.format(reqUrl)
 
 		debug('pagination', pagination)
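
A short usage sketch based on the comment block above; the route handler wiring and the assumption that paginate attaches the link fields to the passed response object are illustrative:

// Sketch only; pagination refers to this module, the handler wiring is made up.
function listItems(req, res) {
	const response = { items: [] }
	// Assumed to attach self/first plus prev/next (and last when lastOffset is given),
	// per the comment block above.
	pagination.paginate(req, response, /* lastOffset */ 100)
	res.json(response)
}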