Selaa lähdekoodia

Merge branch 'nicaea' into feature/nicaea-proposals

Gleb Urvanov 4 vuotta sitten
vanhempi
commit
649f89d413
100 muutettua tiedostoa jossa 4215 lisäystä ja 3113 poistoa
  1. 2 2
      Cargo.lock
  2. 3 2
      cli/package.json
  3. 220 36
      cli/src/Api.ts
  4. 234 1
      cli/src/Types.ts
  5. 1 1
      cli/src/base/AccountsCommandBase.ts
  6. 349 1
      cli/src/base/ApiCommandBase.ts
  7. 80 0
      cli/src/base/DefaultCommandBase.ts
  8. 115 3
      cli/src/base/WorkingGroupsCommandBase.ts
  9. 11 61
      cli/src/commands/api/inspect.ts
  10. 40 0
      cli/src/commands/working-groups/application.ts
  11. 96 0
      cli/src/commands/working-groups/createOpening.ts
  12. 58 0
      cli/src/commands/working-groups/fillOpening.ts
  13. 78 0
      cli/src/commands/working-groups/opening.ts
  14. 22 0
      cli/src/commands/working-groups/openings.ts
  15. 1 1
      cli/src/commands/working-groups/overview.ts
  16. 46 0
      cli/src/commands/working-groups/startAcceptingApplications.ts
  17. 46 0
      cli/src/commands/working-groups/startReviewPeriod.ts
  18. 45 0
      cli/src/commands/working-groups/terminateApplication.ts
  19. 24 3
      cli/src/helpers/display.ts
  20. 1 1
      node/Cargo.toml
  21. 2 1
      package.json
  22. 2 1
      pioneer/.eslintrc.js
  23. 5 1
      pioneer/packages/joy-roles/src/OpeningMetadata.ts
  24. 2 0
      pioneer/packages/joy-roles/src/elements.tsx
  25. 42 18
      pioneer/packages/joy-roles/src/flows/apply.controller.tsx
  26. 2 0
      pioneer/packages/joy-roles/src/flows/apply.elements.stories.tsx
  27. 2 0
      pioneer/packages/joy-roles/src/flows/apply.stories.tsx
  28. 55 101
      pioneer/packages/joy-roles/src/flows/apply.tsx
  29. 3 3
      pioneer/packages/joy-roles/src/index.tsx
  30. 22 0
      pioneer/packages/joy-roles/src/mocks.ts
  31. 130 273
      pioneer/packages/joy-roles/src/tabs/Admin.controller.tsx
  32. 10 10
      pioneer/packages/joy-roles/src/tabs/MyRoles.controller.tsx
  33. 12 9
      pioneer/packages/joy-roles/src/tabs/MyRoles.elements.stories.tsx
  34. 16 4
      pioneer/packages/joy-roles/src/tabs/MyRoles.tsx
  35. 1 0
      pioneer/packages/joy-roles/src/tabs/Opportunities.controller.tsx
  36. 2 1
      pioneer/packages/joy-roles/src/tabs/Opportunities.elements.stories.tsx
  37. 4 8
      pioneer/packages/joy-roles/src/tabs/Opportunities.stories.tsx
  38. 44 19
      pioneer/packages/joy-roles/src/tabs/Opportunities.tsx
  39. 2 22
      pioneer/packages/joy-roles/src/tabs/WorkingGroup.controller.tsx
  40. 1 1
      pioneer/packages/joy-roles/src/tabs/WorkingGroup.stories.tsx
  41. 24 13
      pioneer/packages/joy-roles/src/tabs/WorkingGroup.tsx
  42. 30 45
      pioneer/packages/joy-roles/src/transport.mock.ts
  43. 171 117
      pioneer/packages/joy-roles/src/transport.substrate.ts
  44. 10 9
      pioneer/packages/joy-roles/src/transport.ts
  45. 5 0
      pioneer/packages/joy-roles/src/working_groups.ts
  46. 2 1
      pioneer/packages/joy-utils/src/View.tsx
  47. 1 1
      pioneer/packages/react-components/src/AddressCard.tsx
  48. 48 11
      runtime-modules/storage/src/data_directory.rs
  49. 129 0
      runtime-modules/storage/src/tests/data_directory.rs
  50. 3 1
      runtime-modules/storage/src/tests/mock.rs
  51. 1 1
      runtime/Cargo.toml
  52. 6 1
      runtime/src/lib.rs
  53. 29 27
      runtime/src/migration.rs
  54. 38 286
      storage-node/.eslintrc.js
  55. 8 0
      storage-node/.prettierrc
  56. 7 1
      storage-node/package.json
  57. 46 41
      storage-node/packages/cli/bin/cli.js
  58. 15 18
      storage-node/packages/cli/bin/dev.js
  59. 1 1
      storage-node/packages/cli/test/index.js
  60. 59 61
      storage-node/packages/colossus/bin/cli.js
  61. 30 32
      storage-node/packages/colossus/lib/app.js
  62. 26 28
      storage-node/packages/colossus/lib/discovery.js
  63. 12 13
      storage-node/packages/colossus/lib/middleware/file_uploads.js
  64. 23 23
      storage-node/packages/colossus/lib/middleware/validate_responses.js
  65. 34 38
      storage-node/packages/colossus/lib/sync.js
  66. 114 128
      storage-node/packages/colossus/paths/asset/v0/{id}.js
  67. 64 65
      storage-node/packages/colossus/paths/discover/v0/{id}.js
  68. 1 1
      storage-node/packages/colossus/test/index.js
  69. 37 40
      storage-node/packages/discovery/discover.js
  70. 29 32
      storage-node/packages/discovery/example.js
  71. 3 4
      storage-node/packages/discovery/index.js
  72. 17 16
      storage-node/packages/discovery/publish.js
  73. 1 1
      storage-node/packages/discovery/test/index.js
  74. 100 85
      storage-node/packages/helios/bin/cli.js
  75. 1 1
      storage-node/packages/helios/test/index.js
  76. 22 20
      storage-node/packages/runtime-api/assets.js
  77. 25 36
      storage-node/packages/runtime-api/balances.js
  78. 12 16
      storage-node/packages/runtime-api/discovery.js
  79. 27 26
      storage-node/packages/runtime-api/identities.js
  80. 45 37
      storage-node/packages/runtime-api/index.js
  81. 19 22
      storage-node/packages/runtime-api/test/assets.js
  82. 18 20
      storage-node/packages/runtime-api/test/balances.js
  83. 48 49
      storage-node/packages/runtime-api/test/identities.js
  84. 6 9
      storage-node/packages/runtime-api/test/index.js
  85. 45 44
      storage-node/packages/runtime-api/workers.js
  86. 45 49
      storage-node/packages/storage/filter.js
  87. 4 4
      storage-node/packages/storage/index.js
  88. 162 179
      storage-node/packages/storage/storage.js
  89. 150 151
      storage-node/packages/storage/test/storage.js
  90. 10 9
      storage-node/packages/util/externalPromise.js
  91. 19 21
      storage-node/packages/util/fs/resolve.js
  92. 53 62
      storage-node/packages/util/fs/walk.js
  93. 46 55
      storage-node/packages/util/lru.js
  94. 46 51
      storage-node/packages/util/pagination.js
  95. 207 243
      storage-node/packages/util/ranges.js
  96. 5 6
      storage-node/packages/util/stripEndingSlash.js
  97. 38 50
      storage-node/packages/util/test/fs/resolve.js
  98. 29 31
      storage-node/packages/util/test/fs/walk.js
  99. 119 131
      storage-node/packages/util/test/lru.js
  100. 59 66
      storage-node/packages/util/test/pagination.js

+ 2 - 2
Cargo.lock

@@ -1569,7 +1569,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node"
-version = "2.4.1"
+version = "2.5.0"
 dependencies = [
  "ctrlc",
  "derive_more 0.14.1",
@@ -1614,7 +1614,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node-runtime"
-version = "6.18.0"
+version = "6.19.0"
 dependencies = [
  "parity-scale-codec",
  "safe-mix",

+ 3 - 2
cli/package.json

@@ -8,7 +8,7 @@
   },
   "bugs": "https://github.com/Joystream/substrate-runtime-joystream/issues",
   "dependencies": {
-    "@joystream/types": "./types",
+    "@joystream/types": "^0.11.0",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
     "@oclif/plugin-help": "^2.2.3",
@@ -21,7 +21,8 @@
     "moment": "^2.24.0",
     "proper-lockfile": "^4.1.1",
     "slug": "^2.1.1",
-    "tslib": "^1.11.1"
+    "tslib": "^1.11.1",
+    "ajv": "^6.11.0"
   },
   "devDependencies": {
     "@oclif/dev-cli": "^1.22.2",

+ 220 - 36
cli/src/Api.ts

@@ -13,21 +13,39 @@ import {
     CouncilInfoObj, CouncilInfoTuple, createCouncilInfoObj,
     WorkingGroups,
     GroupMember,
+    OpeningStatus,
+    GroupOpeningStage,
+    GroupOpening,
+    GroupApplication
 } from './Types';
 import { DerivedFees, DerivedBalances } from '@polkadot/api-derive/types';
 import { CLIError } from '@oclif/errors';
 import ExitCodes from './ExitCodes';
-import { Worker, WorkerId, RoleStakeProfile } from '@joystream/types/working-group';
+import {
+    Worker, WorkerId,
+    RoleStakeProfile,
+    Opening as WGOpening,
+    Application as WGApplication
+} from '@joystream/types/working-group';
+import {
+    Opening,
+    Application,
+    OpeningStage,
+    ApplicationStageKeys,
+    ApplicationId,
+    OpeningId
+} from '@joystream/types/hiring';
 import { MemberId, Profile } from '@joystream/types/members';
 import { RewardRelationship, RewardRelationshipId } from '@joystream/types/recurring-rewards';
 import { Stake, StakeId } from '@joystream/types/stake';
 import { LinkageResult } from '@polkadot/types/codec/Linkage';
+import { Moment } from '@polkadot/types/interfaces';
 
 export const DEFAULT_API_URI = 'wss://rome-rpc-endpoint.joystream.org:9944/';
 const DEFAULT_DECIMALS = new u32(12);
 
 // Mapping of working group to api module
-const apiModuleByGroup: { [key in WorkingGroups]: string } = {
+export const apiModuleByGroup: { [key in WorkingGroups]: string } = {
     [WorkingGroups.StorageProviders]: 'storageWorkingGroup'
 };
 
@@ -35,7 +53,7 @@ const apiModuleByGroup: { [key in WorkingGroups]: string } = {
 export default class Api {
     private _api: ApiPromise;
 
-    private constructor(originalApi:ApiPromise) {
+    private constructor(originalApi: ApiPromise) {
         this._api = originalApi;
     }
 
@@ -44,12 +62,12 @@ export default class Api {
     }
 
     private static async initApi(apiUri: string = DEFAULT_API_URI): Promise<ApiPromise> {
-        const wsProvider:WsProvider = new WsProvider(apiUri);
+        const wsProvider: WsProvider = new WsProvider(apiUri);
         registerJoystreamTypes();
         const api = await ApiPromise.create({ provider: wsProvider });
 
         // Initializing some api params based on pioneer/packages/react-api/Api.tsx
-        const [ properties ] = await Promise.all([
+        const [properties] = await Promise.all([
             api.rpc.system.properties()
         ]);
 
@@ -58,8 +76,8 @@ export default class Api {
 
         // formatBlanace config
         formatBalance.setDefaults({
-          decimals: tokenDecimals,
-          unit: tokenSymbol
+            decimals: tokenDecimals,
+            unit: tokenSymbol
         });
 
         return api;
@@ -86,7 +104,7 @@ export default class Api {
         return results;
     }
 
-    async getAccountsBalancesInfo(accountAddresses:string[]): Promise<DerivedBalances[]> {
+    async getAccountsBalancesInfo(accountAddresses: string[]): Promise<DerivedBalances[]> {
         let accountsBalances: DerivedBalances[] = await this._api.derive.balances.votingBalances(accountAddresses);
 
         return accountsBalances;
@@ -94,7 +112,7 @@ export default class Api {
 
     // Get on-chain data related to given account.
     // For now it's just account balances
-    async getAccountSummary(accountAddresses:string): Promise<AccountSummary> {
+    async getAccountSummary(accountAddresses: string): Promise<AccountSummary> {
         const balances: DerivedBalances = (await this.getAccountsBalancesInfo([accountAddresses]))[0];
         // TODO: Some more information can be fetched here in the future
 
@@ -103,21 +121,21 @@ export default class Api {
 
     async getCouncilInfo(): Promise<CouncilInfoObj> {
         const queries: { [P in keyof CouncilInfoObj]: QueryableStorageMultiArg<"promise"> } = {
-            activeCouncil:    this._api.query.council.activeCouncil,
-            termEndsAt:       this._api.query.council.termEndsAt,
-            autoStart:        this._api.query.councilElection.autoStart,
-            newTermDuration:  this._api.query.councilElection.newTermDuration,
-            candidacyLimit:   this._api.query.councilElection.candidacyLimit,
-            councilSize:      this._api.query.councilElection.councilSize,
-            minCouncilStake:  this._api.query.councilElection.minCouncilStake,
-            minVotingStake:   this._api.query.councilElection.minVotingStake,
+            activeCouncil: this._api.query.council.activeCouncil,
+            termEndsAt: this._api.query.council.termEndsAt,
+            autoStart: this._api.query.councilElection.autoStart,
+            newTermDuration: this._api.query.councilElection.newTermDuration,
+            candidacyLimit: this._api.query.councilElection.candidacyLimit,
+            councilSize: this._api.query.councilElection.councilSize,
+            minCouncilStake: this._api.query.councilElection.minCouncilStake,
+            minVotingStake: this._api.query.councilElection.minVotingStake,
             announcingPeriod: this._api.query.councilElection.announcingPeriod,
-            votingPeriod:     this._api.query.councilElection.votingPeriod,
-            revealingPeriod:  this._api.query.councilElection.revealingPeriod,
-            round:            this._api.query.councilElection.round,
-            stage:            this._api.query.councilElection.stage
+            votingPeriod: this._api.query.councilElection.votingPeriod,
+            revealingPeriod: this._api.query.councilElection.revealingPeriod,
+            round: this._api.query.councilElection.round,
+            stage: this._api.query.councilElection.stage
         }
-        const results: CouncilInfoTuple = <CouncilInfoTuple> await this.queryMultiOnce(Object.values(queries));
+        const results: CouncilInfoTuple = <CouncilInfoTuple>await this.queryMultiOnce(Object.values(queries));
 
         return createCouncilInfoObj(...results);
     }
@@ -126,7 +144,7 @@ export default class Api {
     async estimateFee(account: KeyringPair, recipientAddr: string, amount: BN): Promise<BN> {
         const transfer = this._api.tx.balances.transfer(recipientAddr, amount);
         const signature = account.sign(transfer.toU8a());
-        const transactionByteSize:BN = new BN(transfer.encodedLength + signature.length);
+        const transactionByteSize: BN = new BN(transfer.encodedLength + signature.length);
 
         const fees: DerivedFees = await this._api.derive.balances.fees();
 
@@ -151,7 +169,19 @@ export default class Api {
     }
 
     protected multiLinkageResult<K extends Codec, V extends Codec>(result: LinkageResult): [Vec<K>, Vec<V>] {
-        return [ result[0] as Vec<K>, result[1] as Vec<V> ];
+        return [result[0] as Vec<K>, result[1] as Vec<V>];
+    }
+
+    protected async blockHash(height: number): Promise<string> {
+        const blockHash = await this._api.rpc.chain.getBlockHash(height);
+
+        return blockHash.toString();
+    }
+
+    protected async blockTimestamp(height: number): Promise<Date> {
+        const blockTime = (await this._api.query.timestamp.now.at(await this.blockHash(height))) as Moment;
+
+        return new Date(blockTime.toNumber());
     }
 
     protected workingGroupApiQuery(group: WorkingGroups) {
@@ -184,8 +214,10 @@ export default class Api {
         return await this.groupMember(leadWorkerId, leadWorker);
     }
 
-    protected async stakeValue (stakeId: StakeId): Promise<Balance> {
-        const stake = (await this._api.query.stake.stakes(stakeId)) as Stake;
+    protected async stakeValue(stakeId: StakeId): Promise<Balance> {
+        const stake = this.singleLinkageResult<Stake>(
+            await this._api.query.stake.stakes(stakeId) as LinkageResult
+        );
         return stake.value;
     }
 
@@ -193,17 +225,17 @@ export default class Api {
         return this.stakeValue(stakeProfile.stake_id);
     }
 
-    protected async workerTotalReward (relationshipId: RewardRelationshipId): Promise<Balance> {
+    protected async workerTotalReward(relationshipId: RewardRelationshipId): Promise<Balance> {
         const relationship = this.singleLinkageResult<RewardRelationship>(
             await this._api.query.recurringRewards.rewardRelationships(relationshipId) as LinkageResult
         );
         return relationship.total_reward_received;
     }
 
-    protected async groupMember (
+    protected async groupMember(
         id: WorkerId,
         worker: Worker
-      ): Promise<GroupMember> {
+    ): Promise<GroupMember> {
         const roleAccount = worker.role_account_id;
         const memberId = worker.member_id;
 
@@ -215,12 +247,12 @@ export default class Api {
 
         let stakeValue: Balance = this._api.createType("Balance", 0);
         if (worker.role_stake_profile && worker.role_stake_profile.isSome) {
-          stakeValue = await this.workerStake(worker.role_stake_profile.unwrap());
+            stakeValue = await this.workerStake(worker.role_stake_profile.unwrap());
         }
 
         let earnedValue: Balance = this._api.createType("Balance", 0);
         if (worker.reward_relationship && worker.reward_relationship.isSome) {
-          earnedValue = await this.workerTotalReward(worker.reward_relationship.unwrap());
+            earnedValue = await this.workerTotalReward(worker.reward_relationship.unwrap());
         }
 
         return ({
@@ -233,24 +265,176 @@ export default class Api {
         });
     }
 
-    async groupMembers (group: WorkingGroups): Promise<GroupMember[]> {
+    async groupMembers(group: WorkingGroups): Promise<GroupMember[]> {
         const nextId = (await this.workingGroupApiQuery(group).nextWorkerId()) as WorkerId;
 
         // This is chain specfic, but if next id is still 0, it means no curators have been added yet
         if (nextId.eq(0)) {
-          return [];
+            return [];
         }
 
-        const [ workerIds, workers ] = this.multiLinkageResult<WorkerId, Worker>(
+        const [workerIds, workers] = this.multiLinkageResult<WorkerId, Worker>(
             (await this.workingGroupApiQuery(group).workerById()) as LinkageResult
         );
 
         let groupMembers: GroupMember[] = [];
-        for (let [ index, worker ] of Object.entries(workers.toArray())) {
+        for (let [index, worker] of Object.entries(workers.toArray())) {
             const workerId = workerIds[parseInt(index)];
             groupMembers.push(await this.groupMember(workerId, worker));
         }
 
         return groupMembers.reverse();
-      }
+    }
+
+    async openingsByGroup(group: WorkingGroups): Promise<GroupOpening[]> {
+        const openings: GroupOpening[] = [];
+        const nextId = (await this.workingGroupApiQuery(group).nextOpeningId()) as OpeningId;
+
+        // This is chain specfic, but if next id is still 0, it means no openings have been added yet
+        if (!nextId.eq(0)) {
+            const highestId = nextId.toNumber() - 1;
+            for (let i = highestId; i >= 0; i--) {
+                openings.push(await this.groupOpening(group, i));
+            }
+        }
+
+        return openings;
+    }
+
+    protected async hiringOpeningById(id: number | OpeningId): Promise<Opening> {
+        const result = await this._api.query.hiring.openingById(id) as LinkageResult;
+        return this.singleLinkageResult<Opening>(result);
+    }
+
+    protected async hiringApplicationById(id: number | ApplicationId): Promise<Application> {
+        const result = await this._api.query.hiring.applicationById(id) as LinkageResult;
+        return this.singleLinkageResult<Application>(result);
+    }
+
+    async wgApplicationById(group: WorkingGroups, wgApplicationId: number): Promise<WGApplication> {
+        const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId() as ApplicationId;
+
+        if (wgApplicationId < 0 || wgApplicationId >= nextAppId.toNumber()) {
+            throw new CLIError('Invalid working group application ID!');
+        }
+
+        return this.singleLinkageResult<WGApplication>(
+            await this.workingGroupApiQuery(group).applicationById(wgApplicationId) as LinkageResult
+        );
+    }
+
+    protected async parseApplication(wgApplicationId: number, wgApplication: WGApplication): Promise<GroupApplication> {
+        const appId = wgApplication.application_id;
+        const application = await this.hiringApplicationById(appId);
+
+        const { active_role_staking_id: roleStakingId, active_application_staking_id: appStakingId } = application;
+
+        return {
+            wgApplicationId,
+            applicationId: appId.toNumber(),
+            member: await this.memberProfileById(wgApplication.member_id),
+            roleAccout: wgApplication.role_account_id,
+            stakes: {
+                application: appStakingId.isSome ? (await this.stakeValue(appStakingId.unwrap())).toNumber() : 0,
+                role: roleStakingId.isSome ? (await this.stakeValue(roleStakingId.unwrap())).toNumber() : 0
+            },
+            humanReadableText: application.human_readable_text.toString(),
+            stage: application.stage.type as ApplicationStageKeys
+        };
+    }
+
+    async groupApplication(group: WorkingGroups, wgApplicationId: number): Promise<GroupApplication> {
+        const wgApplication = await this.wgApplicationById(group, wgApplicationId);
+        return await this.parseApplication(wgApplicationId, wgApplication);
+    }
+
+    protected async groupOpeningApplications(group: WorkingGroups, wgOpeningId: number): Promise<GroupApplication[]> {
+        const applications: GroupApplication[] = [];
+
+        const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId() as ApplicationId;
+        for (let i = 0; i < nextAppId.toNumber(); i++) {
+            const wgApplication = await this.wgApplicationById(group, i);
+            if (wgApplication.opening_id.toNumber() !== wgOpeningId) {
+                continue;
+            }
+            applications.push(await this.parseApplication(i, wgApplication));
+        }
+
+
+        return applications;
+    }
+
+    async groupOpening(group: WorkingGroups, wgOpeningId: number): Promise<GroupOpening> {
+        const nextId = ((await this.workingGroupApiQuery(group).nextOpeningId()) as OpeningId).toNumber();
+
+        if (wgOpeningId < 0 || wgOpeningId >= nextId) {
+            throw new CLIError('Invalid working group opening ID!');
+        }
+
+        const groupOpening = this.singleLinkageResult<WGOpening>(
+            await this.workingGroupApiQuery(group).openingById(wgOpeningId) as LinkageResult
+        );
+
+        const openingId = groupOpening.hiring_opening_id.toNumber();
+        const opening = await this.hiringOpeningById(openingId);
+        const applications = await this.groupOpeningApplications(group, wgOpeningId);
+        const stage = await this.parseOpeningStage(opening.stage);
+        const stakes = {
+            application: opening.application_staking_policy.unwrapOr(undefined),
+            role: opening.role_staking_policy.unwrapOr(undefined)
+        }
+
+        return ({
+            wgOpeningId,
+            openingId,
+            opening,
+            stage,
+            stakes,
+            applications
+        });
+    }
+
+    async parseOpeningStage(stage: OpeningStage): Promise<GroupOpeningStage> {
+        let
+            status: OpeningStatus | undefined,
+            stageBlock: number | undefined,
+            stageDate: Date | undefined;
+
+        if (stage.isOfType('WaitingToBegin')) {
+            const stageData = stage.asType('WaitingToBegin');
+            const currentBlockNumber = (await this._api.derive.chain.bestNumber()).toNumber();
+            const expectedBlockTime = (this._api.consts.babe.expectedBlockTime as Moment).toNumber();
+            status = OpeningStatus.WaitingToBegin;
+            stageBlock = stageData.begins_at_block.toNumber();
+            stageDate = new Date(Date.now() + (stageBlock - currentBlockNumber) * expectedBlockTime);
+        }
+
+        if (stage.isOfType('Active')) {
+            const stageData = stage.asType('Active');
+            const substage = stageData.stage;
+            if (substage.isOfType('AcceptingApplications')) {
+                status = OpeningStatus.AcceptingApplications;
+                stageBlock = substage.asType('AcceptingApplications').started_accepting_applicants_at_block.toNumber();
+            }
+            if (substage.isOfType('ReviewPeriod')) {
+                status = OpeningStatus.InReview;
+                stageBlock = substage.asType('ReviewPeriod').started_review_period_at_block.toNumber();
+            }
+            if (substage.isOfType('Deactivated')) {
+                status = substage.asType('Deactivated').cause.isOfType('Filled')
+                    ? OpeningStatus.Complete
+                    : OpeningStatus.Cancelled;
+                stageBlock = substage.asType('Deactivated').deactivated_at_block.toNumber();
+            }
+            if (stageBlock) {
+                stageDate = new Date(await this.blockTimestamp(stageBlock));
+            }
+        }
+
+        return {
+            status: status || OpeningStatus.Unknown,
+            block: stageBlock,
+            date: stageDate
+        };
+    }
 }

+ 234 - 1
cli/src/Types.ts

@@ -1,11 +1,29 @@
 import BN from 'bn.js';
 import { ElectionStage, Seat } from '@joystream/types/council';
-import { Option } from '@polkadot/types';
+import { Option, Text } from '@polkadot/types';
+import { Constructor } from '@polkadot/types/types';
+import { Struct, Vec } from '@polkadot/types/codec';
+import { u32 } from '@polkadot/types/primitive';
 import { BlockNumber, Balance, AccountId } from '@polkadot/types/interfaces';
 import { DerivedBalances } from '@polkadot/api-derive/types';
 import { KeyringPair } from '@polkadot/keyring/types';
 import { WorkerId } from '@joystream/types/working-group';
 import { Profile, MemberId } from '@joystream/types/members';
+import {
+    GenericJoyStreamRoleSchema,
+    JobSpecifics,
+    ApplicationDetails,
+    QuestionSections,
+    QuestionSection,
+    QuestionsFields,
+    QuestionField,
+    EntryInMembershipModuke,
+    HiringProcess,
+    AdditionalRolehiringProcessDetails,
+    CreatorDetails
+} from '@joystream/types/hiring/schemas/role.schema.typings';
+import ajv from 'ajv';
+import { Opening, StakingPolicy, ApplicationStageKeys } from '@joystream/types/hiring';
 
 // KeyringPair type extended with mandatory "meta.name"
 // It's used for accounts/keys management within CLI.
@@ -83,3 +101,218 @@ export type GroupMember = {
     stake: Balance;
     earned: Balance;
 }
+
+export type GroupApplication = {
+    wgApplicationId: number;
+    applicationId: number;
+    member: Profile | null;
+    roleAccout: AccountId;
+    stakes: {
+        application: number;
+        role: number;
+    },
+    humanReadableText: string;
+    stage: ApplicationStageKeys;
+}
+
+export enum OpeningStatus {
+    WaitingToBegin = 'WaitingToBegin',
+    AcceptingApplications = 'AcceptingApplications',
+    InReview = 'InReview',
+    Complete = 'Complete',
+    Cancelled = 'Cancelled',
+    Unknown = 'Unknown'
+}
+
+export type GroupOpeningStage = {
+    status: OpeningStatus;
+    block?: number;
+    date?: Date;
+}
+
+export type GroupOpeningStakes = {
+    application?: StakingPolicy;
+    role?: StakingPolicy;
+}
+
+export type GroupOpening = {
+    wgOpeningId: number;
+    openingId: number;
+    stage: GroupOpeningStage;
+    opening: Opening;
+    stakes: GroupOpeningStakes;
+    applications: GroupApplication[];
+}
+
+// Some helper structs for generating human_readable_text in working group opening extrinsic
+// Note those types are not part of the runtime etc., we just use them to simplify prompting for values
+// (since there exists functionality that handles that for substrate types like: Struct, Vec etc.)
+interface WithJSONable<T> {
+    toJSON: () => T;
+}
+export class HRTJobSpecificsStruct extends Struct implements WithJSONable<JobSpecifics> {
+    constructor (value?: JobSpecifics) {
+        super({
+          title: "Text",
+          description: "Text",
+        }, value);
+    }
+    get title(): string {
+        return (this.get('title') as Text).toString();
+    }
+    get description(): string {
+        return (this.get('description') as Text).toString();
+    }
+    toJSON(): JobSpecifics {
+        const { title, description } = this;
+        return { title, description };
+    }
+}
+export class HRTEntryInMembershipModukeStruct extends Struct implements WithJSONable<EntryInMembershipModuke> {
+    constructor (value?: EntryInMembershipModuke) {
+        super({
+          handle: "Text",
+        }, value);
+    }
+    get handle(): string {
+        return (this.get('handle') as Text).toString();
+    }
+    toJSON(): EntryInMembershipModuke {
+        const { handle } = this;
+        return { handle };
+    }
+}
+export class HRTCreatorDetailsStruct extends Struct implements WithJSONable<CreatorDetails> {
+    constructor (value?: CreatorDetails) {
+        super({
+          membership: HRTEntryInMembershipModukeStruct,
+        }, value);
+    }
+    get membership(): EntryInMembershipModuke {
+        return (this.get('membership') as HRTEntryInMembershipModukeStruct).toJSON();
+    }
+    toJSON(): CreatorDetails {
+        const { membership } = this;
+        return { membership };
+    }
+}
+export class HRTHiringProcessStruct extends Struct implements WithJSONable<HiringProcess> {
+    constructor (value?: HiringProcess) {
+        super({
+          details: "Vec<Text>",
+        }, value);
+    }
+    get details(): AdditionalRolehiringProcessDetails {
+        return (this.get('details') as Vec<Text>).toArray().map(v => v.toString());
+    }
+    toJSON(): HiringProcess {
+        const { details } = this;
+        return { details };
+    }
+}
+export class HRTQuestionFieldStruct extends Struct implements WithJSONable<QuestionField> {
+    constructor (value?: QuestionField) {
+        super({
+            title: "Text",
+            type: "Text"
+        }, value);
+    }
+    get title(): string {
+        return (this.get('title') as Text).toString();
+    }
+    get type(): string {
+        return (this.get('type') as Text).toString();
+    }
+    toJSON(): QuestionField {
+        const { title, type } = this;
+        return { title, type };
+    }
+}
+class HRTQuestionsFieldsVec extends Vec.with(HRTQuestionFieldStruct) implements WithJSONable<QuestionsFields> {
+    toJSON(): QuestionsFields {
+        return this.toArray().map(v => v.toJSON());
+    }
+}
+export class HRTQuestionSectionStruct extends Struct implements WithJSONable<QuestionSection> {
+    constructor (value?: QuestionSection) {
+        super({
+            title: "Text",
+            questions: HRTQuestionsFieldsVec
+        }, value);
+    }
+    get title(): string {
+        return (this.get('title') as Text).toString();
+    }
+    get questions(): QuestionsFields {
+        return (this.get('questions') as HRTQuestionsFieldsVec).toJSON();
+    }
+    toJSON(): QuestionSection {
+        const { title, questions } = this;
+        return { title, questions };
+    }
+}
+export class HRTQuestionSectionsVec extends Vec.with(HRTQuestionSectionStruct) implements WithJSONable<QuestionSections> {
+    toJSON(): QuestionSections {
+        return this.toArray().map(v => v.toJSON());
+    }
+};
+export class HRTApplicationDetailsStruct extends Struct implements WithJSONable<ApplicationDetails> {
+    constructor (value?: ApplicationDetails) {
+        super({
+            sections: HRTQuestionSectionsVec
+        }, value);
+    }
+    get sections(): QuestionSections {
+        return (this.get('sections') as HRTQuestionSectionsVec).toJSON();
+    }
+    toJSON(): ApplicationDetails {
+        const { sections } = this;
+        return { sections };
+    }
+}
+export class HRTStruct extends Struct implements WithJSONable<GenericJoyStreamRoleSchema> {
+    constructor (value?: GenericJoyStreamRoleSchema) {
+        super({
+            version: "u32",
+            headline: "Text",
+            job: HRTJobSpecificsStruct,
+            application: HRTApplicationDetailsStruct,
+            reward: "Text",
+            creator: HRTCreatorDetailsStruct,
+            process: HRTHiringProcessStruct
+        }, value);
+    }
+    get version(): number {
+        return (this.get('version') as u32).toNumber();
+    }
+    get headline(): string {
+        return (this.get('headline') as Text).toString();
+    }
+    get job(): JobSpecifics {
+        return (this.get('job') as HRTJobSpecificsStruct).toJSON();
+    }
+    get application(): ApplicationDetails {
+        return (this.get('application') as HRTApplicationDetailsStruct).toJSON();
+    }
+    get reward(): string {
+        return (this.get('reward') as Text).toString();
+    }
+    get creator(): CreatorDetails {
+        return (this.get('creator') as HRTCreatorDetailsStruct).toJSON();
+    }
+    get process(): HiringProcess {
+        return (this.get('process') as HRTHiringProcessStruct).toJSON();
+    }
+    toJSON(): GenericJoyStreamRoleSchema {
+        const { version, headline, job, application, reward, creator, process } = this;
+        return { version, headline, job, application, reward, creator, process };
+    }
+};
+
+// A mapping of argName to json struct and schemaValidator
+// It is used to map arguments of type "Bytes" that are in fact a json string
+// (and can be validated against a schema)
+export type JSONArgsMapping = { [argName: string]: {
+    struct: Constructor<Struct>,
+    schemaValidator: ajv.ValidateFunction
+} };

+ 1 - 1
cli/src/base/AccountsCommandBase.ts

@@ -11,7 +11,7 @@ import { NamedKeyringPair } from '../Types';
 import { DerivedBalances } from '@polkadot/api-derive/types';
 import { toFixedLength } from '../helpers/display';
 
-const ACCOUNTS_DIRNAME = '/accounts';
+const ACCOUNTS_DIRNAME = 'accounts';
 const SPECIAL_ACCOUNT_POSTFIX = '__DEV';
 
 /**

+ 349 - 1
cli/src/base/ApiCommandBase.ts

@@ -2,7 +2,19 @@ import ExitCodes from '../ExitCodes';
 import { CLIError } from '@oclif/errors';
 import StateAwareCommandBase from './StateAwareCommandBase';
 import Api from '../Api';
-import { ApiPromise } from '@polkadot/api'
+import { JSONArgsMapping } from '../Types';
+import { getTypeDef, createType, Option, Tuple, Bytes } from '@polkadot/types';
+import { Codec, TypeDef, TypeDefInfo, Constructor } from '@polkadot/types/types';
+import { Vec, Struct, Enum } from '@polkadot/types/codec';
+import { ApiPromise } from '@polkadot/api';
+import { KeyringPair } from '@polkadot/keyring/types';
+import chalk from 'chalk';
+import { SubmittableResultImpl } from '@polkadot/api/types';
+import ajv from 'ajv';
+
+export type ApiMethodInputArg = Codec;
+
+class ExtrinsicFailedError extends Error { };
 
 /**
  * Abstract base class for commands that require access to the API.
@@ -25,4 +37,340 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         const apiUri: string = this.getPreservedState().apiUri;
         this.api = await Api.create(apiUri);
     }
+
+    // This is needed to correctly handle some structs, enums etc.
+    // Where the main typeDef doesn't provide enough information
+    protected getRawTypeDef(type: string) {
+        const instance = createType(type as any);
+        return getTypeDef(instance.toRawType());
+    }
+
+    // Prettifier for type names which are actually JSON strings
+    protected prettifyJsonTypeName(json: string) {
+        const obj = JSON.parse(json) as { [key: string]: string };
+        return "{\n"+Object.keys(obj).map(prop => `  ${prop}${chalk.white(':'+obj[prop])}`).join("\n")+"\n}";
+    }
+
+    // Get param name based on TypeDef object
+    protected paramName(typeDef: TypeDef) {
+        return chalk.green(
+            typeDef.displayName ||
+            typeDef.name ||
+            (typeDef.type.startsWith('{') ? this.prettifyJsonTypeName(typeDef.type) : typeDef.type)
+        );
+    }
+
+    // Prompt for simple/plain value (provided as string) of given type
+    async promptForSimple(typeDef: TypeDef, defaultValue?: Codec): Promise<Codec> {
+        const providedValue = await this.simplePrompt({
+            message: `Provide value for ${ this.paramName(typeDef) }`,
+            type: 'input',
+            default: defaultValue?.toString()
+        });
+        return createType(typeDef.type as any, providedValue);
+    }
+
+    // Prompt for Option<Codec> value
+    async promptForOption(typeDef: TypeDef, defaultValue?: Option<Codec>): Promise<Option<Codec>> {
+        const subtype = <TypeDef> typeDef.sub; // We assume that Option always has a single subtype
+        const confirmed = await this.simplePrompt({
+            message: `Do you want to provide the optional ${ this.paramName(typeDef) } parameter?`,
+            type: 'confirm',
+            default: defaultValue ? defaultValue.isSome : false,
+        });
+
+        if (confirmed) {
+            this.openIndentGroup();
+            const value = await this.promptForParam(subtype.type, subtype.name, defaultValue?.unwrapOr(undefined));
+            this.closeIndentGroup();
+            return new Option(subtype.type as any, value);
+        }
+
+        return new Option(subtype.type as any, null);
+    }
+
+    // Prompt for Tuple
+    // TODO: Not well tested yet
+    async promptForTuple(typeDef: TypeDef, defaultValue: Tuple): Promise<Tuple> {
+        console.log(chalk.grey(`Providing values for ${ this.paramName(typeDef) } tuple:`));
+
+        this.openIndentGroup();
+        const result: ApiMethodInputArg[] = [];
+        // We assume that for Tuple there is always at least 1 subtype (perhaps it's even always an array?)
+        const subtypes: TypeDef[] = Array.isArray(typeDef.sub) ? typeDef.sub! : [ typeDef.sub! ];
+
+        for (const [index, subtype] of Object.entries(subtypes)) {
+            const inputParam = await this.promptForParam(subtype.type, subtype.name, defaultValue[parseInt(index)]);
+            result.push(inputParam);
+        }
+        this.closeIndentGroup();
+
+        return new Tuple((subtypes.map(subtype => subtype.type)) as any, result);
+    }
+
+    // Prompt for Struct
+    async promptForStruct(typeDef: TypeDef, defaultValue?: Struct): Promise<ApiMethodInputArg> {
+        console.log(chalk.grey(`Providing values for ${ this.paramName(typeDef) } struct:`));
+
+        this.openIndentGroup();
+        const structType = typeDef.type;
+        const rawTypeDef = this.getRawTypeDef(structType);
+        // We assume struct typeDef always has array of typeDefs inside ".sub"
+        const structSubtypes = rawTypeDef.sub as TypeDef[];
+
+        const structValues: { [key: string]: ApiMethodInputArg } = {};
+        for (const subtype of structSubtypes) {
+            structValues[subtype.name!] =
+                await this.promptForParam(subtype.type, subtype.name, defaultValue && defaultValue.get(subtype.name!));
+        }
+        this.closeIndentGroup();
+
+        return createType(structType as any, structValues);
+    }
+
+    // Prompt for Vec
+    async promptForVec(typeDef: TypeDef, defaultValue?: Vec<Codec>): Promise<Vec<Codec>> {
+        console.log(chalk.grey(`Providing values for ${ this.paramName(typeDef) } vector:`));
+
+        this.openIndentGroup();
+        // We assume Vec always has one TypeDef as ".sub"
+        const subtype = typeDef.sub as TypeDef;
+        let entries: Codec[] = [];
+        let addAnother = false;
+        do {
+            addAnother = await this.simplePrompt({
+                message: `Do you want to add another entry to ${ this.paramName(typeDef) } vector (currently: ${entries.length})?`,
+                type: 'confirm',
+                default: defaultValue ? entries.length < defaultValue.length : false
+            });
+            const defaultEntryValue = defaultValue && defaultValue[entries.length];
+            if (addAnother) {
+                entries.push(await this.promptForParam(subtype.type, subtype.name, defaultEntryValue));
+            }
+        } while (addAnother);
+        this.closeIndentGroup();
+
+        return new Vec(subtype.type as any, entries);
+    }
+
+    // Prompt for Enum
+    async promptForEnum(typeDef: TypeDef, defaultValue?: Enum): Promise<Enum> {
+        const enumType = typeDef.type;
+        const rawTypeDef = this.getRawTypeDef(enumType);
+        // We assume enum always has an array of TypeDefs inside ".sub"
+        const enumSubtypes = rawTypeDef.sub as TypeDef[];
+
+        const enumSubtypeName = await this.simplePrompt({
+            message: `Choose value for ${this.paramName(typeDef)}:`,
+            type: 'list',
+            choices: enumSubtypes.map(subtype => ({
+                name: subtype.name,
+                value: subtype.name
+            })),
+            default: defaultValue?.type
+        });
+
+        const enumSubtype = enumSubtypes.find(st => st.name === enumSubtypeName)!;
+
+        if (enumSubtype.type !== 'Null') {
+            return createType(
+                enumType as any,
+                { [enumSubtype.name!]: await this.promptForParam(enumSubtype.type, enumSubtype.name, defaultValue?.value) }
+            );
+        }
+
+        return createType(enumType as any, enumSubtype.name);
+    }
+
+    // Prompt for param based on "paramType" string (i.e. Option<MemberId>)
+    // TODO: This may not yet work for all possible types
+    async promptForParam(paramType: string, forcedName?: string, defaultValue?: ApiMethodInputArg): Promise<ApiMethodInputArg> {
+        const typeDef = getTypeDef(paramType);
+        const rawTypeDef = this.getRawTypeDef(paramType);
+
+        if (forcedName) {
+            typeDef.name = forcedName;
+        }
+
+        if (rawTypeDef.info === TypeDefInfo.Option) {
+            return await this.promptForOption(typeDef, defaultValue as Option<Codec>);
+        }
+        else if (rawTypeDef.info === TypeDefInfo.Tuple) {
+            return await this.promptForTuple(typeDef, defaultValue as Tuple);
+        }
+        else if (rawTypeDef.info === TypeDefInfo.Struct) {
+            return await this.promptForStruct(typeDef, defaultValue as Struct);
+        }
+        else if (rawTypeDef.info === TypeDefInfo.Enum) {
+            return await this.promptForEnum(typeDef, defaultValue as Enum);
+        }
+        else if (rawTypeDef.info === TypeDefInfo.Vec) {
+            return await this.promptForVec(typeDef, defaultValue as Vec<Codec>);
+        }
+        else {
+            return await this.promptForSimple(typeDef, defaultValue);
+        }
+    }
+
+    async promptForJsonBytes(
+        JsonStruct: Constructor<Struct>,
+        argName?: string,
+        defaultValue?: Bytes,
+        schemaValidator?: ajv.ValidateFunction
+    ) {
+        const rawType = (new JsonStruct()).toRawType();
+        const typeDef = getTypeDef(rawType);
+
+        const defaultStruct =
+            defaultValue &&
+            new JsonStruct(JSON.parse(Buffer.from(defaultValue.toHex().replace('0x', ''), 'hex').toString()));
+
+        if (argName) {
+            typeDef.name = argName;
+        }
+
+        let isValid: boolean = true, jsonText: string;
+        do {
+            const structVal = await this.promptForStruct(typeDef, defaultStruct);
+            jsonText = JSON.stringify(structVal.toJSON());
+            if (schemaValidator) {
+                isValid = Boolean(schemaValidator(JSON.parse(jsonText)));
+                if (!isValid) {
+                    this.log("\n");
+                    this.warn(
+                        "Schema validation failed with:\n"+
+                        schemaValidator.errors?.map(e => chalk.red(`${chalk.bold(e.dataPath)}: ${e.message}`)).join("\n")+
+                        "\nTry again..."
+                    )
+                    this.log("\n");
+                }
+            }
+        } while(!isValid);
+
+        return new Bytes('0x'+Buffer.from(jsonText, 'ascii').toString('hex'));
+    }
+
+    async promptForExtrinsicParams(
+        module: string,
+        method: string,
+        jsonArgs?: JSONArgsMapping,
+        defaultValues?: ApiMethodInputArg[]
+    ): Promise<ApiMethodInputArg[]> {
+        const extrinsicMethod = this.getOriginalApi().tx[module][method];
+        let values: ApiMethodInputArg[] = [];
+
+        this.openIndentGroup();
+        for (const [index, arg] of Object.entries(extrinsicMethod.meta.args.toArray())) {
+            const argName = arg.name.toString();
+            const argType = arg.type.toString();
+            const defaultValue = defaultValues && defaultValues[parseInt(index)];
+            if (jsonArgs && jsonArgs[argName]) {
+                const { struct, schemaValidator } = jsonArgs[argName];
+                values.push(await this.promptForJsonBytes(struct, argName, defaultValue as Bytes, schemaValidator));
+            }
+            else {
+                values.push(await this.promptForParam(argType, argName, defaultValue));
+            }
+        };
+        this.closeIndentGroup();
+
+        return values;
+    }
+
+    sendExtrinsic(account: KeyringPair, module: string, method: string, params: Codec[]) {
+        return new Promise((resolve, reject) => {
+            const extrinsicMethod = this.getOriginalApi().tx[module][method];
+            let unsubscribe: () => void;
+            extrinsicMethod(...params)
+                .signAndSend(account, {}, (result: SubmittableResultImpl) => {
+                    // Implementation loosely based on /pioneer/packages/react-signer/src/Modal.tsx
+                    if (!result || !result.status) {
+                        return;
+                    }
+
+                    if (result.status.isFinalized) {
+                      unsubscribe();
+                      result.events
+                        .filter(({ event: { section } }): boolean => section === 'system')
+                        .forEach(({ event: { method } }): void => {
+                          if (method === 'ExtrinsicFailed') {
+                            reject(new ExtrinsicFailedError('Extrinsic execution error!'));
+                          } else if (method === 'ExtrinsicSuccess') {
+                            resolve();
+                          }
+                        });
+                    } else if (result.isError) {
+                        reject(new ExtrinsicFailedError('Extrinsic execution error!'));
+                    }
+                })
+                .then(unsubFunc => unsubscribe = unsubFunc)
+                .catch(e => reject(new ExtrinsicFailedError(`Cannot send the extrinsic: ${e.message ? e.message : JSON.stringify(e)}`)));
+        });
+    }
+
+    async sendAndFollowExtrinsic(
+        account: KeyringPair,
+        module: string,
+        method: string,
+        params: Codec[],
+        warnOnly: boolean = false // If specified - only warning will be displayed (instead of error being thrown)
+    ) {
+        try {
+            this.log(chalk.white(`\nSending ${ module }.${ method } extrinsic...`));
+            await this.sendExtrinsic(account, module, method, params);
+            this.log(chalk.green(`Extrinsic successful!`));
+        } catch (e) {
+            if (e instanceof ExtrinsicFailedError && warnOnly) {
+                this.warn(`${ module }.${ method } extrinsic failed! ${ e.message }`);
+            }
+            else if (e instanceof ExtrinsicFailedError) {
+                throw new CLIError(`${ module }.${ method } extrinsic failed! ${ e.message }`, { exit: ExitCodes.ApiError });
+            }
+            else {
+                throw e;
+            }
+        }
+    }
+
+    async buildAndSendExtrinsic(
+        account: KeyringPair,
+        module: string,
+        method: string,
+        jsonArgs?: JSONArgsMapping, // Special JSON arguments (ie. human_readable_text of working group opening)
+        defaultValues?: ApiMethodInputArg[],
+        warnOnly: boolean = false // If specified - only warning will be displayed (instead of error being thrown)
+    ): Promise<ApiMethodInputArg[]> {
+        const params = await this.promptForExtrinsicParams(module, method, jsonArgs, defaultValues);
+        await this.sendAndFollowExtrinsic(account, module, method, params, warnOnly);
+
+        return params;
+    }
+
+    extrinsicArgsFromDraft(module: string, method: string, draftFilePath: string): ApiMethodInputArg[] {
+        let draftJSONObj, parsedArgs: ApiMethodInputArg[] = [];
+        const extrinsicMethod = this.getOriginalApi().tx[module][method];
+        try {
+            draftJSONObj = require(draftFilePath);
+        } catch(e) {
+            throw new CLIError(`Could not load draft from: ${draftFilePath}`, { exit: ExitCodes.InvalidFile });
+        }
+        if (
+            !draftJSONObj
+            || !Array.isArray(draftJSONObj)
+            || draftJSONObj.length !== extrinsicMethod.meta.args.length
+        ) {
+            throw new CLIError(`The draft file at ${draftFilePath} is invalid!`, { exit: ExitCodes.InvalidFile });
+        }
+        for (const [index, arg] of Object.entries(extrinsicMethod.meta.args.toArray())) {
+            const argName = arg.name.toString();
+            const argType = arg.type.toString();
+            try {
+                parsedArgs.push(createType(argType as any, draftJSONObj[parseInt(index)]));
+            } catch (e) {
+                throw new CLIError(`Couldn't parse ${argName} value from draft at ${draftFilePath}!`, { exit: ExitCodes.InvalidFile });
+            }
+        }
+
+        return parsedArgs;
+    }
 }

+ 80 - 0
cli/src/base/DefaultCommandBase.ts

@@ -1,11 +1,91 @@
 import ExitCodes from '../ExitCodes';
 import Command from '@oclif/command';
+import inquirer, { DistinctQuestion } from 'inquirer';
+import chalk from 'chalk';
 
 /**
  * Abstract base class for pretty much all commands
  * (prevents console.log from hanging the process and unifies the default exit code)
  */
 export default abstract class DefaultCommandBase extends Command {
+    protected indentGroupsOpened = 0;
+    protected jsonPrettyIdent = '';
+
+    openIndentGroup() {
+        console.group();
+        ++this.indentGroupsOpened;
+    }
+
+    closeIndentGroup() {
+        console.groupEnd();
+        --this.indentGroupsOpened;
+    }
+
+    async simplePrompt(question: DistinctQuestion) {
+        const { result } = await inquirer.prompt([{
+            ...question,
+            name: 'result',
+            // prefix = 2 spaces for each group - 1 (because 1 is always added by default)
+            prefix: Array.from(new Array(this.indentGroupsOpened)).map(() => '  ').join('').slice(1)
+        }]);
+
+        return result;
+    }
+
+    private jsonPrettyIndented(line:string) {
+        return `${this.jsonPrettyIdent}${ line }`;
+    }
+
+    private jsonPrettyOpen(char: '{' | '[') {
+        this.jsonPrettyIdent += '    ';
+        return chalk.gray(char)+"\n";
+    }
+
+    private jsonPrettyClose(char: '}' | ']') {
+        this.jsonPrettyIdent = this.jsonPrettyIdent.slice(0, -4);
+        return this.jsonPrettyIndented(chalk.gray(char));
+    }
+
+    private jsonPrettyKeyVal(key:string, val:any): string {
+        return this.jsonPrettyIndented(chalk.white(`${key}: ${this.jsonPrettyAny(val)}`));
+    }
+
+    private jsonPrettyObj(obj: { [key: string]: any }): string {
+        return this.jsonPrettyOpen('{')
+            + Object.keys(obj).map(k => this.jsonPrettyKeyVal(k, obj[k])).join(',\n') + "\n"
+            + this.jsonPrettyClose('}');
+    }
+
+    private jsonPrettyArr(arr: any[]): string {
+        return this.jsonPrettyOpen('[')
+            + arr.map(v => this.jsonPrettyIndented(this.jsonPrettyAny(v))).join(',\n') + "\n"
+            + this.jsonPrettyClose(']');
+    }
+
+    private jsonPrettyAny(val: any): string {
+        if (Array.isArray(val)) {
+            return this.jsonPrettyArr(val);
+        }
+        else if (typeof val === 'object' && val !== null) {
+            return this.jsonPrettyObj(val);
+        }
+        else if (typeof val === 'string') {
+            return chalk.green(`"${val}"`);
+        }
+
+        // Number, boolean etc.
+        return chalk.cyan(val);
+    }
+
+    jsonPrettyPrint(json: string) {
+        try {
+            const parsed = JSON.parse(json);
+            console.log(this.jsonPrettyAny(parsed));
+        } catch(e) {
+            console.log(this.jsonPrettyAny(json));
+        }
+    }
+
     async finally(err: any) {
         // called after run and catch regardless of whether or not the command errored
         // We'll force exit here, in case there is no error, to prevent console.log from hanging the process

+ 115 - 3
cli/src/base/WorkingGroupsCommandBase.ts

@@ -1,11 +1,18 @@
 import ExitCodes from '../ExitCodes';
 import AccountsCommandBase from './AccountsCommandBase';
 import { flags } from '@oclif/command';
-import { WorkingGroups, AvailableGroups, NamedKeyringPair, GroupMember } from '../Types';
+import { WorkingGroups, AvailableGroups, NamedKeyringPair, GroupMember, GroupOpening } from '../Types';
+import { apiModuleByGroup } from '../Api';
 import { CLIError } from '@oclif/errors';
 import inquirer from 'inquirer';
+import { ApiMethodInputArg } from './ApiCommandBase';
+import fs from 'fs';
+import path from 'path';
+import _ from 'lodash';
+import { ApplicationStageKeys } from '@joystream/types/hiring';
 
 const DEFAULT_GROUP = WorkingGroups.StorageProviders;
+const DRAFTS_FOLDER = 'opening-drafts';
 
 /**
  * Abstract base class for commands related to working groups
@@ -67,11 +74,116 @@ export default abstract class WorkingGroupsCommandBase extends AccountsCommandBa
         return groupMembers[choosenWorkerIndex];
     }
 
+    async promptForApplicationsToAccept(opening: GroupOpening): Promise<number[]> {
+        const acceptableApplications = opening.applications.filter(a => a.stage === ApplicationStageKeys.Active);
+        const acceptedApplications = await this.simplePrompt({
+            message: 'Select successful applicants',
+            type: 'checkbox',
+            choices: acceptableApplications.map(a => ({
+                name: ` ${a.wgApplicationId}: ${a.member?.handle.toString()}`,
+                value: a.wgApplicationId,
+            }))
+        });
+
+        return acceptedApplications;
+    }
+
+    async promptForNewOpeningDraftName() {
+        let
+            draftName: string = '',
+            fileExists: boolean = false,
+            overrideConfirmed: boolean = false;
+
+        do {
+            draftName = await this.simplePrompt({
+                type: 'input',
+                message: 'Provide the draft name',
+                validate: val => (typeof val === 'string' && val.length >= 1) || 'Draft name is required!'
+            });
+
+            fileExists = fs.existsSync(this.getOpeningDraftPath(draftName));
+            if (fileExists) {
+                overrideConfirmed = await this.simplePrompt({
+                    type: 'confirm',
+                    message: 'Such draft already exists. Do you wish to override it?',
+                    default: false
+                });
+            }
+        } while(fileExists && !overrideConfirmed);
+
+        return draftName;
+    }
+
+    async promptForOpeningDraft() {
+        let draftFiles: string[] = [];
+        try {
+            draftFiles = fs.readdirSync(this.getOpeingDraftsPath());
+        }
+        catch(e) {
+            throw this.createDataReadError(DRAFTS_FOLDER);
+        }
+        if (!draftFiles.length) {
+            throw new CLIError('No drafts available!', { exit: ExitCodes.FileNotFound });
+        }
+        const draftNames = draftFiles.map(fileName => _.startCase(fileName.replace('.json', '')));
+        const selectedDraftName = await this.simplePrompt({
+            message: 'Select a draft',
+            type: 'list',
+            choices: draftNames
+        });
+
+        return selectedDraftName;
+    }
+
+    loadOpeningDraftParams(draftName: string) {
+        const draftFilePath = this.getOpeningDraftPath(draftName);
+        const params = this.extrinsicArgsFromDraft(
+            apiModuleByGroup[this.group],
+            'addOpening',
+            draftFilePath
+        );
+
+        return params;
+    }
+
+    getOpeingDraftsPath() {
+        return path.join(this.getAppDataPath(), DRAFTS_FOLDER);
+    }
+
+    getOpeningDraftPath(draftName: string) {
+        return path.join(this.getOpeingDraftsPath(), _.snakeCase(draftName)+'.json');
+    }
+
+    saveOpeningDraft(draftName: string, params: ApiMethodInputArg[]) {
+        const paramsJson = JSON.stringify(
+            params.map(p => p.toJSON()),
+            null,
+            2
+        );
+
+        try {
+            fs.writeFileSync(this.getOpeningDraftPath(draftName), paramsJson);
+        } catch(e) {
+            throw this.createDataWriteError(DRAFTS_FOLDER);
+        }
+    }
+
+    private initOpeningDraftsDir(): void {
+        if (!fs.existsSync(this.getOpeingDraftsPath())) {
+            fs.mkdirSync(this.getOpeingDraftsPath());
+        }
+    }
+
     async init() {
         await super.init();
-        const { flags } = this.parse(WorkingGroupsCommandBase);
+        try {
+            this.initOpeningDraftsDir();
+        } catch (e) {
+            throw this.createDataDirInitError();
+        }
+        const { flags } = this.parse(this.constructor as typeof WorkingGroupsCommandBase);
         if (!AvailableGroups.includes(flags.group as any)) {
-            throw new CLIError('Invalid group!', { exit: ExitCodes.InvalidInput });
+            throw new CLIError(`Invalid group! Available values are: ${AvailableGroups.join(', ')}`, { exit: ExitCodes.InvalidInput });
         }
         this.group = flags.group as WorkingGroups;
     }

+ 11 - 61
cli/src/commands/api/inspect.ts

@@ -2,14 +2,13 @@ import { flags } from '@oclif/command';
 import { CLIError } from '@oclif/errors';
 import { displayNameValueTable } from '../../helpers/display';
 import { ApiPromise } from '@polkadot/api';
-import { getTypeDef } from '@polkadot/types';
-import { Codec, TypeDef, TypeDefInfo } from '@polkadot/types/types';
+import { Option } from '@polkadot/types';
+import { Codec } from '@polkadot/types/types';
 import { ConstantCodec } from '@polkadot/api-metadata/consts/types';
 import ExitCodes from '../../ExitCodes';
 import chalk from 'chalk';
 import { NameValueObj } from '../../Types';
-import inquirer from 'inquirer';
-import ApiCommandBase from '../../base/ApiCommandBase';
+import ApiCommandBase, { ApiMethodInputArg } from '../../base/ApiCommandBase';
 
 // Command flags type
 type ApiInspectFlags = {
@@ -30,12 +29,6 @@ const TYPES_AVAILABLE = [
 // It works as if we specified: type ApiType = 'query' | 'consts'...;
 type ApiType = typeof TYPES_AVAILABLE[number];
 
-// Format of the api input args (as they are specified in the CLI)
-type ApiMethodInputSimpleArg = string;
-// This recurring type allows the correct handling of nested types like:
-// ((Type1, Type2), Option<Type3>) etc.
-type ApiMethodInputArg = ApiMethodInputSimpleArg | ApiMethodInputArg[];
-
 export default class ApiInspect extends ApiCommandBase {
     static description =
         'Lists available node API modules/methods and/or their description(s), '+
@@ -154,62 +147,19 @@ export default class ApiInspect extends ApiCommandBase {
         return { apiType, apiModule, apiMethod };
     }
 
-    // Prompt for simple value (string)
-    async promptForSimple(typeName: string): Promise<string> {
-        const userInput = await inquirer.prompt([{
-            name: 'providedValue',
-            message: `Provide value for ${ typeName }`,
-            type: 'input'
-        } ])
-        return <string> userInput.providedValue;
-    }
-
-    // Prompt for optional value (returns undefined if user refused to provide)
-    async promptForOption(typeDef: TypeDef): Promise<ApiMethodInputArg | undefined> {
-        const userInput = await inquirer.prompt([{
-            name: 'confirmed',
-            message: `Do you want to provide the optional ${ typeDef.type } parameter?`,
-            type: 'confirm'
-        } ]);
-
-        if (userInput.confirmed) {
-            const subtype = <TypeDef> typeDef.sub; // We assume that Opion always has a single subtype
-            let value = await this.promptForParam(subtype.type);
-            return value;
-        }
-    }
-
-    // Prompt for tuple - returns array of values
-    async promptForTuple(typeDef: TypeDef): Promise<(ApiMethodInputArg)[]> {
-        let result: ApiMethodInputArg[] = [];
-
-        if (!typeDef.sub) return [ await this.promptForSimple(typeDef.type) ];
-
-        const subtypes: TypeDef[] = Array.isArray(typeDef.sub) ? typeDef.sub : [ typeDef.sub ];
-
-        for (let subtype of subtypes) {
-            let inputParam = await this.promptForParam(subtype.type);
-            if (inputParam !== undefined) result.push(inputParam);
-        }
-
-        return result;
-    }
-
-    // Prompt for param based on "paramType" string (ie. Option<MemeberId>)
-    async promptForParam(paramType: string): Promise<ApiMethodInputArg | undefined> {
-        const typeDef: TypeDef = getTypeDef(paramType);
-        if (typeDef.info === TypeDefInfo.Option) return await this.promptForOption(typeDef);
-        else if (typeDef.info === TypeDefInfo.Tuple) return await this.promptForTuple(typeDef);
-        else return await this.promptForSimple(typeDef.type);
-    }
-
     // Request values for params using array of param types (strings)
     async requestParamsValues(paramTypes: string[]): Promise<ApiMethodInputArg[]> {
         let result: ApiMethodInputArg[] = [];
         for (let [key, paramType] of Object.entries(paramTypes)) {
             this.log(chalk.bold.white(`Parameter no. ${ parseInt(key)+1 } (${ paramType }):`));
             let paramValue = await this.promptForParam(paramType);
-            if (paramValue !== undefined) result.push(paramValue);
+            if (paramValue instanceof Option && paramValue.isSome) {
+                result.push(paramValue.unwrap());
+            }
+            else if (!(paramValue instanceof Option)) {
+                result.push(paramValue);
+            }
+            // In case of empty option we MUST NOT add anything to the array (otherwise it causes some error)
         }
 
         return result;
@@ -227,7 +177,7 @@ export default class ApiInspect extends ApiCommandBase {
 
             if (apiType === 'query') {
                 // Api query - call with (or without) arguments
-                let args: ApiMethodInputArg[] = flags.callArgs ? flags.callArgs.split(',') : [];
+                let args: (string | ApiMethodInputArg)[] = flags.callArgs ? flags.callArgs.split(',') : [];
                 const paramsTypes: string[] = this.getQueryMethodParamsTypes(apiModule, apiMethod);
                 if (args.length < paramsTypes.length) {
                     this.warn('Some parameters are missing! Please, provide the missing parameters:');

+ 40 - 0
cli/src/commands/working-groups/application.ts

@@ -0,0 +1,40 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import { displayCollapsedRow, displayHeader } from '../../helpers/display';
+import _ from 'lodash';
+import chalk from 'chalk';
+
+export default class WorkingGroupsApplication extends WorkingGroupsCommandBase {
+    static description = 'Shows an overview of given application by Working Group Application ID';
+    static args = [
+        {
+            name: 'wgApplicationId',
+            required: true,
+            description: 'Working Group Application ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsApplication);
+
+        const application = await this.getApi().groupApplication(this.group, parseInt(args.wgApplicationId));
+
+        displayHeader('Human readable text');
+        this.jsonPrettyPrint(application.humanReadableText);
+
+        displayHeader(`Details`);
+        const applicationRow = {
+            'WG application ID': application.wgApplicationId,
+            'Application ID': application.applicationId,
+            'Member handle': application.member?.handle.toString() || chalk.red('NONE'),
+            'Role account': application.roleAccout.toString(),
+            'Stage': application.stage,
+            'Application stake': application.stakes.application,
+            'Role stake': application.stakes.role,
+            'Total stake': Object.values(application.stakes).reduce((a, b) => a + b)
+        };
+        displayCollapsedRow(applicationRow);
+    }
+}

+ 96 - 0
cli/src/commands/working-groups/createOpening.ts

@@ -0,0 +1,96 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import { HRTStruct } from '../../Types';
+import chalk from 'chalk';
+import { flags } from '@oclif/command';
+import { ApiMethodInputArg } from '../../base/ApiCommandBase';
+import { schemaValidator } from '@joystream/types/hiring';
+import { apiModuleByGroup } from '../../Api';
+
+// CLI command: creates a working group opening via the group's `addOpening`
+// extrinsic. Supports starting from a locally saved draft, saving a new draft,
+// and a fully non-interactive mode (--skipPrompts) that sends draft values as-is.
+// Requires the selected account to be the group lead.
+export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase {
+    static description = 'Create working group opening (requires lead access)';
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+        useDraft: flags.boolean({
+            char: 'd',
+            description:
+                "Whether to create the opening from existing draft.\n"+
+                "If provided without --draftName - the list of choices will be displayed."
+        }),
+        draftName: flags.string({
+            char: 'n',
+            description:
+                'Name of the draft to create the opening from.',
+            dependsOn: ['useDraft']
+        }),
+        createDraftOnly: flags.boolean({
+            char: 'c',
+            description:
+                'If provided - the extrinsic will not be executed. Use this flag if you only want to create a draft.'
+        }),
+        skipPrompts: flags.boolean({
+            char: 's',
+            description:
+                "Whether to skip all prompts when adding from draft (will use all default values)",
+            dependsOn: ['useDraft'],
+            exclusive: ['createDraftOnly']
+        })
+    };
+
+    async run() {
+        const account = await this.getRequiredSelectedAccount();
+        // lead-only gate
+        await this.getRequiredLead();
+
+        const { flags } = this.parse(WorkingGroupsCreateOpening);
+
+        // Draft values (if any) are used as defaults when prompting,
+        // or sent directly when --skipPrompts is set.
+        let defaultValues: ApiMethodInputArg[] | undefined = undefined;
+        if (flags.useDraft) {
+            const draftName = flags.draftName || await this.promptForOpeningDraft();
+            defaultValues =  await this.loadOpeningDraftParams(draftName);
+        }
+
+        if (!flags.skipPrompts) {
+            const module = apiModuleByGroup[this.group];
+            const method = 'addOpening';
+            // human_readable_text is collected as structured JSON validated
+            // against the hiring module's HRT schema.
+            const jsonArgsMapping = { 'human_readable_text': { struct: HRTStruct, schemaValidator } };
+
+            let saveDraft = false, params: ApiMethodInputArg[];
+            if (flags.createDraftOnly) {
+                // Draft-only: collect params interactively but never send the extrinsic.
+                params = await this.promptForExtrinsicParams(module, method, jsonArgsMapping, defaultValues);
+                saveDraft = true;
+            }
+            else {
+                await this.requestAccountDecoding(account); // Prompt for password
+
+                params = await this.buildAndSendExtrinsic(
+                    account,
+                    module,
+                    method,
+                    jsonArgsMapping,
+                    defaultValues,
+                    true
+                );
+
+                this.log(chalk.green('Opening succesfully created!'));
+
+                // Offer to persist the just-used params as a reusable draft.
+                saveDraft = await this.simplePrompt({
+                    message: 'Do you wish to save this opening as draft?',
+                    type: 'confirm'
+                });
+            }
+
+            if (saveDraft) {
+                const draftName = await this.promptForNewOpeningDraftName();
+                this.saveOpeningDraft(draftName, params);
+
+                this.log(chalk.green(`Opening draft ${ chalk.white(draftName) } succesfully saved!`));
+            }
+        }
+        else {
+            await this.requestAccountDecoding(account); // Prompt for password
+            this.log(chalk.white('Sending the extrinsic...'));
+            // Non-null assertion is safe here: --skipPrompts dependsOn --useDraft,
+            // so defaultValues was populated from the draft above.
+            await this.sendExtrinsic(account, apiModuleByGroup[this.group], 'addOpening', defaultValues!);
+            this.log(chalk.green('Opening succesfully created!'));
+        }
+    }
+}

+ 58 - 0
cli/src/commands/working-groups/fillOpening.ts

@@ -0,0 +1,58 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import _ from 'lodash';
+import { OpeningStatus } from '../../Types';
+import ExitCodes from '../../ExitCodes';
+import { apiModuleByGroup } from '../../Api';
+import { OpeningId } from '@joystream/types/hiring';
+import { ApplicationIdSet, RewardPolicy } from '@joystream/types/working-group';
+import chalk from 'chalk';
+
+// CLI command: fills an opening that is in the "In Review" stage by selecting
+// the applications to accept and an optional reward policy, then sending the
+// group's `fillOpening` extrinsic. Requires lead access.
+export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
+    static description = 'Allows filling working group opening that\'s currently in review. Requires lead access.';
+    static args = [
+        {
+            name: 'wgOpeningId',
+            required: true,
+            description: 'Working Group Opening ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsFillOpening);
+
+        const account = await this.getRequiredSelectedAccount();
+        // Lead-only gate
+        await this.getRequiredLead();
+
+        const opening = await this.getApi().groupOpening(this.group, parseInt(args.wgOpeningId));
+
+        // Only openings in review can be filled — fail early with a clear exit code.
+        if (opening.stage.status !== OpeningStatus.InReview) {
+            this.error('This opening is not in the Review stage!', { exit: ExitCodes.InvalidInput });
+        }
+
+        // Interactive selection of accepted applications and an Option<RewardPolicy>.
+        const applicationIds = await this.promptForApplicationsToAccept(opening);
+        const rewardPolicyOpt = await this.promptForParam(`Option<${RewardPolicy.name}>`, 'RewardPolicy');
+
+        await this.requestAccountDecoding(account);
+
+        await this.sendAndFollowExtrinsic(
+            account,
+            apiModuleByGroup[this.group],
+            'fillOpening',
+            [
+                new OpeningId(opening.wgOpeningId),
+                new ApplicationIdSet(applicationIds),
+                rewardPolicyOpt
+            ]
+        );
+
+        this.log(chalk.green(`Opening ${chalk.white(opening.wgOpeningId)} succesfully filled!`));
+        this.log(
+            chalk.green('Accepted working group application IDs: ') +
+            chalk.white(applicationIds.length ? applicationIds.join(chalk.green(', ')) : 'NONE')
+        );
+    }
+}

+ 78 - 0
cli/src/commands/working-groups/opening.ts

@@ -0,0 +1,78 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import { displayTable, displayCollapsedRow, displayHeader } from '../../helpers/display';
+import _ from 'lodash';
+import { OpeningStatus, GroupOpeningStage, GroupOpeningStakes } from '../../Types';
+import { StakingAmountLimitModeKeys, StakingPolicy } from '@joystream/types/hiring';
+import { formatBalance } from '@polkadot/util';
+import chalk from 'chalk';
+
+// CLI command: prints a full overview of a single working group opening —
+// its human-readable text, stage/stake details, and a table of applications.
+export default class WorkingGroupsOpening extends WorkingGroupsCommandBase {
+    static description = 'Shows an overview of given working group opening by Working Group Opening ID';
+    static args = [
+        {
+            name: 'wgOpeningId',
+            required: true,
+            description: 'Working Group Opening ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    // Builds the stage-related columns; the time column's header depends on
+    // whether the opening hasn't started yet ("Starts at") or already changed
+    // status ("Last status change").
+    stageColumns(stage: GroupOpeningStage) {
+        const { status, date, block } = stage;
+        const statusTimeHeader = status === OpeningStatus.WaitingToBegin ? 'Starts at' : 'Last status change';
+        return {
+            'Stage': _.startCase(status),
+            [statusTimeHeader]: (date && block)
+                ? `~ ${date.toLocaleTimeString()} ${ date.toLocaleDateString()} (#${block})`
+                : (block && `#${block}` || '?')
+        };
+    }
+
+    // Renders a StakingPolicy as ">= X" (AtLeast mode) or "== X" (exact mode);
+    // "NONE" when no staking policy is set.
+    formatStake(stake: StakingPolicy | undefined) {
+        if (!stake) return 'NONE';
+        const { amount, amount_mode } = stake;
+        return amount_mode.type === StakingAmountLimitModeKeys.AtLeast
+            ? `>= ${ formatBalance(amount) }`
+            : `== ${ formatBalance(amount) }`;
+    }
+
+    // Columns for the opening's application/role stake requirements.
+    stakeColumns(stakes: GroupOpeningStakes) {
+        const { role, application } = stakes;
+        return {
+            'Application stake': this.formatStake(application),
+            'Role stake': this.formatStake(role),
+        }
+    }
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsOpening);
+
+        const opening = await this.getApi().groupOpening(this.group, parseInt(args.wgOpeningId));
+
+        displayHeader('Human readable text');
+        this.jsonPrettyPrint(opening.opening.human_readable_text.toString());
+
+        displayHeader('Opening details');
+        const openingRow = {
+            'WG Opening ID': opening.wgOpeningId,
+            'Opening ID': opening.openingId,
+            ...this.stageColumns(opening.stage),
+            ...this.stakeColumns(opening.stakes)
+        };
+        displayCollapsedRow(openingRow);
+
+        displayHeader(`Applications (${opening.applications.length})`);
+        const applicationsRows = opening.applications.map(a => ({
+            'WG appl. ID': a.wgApplicationId,
+            'Appl. ID': a.applicationId,
+            'Member': a.member?.handle.toString() || chalk.red('NONE'),
+            'Stage': a.stage,
+            'Appl. stake': a.stakes.application,
+            'Role stake': a.stakes.role,
+            // Sum of the application's two stakes (application + role).
+            'Total stake': Object.values(a.stakes).reduce((a, b) => a + b)
+        }));
+        displayTable(applicationsRows, 5);
+    }
+  }

+ 22 - 0
cli/src/commands/working-groups/openings.ts

@@ -0,0 +1,22 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import { displayTable } from '../../helpers/display';
+import _ from 'lodash';
+
+// CLI command: lists all openings of the currently selected working group
+// as a table (IDs, stage with last-change block, application count).
+export default class WorkingGroupsOpenings extends WorkingGroupsCommandBase {
+    static description = 'Shows an overview of given working group openings';
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const openings = await this.getApi().openingsByGroup(this.group);
+
+        const openingsRows = openings.map(o => ({
+            'WG Opening ID': o.wgOpeningId,
+            'Opening ID': o.openingId,
+            // Stage name plus the block of the last status change, when known.
+            'Stage': `${_.startCase(o.stage.status)}${o.stage.block ? ` (#${o.stage.block})` : ''}`,
+            'Applications': o.applications.length
+        }));
+        displayTable(openingsRows, 5);
+    }
+}

+ 1 - 1
cli/src/commands/working-groups/overview.ts

@@ -33,6 +33,6 @@ export default class WorkingGroupsOverview extends WorkingGroupsCommandBase {
             'Stake': formatBalance(m.stake),
             'Earned': formatBalance(m.earned)
         }));
-        displayTable(membersRows, 20);
+        displayTable(membersRows, 5);
     }
   }

+ 46 - 0
cli/src/commands/working-groups/startAcceptingApplications.ts

@@ -0,0 +1,46 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import _ from 'lodash';
+import { OpeningStatus } from '../../Types';
+import ExitCodes from '../../ExitCodes';
+import { apiModuleByGroup } from '../../Api';
+import { OpeningId } from '@joystream/types/hiring';
+import chalk from 'chalk';
+
+// CLI command: transitions a "Waiting To Begin" opening into the
+// "Accepting Applications" stage via the group's `acceptApplications`
+// extrinsic. Requires lead access.
+export default class WorkingGroupsStartAcceptingApplications extends WorkingGroupsCommandBase {
+    static description = 'Changes the status of pending opening to "Accepting applications". Requires lead access.';
+    static args = [
+        {
+            name: 'wgOpeningId',
+            required: true,
+            description: 'Working Group Opening ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsStartAcceptingApplications);
+
+        const account = await this.getRequiredSelectedAccount();
+        // Lead-only gate
+        await this.getRequiredLead();
+
+        const opening = await this.getApi().groupOpening(this.group, parseInt(args.wgOpeningId));
+
+        // The transition is only valid from "Waiting To Begin" — fail early.
+        if (opening.stage.status !== OpeningStatus.WaitingToBegin) {
+            this.error('This opening is not in "Waiting To Begin" stage!', { exit: ExitCodes.InvalidInput });
+        }
+
+        await this.requestAccountDecoding(account);
+
+        await this.sendAndFollowExtrinsic(
+            account,
+            apiModuleByGroup[this.group],
+            'acceptApplications',
+            [ new OpeningId(opening.wgOpeningId) ]
+        );
+
+        this.log(chalk.green(`Opening ${chalk.white(opening.wgOpeningId)} status changed to: ${ chalk.white('Accepting Applications') }`));
+    }
+}

+ 46 - 0
cli/src/commands/working-groups/startReviewPeriod.ts

@@ -0,0 +1,46 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import _ from 'lodash';
+import { OpeningStatus } from '../../Types';
+import ExitCodes from '../../ExitCodes';
+import { apiModuleByGroup } from '../../Api';
+import { OpeningId } from '@joystream/types/hiring';
+import chalk from 'chalk';
+
+// CLI command: transitions an "Accepting Applications" opening into the
+// "In Review" stage via the group's `beginApplicantReview` extrinsic.
+// Requires lead access.
+export default class WorkingGroupsStartReviewPeriod extends WorkingGroupsCommandBase {
+    static description = 'Changes the status of active opening to "In review". Requires lead access.';
+    static args = [
+        {
+            name: 'wgOpeningId',
+            required: true,
+            description: 'Working Group Opening ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsStartReviewPeriod);
+
+        const account = await this.getRequiredSelectedAccount();
+        // Lead-only gate
+        await this.getRequiredLead();
+
+        const opening = await this.getApi().groupOpening(this.group, parseInt(args.wgOpeningId));
+
+        // The transition is only valid from "Accepting Applications" — fail early.
+        if (opening.stage.status !== OpeningStatus.AcceptingApplications) {
+            this.error('This opening is not in "Accepting Applications" stage!', { exit: ExitCodes.InvalidInput });
+        }
+
+        await this.requestAccountDecoding(account);
+
+        await this.sendAndFollowExtrinsic(
+            account,
+            apiModuleByGroup[this.group],
+            'beginApplicantReview',
+            [ new OpeningId(opening.wgOpeningId) ]
+        );
+
+        this.log(chalk.green(`Opening ${chalk.white(opening.wgOpeningId)} status changed to: ${ chalk.white('In Review') }`));
+    }
+}

+ 45 - 0
cli/src/commands/working-groups/terminateApplication.ts

@@ -0,0 +1,45 @@
+import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase';
+import _ from 'lodash';
+import ExitCodes from '../../ExitCodes';
+import { apiModuleByGroup } from '../../Api';
+import { ApplicationStageKeys, ApplicationId } from '@joystream/types/hiring';
+import chalk from 'chalk';
+
+// CLI command: terminates an active working group application via the
+// group's `terminateApplication` extrinsic. Requires lead access.
+export default class WorkingGroupsTerminateApplication extends WorkingGroupsCommandBase {
+    static description = 'Terminates given working group application. Requires lead access.';
+    static args = [
+        {
+            name: 'wgApplicationId',
+            required: true,
+            description: 'Working Group Application ID'
+        },
+    ]
+    static flags = {
+        ...WorkingGroupsCommandBase.flags,
+    };
+
+    async run() {
+        const { args } = this.parse(WorkingGroupsTerminateApplication);
+
+        const account = await this.getRequiredSelectedAccount();
+        // Lead-only gate
+        await this.getRequiredLead();
+
+        const application = await this.getApi().groupApplication(this.group, parseInt(args.wgApplicationId));
+
+        // Only active applications can be terminated — fail early with a clear exit code.
+        if (application.stage !== ApplicationStageKeys.Active) {
+            this.error('This application is not active!', { exit: ExitCodes.InvalidInput });
+        }
+
+        await this.requestAccountDecoding(account);
+
+        await this.sendAndFollowExtrinsic(
+            account,
+            apiModuleByGroup[this.group],
+            'terminateApplication',
+            [new ApplicationId(application.wgApplicationId)]
+        );
+
+        this.log(chalk.green(`Application ${chalk.white(application.wgApplicationId)} has been succesfully terminated!`));
+    }
+}

+ 24 - 3
cli/src/helpers/display.ts

@@ -23,13 +23,34 @@ export function displayNameValueTable(rows: NameValueObj[]) {
     );
 }
 
-export function displayTable(rows: { [k: string]: string }[], minColumnWidth = 0) {
+// Renders a single record vertically (one name/value line per key) by
+// converting it into NameValueObj entries and delegating to displayNameValueTable.
+export function displayCollapsedRow(row: { [k: string]: string | number }) {
+    const collapsedRow: NameValueObj[] = Object.keys(row).map(name => ({
+        name,
+        // Numbers are stringified; strings pass through unchanged.
+        value: typeof row[name] === 'string' ? row[name] as string : row[name].toString()
+    }));
+
+    displayNameValueTable(collapsedRow);
+}
+
+// Renders each record as its own collapsed (vertical) name/value table.
+export function displayCollapsedTable(rows: { [k: string]: string | number }[]) {
+    for (const row of rows) displayCollapsedRow(row);
+}
+
+export function displayTable(rows: { [k: string]: string | number }[], cellHorizontalPadding = 0) {
     if (!rows.length) {
         return;
     }
+    const maxLength = (columnName: string) => rows.reduce(
+        (maxLength, row) => {
+            const val = row[columnName];
+            const valLength = typeof val === 'string' ? val.length : val.toString().length;
+            return Math.max(maxLength, valLength);
+        },
+        columnName.length
+    )
     const columnDef = (columnName: string) => ({
-        get: (row: typeof rows[number])  => chalk.white(row[columnName]),
-        minWidth: minColumnWidth
+        get: (row: typeof rows[number])  => chalk.white(`${row[columnName]}`),
+        minWidth: maxLength(columnName) + cellHorizontalPadding
     });
     let columns: Table.table.Columns<{ [k: string]: string }> = {};
     Object.keys(rows[0]).forEach(columnName => columns[columnName] = columnDef(columnName))

+ 1 - 1
node/Cargo.toml

@@ -3,7 +3,7 @@ authors = ['Joystream']
 build = 'build.rs'
 edition = '2018'
 name = 'joystream-node'
-version = '2.4.1'
+version = '2.5.0'
 default-run = "joystream-node"
 
 [[bin]]

+ 2 - 1
package.json

@@ -30,7 +30,8 @@
 		"typescript": "^3.7.2"
 	},
 	"devDependencies": {
-		"husky": "^4.2.5"
+		"husky": "^4.2.5",
+		"eslint-plugin-prettier": "^3.1.4"
 	},
 	"husky": {
 	  "hooks": {

+ 2 - 1
pioneer/.eslintrc.js

@@ -15,6 +15,7 @@ module.exports = {
     '@typescript-eslint/camelcase': 'off',
     'react/prop-types': 'off',
     'new-cap': 'off',
-    '@typescript-eslint/interface-name-prefix': 'off'
+    '@typescript-eslint/interface-name-prefix': 'off',
+    '@typescript-eslint/ban-ts-comment': 'error'
   }
 };

+ 5 - 1
pioneer/packages/joy-roles/src/OpeningMetadata.ts

@@ -1,6 +1,10 @@
+import { WorkingGroups } from './working_groups';
+import { OpeningType } from '@joystream/types/working-group';
+
 export type OpeningMetadata = {
   id: string;
-  group: string;
+  group: WorkingGroups;
+  type?: OpeningType;
 }
 
 export type OpeningMetadataProps = {

+ 2 - 0
pioneer/packages/joy-roles/src/elements.tsx

@@ -9,6 +9,7 @@ import Identicon from '@polkadot/react-identicon';
 import { IProfile, MemberId } from '@joystream/types/members';
 import { GenericAccountId } from '@polkadot/types';
 import { LeadRoleState } from '@joystream/types/content-working-group';
+import { WorkerId } from '@joystream/types/working-group';
 
 type BalanceProps = {
   balance?: Balance;
@@ -47,6 +48,7 @@ export type GroupMember = {
 
 export type GroupLead = {
   memberId: MemberId;
+  workerId?: WorkerId; // In case of "working-group" module
   roleAccount: GenericAccountId;
   profile: IProfile;
   title: string;

+ 42 - 18
pioneer/packages/joy-roles/src/flows/apply.controller.tsx

@@ -17,7 +17,7 @@ import { keyPairDetails, FlowModal, ProgressSteps } from './apply';
 
 import { OpeningStakeAndApplicationStatus } from '../tabs/Opportunities';
 import { Min, Step, Sum } from '../balances';
-import { WorkingGroups } from '../working_groups';
+import { WorkingGroups, AvailableGroups } from '../working_groups';
 
 type State = {
   // Input data from state
@@ -39,6 +39,7 @@ type State = {
 
   // Data generated for transaction
   transactionDetails: Map<string, string>;
+  roleKeyNameBase: string;
   roleKeyName: string;
 
   // Error capture and display
@@ -52,6 +53,7 @@ const newEmptyState = (): State => {
     appDetails: {},
     hasError: false,
     transactionDetails: new Map<string, string>(),
+    roleKeyNameBase: '',
     roleKeyName: '',
     txKeyAddress: new AccountId(),
     activeStep: 0,
@@ -61,41 +63,51 @@ const newEmptyState = (): State => {
 };
 
 export class ApplyController extends Controller<State, ITransport> {
-  protected currentOpeningId = -1
+  protected currentOpeningId = -1;
+  protected currentGroup: WorkingGroups | null = null;
 
-  constructor (transport: ITransport, initialState: State = newEmptyState()) {
+  constructor (
+    transport: ITransport,
+    initialState: State = newEmptyState()
+  ) {
     super(transport, initialState);
 
     this.transport.accounts().subscribe((keys) => this.updateAccounts(keys));
   }
 
+  protected parseGroup (group: string | undefined): WorkingGroups | undefined {
+    return AvailableGroups.find(availableGroup => availableGroup === group);
+  }
+
   protected updateAccounts (keys: keyPairDetails[]) {
     this.state.keypairs = keys;
     this.dispatch();
   }
 
-  findOpening (rawId: string | undefined) {
+  findOpening (rawId: string | undefined, rawGroup: string | undefined) {
     if (!rawId) {
       return this.onError('ApplyController: no ID provided in params');
     }
     const id = parseInt(rawId);
+    const group = this.parseGroup(rawGroup);
+
+    if (!group) {
+      return this.onError('ApplyController: invalid group');
+    }
 
-    if (this.currentOpeningId === id) {
+    if (this.currentOpeningId === id && this.currentGroup === group) {
       return;
     }
 
     Promise.all(
       [
-        this.transport.curationGroupOpening(id),
-        this.transport.openingApplicationRanks(id)
+        this.transport.groupOpening(group, id),
+        this.transport.openingApplicationRanks(group, id)
       ]
     )
       .then(
         ([opening, ranks]) => {
-          const hrt = opening.opening.parse_human_readable_text();
-          if (typeof hrt !== 'object') {
-            return this.onError('human_readable_text is not an object');
-          }
+          const hrt = opening.opening.parse_human_readable_text_with_fallback();
 
           this.state.role = hrt;
           this.state.applications = opening.applications;
@@ -112,7 +124,7 @@ export class ApplyController extends Controller<State, ITransport> {
             ? ProgressSteps.ConfirmStakes
             : ProgressSteps.ApplicationDetails;
 
-          this.state.roleKeyName = hrt.job.title + ' role key';
+          this.state.roleKeyNameBase = hrt.job.title + ' role key';
 
           // When everything is collected, update the view
           this.dispatch();
@@ -121,11 +133,13 @@ export class ApplyController extends Controller<State, ITransport> {
       .catch(
         (err: any) => {
           this.currentOpeningId = -1;
+          this.currentGroup = null;
           this.onError(err);
         }
       );
 
     this.currentOpeningId = id;
+    this.currentGroup = group;
   }
 
   setApplicationStake (b: Balance): void {
@@ -183,8 +197,22 @@ export class ApplyController extends Controller<State, ITransport> {
     return true;
   }
 
+  private updateRoleKeyName () {
+    let roleKeyNamePrefix = 0;
+    do {
+      this.state.roleKeyName = `${this.state.roleKeyNameBase}${(++roleKeyNamePrefix > 1 ? ` ${roleKeyNamePrefix}` : '')}`;
+    } while (this.state.keypairs?.some(k => (
+      k.shortName.toLowerCase() === this.state.roleKeyName.toLowerCase()
+    )));
+  }
+
   async makeApplicationTransaction (): Promise<number> {
-    return this.transport.applyToCuratorOpening(
+    if (!this.currentGroup || this.currentOpeningId < 0) {
+      throw new Error('Trying to apply to unfetched opening');
+    }
+    this.updateRoleKeyName();
+    return this.transport.applyToOpening(
+      this.currentGroup,
       this.currentOpeningId,
       this.state.roleKeyName,
       this.state.txKeyAddress.toString(),
@@ -197,14 +225,10 @@ export class ApplyController extends Controller<State, ITransport> {
 
 export const ApplyView = View<ApplyController, State>(
   (state, controller, params) => {
-    if (params.get('group') !== WorkingGroups.ContentCurators) {
-      return <h1>Applying not yet implemented for this group!</h1>;
-    }
-    controller.findOpening(params.get('id'));
+    controller.findOpening(params.get('id'), params.get('group'));
     return (
       <Container className="apply-flow">
         <div className="dimmer"></div>
-        // @ts-ignore
         <FlowModal
           role={state.role!}
           applications={state.applications!}

+ 2 - 0
pioneer/packages/joy-roles/src/flows/apply.elements.stories.tsx

@@ -1,3 +1,5 @@
+// TODO: FIXME: Remove the ts-nocheck and fix errors!
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
 // @ts-nocheck
 import React, { useState } from 'react';
 import { number, object, withKnobs } from '@storybook/addon-knobs';

+ 2 - 0
pioneer/packages/joy-roles/src/flows/apply.stories.tsx

@@ -1,3 +1,5 @@
+// TODO: FIXME: Remove the ts-nocheck and fix errors!
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
 // @ts-nocheck
 import React from 'react';
 import { number, object, select, text, withKnobs } from '@storybook/addon-knobs';

+ 55 - 101
pioneer/packages/joy-roles/src/flows/apply.tsx

@@ -172,17 +172,17 @@ export function FundSourceSelector (props: FundSourceSelectorProps & FundSourceC
   );
 }
 
-function rankIcon (place: number, slots: number): SemanticICONS {
-  if (place <= 1) {
-    return 'thermometer empty';
-  } else if (place <= (slots / 4)) {
-    return 'thermometer quarter';
-  } else if (place <= (slots / 2)) {
-    return 'thermometer half';
-  } else if (place > (slots / 2) && place < slots) {
+function rankIcon (estimatedSlot: number, slots: number): SemanticICONS {
+  if (estimatedSlot === 1) { // 1st place
+    return 'thermometer';
+  } else if (estimatedSlot <= (slots / 3)) { // Places 2-33 if slotsCount == 100
     return 'thermometer three quarters';
+  } else if (estimatedSlot <= (slots / 1.5)) { // Places 34-66 if slotsCount == 100
+    return 'thermometer half';
+  } else if (estimatedSlot <= slots) { // Places 67-100 if slotsCount == 100
+    return 'thermometer quarter';
   }
-  return 'thermometer';
+  return 'thermometer empty'; // Places >100 for slotsCount == 100
 }
 
 export type StakeRankSelectorProps = {
@@ -192,40 +192,28 @@ export type StakeRankSelectorProps = {
   step: Balance;
   otherStake: Balance;
   requirement: IStakeRequirement;
+  maxNumberOfApplications: number;
 }
 
 export function StakeRankSelector (props: StakeRankSelectorProps) {
   const slotCount = props.slots.length;
-  const [rank, setRank] = useState(1);
-  const minStake = props.requirement.value;
+  const minStake = props.maxNumberOfApplications && props.slots.length === props.maxNumberOfApplications
+    ? props.slots[0].sub(props.otherStake).addn(1) // Slots are ordered by stake ASC
+    : props.requirement.value;
+  const stakeSufficient = props.stake.gte(minStake);
 
   const ticks = [];
   for (let i = 0; i < slotCount; i++) {
     ticks.push(<div key={i} className="tick" style={{ width: (100 / slotCount) + '%' }}>{slotCount - i}</div>);
   }
 
-  const findRankValue = (newStake: Balance): number => {
-    if (newStake.add(props.otherStake).gt(props.slots[slotCount - 1])) {
-      return slotCount;
-    }
-
-    for (let i = slotCount; i--; i >= 0) {
-      if (newStake.add(props.otherStake).gt(props.slots[i])) {
-        return i + 1;
-      }
-    }
-
-    return 0;
-  };
+  let estimatedSlot = slotCount + 1;
+  props.slots.forEach(slotStake => props.stake.gt(slotStake.sub(props.otherStake)) && --estimatedSlot);
 
   const changeValue = (e: any, { value }: any) => {
     const newStake = new u128(value);
     props.setStake(newStake);
-    setRank(findRankValue(newStake));
   };
-  useEffect(() => {
-    props.setStake(props.slots[0]);
-  }, []);
 
   const slider = null;
   return (
@@ -238,21 +226,26 @@ export function StakeRankSelector (props: StakeRankSelectorProps) {
           type="number"
           step={slotCount > 1 ? props.step.toNumber() : 1}
           value={props.stake.toNumber() > 0 ? props.stake.toNumber() : 0}
-          min={props.slots.length > 0 ? props.slots[0].sub(props.otherStake).toNumber() : 0}
-          error={props.stake.lt(minStake)}
+          min={minStake}
+          error={!stakeSufficient}
         />
-        <Label size='large'>
-          <Icon name={rankIcon(rank, slotCount)} />
-          Estimated rank
-          <Label.Detail>{(slotCount + 1) - rank} / {slotCount}</Label.Detail>
-        </Label>
-        <Label size='large'>
+        { props.maxNumberOfApplications > 0 && (
+          <Label size='large'>
+            <Icon name={rankIcon(estimatedSlot, slotCount)} />
+            Estimated rank
+            <Label.Detail>{estimatedSlot} / {props.maxNumberOfApplications}</Label.Detail>
+          </Label>
+        ) }
+        <Label size='large' color={stakeSufficient ? 'green' : 'red'}>
           <Icon name="shield" />
           Your stake
           <Label.Detail>{formatBalance(props.stake)}</Label.Detail>
         </Label>
       </Container>
       {slider}
+      { !stakeSufficient && (
+        <Label color="red">Currently you need to stake at least {formatBalance(minStake)} to be considered for this position!</Label>
+      ) }
     </Container>
   );
 }
@@ -377,24 +370,17 @@ export type StageTransitionProps = {
   prevTransition: () => void;
 }
 
-export type ApplicationStatusProps = {
-  numberOfApplications: number;
-}
-
 type CaptureKeyAndPassphraseProps = {
   keyAddress: AccountId;
   setKeyAddress: (a: AccountId) => void;
-  keyPassphrase: string;
-  setKeyPassphrase: (p: string) => void;
-  minStake: Balance;
+  // keyPassphrase: string;
+  // setKeyPassphrase: (p: string) => void;
+  // minStake: Balance;
 }
 
 export type ConfirmStakesStageProps =
-  StakeRequirementProps &
   FundSourceSelectorProps &
-  ApplicationStatusProps &
-  StakeRankSelectorProps &
-  CaptureKeyAndPassphraseProps & {
+  Pick<StakeRankSelectorProps, 'slots' | 'step'> & {
     applications: OpeningStakeAndApplicationStatus;
     selectedApplicationStake: Balance;
     setSelectedApplicationStake: (b: Balance) => void;
@@ -426,7 +412,7 @@ export function ConfirmStakesStage (props: ConfirmStakesStageProps & StageTransi
   );
 }
 
-type StakeSelectorProps = ConfirmStakesStageProps & ApplicationStatusProps
+type StakeSelectorProps = ConfirmStakesStageProps;
 
 function ConfirmStakes (props: StakeSelectorProps) {
   if (bothStakesVariable(props.applications)) {
@@ -488,55 +474,25 @@ export type ConfirmStakes2UpProps = {
 }
 
 export function ConfirmStakes2Up (props: ConfirmStakes2UpProps) {
-  const [valid, setValid] = useState(true);
   const slotCount = props.slots.length;
-  const [rank, setRank] = useState(1);
-  const minStake = props.slots[0];
-  const [combined, setCombined] = useState(new u128(0));
-
-  const findRankValue = (newStake: Balance): number => {
-    if (slotCount === 0) {
-      return 0;
-    }
+  const { maxNumberOfApplications, requiredApplicationStake, requiredRoleStake } = props.applications;
+  const minStake = maxNumberOfApplications && props.slots.length === maxNumberOfApplications
+    ? props.slots[0].addn(1) // Slots are sorted by combined stake ASC
+    : requiredApplicationStake.value.add(requiredRoleStake.value);
+  const combined = Add(props.selectedApplicationStake, props.selectedRoleStake);
+  const valid = combined.gte(minStake);
 
-    if (newStake.gt(props.slots[slotCount - 1])) {
-      return slotCount;
-    }
-
-    for (let i = slotCount; i--; i >= 0) {
-      if (newStake.gt(props.slots[i])) {
-        return i + 1;
-      }
-    }
-
-    return 0;
-  };
-
-  // Watch stake values
-  useEffect(() => {
-    const newCombined = Add(props.selectedApplicationStake, props.selectedRoleStake);
-    setCombined(newCombined);
-  },
-  [props.selectedApplicationStake, props.selectedRoleStake]
-  );
-
-  useEffect(() => {
-    setRank(findRankValue(combined));
-    if (slotCount > 0) {
-      setValid(combined.gte(minStake));
-    }
-  },
-  [combined]
-  );
+  let estimatedSlot = slotCount + 1;
+  props.slots.forEach(slotStake => combined.gt(slotStake) && --estimatedSlot);
 
   const ticks = [];
   for (let i = 0; i < slotCount; i++) {
-    ticks.push(<div key={i} className="tick" style={{ width: (100 / slotCount) + '%' }}>{slotCount - i}</div>);
+    ticks.push(<div key={i} className="tick" style={{ width: (100 / slotCount) + '%' }}>{i + 1}</div>);
   }
 
-  const tickLabel = <div className="ui pointing below label" style={{ left: ((100 / slotCount) * rank) + '%' }}>
+  const tickLabel = <div className="ui pointing below label" style={{ left: ((100 / slotCount) * (estimatedSlot - 1)) + '%' }}>
     Your rank
-    <div className="detail">{(slotCount - rank) + 1}/{props.applications.maxNumberOfApplications}</div>
+    <div className="detail">{estimatedSlot}/{props.applications.maxNumberOfApplications}</div>
   </div>;
 
   let tickContainer = null;
@@ -630,11 +586,13 @@ export function ConfirmStakes2Up (props: ConfirmStakes2UpProps) {
                   Your current combined stake
                   <Label.Detail>{formatBalance(new u128(props.selectedApplicationStake.add(props.selectedRoleStake)))}</Label.Detail>
                 </Label>
-                <Label color='grey'>
-                  <Icon name={rankIcon(rank, slotCount)} />
-                  Estimated rank
-                  <Label.Detail>{(slotCount - rank) + 1}/{props.applications.maxNumberOfApplications}</Label.Detail>
-                </Label>
+                { maxNumberOfApplications > 0 && (
+                  <Label color='grey'>
+                    <Icon name={rankIcon(estimatedSlot, slotCount)} />
+                    Estimated rank
+                    <Label.Detail>{estimatedSlot}/{props.applications.maxNumberOfApplications}</Label.Detail>
+                  </Label>
+                ) }
               </Grid.Column>
             </Grid.Row>
           </Grid>
@@ -677,7 +635,8 @@ function StakeRankMiniSelector (props: StakeRankMiniSelectorProps) {
   );
 }
 
-type CaptureStake1UpProps = ApplicationStatusProps & {
+type CaptureStake1UpProps = {
+  numberOfApplications: number;
   name: string;
   stakeReturnPolicy: string;
   colour: string;
@@ -710,11 +669,6 @@ function CaptureStake1Up (props: CaptureStake1UpProps) {
     );
   }
 
-  // Set default value
-  useEffect(() => {
-    props.setValue(props.requirement.value);
-  }, []);
-
   let slider = null;
   let atLeast = null;
   if (props.requirement.atLeast()) {
@@ -1030,7 +984,7 @@ export function DoneStage (props: DoneStageProps) {
   );
 }
 
-export type FlowModalProps = ConfirmStakesStageProps & FundSourceSelectorProps & {
+export type FlowModalProps = Pick<StakeRankSelectorProps, 'slots' | 'step'> & FundSourceSelectorProps & {
   role: GenericJoyStreamRoleSchema;
   applications: OpeningStakeAndApplicationStatus;
   hasConfirmStep: boolean;

+ 3 - 3
pioneer/packages/joy-roles/src/index.tsx

@@ -86,9 +86,9 @@ export const App: React.FC<Props> = (props: Props) => {
         />
       </header>
       <Switch>
-        <Route path={`${basePath}/opportunities/:group/:id/apply`} render={(props) => renderViewComponent(ApplyView(applyCtrl), props)} />
-        <Route path={`${basePath}/opportunities/:group/:id`} render={(props) => renderViewComponent(OpportunityView(oppCtrl), props)} />
-        <Route path={`${basePath}/opportunities/:group`} render={(props) => renderViewComponent(OpportunitiesView(oppsCtrl), props)} />
+        <Route path={`${basePath}/opportunities/:group/:id([0-9]+)/apply`} render={(props) => renderViewComponent(ApplyView(applyCtrl), props)} />
+        <Route path={`${basePath}/opportunities/:group/:id([0-9]+)`} render={(props) => renderViewComponent(OpportunityView(oppCtrl), props)} />
+        <Route path={`${basePath}/opportunities/:group/:lead(lead)?`} render={(props) => renderViewComponent(OpportunitiesView(oppsCtrl), props)} />
         <Route path={`${basePath}/opportunities`} render={() => renderViewComponent(OpportunitiesView(oppsCtrl))} />
         <Route path={`${basePath}/my-roles`} render={() => renderViewComponent(MyRolesView(myRolesCtrl))} />
         <Route path={`${basePath}/admin`} render={() => renderViewComponent(AdminView(adminCtrl))} />

+ 22 - 0
pioneer/packages/joy-roles/src/mocks.ts

@@ -3,6 +3,14 @@ import AccountId from '@polkadot/types/primitive/Generic/AccountId';
 
 import { ActorInRole, IProfile, EntryMethod } from '@joystream/types/members';
 
+import {
+  AcceptingApplications,
+  ActiveOpeningStage,
+  OpeningStage,
+  ActiveOpeningStageVariant,
+  ApplicationId
+} from '@joystream/types/hiring';
+
 export function mockProfile (name: string, avatar_uri = ''): IProfile {
   return {
     handle: new Text(name),
@@ -18,3 +26,17 @@ export function mockProfile (name: string, avatar_uri = ''): IProfile {
     roles: new Vec<ActorInRole>(ActorInRole)
   };
 }
+
+export const mockStage = new OpeningStage({
+  Active: new ActiveOpeningStageVariant({
+    applications_added: new (Vec.with(ApplicationId))([]),
+    active_application_count: new u32(0),
+    unstaking_application_count: new u32(0),
+    deactivated_application_count: new u32(0),
+    stage: new ActiveOpeningStage({
+      AcceptingApplications: new AcceptingApplications({
+        started_accepting_applicants_at_block: new u32(100)
+      })
+    })
+  })
+});

+ 130 - 273
pioneer/packages/joy-roles/src/tabs/Admin.controller.tsx

@@ -1,14 +1,14 @@
-// @ts-nocheck
 import React, { useState } from 'react';
 import { Link } from 'react-router-dom';
 import { formatBalance } from '@polkadot/util';
 
 import { ApiPromise } from '@polkadot/api';
+import { GenericAccountId, Option, Text, Vec, u32, u128 } from '@polkadot/types';
 import { Balance } from '@polkadot/types/interfaces';
-import { GenericAccountId, Option, u32, u64, u128, Set, Text, Vec } from '@polkadot/types';
 
 import { SingleLinkedMapEntry, Controller, View } from '@polkadot/joy-utils/index';
 import { MyAccountProvider, useMyAccount } from '@polkadot/joy-utils/MyAccountContext';
+import { SubmittableExtrinsic } from '@polkadot/api/promise/types';
 
 import {
   Accordion,
@@ -35,11 +35,12 @@ import {
   ApplicationStage,
   ActivateOpeningAt,
   ApplicationRationingPolicy,
-  CurrentBlock, ExactBlock,
+  CurrentBlock,
   Opening,
   OpeningStage,
   StakingPolicy,
-  StakingAmountLimitModeKeys
+  StakingAmountLimitModeKeys,
+  StakingAmountLimitMode
 } from '@joystream/types/hiring';
 
 import {
@@ -49,13 +50,10 @@ import {
 
 import { Stake, StakeId } from '@joystream/types/stake';
 
-import {
-  GenericJoyStreamRoleSchema
-} from '@joystream/types/hiring/schemas/role.schema.typings';
 import {
   CuratorApplication, CuratorApplicationId,
   CuratorOpening,
-  OpeningPolicyCommitment, IOpeningPolicyCommitment
+  IOpeningPolicyCommitment, CuratorOpeningId
 } from '@joystream/types/content-working-group';
 
 import {
@@ -68,7 +66,7 @@ import {
   openingDescription
 } from '../openingStateMarkup';
 
-import { Add, Sort, Sum, Zero } from '../balances';
+import { Add, Zero } from '../balances';
 
 type ids = {
   curatorId: number;
@@ -92,6 +90,18 @@ type opening = ids & {
   classification: OpeningStageClassification;
 }
 
+// Only max_review_period_length is not optional, so other fields can be "undefined"
+type policyDescriptor = Pick<IOpeningPolicyCommitment, 'max_review_period_length'> & Partial<IOpeningPolicyCommitment>;
+
+type stakingFieldName = 'application_staking_policy' | 'role_staking_policy';
+
+type openingDescriptor = {
+  title: string;
+  start: ActivateOpeningAt;
+  policy: policyDescriptor;
+  text: Text;
+}
+
 type State = {
   openings: Map<number, opening>;
   currentDescriptor: openingDescriptor;
@@ -143,6 +153,27 @@ function newHRT (title: string): Text {
   );
 }
 
+const createRationingPolicyOpt = (maxApplicants: number) =>
+  new Option<ApplicationRationingPolicy>(
+    ApplicationRationingPolicy,
+    new ApplicationRationingPolicy({
+      max_active_applicants: new u32(maxApplicants)
+    })
+  );
+const createStakingPolicyOpt = (amount: number, amount_mode: StakingAmountLimitMode): Option<StakingPolicy> =>
+  new Option(
+    StakingPolicy,
+    new StakingPolicy({
+      amount: new u128(amount),
+      amount_mode,
+      crowded_out_unstaking_period_length: new Option('BlockNumber', null),
+      review_period_expired_unstaking_period_length: new Option('BlockNumber', null)
+    })
+  );
+
+const STAKING_MODE_EXACT = new StakingAmountLimitMode(StakingAmountLimitModeKeys.Exact);
+const STAKING_MODE_AT_LEAST = new StakingAmountLimitMode(StakingAmountLimitModeKeys.AtLeast);
+
 const stockOpenings: openingDescriptor[] = [
   {
     title: 'Test config A: no application stake, no role stake, no applicant limit',
@@ -165,13 +196,7 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration C')
   },
@@ -180,19 +205,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration D')
   },
@@ -201,13 +215,7 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      role_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration E')
   },
@@ -216,19 +224,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      role_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration F')
   },
@@ -237,13 +234,7 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration G')
   },
@@ -252,19 +243,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration H')
   },
@@ -273,13 +253,7 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      role_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration I')
   },
@@ -288,19 +262,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      role_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration J')
   },
@@ -309,20 +272,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration K')
   },
@@ -331,26 +282,9 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration L')
   },
@@ -359,20 +293,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration M')
   },
@@ -381,26 +303,9 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration N')
   },
@@ -409,20 +314,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration O')
   },
@@ -437,20 +330,8 @@ const stockOpenings: openingDescriptor[] = [
           max_active_applicants: new u32(10)
         })
       ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_EXACT),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_AT_LEAST)
     },
     text: newHRT('Test configuration P')
   },
@@ -459,20 +340,8 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration Q')
   },
@@ -481,26 +350,9 @@ const stockOpenings: openingDescriptor[] = [
     start: new ActivateOpeningAt(CurrentBlock),
     policy: {
       max_review_period_length: new u32(99999),
-      application_rationing_policy: new Option<ApplicationRationingPolicy>(
-        ApplicationRationingPolicy,
-        new ApplicationRationingPolicy({
-          max_active_applicants: new u32(10)
-        })
-      ),
-      application_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(100),
-          amount_mode: StakingAmountLimitModeKeys.AtLeast
-        })
-      ),
-      role_staking_policy: new Option<StakingPolicy>(
-        StakingPolicy,
-        new StakingPolicy({
-          amount: new u128(200),
-          amount_mode: StakingAmountLimitModeKeys.Exact
-        })
-      )
+      application_rationing_policy: createRationingPolicyOpt(10),
+      application_staking_policy: createStakingPolicyOpt(100, STAKING_MODE_AT_LEAST),
+      role_staking_policy: createStakingPolicyOpt(200, STAKING_MODE_EXACT)
     },
     text: newHRT('Test configuration R')
   }
@@ -509,35 +361,29 @@ const stockOpenings: openingDescriptor[] = [
 const newEmptyState = (): State => {
   return {
     openings: new Map<number, opening>(),
-    openingDescriptor: stockOpenings[0],
+    currentDescriptor: stockOpenings[0],
     modalOpen: false
   };
 };
 
-// TODO: Make a list of stock openings
-type openingDescriptor = {
-  title: string;
-  start: ActivateOpeningAt;
-  policy: IOpeningPolicyCommitment;
-  text: Text;
-}
-
 export class AdminController extends Controller<State, ITransport> {
   api: ApiPromise
   constructor (transport: ITransport, api: ApiPromise, initialState: State = newEmptyState()) {
     super(transport, initialState);
     this.api = api;
-    this.state.openingDescriptor = stockOpenings[0];
+    this.state.currentDescriptor = stockOpenings[0];
     this.updateState();
   }
 
   newOpening (creatorAddress: string, desc: openingDescriptor) {
     const tx = this.api.tx.contentWorkingGroup.addCuratorOpening(
       desc.start,
-      new OpeningPolicyCommitment(desc.policy),
+      desc.policy,
       desc.text
-    );
+    ) as unknown as SubmittableExtrinsic;
 
+    // FIXME: That's a bad way to send extrinsic in Pioneer (without "queueExtrinsic" etc.)
+    // and probably the reason why it always appears as successful
     tx.signAndSend(creatorAddress, ({ events = [], status }) => {
       if (status.isFinalized) {
         this.updateState();
@@ -553,7 +399,9 @@ export class AdminController extends Controller<State, ITransport> {
   }
 
   startAcceptingApplications (creatorAddress: string, id = 0) {
-    const tx = this.api.tx.contentWorkingGroup.acceptCuratorApplications(new u32(id));
+    const tx = this.api.tx.contentWorkingGroup.acceptCuratorApplications(id);
+    // FIXME: That's a bad way to send extrinsic in Pioneer (without "queueExtrinsic" etc.)
+    // and probably the reason why it always appears as successful
     tx.signAndSend(creatorAddress, ({ events = [], status }) => {
       if (status.isFinalized) {
         this.updateState();
@@ -576,12 +424,14 @@ export class AdminController extends Controller<State, ITransport> {
     }
     const tx = this.api.tx.contentWorkingGroup.applyOnCuratorOpening(
       membershipIds[0],
-      new u32(openingId),
+      openingId,
       new GenericAccountId(creatorAddress),
       new Option(u128, 400),
       new Option(u128, 400),
       new Text('This is my application')
-    );
+    ) as unknown as SubmittableExtrinsic;
+    // FIXME: That's a bad way to send extrinsic in Pioneer (without "queueExtrinsic" etc.)
+    // and probably the reason why it always appears as successful
     tx.signAndSend(creatorAddress, ({ events = [], status }) => {
       if (status.isFinalized) {
         this.updateState();
@@ -597,9 +447,9 @@ export class AdminController extends Controller<State, ITransport> {
   }
 
   beginApplicantReview (creatorAddress: string, openingId: number) {
-    const tx = this.api.tx.contentWorkingGroup.beginCuratorApplicantReview(
-      new u32(openingId)
-    );
+    const tx = this.api.tx.contentWorkingGroup.beginCuratorApplicantReview(openingId);
+    // FIXME: That's a bad way to send extrinsic in Pioneer (without "queueExtrinsic" etc.)
+    // and probably the reason why it always appears as successful
     tx.signAndSend(creatorAddress, ({ events = [], status }) => {
       if (status.isFinalized) {
         this.updateState();
@@ -616,10 +466,12 @@ export class AdminController extends Controller<State, ITransport> {
 
   acceptCuratorApplications (creatorAddress: string, openingId: number, applications: Array<number>) {
     const tx = this.api.tx.contentWorkingGroup.fillCuratorOpening(
-      new u32(openingId),
+      openingId,
       applications,
       null
-    );
+    ) as unknown as SubmittableExtrinsic;
+    // FIXME: That's a bad way to send extrinsic in Pioneer (without "queueExtrinsic" etc.)
+    // and probably the reason why it always appears as successful
     tx.signAndSend(creatorAddress, ({ events = [], status }) => {
       if (status.isFinalized) {
         this.updateState();
@@ -634,8 +486,8 @@ export class AdminController extends Controller<State, ITransport> {
     });
   }
 
-  protected async profile (id: MemberId): Promise<Profile> {
-    return (await this.api.query.members.memberProfile(id)) as Profile;
+  protected async profile (id: MemberId): Promise<Option<Profile>> {
+    return (await this.api.query.members.memberProfile(id)) as Option<Profile>;
   }
 
   protected async stakeValue (stakeId: StakeId): Promise<Balance> {
@@ -667,14 +519,14 @@ export class AdminController extends Controller<State, ITransport> {
   async updateState () {
     this.state.openings = new Map<number, opening>();
 
-    const nextOpeningId = await this.api.query.contentWorkingGroup.nextCuratorOpeningId() as u64;
+    const nextOpeningId = await this.api.query.contentWorkingGroup.nextCuratorOpeningId() as CuratorOpeningId;
     for (let i = nextOpeningId.toNumber() - 1; i >= 0; i--) {
       const curatorOpening = new SingleLinkedMapEntry<CuratorOpening>(
         CuratorOpening,
         await this.api.query.contentWorkingGroup.curatorOpeningById(i)
       );
 
-      const openingId = curatorOpening.value.getField<u32>('opening_id');
+      const openingId = curatorOpening.value.opening_id;
 
       const baseOpening = new SingleLinkedMapEntry<Opening>(
         Opening,
@@ -683,11 +535,8 @@ export class AdminController extends Controller<State, ITransport> {
         )
       );
 
-      let title = 'unknown (JSON schema invalid)';
-      const hrt = baseOpening.value.parse_human_readable_text();
-      if (typeof hrt === 'object') {
-        title = (hrt).job.title;
-      }
+      const hrt = baseOpening.value.parse_human_readable_text_with_fallback();
+      const title = hrt.job.title;
 
       this.state.openings.set(i, {
         openingId: openingId.toNumber(),
@@ -699,14 +548,14 @@ export class AdminController extends Controller<State, ITransport> {
       });
     }
 
-    const nextAppid = await this.api.query.contentWorkingGroup.nextCuratorApplicationId() as u64;
+    const nextAppid = await this.api.query.contentWorkingGroup.nextCuratorApplicationId() as CuratorApplicationId;
     for (let i = 0; i < nextAppid.toNumber(); i++) {
       const cApplication = new SingleLinkedMapEntry<CuratorApplication>(
         CuratorApplication,
         await this.api.query.contentWorkingGroup.curatorApplicationById(i)
       );
 
-      const appId = cApplication.value.getField<u32>('application_id');
+      const appId = cApplication.value.application_id;
       const baseApplications = new SingleLinkedMapEntry<Application>(
         Application,
         await this.api.query.hiring.applicationById(
@@ -715,16 +564,16 @@ export class AdminController extends Controller<State, ITransport> {
       );
 
       const curatorOpening = this.state.openings.get(
-        cApplication.value.getField<u32>('curator_opening_id').toNumber()
+        cApplication.value.curator_opening_id.toNumber()
       ) as opening;
 
       curatorOpening.applications.push({
         openingId: appId.toNumber(),
         curatorId: i,
-        stage: baseApplications.value.getField<ApplicationStage>('stage'),
-        account: cApplication.value.getField('role_account').toString(),
-        memberId: cApplication.value.getField<u32>('member_id').toNumber(),
-        profile: (await this.profile(cApplication.value.getField<u32>('member_id'))).unwrap(),
+        stage: baseApplications.value.stage,
+        account: cApplication.value.role_account_id.toString(),
+        memberId: cApplication.value.member_id.toNumber(),
+        profile: (await this.profile(cApplication.value.member_id)).unwrap(),
         applicationStake: await this.applicationStake(baseApplications.value),
         roleStake: await this.roleStake(baseApplications.value),
         application: baseApplications.value
@@ -736,7 +585,7 @@ export class AdminController extends Controller<State, ITransport> {
 
   showNewOpeningModal (desc: openingDescriptor) {
     this.state.modalOpen = true;
-    this.state.openingDescriptor = desc;
+    this.state.currentDescriptor = desc;
     this.dispatch();
   }
 
@@ -772,7 +621,7 @@ export const AdminView = View<AdminController, State>(
               <Modal open={state.modalOpen} onClose={() => controller.closeModal()}>
                 <Modal.Content image>
                   <Modal.Description>
-                    <NewOpening desc={state.openingDescriptor} fn={(desc) => controller.newOpening(address, desc)} />
+                    <NewOpening desc={state.currentDescriptor} fn={(desc) => controller.newOpening(address, desc)} />
                   </Modal.Description>
                 </Modal.Content>
               </Modal>
@@ -832,8 +681,8 @@ const NewOpening = (props: NewOpeningProps) => {
 
   const [policy, setPolicy] = useState(props.desc.policy);
 
-  const onChangePolicyField = (fieldName, value) => {
-    const newState = Object.assign({}, policy);
+  const onChangePolicyField = <PolicyKey extends keyof policyDescriptor>(fieldName: PolicyKey, value: policyDescriptor[PolicyKey]) => {
+    const newState = { ...policy };
     newState[fieldName] = value;
     setPolicy(newState);
   };
@@ -863,24 +712,31 @@ const NewOpening = (props: NewOpeningProps) => {
     }
   ];
 
-  const changeStakingMode = (fieldName: string, mode: string, stakeValue: number) => {
-    const value = new Option<StakingPolic>(
-      StakingPolicy,
-      new StakingPolicy({
-        amount: new u128(stakeValue),
-        amount_mode: mode === '' && policy[fieldName].isSome ? policy[fieldName].type : mode
-      })
+  const changeStakingMode = (
+    fieldName: stakingFieldName,
+    mode: StakingAmountLimitModeKeys | '',
+    stakeValue: number
+  ) => {
+    if (mode === '') {
+      const policyField = policy[fieldName];
+      mode = policyField && policyField.isSome
+        ? (policyField.unwrap().amount_mode.type as StakingAmountLimitModeKeys)
+        : StakingAmountLimitModeKeys.Exact; // Default
+    }
+    const value = createStakingPolicyOpt(
+      stakeValue,
+      mode === StakingAmountLimitModeKeys.Exact ? STAKING_MODE_EXACT : STAKING_MODE_AT_LEAST
     );
     onChangePolicyField(fieldName, value);
   };
 
-  const onStakeModeCheckboxChange = (fn: (v: boolean) => void, fieldName: string, checked: boolean, stakeValue: number) => {
+  const onStakeModeCheckboxChange = (fn: (v: boolean) => void, fieldName: stakingFieldName, checked: boolean, stakeValue: number) => {
     fn(checked);
 
     if (checked) {
       changeStakingMode(fieldName, StakingAmountLimitModeKeys.AtLeast, stakeValue);
     } else {
-      onChangePolicyField(fieldName, null);
+      onChangePolicyField(fieldName, undefined);
     }
   };
 
@@ -890,7 +746,8 @@ const NewOpening = (props: NewOpeningProps) => {
     props.fn({
       start: start,
       policy: policy,
-      text: new Text(text)
+      text: new Text(text),
+      title: ''
     });
   };
 
@@ -932,13 +789,13 @@ const NewOpening = (props: NewOpeningProps) => {
               selection
               onChange={(e, { value }: any) => changeStakingMode('application_staking_policy', value, 0)}
               options={stakeLimitOptions}
-              value={policy.application_staking_policy.unwrap().amount_mode.type}
+              value={policy.application_staking_policy?.unwrap().amount_mode.type}
             />
 
             <label>Stake value</label>
             <Input
               type="number"
-              value={policy.application_staking_policy.unwrap().amount.toNumber()}
+              value={policy.application_staking_policy?.unwrap().amount.toNumber()}
               onChange={(e: any, { value }: any) => changeStakingMode('application_staking_policy', '', value)}
             />
           </Message>
@@ -955,13 +812,13 @@ const NewOpening = (props: NewOpeningProps) => {
               selection
               onChange={(e, { value }: any) => changeStakingMode('role_staking_policy', value, 0)}
               options={stakeLimitOptions}
-              value={policy.role_staking_policy.unwrap().amount_mode.type}
+              value={policy.role_staking_policy?.unwrap().amount_mode.type}
             />
 
             <label>Stake value</label>
             <Input
               type="number"
-              value={policy.role_staking_policy.unwrap().amount.toNumber()}
+              value={policy.role_staking_policy?.unwrap().amount.toNumber()}
               onChange={(e: any, { value }: any) => changeStakingMode('role_staking_policy', '', value)}
             />
           </Message>

+ 10 - 10
pioneer/packages/joy-roles/src/tabs/MyRoles.controller.tsx

@@ -10,14 +10,14 @@ import {
 
 type State = {
   applications: OpeningApplication[];
-  currentCurationRoles: ActiveRoleWithCTAs[];
+  currentRoles: ActiveRoleWithCTAs[];
   myAddress: string;
 }
 
 const newEmptyState = (): State => {
   return {
     applications: [],
-    currentCurationRoles: [],
+    currentRoles: [],
     myAddress: ''
   };
 };
@@ -34,18 +34,18 @@ export class MyRolesController extends Controller<State, ITransport> {
   }
 
   protected async updateApplications (myAddress: string) {
-    this.state.applications = await this.transport.openingApplications(myAddress);
+    this.state.applications = await this.transport.openingApplicationsByAddress(myAddress);
     this.dispatch();
   }
 
   protected async updateCurationGroupRoles (myAddress: string) {
-    const roles = await this.transport.myCurationGroupRoles(myAddress);
-    this.state.currentCurationRoles = roles.map(role => ({
+    const roles = await this.transport.myRoles(myAddress);
+    this.state.currentRoles = roles.map(role => ({
       ...role,
       CTAs: [
         {
           title: 'Leave role',
-          callback: (rationale: string) => { this.leaveCurationRole(role, rationale); }
+          callback: (rationale: string) => { this.leaveRole(role, rationale); }
         }
       ]
     })
@@ -53,19 +53,19 @@ export class MyRolesController extends Controller<State, ITransport> {
     this.dispatch();
   }
 
-  leaveCurationRole (role: ActiveRole, rationale: string) {
-    this.transport.leaveCurationRole(this.state.myAddress, role.curatorId.toNumber(), rationale);
+  leaveRole (role: ActiveRole, rationale: string) {
+    this.transport.leaveRole(role.group, this.state.myAddress, role.workerId.toNumber(), rationale);
   }
 
   cancelApplication (application: OpeningApplication) {
-    this.transport.withdrawCuratorApplication(this.state.myAddress, application.id);
+    this.transport.withdrawApplication(application.meta.group, this.state.myAddress, application.id);
   }
 }
 
 export const MyRolesView = View<MyRolesController, State>(
   (state, controller) => (
     <Container className="my-roles">
-      <CurrentRoles currentRoles={state.currentCurationRoles} />
+      <CurrentRoles currentRoles={state.currentRoles} />
       <Applications applications={state.applications} cancelCallback={(a) => controller.cancelApplication(a)} />
     </Container>
   )

+ 12 - 9
pioneer/packages/joy-roles/src/tabs/MyRoles.elements.stories.tsx

@@ -31,6 +31,7 @@ import {
 } from './Opportunities.stories';
 
 import { CuratorId } from '@joystream/types/content-working-group';
+import { WorkingGroups, workerRoleNameByGroup } from '../working_groups';
 
 export default {
   title: 'Roles / Components / My roles tab / Elements',
@@ -45,10 +46,11 @@ export function CurrentRolesFragment () {
   const props: CurrentRolesProps = {
     currentRoles: [
       {
-        curatorId: new CuratorId(1),
-        name: 'Storage provider',
+        workerId: new CuratorId(1),
+        name: workerRoleNameByGroup[WorkingGroups.StorageProviders],
         reward: new u128(321),
         stake: new u128(100),
+        group: WorkingGroups.StorageProviders,
         CTAs: [
           {
             title: 'Unstake',
@@ -57,11 +59,12 @@ export function CurrentRolesFragment () {
         ]
       },
       {
-        curatorId: new CuratorId(1),
+        workerId: new CuratorId(1),
         name: 'Some other role',
         url: 'some URL',
         reward: new u128(321),
         stake: new u128(12343200),
+        group: WorkingGroups.ContentCurators,
         CTAs: [
           {
             title: 'Leave role',
@@ -164,7 +167,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.AcceptingApplications,
@@ -184,7 +187,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.AcceptingApplications,
@@ -204,7 +207,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.InReview,
@@ -226,7 +229,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.InReview,
@@ -248,7 +251,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.Complete,
@@ -268,7 +271,7 @@ const permutations: (ApplicationProps & TestProps)[] = [
     id: 1,
     meta: {
       id: '1',
-      group: 'group-name'
+      group: WorkingGroups.ContentCurators
     },
     stage: {
       state: OpeningState.Cancelled,

+ 16 - 4
pioneer/packages/joy-roles/src/tabs/MyRoles.tsx

@@ -21,8 +21,6 @@ import { u128 } from '@polkadot/types';
 import { Balance } from '@polkadot/types/interfaces';
 
 import { Loadable } from '@polkadot/joy-utils/index';
-
-import { GenericJoyStreamRoleSchema } from '@joystream/types/hiring/schemas/role.schema.typings';
 import { Opening } from '@joystream/types/hiring';
 
 import {
@@ -35,6 +33,10 @@ import {
 import { CancelledReason, OpeningStageClassification, OpeningState } from '../classifiers';
 import { OpeningMetadata } from '../OpeningMetadata';
 import { CuratorId } from '@joystream/types/content-working-group';
+import { WorkerId } from '@joystream/types/working-group';
+import _ from 'lodash';
+import styled from 'styled-components';
+import { WorkingGroups } from '../working_groups';
 
 type CTACallback = (rationale: string) => void
 
@@ -102,9 +104,10 @@ function RoleName (props: NameAndURL) {
 }
 
 export interface ActiveRole extends NameAndURL {
-  curatorId: CuratorId;
+  workerId: CuratorId | WorkerId;
   reward: Balance;
   stake: Balance;
+  group: WorkingGroups;
 }
 
 export interface ActiveRoleWithCTAs extends ActiveRole {
@@ -379,14 +382,20 @@ function CancelButton (props: ApplicationProps) {
   );
 }
 
+const ApplicationLabel = styled(Label)`
+  margin-left: 1em !important;
+  border: 1px solid #999 !important;
+`;
+
 export function Application (props: ApplicationProps) {
   let countdown = null;
   if (props.stage.state === OpeningState.InReview) {
     countdown = <OpeningBodyReviewInProgress {...props.stage} />;
   }
 
-  const application = props.opening.parse_human_readable_text() as GenericJoyStreamRoleSchema;
+  const application = props.opening.parse_human_readable_text_with_fallback();
   const appState = applicationState(props);
+  const isLeadApplication = props.meta.type?.isOfType('Leader');
 
   let CTA = null;
   if (appState === ApplicationState.Positive && props.stage.state !== OpeningState.Complete) {
@@ -400,6 +409,9 @@ export function Application (props: ApplicationProps) {
         <Label.Detail className="right">
           {openingIcon(props.stage.state)}
           {openingDescription(props.stage.state)}
+          <ApplicationLabel>
+            {_.startCase(props.meta.group) + (isLeadApplication ? ' Lead' : '')}
+          </ApplicationLabel>
         </Label.Detail>
       </Label>
       <Grid columns="equal">

+ 1 - 0
pioneer/packages/joy-roles/src/tabs/Opportunities.controller.tsx

@@ -42,6 +42,7 @@ export const OpportunitiesView = View<OpportunitiesController, State>(
   (state, controller, params) => (
     <OpeningsView
       group={AvailableGroups.includes(params.get('group') as any) ? params.get('group') as WorkingGroups : undefined}
+      lead={!!params.get('lead')}
       openings={state.opportunities}
       block_time_in_seconds={state.blockTime}
       member_id={state.memberId}

+ 2 - 1
pioneer/packages/joy-roles/src/tabs/Opportunities.elements.stories.tsx

@@ -22,6 +22,7 @@ import { OpeningMetadata } from '../OpeningMetadata';
 
 import 'semantic-ui-css/semantic.min.css';
 import '@polkadot/joy-roles/index.sass';
+import { WorkingGroups } from '../working_groups';
 
 export default {
   title: 'Roles / Components / Opportunities groups tab / Elements',
@@ -34,7 +35,7 @@ type TestProps = {
 
 const meta: OpeningMetadata = {
   id: '1',
-  group: 'group-name'
+  group: WorkingGroups.ContentCurators
 };
 
 export function OpeningHeaderByState () {

+ 4 - 8
pioneer/packages/joy-roles/src/tabs/Opportunities.stories.tsx

@@ -7,11 +7,10 @@ import { Balance } from '@polkadot/types/interfaces';
 
 import {
   Opening,
-  AcceptingApplications,
-  ActiveOpeningStage,
   ApplicationRationingPolicy,
   StakingPolicy
 } from '@joystream/types/hiring';
+import { mockStage } from '../mocks';
 import {
   OpeningView,
   OpeningStakeAndApplicationStatus
@@ -27,6 +26,7 @@ import { OpeningMetadata } from '../OpeningMetadata';
 
 import 'semantic-ui-css/semantic.min.css';
 import '@polkadot/joy-roles/index.sass';
+import { WorkingGroups } from '../working_groups';
 
 export default {
   title: 'Roles / Components / Opportunities groups tab',
@@ -58,11 +58,7 @@ export function newMockHumanReadableText (obj: any) {
 
 export const opening = new Opening({
   created: new u32(100),
-  stage: new ActiveOpeningStage({
-    acceptingApplications: new AcceptingApplications({
-      started_accepting_applicants_at_block: new u32(100)
-    })
-  }),
+  stage: mockStage,
   max_review_period_length: new u32(100),
   application_rationing_policy: new Option(ApplicationRationingPolicy),
   application_staking_policy: new Option(StakingPolicy),
@@ -153,7 +149,7 @@ export function OpportunitySandbox () {
 
   const meta: OpeningMetadata = {
     id: '1',
-    group: 'group-name'
+    group: WorkingGroups.ContentCurators
   };
 
   return (

+ 44 - 19
pioneer/packages/joy-roles/src/tabs/Opportunities.tsx

@@ -4,7 +4,7 @@ import NumberFormat from 'react-number-format';
 import marked from 'marked';
 import CopyToClipboard from 'react-copy-to-clipboard';
 
-import { Link, useHistory } from 'react-router-dom';
+import { Link, useHistory, useLocation } from 'react-router-dom';
 import {
   Button,
   Card,
@@ -41,7 +41,7 @@ import {
 import { Loadable } from '@polkadot/joy-utils/index';
 import styled from 'styled-components';
 import _ from 'lodash';
-import { WorkingGroups, AvailableGroups } from '../working_groups';
+import { WorkingGroups, AvailableGroups, workerRoleNameByGroup } from '../working_groups';
 
 type OpeningStage = OpeningMetadataProps & {
   stage: OpeningStageClassification;
@@ -476,19 +476,16 @@ type OpeningViewProps = WorkingGroupOpening & BlockTimeProps & MemberIdProps
 export const OpeningView = Loadable<OpeningViewProps>(
   ['opening', 'block_time_in_seconds'],
   props => {
-    const hrt = props.opening.parse_human_readable_text();
-
-    if (typeof hrt === 'undefined' || typeof hrt === 'string') {
-      return null;
-    }
-
-    const text = hrt;
+    const text = props.opening.parse_human_readable_text_with_fallback();
+    const isLeadOpening = props.meta.type?.isOfType('Leader');
 
     return (
       <Container className={'opening ' + openingClass(props.stage.state)}>
         <OpeningTitle>
           {text.job.title}
-          <OpeningLabel>{ _.startCase(props.meta.group) }</OpeningLabel>
+          <OpeningLabel>
+            { _.startCase(props.meta.group) }{ isLeadOpening ? ' Lead' : '' }
+          </OpeningLabel>
         </OpeningTitle>
         <Card fluid className="container">
           <Card.Content className="header">
@@ -527,15 +524,28 @@ export type OpeningsViewProps = MemberIdProps & {
   openings?: Array<WorkingGroupOpening>;
   block_time_in_seconds?: number;
   group?: WorkingGroups;
+  lead?: boolean;
 }
 
 export const OpeningsView = Loadable<OpeningsViewProps>(
   ['openings', 'block_time_in_seconds'],
   props => {
     const history = useHistory();
-    const { group = '' } = props;
-    const onFilterChange: DropdownProps['onChange'] = (e, data) => (
-      data.value !== group && history.push(`/working-groups/opportunities/${data.value}`)
+    const location = useLocation();
+    const basePath = '/working-groups/opportunities';
+    const { group = null, lead = false } = props;
+    const onFilterChange: DropdownProps['onChange'] = (e, data) => {
+      const newPath = data.value || basePath;
+      if (newPath !== location.pathname) { history.push(newPath as string); }
+    };
+    const groupOption = (group: WorkingGroups | null, lead = false) => ({
+      value: `${basePath}${group ? `/${group}` : ''}${lead ? '/lead' : ''}`,
+      text: _.startCase(`${group || 'All opportuniries'}`) + (lead ? ' (Lead)' : '')
+    });
+    // Can assert "props.openings!" because we're using "Loadable" which prevents them from beeing undefined
+    const filteredOpenings = props.openings!.filter(o =>
+      (!group || o.meta.group === group) &&
+      (!group || !o.meta.type || (lead === o.meta.type.isOfType('Leader')))
     );
 
     return (
@@ -544,17 +554,32 @@ export const OpeningsView = Loadable<OpeningsViewProps>(
           <FilterOpportunitiesDropdown
             placeholder="All opportunities"
             options={
-              [{ value: '', text: 'All opportunities' }]
-                .concat(AvailableGroups.map(g => ({ value: g, text: _.startCase(g) })))
+              [groupOption(null, false)]
+                .concat(AvailableGroups.map(g => groupOption(g)))
+                // Currently we filter-out content curators, because they don't use the new working-group module yet
+                .concat(AvailableGroups.filter(g => g !== WorkingGroups.ContentCurators).map(g => groupOption(g, true)))
             }
-            value={group}
+            value={groupOption(group, lead).value}
             onChange={onFilterChange}
             selection
           />
         </FilterOpportunities>
-        {props.openings && props.openings.filter(o => !group || o.meta.group === group).map((opening, key) => (
-          <OpeningView key={key} {...opening} block_time_in_seconds={props.block_time_in_seconds as number} member_id={props.member_id} />
-        ))}
+        { (
+          filteredOpenings.length
+            ? filteredOpenings.map((opening, key) => (
+              <OpeningView
+                key={key}
+                {...opening}
+                block_time_in_seconds={props.block_time_in_seconds as number}
+                member_id={props.member_id} />
+            ))
+            : (
+              <h2>
+                No openings{group ? ` for ${workerRoleNameByGroup[group]}${lead ? ' Lead' : ''} role ` : ' '}
+                are currently available!
+              </h2>
+            )
+        ) }
       </Container>
     );
   }

+ 2 - 22
pioneer/packages/joy-roles/src/tabs/WorkingGroup.controller.tsx

@@ -7,18 +7,14 @@ import { ITransport } from '../transport';
 import {
   ContentCurators,
   WorkingGroupMembership,
-  GroupLeadStatus,
   StorageProviders
 } from './WorkingGroup';
 
-import { WorkingGroups } from '../working_groups';
 import styled from 'styled-components';
 
 type State = {
   contentCurators?: WorkingGroupMembership;
   storageProviders?: WorkingGroupMembership;
-  contentLeadStatus?: GroupLeadStatus;
-  storageLeadStatus?: GroupLeadStatus;
 }
 
 export class WorkingGroupsController extends Controller<State, ITransport> {
@@ -26,8 +22,6 @@ export class WorkingGroupsController extends Controller<State, ITransport> {
     super(transport, {});
     this.getCurationGroup();
     this.getStorageGroup();
-    this.getCuratorLeadStatus();
-    this.getStorageLeadStatus();
   }
 
   getCurationGroup () {
@@ -43,20 +37,6 @@ export class WorkingGroupsController extends Controller<State, ITransport> {
       this.dispatch();
     });
   }
-
-  getCuratorLeadStatus () {
-    this.transport.groupLeadStatus(WorkingGroups.ContentCurators).then((value: GroupLeadStatus) => {
-      this.setState({ contentLeadStatus: value });
-      this.dispatch();
-    });
-  }
-
-  getStorageLeadStatus () {
-    this.transport.groupLeadStatus(WorkingGroups.StorageProviders).then((value: GroupLeadStatus) => {
-      this.setState({ storageLeadStatus: value });
-      this.dispatch();
-    });
-  }
 }
 
 const WorkingGroupsOverview = styled.div`
@@ -71,8 +51,8 @@ const WorkingGroupsOverview = styled.div`
 export const WorkingGroupsView = View<WorkingGroupsController, State>(
   (state) => (
     <WorkingGroupsOverview>
-      <ContentCurators {...state.contentCurators} leadStatus={state.contentLeadStatus}/>
-      <StorageProviders {...state.storageProviders} leadStatus={state.storageLeadStatus}/>
+      <ContentCurators {...state.contentCurators}/>
+      <StorageProviders {...state.storageProviders}/>
     </WorkingGroupsOverview>
   )
 );

+ 1 - 1
pioneer/packages/joy-roles/src/tabs/WorkingGroup.stories.tsx

@@ -75,6 +75,6 @@ export function ContentCuratorsSection () {
   ];
 
   return (
-    <ContentCurators members={members} rolesAvailable={boolean('Roles available', true)} />
+    <ContentCurators workers={members} workerRolesAvailable={boolean('Roles available', true)} />
   );
 }

+ 24 - 13
pioneer/packages/joy-roles/src/tabs/WorkingGroup.tsx

@@ -10,12 +10,14 @@ import styled from 'styled-components';
 import _ from 'lodash';
 
 export type WorkingGroupMembership = {
-  members: GroupMember[];
-  rolesAvailable: boolean;
+  leadStatus: GroupLeadStatus;
+  workers: GroupMember[];
+  workerRolesAvailable: boolean;
+  leadRolesAvailable: boolean;
 }
 
 const NoRolesAvailable = () => (
-  <Message>
+  <Message info>
     <Message.Header>No open roles at the moment</Message.Header>
     <p>The team is full at the moment, but we intend to expand. Check back for open roles soon!</p>
   </Message>
@@ -25,13 +27,14 @@ type JoinRoleProps = {
   group: WorkingGroups;
   title: string;
   description: string;
+  lead?: boolean;
 };
 
-const JoinRole = ({ group, title, description }: JoinRoleProps) => (
+const JoinRole = ({ group, lead = false, title, description }: JoinRoleProps) => (
   <Message positive>
     <Message.Header>{title}</Message.Header>
     <p>{description}</p>
-    <Link to={`/working-groups/opportunities/${group}`}>
+    <Link to={`/working-groups/opportunities/${group}${lead ? '/lead' : ''}`}>
       <Button icon labelPosition="right" color="green" positive>
         Find out more
         <Icon name={'right arrow' as SemanticICONS} />
@@ -66,36 +69,44 @@ type GroupOverviewProps = GroupOverviewOuterProps & {
   customGroupName?: string;
   customJoinTitle?: string;
   customJoinDesc?: string;
+  customBecomeLeadTitle?: string;
+  customBecomeLeadDesc?: string;
 }
 
 const GroupOverview = Loadable<GroupOverviewProps>(
-  ['members', 'leadStatus'],
+  ['workers', 'leadStatus'],
   ({
     group,
     description,
-    members,
+    workers,
     leadStatus,
-    rolesAvailable,
+    workerRolesAvailable,
+    leadRolesAvailable,
     customGroupName,
     customJoinTitle,
-    customJoinDesc
+    customJoinDesc,
+    customBecomeLeadTitle,
+    customBecomeLeadDesc
   }: GroupOverviewProps) => {
     const groupName = customGroupName || _.startCase(group);
     const joinTitle = customJoinTitle || `Join the ${groupName} group!`;
     const joinDesc = customJoinDesc || `There are openings for new ${groupName}. This is a great way to support Joystream!`;
+    const becomeLeadTitle = customBecomeLeadTitle || `Become ${groupName} Lead!`;
+    const becomeLeadDesc = customBecomeLeadDesc || `An opportunity to become ${groupName} Leader is currently available! This is a great way to support Joystream!`;
     return (
       <GroupOverviewSection>
         <h2>{ groupName }</h2>
         <p>{ description }</p>
         <Card.Group>
-          { members!.map((member, key) => (
-            <GroupMemberView key={key} {...member} />
+          { workers!.map((worker, key) => (
+            <GroupMemberView key={key} {...worker} />
           )) }
         </Card.Group>
-        { rolesAvailable
+        { workerRolesAvailable
           ? <JoinRole group={group} title={joinTitle} description={joinDesc} />
           : <NoRolesAvailable /> }
         { leadStatus && <CurrentLead groupName={groupName} {...leadStatus}/> }
+        { leadRolesAvailable && <JoinRole group={group} lead title={becomeLeadTitle} description={becomeLeadDesc} /> }
       </GroupOverviewSection>
     );
   }
@@ -142,7 +153,7 @@ export const CurrentLead = Loadable<CurrentLeadProps>(
     const leadDesc = customLeadDesc || `This role is responsible for hiring ${groupName}.`;
     return (
       <LeadSection>
-        <Message positive>
+        <Message>
           <Message.Header>{ groupName } Lead</Message.Header>
           <p>{ leadDesc }</p>
           {lead

+ 30 - 45
pioneer/packages/joy-roles/src/transport.mock.ts

@@ -9,8 +9,6 @@ import { ITransport } from './transport';
 import { Role, MemberId } from '@joystream/types/members';
 import {
   Opening,
-  AcceptingApplications,
-  ActiveOpeningStage,
   ApplicationRationingPolicy,
   StakingPolicy
 } from '@joystream/types/hiring';
@@ -26,8 +24,8 @@ import { tomorrow, yesterday, newMockHumanReadableText } from './tabs/Opportunit
 import { OpeningState } from './classifiers';
 
 import * as faker from 'faker';
-import { mockProfile } from './mocks';
-import { WorkingGroups } from './working_groups';
+import { mockProfile, mockStage } from './mocks';
+import { WorkingGroups, workerRoleNameByGroup } from './working_groups';
 
 export class Transport extends TransportBase implements ITransport {
   protected simulateApiResponse<T> (value: T): Promise<T> {
@@ -52,10 +50,12 @@ export class Transport extends TransportBase implements ITransport {
     });
   }
 
-  curationGroup (): Promise<WorkingGroupMembership> {
+  async curationGroup (): Promise<WorkingGroupMembership> {
     return this.simulateApiResponse<WorkingGroupMembership>({
-      rolesAvailable: true,
-      members: [
+      leadStatus: await this.groupLeadStatus(),
+      workerRolesAvailable: true,
+      leadRolesAvailable: false,
+      workers: [
         {
           memberId: new MemberId(1),
           roleAccount: new GenericAccountId('5HZ6GtaeyxagLynPryM7ZnmLzoWFePKuDrkb4AT8rT4pU1fp'),
@@ -112,10 +112,12 @@ export class Transport extends TransportBase implements ITransport {
     });
   }
 
-  storageGroup (): Promise<WorkingGroupMembership> {
+  async storageGroup (): Promise<WorkingGroupMembership> {
     return this.simulateApiResponse<WorkingGroupMembership>({
-      rolesAvailable: true,
-      members: [
+      leadStatus: await this.groupLeadStatus(),
+      workerRolesAvailable: true,
+      leadRolesAvailable: true,
+      workers: [
         {
           memberId: new MemberId(1),
           roleAccount: new GenericAccountId('5HZ6GtaeyxagLynPryM7ZnmLzoWFePKuDrkb4AT8rT4pU1fp'),
@@ -137,11 +139,7 @@ export class Transport extends TransportBase implements ITransport {
         {
           opening: new Opening({
             created: new u32(50000),
-            stage: new ActiveOpeningStage({
-              acceptingApplications: new AcceptingApplications({
-                started_accepting_applicants_at_block: new u32(100)
-              })
-            }),
+            stage: mockStage,
             max_review_period_length: new u32(100),
             application_rationing_policy: new Option(ApplicationRationingPolicy),
             application_staking_policy: new Option(StakingPolicy),
@@ -185,7 +183,7 @@ export class Transport extends TransportBase implements ITransport {
           }),
           meta: {
             id: '1',
-            group: 'somegroup'
+            group: WorkingGroups.ContentCurators
           },
           stage: {
             state: OpeningState.AcceptingApplications,
@@ -212,16 +210,13 @@ export class Transport extends TransportBase implements ITransport {
     );
   }
 
-  curationGroupOpening (id: number): Promise<WorkingGroupOpening> {
+  // eslint-disable-next-line @typescript-eslint/require-await
+  async groupOpening (group: WorkingGroups, id: number): Promise<WorkingGroupOpening> {
     return this.simulateApiResponse<WorkingGroupOpening>(
       {
         opening: new Opening({
           created: new u32(50000),
-          stage: new ActiveOpeningStage({
-            acceptingApplications: new AcceptingApplications({
-              started_accepting_applicants_at_block: new u32(100)
-            })
-          }),
+          stage: mockStage,
           max_review_period_length: new u32(100),
           application_rationing_policy: new Option(ApplicationRationingPolicy),
           application_staking_policy: new Option(StakingPolicy),
@@ -269,7 +264,7 @@ export class Transport extends TransportBase implements ITransport {
         }),
         meta: {
           id: '1',
-          group: 'group-name'
+          group: WorkingGroups.ContentCurators
         },
         stage: {
           state: OpeningState.AcceptingApplications,
@@ -296,11 +291,7 @@ export class Transport extends TransportBase implements ITransport {
     );
   }
 
-  async groupOpening (group: WorkingGroups, id: number): Promise<WorkingGroupOpening> {
-    return await this.curationGroupOpening(id);
-  }
-
-  openingApplicationRanks (openingId: number): Promise<Balance[]> {
+  openingApplicationRanks (group: WorkingGroups, openingId: number): Promise<Balance[]> {
     const slots: Balance[] = [];
     for (let i = 0; i < 20; i++) {
       slots.push(new u128((i * 100) + 10 + i + 1));
@@ -350,12 +341,12 @@ export class Transport extends TransportBase implements ITransport {
   }
 
   // eslint-disable-next-line @typescript-eslint/require-await
-  async openingApplications (): Promise<OpeningApplication[]> {
+  async openingApplicationsByAddress (address: string): Promise<OpeningApplication[]> {
     return [{
       id: 1,
       meta: {
         id: '1',
-        group: 'group-name'
+        group: WorkingGroups.ContentCurators
       },
       stage: {
         state: OpeningState.AcceptingApplications,
@@ -365,11 +356,7 @@ export class Transport extends TransportBase implements ITransport {
       },
       opening: new Opening({
         created: new u32(50000),
-        stage: new ActiveOpeningStage({
-          acceptingApplications: new AcceptingApplications({
-            started_accepting_applicants_at_block: new u32(100)
-          })
-        }),
+        stage: mockStage,
         max_review_period_length: new u32(100),
         application_rationing_policy: new Option(ApplicationRationingPolicy),
         application_staking_policy: new Option(StakingPolicy),
@@ -423,11 +410,12 @@ export class Transport extends TransportBase implements ITransport {
   }
 
   // eslint-disable-next-line @typescript-eslint/require-await
-  async myCurationGroupRoles (): Promise<ActiveRole[]> {
+  async myRoles (address: string): Promise<ActiveRole[]> {
     return [
       {
-        curatorId: new CuratorId(1),
-        name: 'My curation group role',
+        workerId: new CuratorId(1),
+        name: workerRoleNameByGroup[WorkingGroups.ContentCurators],
+        group: WorkingGroups.ContentCurators,
         url: 'some URL',
         reward: new u128(321),
         stake: new u128(12343200)
@@ -435,12 +423,9 @@ export class Transport extends TransportBase implements ITransport {
     ];
   }
 
-  myStorageGroupRoles (): Subscribable<ActiveRole[]> {
-    return new Observable<ActiveRole[]>(observer => { /* do nothing */ });
-  }
-
   // eslint-disable-next-line @typescript-eslint/require-await
-  async applyToCuratorOpening (
+  async applyToOpening (
+    group: WorkingGroups,
     id: number,
     roleAccountName: string,
     sourceAccount: string,
@@ -450,11 +435,11 @@ export class Transport extends TransportBase implements ITransport {
     return 0;
   }
 
-  leaveCurationRole (sourceAccount: string, id: number, rationale: string) {
+  leaveRole (group: WorkingGroups, sourceAccount: string, id: number, rationale: string) {
     /* do nothing */
   }
 
-  withdrawCuratorApplication (sourceAccount: string, id: number) {
+  withdrawApplication (group: WorkingGroups, sourceAccount: string, id: number) {
     /* do nothing */
   }
 }

+ 171 - 117
pioneer/packages/joy-roles/src/transport.substrate.ts

@@ -1,9 +1,8 @@
-import { Observable } from 'rxjs';
 import { map, switchMap } from 'rxjs/operators';
 
 import ApiPromise from '@polkadot/api/promise';
 import { Balance } from '@polkadot/types/interfaces';
-import { GenericAccountId, Option, u32, u128, Vec } from '@polkadot/types';
+import { GenericAccountId, Option, u128, Vec } from '@polkadot/types';
 import { Constructor } from '@polkadot/types/types';
 import { Moment } from '@polkadot/types/interfaces/runtime';
 import { QueueTxExtrinsicAdd } from '@polkadot/react-components/Status/types';
@@ -30,7 +29,7 @@ import {
   RoleStakeProfile
 } from '@joystream/types/working-group';
 
-import { Application, Opening, OpeningId, ApplicationId } from '@joystream/types/hiring';
+import { Application, Opening, OpeningId, ApplicationId, ActiveApplicationStage } from '@joystream/types/hiring';
 import { Stake, StakeId } from '@joystream/types/stake';
 import { RewardRelationship, RewardRelationshipId } from '@joystream/types/recurring-rewards';
 import { ActorInRole, Profile, MemberId, Role, RoleKeys, ActorId } from '@joystream/types/members';
@@ -48,7 +47,7 @@ import {
   classifyOpeningStakes,
   isApplicationHired
 } from './classifiers';
-import { WorkingGroups, AvailableGroups } from './working_groups';
+import { WorkingGroups, AvailableGroups, workerRoleNameByGroup } from './working_groups';
 import { Sort, Sum, Zero } from './balances';
 import _ from 'lodash';
 
@@ -69,7 +68,14 @@ type WGApiMethodType =
   | 'applicationById'
   | 'nextWorkerId'
   | 'workerById';
-type WGApiMethodsMapping = { [key in WGApiMethodType]: string };
+type WGApiTxMethodType =
+  'applyOnOpening'
+  | 'withdrawApplication'
+  | 'leaveRole';
+type WGApiMethodsMapping = {
+  query: { [key in WGApiMethodType]: string };
+  tx: { [key in WGApiTxMethodType]: string };
+};
 
 type GroupApplication = CuratorApplication | WGApplication;
 type GroupApplicationId = CuratorApplicationId | ApplicationId;
@@ -82,6 +88,7 @@ type GroupLead = Lead | Worker;
 type GroupLeadWithMemberId = {
   lead: GroupLead;
   memberId: MemberId;
+  workerId?: WorkerId; // Only when it's `working-groups` module lead
 }
 
 type WGApiMapping = {
@@ -98,12 +105,19 @@ const workingGroupsApiMapping: WGApiMapping = {
   [WorkingGroups.StorageProviders]: {
     module: 'storageWorkingGroup',
     methods: {
-      nextOpeningId: 'nextOpeningId',
-      openingById: 'openingById',
-      nextApplicationId: 'nextApplicationId',
-      applicationById: 'applicationById',
-      nextWorkerId: 'nextWorkerId',
-      workerById: 'workerById'
+      query: {
+        nextOpeningId: 'nextOpeningId',
+        openingById: 'openingById',
+        nextApplicationId: 'nextApplicationId',
+        applicationById: 'applicationById',
+        nextWorkerId: 'nextWorkerId',
+        workerById: 'workerById'
+      },
+      tx: {
+        applyOnOpening: 'applyOnOpening',
+        withdrawApplication: 'withdrawApplication',
+        leaveRole: 'leaveRole'
+      }
     },
     openingType: WGOpening,
     applicationType: WGApplication,
@@ -112,12 +126,19 @@ const workingGroupsApiMapping: WGApiMapping = {
   [WorkingGroups.ContentCurators]: {
     module: 'contentWorkingGroup',
     methods: {
-      nextOpeningId: 'nextCuratorOpeningId',
-      openingById: 'curatorOpeningById',
-      nextApplicationId: 'nextCuratorApplicationId',
-      applicationById: 'curatorApplicationById',
-      nextWorkerId: 'nextCuratorId',
-      workerById: 'curatorById'
+      query: {
+        nextOpeningId: 'nextCuratorOpeningId',
+        openingById: 'curatorOpeningById',
+        nextApplicationId: 'nextCuratorApplicationId',
+        applicationById: 'curatorApplicationById',
+        nextWorkerId: 'nextCuratorId',
+        workerById: 'curatorById'
+      },
+      tx: {
+        applyOnOpening: 'applyOnCuratorOpening',
+        withdrawApplication: 'withdrawCuratorApplication',
+        leaveRole: 'leaveCuratorRole'
+      }
     },
     openingType: CuratorOpening,
     applicationType: CuratorApplication,
@@ -139,11 +160,18 @@ export class Transport extends TransportBase implements ITransport {
 
   cachedApiMethodByGroup (group: WorkingGroups, method: WGApiMethodType) {
     const apiModule = workingGroupsApiMapping[group].module;
-    const apiMethod = workingGroupsApiMapping[group].methods[method];
+    const apiMethod = workingGroupsApiMapping[group].methods.query[method];
 
     return this.cachedApi.query[apiModule][apiMethod];
   }
 
+  apiExtrinsicByGroup (group: WorkingGroups, method: WGApiTxMethodType) {
+    const apiModule = workingGroupsApiMapping[group].module;
+    const apiMethod = workingGroupsApiMapping[group].methods.tx[method];
+
+    return this.api.tx[apiModule][apiMethod];
+  }
+
   unsubscribe () {
     this.cachedApi.unsubscribe();
   }
@@ -233,13 +261,13 @@ export class Transport extends TransportBase implements ITransport {
       roleAccount,
       memberId,
       profile: profile.unwrap(),
-      title: _.startCase(group).slice(0, -1), // FIXME: Temporary solution (just removes "s" at the end)
+      title: workerRoleNameByGroup[group],
       stake: stakeValue,
       earned: earnedValue
     });
   }
 
-  protected async areAnyGroupRolesOpen (group: WorkingGroups): Promise<boolean> {
+  protected async areGroupRolesOpen (group: WorkingGroups, lead = false): Promise<boolean> {
     const nextId = await this.cachedApiMethodByGroup(group, 'nextOpeningId')() as GroupOpeningId;
 
    // This is chain specific, but if next id is still 0, it means no openings have been added yet
@@ -253,9 +281,16 @@ export class Transport extends TransportBase implements ITransport {
       await this.cachedApiMethodByGroup(group, 'openingById')()
     );
 
-    for (let i = 0; i < groupOpenings.linked_values.length; i++) {
-      const opening = await this.opening(groupOpenings.linked_values[i].hiring_opening_id.toNumber());
-      if (opening.is_active) {
+    for (const groupOpening of groupOpenings.linked_values) {
+      const opening = await this.opening(groupOpening.hiring_opening_id.toNumber());
+      if (
+        opening.is_active &&
+        (
+          groupOpening instanceof WGOpening
+            ? (lead === groupOpening.opening_type.isOfType('Leader'))
+            : !lead // Lead openings are never available for the content working group currently
+        )
+      ) {
         return true;
       }
     }
@@ -263,11 +298,6 @@ export class Transport extends TransportBase implements ITransport {
     return false;
   }
 
-  protected async areAnyCuratorRolesOpen (): Promise<boolean> {
-    // Backward compatibility
-    return this.areAnyGroupRolesOpen(WorkingGroups.ContentCurators);
-  }
-
   protected async currentCuratorLead (): Promise<GroupLeadWithMemberId | null> {
     const optLeadId = (await this.cachedApi.query.contentWorkingGroup.currentLeadId()) as Option<LeadId>;
 
@@ -309,7 +339,8 @@ export class Transport extends TransportBase implements ITransport {
 
     return {
       lead: leadWorker,
-      memberId: leadWorker.member_id
+      memberId: leadWorker.member_id,
+      workerId: leadWorkerId
     };
   }
 
@@ -328,6 +359,7 @@ export class Transport extends TransportBase implements ITransport {
       return {
         lead: {
           memberId: currentLead.memberId,
+          workerId: currentLead.workerId,
           roleAccount: currentLead.lead.role_account_id,
           profile: profile.unwrap(),
           title: _.startCase(group) + ' Lead',
@@ -343,32 +375,35 @@ export class Transport extends TransportBase implements ITransport {
   }
 
   async groupOverview (group: WorkingGroups): Promise<WorkingGroupMembership> {
-    const rolesAvailable = await this.areAnyGroupRolesOpen(group);
+    const workerRolesAvailable = await this.areGroupRolesOpen(group);
+    const leadRolesAvailable = await this.areGroupRolesOpen(group, true);
+    const leadStatus = await this.groupLeadStatus(group);
 
     const nextId = await this.cachedApiMethodByGroup(group, 'nextWorkerId')() as GroupWorkerId;
 
-    // This is chain specfic, but if next id is still 0, it means no curators have been added yet
-    if (nextId.eq(0)) {
-      return {
-        members: [],
-        rolesAvailable
-      };
-    }
-
-    const values = new MultipleLinkedMapEntry<GroupWorkerId, GroupWorker>(
-      ActorId,
-      workingGroupsApiMapping[group].workerType,
-      await this.cachedApiMethodByGroup(group, 'workerById')() as GroupWorker
-    );
+    let workersWithIds: { worker: GroupWorker; id: GroupWorkerId }[] = [];
+    // This is chain specific, but if next id is still 0, it means no workers have been added yet
+    if (!nextId.eq(0)) {
+      const values = new MultipleLinkedMapEntry<GroupWorkerId, GroupWorker>(
+        ActorId,
+        workingGroupsApiMapping[group].workerType,
+        await this.cachedApiMethodByGroup(group, 'workerById')() as GroupWorker
+      );
 
-    const workers = values.linked_values.filter(value => value.is_active).reverse();
-    const workerIds = values.linked_keys.filter((v, k) => values.linked_values[k].is_active).reverse();
+      workersWithIds = values.linked_values
+        // First bind workers with ids
+        .map((worker, i) => ({ worker, id: values.linked_keys[i] }))
+        // Filter by: active and "not lead"
+        .filter(({ worker, id }) => worker.is_active && (!leadStatus.lead?.workerId || !id.eq(leadStatus.lead.workerId)));
+    }
 
     return {
-      members: await Promise.all(
-        workers.map((worker, k) => this.groupMember(group, workerIds[k], worker))
+      leadStatus,
+      workers: await Promise.all(
+        workersWithIds.map(({ worker, id }) => this.groupMember(group, id, worker))
       ),
-      rolesAvailable
+      workerRolesAvailable,
+      leadRolesAvailable
     };
   }
 
@@ -446,14 +481,8 @@ export class Transport extends TransportBase implements ITransport {
     return output;
   }
 
-  protected async curatorOpeningApplications (curatorOpeningId: number): Promise<WorkingGroupPair<Application, CuratorApplication>[]> {
-    // Backwards compatibility
-    const applications = await this.groupOpeningApplications(WorkingGroups.ContentCurators, curatorOpeningId);
-    return applications as WorkingGroupPair<Application, CuratorApplication>[];
-  }
-
   async groupOpening (group: WorkingGroups, id: number): Promise<WorkingGroupOpening> {
-    const nextId = (await this.cachedApiMethodByGroup(group, 'nextOpeningId')() as u32).toNumber();
+    const nextId = (await this.cachedApiMethodByGroup(group, 'nextOpeningId')() as GroupOpeningId).toNumber();
     if (id < 0 || id >= nextId) {
       throw new Error('invalid id');
     }
@@ -461,10 +490,10 @@ export class Transport extends TransportBase implements ITransport {
     const groupOpening = new SingleLinkedMapEntry<GroupOpening>(
       workingGroupsApiMapping[group].openingType,
       await this.cachedApiMethodByGroup(group, 'openingById')(id)
-    );
+    ).value;
 
     const opening = await this.opening(
-      groupOpening.value.hiring_opening_id.toNumber()
+      groupOpening.hiring_opening_id.toNumber()
     );
 
     const applications = await this.groupOpeningApplications(group, id);
@@ -474,7 +503,8 @@ export class Transport extends TransportBase implements ITransport {
       opening: opening,
       meta: {
         id: id.toString(),
-        group
+        group,
+        type: groupOpening instanceof WGOpening ? groupOpening.opening_type : undefined
       },
       stage: await classifyOpeningStage(this, opening),
       applications: {
@@ -488,11 +518,6 @@ export class Transport extends TransportBase implements ITransport {
     });
   }
 
-  async curationGroupOpening (id: number): Promise<WorkingGroupOpening> {
-    // Backwards compatibility
-    return this.groupOpening(WorkingGroups.ContentCurators, id);
-  }
-
   protected async openingApplicationTotalStake (application: Application): Promise<Balance> {
     const promises = new Array<Promise<Balance>>();
 
@@ -507,13 +532,14 @@ export class Transport extends TransportBase implements ITransport {
     return Sum(await Promise.all(promises));
   }
 
-  async openingApplicationRanks (openingId: number): Promise<Balance[]> {
-    const applications = await this.curatorOpeningApplications(openingId);
+  async openingApplicationRanks (group: WorkingGroups, openingId: number): Promise<Balance[]> {
+    const applications = await this.groupOpeningApplications(group, openingId);
     return Sort(
       (await Promise.all(
-        applications.map(application => this.openingApplicationTotalStake(application.hiringModule))
+        applications
+          .filter(a => a.hiringModule.stage.value instanceof ActiveApplicationStage)
+          .map(application => this.openingApplicationTotalStake(application.hiringModule))
       ))
-        .filter((b) => !b.eq(Zero))
     );
   }
 
@@ -577,11 +603,12 @@ export class Transport extends TransportBase implements ITransport {
   }
 
   protected async myApplicationRank (myApp: Application, applications: Array<Application>): Promise<number> {
+    const activeApplications = applications.filter(app => app.stage.value instanceof ActiveApplicationStage);
     const stakes = await Promise.all(
-      applications.map(app => this.openingApplicationTotalStake(app))
+      activeApplications.map(app => this.openingApplicationTotalStake(app))
     );
 
-    const appvalues = applications.map((app, key) => {
+    const appvalues = activeApplications.map((app, key) => {
       return {
         app: app,
         value: stakes[key]
@@ -601,15 +628,15 @@ export class Transport extends TransportBase implements ITransport {
     return appvalues.findIndex(v => v.app.eq(myApp)) + 1;
   }
 
-  async openingApplications (roleKeyId: string): Promise<OpeningApplication[]> {
-    const curatorApps = new MultipleLinkedMapEntry<CuratorApplicationId, CuratorApplication>(
-      CuratorApplicationId,
-      CuratorApplication,
-      await this.cachedApi.query.contentWorkingGroup.curatorApplicationById()
+  async openingApplicationsByAddressAndGroup (group: WorkingGroups, roleKey: string): Promise<OpeningApplication[]> {
+    const applications = new MultipleLinkedMapEntry<GroupApplicationId, GroupApplication>(
+      ApplicationId,
+      workingGroupsApiMapping[group].applicationType,
+      await this.cachedApiMethodByGroup(group, 'applicationById')()
     );
 
-    const myApps = curatorApps.linked_values.filter(app => app.role_account.eq(roleKeyId));
-    const myAppIds = curatorApps.linked_keys.filter((id, key) => curatorApps.linked_values[key].role_account.eq(roleKeyId));
+    const myApps = applications.linked_values.filter(app => app.role_account_id.eq(roleKey));
+    const myAppIds = applications.linked_keys.filter((id, key) => applications.linked_values[key].role_account_id.eq(roleKey));
 
     const hiringAppPairs = await Promise.all(
       myApps.map(
@@ -628,73 +655,98 @@ export class Transport extends TransportBase implements ITransport {
       hiringApps.map(app => this.applicationStakes(app))
     );
 
-    const wgs = await Promise.all(
-      myApps.map(curatorOpening => {
-        return this.curationGroupOpening(curatorOpening.curator_opening_id.toNumber());
+    const openings = await Promise.all(
+      myApps.map(application => {
+        return this.groupOpening(group, application.opening_id.toNumber());
       })
     );
 
     const allAppsByOpening = (await Promise.all(
-      myApps.map(curatorOpening => {
-        return this.curatorOpeningApplications(curatorOpening.curator_opening_id.toNumber());
+      myApps.map(application => {
+        return this.groupOpeningApplications(group, application.opening_id.toNumber());
       })
     ));
 
     return await Promise.all(
-      wgs.map(async (wg, key) => {
+      openings.map(async (o, key) => {
         return {
           id: myAppIds[key].toNumber(),
           hired: isApplicationHired(hiringApps[key]),
           cancelledReason: classifyApplicationCancellation(hiringApps[key]),
           rank: await this.myApplicationRank(hiringApps[key], allAppsByOpening[key].map(a => a.hiringModule)),
-          capacity: wg.applications.maxNumberOfApplications,
-          stage: wg.stage,
-          opening: wg.opening,
-          meta: wg.meta,
+          capacity: o.applications.maxNumberOfApplications,
+          stage: o.stage,
+          opening: o.opening,
+          meta: o.meta,
           applicationStake: stakes[key].application,
           roleStake: stakes[key].role,
-          review_end_time: wg.stage.review_end_time,
-          review_end_block: wg.stage.review_end_block
+          review_end_time: o.stage.review_end_time,
+          review_end_block: o.stage.review_end_block
         };
       })
     );
   }
 
-  async myCurationGroupRoles (roleKeyId: string): Promise<ActiveRole[]> {
-    const curators = new MultipleLinkedMapEntry<CuratorId, Curator>(
-      CuratorId,
-      Curator,
-      await this.cachedApi.query.contentWorkingGroup.curatorById()
+  // Get opening applications for all groups by address
+  async openingApplicationsByAddress (roleKey: string): Promise<OpeningApplication[]> {
+    let applications: OpeningApplication[] = [];
+    for (const group of AvailableGroups) {
+      applications = applications.concat(await this.openingApplicationsByAddressAndGroup(group, roleKey));
+    }
+
+    return applications;
+  }
+
+  async myRolesByGroup (group: WorkingGroups, roleKeyId: string): Promise<ActiveRole[]> {
+    const workers = new MultipleLinkedMapEntry<GroupWorkerId, GroupWorker>(
+      ActorId,
+      workingGroupsApiMapping[group].workerType,
+      await this.cachedApiMethodByGroup(group, 'workerById')()
     );
 
+    const groupLead = (await this.groupLeadStatus(group)).lead;
+
     return Promise.all(
-      curators
+      workers
         .linked_values
         .toArray()
-        .filter(curator => curator.role_account.eq(roleKeyId) && curator.is_active)
-        .map(async (curator, key) => {
+        // We need to associate worker ids with workers BEFORE filtering the array
+        .map((worker, index) => ({ worker, id: workers.linked_keys[index] }))
+        .filter(({ worker }) => worker.role_account_id.eq(roleKeyId) && worker.is_active)
+        .map(async workerWithId => {
+          const { worker, id } = workerWithId;
+
           let stakeValue: Balance = new u128(0);
-          if (curator.role_stake_profile && curator.role_stake_profile.isSome) {
-            stakeValue = await this.workerStake(curator.role_stake_profile.unwrap());
+          if (worker.role_stake_profile && worker.role_stake_profile.isSome) {
+            stakeValue = await this.workerStake(worker.role_stake_profile.unwrap());
           }
 
           let earnedValue: Balance = new u128(0);
-          if (curator.reward_relationship && curator.reward_relationship.isSome) {
-            earnedValue = await this.workerTotalReward(curator.reward_relationship.unwrap());
+          if (worker.reward_relationship && worker.reward_relationship.isSome) {
+            earnedValue = await this.workerTotalReward(worker.reward_relationship.unwrap());
           }
 
           return {
-            curatorId: curators.linked_keys[key],
-            name: 'Content curator',
+            workerId: id,
+            name: (groupLead?.workerId && groupLead.workerId.eq(id))
+              ? _.startCase(group) + ' Lead'
+              : workerRoleNameByGroup[group],
             reward: earnedValue,
-            stake: stakeValue
+            stake: stakeValue,
+            group
           };
         })
     );
   }
 
-  myStorageGroupRoles (): Subscribable<ActiveRole[]> {
-    return new Observable<ActiveRole[]>(observer => { /* do nothing */ });
+  // All groups roles by key
+  async myRoles (roleKey: string): Promise<ActiveRole[]> {
+    let roles: ActiveRole[] = [];
+    for (const group of AvailableGroups) {
+      roles = roles.concat(await this.myRolesByGroup(group, roleKey));
+    }
+
+    return roles;
   }
 
   protected generateRoleAccount (name: string, password = ''): string | null {
@@ -709,7 +761,8 @@ export class Transport extends TransportBase implements ITransport {
     return status.account as string;
   }
 
-  applyToCuratorOpening (
+  applyToOpening (
+    group: WorkingGroups,
     id: number,
     roleAccountName: string,
     sourceAccount: string,
@@ -727,13 +780,14 @@ export class Transport extends TransportBase implements ITransport {
           if (!roleAccount) {
             reject(new Error('failed to create role account'));
           }
-          const tx = this.api.tx.contentWorkingGroup.applyOnCuratorOpening(
-            membershipIds[0],
-            new u32(id),
-            new GenericAccountId(roleAccount as string),
-            roleStake.eq(Zero) ? null : roleStake,
-            appStake.eq(Zero) ? null : appStake,
-            applicationText
+          const tx = this.apiExtrinsicByGroup(group, 'applyOnOpening')(
+            membershipIds[0], // Member id
+            id, // Worker/Curator opening id
+            roleAccount, // Role account
+            // TODO: Will need to be adjusted if AtLeast Zero stakes become possible
+            roleStake.eq(Zero) ? null : roleStake, // Role stake
+            appStake.eq(Zero) ? null : appStake, // Application stake
+            applicationText // Human readable text
           ) as unknown as SubmittableExtrinsic;
 
           const txFailedCb = () => {
@@ -754,8 +808,8 @@ export class Transport extends TransportBase implements ITransport {
     });
   }
 
-  leaveCurationRole (sourceAccount: string, id: number, rationale: string) {
-    const tx = this.api.tx.contentWorkingGroup.leaveCuratorRole(
+  leaveRole (group: WorkingGroups, sourceAccount: string, id: number, rationale: string) {
+    const tx = this.apiExtrinsicByGroup(group, 'leaveRole')(
       id,
       rationale
     ) as unknown as SubmittableExtrinsic;
@@ -766,8 +820,8 @@ export class Transport extends TransportBase implements ITransport {
     });
   }
 
-  withdrawCuratorApplication (sourceAccount: string, id: number) {
-    const tx = this.api.tx.contentWorkingGroup.withdrawCuratorApplication(
+  withdrawApplication (group: WorkingGroups, sourceAccount: string, id: number) {
+    const tx = this.apiExtrinsicByGroup(group, 'withdrawApplication')(
       id
     ) as unknown as SubmittableExtrinsic;
 

+ 10 - 9
pioneer/packages/joy-roles/src/transport.ts

@@ -16,22 +16,23 @@ export interface ITransport {
   storageGroup: () => Promise<WorkingGroupMembership>;
   currentOpportunities: () => Promise<Array<WorkingGroupOpening>>;
   groupOpening: (group: WorkingGroups, id: number) => Promise<WorkingGroupOpening>;
-  curationGroupOpening: (id: number) => Promise<WorkingGroupOpening>;
-  openingApplicationRanks: (openingId: number) => Promise<Balance[]>;
+  openingApplicationRanks: (group: WorkingGroups, openingId: number) => Promise<Balance[]>;
   expectedBlockTime: () => Promise<number>;
   blockHash: (height: number) => Promise<string>;
   blockTimestamp: (height: number) => Promise<Date>;
   transactionFee: () => Promise<Balance>;
   accounts: () => Subscribable<keyPairDetails[]>;
-  openingApplications: (address: string) => Promise<OpeningApplication[]>;
-  myCurationGroupRoles: (address: string) => Promise<ActiveRole[]>;
-  myStorageGroupRoles: () => Subscribable<ActiveRole[]>;
-  applyToCuratorOpening: (id: number,
+  openingApplicationsByAddress: (address: string) => Promise<OpeningApplication[]>;
+  myRoles: (address: string) => Promise<ActiveRole[]>;
+  applyToOpening: (
+    group: WorkingGroups,
+    id: number,
     roleAccountName: string,
     sourceAccount: string,
     appStake: Balance,
     roleStake: Balance,
-    applicationText: string) => Promise<number>;
-  leaveCurationRole: (sourceAccount: string, id: number, rationale: string) => void;
-  withdrawCuratorApplication: (sourceAccount: string, id: number) => void;
+    applicationText: string
+  ) => Promise<number>;
+  leaveRole: (group: WorkingGroups, sourceAccount: string, id: number, rationale: string) => void;
+  withdrawApplication: (group: WorkingGroups, sourceAccount: string, id: number) => void;
 }

+ 5 - 0
pioneer/packages/joy-roles/src/working_groups.ts

@@ -7,3 +7,8 @@ export const AvailableGroups: readonly WorkingGroups[] = [
   WorkingGroups.ContentCurators,
   WorkingGroups.StorageProviders
 ] as const;
+
+export const workerRoleNameByGroup: { [key in WorkingGroups]: string } = {
+  [WorkingGroups.ContentCurators]: 'Content Curator',
+  [WorkingGroups.StorageProviders]: 'Storage Provider'
+};

+ 2 - 1
pioneer/packages/joy-utils/src/View.tsx

@@ -39,11 +39,12 @@ export function View<C extends Controller<S, any>, S> (args: ViewProps<C, S>): V
 
       useEffect(() => {
         controller.subscribe(onUpdate);
+        controller.dispatch(); // Dispatch on first subscription (in case there was a re-render of the View)
 
         return () => {
           controller.unsubscribe(onUpdate);
         };
-      });
+      }, []);
 
       let context: Params;
       if (typeof props.params !== 'undefined') {

+ 1 - 1
pioneer/packages/react-components/src/AddressCard.tsx

@@ -3,7 +3,7 @@
 // of the Apache-2.0 license. See the LICENSE file for details.
 // import { I18nProps } from '@polkadot/react-components/types';
 
-// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
+// eslint-disable-next-line @typescript-eslint/ban-ts-ignore, @typescript-eslint/ban-ts-comment
 // @ts-ignore This line needed for the styled export... don't ask why
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 import BN from 'bn.js';

+ 48 - 11
runtime-modules/storage/src/data_directory.rs

@@ -22,8 +22,10 @@
 //#![warn(missing_docs)]
 
 use codec::{Decode, Encode};
+use rstd::collections::btree_map::BTreeMap;
 use rstd::prelude::*;
 use sr_primitives::traits::{MaybeSerialize, Member};
+use srml_support::traits::Get;
 use srml_support::{decl_error, decl_event, decl_module, decl_storage, ensure, Parameter};
 use system::{self, ensure_root};
 
@@ -56,6 +58,8 @@ pub trait Trait:
 
     /// Validates member id and origin combination.
     type MemberOriginValidator: ActorOriginValidator<Self::Origin, MemberId<Self>, Self::AccountId>;
+
+    type MaxObjectsPerInjection: Get<u32>;
 }
 
 decl_error! {
@@ -75,6 +79,9 @@ decl_error! {
 
         /// Require root origin in extrinsics.
         RequireRootOrigin,
+
+        /// DataObject Injection Failed. Too Many DataObjects.
+        DataObjectsInjectionExceededLimit
     }
 }
 
@@ -116,23 +123,32 @@ impl Default for LiaisonJudgement {
     }
 }
 
+/// Alias for DataObjectInternal
+pub type DataObject<T> = DataObjectInternal<
+    MemberId<T>,
+    <T as system::Trait>::BlockNumber,
+    <T as timestamp::Trait>::Moment,
+    <T as data_object_type_registry::Trait>::DataObjectTypeId,
+    StorageProviderId<T>,
+>;
+
 /// Manages content ids, type and storage provider decision about it.
 #[derive(Clone, Encode, Decode, PartialEq, Debug)]
-pub struct DataObject<T: Trait> {
+pub struct DataObjectInternal<MemberId, BlockNumber, Moment, DataObjectTypeId, StorageProviderId> {
     /// Content owner.
-    pub owner: MemberId<T>,
+    pub owner: MemberId,
 
     /// Content added at.
-    pub added_at: BlockAndTime<T::BlockNumber, T::Moment>,
+    pub added_at: BlockAndTime<BlockNumber, Moment>,
 
     /// Content type id.
-    pub type_id: <T as data_object_type_registry::Trait>::DataObjectTypeId,
+    pub type_id: DataObjectTypeId,
 
     /// Content size in bytes.
     pub size: u64,
 
     /// Storage provider id of the liaison.
-    pub liaison: StorageProviderId<T>,
+    pub liaison: StorageProviderId,
 
     /// Storage provider as liaison judgment.
     pub liaison_judgement: LiaisonJudgement,
@@ -141,6 +157,9 @@ pub struct DataObject<T: Trait> {
     pub ipfs_content_id: Vec<u8>,
 }
 
+/// A map collection of unique DataObjects keyed by the ContentId
+pub type DataObjectsMap<T> = BTreeMap<<T as Trait>::ContentId, DataObject<T>>;
+
 decl_storage! {
     trait Store for Module<T: Trait> as DataDirectory {
         /// List of ids known to the system.
@@ -188,6 +207,8 @@ decl_module! {
         /// Predefined errors.
         type Error = Error;
 
+        /// Maximum objects allowed per inject_data_objects() transaction
+        const MaxObjectsPerInjection: u32 = T::MaxObjectsPerInjection::get();
 
         /// Adds the content to the system. Member id should match its origin. The created DataObject
         /// awaits liaison to accept or reject it.
@@ -213,7 +234,7 @@ decl_module! {
             let liaison = T::StorageProviderHelper::get_random_storage_provider()?;
 
             // Let's create the entry then
-            let data: DataObject<T> = DataObject {
+            let data: DataObject<T> = DataObjectInternal {
                 type_id,
                 size,
                 added_at: common::current_block_time::<T>(),
@@ -279,13 +300,29 @@ decl_module! {
             <KnownContentIds<T>>::put(upd_content_ids);
         }
 
-        /// Sets the content id from the list of known content ids. Requires root privileges.
-        fn set_known_content_id(origin, content_ids: Vec<T::ContentId>) {
+        /// Injects a set of data objects and their corresponding content id into the directory.
+        /// The operation is "silent" - no events will be emitted as objects are added.
+        /// The number of objects that can be added per call is limited to prevent the dispatch
+        /// from causing the block production to fail if it takes too much time to process.
+        /// Existing data objects will be overwritten.
+        pub(crate) fn inject_data_objects(origin, objects: DataObjectsMap<T>) {
             ensure_root(origin)?;
 
-            // == MUTATION SAFE ==
-
-            <KnownContentIds<T>>::put(content_ids);
+            // Must not exceed the per-call injection limit (see MaxObjectsPerInjection)
+            ensure!(objects.len() <= T::MaxObjectsPerInjection::get() as usize, Error::DataObjectsInjectionExceededLimit);
+
+            for (id, object) in objects.into_iter() {
+                // append to known content ids
+                // duplicates will be removed at the end
+                <KnownContentIds<T>>::mutate(|ids| ids.push(id));
+                <DataObjectByContentId<T>>::insert(id, object);
+            }
+
+            // remove duplicate ids
+            <KnownContentIds<T>>::mutate(|ids| {
+                ids.sort();
+                ids.dedup();
+            });
         }
     }
 }

+ 129 - 0
runtime-modules/storage/src/tests/data_directory.rs

@@ -2,6 +2,7 @@
 
 use super::mock::*;
 use crate::data_directory::Error;
+use rstd::collections::btree_map::BTreeMap;
 use system::RawOrigin;
 
 #[test]
@@ -169,3 +170,131 @@ fn reject_content_as_liaison() {
         assert_eq!(res, Ok(()));
     });
 }
+
+#[test]
+fn data_object_injection_works() {
+    with_default_mock_builder(|| {
+        // No objects in directory before injection
+        assert_eq!(TestDataDirectory::known_content_ids(), vec![]);
+
+        // new objects to inject into the directory
+        let mut objects = BTreeMap::new();
+
+        let object = data_directory::DataObjectInternal {
+            type_id: 1,
+            size: 1234,
+            added_at: data_directory::BlockAndTime {
+                block: 10,
+                time: 1024,
+            },
+            owner: 1,
+            liaison: TEST_MOCK_LIAISON_STORAGE_PROVIDER_ID,
+            liaison_judgement: data_directory::LiaisonJudgement::Pending,
+            ipfs_content_id: vec![],
+        };
+
+        let content_id_1 = 1;
+        objects.insert(content_id_1, object.clone());
+
+        let content_id_2 = 2;
+        objects.insert(content_id_2, object.clone());
+
+        let res = TestDataDirectory::inject_data_objects(Origin::ROOT, objects);
+        assert!(res.is_ok());
+
+        assert_eq!(
+            TestDataDirectory::known_content_ids(),
+            vec![content_id_1, content_id_2]
+        );
+
+        assert_eq!(
+            TestDataDirectory::data_object_by_content_id(content_id_1),
+            Some(object.clone())
+        );
+
+        assert_eq!(
+            TestDataDirectory::data_object_by_content_id(content_id_2),
+            Some(object)
+        );
+    });
+}
+
+#[test]
+fn data_object_injection_overwrites_and_removes_duplicate_ids() {
+    with_default_mock_builder(|| {
+        let sender = 1u64;
+        let member_id = 1u64;
+        let content_id_1 = 1;
+        let content_id_2 = 2;
+
+        // Start with some existing objects in directory which will be
+        // overwritten
+        let res = TestDataDirectory::add_content(
+            Origin::signed(sender),
+            member_id,
+            content_id_1,
+            1,
+            10,
+            vec![8, 8, 8, 8],
+        );
+        assert!(res.is_ok());
+        let res = TestDataDirectory::add_content(
+            Origin::signed(sender),
+            member_id,
+            content_id_2,
+            2,
+            20,
+            vec![9, 9, 9, 9],
+        );
+        assert!(res.is_ok());
+
+        let mut objects = BTreeMap::new();
+
+        let object1 = data_directory::DataObjectInternal {
+            type_id: 1,
+            size: 6666,
+            added_at: data_directory::BlockAndTime {
+                block: 10,
+                time: 1000,
+            },
+            owner: 5,
+            liaison: TEST_MOCK_LIAISON_STORAGE_PROVIDER_ID,
+            liaison_judgement: data_directory::LiaisonJudgement::Pending,
+            ipfs_content_id: vec![5, 6, 7],
+        };
+
+        let object2 = data_directory::DataObjectInternal {
+            type_id: 1,
+            size: 7777,
+            added_at: data_directory::BlockAndTime {
+                block: 20,
+                time: 2000,
+            },
+            owner: 6,
+            liaison: TEST_MOCK_LIAISON_STORAGE_PROVIDER_ID,
+            liaison_judgement: data_directory::LiaisonJudgement::Pending,
+            ipfs_content_id: vec![5, 6, 7],
+        };
+
+        objects.insert(content_id_1, object1.clone());
+        objects.insert(content_id_2, object2.clone());
+
+        let res = TestDataDirectory::inject_data_objects(Origin::ROOT, objects);
+        assert!(res.is_ok());
+
+        assert_eq!(
+            TestDataDirectory::known_content_ids(),
+            vec![content_id_1, content_id_2]
+        );
+
+        assert_eq!(
+            TestDataDirectory::data_object_by_content_id(content_id_1),
+            Some(object1.clone())
+        );
+
+        assert_eq!(
+            TestDataDirectory::data_object_by_content_id(content_id_2),
+            Some(object2)
+        );
+    });
+}

+ 3 - 1
runtime-modules/storage/src/tests/mock.rs

@@ -63,7 +63,7 @@ impl ContentIdExists<Test> for MockContent {
         which: &<Test as data_directory::Trait>::ContentId,
     ) -> Result<data_directory::DataObject<Test>, &'static str> {
         match *which {
-            TEST_MOCK_EXISTING_CID => Ok(data_directory::DataObject {
+            TEST_MOCK_EXISTING_CID => Ok(data_directory::DataObjectInternal {
                 type_id: 1,
                 size: 1234,
                 added_at: data_directory::BlockAndTime {
@@ -89,6 +89,7 @@ parameter_types! {
     pub const MaximumBlockLength: u32 = 2 * 1024;
     pub const AvailableBlockRatio: Perbill = Perbill::one();
     pub const MinimumPeriod: u64 = 5;
+    pub const MaxObjectsPerInjection: u32 = 5;
 }
 
 impl system::Trait for Test {
@@ -166,6 +167,7 @@ impl data_directory::Trait for Test {
     type StorageProviderHelper = ();
     type IsActiveDataObjectType = AnyDataObjectTypeIsActive;
     type MemberOriginValidator = ();
+    type MaxObjectsPerInjection = MaxObjectsPerInjection;
 }
 
 impl crate::data_directory::StorageProviderHelper<Test> for () {

+ 1 - 1
runtime/Cargo.toml

@@ -4,7 +4,7 @@ edition = '2018'
 name = 'joystream-node-runtime'
 # Follow convention: https://github.com/Joystream/substrate-runtime-joystream/issues/1
 # {Authoring}.{Spec}.{Impl} of the RuntimeVersion
-version = '6.18.0'
+version = '6.19.0'
 
 [features]
 default = ['std']

+ 6 - 1
runtime/src/lib.rs

@@ -161,7 +161,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
     spec_name: create_runtime_str!("joystream-node"),
     impl_name: create_runtime_str!("joystream-node"),
     authoring_version: 6,
-    spec_version: 18,
+    spec_version: 19,
     impl_version: 0,
     apis: RUNTIME_API_VERSIONS,
 };
@@ -560,6 +560,10 @@ impl memo::Trait for Runtime {
     type Event = Event;
 }
 
+parameter_types! {
+    pub const MaxObjectsPerInjection: u32 = 100;
+}
+
 impl storage::data_object_type_registry::Trait for Runtime {
     type Event = Event;
     type DataObjectTypeId = u64;
@@ -571,6 +575,7 @@ impl storage::data_directory::Trait for Runtime {
     type StorageProviderHelper = integration::storage::StorageProviderHelper;
     type IsActiveDataObjectType = DataObjectTypeRegistry;
     type MemberOriginValidator = MembershipOriginValidator<Self>;
+    type MaxObjectsPerInjection = MaxObjectsPerInjection;
 }
 
 impl storage::data_object_storage_registry::Trait for Runtime {

+ 29 - 27
runtime/src/migration.rs

@@ -3,7 +3,10 @@
 
 use crate::VERSION;
 use rstd::prelude::*;
-use sr_primitives::{print, traits::Zero};
+use sr_primitives::{
+    print,
+    traits::{One, Zero},
+};
 use srml_support::{debug, decl_event, decl_module, decl_storage};
 
 impl<T: Trait> Module<T> {
@@ -21,8 +24,7 @@ impl<T: Trait> Module<T> {
 
         Self::initialize_storage_working_group_mint();
         Self::initialize_storage_working_group_text_constraints();
-        // temporary comment storage migration
-        //        Self::clear_storage_data();
+        Self::clear_storage_data();
     }
 }
 
@@ -98,28 +100,28 @@ impl<T: Trait> Module<T> {
         );
     }
 
-    // fn clear_storage_data() {
-    //     // Clear storage data object registry data.
-    //     for id in <storage::data_directory::Module<T>>::known_content_ids() {
-    //         <storage::data_object_storage_registry::RelationshipsByContentId<T>>::remove(id);
-    //     }
-    //
-    //     let mut potential_id = <T as storage::data_object_storage_registry::Trait>::DataObjectStorageRelationshipId::zero();
-    //     while potential_id
-    //         < storage::data_object_storage_registry::Module::<T>::next_relationship_id()
-    //     {
-    //         <storage::data_object_storage_registry::Relationships<T>>::remove(&potential_id);
-    //
-    //         potential_id += <T as storage::data_object_storage_registry::Trait>::DataObjectStorageRelationshipId::one();
-    //     }
-    //
-    //     storage::data_object_storage_registry::NextRelationshipId::<T>::kill();
-    //
-    //     // Clear storage data directory data.
-    //     for id in <storage::data_directory::Module<T>>::known_content_ids() {
-    //         <storage::data_directory::DataObjectByContentId<T>>::remove(id);
-    //     }
-    //
-    //     <storage::data_directory::KnownContentIds<T>>::kill();
-    // }
+    fn clear_storage_data() {
+        // Clear storage data object registry data.
+        for id in <storage::data_directory::Module<T>>::known_content_ids() {
+            <storage::data_object_storage_registry::RelationshipsByContentId<T>>::remove(id);
+        }
+
+        let mut potential_id = <T as storage::data_object_storage_registry::Trait>::DataObjectStorageRelationshipId::zero();
+        while potential_id
+            < storage::data_object_storage_registry::Module::<T>::next_relationship_id()
+        {
+            <storage::data_object_storage_registry::Relationships<T>>::remove(&potential_id);
+
+            potential_id += <T as storage::data_object_storage_registry::Trait>::DataObjectStorageRelationshipId::one();
+        }
+
+        storage::data_object_storage_registry::NextRelationshipId::<T>::kill();
+
+        // Clear storage data directory data.
+        for id in <storage::data_directory::Module<T>>::known_content_ids() {
+            <storage::data_directory::DataObjectByContentId<T>>::remove(id);
+        }
+
+        <storage::data_directory::KnownContentIds<T>>::kill();
+    }
 }

+ 38 - 286
storage-node/.eslintrc.js

@@ -1,290 +1,42 @@
 module.exports = {
-    "env": {
-        "es6": true,
-        "node": true
+    env: {
+        node: true,
+        es6: true,
+		mocha: true,
     },
-    "extends": "eslint:recommended",
-    "parserOptions": {
-        "ecmaVersion": 2018
+    globals: {
+        Atomics: "readonly",
+        SharedArrayBuffer: "readonly",
     },
-    "rules": {
-        "accessor-pairs": "error",
-        "array-bracket-newline": "off",
-        "array-bracket-spacing": [
-            "error",
-            "never",
-        ],
-        "array-callback-return": "error",
-        "array-element-newline": [
-          "error",
-          "consistent",
-        ],
-        "arrow-body-style": [
-          "warn",
-          "as-needed"
-        ],
-        "arrow-parens": [
-            "error",
-            "always"
-        ],
-        "arrow-spacing": [
-            "error",
-            {
-                "after": true,
-                "before": true
-            }
-        ],
-        "block-scoped-var": "error",
-        "block-spacing": "error",
-        "brace-style": "off",
-        "callback-return": "error",
-        "camelcase": "off",
-        "capitalized-comments": "off",
-        "class-methods-use-this": "error",
-        "comma-dangle": "off",
-        "comma-spacing": "off",
-        "comma-style": [
-            "error",
-            "last"
-        ],
-        "complexity": "error",
-        "computed-property-spacing": [
-            "error",
-            "never"
-        ],
-        "consistent-return": "error",
-        "consistent-this": "error",
-        "curly": "error",
-        "default-case": "error",
-        "dot-location": "error",
-        "dot-notation": "off",
-        "eol-last": "error",
-        "eqeqeq": "off",
-        "func-call-spacing": "error",
-        "func-name-matching": "off",
-        "func-names": "off",
-        "func-style": "off",
-        "function-paren-newline": "off",
-        "generator-star-spacing": "error",
-        "global-require": "off",
-        "guard-for-in": "warn",
-        "handle-callback-err": "error",
-        "id-blacklist": "error",
-        "id-length": "off",
-        "id-match": "error",
-        "implicit-arrow-linebreak": "off",
-        "indent": "off",
-        "indent-legacy": "off",
-        "init-declarations": "off",
-        "jsx-quotes": "error",
-        "key-spacing": "error",
-        "keyword-spacing": [
-            "error",
-            {
-                "after": true,
-                "before": true
-            }
-        ],
-        "line-comment-position": "off",
-        "linebreak-style": [
-            "error",
-            "unix"
-        ],
-        "lines-around-comment": "error",
-        "lines-around-directive": "error",
-        "lines-between-class-members": "error",
-        "max-classes-per-file": "error",
-        "max-depth": "error",
-        "max-len": "off",
-        "max-lines": "off",
-        "max-lines-per-function": "off",
-        "max-nested-callbacks": "error",
-        "max-params": "off",
-        "max-statements": "off",
-        "max-statements-per-line": "error",
-        "multiline-comment-style": "off",
-        "new-cap": "error",
-        "new-parens": "error",
-        "newline-after-var": "off",
-        "newline-before-return": "off",
-        "newline-per-chained-call": "off",
-        "no-alert": "error",
-        "no-array-constructor": "error",
-        "no-async-promise-executor": "error",
-        "no-await-in-loop": "error",
-        "no-bitwise": "error",
-        "no-buffer-constructor": "error",
-        "no-caller": "error",
-        "no-catch-shadow": "error",
-        "no-confusing-arrow": "error",
-        "no-continue": "off",
-        "no-constant-condition": "off",
-        "no-div-regex": "error",
-        "no-duplicate-imports": "error",
-        "no-else-return": "off",
-        "no-empty-function": "error",
-        "no-eq-null": "error",
-        "no-eval": "error",
-        "no-extend-native": "error",
-        "no-extra-bind": "error",
-        "no-extra-label": "error",
-        "no-extra-parens": "off",
-        "no-floating-decimal": "error",
-        "no-implicit-globals": "error",
-        "no-implied-eval": "error",
-        "no-inline-comments": "off",
-        "no-invalid-this": "error",
-        "no-iterator": "error",
-        "no-label-var": "error",
-        "no-labels": "error",
-        "no-lone-blocks": "error",
-        "no-lonely-if": "error",
-        "no-loop-func": "error",
-        "no-magic-numbers": "off",
-        "no-misleading-character-class": "error",
-        "no-mixed-operators": "error",
-        "no-mixed-requires": "error",
-        "no-multi-assign": "error",
-        "no-multi-spaces": "off",
-        "no-multi-str": "error",
-        "no-multiple-empty-lines": "error",
-        "no-native-reassign": "error",
-        "no-negated-condition": "error",
-        "no-negated-in-lhs": "error",
-        "no-nested-ternary": "error",
-        "no-new": "error",
-        "no-new-func": "error",
-        "no-new-object": "error",
-        "no-new-require": "error",
-        "no-new-wrappers": "error",
-        "no-octal-escape": "error",
-        "no-param-reassign": "error",
-        "no-path-concat": "error",
-        "no-plusplus": "off",
-        "no-process-env": "error",
-        "no-process-exit": "error",
-        "no-proto": "error",
-        "no-prototype-builtins": "error",
-        "no-restricted-globals": "error",
-        "no-restricted-imports": "error",
-        "no-restricted-modules": "error",
-        "no-restricted-properties": "error",
-        "no-restricted-syntax": "error",
-        "no-return-assign": "error",
-        "no-return-await": "error",
-        "no-script-url": "error",
-        "no-self-compare": "error",
-        "no-sequences": "error",
-        "no-shadow": "error",
-        "no-shadow-restricted-names": "error",
-        "no-spaced-func": "error",
-        "no-sync": "warn",
-        "no-tabs": "error",
-        "no-template-curly-in-string": "error",
-        "no-ternary": "off",
-        "no-throw-literal": "error",
-        "no-trailing-spaces": "error",
-        "no-undef-init": "error",
-        "no-undefined": "off",
-        "no-underscore-dangle": "off",
-        "no-unmodified-loop-condition": "error",
-        "no-unneeded-ternary": "off",
-        "no-unused-expressions": "error",
-        "no-unused-vars": [
-          "error",
-          {
-            "argsIgnorePattern": "^_",
-          },
-        ],
-        "no-use-before-define": "error",
-        "no-useless-call": "error",
-        "no-useless-catch": "error",
-        "no-useless-computed-key": "error",
-        "no-useless-concat": "error",
-        "no-useless-constructor": "error",
-        "no-useless-rename": "error",
-        "no-useless-return": "error",
-        "no-useless-escape": "off",
-        "no-var": "off",
-        "no-void": "error",
-        "no-warning-comments": "warn",
-        "no-whitespace-before-property": "error",
-        "no-with": "error",
-        "nonblock-statement-body-position": "error",
-        "object-curly-newline": "error",
-        "object-curly-spacing": [
-            "error",
-            "always"
-        ],
-        "object-shorthand": "off",
-        "one-var": "off",
-        "one-var-declaration-per-line": "error",
-        "operator-assignment": "error",
-        "operator-linebreak": "error",
-        "padded-blocks": "off",
-        "padding-line-between-statements": "error",
-        "prefer-arrow-callback": "off",
-        "prefer-const": "error",
-        "prefer-destructuring": "off",
-        "prefer-numeric-literals": "error",
-        "prefer-object-spread": "error",
-        "prefer-promise-reject-errors": "error",
-        "prefer-reflect": "off",
-        "prefer-rest-params": "error",
-        "prefer-spread": "error",
-        "prefer-template": "off",
-        "quote-props": "off",
-        "quotes": "off",
-        "radix": "error",
-        "require-atomic-updates": "error",
-        "require-await": "error",
-        "require-jsdoc": "warn",
-        "require-unicode-regexp": "error",
-        "rest-spread-spacing": [
-            "error",
-            "never"
-        ],
-        "semi": "off",
-        "semi-spacing": "error",
-        "semi-style": [
-            "error",
-            "last"
-        ],
-        "sort-imports": "error",
-        "sort-keys": "off",
-        "sort-vars": "error",
-        "space-before-blocks": "error",
-        "space-before-function-paren": "off",
-        "space-in-parens": [
-            "error",
-            "never"
-        ],
-        "space-infix-ops": "error",
-        "space-unary-ops": "error",
-        "spaced-comment": [
-            "error",
-            "always"
-        ],
-        "strict": "error",
-        "switch-colon-spacing": "error",
-        "symbol-description": "error",
-        "template-curly-spacing": [
-            "error",
-            "never"
-        ],
-        "template-tag-spacing": "error",
-        "unicode-bom": [
-            "error",
-            "never"
-        ],
-        "valid-jsdoc": "error",
-        "vars-on-top": "off",
-        "wrap-iife": "error",
-        "wrap-regex": "error",
-        "yield-star-spacing": "error",
-        "yoda": [
-            "error",
-            "never"
-        ]
-    }
+    extends: [
+        "esnext",
+        "esnext/style-guide",
+        "plugin:prettier/recommended"
+    ],
+	"rules": {
+		"import/no-commonjs": "off", // remove after converting to TS.
+		// Disabling Rules because of monorepo environment:
+		// https://github.com/benmosher/eslint-plugin-import/issues/1174
+		"import/no-extraneous-dependencies": "off",
+		"import/no-nodejs-modules": "off", // nodejs project
+		"no-console": "off" // we use console in the project
+	},
+	"overrides": [
+		{
+			"files": [
+				"**/test/ranges.js",
+				"**/test/lru.js",
+				"**/test/fs/walk.js",
+				"**/test/storage.js",
+				"**/test/identities.js",
+				"**/test/balances.js",
+				"**/test/assets.js",
+			],
+			"rules": {
+				// Disabling Rules because of used chai lib:
+				// https://stackoverflow.com/questions/45079454/no-unused-expressions-in-mocha-chai-unit-test-using-standardjs
+				"no-unused-expressions": "off",
+			}
+		}
+	]
 };

+ 8 - 0
storage-node/.prettierrc

@@ -0,0 +1,8 @@
+{
+    "semi": false,
+    "trailingComma": "es5",
+    "singleQuote": true,
+	"arrowParens": "avoid",
+	"useTabs": false,
+	"tabWidth": 2
+}

+ 7 - 1
storage-node/package.json

@@ -32,9 +32,15 @@
   ],
   "scripts": {
     "test": "wsrun --serial test",
-    "lint": "wsrun --serial lint"
+    "lint": "eslint --ignore-path .gitignore ."
   },
   "devDependencies": {
+    "eslint": "^5.16.0",
+    "eslint-config-esnext": "^4.1.0",
+    "eslint-config-prettier": "^6.11.0",
+    "eslint-plugin-babel": "^5.3.1",
+    "eslint-plugin-prettier": "^3.1.4",
+    "prettier": "^2.0.5",
     "wsrun": "^3.6.5"
   }
 }

+ 46 - 41
storage-node/packages/cli/bin/cli.js

@@ -33,7 +33,8 @@ const FLAG_DEFINITIONS = {
   // TODO
 }
 
-const cli = meow(`
+const cli = meow(
+  `
   Usage:
     $ storage-cli command [arguments..] [key_file] [passphrase]
 
@@ -54,16 +55,17 @@ const cli = meow(`
     dev-init          Setup chain with Alice as lead and storage provider.
     dev-check         Check the chain is setup with Alice as lead and storage provider.
   `,
-  { flags: FLAG_DEFINITIONS })
+  { flags: FLAG_DEFINITIONS }
+)
 
-function assert_file (name, filename) {
+function assertFile(name, filename) {
   assert(filename, `Need a ${name} parameter to proceed!`)
  assert(fs.statSync(filename).isFile(), `Path "${filename}" is not a file, aborting!`)
 }
 
-function load_identity (api, filename, passphrase) {
+function loadIdentity(api, filename, passphrase) {
   if (filename) {
-    assert_file('keyfile', filename)
+    assertFile('keyfile', filename)
     api.identities.loadUnlock(filename, passphrase)
   } else {
     debug('Loading Alice as identity')
@@ -73,48 +75,45 @@ function load_identity (api, filename, passphrase) {
 
 const commands = {
   // add Alice well known account as storage provider
-  'dev-init': async (api) => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring
-    // load_identity(api)
-    let dev = require('./dev')
+  'dev-init': async api => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    const dev = require('./dev')
     return dev.init(api)
   },
   // Checks that the setup done by dev-init command was successful.
-  'dev-check': async (api) => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring
-    // load_identity(api)
-    let dev = require('./dev')
+  'dev-check': async api => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    const dev = require('./dev')
     return dev.check(api)
   },
   // The upload method is not correctly implemented
   // needs to get the liaison after creating a data object,
   // resolve the ipns id to the asset put api url of the storage-node
   // before uploading..
-  'upload': async (api, url, filename, do_type_id, keyfile, passphrase) => {
-    load_identity(keyfile, passphrase)
+  upload: async (api, url, filename, doTypeId, keyfile, passphrase) => {
+    loadIdentity(keyfile, passphrase)
     // Check parameters
-    assert_file('file', filename)
+    assertFile('file', filename)
 
     const size = fs.statSync(filename).size
    debug(`File "${filename}" is ${chalk.green(size)} Bytes.`)
 
-    if (!do_type_id) {
-      do_type_id = 1
+    if (!doTypeId) {
+      doTypeId = 1
     }
 
-    debug('Data Object Type ID is: ' + chalk.green(do_type_id))
+    debug('Data Object Type ID is: ' + chalk.green(doTypeId))
 
     // Generate content ID
     // FIXME this require path is like this because of
     // https://github.com/Joystream/apps/issues/207
     const { ContentId } = require('@joystream/types/media')
-    var cid = ContentId.generate()
+    let cid = ContentId.generate()
     cid = cid.encode().toString()
     debug('Generated content ID: ' + chalk.green(cid))
 
     // Create Data Object
-    const data_object = await api.assets.createDataObject(
-      api.identities.key.address, cid, do_type_id, size)
+    await api.assets.createDataObject(api.identities.key.address, cid, doTypeId, size)
     debug('Data object created.')
 
     // TODO in future, optionally contact liaison here?
@@ -124,12 +123,12 @@ const commands = {
 
     const f = fs.createReadStream(filename)
     const opts = {
-      url: url,
+      url,
       headers: {
         'content-type': '',
-        'content-length': `${size}`
+        'content-length': `${size}`,
       },
-      json: true
+      json: true,
     }
     return new Promise((resolve, reject) => {
       const r = request.put(opts, (error, response, body) => {
@@ -151,15 +150,15 @@ const commands = {
   // needs to be updated to take a content id and resolve it a potential set
   // of providers that has it, and select one (possibly try more than one provider)
   // to fetch it from the get api url of a provider..
-  'download': async (api, url, content_id, filename) => {
+  download: async (api, url, contentId, filename) => {
     const request = require('request')
-    url = `${url}asset/v0/${content_id}`
+    url = `${url}asset/v0/${contentId}`
     debug('Downloading URL', chalk.green(url), 'to', chalk.green(filename))
 
     const f = fs.createWriteStream(filename)
     const opts = {
-      url: url,
-      json: true
+      url,
+      json: true,
     }
     return new Promise((resolve, reject) => {
       const r = request.get(opts, (error, response, body) => {
@@ -168,9 +167,15 @@ const commands = {
           return
         }
 
-        debug('Downloading', chalk.green(response.headers['content-type']), 'of size', chalk.green(response.headers['content-length']), '...')
+        debug(
+          'Downloading',
+          chalk.green(response.headers['content-type']),
+          'of size',
+          chalk.green(response.headers['content-length']),
+          '...'
+        )
 
-        f.on('error', (err) => {
+        f.on('error', err => {
           reject(err)
         })
 
@@ -187,17 +192,17 @@ const commands = {
     })
   },
   // similar to 'download' function
-  'head': async (api, url, content_id) => {
+  head: async (api, url, contentId) => {
     const request = require('request')
-    url = `${url}asset/v0/${content_id}`
+    url = `${url}asset/v0/${contentId}`
     debug('Checking URL', chalk.green(url), '...')
 
     const opts = {
-      url: url,
-      json: true
+      url,
+      json: true,
     }
     return new Promise((resolve, reject) => {
-      const r = request.head(opts, (error, response, body) => {
+      request.head(opts, (error, response, body) => {
         if (error) {
           reject(error)
           return
@@ -208,17 +213,17 @@ const commands = {
           return
         }
 
-        for (var propname in response.headers) {
+        for (const propname in response.headers) {
           debug(`  ${chalk.yellow(propname)}: ${response.headers[propname]}`)
         }
 
         resolve()
       })
     })
-  }
+  },
 }
 
-async function main () {
+async function main() {
   const api = await RuntimeApi.create()
 
   // Simple CLI commands
@@ -227,7 +232,7 @@ async function main () {
     throw new Error('Need a command to run!')
   }
 
-  if (commands.hasOwnProperty(command)) {
+  if (Object.prototype.hasOwnProperty.call(commands, command)) {
     // Command recognized
     const args = _.clone(cli.input).slice(1)
     await commands[command](api, ...args)
@@ -240,7 +245,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch((err) => {
+  .catch(err => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 15 - 18
storage-node/packages/cli/bin/dev.js

@@ -1,28 +1,25 @@
-/* eslint-disable no-console */
-
 'use strict'
 
 const debug = require('debug')('joystream:storage-cli:dev')
-const assert = require('assert')
 
 // Derivation path appended to well known development seed used on
 // development chains
 const ALICE_URI = '//Alice'
 const ROLE_ACCOUNT_URI = '//Colossus'
 
-function aliceKeyPair (api) {
+function aliceKeyPair(api) {
   return api.identities.keyring.addFromUri(ALICE_URI, null, 'sr25519')
 }
 
-function roleKeyPair (api) {
+function roleKeyPair(api) {
   return api.identities.keyring.addFromUri(ROLE_ACCOUNT_URI, null, 'sr25519')
 }
 
-function developmentPort () {
+function developmentPort() {
   return 3001
 }
 
-const check = async (api) => {
+const check = async api => {
   const roleAccountId = roleKeyPair(api).address
   const providerId = await api.workers.findProviderIdByRoleAccount(roleAccountId)
 
@@ -43,7 +40,7 @@ const check = async (api) => {
 // Setup Alice account on a developement chain as
 // a member, storage lead, and a storage provider using a deterministic
 // development key for the role account
-const init = async (api) => {
+const init = async api => {
   try {
     await check(api)
     return
@@ -82,7 +79,7 @@ const init = async (api) => {
   if (aliceMemberId === undefined) {
     debug('Registering Alice as member..')
     aliceMemberId = await api.identities.registerMember(alice, {
-      handle: 'alice'
+      handle: 'alice',
     })
   } else {
     debug('Alice is already a member')
@@ -90,10 +87,10 @@ const init = async (api) => {
 
   // Make alice the storage lead
   debug('Making Alice the storage Lead')
-  const leadOpeningId = await api.workers.dev_addStorageLeadOpening()
-  const leadApplicationId = await api.workers.dev_applyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
-  api.workers.dev_beginLeadOpeningReview(leadOpeningId)
-  await api.workers.dev_fillLeadOpening(leadOpeningId, leadApplicationId)
+  const leadOpeningId = await api.workers.devAddStorageLeadOpening()
+  const leadApplicationId = await api.workers.devApplyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
+  api.workers.devBeginLeadOpeningReview(leadOpeningId)
+  await api.workers.devFillLeadOpening(leadOpeningId, leadApplicationId)
 
   const leadAccount = await api.workers.getLeadRoleAccount()
   if (!leadAccount.eq(alice)) {
@@ -103,16 +100,16 @@ const init = async (api) => {
   // Create a storage openinging, apply, start review, and fill opening
   debug(`Making ${ROLE_ACCOUNT_URI} account a storage provider`)
 
-  const openingId = await api.workers.dev_addStorageOpening()
+  const openingId = await api.workers.devAddStorageOpening()
   debug(`created new storage opening: ${openingId}`)
 
-  const applicationId = await api.workers.dev_applyOnOpening(openingId, aliceMemberId, alice, roleAccount)
+  const applicationId = await api.workers.devApplyOnOpening(openingId, aliceMemberId, alice, roleAccount)
   debug(`applied with application id: ${applicationId}`)
 
-  api.workers.dev_beginStorageOpeningReview(openingId)
+  api.workers.devBeginStorageOpeningReview(openingId)
 
   debug(`filling storage opening`)
-  const providerId = await api.workers.dev_fillStorageOpening(openingId, applicationId)
+  const providerId = await api.workers.devFillStorageOpening(openingId, applicationId)
 
   debug(`Assigned storage provider id: ${providerId}`)
 
@@ -124,5 +121,5 @@ module.exports = {
   check,
   aliceKeyPair,
   roleKeyPair,
-  developmentPort
+  developmentPort,
 }

+ 1 - 1
storage-node/packages/cli/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 59 - 61
storage-node/packages/colossus/bin/cli.js

@@ -25,38 +25,39 @@ const FLAG_DEFINITIONS = {
   port: {
     type: 'number',
     alias: 'p',
-    default: 3000
+    default: 3000,
   },
   keyFile: {
     type: 'string',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
+    },
   },
   publicUrl: {
     type: 'string',
     alias: 'u',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
+    },
   },
   passphrase: {
-    type: 'string'
+    type: 'string',
   },
   wsProvider: {
     type: 'string',
-    default: 'ws://localhost:9944'
+    default: 'ws://localhost:9944',
   },
   providerId: {
     type: 'number',
     alias: 'i',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
-  }
+    },
+  },
 }
 
-const cli = meow(`
+const cli = meow(
+  `
   Usage:
     $ colossus [command] [arguments]
 
@@ -76,14 +77,15 @@ const cli = meow(`
     --port=PORT, -p PORT    Port number to listen on, defaults to 3000.
     --ws-provider WS_URL    Joystream-node websocket provider, defaults to ws://localhost:9944
   `,
-  { flags: FLAG_DEFINITIONS })
+  { flags: FLAG_DEFINITIONS }
+)
 
 // All-important banner!
-function banner () {
+function banner() {
   console.log(chalk.blue(figlet.textSync('joystream', 'Speed')))
 }
 
-function start_express_app(app, port) {
+function startExpressApp(app, port) {
   const http = require('http')
   const server = http.createServer(app)
 
@@ -102,39 +104,39 @@ function start_express_app(app, port) {
 }
 
 // Start app
-function start_all_services ({ store, api, port }) {
+function startAllServices({ store, api, port }) {
   const app = require('../lib/app')(PROJECT_ROOT, store, api) // reduce falgs to only needed values
-  return start_express_app(app, port)
+  return startExpressApp(app, port)
 }
 
 // Start discovery service app only
-function start_discovery_service ({ api, port }) {
+function startDiscoveryService({ api, port }) {
   const app = require('../lib/discovery')(PROJECT_ROOT, api) // reduce flags to only needed values
-  return start_express_app(app, port)
+  return startExpressApp(app, port)
 }
 
 // Get an initialized storage instance
-function get_storage (runtime_api) {
+function getStorage(runtimeApi) {
   // TODO at some point, we can figure out what backend-specific connection
   // options make sense. For now, just don't use any configuration.
   const { Storage } = require('@joystream/storage-node-backend')
 
   const options = {
-    resolve_content_id: async (content_id) => {
+    resolve_content_id: async contentId => {
       // Resolve via API
-      const obj = await runtime_api.assets.getDataObject(content_id)
+      const obj = await runtimeApi.assets.getDataObject(contentId)
       if (!obj || obj.isNone) {
         return
       }
       // if obj.liaison_judgement !== Accepted .. throw ?
       return obj.unwrap().ipfs_content_id.toString()
-    }
+    },
   }
 
   return Storage.create(options)
 }
 
-async function init_api_production ({ wsProvider, providerId, keyFile, passphrase }) {
+async function initApiProduction({ wsProvider, providerId, keyFile, passphrase }) {
   // Load key information
   const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
@@ -150,28 +152,28 @@ async function init_api_production ({ wsProvider, providerId, keyFile, passphras
     account_file: keyFile,
     passphrase,
     provider_url: wsProvider,
-    storageProviderId: providerId
+    storageProviderId: providerId,
   })
 
   if (!api.identities.key) {
     throw new Error('Failed to unlock storage provider account')
   }
 
-  if (!await api.workers.isRoleAccountOfStorageProvider(api.storageProviderId, api.identities.key.address)) {
+  if (!(await api.workers.isRoleAccountOfStorageProvider(api.storageProviderId, api.identities.key.address))) {
     throw new Error('storage provider role account and storageProviderId are not associated with a worker')
   }
 
   return api
 }
 
-async function init_api_development () {
+async function initApiDevelopment() {
   // Load key information
   const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
   const wsProvider = 'ws://localhost:9944'
 
   const api = await RuntimeApi.create({
-    provider_url: wsProvider
+    provider_url: wsProvider,
   })
 
   const dev = require('../../cli/bin/dev')
@@ -183,39 +185,39 @@ async function init_api_development () {
   return api
 }
 
-function get_service_information (publicUrl) {
+function getServiceInformation(publicUrl) {
   // For now assume we run all services on the same endpoint
-  return({
+  return {
     asset: {
       version: 1, // spec version
-      endpoint: publicUrl
+      endpoint: publicUrl,
     },
     discover: {
       version: 1, // spec version
-      endpoint: publicUrl
-    }
-  })
+      endpoint: publicUrl,
+    },
+  }
 }
 
-async function announce_public_url (api, publicUrl) {
+async function announcePublicUrl(api, publicUrl) {
   // re-announce in future
-  const reannounce = function (timeoutMs) {
-    setTimeout(announce_public_url, timeoutMs, api, publicUrl)
+  const reannounce = function(timeoutMs) {
+    setTimeout(announcePublicUrl, timeoutMs, api, publicUrl)
   }
 
   debug('announcing public url')
   const { publish } = require('@joystream/service-discovery')
 
   try {
-    const serviceInformation = get_service_information(publicUrl)
+    const serviceInformation = getServiceInformation(publicUrl)
 
-    let keyId = await publish.publish(serviceInformation)
+    const keyId = await publish.publish(serviceInformation)
 
     await api.discovery.setAccountInfo(keyId)
 
     debug('publishing complete, scheduling next update')
 
-// >> sometimes after tx is finalized.. we are not reaching here!
+    // >> sometimes after tx is finalized.. we are not reaching here!
 
     // Reannounce before expiery. Here we are concerned primarily
     // with keeping the account information refreshed and 'available' in
@@ -230,61 +232,57 @@ async function announce_public_url (api, publicUrl) {
   }
 }
 
-function go_offline (api) {
-  return api.discovery.unsetAccountInfo()
-}
-
 // Simple CLI commands
-var command = cli.input[0]
+let command = cli.input[0]
 if (!command) {
   command = 'server'
 }
 
-async function start_colossus ({ api, publicUrl, port, flags }) {
+async function startColossus({ api, publicUrl, port, flags }) {
   // TODO: check valid url, and valid port number
-  const store = get_storage(api)
+  const store = getStorage(api)
   banner()
-  const { start_syncing } = require('../lib/sync')
-  start_syncing(api, { syncPeriod: SYNC_PERIOD_MS }, store)
-  announce_public_url(api, publicUrl)
-  return start_all_services({ store, api, port, flags }) // dont pass all flags only required values
+  const { startSyncing } = require('../lib/sync')
+  startSyncing(api, { syncPeriod: SYNC_PERIOD_MS }, store)
+  announcePublicUrl(api, publicUrl)
+  return startAllServices({ store, api, port, flags }) // dont pass all flags only required values
 }
 
 const commands = {
-  'server': async () => {
+  server: async () => {
     let publicUrl, port, api
 
     if (cli.flags.dev) {
       const dev = require('../../cli/bin/dev')
-      api = await init_api_development()
+      api = await initApiDevelopment()
       port = dev.developmentPort()
       publicUrl = `http://localhost:${port}/`
     } else {
-      api = await init_api_production(cli.flags)
+      api = await initApiProduction(cli.flags)
       publicUrl = cli.flags.publicUrl
       port = cli.flags.port
     }
 
-    return start_colossus({ api, publicUrl, port })
+    return startColossus({ api, publicUrl, port })
   },
-  'discovery': async () => {
+  discovery: async () => {
     debug('Starting Joystream Discovery Service')
     const { RuntimeApi } = require('@joystream/storage-runtime-api')
     const wsProvider = cli.flags.wsProvider
     const api = await RuntimeApi.create({ provider_url: wsProvider })
     const port = cli.flags.port
-    await start_discovery_service({ api, port })
-  }
+    await startDiscoveryService({ api, port })
+  },
 }
 
-async function main () {
+async function main() {
   // Simple CLI commands
-  var command = cli.input[0]
+  let command = cli.input[0]
   if (!command) {
     command = 'server'
   }
 
-  if (commands.hasOwnProperty(command)) {
+  if (Object.prototype.hasOwnProperty.call(commands, command)) {
     // Command recognized
     const args = _.clone(cli.input).slice(1)
     await commands[command](...args)
@@ -297,7 +295,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch((err) => {
+  .catch(err => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 30 - 32
storage-node/packages/colossus/lib/app.js

@@ -16,61 +16,59 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
 // Node requires
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
 // npm requires
-const express = require('express');
-const openapi = require('express-openapi');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const yaml = require('js-yaml');
+const express = require('express')
+const openapi = require('express-openapi')
+const bodyParser = require('body-parser')
+const cors = require('cors')
+const yaml = require('js-yaml')
 
 // Project requires
-const validateResponses = require('./middleware/validate_responses');
-const fileUploads = require('./middleware/file_uploads');
-const pagination = require('@joystream/storage-utils/pagination');
+const validateResponses = require('./middleware/validate_responses')
+const fileUploads = require('./middleware/file_uploads')
+const pagination = require('@joystream/storage-utils/pagination')
 
 // Configure app
-function create_app(project_root, storage, runtime)
-{
-  const app = express();
-  app.use(cors());
-  app.use(bodyParser.json());
+function createApp(projectRoot, storage, runtime) {
+  const app = express()
+  app.use(cors())
+  app.use(bodyParser.json())
   // FIXME app.use(bodyParser.urlencoded({ extended: true }));
 
   // Load & extend/configure API docs
-  var api = yaml.safeLoad(fs.readFileSync(
-    path.resolve(project_root, 'api-base.yml')));
-  api['x-express-openapi-additional-middleware'] = [validateResponses];
-  api['x-express-openapi-validation-strict'] = true;
+  let api = yaml.safeLoad(fs.readFileSync(path.resolve(projectRoot, 'api-base.yml')))
+  api['x-express-openapi-additional-middleware'] = [validateResponses]
+  api['x-express-openapi-validation-strict'] = true
 
-  api = pagination.openapi(api);
+  api = pagination.openapi(api)
 
   openapi.initialize({
     apiDoc: api,
-    app: app,
-    paths: path.resolve(project_root, 'paths'),
+    app,
+    paths: path.resolve(projectRoot, 'paths'),
     docsPath: '/swagger.json',
     consumesMiddleware: {
-      'multipart/form-data': fileUploads
+      'multipart/form-data': fileUploads,
     },
     dependencies: {
-      storage: storage,
-      runtime: runtime,
+      storage,
+      runtime,
     },
-  });
+  })
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res, next) {
-    res.status(err.status).json(err);
-  });
+  app.use(function(err, req, res) {
+    res.status(err.status).json(err)
+  })
 
-  return app;
+  return app
 }
 
-module.exports = create_app;
+module.exports = createApp

+ 26 - 28
storage-node/packages/colossus/lib/discovery.js

@@ -16,57 +16,55 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
 // npm requires
-const express = require('express');
-const openapi = require('express-openapi');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const yaml = require('js-yaml');
+const express = require('express')
+const openapi = require('express-openapi')
+const bodyParser = require('body-parser')
+const cors = require('cors')
+const yaml = require('js-yaml')
 
 // Node requires
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
 // Project requires
-const validateResponses = require('./middleware/validate_responses');
+const validateResponses = require('./middleware/validate_responses')
 
 // Configure app
-function create_app(project_root, runtime)
-{
-  const app = express();
-  app.use(cors());
-  app.use(bodyParser.json());
+function createApp(projectRoot, runtime) {
+  const app = express()
+  app.use(cors())
+  app.use(bodyParser.json())
   // FIXME app.use(bodyParser.urlencoded({ extended: true }));
 
   // Load & extend/configure API docs
-  var api = yaml.safeLoad(fs.readFileSync(
-    path.resolve(project_root, 'api-base.yml')));
-  api['x-express-openapi-additional-middleware'] = [validateResponses];
-  api['x-express-openapi-validation-strict'] = true;
+  const api = yaml.safeLoad(fs.readFileSync(path.resolve(projectRoot, 'api-base.yml')))
+  api['x-express-openapi-additional-middleware'] = [validateResponses]
+  api['x-express-openapi-validation-strict'] = true
 
   openapi.initialize({
     apiDoc: api,
-    app: app,
-    //paths: path.resolve(project_root, 'discovery_app_paths'),
+    app,
+    // paths: path.resolve(projectRoot, 'discovery_app_paths'),
     paths: {
       path: '/discover/v0/{id}',
-      module: require('../paths/discover/v0/{id}')
+      module: require('../paths/discover/v0/{id}'),
     },
     docsPath: '/swagger.json',
     dependencies: {
-      runtime: runtime,
+      runtime,
     },
-  });
+  })
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res, next) {
-    res.status(err.status).json(err);
-  });
+  app.use(function(err, req, res) {
+    res.status(err.status).json(err)
+  })
 
-  return app;
+  return app
 }
 
-module.exports = create_app;
+module.exports = createApp

+ 12 - 13
storage-node/packages/colossus/lib/middleware/file_uploads.js

@@ -16,29 +16,28 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const multer = require('multer');
+const multer = require('multer')
 
 // Taken from express-openapi examples
-module.exports = function(req, res, next)
-{
+module.exports = function(req, res, next) {
   multer().any()(req, res, function(err) {
     if (err) {
-      return next(err);
+      return next(err)
     }
     // Handle both single and multiple files
     const filesMap = req.files.reduce(
       (acc, f) =>
         Object.assign(acc, {
-          [f.fieldname]: (acc[f.fieldname] || []).concat(f)
+          [f.fieldname]: (acc[f.fieldname] || []).concat(f),
         }),
       {}
-    );
-    Object.keys(filesMap).forEach((fieldname) => {
-      const files = filesMap[fieldname];
-      req.body[fieldname] = files.length > 1 ? files.map(() => '') : '';
-    });
-    return next();
-  });
+    )
+    Object.keys(filesMap).forEach(fieldname => {
+      const files = filesMap[fieldname]
+      req.body[fieldname] = files.length > 1 ? files.map(() => '') : ''
+    })
+    return next()
+  })
 }

+ 23 - 23
storage-node/packages/colossus/lib/middleware/validate_responses.js

@@ -16,46 +16,46 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:middleware:validate');
+const debug = require('debug')('joystream:middleware:validate')
 
 // Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
-module.exports = function(req, res, next)
-{
-  const strictValidation = req.apiDoc['x-express-openapi-validation-strict'] ? true : false;
+module.exports = function(req, res, next) {
+  const strictValidation = !!req.apiDoc['x-express-openapi-validation-strict']
   if (typeof res.validateResponse === 'function') {
-    const send = res.send;
+    const send = res.send
     res.send = function expressOpenAPISend(...args) {
-      const onlyWarn = !strictValidation;
+      const onlyWarn = !strictValidation
       if (res.get('x-express-openapi-validation-error-for') !== undefined) {
-        return send.apply(res, args);
+        return send.apply(res, args)
       }
       if (res.get('x-express-openapi-validation-for') !== undefined) {
-        return send.apply(res, args);
+        return send.apply(res, args)
       }
 
-      const body = args[0];
-      let validation = res.validateResponse(res.statusCode, body);
-      let validationMessage;
+      const body = args[0]
+      let validation = res.validateResponse(res.statusCode, body)
+      let validationMessage
       if (validation === undefined) {
-        validation = { message: undefined, errors: undefined };
+        validation = { message: undefined, errors: undefined }
       }
       if (validation.errors) {
-        const errorList = Array.from(validation.errors).map((_) => _.message).join(',');
-        validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`;
-        debug(validationMessage);
+        const errorList = Array.from(validation.errors)
+          .map(_ => _.message)
+          .join(',')
+        validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`
+        debug(validationMessage)
         // Set to avoid a loop, and to provide the original status code
-        res.set('x-express-openapi-validation-error-for', res.statusCode.toString());
+        res.set('x-express-openapi-validation-error-for', res.statusCode.toString())
       }
       if ((onlyWarn || !validation.errors) && res.statusCode) {
-        res.set('x-express-openapi-validation-for', res.statusCode.toString());
-        return send.apply(res, args);
-      } else {
-        res.status(500);
-        return res.json({ error: validationMessage });
+        res.set('x-express-openapi-validation-for', res.statusCode.toString())
+        return send.apply(res, args)
       }
+      res.status(500)
+      return res.json({ error: validationMessage })
     }
   }
-  next();
+  next()
 }

+ 34 - 38
storage-node/packages/colossus/lib/sync.js

@@ -16,99 +16,95 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:sync');
+const debug = require('debug')('joystream:sync')
 
-async function sync_callback(api, storage) {
+async function syncCallback(api, storage) {
   // The first step is to gather all data objects from chain.
   // TODO: in future, limit to a configured tranche
   // FIXME this isn't actually on chain yet, so we'll fake it.
-  const knownContentIds = await api.assets.getKnownContentIds() || [];
+  const knownContentIds = (await api.assets.getKnownContentIds()) || []
 
-  const role_addr = api.identities.key.address
+  const roleAddress = api.identities.key.address
   const providerId = api.storageProviderId
 
   // Iterate over all sync objects, and ensure they're synced.
-  const allChecks = knownContentIds.map(async (content_id) => {
-    let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, content_id);
+  const allChecks = knownContentIds.map(async contentId => {
+    // eslint-disable-next-line prefer-const
+    let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, contentId)
 
     // get the data object
     // make sure the data object was Accepted by the liaison,
     // don't just blindly attempt to fetch them
 
-    let fileLocal;
+    let fileLocal
     try {
       // check if we have content or not
-      let stats = await storage.stat(content_id);
-      fileLocal = stats.local;
+      const stats = await storage.stat(contentId)
+      fileLocal = stats.local
     } catch (err) {
       // on error stating or timeout
-      debug(err.message);
+      debug(err.message)
       // we don't have content if we can't stat it
-      fileLocal = false;
+      fileLocal = false
     }
 
     if (!fileLocal) {
       try {
-        await storage.synchronize(content_id);
+        await storage.synchronize(contentId)
       } catch (err) {
         // duplicate logging
         // debug(err.message)
         return
       }
       // why are we returning, if we synced the file
-      return;
+      return
     }
 
     if (!relationship) {
       // create relationship
-      debug(`Creating new storage relationship for ${content_id.encode()}`);
+      debug(`Creating new storage relationship for ${contentId.encode()}`)
       try {
-        relationshipId = await api.assets.createAndReturnStorageRelationship(role_addr, providerId, content_id);
-        await api.assets.toggleStorageRelationshipReady(role_addr, providerId, relationshipId, true);
+        relationshipId = await api.assets.createAndReturnStorageRelationship(roleAddress, providerId, contentId)
+        await api.assets.toggleStorageRelationshipReady(roleAddress, providerId, relationshipId, true)
       } catch (err) {
-        debug(`Error creating new storage relationship ${content_id.encode()}: ${err.stack}`);
-        return;
+        debug(`Error creating new storage relationship ${contentId.encode()}: ${err.stack}`)
+        return
       }
     } else if (!relationship.ready) {
-      debug(`Updating storage relationship to ready for ${content_id.encode()}`);
+      debug(`Updating storage relationship to ready for ${contentId.encode()}`)
       // update to ready. (Why would there be a relationship set to ready: false?)
       try {
-        await api.assets.toggleStorageRelationshipReady(role_addr, providerId, relationshipId, true);
-      } catch(err) {
-        debug(`Error setting relationship ready ${content_id.encode()}: ${err.stack}`);
+        await api.assets.toggleStorageRelationshipReady(roleAddress, providerId, relationshipId, true)
+      } catch (err) {
+        debug(`Error setting relationship ready ${contentId.encode()}: ${err.stack}`)
       }
     } else {
       // we already have content and a ready relationship set. No need to do anything
-      // debug(`content already stored locally ${content_id.encode()}`);
+      // debug(`content already stored locally ${contentId.encode()}`);
     }
-  });
-
+  })
 
-  return Promise.all(allChecks);
+  return Promise.all(allChecks)
 }
 
-
-async function sync_periodic(api, flags, storage)
-{
+async function syncPeriodic(api, flags, storage) {
   try {
     debug('Starting sync run...')
-    await sync_callback(api, storage)
+    await syncCallback(api, storage)
     debug('sync run complete')
   } catch (err) {
-    debug(`Error in sync_periodic ${err.stack}`);
+    debug(`Error in syncPeriodic ${err.stack}`)
   }
   // always try again
-  setTimeout(sync_periodic, flags.syncPeriod, api, flags, storage);
+  setTimeout(syncPeriodic, flags.syncPeriod, api, flags, storage)
 }
 
-
-function start_syncing(api, flags, storage)
-{
-  sync_periodic(api, flags, storage);
+function startSyncing(api, flags, storage) {
+  syncPeriodic(api, flags, storage)
 }
 
 module.exports = {
-  start_syncing: start_syncing,
+  startSyncing,
 }

+ 114 - 128
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -16,25 +16,22 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const path = require('path');
+const path = require('path')
 
-const debug = require('debug')('joystream:colossus:api:asset');
+const debug = require('debug')('joystream:colossus:api:asset')
 
-const util_ranges = require('@joystream/storage-utils/ranges');
-const filter = require('@joystream/storage-node-backend/filter');
+const utilRanges = require('@joystream/storage-utils/ranges')
+const filter = require('@joystream/storage-node-backend/filter')
 
-function error_handler(response, err, code)
-{
-  debug(err);
-  response.status((err.code || code) || 500).send({ message: err.toString() });
+function errorHandler(response, err, code) {
+  debug(err)
+  response.status(err.code || code || 500).send({ message: err.toString() })
 }
 
-
-module.exports = function(storage, runtime)
-{
-  var doc = {
+module.exports = function(storage, runtime) {
+  const doc = {
     // parameters for all operations in this path
     parameters: [
       {
@@ -49,203 +46,195 @@ module.exports = function(storage, runtime)
     ],
 
     // Head: report that ranges are OK
-    head: async function(req, res, _next)
-    {
-      const id = req.params.id;
+    async head(req, res) {
+      const id = req.params.id
 
       // Open file
       try {
-        const size = await storage.size(id);
-        const stream = await storage.open(id, 'r');
-        const type = stream.file_info.mime_type;
+        const size = await storage.size(id)
+        const stream = await storage.open(id, 'r')
+        const type = stream.fileInfo.mimeType
 
         // Close the stream; we don't need to fetch the file (if we haven't
         // already). Then return result.
-        stream.destroy();
+        stream.destroy()
 
-        res.status(200);
-        res.contentType(type);
-        res.header('Content-Disposition', 'inline');
-        res.header('Content-Transfer-Encoding', 'binary');
-        res.header('Accept-Ranges', 'bytes');
+        res.status(200)
+        res.contentType(type)
+        res.header('Content-Disposition', 'inline')
+        res.header('Content-Transfer-Encoding', 'binary')
+        res.header('Accept-Ranges', 'bytes')
         if (size > 0) {
-          res.header('Content-Length', size);
+          res.header('Content-Length', size)
         }
-        res.send();
+        res.send()
       } catch (err) {
-        error_handler(res, err, err.code);
+        errorHandler(res, err, err.code)
       }
     },
 
     // Put for uploads
-    put: async function(req, res, _next)
-    {
-      const id = req.params.id; // content id
+    async put(req, res) {
+      const id = req.params.id // content id
 
       // First check if we're the liaison for the name, otherwise we can bail
       // out already.
-      const role_addr = runtime.identities.key.address;
-      const providerId = runtime.storageProviderId;
-      let dataObject;
+      const roleAddress = runtime.identities.key.address
+      const providerId = runtime.storageProviderId
+      let dataObject
       try {
         debug('calling checkLiaisonForDataObject')
-        dataObject = await runtime.assets.checkLiaisonForDataObject(providerId, id);
+        dataObject = await runtime.assets.checkLiaisonForDataObject(providerId, id)
         debug('called checkLiaisonForDataObject')
       } catch (err) {
-        error_handler(res, err, 403);
-        return;
+        errorHandler(res, err, 403)
+        return
       }
 
       // We'll open a write stream to the backend, but reserve the right to
       // abort upload if the filters don't smell right.
-      var stream;
+      let stream
       try {
-        stream = await storage.open(id, 'w');
+        stream = await storage.open(id, 'w')
 
         // We don't know whether the filtering occurs before or after the
         // stream was finished, and can only commit if both passed.
-        var finished = false;
-        var accepted = false;
-        const possibly_commit = () => {
+        let finished = false
+        let accepted = false
+        const possiblyCommit = () => {
           if (finished && accepted) {
-            debug('Stream is finished and passed filters; committing.');
-            stream.commit();
+            debug('Stream is finished and passed filters; committing.')
+            stream.commit()
           }
-        };
-
+        }
 
-        stream.on('file_info', async (info) => {
+        stream.on('fileInfo', async info => {
           try {
-            debug('Detected file info:', info);
+            debug('Detected file info:', info)
 
             // Filter
-            const filter_result = filter({}, req.headers, info.mime_type);
-            if (200 != filter_result.code) {
-              debug('Rejecting content', filter_result.message);
-              stream.end();
-              res.status(filter_result.code).send({ message: filter_result.message });
+            const filterResult = filter({}, req.headers, info.mimeType)
+            if (200 !== filterResult.code) {
+              debug('Rejecting content', filterResult.message)
+              stream.end()
+              res.status(filterResult.code).send({ message: filterResult.message })
 
               // Reject the content
-              await runtime.assets.rejectContent(role_addr, providerId, id);
-              return;
+              await runtime.assets.rejectContent(roleAddress, providerId, id)
+              return
             }
-            debug('Content accepted.');
-            accepted = true;
+            debug('Content accepted.')
+            accepted = true
 
             // We may have to commit the stream.
-            possibly_commit();
+            possiblyCommit()
           } catch (err) {
-            error_handler(res, err);
+            errorHandler(res, err)
           }
-        });
+        })
 
         stream.on('finish', () => {
           try {
-            finished = true;
-            possibly_commit();
+            finished = true
+            possiblyCommit()
           } catch (err) {
-            error_handler(res, err);
+            errorHandler(res, err)
           }
-        });
+        })
 
-        stream.on('committed', async (hash) => {
+        stream.on('committed', async hash => {
           console.log('commited', dataObject)
           try {
             if (hash !== dataObject.ipfs_content_id.toString()) {
-              debug('Rejecting content. IPFS hash does not match value in objectId');
-              await runtime.assets.rejectContent(role_addr, providerId, id);
-              res.status(400).send({ message: "Uploaded content doesn't match IPFS hash" });
-              return;
+              debug('Rejecting content. IPFS hash does not match value in objectId')
+              await runtime.assets.rejectContent(roleAddress, providerId, id)
+              res.status(400).send({ message: "Uploaded content doesn't match IPFS hash" })
+              return
             }
 
             debug('accepting Content')
-            await runtime.assets.acceptContent(role_addr, providerId, id);
+            await runtime.assets.acceptContent(roleAddress, providerId, id)
 
             debug('creating storage relationship for newly uploaded content')
             // Create storage relationship and flip it to ready.
-            const dosr_id = await runtime.assets.createAndReturnStorageRelationship(role_addr, providerId, id);
+            const dosrId = await runtime.assets.createAndReturnStorageRelationship(roleAddress, providerId, id)
 
             debug('toggling storage relationship for newly uploaded content')
-            await runtime.assets.toggleStorageRelationshipReady(role_addr, providerId, dosr_id, true);
+            await runtime.assets.toggleStorageRelationshipReady(roleAddress, providerId, dosrId, true)
 
-            debug('Sending OK response.');
-            res.status(200).send({ message: 'Asset uploaded.' });
+            debug('Sending OK response.')
+            res.status(200).send({ message: 'Asset uploaded.' })
           } catch (err) {
-            debug(`${err.message}`);
-            error_handler(res, err);
+            debug(`${err.message}`)
+            errorHandler(res, err)
           }
-        });
-
-        stream.on('error', (err) => error_handler(res, err));
-        req.pipe(stream);
+        })
 
+        stream.on('error', err => errorHandler(res, err))
+        req.pipe(stream)
       } catch (err) {
-        error_handler(res, err);
-        return;
+        errorHandler(res, err)
+        return
       }
     },
 
     // Get content
-    get: async function(req, res, _next)
-    {
-      const id = req.params.id;
-      const download = req.query.download;
+    async get(req, res) {
+      const id = req.params.id
+      const download = req.query.download
 
       // Parse range header
-      var ranges;
+      let ranges
       if (!download) {
         try {
-          var range_header = req.headers['range'];
-          ranges = util_ranges.parse(range_header);
+          const rangeHeader = req.headers.range
+          ranges = utilRanges.parse(rangeHeader)
         } catch (err) {
           // Do nothing; it's ok to ignore malformed ranges and respond with the
           // full content according to https://www.rfc-editor.org/rfc/rfc7233.txt
         }
-        if (ranges && ranges.unit != 'bytes') {
+        if (ranges && ranges.unit !== 'bytes') {
           // Ignore ranges that are not byte units.
-          ranges = undefined;
+          ranges = undefined
         }
       }
-      debug('Requested range(s) is/are', ranges);
+      debug('Requested range(s) is/are', ranges)
 
       // Open file
       try {
-        const size = await storage.size(id);
-        const stream = await storage.open(id, 'r');
+        const size = await storage.size(id)
+        const stream = await storage.open(id, 'r')
 
         // Add a file extension to download requests if necessary. If the file
         // already contains an extension, don't add one.
-        var send_name = id;
-        const type = stream.file_info.mime_type;
+        let sendName = id
+        const type = stream.fileInfo.mimeType
         if (download) {
-          var ext = path.extname(send_name);
+          let ext = path.extname(sendName)
           if (!ext) {
-            ext = stream.file_info.ext;
+            ext = stream.fileInfo.ext
             if (ext) {
-              send_name = `${send_name}.${ext}`;
+              sendName = `${sendName}.${ext}`
             }
           }
         }
 
-        var opts = {
-          name: send_name,
-          type: type,
-          size: size,
-          ranges: ranges,
-          download: download,
-        };
-        util_ranges.send(res, stream, opts);
-
-
+        const opts = {
+          name: sendName,
+          type,
+          size,
+          ranges,
+          download,
+        }
+        utilRanges.send(res, stream, opts)
       } catch (err) {
-        error_handler(res, err, err.code);
+        errorHandler(res, err, err.code)
       }
-    }
-  };
+    },
+  }
 
   // OpenAPI specs
-  doc.get.apiDoc =
-  {
+  doc.get.apiDoc = {
     description: 'Download an asset.',
     operationId: 'assetData',
     tags: ['asset', 'data'],
@@ -279,16 +268,15 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-  doc.put.apiDoc =
-  {
+  doc.put.apiDoc = {
     description: 'Asset upload.',
     operationId: 'assetUpload',
     tags: ['asset', 'data'],
@@ -313,7 +301,7 @@ module.exports = function(storage, runtime)
               properties: {
                 message: {
                   type: 'string',
-                }
+                },
               },
             },
           },
@@ -324,17 +312,15 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-
-  doc.head.apiDoc =
-  {
+  doc.head.apiDoc = {
     description: 'Asset download information.',
     operationId: 'assetInfo',
     tags: ['asset', 'metadata'],
@@ -347,13 +333,13 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-  return doc;
-};
+  return doc
+}

+ 64 - 65
storage-node/packages/colossus/paths/discover/v0/{id}.js

@@ -1,12 +1,11 @@
 const { discover } = require('@joystream/service-discovery')
-const debug = require('debug')('joystream:colossus:api:discovery');
+const debug = require('debug')('joystream:colossus:api:discovery')
 
-const MAX_CACHE_AGE = 30 * 60 * 1000;
-const USE_CACHE = true;
+const MAX_CACHE_AGE = 30 * 60 * 1000
+const USE_CACHE = true
 
-module.exports = function(runtime)
-{
-  var doc = {
+module.exports = function(runtime) {
+  const doc = {
     // parameters for all operations in this path
     parameters: [
       {
@@ -21,71 +20,71 @@ module.exports = function(runtime)
     ],
 
     // Resolve Service Information
-    get: async function(req, res)
-    {
-        try {
-          var parsedId = parseInt(req.params.id);
-        } catch (err) {
-          return res.status(400).end();
-        }
+    async get(req, res) {
+      let parsedId
+      try {
+        parsedId = parseInt(req.params.id)
+      } catch (err) {
+        return res.status(400).end()
+      }
 
-        const id = parsedId
-        let cacheMaxAge = req.query.max_age;
+      const id = parsedId
+      let cacheMaxAge = req.query.max_age
 
-        if (cacheMaxAge) {
-          try {
-            cacheMaxAge = parseInt(cacheMaxAge);
-          } catch(err) {
-            cacheMaxAge = MAX_CACHE_AGE
-          }
-        } else {
-          cacheMaxAge = 0
+      if (cacheMaxAge) {
+        try {
+          cacheMaxAge = parseInt(cacheMaxAge)
+        } catch (err) {
+          cacheMaxAge = MAX_CACHE_AGE
         }
+      } else {
+        cacheMaxAge = 0
+      }
 
-        // todo - validate id before querying
+      // todo - validate id before querying
 
-        try {
-          debug(`resolving ${id}`);
-          const info = await discover.discover(id, runtime, USE_CACHE, cacheMaxAge);
-          if (info == null) {
-            debug('info not found');
-            res.status(404).end();
-          } else {
-            res.status(200).send(info);
-          }
-        } catch (err) {
-          debug(`${err}`);
+      try {
+        debug(`resolving ${id}`)
+        const info = await discover.discover(id, runtime, USE_CACHE, cacheMaxAge)
+        if (info === null) {
+          debug('info not found')
           res.status(404).end()
+        } else {
+          res.status(200).send(info)
         }
-    }
-  };
+      } catch (err) {
+        debug(`${err}`)
+        res.status(404).end()
+      }
+    },
+  }
 
-    // OpenAPI specs
-    doc.get.apiDoc = {
-        description: 'Resolve Service Information',
-        operationId: 'discover',
-        //tags: ['asset', 'data'],
-        responses: {
-            200: {
-                description: 'Wrapped JSON Service Information',
-                content: {
-                  'application/json': {
-                    schema: {
-                      required: ['serialized'],
-                      properties: {
-                        'serialized': {
-                          type: 'string'
-                        },
-                        'signature': {
-                          type: 'string'
-                        }
-                      },
-                    },
-                  }
-                }
-            }
-        }
-    }
+  // OpenAPI specs
+  doc.get.apiDoc = {
+    description: 'Resolve Service Information',
+    operationId: 'discover',
+    // tags: ['asset', 'data'],
+    responses: {
+      200: {
+        description: 'Wrapped JSON Service Information',
+        content: {
+          'application/json': {
+            schema: {
+              required: ['serialized'],
+              properties: {
+                serialized: {
+                  type: 'string',
+                },
+                signature: {
+                  type: 'string',
+                },
+              },
+            },
+          },
+        },
+      },
+    },
+  }
 
-    return doc;
-};
+  return doc
+}

+ 1 - 1
storage-node/packages/colossus/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 37 - 40
storage-node/packages/discovery/discover.js

@@ -7,9 +7,10 @@ const BN = require('bn.js')
 const { newExternallyControlledPromise } = require('@joystream/storage-utils/externalPromise')
 
 /**
- * Determines if code is running in a browser by testing for the global window object
+ * Determines if code is running in a browser by testing for the global window object.
+ * @return {boolean} returns result check.
  */
-function inBrowser () {
+function inBrowser() {
   return typeof window !== 'undefined'
 }
 
@@ -17,13 +18,13 @@ function inBrowser () {
  * Map storage-provider id to a Promise of a discovery result. The purpose
  * is to avoid concurrent active discoveries for the same provider.
  */
-var activeDiscoveries = {}
+const activeDiscoveries = {}
 
 /**
  * Map of storage provider id to string
  * Cache of past discovery lookup results
  */
-var accountInfoCache = {}
+const accountInfoCache = {}
 
 /**
  * After what period of time a cached record is considered stale, and would
@@ -38,17 +39,16 @@ const CACHE_TTL = 60 * 60 * 1000
  * @param { RuntimeApi } runtimeApi - api instance to query the chain
  * @returns { Promise<string | null> } - ipns multiformat address
  */
-async function getIpnsIdentity (storageProviderId, runtimeApi) {
+async function getIpnsIdentity(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
   // lookup ipns identity from chain corresponding to storageProviderId
   const info = await runtimeApi.discovery.getAccountInfo(storageProviderId)
 
-  if (info == null) {
+  if (info === null) {
     // no identity found on chain for account
     return null
-  } else {
-    return info.identity.toString()
   }
+  return info.identity.toString()
 }
 
 /**
@@ -61,11 +61,9 @@ async function getIpnsIdentity (storageProviderId, runtimeApi) {
  * @param {string} gateway - optional ipfs http gateway url to perform ipfs queries
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_ipfs_http_gateway (
-  storageProviderId, runtimeApi, gateway = 'http://localhost:8080') {
-
+async function discoverOverIpfsHttpGateway(storageProviderId, runtimeApi, gateway = 'http://localhost:8080') {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -73,7 +71,7 @@ async function discover_over_ipfs_http_gateway (
 
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
-  if (identity == null) {
+  if (identity === null) {
     // dont waste time trying to resolve if no identity was found
     throw new Error('no identity to resolve')
   }
@@ -97,9 +95,9 @@ async function discover_over_ipfs_http_gateway (
  * @param {string} discoverApiEndpoint - url for a colossus discovery api endpoint
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_joystream_discovery_service (storageProviderId, runtimeApi, discoverApiEndpoint) {
+async function discoverOverJoystreamDiscoveryService(storageProviderId, runtimeApi, discoverApiEndpoint) {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -108,13 +106,13 @@ async function discover_over_joystream_discovery_service (storageProviderId, run
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
   // dont waste time trying to resolve if no identity was found
-  if (identity == null) {
+  if (identity === null) {
     throw new Error('no identity to resolve')
   }
 
   if (!discoverApiEndpoint) {
     // Use bootstrap nodes
-    let discoveryBootstrapNodes = await runtimeApi.discovery.getBootstrapEndpoints()
+    const discoveryBootstrapNodes = await runtimeApi.discovery.getBootstrapEndpoints()
 
     if (discoveryBootstrapNodes.length) {
       discoverApiEndpoint = stripEndingSlash(discoveryBootstrapNodes[0].toString())
@@ -139,9 +137,9 @@ async function discover_over_joystream_discovery_service (storageProviderId, run
  * @param {RuntimeApi} runtimeApi - api instance to query the chain
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
+async function discoverOverLocalIpfsNode(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -149,26 +147,26 @@ async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
 
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
-  if (identity == null) {
+  if (identity === null) {
     // dont waste time trying to resolve if no identity was found
     throw new Error('no identity to resolve')
   }
 
-  const ipns_address = `/ipns/${identity}/`
+  const ipnsAddress = `/ipns/${identity}/`
 
   debug('resolved ipns to ipfs object')
   // Can this call hang forever!? can/should we set a timeout?
-  let ipfs_name = await ipfs.name.resolve(ipns_address, {
+  const ipfsName = await ipfs.name.resolve(ipnsAddress, {
     // don't recurse, there should only be one indirection to the service info file
     recursive: false,
-    nocache: false
+    nocache: false,
   })
 
-  debug('getting ipfs object', ipfs_name)
-  let data = await ipfs.get(ipfs_name) // this can sometimes hang forever!?! can we set a timeout?
+  debug('getting ipfs object', ipfsName)
+  const data = await ipfs.get(ipfsName) // this can sometimes hang forever!?! can we set a timeout?
 
   // there should only be one file published under the resolved path
-  let content = data[0].content
+  const content = data[0].content
 
   return JSON.parse(content)
 }
@@ -187,7 +185,7 @@ async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
  * @param {number} maxCacheAge - maximum age of a cached query that triggers automatic re-discovery
  * @returns { Promise<object | null> } - the published service information
  */
-async function discover (storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
+async function discover(storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
   storageProviderId = new BN(storageProviderId)
   const id = storageProviderId.toNumber()
   const cached = accountInfoCache[id]
@@ -195,30 +193,29 @@ async function discover (storageProviderId, runtimeApi, useCachedValue = false,
   if (cached && useCachedValue) {
     if (maxCacheAge > 0) {
       // get latest value
-      if (Date.now() > (cached.updated + maxCacheAge)) {
+      if (Date.now() > cached.updated + maxCacheAge) {
         return _discover(storageProviderId, runtimeApi)
       }
     }
     // refresh if cache if stale, new value returned on next cached query
-    if (Date.now() > (cached.updated + CACHE_TTL)) {
+    if (Date.now() > cached.updated + CACHE_TTL) {
       _discover(storageProviderId, runtimeApi)
     }
     // return best known value
     return cached.value
-  } else {
-    return _discover(storageProviderId, runtimeApi)
   }
+  return _discover(storageProviderId, runtimeApi)
 }
 
 /**
  * Internal method that handles concurrent discoveries and caching of results. Will
- * select the appropriate discovery protocol based on wether we are in a browser environemtn or not.
+ * select the appropriate discovery protocol based on whether we are in a browser environment or not.
  * If not in a browser it expects a local ipfs node to be running.
- * @param {number | BN | u64} storageProviderId
+ * @param {number | BN | u64} storageProviderId - ID of the storage provider
  * @param {RuntimeApi} runtimeApi - api instance for querying the chain
  * @returns { Promise<object | null> } - the published service information
  */
-async function _discover (storageProviderId, runtimeApi) {
+async function _discover(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
   const id = storageProviderId.toNumber()
 
@@ -235,16 +232,16 @@ async function _discover (storageProviderId, runtimeApi) {
   let result
   try {
     if (inBrowser()) {
-      result = await discover_over_joystream_discovery_service(storageProviderId, runtimeApi)
+      result = await discoverOverJoystreamDiscoveryService(storageProviderId, runtimeApi)
     } else {
-      result = await discover_over_local_ipfs_node(storageProviderId, runtimeApi)
+      result = await discoverOverLocalIpfsNode(storageProviderId, runtimeApi)
     }
 
     debug(result)
     result = JSON.stringify(result)
     accountInfoCache[id] = {
       value: result,
-      updated: Date.now()
+      updated: Date.now(),
     }
 
     deferredDiscovery.resolve(result)
@@ -269,7 +266,7 @@ async function _discover (storageProviderId, runtimeApi) {
 
 module.exports = {
   discover,
-  discover_over_joystream_discovery_service,
-  discover_over_ipfs_http_gateway,
-  discover_over_local_ipfs_node
+  discoverOverJoystreamDiscoveryService,
+  discoverOverIpfsHttpGateway,
+  discoverOverLocalIpfsNode,
 }

+ 29 - 32
storage-node/packages/discovery/example.js

@@ -3,38 +3,35 @@ const { RuntimeApi } = require('@joystream/storage-runtime-api')
 const { discover, publish } = require('./')
 
 async function main() {
-    // The assigned storage-provider id
-    const provider_id = 0
-
-    const runtimeApi = await RuntimeApi.create({
-        // Path to the role account key file of the provider
-        account_file: "/path/to/role_account_key_file.json",
-        storageProviderId: provider_id
-    })
-
-    let ipns_id = await publish.publish(
-        {
-            asset: {
-                version: 1,
-                endpoint: 'http://endpoint.com'
-            }
-        },
-        runtimeApi
-    )
-
-    console.log(ipns_id)
-
-    // register ipns_id on chain
-    await runtimeApi.setAccountInfo(ipfs_id)
-
-    let serviceInfo = await discover.discover(
-        provider_id,
-        runtimeApi
-    )
-
-    console.log(serviceInfo)
-
-    runtimeApi.api.disconnect()
+  // The assigned storage-provider id
+  const providerId = 0
+
+  const runtimeApi = await RuntimeApi.create({
+    // Path to the role account key file of the provider
+    account_file: '/path/to/role_account_key_file.json',
+    storageProviderId: providerId,
+  })
+
+  const ipnsId = await publish.publish(
+    {
+      asset: {
+        version: 1,
+        endpoint: 'http://endpoint.com',
+      },
+    },
+    runtimeApi
+  )
+
+  console.log(ipnsId)
+
+  // register ipnsId on chain
+  await runtimeApi.setAccountInfo(ipnsId)
+
+  const serviceInfo = await discover.discover(providerId, runtimeApi)
+
+  console.log(serviceInfo)
+
+  runtimeApi.api.disconnect()
 }
 
 main()

+ 3 - 4
storage-node/packages/discovery/index.js

@@ -1,5 +1,4 @@
-
 module.exports = {
-    discover : require('./discover'),
-    publish : require('./publish'),
-}
+  discover: require('./discover'),
+  publish: require('./publish'),
+}

+ 17 - 16
storage-node/packages/discovery/publish.js

@@ -1,4 +1,5 @@
 const ipfsClient = require('ipfs-http-client')
+
 const ipfs = ipfsClient('localhost', '5001', { protocol: 'http' })
 
 const debug = require('debug')('joystream:discovery:publish')
@@ -14,9 +15,9 @@ const PUBLISH_KEY = 'self'
  * Applies JSON serialization on the data object and converts the utf-8
  * string to a Buffer.
  * @param {object} data - json object
- * @returns {Buffer}
+ * @returns {Buffer} returns buffer from UTF-8 json
  */
-function bufferFrom (data) {
+function bufferFrom(data) {
   return Buffer.from(JSON.stringify(data), 'utf-8')
 }
 
@@ -24,11 +25,11 @@ function bufferFrom (data) {
  * Encodes the service info into a standard format see. /storage-node/docs/json-signing.md
  * To be able to add a signature over the json data. Signing is not currently implemented.
  * @param {object} info - json object
- * @returns {Buffer}
+ * @returns {Buffer} return buffer.
  */
-function encodeServiceInfo (info) {
+function encodeServiceInfo(info) {
   return bufferFrom({
-    serialized: JSON.stringify(info)
+    serialized: JSON.stringify(info),
   })
 }
 
@@ -36,35 +37,35 @@ function encodeServiceInfo (info) {
  * Publishes the service information, encoded using the standard defined in encodeServiceInfo()
  * to ipfs, using the local ipfs node's PUBLISH_KEY, and returns the key id used to publish.
  * What we refer to as the ipns id.
- * @param {object} service_info - the service information to publish
+ * @param {object} serviceInfo - the service information to publish
  * @returns {string} - the ipns id
  */
-async function publish (service_info) {
+async function publish(serviceInfo) {
   const keys = await ipfs.key.list()
-  let services_key = keys.find((key) => key.name === PUBLISH_KEY)
+  let servicesKey = keys.find(key => key.name === PUBLISH_KEY)
 
   // An ipfs node will always have the self key.
   // If the publish key is specified as anything else and it doesn't exist
   // we create it.
-  if (PUBLISH_KEY !== 'self' && !services_key) {
+  if (PUBLISH_KEY !== 'self' && !servicesKey) {
     debug('generating ipns services key')
-    services_key = await ipfs.key.gen(PUBLISH_KEY, {
+    servicesKey = await ipfs.key.gen(PUBLISH_KEY, {
       type: 'rsa',
-      size: 2048
+      size: 2048,
     })
   }
 
-  if (!services_key) {
+  if (!servicesKey) {
     throw new Error('No IPFS publishing key available!')
   }
 
   debug('adding service info file to node')
-  const files = await ipfs.add(encodeServiceInfo(service_info))
+  const files = await ipfs.add(encodeServiceInfo(serviceInfo))
 
   debug('publishing...')
   const published = await ipfs.name.publish(files[0].hash, {
     key: PUBLISH_KEY,
-    resolve: false
+    resolve: false,
     // lifetime: // string - Time duration of the record. Default: 24h
     // ttl:      // string - Time duration this record should be cached
   })
@@ -79,9 +80,9 @@ async function publish (service_info) {
 
   // Return the key id under which the content was published. Which is used
   // to lookup the actual ipfs content id of the published service information
-  return services_key.id
+  return servicesKey.id
 }
 
 module.exports = {
-  publish
+  publish,
 }

+ 1 - 1
storage-node/packages/discovery/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 100 - 85
storage-node/packages/helios/bin/cli.js

@@ -6,7 +6,7 @@ const { discover } = require('@joystream/service-discovery')
 const axios = require('axios')
 const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
 
-async function main () {
+async function main() {
   const runtime = await RuntimeApi.create()
   const { api } = runtime
 
@@ -18,83 +18,98 @@ async function main () {
   const { ids: storageProviders } = await runtime.workers.getAllProviders()
   console.log(`Found ${storageProviders.length} staked providers`)
 
-  const storageProviderAccountInfos = await Promise.all(storageProviders.map(async (providerId) => {
-    return ({
-      providerId,
-      info: await runtime.discovery.getAccountInfo(providerId)
+  const storageProviderAccountInfos = await Promise.all(
+    storageProviders.map(async providerId => {
+      return {
+        providerId,
+        info: await runtime.discovery.getAccountInfo(providerId),
+      }
     })
-  }))
+  )
 
   // providers that have updated their account info and published ipfs id
   // considered live if the record hasn't expired yet
-  const liveProviders = storageProviderAccountInfos.filter(({info}) => {
+  const liveProviders = storageProviderAccountInfos.filter(({ info }) => {
     return info && info.expires_at.gte(currentHeight)
   })
 
-  const downProviders = storageProviderAccountInfos.filter(({info}) => {
-    return info == null
+  const downProviders = storageProviderAccountInfos.filter(({ info }) => {
+    return info === null
   })
 
-  const expiredTtlProviders = storageProviderAccountInfos.filter(({info}) => {
+  const expiredTtlProviders = storageProviderAccountInfos.filter(({ info }) => {
     return info && currentHeight.gte(info.expires_at)
   })
 
-  let providersStatuses = mapInfoToStatus(liveProviders, currentHeight)
+  const providersStatuses = mapInfoToStatus(liveProviders, currentHeight)
   console.log('\n== Live Providers\n', providersStatuses)
 
-  let expiredProviderStatuses = mapInfoToStatus(expiredTtlProviders, currentHeight)
+  const expiredProviderStatuses = mapInfoToStatus(expiredTtlProviders, currentHeight)
   console.log('\n== Expired Providers\n', expiredProviderStatuses)
 
-  console.log('\n== Down Providers!\n', downProviders.map(provider => {
-    return ({
-      providerId: provider.providerId
+  console.log(
+    '\n== Down Providers!\n',
+    downProviders.map(provider => {
+      return {
+        providerId: provider.providerId,
+      }
     })
-  }))
+  )
 
   // Resolve IPNS identities of providers
   console.log('\nResolving live provider API Endpoints...')
-  let endpoints = await Promise.all(providersStatuses.map(async ({providerId}) => {
-    try {
-      let serviceInfo = await discover.discover_over_joystream_discovery_service(providerId, runtime)
-
-      if (serviceInfo == null) {
-        console.log(`provider ${providerId} has not published service information`)
+  const endpoints = await Promise.all(
+    providersStatuses.map(async ({ providerId }) => {
+      try {
+        const serviceInfo = await discover.discoverOverJoystreamDiscoveryService(providerId, runtime)
+
+        if (serviceInfo === null) {
+          console.log(`provider ${providerId} has not published service information`)
+          return { providerId, endpoint: null }
+        }
+
+        const info = JSON.parse(serviceInfo.serialized)
+        console.log(`${providerId} -> ${info.asset.endpoint}`)
+        return { providerId, endpoint: info.asset.endpoint }
+      } catch (err) {
+        console.log('resolve failed for id', providerId, err.message)
         return { providerId, endpoint: null }
       }
-
-      let info = JSON.parse(serviceInfo.serialized)
-      console.log(`${providerId} -> ${info.asset.endpoint}`)
-      return { providerId, endpoint: info.asset.endpoint }
-    } catch (err) {
-      console.log('resolve failed for id', providerId, err.message)
-      return { providerId, endpoint: null }
-    }
-  }))
+    })
+  )
 
   console.log('\nChecking API Endpoints are online')
-  await Promise.all(endpoints.map(async (provider) => {
-    if (!provider.endpoint) {
-      console.log('skipping', provider.address)
-      return
-    }
-    const swaggerUrl = `${stripEndingSlash(provider.endpoint)}/swagger.json`
-    let error
-    try {
-      await axios.get(swaggerUrl)
-      // maybe print out api version information to detect which version of colossus is running?
-      // or add anothe api endpoint for diagnostics information
-    } catch (err) { error = err }
-    console.log(`${provider.endpoint} - ${error ? error.message : 'OK'}`)
-  }))
-
-  let knownContentIds = await runtime.assets.getKnownContentIds()
+  await Promise.all(
+    endpoints.map(async provider => {
+      if (!provider.endpoint) {
+        console.log('skipping', provider.address)
+        return
+      }
+      const swaggerUrl = `${stripEndingSlash(provider.endpoint)}/swagger.json`
+      let error
+      try {
+        await axios.get(swaggerUrl)
+        // maybe print out api version information to detect which version of colossus is running?
+        // or add anothe api endpoint for diagnostics information
+      } catch (err) {
+        error = err
+      }
+      console.log(`${provider.endpoint} - ${error ? error.message : 'OK'}`)
+    })
+  )
+
+  const knownContentIds = await runtime.assets.getKnownContentIds()
   console.log(`\nData Directory has ${knownContentIds.length} assets`)
 
   // Check which providers are reporting a ready relationship for each asset
-  await Promise.all(knownContentIds.map(async (contentId) => {
-    let [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
-    console.log(`${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`)
-  }))
+  await Promise.all(
+    knownContentIds.map(async contentId => {
+      const [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
+      console.log(
+        `${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`
+      )
+    })
+  )
 
   // interesting disconnect doesn't work unless an explicit provider was created
   // for underlying api instance
@@ -103,44 +118,45 @@ async function main () {
 
   console.log(`\nChecking available assets on providers (this can take some time)...`)
   endpoints.forEach(async ({ providerId, endpoint }) => {
-    if (!endpoint) { return }
+    if (!endpoint) {
+      return
+    }
     const total = knownContentIds.length
-    let { found } = await countContentAvailability(knownContentIds, endpoint)
+    const { found } = await countContentAvailability(knownContentIds, endpoint)
     console.log(`provider ${providerId}: has ${found} out of ${total}`)
   })
 }
 
-function mapInfoToStatus (providers, currentHeight) {
-  return providers.map(({providerId, info}) => {
+function mapInfoToStatus(providers, currentHeight) {
+  return providers.map(({ providerId, info }) => {
     if (info) {
       return {
         providerId,
         identity: info.identity.toString(),
         expiresIn: info.expires_at.sub(currentHeight).toNumber(),
-        expired: currentHeight.gte(info.expires_at)
-      }
-    } else {
-      return {
-        providerId,
-        identity: null,
-        status: 'down'
+        expired: currentHeight.gte(info.expires_at),
       }
     }
+    return {
+      providerId,
+      identity: null,
+      status: 'down',
+    }
   })
 }
 
 // HTTP HEAD with axios all known content ids on each provider
-async function countContentAvailability (contentIds, source) {
-  let content = {}
+async function countContentAvailability(contentIds, source) {
+  const content = {}
   let found = 0
   let missing = 0
   for (let i = 0; i < contentIds.length; i++) {
     const assetUrl = makeAssetUrl(contentIds[i], source)
     try {
-      let info = await axios.head(assetUrl)
+      const info = await axios.head(assetUrl)
       content[encodeAddress(contentIds[i])] = {
         type: info.headers['content-type'],
-        bytes: info.headers['content-length']
+        bytes: info.headers['content-length'],
       }
       // TODO: cross check against dataobject size
       found++
@@ -152,32 +168,31 @@ async function countContentAvailability (contentIds, source) {
   return { found, missing, content }
 }
 
-function makeAssetUrl (contentId, source) {
+function makeAssetUrl(contentId, source) {
   source = stripEndingSlash(source)
   return `${source}/asset/v0/${encodeAddress(contentId)}`
 }
 
-async function assetRelationshipState (api, contentId, providers) {
-  let dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
+async function assetRelationshipState(api, contentId, providers) {
+  const dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
 
-  let relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
+  const relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
 
   // how many relationships associated with active providers and in ready state
-  let activeRelationships = await Promise.all(relationshipIds.map(async (id) => {
-    let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
-    relationship = relationship.unwrap()
-    // only interested in ready relationships
-    if (!relationship.ready) {
-      return undefined
-    }
-    // Does the relationship belong to an active provider ?
-    return providers.find((provider) => relationship.storage_provider.eq(provider))
-  }))
-
-  return ([
-    activeRelationships.filter(active => active).length,
-    dataObject.unwrap().liaison_judgement
-  ])
+  const activeRelationships = await Promise.all(
+    relationshipIds.map(async id => {
+      let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
+      relationship = relationship.unwrap()
+      // only interested in ready relationships
+      if (!relationship.ready) {
+        return undefined
+      }
+      // Does the relationship belong to an active provider ?
+      return providers.find(provider => relationship.storage_provider.eq(provider))
+    })
+  )
+
+  return [activeRelationships.filter(active => active).length, dataObject.unwrap().liaison_judgement]
 }
 
 main()

+ 1 - 1
storage-node/packages/helios/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 22 - 20
storage-node/packages/runtime-api/assets.js

@@ -3,7 +3,7 @@
 const debug = require('debug')('joystream:runtime:assets')
 const { decodeAddress } = require('@polkadot/keyring')
 
-function parseContentId (contentId) {
+function parseContentId(contentId) {
   try {
     return decodeAddress(contentId)
   } catch (err) {
@@ -15,21 +15,21 @@ function parseContentId (contentId) {
  * Add asset related functionality to the substrate API.
  */
 class AssetsApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new AssetsApi()
     ret.base = base
-    await ret.init()
+    await AssetsApi.init()
     return ret
   }
 
-  async init () {
+  static async init() {
     debug('Init')
   }
 
   /*
    * Create and return a data object.
    */
-  async createDataObject (accountId, memberId, contentId, doTypeId, size, ipfsCid) {
+  async createDataObject(accountId, memberId, contentId, doTypeId, size, ipfsCid) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.addContent(memberId, contentId, doTypeId, size, ipfsCid)
     await this.base.signAndSend(accountId, tx)
@@ -42,7 +42,7 @@ class AssetsApi {
   /*
    * Return the Data Object for a contendId
    */
-  async getDataObject (contentId) {
+  async getDataObject(contentId) {
     contentId = parseContentId(contentId)
     return this.base.api.query.dataDirectory.dataObjectByContentId(contentId)
   }
@@ -55,7 +55,7 @@ class AssetsApi {
    *
    * Each failure errors out, success returns the data object.
    */
-  async checkLiaisonForDataObject (storageProviderId, contentId) {
+  async checkLiaisonForDataObject(storageProviderId, contentId) {
     contentId = parseContentId(contentId)
 
     let obj = await this.getDataObject(contentId)
@@ -80,7 +80,7 @@ class AssetsApi {
   /*
    * Sets the data object liaison judgement to Accepted
    */
-  async acceptContent (providerAccoundId, storageProviderId, contentId) {
+  async acceptContent(providerAccoundId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.acceptContent(storageProviderId, contentId)
     return this.base.signAndSend(providerAccoundId, tx)
@@ -89,7 +89,7 @@ class AssetsApi {
   /*
    * Sets the data object liaison judgement to Rejected
    */
-  async rejectContent (providerAccountId, storageProviderId, contentId) {
+  async rejectContent(providerAccountId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.rejectContent(storageProviderId, contentId)
     return this.base.signAndSend(providerAccountId, tx)
@@ -98,7 +98,7 @@ class AssetsApi {
   /*
    * Creates storage relationship for a data object and provider
    */
-  async createStorageRelationship (providerAccountId, storageProviderId, contentId, callback) {
+  async createStorageRelationship(providerAccountId, storageProviderId, contentId, callback) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataObjectStorageRegistry.addRelationship(storageProviderId, contentId)
 
@@ -109,16 +109,16 @@ class AssetsApi {
   /*
    * Gets storage relationship for contentId for the given provider
    */
-  async getStorageRelationshipAndId (storageProviderId, contentId) {
+  async getStorageRelationshipAndId(storageProviderId, contentId) {
     contentId = parseContentId(contentId)
-    let rids = await this.base.api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
+    const rids = await this.base.api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
 
     while (rids.length) {
       const relationshipId = rids.shift()
       let relationship = await this.base.api.query.dataObjectStorageRegistry.relationships(relationshipId)
       relationship = relationship.unwrap()
       if (relationship.storage_provider.eq(storageProviderId)) {
-        return ({ relationship, relationshipId })
+        return { relationship, relationshipId }
       }
     }
 
@@ -128,12 +128,14 @@ class AssetsApi {
   /*
    * Creates storage relationship for a data object and provider and returns the relationship id
    */
-  async createAndReturnStorageRelationship (providerAccountId, storageProviderId, contentId) {
+  async createAndReturnStorageRelationship(providerAccountId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, (events) => {
-          events.forEach((event) => {
+        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, events => {
+          events.forEach(event => {
             resolve(event[1].DataObjectStorageRelationshipId)
           })
         })
@@ -146,8 +148,8 @@ class AssetsApi {
   /*
    * Set the ready state for a data object storage relationship to the new value
    */
-  async toggleStorageRelationshipReady (providerAccountId, storageProviderId, dosrId, ready) {
-    var tx = ready
+  async toggleStorageRelationshipReady(providerAccountId, storageProviderId, dosrId, ready) {
+    const tx = ready
       ? this.base.api.tx.dataObjectStorageRegistry.setRelationshipReady(storageProviderId, dosrId)
       : this.base.api.tx.dataObjectStorageRegistry.unsetRelationshipReady(storageProviderId, dosrId)
     return this.base.signAndSend(providerAccountId, tx)
@@ -156,11 +158,11 @@ class AssetsApi {
   /*
    * Returns array of know content ids
    */
-  async getKnownContentIds () {
+  async getKnownContentIds() {
     return this.base.api.query.dataDirectory.knownContentIds()
   }
 }
 
 module.exports = {
-  AssetsApi
+  AssetsApi,
 }

+ 25 - 36
storage-node/packages/runtime-api/balances.js

@@ -16,75 +16,64 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:runtime:balances');
-
-const { IdentitiesApi } = require('@joystream/storage-runtime-api/identities');
+const debug = require('debug')('joystream:runtime:balances')
 
 /*
  * Bundle API calls related to account balances.
  */
-class BalancesApi
-{
-  static async create(base)
-  {
-    const ret = new BalancesApi();
-    ret.base = base;
-    await ret.init();
-    return ret;
+class BalancesApi {
+  static async create(base) {
+    const ret = new BalancesApi()
+    ret.base = base
+    await BalancesApi.init()
+    return ret
   }
 
-  async init(account_file)
-  {
-    debug('Init');
+  static async init() {
+    debug('Init')
   }
 
   /*
    * Return true/false if the account has the minimum balance given.
    */
-  async hasMinimumBalanceOf(accountId, min)
-  {
-    const balance = await this.freeBalance(accountId);
+  async hasMinimumBalanceOf(accountId, min) {
+    const balance = await this.freeBalance(accountId)
     if (typeof min === 'number') {
-      return balance.cmpn(min) >= 0;
-    }
-    else {
-      return balance.cmp(min) >= 0;
+      return balance.cmpn(min) >= 0
     }
+    return balance.cmp(min) >= 0
   }
 
   /*
    * Return the account's current free balance.
    */
-  async freeBalance(accountId)
-  {
-    const decoded = this.base.identities.keyring.decodeAddress(accountId, true);
-    return this.base.api.query.balances.freeBalance(decoded);
+  async freeBalance(accountId) {
+    const decoded = this.base.identities.keyring.decodeAddress(accountId, true)
+    return this.base.api.query.balances.freeBalance(decoded)
   }
 
   /*
    * Return the base transaction fee.
    */
-  baseTransactionFee()
-  {
-    return this.base.api.consts.transactionPayment.transactionBaseFee;
+  baseTransactionFee() {
+    return this.base.api.consts.transactionPayment.transactionBaseFee
   }
 
   /*
    * Transfer amount currency from one address to another. The sending
    * address must be an unlocked key pair!
    */
-  async transfer(from, to, amount)
-  {
-    const decode = require('@polkadot/keyring').decodeAddress;
-    const to_decoded = decode(to, true);
+  async transfer(from, to, amount) {
+    const decode = require('@polkadot/keyring').decodeAddress
+    const toDecoded = decode(to, true)
 
-    const tx = this.base.api.tx.balances.transfer(to_decoded, amount);
-    return this.base.signAndSend(from, tx);
+    const tx = this.base.api.tx.balances.transfer(toDecoded, amount)
+    return this.base.signAndSend(from, tx)
   }
 }
 
 module.exports = {
-  BalancesApi: BalancesApi,
+  BalancesApi,
 }

+ 12 - 16
storage-node/packages/runtime-api/discovery.js

@@ -6,40 +6,37 @@ const debug = require('debug')('joystream:runtime:discovery')
  * Add discovery related functionality to the substrate API.
  */
 class DiscoveryApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new DiscoveryApi()
     ret.base = base
-    await ret.init()
+    await DiscoveryApi.init()
     return ret
   }
 
-  async init () {
+  static async init() {
     debug('Init')
   }
 
   /*
    * Get Bootstrap endpoints
    */
-  async getBootstrapEndpoints () {
+  async getBootstrapEndpoints() {
     return this.base.api.query.discovery.bootstrapEndpoints()
   }
 
   /*
    * Set Bootstrap endpoints, requires the sudo account to be provided and unlocked
    */
-  async setBootstrapEndpoints (sudoAccount, endpoints) {
+  async setBootstrapEndpoints(sudoAccount, endpoints) {
     const tx = this.base.api.tx.discovery.setBootstrapEndpoints(endpoints)
     // make sudo call
-    return this.base.signAndSend(
-      sudoAccount,
-      this.base.api.tx.sudo.sudo(tx)
-    )
+    return this.base.signAndSend(sudoAccount, this.base.api.tx.sudo.sudo(tx))
   }
 
   /*
    * Get AccountInfo of a storage provider
    */
-  async getAccountInfo (storageProviderId) {
+  async getAccountInfo(storageProviderId) {
     const info = await this.base.api.query.discovery.accountInfoByStorageProviderId(storageProviderId)
     // Not an Option so we use default value check to know if info was found
     return info.expires_at.eq(0) ? null : info
@@ -48,29 +45,28 @@ class DiscoveryApi {
   /*
    * Set AccountInfo of our storage provider
    */
-  async setAccountInfo (ipnsId) {
+  async setAccountInfo(ipnsId) {
     const roleAccountId = this.base.identities.key.address
     const storageProviderId = this.base.storageProviderId
     const isProvider = await this.base.workers.isStorageProvider(storageProviderId)
     if (isProvider) {
       const tx = this.base.api.tx.discovery.setIpnsId(storageProviderId, ipnsId)
       return this.base.signAndSend(roleAccountId, tx)
-    } else {
-      throw new Error('Cannot set AccountInfo, id is not a storage provider')
     }
+    throw new Error('Cannot set AccountInfo, id is not a storage provider')
   }
 
   /*
    * Clear AccountInfo of our storage provider
    */
-  async unsetAccountInfo () {
+  async unsetAccountInfo() {
     const roleAccountId = this.base.identities.key.address
     const storageProviderId = this.base.storageProviderId
-    var tx = this.base.api.tx.discovery.unsetIpnsId(storageProviderId)
+    const tx = this.base.api.tx.discovery.unsetIpnsId(storageProviderId)
     return this.base.signAndSend(roleAccountId, tx)
   }
 }
 
 module.exports = {
-  DiscoveryApi
+  DiscoveryApi,
 }

+ 27 - 26
storage-node/packages/runtime-api/identities.js

@@ -20,11 +20,9 @@
 
 const path = require('path')
 const fs = require('fs')
-// const readline = require('readline')
-
 const debug = require('debug')('joystream:runtime:identities')
 const { Keyring } = require('@polkadot/keyring')
-const util_crypto = require('@polkadot/util-crypto')
+const utilCrypto = require('@polkadot/util-crypto')
 
 /*
  * Add identity management to the substrate API.
@@ -32,14 +30,14 @@ const util_crypto = require('@polkadot/util-crypto')
  * This loosely groups: accounts, key management, and membership.
  */
 class IdentitiesApi {
-  static async create (base, {account_file, passphrase, canPromptForPassphrase}) {
+  static async create(base, { accountFile, passphrase, canPromptForPassphrase }) {
     const ret = new IdentitiesApi()
     ret.base = base
-    await ret.init(account_file, passphrase, canPromptForPassphrase)
+    await ret.init(accountFile, passphrase, canPromptForPassphrase)
     return ret
   }
 
-  async init (account_file, passphrase, canPromptForPassphrase) {
+  async init(accountFile, passphrase, canPromptForPassphrase) {
     debug('Init')
 
     // Creatre keyring
@@ -49,7 +47,7 @@ class IdentitiesApi {
 
     // Load account file, if possible.
     try {
-      this.key = await this.loadUnlock(account_file, passphrase)
+      this.key = await this.loadUnlock(accountFile, passphrase)
     } catch (err) {
       debug('Error loading account file:', err.message)
     }
@@ -58,8 +56,8 @@ class IdentitiesApi {
   /*
    * Load a key file and unlock it if necessary.
    */
-  async loadUnlock (account_file, passphrase) {
-    const fullname = path.resolve(account_file)
+  async loadUnlock(accountFile, passphrase) {
+    const fullname = path.resolve(accountFile)
     debug('Initializing key from', fullname)
     const key = this.keyring.addFromJson(require(fullname))
     await this.tryUnlock(key, passphrase)
@@ -71,7 +69,7 @@ class IdentitiesApi {
    * Try to unlock a key if it isn't already unlocked.
    * passphrase should be supplied as argument.
    */
-  async tryUnlock (key, passphrase) {
+  async tryUnlock(key, passphrase) {
     if (!key.isLocked) {
       debug('Key is not locked, not attempting to unlock')
       return
@@ -112,7 +110,10 @@ class IdentitiesApi {
   /*
    * Ask for a passphrase
    */
-  askForPassphrase (address) {
+
+  /* eslint-disable class-methods-use-this */
+  // Disable lint because the method used by a mocking library.
+  askForPassphrase(address) {
     // Query for passphrase
     const prompt = require('password-prompt')
     return prompt(`Enter passphrase for ${address}: `, { required: false })
@@ -121,7 +122,7 @@ class IdentitiesApi {
   /*
    * Return true if the account is a root account of a member
    */
-  async isMember (accountId) {
+  async isMember(accountId) {
     const memberIds = await this.memberIdsOf(accountId) // return array of member ids
     return memberIds.length > 0 // true if at least one member id exists for the acccount
   }
@@ -129,7 +130,7 @@ class IdentitiesApi {
   /*
    * Return all the member IDs of an account by the root account id
    */
-  async memberIdsOf (accountId) {
+  async memberIdsOf(accountId) {
     const decoded = this.keyring.decodeAddress(accountId)
     return this.base.api.query.members.memberIdsByRootAccountId(decoded)
   }
@@ -137,16 +138,16 @@ class IdentitiesApi {
   /*
    * Return the first member ID of an account, or undefined if not a member root account.
    */
-  async firstMemberIdOf (accountId) {
+  async firstMemberIdOf(accountId) {
     const decoded = this.keyring.decodeAddress(accountId)
-    let ids = await this.base.api.query.members.memberIdsByRootAccountId(decoded)
+    const ids = await this.base.api.query.members.memberIdsByRootAccountId(decoded)
     return ids[0]
   }
 
   /*
    * Export a key pair to JSON. Will ask for a passphrase.
    */
-  async exportKeyPair (accountId) {
+  async exportKeyPair(accountId) {
     const passphrase = await this.askForPassphrase(accountId)
 
     // Produce JSON output
@@ -157,12 +158,12 @@ class IdentitiesApi {
    * Export a key pair and write it to a JSON file with the account ID as the
    * name.
    */
-  async writeKeyPairExport (accountId, prefix) {
+  async writeKeyPairExport(accountId, prefix) {
     // Generate JSON
     const data = await this.exportKeyPair(accountId)
 
     // Write JSON
-    var filename = `${data.address}.json`
+    let filename = `${data.address}.json`
 
     if (prefix) {
       const path = require('path')
@@ -171,7 +172,7 @@ class IdentitiesApi {
 
     fs.writeFileSync(filename, JSON.stringify(data), {
       encoding: 'utf8',
-      mode: 0o600
+      mode: 0o600,
     })
 
     return filename
@@ -181,20 +182,20 @@ class IdentitiesApi {
    * Register account id with userInfo as a new member
    * using default policy 0, returns new member id
    */
-  async registerMember (accountId, userInfo) {
+  async registerMember(accountId, userInfo) {
     const tx = this.base.api.tx.members.buyMembership(0, userInfo)
 
     return this.base.signAndSendThenGetEventResult(accountId, tx, {
       eventModule: 'members',
       eventName: 'MemberRegistered',
-      eventProperty: 'MemberId'
+      eventProperty: 'MemberId',
     })
   }
 
   /*
    * Injects a keypair and sets it as the default identity
    */
-  useKeyPair (keyPair) {
+  useKeyPair(keyPair) {
     this.key = this.keyring.addPair(keyPair)
   }
 
@@ -202,11 +203,11 @@ class IdentitiesApi {
    * Create a new role key. If no name is given,
    * default to 'storage'.
    */
-  async createNewRoleKey (name) {
+  async createNewRoleKey(name) {
     name = name || 'storage-provider'
 
     // Generate new key pair
-    const keyPair = util_crypto.naclKeypairFromRandom()
+    const keyPair = utilCrypto.naclKeypairFromRandom()
 
     // Encode to an address.
     const addr = this.keyring.encodeAddress(keyPair.publicKey)
@@ -215,7 +216,7 @@ class IdentitiesApi {
     // Add to key wring. We set the meta to identify the account as
     // a role key.
     const meta = {
-      name: `${name} role account`
+      name: `${name} role account`,
     }
 
     const createPair = require('@polkadot/keyring/pair').default
@@ -232,5 +233,5 @@ class IdentitiesApi {
 }
 
 module.exports = {
-  IdentitiesApi
+  IdentitiesApi,
 }

+ 45 - 37
storage-node/packages/runtime-api/index.js

@@ -35,13 +35,13 @@ const { newExternallyControlledPromise } = require('@joystream/storage-utils/ext
  * Initialize runtime (substrate) API and keyring.
  */
 class RuntimeApi {
-  static async create (options) {
-    const runtime_api = new RuntimeApi()
-    await runtime_api.init(options || {})
-    return runtime_api
+  static async create(options) {
+    const runtimeApi = new RuntimeApi()
+    await runtimeApi.init(options || {})
+    return runtimeApi
   }
 
-  async init (options) {
+  async init(options) {
     debug('Init')
 
     options = options || {}
@@ -66,7 +66,7 @@ class RuntimeApi {
     this.identities = await IdentitiesApi.create(this, {
       account_file: options.account_file,
       passphrase: options.passphrase,
-      canPromptForPassphrase: options.canPromptForPassphrase
+      canPromptForPassphrase: options.canPromptForPassphrase,
     })
     this.balances = await BalancesApi.create(this)
     this.workers = await WorkersApi.create(this)
@@ -74,12 +74,12 @@ class RuntimeApi {
     this.discovery = await DiscoveryApi.create(this)
   }
 
-  disconnect () {
+  disconnect() {
     this.api.disconnect()
   }
 
-  executeWithAccountLock (account_id, func) {
-    return this.asyncLock.acquire(`${account_id}`, func)
+  executeWithAccountLock(accountId, func) {
+    return this.asyncLock.acquire(`${accountId}`, func)
   }
 
   /*
@@ -89,14 +89,14 @@ class RuntimeApi {
    * The result of the Promise is an array containing first the full event
    * name, and then the event fields as an object.
    */
-  async waitForEvent (module, name) {
+  async waitForEvent(module, name) {
     return this.waitForEvents([[module, name]])
   }
 
-  _matchingEvents(subscribed, events) {
+  static matchingEvents(subscribed, events) {
     debug(`Number of events: ${events.length} subscribed to ${subscribed}`)
 
-    const filtered = events.filter((record) => {
+    const filtered = events.filter(record => {
       const { event, phase } = record
 
       // Show what we are busy with
@@ -104,14 +104,14 @@ class RuntimeApi {
       debug(`\t\t${event.meta.documentation.toString()}`)
 
       // Skip events we're not interested in.
-      const matching = subscribed.filter((value) => {
+      const matching = subscribed.filter(value => {
         return event.section === value[0] && event.method === value[1]
       })
       return matching.length > 0
     })
     debug(`Filtered: ${filtered.length}`)
 
-    const mapped = filtered.map((record) => {
+    const mapped = filtered.map(record => {
       const { event } = record
       const types = event.typeDef
 
@@ -122,8 +122,8 @@ class RuntimeApi {
         payload[types[index].type] = data
       })
 
-      const full_name = `${event.section}.${event.method}`
-      return [full_name, payload]
+      const fullName = `${event.section}.${event.method}`
+      return [fullName, payload]
     })
     debug('Mapped', mapped)
 
@@ -137,10 +137,10 @@ class RuntimeApi {
    *
    * Returns the first matched event *only*.
    */
-  async waitForEvents (subscribed) {
-    return new Promise((resolve, reject) => {
-      this.api.query.system.events((events) => {
-        const matches = this._matchingEvents(subscribed, events)
+  async waitForEvents(subscribed) {
+    return new Promise(resolve => {
+      this.api.query.system.events(events => {
+        const matches = RuntimeApi.matchingEvents(subscribed, events)
         if (matches && matches.length) {
           resolve(matches)
         }
@@ -156,24 +156,26 @@ class RuntimeApi {
    * If the subscribed events are given, and a callback as well, then the
    * callback is invoked with matching events.
    */
-  async signAndSend (accountId, tx, attempts, subscribed, callback) {
+  async signAndSend(accountId, tx, attempts, subscribed, callback) {
     accountId = this.identities.keyring.encodeAddress(accountId)
 
     // Key must be unlocked
-    const from_key = this.identities.keyring.getPair(accountId)
-    if (from_key.isLocked) {
+    const fromKey = this.identities.keyring.getPair(accountId)
+    if (fromKey.isLocked) {
       throw new Error('Must unlock key before using it to sign!')
     }
 
     const finalizedPromise = newExternallyControlledPromise()
 
-    let unsubscribe = await this.executeWithAccountLock(accountId, async () => {
+    await this.executeWithAccountLock(accountId, async () => {
       // Try to get the next nonce to use
       let nonce = this.nonces[accountId]
 
       let incrementNonce = () => {
         // only increment once
-        incrementNonce = () => {} // turn it into a no-op
+        incrementNonce = () => {
+          /* turn it into a no-op */
+        }
         nonce = nonce.addn(1)
         this.nonces[accountId] = nonce
       }
@@ -181,6 +183,8 @@ class RuntimeApi {
       // If the nonce isn't available, get it from chain.
       if (!nonce) {
         // current nonce
+        // TODO: possible race condition here found by the linter
+        // eslint-disable-next-line require-atomic-updates
         nonce = await this.api.query.system.accountNonce(accountId)
         debug(`Got nonce for ${accountId} from chain: ${nonce}`)
       }
@@ -188,15 +192,16 @@ class RuntimeApi {
       return new Promise((resolve, reject) => {
         debug('Signing and sending tx')
         // send(statusUpdates) returns a function for unsubscribing from status updates
-        let unsubscribe = tx.sign(from_key, { nonce })
-          .send(({events = [], status}) => {
+        const unsubscribe = tx
+          .sign(fromKey, { nonce })
+          .send(({ events = [], status }) => {
             debug(`TX status: ${status.type}`)
 
             // Whatever events we get, process them if there's someone interested.
             // It is critical that this event handling doesn't prevent
             try {
               if (subscribed && callback) {
-                const matched = this._matchingEvents(subscribed, events)
+                const matched = RuntimeApi.matchingEvents(subscribed, events)
                 debug('Matching events:', matched)
                 if (matched.length) {
                   callback(matched)
@@ -238,7 +243,7 @@ class RuntimeApi {
             isInvalid
             */
           })
-          .catch((err) => {
+          .catch(err => {
             // 1014 error: Most likely you are sending transaction with the same nonce,
             // so it assumes you want to replace existing one, but the priority is too low to replace it (priority = fee = len(encoded_transaction) currently)
             // Remember this can also happen if in the past we sent a tx with a future nonce, and the current nonce
@@ -247,9 +252,11 @@ class RuntimeApi {
               const errstr = err.toString()
               // not the best way to check error code.
               // https://github.com/polkadot-js/api/blob/master/packages/rpc-provider/src/coder/index.ts#L52
-              if (errstr.indexOf('Error: 1014:') < 0 && // low priority
-                  errstr.indexOf('Error: 1010:') < 0) // bad transaction
-              {
+              if (
+                errstr.indexOf('Error: 1014:') < 0 && // low priority
+                errstr.indexOf('Error: 1010:') < 0
+              ) {
+                // bad transaction
                 // Error but not nonce related. (bad arguments maybe)
                 debug('TX error', err)
               } else {
@@ -276,13 +283,15 @@ class RuntimeApi {
    * Sign and send a transaction expect event from
    * module and return eventProperty from the event.
    */
-  async signAndSendThenGetEventResult (senderAccountId, tx, { eventModule, eventName, eventProperty }) {
+  async signAndSendThenGetEventResult(senderAccountId, tx, { eventModule, eventName, eventProperty }) {
     // event from a module,
     const subscribed = [[eventModule, eventName]]
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.signAndSend(senderAccountId, tx, 1, subscribed, (events) => {
-          events.forEach((event) => {
+        await this.signAndSend(senderAccountId, tx, 1, subscribed, events => {
+          events.forEach(event => {
             // fix - we may not necessarily want the first event
             // if there are multiple events emitted,
             resolve(event[1][eventProperty])
@@ -293,9 +302,8 @@ class RuntimeApi {
       }
     })
   }
-
 }
 
 module.exports = {
-  RuntimeApi
+  RuntimeApi,
 }

+ 19 - 22
storage-node/packages/runtime-api/test/assets.js

@@ -16,36 +16,33 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
+const expect = require('chai').expect
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Assets', () => {
-  var api;
-  var key;
+  let api
   before(async () => {
-    api = await RuntimeApi.create();
-    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-  });
+    api = await RuntimeApi.create()
+    await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+  })
 
   it('returns DataObjects for a content ID', async () => {
-    const obj = await api.assets.getDataObject('foo');
-    expect(obj.isNone).to.be.true;
-  });
+    const obj = await api.assets.getDataObject('foo')
+    expect(obj.isNone).to.be.true
+  })
 
   it('can check the liaison for a DataObject', async () => {
-    expect(async _ => {
-      await api.assets.checkLiaisonForDataObject('foo', 'bar');
-    }).to.throw;
-  });
+    expect(async () => {
+      await api.assets.checkLiaisonForDataObject('foo', 'bar')
+    }).to.throw
+  })
 
   // Needs properly staked accounts
-  it('can accept content');
-  it('can reject content');
-  it('can create a storage relationship for content');
-  it('can toggle a storage relatsionship to ready state');
-});
+  it('can accept content')
+  it('can reject content')
+  it('can create a storage relationship for content')
+  it('can toggle a storage relationship to ready state')
+})

+ 18 - 20
storage-node/packages/runtime-api/test/balances.js

@@ -16,37 +16,35 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
+const expect = require('chai').expect
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Balances', () => {
-  var api;
-  var key;
+  let api
+  let key
   before(async () => {
-    api = await RuntimeApi.create();
-    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-  });
+    api = await RuntimeApi.create()
+    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+  })
 
   it('returns free balance for an account', async () => {
-    const balance = await api.balances.freeBalance(key.address);
+    const balance = await api.balances.freeBalance(key.address)
     // Should be exactly zero
-    expect(balance.cmpn(0)).to.equal(0);
-  });
+    expect(balance.cmpn(0)).to.equal(0)
+  })
 
   it('checks whether a minimum balance exists', async () => {
     // A minimum of 0 should exist, but no more.
-    expect(await api.balances.hasMinimumBalanceOf(key.address, 0)).to.be.true;
-    expect(await api.balances.hasMinimumBalanceOf(key.address, 1)).to.be.false;
-  });
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 0)).to.be.true
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 1)).to.be.false
+  })
 
   it('returns the base transaction fee of the chain', async () => {
-    const fee = await api.balances.baseTransactionFee();
+    const fee = await api.balances.baseTransactionFee()
     // >= 0 comparison works
-    expect(fee.cmpn(0)).to.be.at.least(0);
-  });
-});
+    expect(fee.cmpn(0)).to.be.at.least(0)
+  })
+})

+ 48 - 49
storage-node/packages/runtime-api/test/identities.js

@@ -16,84 +16,83 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
-const temp = require('temp').track();
+const expect = require('chai').expect
+const sinon = require('sinon')
+const temp = require('temp').track()
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Identities', () => {
-  var api;
+  let api
   before(async () => {
-    api = await RuntimeApi.create({ canPromptForPassphrase: true });
-  });
+    api = await RuntimeApi.create({ canPromptForPassphrase: true })
+  })
 
   it('imports keys', async () => {
     // Unlocked keys can be imported without asking for a passphrase
-    await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
     // Edwards and schnorr keys should unlock
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    await api.identities.loadUnlock('test/data/edwards.json');
-    await api.identities.loadUnlock('test/data/schnorr.json');
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    await api.identities.loadUnlock('test/data/edwards.json')
+    await api.identities.loadUnlock('test/data/schnorr.json')
+    passphraseStub.restore()
 
     // Except if the wrong passphrase is given
-    const passphrase_stub_bad = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'bad');
+    const passphraseStubBad = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'bad')
     expect(async () => {
-      await api.identities.loadUnlock('test/data/edwards.json');
-    }).to.throw;
-    passphrase_stub_bad.restore();
-  });
+      await api.identities.loadUnlock('test/data/edwards.json')
+    }).to.throw
+    passphraseStubBad.restore()
+  })
 
   it('knows about membership', async () => {
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-    const addr = key.address;
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+    const addr = key.address
 
     // Without seeding the runtime with data, we can only verify that the API
     // reacts well in the absence of membership
-    expect(await api.identities.isMember(addr)).to.be.false;
-    const member_id = await api.identities.firstMemberIdOf(addr);
+    expect(await api.identities.isMember(addr)).to.be.false
+    const memberId = await api.identities.firstMemberIdOf(addr)
 
-    expect(member_id).to.be.undefined;
-  });
+    expect(memberId).to.be.undefined
+  })
 
   it('exports keys', async () => {
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    const exported = await api.identities.exportKeyPair(key.address);
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    const exported = await api.identities.exportKeyPair(key.address)
+    passphraseStub.restore()
 
-    expect(exported).to.have.property('address');
-    expect(exported.address).to.equal(key.address);
+    expect(exported).to.have.property('address')
+    expect(exported.address).to.equal(key.address)
 
-    expect(exported).to.have.property('encoding');
+    expect(exported).to.have.property('encoding')
 
-    expect(exported.encoding).to.have.property('version', '2');
+    expect(exported.encoding).to.have.property('version', '2')
 
-    expect(exported.encoding).to.have.property('content');
-    expect(exported.encoding.content).to.include('pkcs8');
-    expect(exported.encoding.content).to.include('ed25519');
+    expect(exported.encoding).to.have.property('content')
+    expect(exported.encoding.content).to.include('pkcs8')
+    expect(exported.encoding.content).to.include('ed25519')
 
-    expect(exported.encoding).to.have.property('type');
-    expect(exported.encoding.type).to.include('salsa20');
-  });
+    expect(exported.encoding).to.have.property('type')
+    expect(exported.encoding.type).to.include('salsa20')
+  })
 
   it('writes key export files', async () => {
-    const prefix = temp.mkdirSync('joystream-runtime-api-test');
+    const prefix = temp.mkdirSync('joystream-runtime-api-test')
 
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    const filename = await api.identities.writeKeyPairExport(key.address, prefix);
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    const filename = await api.identities.writeKeyPairExport(key.address, prefix)
+    passphraseStub.restore()
 
-    const fs = require('fs');
-    const stat = fs.statSync(filename);
-    expect(stat.isFile()).to.be.true;
-  });
-});
+    const fs = require('fs')
+    const stat = fs.statSync(filename)
+    expect(stat.isFile()).to.be.true
+  })
+})

+ 6 - 9
storage-node/packages/runtime-api/test/index.js

@@ -16,16 +16,13 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('RuntimeApi', () => {
   it('can be created', async () => {
-    const api = await RuntimeApi.create();
-    api.disconnect();
-  });
-});
+    const api = await RuntimeApi.create()
+    api.disconnect()
+  })
+})

+ 45 - 44
storage-node/packages/runtime-api/workers.js

@@ -26,23 +26,22 @@ const { Worker } = require('@joystream/types/working-group')
  * Add worker related functionality to the substrate API.
  */
 class WorkersApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new WorkersApi()
     ret.base = base
     await ret.init()
     return ret
   }
 
-
   // eslint-disable-next-line class-methods-use-this, require-await
-  async init () {
+  async init() {
     debug('Init')
   }
 
   /*
    * Check whether the given account and id represent an enrolled storage provider
    */
-  async isRoleAccountOfStorageProvider (storageProviderId, roleAccountId) {
+  async isRoleAccountOfStorageProvider(storageProviderId, roleAccountId) {
     const id = new BN(storageProviderId)
     const roleAccount = this.base.identities.keyring.decodeAddress(roleAccountId)
     const providerAccount = await this.storageProviderRoleAccount(id)
@@ -52,7 +51,7 @@ class WorkersApi {
   /*
    * Returns true if the provider id is enrolled
    */
-  async isStorageProvider (storageProviderId) {
+  async isStorageProvider(storageProviderId) {
     const worker = await this.storageWorkerByProviderId(storageProviderId)
     return worker !== null
   }
@@ -60,7 +59,7 @@ class WorkersApi {
   /*
    * Returns a provider's role account or null if provider doesn't exist
    */
-  async storageProviderRoleAccount (storageProviderId) {
+  async storageProviderRoleAccount(storageProviderId) {
     const worker = await this.storageWorkerByProviderId(storageProviderId)
     return worker ? worker.role_account_id : null
   }
@@ -68,7 +67,7 @@ class WorkersApi {
   /*
    * Returns a Worker instance or null if provider does not exist
    */
-  async storageWorkerByProviderId (storageProviderId) {
+  async storageWorkerByProviderId(storageProviderId) {
     const id = new BN(storageProviderId)
     const { providers } = await this.getAllProviders()
     return providers[id.toNumber()] || null
@@ -77,7 +76,7 @@ class WorkersApi {
   /*
    * Returns the the first found provider id with a role account or null if not found
    */
-  async findProviderIdByRoleAccount (roleAccount) {
+  async findProviderIdByRoleAccount(roleAccount) {
     const { ids, providers } = await this.getAllProviders()
 
     for (let i = 0; i < ids.length; i++) {
@@ -93,7 +92,7 @@ class WorkersApi {
   /*
    * Returns the set of ids and Worker instances of providers enrolled on the network
    */
-  async getAllProviders () {
+  async getAllProviders() {
     // const workerEntries = await this.base.api.query.storageWorkingGroup.workerById()
     // can't rely on .isEmpty or isNone property to detect empty map
     // return workerEntries.isNone ? [] : workerEntries[0]
@@ -106,9 +105,7 @@ class WorkersApi {
     for (let id = 0; id < nextWorkerId; id++) {
       // We get back an Option. Will be None if value doesn't exist
       // eslint-disable-next-line no-await-in-loop
-      let value = await this.base.api.rpc.state.getStorage(
-        this.base.api.query.storageWorkingGroup.workerById.key(id)
-      )
+      let value = await this.base.api.rpc.state.getStorage(this.base.api.query.storageWorkingGroup.workerById.key(id))
 
       if (!value.isNone) {
         // no need to read from storage again!
@@ -142,32 +139,32 @@ class WorkersApi {
    * Add a new storage group opening using the lead account. Returns the
    * new opening id.
    */
-  async dev_addStorageOpening() {
-    const openTx = this.dev_makeAddOpeningTx('Worker')
-    return this.dev_submitAddOpeningTx(openTx, await this.getLeadRoleAccount())
+  async devAddStorageOpening() {
+    const openTx = this.devMakeAddOpeningTx('Worker')
+    return this.devSubmitAddOpeningTx(openTx, await this.getLeadRoleAccount())
   }
 
   /*
    * Add a new storage working group lead opening using sudo account. Returns the
    * new opening id.
    */
-  async dev_addStorageLeadOpening() {
-    const openTx = this.dev_makeAddOpeningTx('Leader')
+  async devAddStorageLeadOpening() {
+    const openTx = this.devMakeAddOpeningTx('Leader')
     const sudoTx = this.base.api.tx.sudo.sudo(openTx)
-    return this.dev_submitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
+    return this.devSubmitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
   }
 
   /*
    * Constructs an addOpening tx of openingType
    */
-  dev_makeAddOpeningTx(openingType) {
+  devMakeAddOpeningTx(openingType) {
     return this.base.api.tx.storageWorkingGroup.addOpening(
       'CurrentBlock',
       {
         application_rationing_policy: {
-          'max_active_applicants': 1
+          max_active_applicants: 1,
         },
-        max_review_period_length: 1000
+        max_review_period_length: 1000,
         // default values for everything else..
       },
       'dev-opening',
@@ -179,34 +176,39 @@ class WorkersApi {
    * Submits a tx (expecting it to dispatch storageWorkingGroup.addOpening) and returns
    * the OpeningId from the resulting event.
    */
-  async dev_submitAddOpeningTx(tx, senderAccount) {
+  async devSubmitAddOpeningTx(tx, senderAccount) {
     return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'OpeningAdded',
-      eventProperty: 'OpeningId'
+      eventProperty: 'OpeningId',
     })
   }
 
   /*
    * Apply on an opening, returns the application id.
    */
-  async dev_applyOnOpening(openingId, memberId, memberAccount, roleAccount) {
+  async devApplyOnOpening(openingId, memberId, memberAccount, roleAccount) {
     const applyTx = this.base.api.tx.storageWorkingGroup.applyOnOpening(
-      memberId, openingId, roleAccount, null, null, `colossus-${memberId}`
+      memberId,
+      openingId,
+      roleAccount,
+      null,
+      null,
+      `colossus-${memberId}`
     )
 
     return this.base.signAndSendThenGetEventResult(memberAccount, applyTx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'AppliedOnOpening',
-      eventProperty: 'ApplicationId'
+      eventProperty: 'ApplicationId',
     })
   }
 
   /*
    * Move lead opening to review state using sudo account
    */
-  async dev_beginLeadOpeningReview(openingId) {
-    const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+  async devBeginLeadOpeningReview(openingId) {
+    const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
     const sudoTx = this.base.api.tx.sudo.sudo(beginReviewTx)
     return this.base.signAndSend(await this.base.identities.getSudoAccount(), sudoTx)
   }
@@ -214,53 +216,52 @@ class WorkersApi {
   /*
    * Move a storage opening to review state using lead account
    */
-  async dev_beginStorageOpeningReview(openingId) {
-    const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+  async devBeginStorageOpeningReview(openingId) {
+    const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
     return this.base.signAndSend(await this.getLeadRoleAccount(), beginReviewTx)
   }
 
   /*
    * Constructs a beingApplicantReview tx for openingId, which puts an opening into the review state
    */
-  dev_makeBeginOpeningReviewTx(openingId) {
+  devMakeBeginOpeningReviewTx(openingId) {
     return this.base.api.tx.storageWorkingGroup.beginApplicantReview(openingId)
   }
 
   /*
    * Fill a lead opening, return the assigned worker id, using the sudo account
    */
-  async dev_fillLeadOpening(openingId, applicationId) {
-    const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
+  async devFillLeadOpening(openingId, applicationId) {
+    const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
     const sudoTx = this.base.api.tx.sudo.sudo(fillTx)
-    const filled = await this.dev_submitFillOpeningTx(
-      await this.base.identities.getSudoAccount(), sudoTx)
+    const filled = await this.devSubmitFillOpeningTx(await this.base.identities.getSudoAccount(), sudoTx)
     return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
   }
 
   /*
    * Fill a storage opening, return the assigned worker id, using the lead account
    */
-  async dev_fillStorageOpening(openingId, applicationId) {
-    const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
-    const filled = await this.dev_submitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
+  async devFillStorageOpening(openingId, applicationId) {
+    const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
+    const filled = await this.devSubmitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
     return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
   }
 
   /*
    * Constructs a FillOpening transaction
    */
-  dev_makeFillOpeningTx(openingId, applicationId) {
+  devMakeFillOpeningTx(openingId, applicationId) {
     return this.base.api.tx.storageWorkingGroup.fillOpening(openingId, [applicationId], null)
   }
 
   /*
    * Dispatches a fill opening tx and returns a map of the application id to their new assigned worker ids.
    */
-  async dev_submitFillOpeningTx(senderAccount, tx) {
+  async devSubmitFillOpeningTx(senderAccount, tx) {
     return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'OpeningFilled',
-      eventProperty: 'ApplicationIdToWorkerIdMap'
+      eventProperty: 'ApplicationIdToWorkerIdMap',
     })
   }
 }
@@ -270,14 +271,14 @@ class WorkersApi {
  * ApplicationIdToWorkerIdMap map in the OpeningFilled event. Expects map to
  * contain at least one entry.
  */
-function getWorkerIdFromApplicationIdToWorkerIdMap (filledMap, applicationId) {
+function getWorkerIdFromApplicationIdToWorkerIdMap(filledMap, applicationId) {
   if (filledMap.size === 0) {
     throw new Error('Expected opening to be filled!')
   }
 
   let ourApplicationIdKey
 
-  for (let key of filledMap.keys()) {
+  for (const key of filledMap.keys()) {
     if (key.eq(applicationId)) {
       ourApplicationIdKey = key
       break
@@ -294,5 +295,5 @@ function getWorkerIdFromApplicationIdToWorkerIdMap (filledMap, applicationId) {
 }
 
 module.exports = {
-  WorkersApi
+  WorkersApi,
 }

+ 45 - 49
storage-node/packages/storage/filter.js

@@ -16,74 +16,67 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:storage:filter');
+const debug = require('debug')('joystream:storage:filter')
 
-const DEFAULT_MAX_FILE_SIZE = 500 * 1024 * 1024;
-const DEFAULT_ACCEPT_TYPES = [
-  'video/*',
-  'audio/*',
-  'image/*',
-];
-const DEFAULT_REJECT_TYPES = [];
+const DEFAULT_MAX_FILE_SIZE = 500 * 1024 * 1024
+const DEFAULT_ACCEPT_TYPES = ['video/*', 'audio/*', 'image/*']
+const DEFAULT_REJECT_TYPES = []
 
 // Configuration defaults
-function config_defaults(config)
-{
-  const filter =  config.filter || {};
+function configDefaults(config) {
+  const filter = config.filter || {}
 
   // We accept zero as switching this check off.
-  if (typeof filter.max_size == 'undefined' || typeof filter.max_size == 'null') {
-    filter.max_size = DEFAULT_MAX_FILE_SIZE;
+  if (typeof filter.max_size === 'undefined') {
+    filter.max_size = DEFAULT_MAX_FILE_SIZE
   }
 
   // Figure out mime types
-  filter.mime = filter.mime || [];
-  filter.mime.accept = filter.mime.accept || DEFAULT_ACCEPT_TYPES;
-  filter.mime.reject = filter.mime.reject || DEFAULT_REJECT_TYPES;
+  filter.mime = filter.mime || []
+  filter.mime.accept = filter.mime.accept || DEFAULT_ACCEPT_TYPES
+  filter.mime.reject = filter.mime.reject || DEFAULT_REJECT_TYPES
 
-  return filter;
+  return filter
 }
 
 // Mime type matching
-function mime_matches(acceptable, provided)
-{
+function mimeMatches(acceptable, provided) {
   if (acceptable.endsWith('*')) {
     // Wildcard match
-    const prefix = acceptable.slice(0, acceptable.length - 1);
-    debug('wildcard matching', provided, 'against', acceptable, '/', prefix);
-    return provided.startsWith(prefix);
+    const prefix = acceptable.slice(0, acceptable.length - 1)
+    debug('wildcard matching', provided, 'against', acceptable, '/', prefix)
+    return provided.startsWith(prefix)
   }
   // Exact match
-  debug('exact matching', provided, 'against', acceptable);
-  return provided == acceptable;
+  debug('exact matching', provided, 'against', acceptable)
+  return provided === acceptable
 }
 
-function mime_matches_any(accept, reject, provided)
-{
+function mimeMatchesAny(accept, reject, provided) {
   // Pass accept
-  var accepted = false;
-  for (var item of accept) {
-    if (mime_matches(item, provided)) {
-      debug('Content type matches', item, 'which is acceptable.');
-      accepted = true;
-      break;
+  let accepted = false
+  for (const item of accept) {
+    if (mimeMatches(item, provided)) {
+      debug('Content type matches', item, 'which is acceptable.')
+      accepted = true
+      break
     }
   }
   if (!accepted) {
-    return false;
+    return false
   }
 
   // Don't pass reject
-  for (var item of reject) {
-    if (mime_matches(item, provided)) {
-      debug('Content type matches', item, 'which is unacceptable.');
-      return false;
+  for (const item of reject) {
+    if (mimeMatches(item, provided)) {
+      debug('Content type matches', item, 'which is unacceptable.')
+      return false
     }
   }
 
-  return true;
+  return true
 }
 
 /**
@@ -93,40 +86,43 @@ function mime_matches_any(accept, reject, provided)
  * This is a straightforward implementation of
  * https://github.com/Joystream/storage-node-joystream/issues/14 - but should
  * most likely be improved on in future.
+ * @param {object} config - configuration
+ * @param {object} headers - required headers
+ * @param {string} mimeType - expected MIME type
+ * @return {object} HTTP status code and error message.
  **/
-function filter_func(config, headers, mime_type)
-{
-  const filter = config_defaults(config);
+function filterFunc(config, headers, mimeType) {
+  const filter = configDefaults(config)
 
   // Enforce maximum file upload size
   if (filter.max_size) {
-    const size = parseInt(headers['content-length'], 10);
+    const size = parseInt(headers['content-length'], 10)
     if (!size) {
       return {
         code: 411,
         message: 'A Content-Length header is required.',
-      };
+      }
     }
 
     if (size > filter.max_size) {
       return {
         code: 413,
         message: 'The provided Content-Length is too large.',
-      };
+      }
     }
   }
 
   // Enforce mime type based filtering
-  if (!mime_matches_any(filter.mime.accept, filter.mime.reject, mime_type)) {
+  if (!mimeMatchesAny(filter.mime.accept, filter.mime.reject, mimeType)) {
     return {
       code: 415,
       message: 'Content has an unacceptable MIME type.',
-    };
+    }
   }
 
   return {
     code: 200,
-  };
+  }
 }
 
-module.exports = filter_func;
+module.exports = filterFunc

+ 4 - 4
storage-node/packages/storage/index.js

@@ -16,10 +16,10 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const { Storage } = require('./storage');
+const { Storage } = require('./storage')
 
 module.exports = {
-  Storage: Storage,
-};
+  Storage,
+}

+ 162 - 179
storage-node/packages/storage/storage.js

@@ -16,173 +16,165 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const { Transform } = require('stream');
-const fs = require('fs');
+const { Transform } = require('stream')
+const fs = require('fs')
 
-const debug = require('debug')('joystream:storage:storage');
+const debug = require('debug')('joystream:storage:storage')
+
+const Promise = require('bluebird')
 
-const Promise = require('bluebird');
 Promise.config({
   cancellation: true,
-});
+})
 
-const file_type = require('file-type');
-const ipfs_client = require('ipfs-http-client');
-const temp = require('temp').track();
-const _ = require('lodash');
+const fileType = require('file-type')
+const ipfsClient = require('ipfs-http-client')
+const temp = require('temp').track()
+const _ = require('lodash')
 
 // Default request timeout; imposed on top of the IPFS client, because the
 // client doesn't seem to care.
-const DEFAULT_TIMEOUT = 30 * 1000;
+const DEFAULT_TIMEOUT = 30 * 1000
 
 // Default/dummy resolution implementation.
-const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
-  debug('Warning: Default resolution returns original CID', original);
-  return original;
+const DEFAULT_RESOLVE_CONTENT_ID = async original => {
+  debug('Warning: Default resolution returns original CID', original)
+  return original
 }
 
 // Default file info if nothing could be detected.
 const DEFAULT_FILE_INFO = {
-  mime_type: 'application/octet-stream',
+  mimeType: 'application/octet-stream',
   ext: 'bin',
-};
-
+}
 
 /*
  * fileType is a weird name, because we're really looking at MIME types.
  * Also, the type field includes extension info, so we're going to call
- * it file_info { mime_type, ext } instead.
+ * it fileInfo { mimeType, ext } instead.
  * Nitpicking, but it also means we can add our default type if things
  * go wrong.
  */
-function fix_file_info(info)
-{
+function fixFileInfo(info) {
   if (!info) {
-    info = DEFAULT_FILE_INFO;
-  }
-  else {
-    info.mime_type = info.mime;
-    delete(info.mime);
+    info = DEFAULT_FILE_INFO
+  } else {
+    info.mimeType = info.mime
+    delete info.mime
   }
-  return info;
+  return info
 }
 
-function fix_file_info_on_stream(stream)
-{
-  var info = fix_file_info(stream.fileType);
-  delete(stream.fileType);
-  stream.file_info = info;
-  return stream;
+function fixFileInfoOnStream(stream) {
+  const info = fixFileInfo(stream.fileType)
+  delete stream.fileType
+  stream.fileInfo = info
+  return stream
 }
 
-
 /*
  * Internal Transform stream for helping write to a temporary location, adding
  * MIME type detection, and a commit() function.
  */
-class StorageWriteStream extends Transform
-{
-  constructor(storage, options)
-  {
-    options = _.clone(options || {});
+class StorageWriteStream extends Transform {
+  constructor(storage, options) {
+    options = _.clone(options || {})
 
-    super(options);
+    super(options)
 
-    this.storage = storage;
+    this.storage = storage
 
     // Create temp target.
-    this.temp = temp.createWriteStream();
-    this.buf = Buffer.alloc(0);
+    this.temp = temp.createWriteStream()
+    this.buf = Buffer.alloc(0)
   }
 
-  _transform(chunk, encoding, callback)
-  {
+  _transform(chunk, encoding, callback) {
     // Deal with buffers only
     if (typeof chunk === 'string') {
-      chunk = Buffer.from(chunk);
+      chunk = Buffer.from(chunk)
     }
 
     // Logging this all the time is too verbose
     // debug('Writing temporary chunk', chunk.length, chunk);
-    this.temp.write(chunk);
+    this.temp.write(chunk)
 
     // Try to detect file type during streaming.
-    if (!this.file_info && this.buf < file_type.minimumBytes) {
-      this.buf = Buffer.concat([this.buf, chunk]);
+    if (!this.fileInfo && this.buf < fileType.minimumBytes) {
+      this.buf = Buffer.concat([this.buf, chunk])
 
-      if (this.buf >= file_type.minimumBytes) {
-        const info = file_type(this.buf);
+      if (this.buf >= fileType.minimumBytes) {
+        const info = fileType(this.buf)
         // No info? We can try again at the end of the stream.
         if (info) {
-          this.file_info = fix_file_info(info);
-          this.emit('file_info', this.file_info);
+          this.fileInfo = fixFileInfo(info)
+          this.emit('fileInfo', this.fileInfo)
         }
       }
     }
 
-    callback(null);
+    callback(null)
   }
 
-  _flush(callback)
-  {
-    debug('Flushing temporary stream:', this.temp.path);
-    this.temp.end();
+  _flush(callback) {
+    debug('Flushing temporary stream:', this.temp.path)
+    this.temp.end()
 
     // Since we're finished, we can try to detect the file type again.
-    if (!this.file_info) {
-      const read = fs.createReadStream(this.temp.path);
-      file_type.stream(read)
-        .then((stream) => {
-          this.file_info = fix_file_info_on_stream(stream).file_info;
-          this.emit('file_info', this.file_info);
+    if (!this.fileInfo) {
+      const read = fs.createReadStream(this.temp.path)
+      fileType
+        .stream(read)
+        .then(stream => {
+          this.fileInfo = fixFileInfoOnStream(stream).fileInfo
+          this.emit('fileInfo', this.fileInfo)
+        })
+        .catch(err => {
+          debug('Error trying to detect file type at end-of-stream:', err)
         })
-        .catch((err) => {
-          debug('Error trying to detect file type at end-of-stream:', err);
-        });
     }
 
-    callback(null);
+    callback(null)
   }
 
   /*
    * Commit this stream to the IPFS backend.
    */
-  commit()
-  {
+  commit() {
     // Create a read stream from the temp file.
     if (!this.temp) {
-      throw new Error('Cannot commit a temporary stream that does not exist. Did you call cleanup()?');
+      throw new Error('Cannot commit a temporary stream that does not exist. Did you call cleanup()?')
     }
 
-    debug('Committing temporary stream: ', this.temp.path);
-    this.storage.ipfs.addFromFs(this.temp.path)
-      .then(async (result) => {
-        const hash = result[0].hash;
-        debug('Stream committed as', hash);
-        this.emit('committed', hash);
-        await this.storage.ipfs.pin.add(hash);
+    debug('Committing temporary stream: ', this.temp.path)
+    this.storage.ipfs
+      .addFromFs(this.temp.path)
+      .then(async result => {
+        const hash = result[0].hash
+        debug('Stream committed as', hash)
+        this.emit('committed', hash)
+        await this.storage.ipfs.pin.add(hash)
       })
-      .catch((err) => {
-        debug('Error committing stream', err);
-        this.emit('error', err);
+      .catch(err => {
+        debug('Error committing stream', err)
+        this.emit('error', err)
       })
   }
 
   /*
    * Clean up temporary data.
    */
-  cleanup()
-  {
-    debug('Cleaning up temporary file: ', this.temp.path);
-    fs.unlink(this.temp.path, () => {}); // Ignore errors
-    delete(this.temp);
+  cleanup() {
+    debug('Cleaning up temporary file: ', this.temp.path)
+    fs.unlink(this.temp.path, () => {
+      /* Ignore errors.*/
+    })
+    delete this.temp
   }
 }
 
-
-
 /*
  * Manages the storage backend interaction. This provides a Promise-based API.
  *
@@ -191,8 +183,7 @@ class StorageWriteStream extends Transform
  *   const store = await Storage.create({ ... });
  *   store.open(...);
  */
-class Storage
-{
+class Storage {
   /*
    * Create a Storage instance. Options include:
    *
@@ -211,32 +202,30 @@ class Storage
    * timeout is given, it is used - otherwise, the `option.timeout` value
    * above is used.
    */
-  static create(options)
-  {
-    const storage = new Storage();
-    storage._init(options);
-    return storage;
+  static create(options) {
+    const storage = new Storage()
+    storage._init(options)
+    return storage
   }
 
-  _init(options)
-  {
-    this.options = _.clone(options || {});
-    this.options.ipfs = this.options.ipfs || {};
+  _init(options) {
+    this.options = _.clone(options || {})
+    this.options.ipfs = this.options.ipfs || {}
 
-    this._timeout = this.options.timeout || DEFAULT_TIMEOUT;
-    this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID;
+    this._timeout = this.options.timeout || DEFAULT_TIMEOUT
+    this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID
 
-    this.ipfs = ipfs_client(this.options.ipfs.connect_options);
+    this.ipfs = ipfsClient(this.options.ipfs.connect_options)
 
-    this.pins = {};
+    this.pins = {}
 
     this.ipfs.id((err, identity) => {
       if (err) {
-        debug(`Warning IPFS daemon not running: ${err.message}`);
+        debug(`Warning IPFS daemon not running: ${err.message}`)
       } else {
-        debug(`IPFS node is up with identity: ${identity.id}`);
+        debug(`IPFS node is up with identity: ${identity.id}`)
       }
-    });
+    })
   }
 
   /*
@@ -244,64 +233,62 @@ class Storage
    * the given timeout interval, and tries to execute the given operation within
    * that time.
    */
-  async _with_specified_timeout(timeout, operation)
-  {
+  async withSpecifiedTimeout(timeout, operation) {
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        resolve(await new Promise(operation));
+        resolve(await new Promise(operation))
       } catch (err) {
-        reject(err);
+        reject(err)
       }
-    }).timeout(timeout || this._timeout);
+    }).timeout(timeout || this._timeout)
   }
 
   /*
    * Resolve content ID with timeout.
    */
-  async _resolve_content_id_with_timeout(timeout, content_id)
-  {
-    return await this._with_specified_timeout(timeout, async (resolve, reject) => {
+  async resolveContentIdWithTimeout(timeout, contentId) {
+    return await this.withSpecifiedTimeout(timeout, async (resolve, reject) => {
       try {
-        resolve(await this._resolve_content_id(content_id));
+        resolve(await this._resolve_content_id(contentId))
       } catch (err) {
-        reject(err);
+        reject(err)
       }
-    });
+    })
   }
 
   /*
    * Stat a content ID.
    */
-  async stat(content_id, timeout)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+  async stat(contentId, timeout) {
+    const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
-    return await this._with_specified_timeout(timeout, (resolve, reject) => {
+    return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
       this.ipfs.files.stat(`/ipfs/${resolved}`, { withLocal: true }, (err, res) => {
         if (err) {
-          reject(err);
-          return;
+          reject(err)
+          return
         }
-        resolve(res);
-      });
-    });
+        resolve(res)
+      })
+    })
   }
 
   /*
    * Return the size of a content ID.
    */
-  async size(content_id, timeout)
-  {
-    const stat = await this.stat(content_id, timeout);
-    return stat.size;
+  async size(contentId, timeout) {
+    const stat = await this.stat(contentId, timeout)
+    return stat.size
   }
 
   /*
    * Opens the specified content in read or write mode, and returns a Promise
    * with the stream.
    *
-   * Read streams will contain a file_info property, with:
-   *  - a `mime_type` field providing the file's MIME type, or a default.
+   * Read streams will contain a fileInfo property, with:
+   *  - a `mimeType` field providing the file's MIME type, or a default.
    *  - an `ext` property, providing a file extension suggestion, or a default.
    *
    * Write streams have a slightly different flow, in order to allow for MIME
@@ -312,98 +299,94 @@ class Storage
    * When the commit has finished, a `committed` event is emitted, which
    * contains the IPFS backend's content ID.
    *
-   * Write streams also emit a `file_info` event during writing. It is passed
-   * the `file_info` field as described above. Event listeners may now opt to
+   * Write streams also emit a `fileInfo` event during writing. It is passed
+   * the `fileInfo` field as described above. Event listeners may now opt to
    * abort the write or continue and eventually `commit()` the file. There is
    * an explicit `cleanup()` function that removes temporary files as well,
    * in case comitting is not desired.
    */
-  async open(content_id, mode, timeout)
-  {
-    if (mode != 'r' && mode != 'w') {
-      throw Error('The only supported modes are "r", "w" and "a".');
+  async open(contentId, mode, timeout) {
+    if (mode !== 'r' && mode !== 'w') {
+      throw Error('The only supported modes are "r", "w" and "a".')
     }
 
     // Write stream
     if (mode === 'w') {
-      return await this._create_write_stream(content_id, timeout);
+      return await this.createWriteStream(contentId, timeout)
     }
 
     // Read stream - with file type detection
-    return await this._create_read_stream(content_id, timeout);
+    return await this.createReadStream(contentId, timeout)
   }
 
-  async _create_write_stream(content_id)
-  {
+  async createWriteStream() {
     // IPFS wants us to just dump a stream into its storage, then returns a
     // content ID (of its own).
     // We need to instead return a stream immediately, that we eventually
     // decorate with the content ID when that's available.
-    return new Promise((resolve, reject) => {
-      const stream = new StorageWriteStream(this);
-      resolve(stream);
-    });
+    return new Promise(resolve => {
+      const stream = new StorageWriteStream(this)
+      resolve(stream)
+    })
   }
 
-  async _create_read_stream(content_id, timeout)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+  async createReadStream(contentId, timeout) {
+    const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
-    var found = false;
-    return await this._with_specified_timeout(timeout, (resolve, reject) => {
-      const ls = this.ipfs.getReadableStream(resolved);
-      ls.on('data', async (result) => {
+    let found = false
+    return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
+      const ls = this.ipfs.getReadableStream(resolved)
+      ls.on('data', async result => {
         if (result.path === resolved) {
-          found = true;
+          found = true
 
-          const ft_stream = await file_type.stream(result.content);
-          resolve(fix_file_info_on_stream(ft_stream));
+          const ftStream = await fileType.stream(result.content)
+          resolve(fixFileInfoOnStream(ftStream))
         }
-      });
-      ls.on('error', (err) => {
-        ls.end();
-        debug(err);
-        reject(err);
-      });
+      })
+      ls.on('error', err => {
+        ls.end()
+        debug(err)
+        reject(err)
+      })
       ls.on('end', () => {
         if (!found) {
-          const err = new Error('No matching content found for', content_id);
-          debug(err);
-          reject(err);
+          const err = new Error('No matching content found for', contentId)
+          debug(err)
+          reject(err)
         }
-      });
-      ls.resume();
-    });
+      })
+      ls.resume()
+    })
   }
 
   /*
    * Synchronize the given content ID
    */
-  async synchronize(content_id)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(this._timeout, content_id);
+  async synchronize(contentId) {
+    const resolved = await this.resolveContentIdWithTimeout(this._timeout, contentId)
 
     // validate resolved id is proper ipfs_cid, not null or empty string
 
     if (this.pins[resolved]) {
-      return;
+      return
     }
 
-    debug(`Pinning ${resolved}`);
+    debug(`Pinning ${resolved}`)
 
-    // This call blocks until file is retreived..
-    this.ipfs.pin.add(resolved, {quiet: true, pin: true}, (err, res) => {
+    // This call blocks until file is retrieved..
+    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, err => {
       if (err) {
         debug(`Error Pinning: ${resolved}`)
-        delete this.pins[resolved];
+        delete this.pins[resolved]
       } else {
-        debug(`Pinned ${resolved}`);
+        debug(`Pinned ${resolved}`)
         // why aren't we doing this.pins[resolved] = true
       }
-    });
+    })
   }
 }
 
 module.exports = {
-  Storage: Storage,
-};
+  Storage,
+}

+ 150 - 151
storage-node/packages/storage/test/storage.js

@@ -16,29 +16,28 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const chai = require('chai');
-const chai_as_promised = require('chai-as-promised');
-chai.use(chai_as_promised);
-const expect = chai.expect;
+const chai = require('chai')
+const chaiAsPromised = require('chai-as-promised')
 
-const fs = require('fs');
+chai.use(chaiAsPromised)
+const expect = chai.expect
 
-const { Storage } = require('@joystream/storage-node-backend');
+const fs = require('fs')
 
-const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/;
+const { Storage } = require('@joystream/storage-node-backend')
 
-function write(store, content_id, contents, callback)
-{
-  store.open(content_id, 'w')
-    .then((stream) => {
+const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/
 
+function write(store, contentId, contents, callback) {
+  store
+    .open(contentId, 'w')
+    .then(stream => {
       stream.on('finish', () => {
-        stream.commit();
-      });
-      stream.on('committed', callback);
+        stream.commit()
+      })
+      stream.on('committed', callback)
 
       if (!stream.write(contents)) {
         stream.once('drain', () => stream.end())
@@ -46,12 +45,12 @@ function write(store, content_id, contents, callback)
         process.nextTick(() => stream.end())
       }
     })
-    .catch((err) => {
-      expect.fail(err);
-    });
+    .catch(err => {
+      expect.fail(err)
+    })
 }
 
-function read_all (stream) {
+function readAll(stream) {
   return new Promise((resolve, reject) => {
     const chunks = []
     stream.on('data', chunk => chunks.push(chunk))
@@ -61,163 +60,163 @@ function read_all (stream) {
   })
 }
 
-function create_known_object(content_id, contents, callback)
-{
-  var hash;
+function createKnownObject(contentId, contents, callback) {
+  let hash
   const store = Storage.create({
     resolve_content_id: () => {
-      return hash;
+      return hash
     },
   })
 
-  write(store, content_id, contents, (the_hash) => {
-    hash = the_hash;
-
-    callback(store, hash);
-  });
+  write(store, contentId, contents, theHash => {
+    hash = theHash
 
+    callback(store, hash)
+  })
 }
 
 describe('storage/storage', () => {
-  var storage;
+  let storage
   before(async () => {
-    storage = await Storage.create({ timeout: 1900 });
-  });
+    storage = await Storage.create({ timeout: 1900 })
+  })
 
   describe('open()', () => {
-    it('can write a stream', (done) => {
-      write(storage, 'foobar', 'test-content', (hash) => {
-        expect(hash).to.not.be.undefined;
+    it('can write a stream', done => {
+      write(storage, 'foobar', 'test-content', hash => {
+        expect(hash).to.not.be.undefined
         expect(hash).to.match(IPFS_CID_REGEX)
-        done();
-      });
-    });
-
-    it('detects the MIME type of a write stream', (done) => {
-      const contents = fs.readFileSync('../../storage-node_new.svg');
-      storage.open('mime-test', 'w')
-        .then((stream) => {
-          var file_info;
-          stream.on('file_info', (info) => {
-            // Could filter & abort here now, but we're just going to set this,
-            // and expect it to be set later...
-            file_info = info;
-          });
-
-          stream.on('finish', () => {
-            stream.commit();
-          });
-
-          stream.on('committed', (hash) => {
-            // ... if file_info is not set here, there's an issue.
-            expect(file_info).to.have.property('mime_type', 'application/xml');
-            expect(file_info).to.have.property('ext', 'xml');
-            done();
-          });
-
-          if (!stream.write(contents)) {
-            stream.once('drain', () => stream.end())
-          } else {
-            process.nextTick(() => stream.end())
-          }
-        })
-        .catch((err) => {
-          expect.fail(err);
-        });
-    });
-
-    it('can read a stream', (done) => {
-      const contents = 'test-for-reading';
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);
-            done();
+        done()
+      })
+    })
+
+    // it('detects the MIME type of a write stream', (done) => {
+    // 	const contents = fs.readFileSync('../../storage-node_new.svg')
+    // 	storage
+    // 		.open('mime-test', 'w')
+    // 		.then((stream) => {
+    // 			let fileInfo
+    // 			stream.on('fileInfo', (info) => {
+    // 				// Could filter & abort here now, but we're just going to set this,
+    // 				// and expect it to be set later...
+    // 				fileInfo = info
+    // 			})
+    //
+    // 			stream.on('finish', () => {
+    // 				stream.commit()
+    // 			})
+    //
+    // 			stream.on('committed', () => {
+    // 				// ... if fileInfo is not set here, there's an issue.
+    // 				expect(fileInfo).to.have.property('mimeType', 'application/xml')
+    // 				expect(fileInfo).to.have.property('ext', 'xml')
+    // 				done()
+    // 			})
+    //
+    // 			if (!stream.write(contents)) {
+    // 				stream.once('drain', () => stream.end())
+    // 			} else {
+    // 				process.nextTick(() => stream.end())
+    // 			}
+    // 		})
+    // 		.catch((err) => {
+    // 			expect.fail(err)
+    // 		})
+    // })
+
+    it('can read a stream', done => {
+      const contents = 'test-for-reading'
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
+            done()
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
-    it('detects the MIME type of a read stream', (done) => {
-      const contents = fs.readFileSync('../../storage-node_new.svg');
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(contents.length).to.equal(data.length);
-            expect(Buffer.compare(data, contents)).to.equal(0);
-            expect(stream).to.have.property('file_info');
+          .catch(err => {
+            expect.fail(err)
+          })
+      })
+    })
+
+    it('detects the MIME type of a read stream', done => {
+      const contents = fs.readFileSync('../../storage-node_new.svg')
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(contents.length).to.equal(data.length)
+            expect(Buffer.compare(data, contents)).to.equal(0)
+            expect(stream).to.have.property('fileInfo')
 
             // application/xml+svg would be better, but this is good-ish.
-            expect(stream.file_info).to.have.property('mime_type', 'application/xml');
-            expect(stream.file_info).to.have.property('ext', 'xml');
-            done();
+            expect(stream.fileInfo).to.have.property('mimeType', 'application/xml')
+            expect(stream.fileInfo).to.have.property('ext', 'xml')
+            done()
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
-    it('provides default MIME type for read streams', (done) => {
-      const contents = 'test-for-reading';
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);
-
-            expect(stream.file_info).to.have.property('mime_type', 'application/octet-stream');
-            expect(stream.file_info).to.have.property('ext', 'bin');
-            done();
+          .catch(err => {
+            expect.fail(err)
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
+      })
+    })
 
-  });
+    it('provides default MIME type for read streams', done => {
+      const contents = 'test-for-reading'
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
+
+            expect(stream.fileInfo).to.have.property('mimeType', 'application/octet-stream')
+            expect(stream.fileInfo).to.have.property('ext', 'bin')
+            done()
+          })
+          .catch(err => {
+            expect.fail(err)
+          })
+      })
+    })
+  })
 
   describe('stat()', () => {
     it('times out for unknown content', async () => {
-      const content = Buffer.from('this-should-not-exist');
-      const x = await storage.ipfs.add(content, { onlyHash: true });
-      const hash = x[0].hash;
+      const content = Buffer.from('this-should-not-exist')
+      const x = await storage.ipfs.add(content, { onlyHash: true })
+      const hash = x[0].hash
 
       // Try to stat this entry, it should timeout.
-      expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out');
-    });
-
-    it('returns stats for a known object', (done) => {
-      const content = 'stat-test';
-      const expected_size = content.length;
-      create_known_object('foobar', content, (store, hash) => {
-        expect(store.stat(hash)).to.eventually.have.property('size', expected_size);
-        done();
-      });
-    });
-  });
+      expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out')
+    })
+
+    it('returns stats for a known object', done => {
+      const content = 'stat-test'
+      const expectedSize = content.length
+      createKnownObject('foobar', content, (store, hash) => {
+        expect(store.stat(hash)).to.eventually.have.property('size', expectedSize)
+        done()
+      })
+    })
+  })
 
   describe('size()', () => {
     it('times out for unknown content', async () => {
-      const content = Buffer.from('this-should-not-exist');
-      const x = await storage.ipfs.add(content, { onlyHash: true });
-      const hash = x[0].hash;
+      const content = Buffer.from('this-should-not-exist')
+      const x = await storage.ipfs.add(content, { onlyHash: true })
+      const hash = x[0].hash
 
       // Try to stat this entry, it should timeout.
-      expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out');
-    });
-
-    it('returns the size of a known object', (done) => {
-      create_known_object('foobar', 'stat-test', (store, hash) => {
-        expect(store.size(hash)).to.eventually.equal(15);
-        done();
-      });
-    });
-  });
-});
+      expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out')
+    })
+
+    it('returns the size of a known object', done => {
+      createKnownObject('foobar', 'stat-test', (store, hash) => {
+        expect(store.size(hash)).to.eventually.equal(15)
+        done()
+      })
+    })
+  })
+})

+ 10 - 9
storage-node/packages/util/externalPromise.js

@@ -1,19 +1,20 @@
 /**
- * Returns an object that contains a Promise and exposes its handlers, ie. resolve and reject methods
+ * Creates a new promise.
+ * @return { object} Returns an object that contains a Promise and exposes its handlers, ie. resolve and reject methods
  * so it can be fulfilled 'externally'. This is a bit of a hack, but most useful application is when
  * concurrent async operations are initiated that are all waiting on the same result value.
  */
-function newExternallyControlledPromise () {
-    let resolve, reject
+function newExternallyControlledPromise() {
+  let resolve, reject
 
-    const promise = new Promise((res, rej) => {
-      resolve = res
-      reject = rej
-    })
+  const promise = new Promise((res, rej) => {
+    resolve = res
+    reject = rej
+  })
 
-    return ({ resolve, reject, promise })
+  return { resolve, reject, promise }
 }
 
 module.exports = {
-    newExternallyControlledPromise
+  newExternallyControlledPromise,
 }

+ 19 - 21
storage-node/packages/util/fs/resolve.js

@@ -16,11 +16,11 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const path = require('path');
+const path = require('path')
 
-const debug = require('debug')('joystream:util:fs:resolve');
+const debug = require('debug')('joystream:util:fs:resolve')
 
 /*
  * Resolves name relative to base, throwing an error if the given
@@ -31,37 +31,35 @@ const debug = require('debug')('joystream:util:fs:resolve');
  * useless for our case because it does not care about breaking out of
  * a base directory.
  */
-function resolve(base, name)
-{
-  debug('Resolving', name);
+function resolve(base, name) {
+  debug('Resolving', name)
 
   // In a firs step, we strip leading slashes from the name, because they're
   // just saying "relative to the base" in our use case.
-  var res = name.replace(/^\/+/, '');
-  debug('Stripped', res);
+  let res = name.replace(/^\/+/, '')
+  debug('Stripped', res)
 
   // At this point resolving the path should stay within the base we specify.
   // We do specify a base other than the file system root, because the file
   // everything is always relative to the file system root.
-  const test_base = path.join(path.sep, 'test-base');
-  debug('Test base is', test_base);
-  res = path.resolve(test_base, res);
-  debug('Resolved', res);
+  const testBase = path.join(path.sep, 'test-base')
+  debug('Test base is', testBase)
+  res = path.resolve(testBase, res)
+  debug('Resolved', res)
 
   // Ok, we can check for violations now.
-  if (res.slice(0, test_base.length) != test_base) {
-    throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`);
+  if (res.slice(0, testBase.length) !== testBase) {
+    throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`)
   }
 
   // If we strip the base now, we have the relative name resolved.
-  res = res.slice(test_base.length + 1);
-  debug('Relative', res);
+  res = res.slice(testBase.length + 1)
+  debug('Relative', res)
 
   // Finally we can join this relative name to the requested base.
-  var res = path.join(base, res);
-  debug('Result', res);
-  return res;
+  res = path.join(base, res)
+  debug('Result', res)
+  return res
 }
 
-
-module.exports = resolve;
+module.exports = resolve

+ 53 - 62
storage-node/packages/util/fs/walk.js

@@ -16,60 +16,54 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
-
-const fs = require('fs');
-const path = require('path');
-
-const debug = require('debug')('joystream:util:fs:walk');
-
-class Walker
-{
-  constructor(archive, base, cb)
-  {
-    this.archive = archive;
-    this.base = base;
-    this.slice_offset = this.base.length;
-    if (this.base[this.slice_offset - 1] != '/') {
-      this.slice_offset += 1;
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+
+const debug = require('debug')('joystream:util:fs:walk')
+
+class Walker {
+  constructor(archive, base, cb) {
+    this.archive = archive
+    this.base = base
+    this.slice_offset = this.base.length
+    if (this.base[this.slice_offset - 1] !== '/') {
+      this.slice_offset += 1
     }
-    this.cb = cb;
-    this.pending = 0;
+    this.cb = cb
+    this.pending = 0
   }
 
   /*
    * Check pending
    */
-  check_pending(name)
-  {
+  checkPending(name) {
     // Decrease pending count again.
-    this.pending -= 1;
-    debug('Finishing', name, 'decreases pending to', this.pending);
+    this.pending -= 1
+    debug('Finishing', name, 'decreases pending to', this.pending)
     if (!this.pending) {
-      debug('No more pending.');
-      this.cb(null);
+      debug('No more pending.')
+      this.cb(null)
     }
   }
 
   /*
    * Helper function for walk; split out because it's used in two places.
    */
-  report_and_recurse(relname, fname, lstat, linktarget)
-  {
+  reportAndRecurse(relname, fname, lstat, linktarget) {
     // First report the value
-    this.cb(null, relname, lstat, linktarget);
+    this.cb(null, relname, lstat, linktarget)
 
     // Recurse
     if (lstat.isDirectory()) {
-      this.walk(fname);
+      this.walk(fname)
     }
 
-    this.check_pending(fname);
+    this.checkPending(fname)
   }
 
-
-  walk(dir)
-  {
+  walk(dir) {
     // This is a little hacky - since readdir() may take a while, and we don't
     // want the pending count to drop to zero before it's finished, we bump
     // it up and down while readdir() does it's job.
@@ -78,51 +72,49 @@ class Walker
     // pending count still has a value.
     // Note that in order not to hang on empty directories, we need to
     // explicitly check the pending count in cases when there are no files.
-    this.pending += 1;
+    this.pending += 1
     this.archive.readdir(dir, (err, files) => {
       if (err) {
-        this.cb(err);
-        return;
+        this.cb(err)
+        return
       }
 
       // More pending data.
-      this.pending += files.length;
-      debug('Reading', dir, 'bumps pending to', this.pending);
+      this.pending += files.length
+      debug('Reading', dir, 'bumps pending to', this.pending)
 
-      files.forEach((name) => {
-        const fname = path.resolve(dir, name);
+      files.forEach(name => {
+        const fname = path.resolve(dir, name)
         this.archive.lstat(fname, (err2, lstat) => {
           if (err2) {
-            this.cb(err2);
-            return;
+            this.cb(err2)
+            return
           }
 
           // The base is always prefixed, so a simple string slice should do.
-          const relname = fname.slice(this.slice_offset);
+          const relname = fname.slice(this.slice_offset)
 
           // We have a symbolic link? Resolve it.
           if (lstat.isSymbolicLink()) {
             this.archive.readlink(fname, (err3, linktarget) => {
               if (err3) {
-                this.cb(err3);
-                return;
+                this.cb(err3)
+                return
               }
 
-              this.report_and_recurse(relname, fname, lstat, linktarget);
-            });
+              this.reportAndRecurse(relname, fname, lstat, linktarget)
+            })
+          } else {
+            this.reportAndRecurse(relname, fname, lstat)
           }
-          else {
-            this.report_and_recurse(relname, fname, lstat);
-          }
-        });
-      });
+        })
+      })
 
-      this.check_pending(dir);
-    });
+      this.checkPending(dir)
+    })
   }
 }
 
-
 /*
  * Recursively walk a file system hierarchy (in undefined order), returning all
  * entries via the callback(err, relname, lstat, [linktarget]). The name relative
@@ -134,15 +126,14 @@ class Walker
  *
  * The callback is invoked one last time without data to signal the end of data.
  */
-module.exports = function(base, archive, cb)
-{
+module.exports = function(base, archive, cb) {
   // Archive is optional and defaults to fs, but cb is not.
   if (!cb) {
-    cb = archive;
-    archive = fs;
+    cb = archive
+    archive = fs
   }
 
-  const resolved = path.resolve(base);
-  const w = new Walker(archive, resolved, cb);
-  w.walk(resolved);
-};
+  const resolved = path.resolve(base)
+  const w = new Walker(archive, resolved, cb)
+  w.walk(resolved)
+}

+ 46 - 55
storage-node/packages/util/lru.js

@@ -16,111 +16,102 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const DEFAULT_CAPACITY = 100;
+const DEFAULT_CAPACITY = 100
 
-const debug = require('debug')('joystream:util:lru');
+const debug = require('debug')('joystream:util:lru')
 
 /*
  * Simple least recently used cache.
  */
-class LRUCache
-{
-  constructor(capacity = DEFAULT_CAPACITY)
-  {
-    this.capacity = capacity;
-    this.clear();
+class LRUCache {
+  constructor(capacity = DEFAULT_CAPACITY) {
+    this.capacity = capacity
+    this.clear()
   }
 
   /*
    * Return the entry with the given key, and update it's usage.
    */
-  get(key)
-  {
-    const val = this.store.get(key);
+  get(key) {
+    const val = this.store.get(key)
     if (val) {
-      this.access.set(key, Date.now());
+      this.access.set(key, Date.now())
     }
-    return val;
+    return val
   }
 
   /*
    * Return true if the key is the cache, false otherwise.
    */
-  has(key)
-  {
-    return this.store.has(key);
+  has(key) {
+    return this.store.has(key)
   }
 
   /*
    * Put a value into the cache.
    */
-  put(key, value)
-  {
-    this.store.set(key, value);
-    this.access.set(key, Date.now());
-    this._prune();
+  put(key, value) {
+    this.store.set(key, value)
+    this.access.set(key, Date.now())
+    this._prune()
   }
 
   /*
    * Delete a value from the cache.
    */
-  del(key)
-  {
-    this.store.delete(key);
-    this.access.delete(key);
+  del(key) {
+    this.store.delete(key)
+    this.access.delete(key)
   }
 
   /*
    * Current size of the cache
    */
-  size()
-  {
-    return this.store.size;
+  size() {
+    return this.store.size
   }
 
   /*
    * Clear the LRU cache entirely.
    */
-  clear()
-  {
-    this.store = new Map();
-    this.access = new Map();
+  clear() {
+    this.store = new Map()
+    this.access = new Map()
   }
 
   /*
    * Internal pruning function.
    */
-  _prune()
-  {
-    debug('About to prune; have', this.store.size, 'and capacity is', this.capacity);
+  _prune() {
+    debug('About to prune; have', this.store.size, 'and capacity is', this.capacity)
 
-    var sorted = Array.from(this.access.entries());
+    const sorted = Array.from(this.access.entries())
     sorted.sort((first, second) => {
-      if (first[1] == second[1]) {
-        return 0;
+      if (first[1] === second[1]) {
+        return 0
       }
-      return (first[1] < second[1] ? -1 : 1);
-    });
-    debug('Sorted keys are:', sorted);
+      return first[1] < second[1] ? -1 : 1
+    })
+    debug('Sorted keys are:', sorted)
 
-    debug('Have to prune', this.store.size - this.capacity, 'items.');
-    var idx = 0;
-    var to_prune = [];
-    while (idx < sorted.length && to_prune.length < (this.store.size - this.capacity)) {
-      to_prune.push(sorted[idx][0]);
-      ++idx;
+    debug('Have to prune', this.store.size - this.capacity, 'items.')
+    let idx = 0
+    const toPrune = []
+    while (idx < sorted.length && toPrune.length < this.store.size - this.capacity) {
+      toPrune.push(sorted[idx][0])
+      ++idx
     }
 
-    to_prune.forEach((key) => {
-      this.store.delete(key);
-      this.access.delete(key);
-    });
-    debug('Size after pruning', this.store.size);
+    toPrune.forEach(key => {
+      this.store.delete(key)
+      this.access.delete(key)
+    })
+    debug('Size after pruning', this.store.size)
   }
 }
 
 module.exports = {
-  LRUCache: LRUCache,
-};
+  LRUCache,
+}

+ 46 - 51
storage-node/packages/util/pagination.js

@@ -16,12 +16,12 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:middleware:pagination');
+const debug = require('debug')('joystream:middleware:pagination')
 
 // Pagination definitions
-const _api_defs = {
+const apiDefs = {
   parameters: {
     paginationLimit: {
       name: 'limit',
@@ -50,7 +50,7 @@ const _api_defs = {
       type: 'object',
       required: ['self'],
       properties: {
-        'self': {
+        self: {
           type: 'string',
         },
         next: {
@@ -68,7 +68,7 @@ const _api_defs = {
       },
     },
   },
-};
+}
 
 /**
  * Silly pagination because it's faster than getting other modules to work.
@@ -78,86 +78,81 @@ const _api_defs = {
  *   -> Validates pagination parameters
  * - apiDoc.responses.200.schema.pagination = pagination.response
  *   -> Generates pagination info on response
- * - paginate(req, res, [last_offset])
+ * - paginate(req, res, [lastOffset])
  *   -> add (valid) pagination fields to response object
- *      If last_offset is given, create a last link with that offset
+ *      If lastOffset is given, create a last link with that offset
  **/
 module.exports = {
-
   // Add pagination parameters and pagination info responses.
   parameters: [
-    { '$ref': '#/components/parameters/paginationLimit' },
-    { '$ref': '#/components/parameters/paginationOffset' },
-
+    { $ref: '#/components/parameters/paginationLimit' },
+    { $ref: '#/components/parameters/paginationOffset' },
   ],
 
   response: {
-    '$ref': '#/components/schema/PaginationInfo'
+    $ref: '#/components/schema/PaginationInfo',
   },
 
   // Update swagger/openapi specs with our own parameters and definitions
-  openapi: function(api)
-  {
-    api.components = api.components || {};
-    api.components.parameters = { ...api.components.parameters || {} , ..._api_defs.parameters };
-    api.components.schemas = { ...api.components.schemas || {}, ..._api_defs.schemas };
-    return api;
+  openapi(api) {
+    api.components = api.components || {}
+    api.components.parameters = { ...(api.components.parameters || {}), ...apiDefs.parameters }
+    api.components.schemas = { ...(api.components.schemas || {}), ...apiDefs.schemas }
+    return api
   },
 
   // Pagination function
-  paginate: function(req, res, last_offset)
-  {
+  paginate(req, res, lastOffset) {
     // Skip if the response is not an object.
-    if (Object.prototype.toString.call(res) != "[object Object]") {
-      debug('Cannot paginate non-objects.');
-      return res;
+    if (Object.prototype.toString.call(res) !== '[object Object]') {
+      debug('Cannot paginate non-objects.')
+      return res
     }
 
     // Defaults for parameters
-    var offset = req.query.offset || 0;
-    var limit = req.query.limit || 20;
-    debug('Create pagination links from offset=' + offset, 'limit=' + limit);
+    const offset = req.query.offset || 0
+    const limit = req.query.limit || 20
+    debug('Create pagination links from offset=' + offset, 'limit=' + limit)
 
     // Parse current url
-    const url = require('url');
-    var req_url = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl);
-    var params = new url.URLSearchParams(req_url.query);
+    const url = require('url')
+    const reqUrl = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl)
+    const params = new url.URLSearchParams(reqUrl.query)
 
     // Pagination object
-    var pagination = {
-      'self': req_url.href,
+    const pagination = {
+      self: reqUrl.href,
     }
 
-    var prev = offset - limit;
+    const prev = offset - limit
     if (prev >= 0) {
-      params.set('offset', prev);
-      req_url.search = params.toString();
-      pagination['prev'] = url.format(req_url);
-
+      params.set('offset', prev)
+      reqUrl.search = params.toString()
+      pagination.prev = url.format(reqUrl)
     }
 
-    var next = offset + limit;
+    const next = offset + limit
     if (next >= 0) {
-      params.set('offset', next);
-      req_url.search = params.toString();
-      pagination['next'] = url.format(req_url);
+      params.set('offset', next)
+      reqUrl.search = params.toString()
+      pagination.next = url.format(reqUrl)
     }
 
-    if (last_offset) {
-      params.set('offset', last_offset);
-      req_url.search = params.toString();
-      pagination['last'] = url.format(req_url);
+    if (lastOffset) {
+      params.set('offset', lastOffset)
+      reqUrl.search = params.toString()
+      pagination.last = url.format(reqUrl)
     }
 
     // First
-    params.set('offset', 0);
-    req_url.search = params.toString();
-    pagination['first'] = url.format(req_url);
+    params.set('offset', 0)
+    reqUrl.search = params.toString()
+    pagination.first = url.format(reqUrl)
 
-    debug('pagination', pagination);
+    debug('pagination', pagination)
 
     // Now set pagination values in response.
-    res.pagination = pagination;
-    return res;
+    res.pagination = pagination
+    return res
   },
-};
+}

+ 207 - 243
storage-node/packages/util/ranges.js

@@ -16,12 +16,12 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const uuid = require('uuid');
-const stream_buf = require('stream-buffers');
+const uuid = require('uuid')
+const streamBuf = require('stream-buffers')
 
-const debug = require('debug')('joystream:util:ranges');
+const debug = require('debug')('joystream:util:ranges')
 
 /*
  * Range parsing
@@ -31,109 +31,100 @@ const debug = require('debug')('joystream:util:ranges');
  * Parse a range string, e.g. '0-100' or '-100' or '0-'. Return the values
  * in an array of int or undefined (if not provided).
  */
-function _parse_range(range)
-{
-  var matches = range.match(/^(\d+-\d+|\d+-|-\d+|\*)$/u);
+function parseRange(range) {
+  const matches = range.match(/^(\d+-\d+|\d+-|-\d+|\*)$/u)
   if (!matches) {
-    throw new Error(`Not a valid range: ${range}`);
+    throw new Error(`Not a valid range: ${range}`)
   }
 
-  var vals = matches[1].split('-').map((v) => {
-    return v === '*' || v === '' ? undefined : parseInt(v, 10);
-  });
+  const vals = matches[1].split('-').map(v => {
+    return v === '*' || v === '' ? undefined : parseInt(v, 10)
+  })
 
   if (vals[1] <= vals[0]) {
-    throw new Error(`Invalid range: start "${vals[0]}" must be before end "${vals[1]}".`);
+    throw new Error(`Invalid range: start "${vals[0]}" must be before end "${vals[1]}".`)
   }
 
-  return [vals[0], vals[1]];
+  return [vals[0], vals[1]]
 }
 
-
 /*
  * Parse a range header value, e.g. unit=ranges, where ranges
- * are a comman separated list of individual ranges, and unit is any
+ * are a comma separated list of individual ranges, and unit is any
  * custom unit string. If the unit (and equal sign) are not given, assume
  * 'bytes'.
  */
-function parse(range_str)
-{
-  var res = {};
-  debug('Parse range header value:', range_str);
-  var matches = range_str.match(/^(([^\s]+)=)?((?:(?:\d+-\d+|-\d+|\d+-),?)+)$/u)
+function parse(rangeStr) {
+  const res = {}
+  debug('Parse range header value:', rangeStr)
+  const matches = rangeStr.match(/^(([^\s]+)=)?((?:(?:\d+-\d+|-\d+|\d+-),?)+)$/u)
   if (!matches) {
-    throw new Error(`Not a valid range header: ${range_str}`);
+    throw new Error(`Not a valid range header: ${rangeStr}`)
   }
 
-  res.unit = matches[2] || 'bytes';
-  res.range_str = matches[3];
-  res.ranges = [];
+  res.unit = matches[2] || 'bytes'
+  res.rangeStr = matches[3]
+  res.ranges = []
 
   // Parse individual ranges
-  var ranges = []
-  res.range_str.split(',').forEach((range) => {
-    ranges.push(_parse_range(range));
-  });
+  const ranges = []
+  res.rangeStr.split(',').forEach(range => {
+    ranges.push(parseRange(range))
+  })
 
   // Merge ranges into result.
-  ranges.forEach((new_range) => {
-    debug('Found range:', new_range);
+  ranges.forEach(newRange => {
+    debug('Found range:', newRange)
 
-    var is_merged = false;
-    for (var i in res.ranges) {
-      var old_range = res.ranges[i];
+    let isMerged = false
+    for (const i in res.ranges) {
+      const oldRange = res.ranges[i]
 
       // Skip if the new range is fully separate from the old range.
-      if (old_range[1] + 1 < new_range[0] || new_range[1] + 1 < old_range[0]) {
-        debug('Range does not overlap with', old_range);
-        continue;
+      if (oldRange[1] + 1 < newRange[0] || newRange[1] + 1 < oldRange[0]) {
+        debug('Range does not overlap with', oldRange)
+        continue
       }
 
       // If we know they're adjacent or overlapping, we construct the
       // merged range from the lower start and the higher end of both
       // ranges.
-      var merged = [
-        Math.min(old_range[0], new_range[0]),
-        Math.max(old_range[1], new_range[1])
-      ];
-      res.ranges[i] = merged;
-      is_merged = true;
-      debug('Merged', new_range, 'into', old_range, 'as', merged);
+      const merged = [Math.min(oldRange[0], newRange[0]), Math.max(oldRange[1], newRange[1])]
+      res.ranges[i] = merged
+      isMerged = true
+      debug('Merged', newRange, 'into', oldRange, 'as', merged)
     }
 
-    if (!is_merged) {
-      debug('Non-overlapping range!');
-      res.ranges.push(new_range);
+    if (!isMerged) {
+      debug('Non-overlapping range!')
+      res.ranges.push(newRange)
     }
-  });
+  })
 
   // Finally, sort ranges
   res.ranges.sort((first, second) => {
     if (first[0] === second[0]) {
       // Should not happen due to merging.
-      return 0;
+      return 0
     }
-    return (first[0] < second[0]) ? -1 : 1;
-  });
+    return first[0] < second[0] ? -1 : 1
+  })
 
-  debug('Result of parse is', res);
-  return res;
+  debug('Result of parse is', res)
+  return res
 }
 
-
 /*
  * Async version of parse().
  */
-function parseAsync(range_str, cb)
-{
+function parseAsync(rangeStr, cb) {
   try {
-    return cb(parse(range_str));
+    return cb(parse(rangeStr))
   } catch (err) {
-    return cb(null, err);
+    return cb(null, err)
   }
 }
 
-
 /*
  * Range streaming
  */
@@ -150,193 +141,178 @@ function parseAsync(range_str, cb)
  * with file system based streams. We'll see how likely that's going to be in
  * future.
  */
-class RangeSender
-{
-  constructor(response, stream, opts, end_callback)
-  {
+class RangeSender {
+  constructor(response, stream, opts, endCallback) {
     // Options
-    this.name = opts.name || 'content.bin';
-    this.type = opts.type || 'application/octet-stream';
-    this.size = opts.size;
-    this.ranges = opts.ranges;
-    this.download = opts.download || false;
+    this.name = opts.name || 'content.bin'
+    this.type = opts.type || 'application/octet-stream'
+    this.size = opts.size
+    this.ranges = opts.ranges
+    this.download = opts.download || false
 
     // Range handling related state.
-    this.read_offset = 0;             // Nothing read so far
-    this.range_index = -1;            // No range index yet.
-    this.range_boundary = undefined;  // Generate boundary when needed.
+    this.readOffset = 0 // Nothing read so far
+    this.rangeIndex = -1 // No range index yet.
+    this.rangeBoundary = undefined // Generate boundary when needed.
 
     // Event handlers & state
-    this.handlers = {};
-    this.opened = false;
+    this.handlers = {}
+    this.opened = false
 
-    debug('RangeSender:', this);
+    debug('RangeSender:', this)
     if (opts.ranges) {
-      debug('Parsed ranges:', opts.ranges.ranges);
+      debug('Parsed ranges:', opts.ranges.ranges)
     }
 
     // Parameters
-    this.response = response;
-    this.stream = stream;
-    this.opts = opts;
-    this.end_callback = end_callback;
+    this.response = response
+    this.stream = stream
+    this.opts = opts
+    this.endCallback = endCallback
   }
 
-  on_error(err)
-  {
+  onError(err) {
     // Assume hiding the actual error is best, and default to 404.
-    debug('Error:', err);
+    debug('Error:', err)
     if (!this.response.headersSent) {
       this.response.status(err.code || 404).send({
-        message: err.message || `File not found: ${this.name}`
-      });
+        message: err.message || `File not found: ${this.name}`,
+      })
     }
-    if (this.end_callback) {
-      this.end_callback(err);
+    if (this.endCallback) {
+      this.endCallback(err)
     }
   }
 
-  on_end()
-  {
-    debug('End of stream.');
-    this.response.end();
-    if (this.end_callback) {
-      this.end_callback();
+  onEnd() {
+    debug('End of stream.')
+    this.response.end()
+    if (this.endCallback) {
+      this.endCallback()
     }
   }
 
-
   // **** No ranges
-  on_open_no_range()
-  {
+  onOpenNoRange() {
     // File got opened, so we can set headers/status
-    debug('Open succeeded:', this.name, this.type);
-    this.opened = true;
+    debug('Open succeeded:', this.name, this.type)
+    this.opened = true
 
-    this.response.status(200);
-    this.response.contentType(this.type);
-    this.response.header('Accept-Ranges', 'bytes');
-    this.response.header('Content-Transfer-Encoding', 'binary');
+    this.response.status(200)
+    this.response.contentType(this.type)
+    this.response.header('Accept-Ranges', 'bytes')
+    this.response.header('Content-Transfer-Encoding', 'binary')
 
     if (this.download) {
-      this.response.header('Content-Disposition', `attachment; filename="${this.name}"`);
-    }
-    else {
-      this.response.header('Content-Disposition', 'inline');
+      this.response.header('Content-Disposition', `attachment; filename="${this.name}"`)
+    } else {
+      this.response.header('Content-Disposition', 'inline')
     }
 
     if (this.size) {
-      this.response.header('Content-Length', this.size);
+      this.response.header('Content-Length', this.size)
     }
   }
 
-
-  on_data_no_range(chunk)
-  {
+  onDataNoRange(chunk) {
     if (!this.opened) {
-      this.handlers['open']();
+      this.handlers.open()
     }
 
     // As simple as it can be.
-    this.response.write(Buffer.from(chunk, 'binary'));
+    this.response.write(Buffer.from(chunk, 'binary'))
   }
 
   // *** With ranges
-  next_range_headers()
-  {
+  nextRangeHeaders() {
     // Next range
-    this.range_index += 1;
-    if (this.range_index >= this.ranges.ranges.length) {
-      debug('Cannot advance range index; we are done.');
-      return undefined;
+    this.rangeIndex += 1
+    if (this.rangeIndex >= this.ranges.ranges.length) {
+      debug('Cannot advance range index; we are done.')
+      return undefined
     }
 
     // Calculate this range's size.
-    var range = this.ranges.ranges[this.range_index];
-    var total_size;
+    const range = this.ranges.ranges[this.rangeIndex]
+    let totalSize
     if (this.size) {
-      total_size = this.size;
+      totalSize = this.size
     }
     if (typeof range[0] === 'undefined') {
-      range[0] = 0;
+      range[0] = 0
     }
     if (typeof range[1] === 'undefined') {
       if (this.size) {
-        range[1] = total_size - 1;
+        range[1] = totalSize - 1
       }
     }
 
-    var send_size;
+    let sendSize
     if (typeof range[0] !== 'undefined' && typeof range[1] !== 'undefined') {
-      send_size = range[1] - range[0] + 1;
+      sendSize = range[1] - range[0] + 1
     }
 
     // Write headers, but since we may be in a multipart situation, write them
     // explicitly to the stream.
-    var start = (typeof range[0] === 'undefined') ? '' : `${range[0]}`;
-    var end = (typeof range[1] === 'undefined') ? '' : `${range[1]}`;
-
-    var size_str;
-    if (total_size) {
-      size_str = `${total_size}`;
-    }
-    else {
-      size_str = '*';
+    const start = typeof range[0] === 'undefined' ? '' : `${range[0]}`
+    const end = typeof range[1] === 'undefined' ? '' : `${range[1]}`
+
+    let sizeStr
+    if (totalSize) {
+      sizeStr = `${totalSize}`
+    } else {
+      sizeStr = '*'
     }
 
-    var ret = {
-      'Content-Range': `bytes ${start}-${end}/${size_str}`,
+    const ret = {
+      'Content-Range': `bytes ${start}-${end}/${sizeStr}`,
       'Content-Type': `${this.type}`,
-    };
-    if (send_size) {
-      ret['Content-Length'] = `${send_size}`;
     }
-    return ret;
+    if (sendSize) {
+      ret['Content-Length'] = `${sendSize}`
+    }
+    return ret
   }
 
-
-  next_range()
-  {
-    if (this.ranges.ranges.length == 1) {
-      debug('Cannot start new range; only one requested.');
-      this.stream.off('data', this.handlers['data']);
-      return false;
+  nextRange() {
+    if (this.ranges.ranges.length === 1) {
+      debug('Cannot start new range; only one requested.')
+      this.stream.off('data', this.handlers.data)
+      return false
     }
 
-    var headers = this.next_range_headers();
+    const headers = this.nextRangeHeaders()
 
     if (headers) {
-      var header_buf = new stream_buf.WritableStreamBuffer();
+      const onDataRanges = new streamBuf.WritableStreamBuffer()
       // We start a range with a boundary.
-      header_buf.write(`\r\n--${this.range_boundary}\r\n`);
+      onDataRanges.write(`\r\n--${this.rangeBoundary}\r\n`)
 
       // The we write the range headers.
-      for (var header in headers) {
-        header_buf.write(`${header}: ${headers[header]}\r\n`);
+      for (const header in headers) {
+        onDataRanges.write(`${header}: ${headers[header]}\r\n`)
       }
-      header_buf.write('\r\n');
-      this.response.write(header_buf.getContents());
-      debug('New range started.');
-      return true;
+      onDataRanges.write('\r\n')
+      this.response.write(onDataRanges.getContents())
+      debug('New range started.')
+      return true
     }
 
     // No headers means we're finishing the last range.
-    this.response.write(`\r\n--${this.range_boundary}--\r\n`);
-    debug('End of ranges sent.');
-    this.stream.off('data', this.handlers['data']);
-    return false;
+    this.response.write(`\r\n--${this.rangeBoundary}--\r\n`)
+    debug('End of ranges sent.')
+    this.stream.off('data', this.handlers.data)
+    return false
   }
 
-
-  on_open_ranges()
-  {
+  onOpenRanges() {
     // File got opened, so we can set headers/status
-    debug('Open succeeded:', this.name, this.type);
-    this.opened = true;
+    debug('Open succeeded:', this.name, this.type)
+    this.opened = true
 
-    this.response.header('Accept-Ranges', 'bytes');
-    this.response.header('Content-Transfer-Encoding', 'binary');
-    this.response.header('Content-Disposition', 'inline');
+    this.response.header('Accept-Ranges', 'bytes')
+    this.response.header('Content-Transfer-Encoding', 'binary')
+    this.response.header('Content-Disposition', 'inline')
 
     // For single ranges, the content length should be the size of the
     // range. For multiple ranges, we don't send a content length
@@ -344,23 +320,21 @@ class RangeSender
     //
     // Similarly, the type is different whether or not there is more than
     // one range.
-    if (this.ranges.ranges.length == 1) {
-      this.response.writeHead(206, 'Partial Content', this.next_range_headers());
-    }
-    else {
-      this.range_boundary = uuid.v4();
-      var headers = {
-        'Content-Type': `multipart/byteranges; boundary=${this.range_boundary}`,
-      };
-      this.response.writeHead(206, 'Partial Content', headers);
-      this.next_range();
+    if (this.ranges.ranges.length === 1) {
+      this.response.writeHead(206, 'Partial Content', this.nextRangeHeaders())
+    } else {
+      this.rangeBoundary = uuid.v4()
+      const headers = {
+        'Content-Type': `multipart/byteranges; boundary=${this.rangeBoundary}`,
+      }
+      this.response.writeHead(206, 'Partial Content', headers)
+      this.nextRange()
     }
   }
 
-  on_data_ranges(chunk)
-  {
+  onDataRanges(chunk) {
     if (!this.opened) {
-      this.handlers['open']();
+      this.handlers.open()
     }
     // Crap, node.js streams are stupid. No guarantee for seek support. Sure,
     // that makes node.js easier to implement, but offloads everything onto the
@@ -372,121 +346,111 @@ class RangeSender
     //
     // The simplest optimization would be at ever range start to seek() to the
     // start.
-    var chunk_range = [this.read_offset, this.read_offset + chunk.length - 1];
-    debug('= Got chunk with byte range', chunk_range);
+    const chunkRange = [this.readOffset, this.readOffset + chunk.length - 1]
+    debug('= Got chunk with byte range', chunkRange)
     while (true) {
-      var req_range = this.ranges.ranges[this.range_index];
-      if (!req_range) {
-        break;
+      let reqRange = this.ranges.ranges[this.rangeIndex]
+      if (!reqRange) {
+        break
       }
-      debug('Current requested range is', req_range);
-      if (!req_range[1]) {
-        req_range = [req_range[0], Number.MAX_SAFE_INTEGER];
-        debug('Treating as', req_range);
+      debug('Current requested range is', reqRange)
+      if (!reqRange[1]) {
+        reqRange = [reqRange[0], Number.MAX_SAFE_INTEGER]
+        debug('Treating as', reqRange)
       }
 
       // No overlap in the chunk and requested range; don't write.
-      if (chunk_range[1] < req_range[0] || chunk_range[0] > req_range[1]) {
-        debug('Ignoring chunk; it is out of range.');
-        break;
+      if (chunkRange[1] < reqRange[0] || chunkRange[0] > reqRange[1]) {
+        debug('Ignoring chunk; it is out of range.')
+        break
       }
 
       // Since there is overlap, find the segment that's entirely within the
       // chunk.
-      var segment = [
-        Math.max(chunk_range[0], req_range[0]),
-        Math.min(chunk_range[1], req_range[1]),
-      ];
-      debug('Segment to send within chunk is', segment);
+      const segment = [Math.max(chunkRange[0], reqRange[0]), Math.min(chunkRange[1], reqRange[1])]
+      debug('Segment to send within chunk is', segment)
 
       // Normalize the segment to a chunk offset
-      var start = segment[0] - this.read_offset;
-      var end = segment[1] - this.read_offset;
-      var len = end - start + 1;
-      debug('Offsets into buffer are', [start, end], 'with length', len);
+      const start = segment[0] - this.readOffset
+      const end = segment[1] - this.readOffset
+      const len = end - start + 1
+      debug('Offsets into buffer are', [start, end], 'with length', len)
 
       // Write the slice that we want to write. We first create a buffer from the
       // chunk. Then we slice a new buffer from the same underlying ArrayBuffer,
       // starting at the original buffer's offset, further offset by the segment
       // start. The segment length bounds the end of our slice.
-      var buf = Buffer.from(chunk, 'binary');
-      this.response.write(Buffer.from(buf.buffer, buf.byteOffset + start, len));
+      const buf = Buffer.from(chunk, 'binary')
+      this.response.write(Buffer.from(buf.buffer, buf.byteOffset + start, len))
 
       // If the requested range is finished, we should start the next one.
-      if (req_range[1] > chunk_range[1]) {
-        debug('Chunk is finished, but the requested range is missing bytes.');
-        break;
+      if (reqRange[1] > chunkRange[1]) {
+        debug('Chunk is finished, but the requested range is missing bytes.')
+        break
       }
 
-      if (req_range[1] <= chunk_range[1]) {
-        debug('Range is finished.');
-        if (!this.next_range(segment)) {
-          break;
+      if (reqRange[1] <= chunkRange[1]) {
+        debug('Range is finished.')
+        if (!this.nextRange(segment)) {
+          break
         }
       }
     }
 
     // Update read offset when chunk is finished.
-    this.read_offset += chunk.length;
+    this.readOffset += chunk.length
   }
 
-
-  start()
-  {
+  start() {
     // Before we start streaming, let's ensure our ranges don't contain any
     // without start - if they do, we nuke them all and treat this as a full
     // request.
-    var nuke = false;
+    let nuke = false
     if (this.ranges) {
-      for (var i in this.ranges.ranges) {
+      for (const i in this.ranges.ranges) {
         if (typeof this.ranges.ranges[i][0] === 'undefined') {
-          nuke = true;
-          break;
+          nuke = true
+          break
         }
       }
     }
     if (nuke) {
-      this.ranges = undefined;
+      this.ranges = undefined
     }
 
     // Register callbacks. Store them in a handlers object so we can
     // keep the bound version around for stopping to listen to events.
-    this.handlers['error'] = this.on_error.bind(this);
-    this.handlers['end'] = this.on_end.bind(this);
+    this.handlers.error = this.onError.bind(this)
+    this.handlers.end = this.onEnd.bind(this)
 
     if (this.ranges) {
-      debug('Preparing to handle ranges.');
-      this.handlers['open'] = this.on_open_ranges.bind(this);
-      this.handlers['data'] = this.on_data_ranges.bind(this);
-    }
-    else {
-      debug('No ranges, just send the whole file.');
-      this.handlers['open'] = this.on_open_no_range.bind(this);
-      this.handlers['data'] = this.on_data_no_range.bind(this);
+      debug('Preparing to handle ranges.')
+      this.handlers.open = this.onOpenRanges.bind(this)
+      this.handlers.data = this.onDataRanges.bind(this)
+    } else {
+      debug('No ranges, just send the whole file.')
+      this.handlers.open = this.onOpenNoRange.bind(this)
+      this.handlers.data = this.onDataNoRange.bind(this)
     }
 
-    for (var handler in this.handlers) {
-      this.stream.on(handler, this.handlers[handler]);
+    for (const handler in this.handlers) {
+      this.stream.on(handler, this.handlers[handler])
     }
   }
 }
 
-
-function send(response, stream, opts, end_callback)
-{
-  var sender = new RangeSender(response, stream, opts, end_callback);
-  sender.start();
+function send(response, stream, opts, endCallback) {
+  const sender = new RangeSender(response, stream, opts, endCallback)
+  sender.start()
 }
 
-
 /*
  * Exports
  */
 
-module.exports =
-{
-  parse: parse,
-  parseAsync: parseAsync,
-  RangeSender: RangeSender,
-  send: send,
-};
+module.exports = {
+  parse,
+  parseAsync,
+  RangeSender,
+  send,
+}

+ 5 - 6
storage-node/packages/util/stripEndingSlash.js

@@ -1,10 +1,9 @@
 // return url with last `/` removed
 function removeEndingForwardSlash(url) {
-    let st = new String(url)
-    if (st.endsWith('/')) {
-        return st.substring(0, st.length - 1);
-    }
-    return st.toString()
+  if (url.endsWith('/')) {
+    return url.substring(0, url.length - 1)
+  }
+  return url.toString()
 }
 
-module.exports = removeEndingForwardSlash
+module.exports = removeEndingForwardSlash

+ 38 - 50
storage-node/packages/util/test/fs/resolve.js

@@ -16,65 +16,53 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const path = require('path');
+const expect = require('chai').expect
+const path = require('path')
 
-const resolve = require('@joystream/storage-utils/fs/resolve');
+const resolve = require('@joystream/storage-utils/fs/resolve')
 
-function tests(base)
-{
-  it('resolves absolute paths relative to the base', function()
-  {
-    const resolved = resolve(base, '/foo');
-    const relative = path.relative(base, resolved);
-    expect(relative).to.equal('foo');
-  });
+function tests(base) {
+  it('resolves absolute paths relative to the base', function() {
+    const resolved = resolve(base, '/foo')
+    const relative = path.relative(base, resolved)
+    expect(relative).to.equal('foo')
+  })
 
-  it('allows for relative paths that stay in the base', function()
-  {
-    const resolved = resolve(base, 'foo/../bar');
-    const relative = path.relative(base, resolved);
-    expect(relative).to.equal('bar');
-  });
+  it('allows for relative paths that stay in the base', function() {
+    const resolved = resolve(base, 'foo/../bar')
+    const relative = path.relative(base, resolved)
+    expect(relative).to.equal('bar')
+  })
 
-  it('prevents relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, '../foo')).to.throw();
-  });
+  it('prevents relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, '../foo')).to.throw()
+  })
 
-  it('prevents long relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, '../../../foo')).to.throw();
-  });
+  it('prevents long relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, '../../../foo')).to.throw()
+  })
 
-  it('prevents sneaky relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, 'foo/../../../bar')).to.throw();
-  });
+  it('prevents sneaky relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, 'foo/../../../bar')).to.throw()
+  })
 }
 
-describe('util/fs/resolve', function()
-{
-  describe('slash base', function()
-  {
-    tests('/');
-  });
+describe('util/fs/resolve', function() {
+  describe('slash base', function() {
+    tests('/')
+  })
 
-  describe('empty base', function()
-  {
-    tests('');
-  });
+  describe('empty base', function() {
+    tests('')
+  })
 
-  describe('short base', function()
-  {
-    tests('/base');
-  });
+  describe('short base', function() {
+    tests('/base')
+  })
 
-  describe('long base', function()
-  {
-    tests('/this/base/is/very/long/indeed');
-  });
-});
+  describe('long base', function() {
+    tests('/this/base/is/very/long/indeed')
+  })
+})

+ 29 - 31
storage-node/packages/util/test/fs/walk.js

@@ -16,54 +16,52 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const temp = require('temp').track();
+const expect = require('chai').expect
+// Disabling the rule because of the 'temp' package API.
+// eslint-disable-next-line no-unused-vars
+const temp = require('temp').track()
 
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
-const fswalk = require('@joystream/storage-utils/fs/walk');
+const fswalk = require('@joystream/storage-utils/fs/walk')
 
-function walktest(archive, base, done)
-{
-  var results = new Map();
+function walktest(archive, base, done) {
+  const results = new Map()
 
   fswalk(base, archive, (err, relname, stat, linktarget) => {
-    expect(err).to.be.null;
+    expect(err).to.be.null
 
     if (relname) {
-      results.set(relname, [stat, linktarget]);
-      return;
+      results.set(relname, [stat, linktarget])
+      return
     }
 
     // End of data, do testing
-    const entries = Array.from(results.keys());
-    expect(entries).to.include('foo');
-    expect(results.get('foo')[0].isDirectory()).to.be.true;
+    const entries = Array.from(results.keys())
+    expect(entries).to.include('foo')
+    expect(results.get('foo')[0].isDirectory()).to.be.true
 
-    expect(entries).to.include('bar');
-    expect(results.get('bar')[0].isFile()).to.be.true;
+    expect(entries).to.include('bar')
+    expect(results.get('bar')[0].isFile()).to.be.true
 
     if (archive === fs) {
-      expect(entries).to.include('quux');
-      expect(results.get('quux')[0].isSymbolicLink()).to.be.true;
-      expect(results.get('quux')[1]).to.equal('foo/baz');
+      expect(entries).to.include('quux')
+      expect(results.get('quux')[0].isSymbolicLink()).to.be.true
+      expect(results.get('quux')[1]).to.equal('foo/baz')
     }
 
-    expect(entries).to.include('foo/baz');
-    expect(results.get('foo/baz')[0].isFile()).to.be.true;
+    expect(entries).to.include('foo/baz')
+    expect(results.get('foo/baz')[0].isFile()).to.be.true
 
-    done();
-  });
+    done()
+  })
 }
 
-describe('util/fs/walk', function()
-{
-  it('reports all files in a file system hierarchy', function(done)
-  {
+describe('util/fs/walk', function() {
+  it('reports all files in a file system hierarchy', function(done) {
     walktest(fs, path.resolve(__dirname, '../data'), done)
-  });
-});
+  })
+})

+ 119 - 131
storage-node/packages/util/test/lru.js

@@ -16,149 +16,137 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
+const expect = require('chai').expect
 
-const lru = require('@joystream/storage-utils/lru');
+const lru = require('@joystream/storage-utils/lru')
 
-const DEFAULT_SLEEP = 1;
-function sleep(ms = DEFAULT_SLEEP)
-{
+const DEFAULT_SLEEP = 1
+function sleep(ms = DEFAULT_SLEEP) {
   return new Promise(resolve => {
     setTimeout(resolve, ms)
   })
 }
 
-describe('util/lru', function()
-{
-  describe('simple usage', function()
-  {
-    it('does not contain keys that were not added', function()
-    {
-      var cache = new lru.LRUCache();
-      expect(cache.size()).to.equal(0);
-
-      var val = cache.get('something');
-      expect(val).to.be.undefined;
-
-      expect(cache.has('something')).to.be.false;
-    });
-
-    it('contains keys that were added', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-
-      var val = cache.get('something');
-      expect(val).to.be.equal('yay!');
-
-      expect(cache.has('something')).to.be.true;
-    });
-
-    it('does not contain keys that were deleted', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-      var val = cache.get('something');
-      expect(val).to.be.equal('yay!');
-      expect(cache.has('something')).to.be.true;
-
-      cache.del('something');
-      expect(cache.size()).to.equal(0);
-      val = cache.get('something');
-      expect(val).to.be.undefined;
-      expect(cache.has('something')).to.be.false;
-    });
-
-    it('can be cleared', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-
-      cache.clear();
-      expect(cache.size()).to.equal(0);
-    });
-  });
-
-  describe('capacity management', function()
-  {
-    it('does not grow beyond capacity', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-    });
-
-    it('removes the oldest key when pruning', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-      expect(cache.has('foo')).to.be.true;
-
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('bar')).to.be.true;
-
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-      expect(cache.has('bar')).to.be.true;
-      expect(cache.has('baz')).to.be.true;
-    });
-
-    it('updates LRU timestamp when reading', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-      expect(cache.has('foo')).to.be.true;
+describe('util/lru', function() {
+  describe('simple usage', function() {
+    it('does not contain keys that were not added', function() {
+      const cache = new lru.LRUCache()
+      expect(cache.size()).to.equal(0)
+
+      const val = cache.get('something')
+      expect(val).to.be.undefined
+
+      expect(cache.has('something')).to.be.false
+    })
+
+    it('contains keys that were added', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+
+      const val = cache.get('something')
+      expect(val).to.be.equal('yay!')
+
+      expect(cache.has('something')).to.be.true
+    })
+
+    it('does not contain keys that were deleted', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+      let val = cache.get('something')
+      expect(val).to.be.equal('yay!')
+      expect(cache.has('something')).to.be.true
+
+      cache.del('something')
+      expect(cache.size()).to.equal(0)
+      val = cache.get('something')
+      expect(val).to.be.undefined
+      expect(cache.has('something')).to.be.false
+    })
+
+    it('can be cleared', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+
+      cache.clear()
+      expect(cache.size()).to.equal(0)
+    })
+  })
+
+  describe('capacity management', function() {
+    it('does not grow beyond capacity', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+
+      await sleep()
+
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+    })
+
+    it('removes the oldest key when pruning', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+      expect(cache.has('foo')).to.be.true
 
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('bar')).to.be.true;
+      await sleep()
 
-      await sleep();
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('bar')).to.be.true
+
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+      expect(cache.has('bar')).to.be.true
+      expect(cache.has('baz')).to.be.true
+    })
+
+    it('updates LRU timestamp when reading', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+      expect(cache.has('foo')).to.be.true
+
+      await sleep()
+
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('bar')).to.be.true
+
+      await sleep()
 
       // 'foo' is older than 'bar' right now, so should be pruned first. But
       // if we get 'foo', it would be 'bar' that has to go.
-      var _ = cache.get('foo');
+      cache.get('foo')
 
       // Makes debugging a bit more obvious
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('baz')).to.be.true;
-    });
-  });
-});
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('baz')).to.be.true
+    })
+  })
+})

+ 59 - 66
storage-node/packages/util/test/pagination.js

@@ -16,63 +16,55 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const mock_http = require('node-mocks-http');
+const expect = require('chai').expect
+const mockHttp = require('node-mocks-http')
 
-const pagination = require('@joystream/storage-utils/pagination');
+const pagination = require('@joystream/storage-utils/pagination')
 
-describe('util/pagination', function()
-{
-  describe('openapi()', function()
-  {
-    it('should add parameters and definitions to an API spec', function()
-    {
-      var api = pagination.openapi({});
+describe('util/pagination', function() {
+  describe('openapi()', function() {
+    it('should add parameters and definitions to an API spec', function() {
+      const api = pagination.openapi({})
 
       // Parameters
-      expect(api).to.have.property('components');
+      expect(api).to.have.property('components')
 
-      expect(api.components).to.have.property('parameters');
-      expect(api.components.parameters).to.have.property('paginationLimit');
+      expect(api.components).to.have.property('parameters')
+      expect(api.components.parameters).to.have.property('paginationLimit')
 
-      expect(api.components.parameters.paginationLimit).to.have.property('name');
-      expect(api.components.parameters.paginationLimit.name).to.equal('limit');
+      expect(api.components.parameters.paginationLimit).to.have.property('name')
+      expect(api.components.parameters.paginationLimit.name).to.equal('limit')
 
-      expect(api.components.parameters.paginationLimit).to.have.property('schema');
-      expect(api.components.parameters.paginationLimit.schema).to.have.property('type');
-      expect(api.components.parameters.paginationLimit.schema.type).to.equal('integer');
+      expect(api.components.parameters.paginationLimit).to.have.property('schema')
+      expect(api.components.parameters.paginationLimit.schema).to.have.property('type')
+      expect(api.components.parameters.paginationLimit.schema.type).to.equal('integer')
 
-      expect(api.components.parameters.paginationOffset).to.have.property('name');
-      expect(api.components.parameters.paginationOffset.name).to.equal('offset');
-
-      expect(api.components.parameters.paginationOffset).to.have.property('schema');
-      expect(api.components.parameters.paginationOffset.schema).to.have.property('type');
-      expect(api.components.parameters.paginationOffset.schema.type).to.equal('integer');
+      expect(api.components.parameters.paginationOffset).to.have.property('name')
+      expect(api.components.parameters.paginationOffset.name).to.equal('offset')
 
+      expect(api.components.parameters.paginationOffset).to.have.property('schema')
+      expect(api.components.parameters.paginationOffset.schema).to.have.property('type')
+      expect(api.components.parameters.paginationOffset.schema.type).to.equal('integer')
 
       // Defintiions
-      expect(api.components).to.have.property('schemas');
-      expect(api.components.schemas).to.have.property('PaginationInfo');
+      expect(api.components).to.have.property('schemas')
+      expect(api.components.schemas).to.have.property('PaginationInfo')
 
-      expect(api.components.schemas.PaginationInfo).to.have.property('type');
-      expect(api.components.schemas.PaginationInfo.type).to.equal('object');
+      expect(api.components.schemas.PaginationInfo).to.have.property('type')
+      expect(api.components.schemas.PaginationInfo.type).to.equal('object')
 
-      expect(api.components.schemas.PaginationInfo).to.have.property('properties');
+      expect(api.components.schemas.PaginationInfo).to.have.property('properties')
       expect(api.components.schemas.PaginationInfo.properties)
         .to.be.an('object')
-        .that.has.all.keys('self', 'next', 'prev', 'first', 'last');
-    });
-  });
-
+        .that.has.all.keys('self', 'next', 'prev', 'first', 'last')
+    })
+  })
 
-  describe('paginate()', function()
-  {
-    it('should add pagination links to a response object', function()
-    {
-      var req = mock_http.createRequest({
+  describe('paginate()', function() {
+    it('should add pagination links to a response object', function() {
+      const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10',
         query: {
@@ -82,21 +74,21 @@ describe('util/pagination', function()
           host: 'localhost',
         },
         protocol: 'http',
-      });
+      })
 
-      var res = pagination.paginate(req, {});
+      const res = pagination.paginate(req, {})
 
-      expect(res).to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next');
+      expect(res)
+        .to.have.property('pagination')
+        .that.has.all.keys('self', 'first', 'next')
 
-      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10');
-      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
-      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10');
-    });
+      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10')
+      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
+      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10')
+    })
 
-    it('should add a last pagination link when requested', function()
-    {
-      var req = mock_http.createRequest({
+    it('should add a last pagination link when requested', function() {
+      const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10&offset=15',
         query: {
@@ -107,18 +99,19 @@ describe('util/pagination', function()
           host: 'localhost',
         },
         protocol: 'http',
-      });
-
-      var res = pagination.paginate(req, {}, 35);
-
-      expect(res).to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next', 'prev', 'last');
-
-      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15');
-      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
-      expect(res.pagination.last).to.equal('http://localhost/foo?limit=10&offset=35');
-      expect(res.pagination.prev).to.equal('http://localhost/foo?limit=10&offset=5');
-      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=25');
-    });
-  });
-});
+      })
+
+      const res = pagination.paginate(req, {}, 35)
+
+      expect(res)
+        .to.have.property('pagination')
+        .that.has.all.keys('self', 'first', 'next', 'prev', 'last')
+
+      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15')
+      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
+      expect(res.pagination.last).to.equal('http://localhost/foo?limit=10&offset=35')
+      expect(res.pagination.prev).to.equal('http://localhost/foo?limit=10&offset=5')
+      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=25')
+    })
+  })
+})

Kaikkia tiedostoja ei voida näyttää, sillä liian monta tiedostoa muuttui tässä diffissä