
Merge branch 'content-directory-schemas' into cli-content-directory-classes

Leszek Wiesner committed 4 years ago
parent
commit
f1127d23d2
100 changed files with 8647 additions and 502 deletions
  1. + 2 - 1  .dockerignore
  2. + 2 - 2  .github/workflows/joystream-cli.yml
  3. + 37 - 0  .github/workflows/joystream-node-checks.yml
  4. + 79 - 0  .github/workflows/joystream-node-docker.yml
  5. + 2 - 2  .github/workflows/joystream-types.yml
  6. + 2 - 2  .github/workflows/network-tests.yml
  7. + 2 - 2  .github/workflows/pioneer.yml
  8. + 148 - 13  .github/workflows/run-network-tests.yml
  9. + 2 - 2  .github/workflows/storage-node.yml
  10. + 1 - 1  .gitignore
  11. + 0 - 43  .travis.yml
  12. + 43 - 8  Cargo.lock
  13. + 2 - 1  Cargo.toml
  14. + 9 - 1  README.md
  15. + 2 - 2  cli/package.json
  16. + 3 - 0  content-directory-schemas/.gitignore
  17. + 3 - 1  content-directory-schemas/package.json
  18. + 1 - 1  content-directory-schemas/scripts/devInitAliceLead.ts
  19. + 4 - 4  content-directory-schemas/scripts/initializeContentDir.ts
  20. + 1 - 1  content-directory-schemas/scripts/inputSchemasToEntitySchemas.ts
  21. + 1 - 1  content-directory-schemas/scripts/validate.ts
  22. + 2 - 2  content-directory-schemas/src/helpers/InputParser.ts
  23. + 0 - 0  content-directory-schemas/src/helpers/extrinsics.ts
  24. + 4 - 0  content-directory-schemas/src/helpers/index.ts
  25. + 0 - 0  content-directory-schemas/src/helpers/inputs.ts
  26. + 6 - 1  content-directory-schemas/src/helpers/propertyType.ts
  27. + 1 - 1  content-directory-schemas/tsconfig.json
  28. + 7 - 0  content-directory-schemas/tsconfig.lib.json
  29. + 0 - 22  devops/ansible/build-and-run-tests-exported-chainspec-playbook.yml
  30. + 0 - 22  devops/ansible/build-and-run-tests-single-node-playbook.yml
  31. + 0 - 22  devops/ansible/build-and-run-tests-two-nodes-playbook.yml
  32. + 0 - 18  devops/ansible/build-image-playbook.yml
  33. + 0 - 34  devops/ansible/docker-compose.yml
  34. + 0 - 2  devops/ansible/hosts
  35. + 0 - 4  devops/ansible/roles/alter_block_creation_time/tasks/main.yml
  36. + 0 - 4  devops/ansible/roles/build_docker_image/tasks/main.yml
  37. + 0 - 46  devops/ansible/roles/install_dependencies/tasks/main.yml
  38. + 0 - 38  devops/ansible/roles/run_tests_exported_chainspec/tasks/main.yml
  39. + 0 - 33  devops/ansible/roles/run_tests_single_node/tasks/main.yml
  40. + 0 - 18  devops/ansible/roles/run_tests_two_nodes/tasks/main.yml
  41. + 0 - 30  devops/dockerfiles/node-and-runtime/Dockerfile_experimental
  42. + 1 - 1  devops/git-hooks/pre-commit
  43. + 5 - 2  devops/git-hooks/pre-push
  44. + 32 - 0  docker-compose-with-storage.yml
  45. + 17 - 0  docker-compose.yml
  46. + 0 - 40  docs/using-docker/building-node-and-runtime.md
  47. + 5 - 1  joystream-node.Dockerfile
  48. + 0 - 9  node/README.md
  49. + 21 - 11  node/src/chain_spec/mod.rs
  50. + 2 - 5  package.json
  51. + 3 - 3  pioneer/package.json
  52. + 3 - 0  pioneer/packages/apps-routing/src/index.ts
  53. + 15 - 0  pioneer/packages/apps-routing/src/joy-tokenomics.ts
  54. + 1 - 0  pioneer/packages/apps/public/locales/en/index.json
  55. + 3 - 0  pioneer/packages/apps/public/locales/en/joy-tokenomics.json
  56. + 1 - 0  pioneer/packages/apps/public/locales/en/translation.json
  57. + 2 - 0  pioneer/packages/apps/public/robots.txt
  58. + 37 - 30  pioneer/packages/joy-proposals/src/Proposal/discussion/DiscussionPostForm.tsx
  59. + 201 - 0  pioneer/packages/joy-tokenomics/LICENSE
  60. + 22 - 0  pioneer/packages/joy-tokenomics/README.md
  61. + 16 - 0  pioneer/packages/joy-tokenomics/package.json
  62. + 104 - 0  pioneer/packages/joy-tokenomics/src/Overview/OverviewTable.tsx
  63. + 226 - 0  pioneer/packages/joy-tokenomics/src/Overview/SpendingAndStakeDistributionTable.tsx
  64. + 93 - 0  pioneer/packages/joy-tokenomics/src/Overview/TokenomicsCharts.tsx
  65. + 59 - 0  pioneer/packages/joy-tokenomics/src/Overview/index.tsx
  66. + 34 - 0  pioneer/packages/joy-tokenomics/src/index.tsx
  67. + 9 - 0  pioneer/packages/joy-tokenomics/src/translate.ts
  68. + 5 - 0  pioneer/packages/joy-utils/src/consts/staking.ts
  69. + 32 - 0  pioneer/packages/joy-utils/src/functions/staking.ts
  70. + 1 - 0  pioneer/packages/joy-utils/src/react/hooks/index.ts
  71. + 26 - 0  pioneer/packages/joy-utils/src/react/hooks/useWindowDimensions.ts
  72. + 3 - 0  pioneer/packages/joy-utils/src/transport/index.ts
  73. + 344 - 0  pioneer/packages/joy-utils/src/transport/tokenomics.ts
  74. + 53 - 0  pioneer/packages/joy-utils/src/types/tokenomics.ts
  75. + 6 - 6  pioneer/packages/page-accounts/src/Accounts/index.tsx
  76. + 3 - 3  pioneer/packages/page-accounts/src/index.tsx
  77. + 4 - 1  pioneer/packages/page-js/src/Playground.tsx
  78. + 3 - 3  pioneer/packages/page-staking/src/index.tsx
  79. + 67 - 0  pioneer/packages/react-components/src/Chart/PieChart.tsx
  80. + 3 - 1  pioneer/tsconfig.json
  81. + 4 - 0  runtime-modules/common/Cargo.toml
  82. + 5 - 1  runtime-modules/common/src/working_group.rs
  83. + 30 - 0  runtime-modules/content-directory/Cargo.toml
  84. + 244 - 0  runtime-modules/content-directory/src/class.rs
  85. + 202 - 0  runtime-modules/content-directory/src/entity.rs
  86. + 236 - 0  runtime-modules/content-directory/src/errors.rs
  87. + 340 - 0  runtime-modules/content-directory/src/helpers.rs
  88. + 2710 - 0  runtime-modules/content-directory/src/lib.rs
  89. + 1029 - 0  runtime-modules/content-directory/src/mock.rs
  90. + 141 - 0  runtime-modules/content-directory/src/operations.rs
  91. + 127 - 0  runtime-modules/content-directory/src/permissions.rs
  92. + 158 - 0  runtime-modules/content-directory/src/permissions/class.rs
  93. + 116 - 0  runtime-modules/content-directory/src/permissions/curator_group.rs
  94. + 176 - 0  runtime-modules/content-directory/src/permissions/entity.rs
  95. + 56 - 0  runtime-modules/content-directory/src/permissions/entity_creation_voucher.rs
  96. + 91 - 0  runtime-modules/content-directory/src/schema.rs
  97. + 62 - 0  runtime-modules/content-directory/src/schema/convert.rs
  98. + 137 - 0  runtime-modules/content-directory/src/schema/input.rs
  99. + 309 - 0  runtime-modules/content-directory/src/schema/output.rs
  100. + 646 - 0  runtime-modules/content-directory/src/schema/property.rs

+ 2 - 1
.dockerignore

@@ -1,2 +1,3 @@
 **target*
-**node_modules*
+**node_modules*
+.tmp/

+ 2 - 2
.github/workflows/joystream-cli.yml

@@ -17,7 +17,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile
-        yarn workspace @joystream/cli checks
+        yarn workspace @joystream/cli checks --quiet
     - name: yarn pack test
       run: |
         yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
@@ -39,7 +39,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/cli checks
+        yarn workspace @joystream/cli checks --quiet
     - name: yarn pack test
       run: |
         yarn workspace @joystream/cli pack --filename cli-pack-test.tgz

+ 37 - 0
.github/workflows/joystream-node-checks.yml

@@ -0,0 +1,37 @@
+name: joystream-node-checks
+on:
+  pull_request:
+
+jobs:
+  checks:
+    name: joystream-node checks
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '12.x'
+      - uses: technote-space/get-diff-action@v3
+        with:
+          PREFIX_FILTER: |
+            node
+            runtime-modules
+            utils/chain-spec-builder
+          SUFFIX_FILTER: |
+            .rs
+          FILES: |
+            Cargo.lock
+            Cargo.toml
+
+      # TODO: Look for change in source code but no corresponding version bump of runtime or binaries
+      # - name: Check version modified correctly
+      #   if: env.GIT_DIFF
+
+      # Building natively is not really necessary, because the docker build that
+      # happens in the run-network-tests workflow is sufficient.
+      # - name: Build if runtime was modified
+      #   run: |
+      #     ./setup.sh
+      #     yarn cargo-checks
+      #     yarn cargo-build
+      #   if: env.GIT_DIFF

+ 79 - 0
.github/workflows/joystream-node-docker.yml

@@ -0,0 +1,79 @@
+name: joystream-node-docker
+on: push
+
+jobs:
+  build:
+    name: Build joystream/node Docker image
+    if: github.repository == 'Joystream/joystream'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '12.x'
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Setup cache directory
+        run: mkdir ~/docker-images
+
+      - name: Cache docker images
+        uses: actions/cache@v2
+        env:
+          cache-name: joystream-node-docker
+        with:
+          path: ~/docker-images
+          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Check if we have cached image
+        continue-on-error: true
+        run: |
+          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
+            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
+            cp ~/docker-images/joystream-node-docker-image.tar.gz .
+          fi
+
+      - name: Check if we have pre-built image on Dockerhub
+        continue-on-error: true
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
+            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
+            docker save --output joystream-node-docker-image.tar joystream/node:latest
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+          fi
+
+      - name: Build new joystream/node image
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker build . --file joystream-node.Dockerfile --tag joystream/node
+            docker save --output joystream-node-docker-image.tar joystream/node
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+            echo "::set-env name=NEW_BUILD::true"
+          fi
+
+      - name: Save joystream/node image to Artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
+          path: joystream-node-docker-image.tar.gz
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+        if: env.NEW_BUILD
+
+      - name: Publish new image to DockerHub
+        run: |
+          docker image tag joystream/node joystream/node:${{ steps.compute_shasum.outputs.shasum }}
+          docker push joystream/node:${{ steps.compute_shasum.outputs.shasum }}
+        if: env.NEW_BUILD
+  
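Both the cache key and the published DockerHub tag are derived from a shasum of the runtime source, so the image is only rebuilt when that code actually changes. A minimal sketch of reusing the published image locally, assuming `scripts/runtime-code-shasum.sh` prints the same shasum the workflow computes:

```bash
# Pull the CI-built image for the current runtime code and tag it as latest
TAG=$(scripts/runtime-code-shasum.sh)
docker pull joystream/node:"${TAG}"
docker image tag joystream/node:"${TAG}" joystream/node:latest
```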

+ 2 - 2
.github/workflows/joystream-types.yml

@@ -17,7 +17,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile
-        yarn workspace @joystream/types checks
+        yarn workspace @joystream/types checks --quiet
     - name: npm pack test
       run: |
         cd types
@@ -40,7 +40,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types checks
+        yarn workspace @joystream/types checks --quiet
     - name: npm pack test
       run: |
         cd types

+ 2 - 2
.github/workflows/network-tests.yml

@@ -17,7 +17,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile
-        yarn workspace joystream-testing checks
+        yarn workspace network-tests checks --quiet
 
   network_build_osx:
     name: MacOS Checks
@@ -34,4 +34,4 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace joystream-testing checks
+        yarn workspace network-tests checks --quiet

+ 2 - 2
.github/workflows/pioneer.yml

@@ -51,7 +51,7 @@ jobs:
     - name: lint
       run: |
         yarn install --frozen-lockfile
-        yarn workspace pioneer lint
+        yarn workspace pioneer lint --quiet
 
   pioneer_lint_osx:
     name: MacOS Linting
@@ -68,4 +68,4 @@ jobs:
     - name: lint
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace pioneer lint
+        yarn workspace pioneer lint --quiet

+ 148 - 13
.github/workflows/run-network-tests.yml

@@ -1,25 +1,160 @@
 name: run-network-tests
 on:
   pull_request:
-    types: [labeled]
+    types: [opened, labeled, synchronize]
+
   workflow_dispatch:
+    # TODO: add an input so dispatcher can specify a list of tests to run,
+    # composed of the job ids separated by `:`
+    # for eg.
+    #   'network_tests_1:network_tests_3'
+    #   'network_tests_2'
+    # inputs:
+    #   test_to_run:
+    #     description: 'Tests to run'
+    #     required: false
+    #     default: 'all'
 
 jobs:
-  run_ansible_tests:
-    if: github.event.label.name == 'run-network-tests' || github.event.action == null
-    name: run network tests using ansible
+  build_images:
+    name: Build joystream/node
+    runs-on: ubuntu-latest
+    outputs:
+      use_artifact: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '12.x'
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Setup cache directory
+        run: mkdir ~/docker-images
+
+      - name: Cache docker images
+        uses: actions/cache@v2
+        env:
+          cache-name: joystream-node-docker
+        with:
+          path: ~/docker-images
+          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Check if we have cached image
+        continue-on-error: true
+        run: |
+          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
+            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
+            cp ~/docker-images/joystream-node-docker-image.tar.gz .
+          fi
+
+      - name: Check if we have pre-built image on Dockerhub
+        continue-on-error: true
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
+            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
+            docker save --output joystream-node-docker-image.tar joystream/node:latest
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+          fi
+
+      - name: Build new joystream/node image
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker build . --file joystream-node.Dockerfile --tag joystream/node
+            docker save --output joystream-node-docker-image.tar joystream/node
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+          fi
+
+      - name: Save joystream/node image to Artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
+          path: joystream-node-docker-image.tar.gz
+  
+  network_tests_1:
+    name: Network Integration Runtime Tests
+    if: contains(github.event.pull_request.labels.*.name, 'run-network-tests')
+    needs: build_images
     runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        node-version: [12.x]
     steps:
       - uses: actions/checkout@v1
       - uses: actions/setup-node@v1
         with:
-          node-version: ${{ matrix.node-version }}
-      - name: install toolchain
-        run: curl https://getsubstrate.io -sSf | bash -s -- --fast
-      - name: ansible build and tests
+          node-version: '12.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
+        run: yarn install --frozen-lockfile
+      - name: Ensure tests are runnable
+        run: yarn workspace network-tests build
+      - name: Execute network tests
+        run: tests/network-tests/run-tests.sh
+
+  network_tests_2:
+    name: Query Node Tests (Placeholder)
+    if: contains(github.event.pull_request.labels.*.name, 'run-network-tests')
+    needs: build_images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '12.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
+        run: yarn install --frozen-lockfile
+      - name: Ensure tests are runnable
+        run: yarn workspace network-tests build
+      - name: Start chain
+        run: docker-compose up -d
+      # - name: Execute network tests
+      #   run: yarn workspace network-tests test
+
+  network_tests_3:
+    name: Storage Node Tests
+    if: contains(github.event.pull_request.labels.*.name, 'run-network-tests')
+    needs: build_images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '12.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
         run: |
-          cd ./devops/ansible
-          ansible-playbook -i hosts build-and-run-tests-single-node-playbook.yml --become -v
+          yarn install --frozen-lockfile
+          yarn workspace storage-node build
+      - name: Build storage node
+        run: yarn workspace storage-node build
+      - name: Start chain
+        run: docker-compose up -d
+      - name: Execute tests
+        run: DEBUG=* yarn storage-cli dev-init
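All three test jobs start from the same artifact produced by `build_images` and then run their suites against a local chain. A rough local equivalent of the `network_tests_1` job, assuming a `joystream/node` image is already available locally (pulled or built as above):

```bash
# Install JS dependencies, compile the tests, then run them against a dev chain
yarn install --frozen-lockfile
yarn workspace network-tests build
tests/network-tests/run-tests.sh
```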

+ 2 - 2
.github/workflows/storage-node.yml

@@ -17,7 +17,7 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile
-        yarn workspace storage-node checks
+        yarn workspace storage-node checks --quiet
 
   storage_node_build_osx:
     name: MacOS Checks
@@ -34,4 +34,4 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace storage-node checks
+        yarn workspace storage-node checks --quiet

+ 1 - 1
.gitignore

@@ -28,7 +28,7 @@ yarn*
 *.wasm
 
 # Temporary files
-.tmp/
+**.tmp/
 
 # Istanbul report output
 **.nyc_output/

+ 0 - 43
.travis.yml

@@ -1,43 +0,0 @@
-language: rust
-
-# Caching of the runtime .wasm blob poses a problem.
-# See: https://github.com/Joystream/joystream/issues/466
-# Always starting with a clean slate is probably better, it allows us to ensure
-# the WASM runtime is always rebuilt. It also allows us to detect when certain upstream dependencies
-# sometimes break the build. When cache is enabled do not use the produced WASM build.
-# This also means the binary should not be used to produce the final chainspec file (because the same
-# one is embedded in the binary)
-# cache: cargo
-
-rust: stable
-
-# Skip Rust build in a pull request if no rust project files were modified
-before_install:
-  - |
-    if [ "$TRAVIS_PULL_REQUEST" != "false" ]
-      then
-      if ! git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qE "(.rs|Cargo.(lock|toml))$"
-        then
-        echo "No changes to Rust or Cargo Files, CI not running."
-        travis_terminate 0
-      fi
-    fi
-
-install:
-  - rustup install nightly-2020-05-23 --force
-  - rustup target add wasm32-unknown-unknown --toolchain nightly-2020-05-23
-  # travis installs rust using rustup with the "minimal" profile so these tools are not installed by default
-  - rustup component add rustfmt
-  - rustup component add clippy
-
-before_script:
-  - cargo fmt --all -- --check
-
-script:
-  - export WASM_BUILD_TOOLCHAIN=nightly-2020-05-23
-  - BUILD_DUMMY_WASM_BINARY=1 cargo clippy --release --all -- -D warnings
-  - travis_wait 75 cargo test --release --verbose --all -- --ignored
-  - cargo build --release
-  - ls -l ./target/release/wbuild/joystream-node-runtime/
-  - ./target/release/joystream-node --version
-  - ./target/release/chain-spec-builder --version

+ 43 - 8
Cargo.lock

@@ -2053,7 +2053,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node-runtime"
-version = "7.5.1"
+version = "7.5.2"
 dependencies = [
  "frame-benchmarking",
  "frame-executive",
@@ -2067,6 +2067,7 @@ dependencies = [
  "pallet-balances",
  "pallet-collective",
  "pallet-common",
+ "pallet-content-directory",
  "pallet-content-working-group",
  "pallet-finality-tracker",
  "pallet-forum",
@@ -2116,6 +2117,7 @@ dependencies = [
  "sp-std",
  "sp-transaction-pool",
  "sp-version",
+ "strum 0.19.2",
  "substrate-wasm-builder-runner",
 ]
 
@@ -3300,6 +3302,23 @@ dependencies = [
  "parity-scale-codec",
  "serde",
  "sp-runtime",
+ "strum 0.19.2",
+ "strum_macros 0.19.2",
+]
+
+[[package]]
+name = "pallet-content-directory"
+version = "3.0.0"
+dependencies = [
+ "frame-support",
+ "frame-system",
+ "parity-scale-codec",
+ "serde",
+ "sp-arithmetic",
+ "sp-core",
+ "sp-io",
+ "sp-runtime",
+ "sp-std",
 ]
 
 [[package]]
@@ -3513,7 +3532,6 @@ dependencies = [
  "frame-system",
  "pallet-balances",
  "pallet-common",
- "pallet-content-working-group",
  "pallet-governance",
  "pallet-hiring",
  "pallet-membership",
@@ -3525,8 +3543,6 @@ dependencies = [
  "pallet-staking-reward-curve",
  "pallet-timestamp",
  "pallet-token-mint",
- "pallet-versioned-store",
- "pallet-versioned-store-permissions",
  "pallet-working-group",
  "parity-scale-codec",
  "serde",
@@ -3536,6 +3552,7 @@ dependencies = [
  "sp-runtime",
  "sp-staking",
  "sp-std",
+ "strum 0.19.2",
 ]
 
 [[package]]
@@ -3980,9 +3997,9 @@ dependencies = [
 
 [[package]]
 name = "parity-scale-codec-derive"
-version = "1.2.0"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a0ec292e92e8ec7c58e576adacc1e3f399c597c8f263c42f18420abe58e7245"
+checksum = "198db82bb1c18fc00176004462dd809b2a6d851669550aa17af6dacd21ae0c14"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -6443,7 +6460,7 @@ dependencies = [
  "lazy_static",
  "sp-core",
  "sp-runtime",
- "strum",
+ "strum 0.16.0",
 ]
 
 [[package]]
@@ -6786,9 +6803,15 @@ version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6138f8f88a16d90134763314e3fc76fa3ed6a7db4725d6acf9a3ef95a3188d22"
 dependencies = [
- "strum_macros",
+ "strum_macros 0.16.0",
 ]
 
+[[package]]
+name = "strum"
+version = "0.19.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3924a58d165da3b7b2922c667ab0673c7b5fd52b5c19ea3442747bcb3cd15abe"
+
 [[package]]
 name = "strum_macros"
 version = "0.16.0"
@@ -6801,6 +6824,18 @@ dependencies = [
  "syn 1.0.17",
 ]
 
+[[package]]
+name = "strum_macros"
+version = "0.19.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d2ab682ecdcae7f5f45ae85cd7c1e6c8e68ea42c8a612d47fedf831c037146a"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote 1.0.7",
+ "syn 1.0.17",
+]
+
 [[package]]
 name = "substrate-bip39"
 version = "0.4.1"

+ 2 - 1
Cargo.toml

@@ -19,10 +19,11 @@ members = [
 	"runtime-modules/versioned-store",
 	"runtime-modules/versioned-store-permissions",
 	"runtime-modules/working-group",
+	"runtime-modules/content-directory",
 	"node",
 	"utils/chain-spec-builder/"
 ]
 
 [profile.release]
 # Substrate runtime requires unwinding.
-panic = "unwind"
+panic = "unwind"

+ 9 - 1
README.md

@@ -18,7 +18,7 @@ The following tools are required for building, testing and contributing to this
 - [Rust](https://www.rust-lang.org/tools/install) toolchain - _required_
 - [nodejs](https://nodejs.org/) v12.x - _required_
 - [yarn classic](https://classic.yarnpkg.com/en/docs/install) package manager v1.22.x- _required_
-- [docker](https://www.docker.com/get-started) - _optional_
+- [docker](https://www.docker.com/get-started) and docker-compose - _optional_
 - [ansible](https://www.ansible.com/) - _optional_
 
 If you use VSCode as your code editor we recommend using the workspace [settings](devops/vscode/settings.json) for recommend eslint plugin to function properly.
@@ -105,6 +105,14 @@ Learn more about [joystream-node](node/README.md).
 
 A step by step guide to setup a full node and validator on the Joystream testnet, can be found [here](https://github.com/Joystream/helpdesk/tree/master/roles/validators).
 
+### Integration tests
+
+```bash
+docker-compose up -d
+yarn workspace network-tests test
+docker-compose down
+```
+
 ### Contributing
 
 We have lots of good first [issues](https://github.com/Joystream/joystream/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) open to help you get started on contributing code. If you are not a developer you can still make valuable contributions by testing our software and providing feedback and opening new issues.

+ 2 - 2
cli/package.json

@@ -105,8 +105,8 @@
     "test": "nyc --extension .ts mocha --forbid-only \"test/**/*.test.ts\"",
     "build": "tsc --build tsconfig.json",
     "version": "oclif-dev readme && git add README.md",
-    "lint": "eslint ./ --quiet --ext .ts",
-    "checks": "yarn lint && tsc --noEmit --pretty && prettier ./ --check",
+    "lint": "eslint ./ --ext .ts",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write"
   },
   "types": "lib/index.d.ts"

+ 3 - 0
content-directory-schemas/.gitignore

@@ -5,3 +5,6 @@ schemas/entities
 schemas/entityBatches
 schemas/entityReferences
 types/entities
+
+# Build
+lib

+ 3 - 1
content-directory-schemas/package.json

@@ -3,8 +3,10 @@
   "version": "0.1.0",
   "description": "JSON schemas, inputs and related tooling for Joystream content directory 2.0",
   "author": "Joystream contributors",
+  "main": "lib/index.js",
   "scripts": {
-    "lint": "eslint ./ --ext .ts",
+    "build": "tsc --build tsconfig.lib.json",
+    "lint": "eslint ./ --ext .ts --ignore-path .gitignore",
     "ts-check": "tsc --noEmit --pretty",
     "pretty": "prettier ./ --write --ignore-path .gitignore",
     "validate": "ts-node ./scripts/validate.ts",

+ 1 - 1
content-directory-schemas/scripts/devInitAliceLead.ts

@@ -1,7 +1,7 @@
 import { types } from '@joystream/types'
 import { ApiPromise, WsProvider } from '@polkadot/api'
 import { SubmittableExtrinsic } from '@polkadot/api/types'
-import { ExtrinsicsHelper, getAlicePair } from './helpers/extrinsics'
+import { ExtrinsicsHelper, getAlicePair } from '../src/helpers/extrinsics'
 
 async function main() {
   // Init api

+ 4 - 4
content-directory-schemas/scripts/initializeContentDir.ts

@@ -2,12 +2,12 @@ import { CreateClass } from '../types/extrinsics/CreateClass'
 import { AddClassSchema } from '../types/extrinsics/AddClassSchema'
 import { types } from '@joystream/types'
 import { ApiPromise, WsProvider } from '@polkadot/api'
-import { getInputs } from './helpers/inputs'
+import { getInputs } from '../src/helpers/inputs'
 import fs from 'fs'
 import path from 'path'
-import { EntityBatch } from 'types/EntityBatch'
-import { InputParser } from './helpers/InputParser'
-import { ExtrinsicsHelper, getAlicePair } from './helpers/extrinsics'
+import { EntityBatch } from '../types/EntityBatch'
+import { InputParser } from '../src/helpers/InputParser'
+import { ExtrinsicsHelper, getAlicePair } from '../src/helpers/extrinsics'
 
 // Save entity operations output here for easier debugging
 const ENTITY_OPERATIONS_OUTPUT_PATH = path.join(__dirname, '../operations.json')

+ 1 - 1
content-directory-schemas/scripts/inputSchemasToEntitySchemas.ts

@@ -10,7 +10,7 @@ import {
   VecPropertyVariant,
 } from '../types/extrinsics/AddClassSchema'
 import PRIMITIVE_PROPERTY_DEFS from '../schemas/propertyValidationDefs.schema.json'
-import { getInputs } from './helpers/inputs'
+import { getInputs } from '../src/helpers/inputs'
 import { JSONSchema7 } from 'json-schema'
 
 const SINGLE_ENTITY_SCHEMAS_LOCATION = path.join(__dirname, '../schemas/entities')

+ 1 - 1
content-directory-schemas/scripts/validate.ts

@@ -1,6 +1,6 @@
 // TODO: Add entity batches validation
 import Ajv from 'ajv'
-import { FetchedInput, getInputs, InputType, INPUT_TYPES } from './helpers/inputs'
+import { FetchedInput, getInputs, InputType, INPUT_TYPES } from '../src/helpers/inputs'
 import path from 'path'
 import fs from 'fs'
 import $RefParser from '@apidevtools/json-schema-ref-parser'

+ 2 - 2
content-directory-schemas/scripts/helpers/InputParser.ts → content-directory-schemas/src/helpers/InputParser.ts

@@ -12,8 +12,8 @@ import {
 import { isSingle, isReference } from './propertyType'
 import { ApiPromise } from '@polkadot/api'
 import { JoyBTreeSet } from '@joystream/types/common'
-import { CreateClass } from 'types/extrinsics/CreateClass'
-import { EntityBatch } from 'types/EntityBatch'
+import { CreateClass } from '../../types/extrinsics/CreateClass'
+import { EntityBatch } from '../../types/EntityBatch'
 
 export class InputParser {
   private api: ApiPromise

+ 0 - 0
content-directory-schemas/scripts/helpers/extrinsics.ts → content-directory-schemas/src/helpers/extrinsics.ts


+ 4 - 0
content-directory-schemas/src/helpers/index.ts

@@ -0,0 +1,4 @@
+export { ExtrinsicsHelper, getAlicePair } from './extrinsics'
+export { InputParser } from './InputParser'
+export { getInputs, getInputsLocation } from './inputs'
+export { isReference, isSingle } from './propertyType'

+ 0 - 0
content-directory-schemas/scripts/helpers/inputs.ts → content-directory-schemas/src/helpers/inputs.ts


+ 6 - 1
content-directory-schemas/scripts/helpers/propertyType.ts → content-directory-schemas/src/helpers/propertyType.ts

@@ -1,4 +1,9 @@
-import { Property, ReferenceProperty, SinglePropertyType, SinglePropertyVariant } from 'types/extrinsics/AddClassSchema'
+import {
+  Property,
+  ReferenceProperty,
+  SinglePropertyType,
+  SinglePropertyVariant,
+} from '../../types/extrinsics/AddClassSchema'
 
 type PropertyType = Property['property_type']
 

+ 1 - 1
content-directory-schemas/tsconfig.json

@@ -23,5 +23,5 @@
       "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"]
     }
   },
-  "include": [ "scripts/**/*", "typings/**/*" ]
+  "include": [ "src/**/*", "scripts/**/*", "typings/**/*" ]
 }

+ 7 - 0
content-directory-schemas/tsconfig.lib.json

@@ -0,0 +1,7 @@
+{
+  "extends": "./tsconfig.json",
+  "include": ["src/**/*"],
+  "compilerOptions": {
+    "outDir": "lib"
+  }
+}

+ 0 - 22
devops/ansible/build-and-run-tests-exported-chainspec-playbook.yml

@@ -1,22 +0,0 @@
-- hosts: 127.0.0.1
-  user: root
-  become: yes
-  become_method: sudo
-
-  tasks:
-
-    - name: install dependencies
-      include_role:
-        name: install_dependencies
-
-    - name: alter block creation time
-      include_role:
-        name: alter_block_creation_time
-
-    - name: build node
-      include_role:
-        name: build_docker_image
-
-    - name: run tests
-      include_role:
-        name: run_tests_exported_chainspec

+ 0 - 22
devops/ansible/build-and-run-tests-single-node-playbook.yml

@@ -1,22 +0,0 @@
-- hosts: 127.0.0.1
-  user: root
-  become: yes
-  become_method: sudo
-
-  tasks:
-
-    - name: install dependencies
-      include_role:
-        name: install_dependencies
-
-    - name: alter block creation time
-      include_role:
-        name: alter_block_creation_time
-
-    - name: build node
-      include_role:
-        name: build_docker_image
-
-    - name: run tests
-      include_role:
-        name: run_tests_single_node

+ 0 - 22
devops/ansible/build-and-run-tests-two-nodes-playbook.yml

@@ -1,22 +0,0 @@
-- hosts: 127.0.0.1
-  user: root
-  become: yes
-  become_method: sudo
-
-  tasks:
-
-    - name: install dependencies
-      include_role:
-        name: install_dependencies
-
-    - name: alter block creation time
-      include_role:
-        name: alter_block_creation_time
-
-    - name: build node
-      include_role:
-        name: build_docker_image
-
-    - name: run tests
-      include_role:
-        name: run_tests_two_nodes

+ 0 - 18
devops/ansible/build-image-playbook.yml

@@ -1,18 +0,0 @@
-- hosts: 127.0.0.1
-  user: root
-  become: yes
-  become_method: sudo
-
-  tasks:
-
-    - name: install dependencies
-      include_role:
-        name: install_dependencies
-
-    - name: alter block creation time
-      include_role:
-        name: alter_block_creation_time
-
-    - name: build node
-      include_role:
-        name: build_docker_image

+ 0 - 34
devops/ansible/docker-compose.yml

@@ -1,34 +0,0 @@
-version: "3"
-services:
-  node_alice:
-    image: joystream/node-testing
-    container_name: alice
-    entrypoint: ./node --dev --alice --validator --unsafe-ws-external --rpc-cors=all
-    ports:
-      - "30333:30333"
-      - "9933:9933"
-      - "9944:9944"
-    networks:
-      testing_net:
-        ipv4_address: 172.28.1.1
-
-  node_bob:
-    image: joystream/node-testing
-    container_name: bob
-    entrypoint: ./node --dev --bob --validator --unsafe-ws-external --rpc-cors=all
-    ports:
-      - "30335:30333"
-      - "9935:9933"
-      - "9945:9944"
-    links:
-      - "node_alice:alice"
-    networks:
-      testing_net:
-        ipv4_address: 172.28.1.2
-
-networks:
-  testing_net:
-    ipam:
-      driver: default
-      config:
-        - subnet: 172.28.0.0/16

+ 0 - 2
devops/ansible/hosts

@@ -1,2 +0,0 @@
-[sites]
-127.0.0.1 ansible_connection=local

+ 0 - 4
devops/ansible/roles/alter_block_creation_time/tasks/main.yml

@@ -1,4 +0,0 @@
-- name: alter block creation time
-  shell: ./scripts/alter-block-creation-time.sh
-  args:
-    chdir: ../../

+ 0 - 4
devops/ansible/roles/build_docker_image/tasks/main.yml

@@ -1,4 +0,0 @@
-- name: create testing node docker image
-  shell: ./scripts/build-joystream-node-docker-image.sh
-  args:
-    chdir: ../../

+ 0 - 46
devops/ansible/roles/install_dependencies/tasks/main.yml

@@ -1,46 +0,0 @@
-- name: install pip and npm on Debian
-  block:
-    - name: create temporary folder
-      file:
-        path: ../../.tmp
-        state: directory
-    - name: install pip using apt
-      apt: name=python-pip state=present
-    - name: install npm using apt
-      apt: name=npm state=present
-  when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu'
-
-- name: install pip on Mac
-  block:
-    - name: create temporary folder
-      file:
-        path: ../../.tmp
-        state: directory
-    - name: get pip installer using curl
-      get_url:
-        url: https://bootstrap.pypa.io/get-pip.py
-        dest: ../../.tmp/get-pip.py
-    - name: install pip
-      shell: python ../../.tmp/get-pip.py
-  when: ansible_distribution == 'MacOSX'
-  always:
-    - name: remove pip installer script
-      file:
-        path: ../../.tmp/get-pip.py
-        state: absent
-
-- name: install docker
-  pip: name=docker
-
-- name: Install yarn with npm
-  npm:
-    name: yarn
-    global: yes
-
-- name: Install pyrsistent
-  pip:
-    name: pyrsistent==0.16.0
-
-- name: Install docker compose
-  pip:
-    name: docker-compose==1.26.2

+ 0 - 38
devops/ansible/roles/run_tests_exported_chainspec/tasks/main.yml

@@ -1,38 +0,0 @@
-- name: run network
-  block:
-    - name: yarn install for joystream types
-      shell: yarn workspace @joystream/types install
-
-    - name: yarn build for joystream types
-      shell: yarn workspace @joystream/types build
-
-    - name: yarn install for network tests
-      shell: yarn workspace joystream-testing install
-
-    - name: run docker container
-      docker_container:
-        name: "joystream-node"
-        image: "joystream/node"
-        ports:
-          - "9944:9944"
-        mounts:
-          - target: /testnet-state
-            source: "{{ playbook_dir }}/../../testnets/nicaea-exported-state"
-            type: bind
-            read_only: yes
-        entrypoint: ./node --chain ../testnet-state/raw_chain_spec.json --alice --validator --unsafe-ws-external --rpc-cors=all
-        state: started
-
-    - name: execute network tests
-      shell: yarn test >> ../../.tmp/tests.log
-      args:
-        chdir: ../../tests/network-tests/
-
-  always:
-    - name: display tests log
-      shell: cat ../../.tmp/tests.log
-
-    - name: stop docker container
-      docker_container:
-        name: "joystream-node-testing"
-        state: absent

+ 0 - 33
devops/ansible/roles/run_tests_single_node/tasks/main.yml

@@ -1,33 +0,0 @@
-- name: run network
-  block:
-    - name: yarn install for joystream types
-      shell: yarn workspace @joystream/types install
-
-    - name: yarn build for joystream types
-      shell: yarn workspace @joystream/types build
-
-    - name: yarn install for network tests
-      shell: yarn workspace joystream-testing install
-
-    - name: run docker container
-      docker_container:
-        name: "joystream-node"
-        image: "joystream/node"
-        ports:
-          - "9944:9944"
-        entrypoint: ./node --dev --alice --validator --unsafe-ws-external --rpc-cors=all
-        state: started
-
-    - name: execute network tests
-      shell: yarn test >> ../../.tmp/tests.log
-      args:
-        chdir: ../../tests/network-tests/
-
-  always:
-    - name: display tests log
-      shell: cat ../../.tmp/tests.log
-
-    - name: stop docker container
-      docker_container:
-        name: "joystream-node-testing"
-        state: absent

+ 0 - 18
devops/ansible/roles/run_tests_two_nodes/tasks/main.yml

@@ -1,18 +0,0 @@
-- name: run network
-  block:
-
-    - name: run two nodes containerized network
-      docker_compose:
-        project_src: ./
-        state: present
-
-    - name: execute network tests
-      shell: yarn test >> ../../.tmp/tests.log
-      args:
-        chdir: ../../tests/network-tests/
-        
-  always:
-    - name: stop containers
-      docker_compose:
-        project_src: ./
-        state: absent

+ 0 - 30
devops/dockerfiles/node-and-runtime/Dockerfile_experimental

@@ -1,30 +0,0 @@
-# syntax=docker/dockerfile:experimental
-# must enable experimental features in docker daemon and set DOCKER_BUILDKIT=1 env variable
-# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/experimental.md
-FROM joystream/rust-builder AS builder
-LABEL description="compiles and caches dependencies, artifacts and node"
-WORKDIR /joystream
-COPY . /joystream
-RUN mkdir /build-output
-
-RUN --mount=type=cache,target=/joystream/target \
-    --mount=type=cache,target=/root/.cargo/git \
-    --mount=type=cache,target=/root/.cargo/registry \
-    cargo build --release \
-    && cp ./target/release/joystream-node /build-output/joystream-node
-# copy in last part could be done with nightly option --out-dir
-
-FROM debian:stretch
-LABEL description="Joystream node"
-WORKDIR /joystream
-COPY --from=builder /build-output/joystream-node /joystream/node
-
-# Use these volumes to persits chain state and keystore, eg.:
-# --base-path /data
-# optionally separate keystore (otherwise it will be stored in the base path)
-# --keystore-path /keystore
-# if base-path isn't specified, chain state is stored inside container in ~/.local/share/joystream-node/
-# which is not ideal
-VOLUME ["/data", "/keystore"]
-
-ENTRYPOINT ["/joystream/node"]

+ 1 - 1
devops/git-hooks/pre-commit

@@ -1,5 +1,5 @@
 #!/bin/sh
 set -e
 
-echo 'cargo fmt --all -- --check'
+echo 'running rust-fmt'
 cargo fmt --all -- --check

+ 5 - 2
devops/git-hooks/pre-push

@@ -3,8 +3,11 @@ set -e
 
 export WASM_BUILD_TOOLCHAIN=nightly-2020-05-23
 
-echo '+cargo clippy --all -- -D warnings'
+echo 'running clippy (rust linter)'
+# When custom build.rs triggers wasm-build-runner-impl to build we get error:
+# "Rust WASM toolchain not installed, please install it!"
+# So we skip building the WASM binary by setting BUILD_DUMMY_WASM_BINARY=1
 BUILD_DUMMY_WASM_BINARY=1 cargo clippy --release --all -- -D warnings
 
-echo '+cargo test --all'
+echo 'running cargo unit tests'
 cargo test --release --all
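The same checks can also be run outside of git, since the root `package.json` chains both hooks in its `cargo-checks` script:

```bash
# Runs devops/git-hooks/pre-commit followed by devops/git-hooks/pre-push
yarn cargo-checks
```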

+ 32 - 0
docker-compose-with-storage.yml

@@ -0,0 +1,32 @@
+version: '3'
+services:
+  ipfs:
+    image: ipfs/go-ipfs:latest
+    ports:
+      - '127.0.0.1:5001:5001'
+      - '127.0.0.1:8080:8080'
+    volumes:
+      - ipfs-data:/data/ipfs
+    entrypoint: ''
+    command: |
+      /bin/sh -c "
+        set -e
+        /usr/local/bin/start_ipfs config profile apply lowpower
+        /usr/local/bin/start_ipfs config --json Gateway.PublicGateways '{\"localhost\": null }'
+        /sbin/tini -- /usr/local/bin/start_ipfs daemon --migrate=true
+      "
+  chain:
+    image: joystream/node
+    build:
+      context: .
+      dockerfile: joystream-node.Dockerfile
+    ports:
+      - '127.0.0.1:9944:9944'
+    volumes:
+      - chain-data:/data
+    command: --dev --ws-external --base-path /data
+volumes:
+  ipfs-data:
+    driver: local
+  chain-data:
+    driver: local
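A brief usage sketch for this compose file (service ports are bound to 127.0.0.1 as declared above):

```bash
# Start the dev chain together with a local IPFS node, then tear it down
docker-compose -f docker-compose-with-storage.yml up -d
docker-compose -f docker-compose-with-storage.yml down
```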

+ 17 - 0
docker-compose.yml

@@ -0,0 +1,17 @@
+# Compiles new joystream node image if local image not found,
+# and runs local development chain.
+# To prevent build run docker-compose with "--no-build" arg
+version: "3"
+services:
+  joystream-node:
+    image: joystream/node
+    build:
+      # context is relative to the compose file
+      context: .
+      # dockerfile is relative to the context
+      dockerfile: joystream-node.Dockerfile
+    container_name: joystream-node
+    command: --dev --alice --validator --unsafe-ws-external --rpc-cors=all
+    ports:
+      - "9944:9944"
+  
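As the file's comments note, the image is rebuilt only when no local `joystream/node` image exists, and a rebuild can be skipped explicitly with `--no-build`; for example:

```bash
# Run a local development chain exposing the websocket port 9944
docker-compose up -d joystream-node

# Reuse an existing joystream/node image instead of rebuilding it
docker-compose up -d --no-build joystream-node

# Stop and remove the container
docker-compose down
```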

+ 0 - 40
docs/using-docker/building-node-and-runtime.md

@@ -1,40 +0,0 @@
-### Docker
-
-#### Building localy
-
-A joystream-node can be compiled with given [Dockerfile](./Dockerfile) file:
-
-```bash
-# Build and tag a new image, which will compile joystream-node from source
-docker build . -t joystream-node
-
-# run a development chain with the image just created publishing the websocket port
-docker run -p 9944:9944 joystream-node --dev --ws-external
-```
-
-#### Downloading joystream pre-built images from Docker Hub
-
-```bash
-docker pull joystream/node
-```
-
-#### Running a public node as a service
-
-Create a working directory to store the node's data and where you will need to place the chain file.
-
-```bash
-mkdir ${HOME}/joystream-node-data/
-
-cp rome-testnet.json ${HOME}/joystream-node-data/
-
-docker run -d -p 30333:30333 \
-    -v ${HOME}/joystream-node-data/:/data \
-    --name my-node \
-    joystream/node --base-path /data --chain /data/rome-testnet.json
-
-# check status
-docker ps
-
-# monitor logs
-docker logs --tail 100 -f my-node
-```

+ 5 - 1
devops/dockerfiles/node-and-runtime/Dockerfile → joystream-node.Dockerfile

@@ -4,7 +4,11 @@ WORKDIR /joystream
 COPY . /joystream
 
 # Build all cargo crates
-RUN WASM_BUILD_TOOLCHAIN=nightly-2020-05-23 cargo build --release
+# Ensure our tests and linter pass before actual build
+ENV WASM_BUILD_TOOLCHAIN=nightly-2020-05-23
+RUN BUILD_DUMMY_WASM_BINARY=1 cargo clippy --release --all -- -D warnings && \
+    cargo test --release --all && \
+    cargo build --release
 
 FROM debian:stretch
 LABEL description="Joystream node"
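Because clippy and the unit tests now run inside the image build, a successful build also implies those checks passed. A minimal sketch of building and running a dev chain from this Dockerfile, using the same build command as the workflows above (the `--ws-external` run flag is an assumption for exposing the websocket endpoint):

```bash
# Build the node image, then start a development chain from it
docker build . --file joystream-node.Dockerfile --tag joystream/node
docker run -p 9944:9944 joystream/node --dev --ws-external
```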

+ 0 - 9
node/README.md

@@ -72,15 +72,6 @@ Always format your rust code with `cargo fmt` before committing:
 cargo fmt --all
 ```
 
-### Integration tests
-
-```bash
-./scripts/run-dev-chain.sh
-yarn workspace joystream-testing test
-```
-
-To run the integration tests with a different chain, you can omit the step of running the local development chain and simply set the node URL using `NODE_URL` environment variable.
-
 ### Installing a release build
 
 If you are building a tagged release from `master` branch and want to install the executable to your path follow the step below.

+ 21 - 11
node/src/chain_spec/mod.rs

@@ -30,11 +30,12 @@ use sp_runtime::Perbill;
 
 use node_runtime::{
     membership, AuthorityDiscoveryConfig, BabeConfig, Balance, BalancesConfig,
-    ContentWorkingGroupConfig, CouncilConfig, CouncilElectionConfig, DataDirectoryConfig,
-    DataObjectStorageRegistryConfig, DataObjectTypeRegistryConfig, ElectionParameters, ForumConfig,
-    GrandpaConfig, ImOnlineConfig, MembersConfig, Moment, ProposalsCodexConfig, SessionConfig,
-    SessionKeys, Signature, StakerStatus, StakingConfig, StorageWorkingGroupConfig, SudoConfig,
-    SystemConfig, VersionedStoreConfig, VersionedStorePermissionsConfig, DAYS, WASM_BINARY,
+    ContentDirectoryConfig, ContentDirectoryWorkingGroupConfig, ContentWorkingGroupConfig,
+    CouncilConfig, CouncilElectionConfig, DataDirectoryConfig, DataObjectStorageRegistryConfig,
+    DataObjectTypeRegistryConfig, ElectionParameters, ForumConfig, GrandpaConfig, ImOnlineConfig,
+    MembersConfig, Moment, ProposalsCodexConfig, SessionConfig, SessionKeys, Signature,
+    StakerStatus, StakingConfig, StorageWorkingGroupConfig, SudoConfig, SystemConfig,
+    VersionedStoreConfig, VersionedStorePermissionsConfig, DAYS, WASM_BINARY,
 };
 
 // Exported to be used by chain-spec-builder
@@ -320,6 +321,21 @@ pub fn testnet_genesis(
             worker_application_human_readable_text_constraint: default_text_constraint,
             worker_exit_rationale_text_constraint: default_text_constraint,
         }),
+        working_group_Instance3: Some(ContentDirectoryWorkingGroupConfig {
+            phantom: Default::default(),
+            storage_working_group_mint_capacity: 0,
+            opening_human_readable_text_constraint: default_text_constraint,
+            worker_application_human_readable_text_constraint: default_text_constraint,
+            worker_exit_rationale_text_constraint: default_text_constraint,
+        }),
+        content_directory: Some({
+            ContentDirectoryConfig {
+                curator_group_by_id: vec![],
+                next_class_id: 1,
+                next_entity_id: 1,
+                next_curator_group_id: 1,
+            }
+        }),
         versioned_store: Some(versioned_store_config),
         versioned_store_permissions: Some(versioned_store_permissions_config),
         content_wg: Some(content_working_group_config),
@@ -336,12 +352,6 @@ pub fn testnet_genesis(
                 .set_election_parameters_proposal_voting_period,
             set_election_parameters_proposal_grace_period: cpcp
                 .set_election_parameters_proposal_grace_period,
-            set_content_working_group_mint_capacity_proposal_voting_period: cpcp
-                .set_content_working_group_mint_capacity_proposal_voting_period,
-            set_content_working_group_mint_capacity_proposal_grace_period: cpcp
-                .set_content_working_group_mint_capacity_proposal_grace_period,
-            set_lead_proposal_voting_period: cpcp.set_lead_proposal_voting_period,
-            set_lead_proposal_grace_period: cpcp.set_lead_proposal_grace_period,
             spending_proposal_voting_period: cpcp.spending_proposal_voting_period,
             spending_proposal_grace_period: cpcp.spending_proposal_grace_period,
             add_working_group_opening_proposal_voting_period: cpcp

+ 2 - 5
package.json

@@ -4,12 +4,9 @@
   "version": "1.0.0",
   "license": "GPL-3.0-only",
   "scripts": {
-    "test": "yarn && yarn workspaces run test",
-    "test-migration": "yarn && yarn workspaces run test-migration",
-    "postinstall": "yarn workspace @joystream/types build && yarn workspace cd-schemas generate:all",
+    "postinstall": "yarn workspace @joystream/types build && yarn workspace cd-schemas generate:all && yarn workspace cd-schemas build",
     "cargo-checks": "devops/git-hooks/pre-commit && devops/git-hooks/pre-push",
-    "cargo-build": "scripts/cargo-build.sh",
-    "lint": "yarn workspaces run lint"
+    "cargo-build": "scripts/cargo-build.sh"
   },
   "workspaces": [
     "tests/network-tests",

+ 3 - 3
pioneer/package.json

@@ -17,10 +17,10 @@
     "docs": "echo \"skipping docs\"",
     "clean": "polkadot-dev-clean-build",
     "clean:i18n": "rm -rf packages/apps/public/locales/en && mkdir -p packages/apps/public/locales/en",
-    "lint": "eslint --ext .js,.jsx,.ts,.tsx . && tsc --noEmit --pretty",
+    "lint": "tsc --noEmit --pretty && eslint --ext .js,.jsx,.ts,.tsx .",
     "lint:css": "stylelint './packages/**/src/**/*.tsx'",
-    "lint-only-errors": "eslint --quiet --ext .js,.jsx,.ts,.tsx . && tsc --noEmit --pretty",
-    "lint-autofix": "eslint --fix --ext .js,.jsx,.ts,.tsx . && tsc --noEmit --pretty",
+    "lint-only-errors": "yarn lint --quiet",
+    "lint-autofix": "yarn lint --fix",
     "postinstall": "polkadot-dev-yarn-only",
     "test": "echo \"skipping tests\"",
     "vanitygen": "node packages/app-accounts/scripts/vanitygen.js",

+ 3 - 0
pioneer/packages/apps-routing/src/index.ts

@@ -26,10 +26,12 @@ import election from './joy-election';
 import proposals from './joy-proposals';
 import roles from './joy-roles';
 import forum from './joy-forum';
+import tokenomics from './joy-tokenomics';
 
 export default function create (t: <T = string> (key: string, text: string, options: { ns: string }) => T): Routes {
   return appSettings.uiMode === 'light'
     ? [
+      tokenomics(t),
       members(t),
       roles(t),
       election(t),
@@ -45,6 +47,7 @@ export default function create (t: <T = string> (key: string, text: string, opti
       privacyPolicy(t)
     ]
     : [
+      tokenomics(t),
       members(t),
       roles(t),
       election(t),

+ 15 - 0
pioneer/packages/apps-routing/src/joy-tokenomics.ts

@@ -0,0 +1,15 @@
+import { Route } from './types';
+
+import Tokenomics from '@polkadot/joy-tokenomics/index';
+
+export default function create (t: <T = string> (key: string, text: string, options: { ns: string }) => T): Route {
+  return {
+    Component: Tokenomics,
+    display: {
+      needsApi: []
+    },
+    text: t<string>('nav.tokenomics', 'Overview', { ns: 'apps-routing' }),
+    icon: 'th',
+    name: 'tokenomics'
+  };
+}

+ 1 - 0
pioneer/packages/apps/public/locales/en/index.json

@@ -26,6 +26,7 @@
   "joy-members.json",
   "joy-proposals.json",
   "joy-roles.json",
+  "joy-tokenomics.json",
   "joy-utils.json",
   "react-components.json",
   "react-params.json",

+ 3 - 0
pioneer/packages/apps/public/locales/en/joy-tokenomics.json

@@ -0,0 +1,3 @@
+{
+  "Tokenomics": "Tokenomics"
+}

+ 1 - 0
pioneer/packages/apps/public/locales/en/translation.json

@@ -669,6 +669,7 @@
   "Tip (optional)": "",
   "To council": "",
   "To ensure optimal fund security using the same stash/controller is strongly discouraged, but not forbidden.": "",
+  "Tokenomics": "",
   "Transfer": "",
   "Translate": "",
   "Treasury overview": "",

+ 2 - 0
pioneer/packages/apps/public/robots.txt

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /

+ 37 - 30
pioneer/packages/joy-proposals/src/Proposal/discussion/DiscussionPostForm.tsx

@@ -10,6 +10,7 @@ import { TxFailedCallback, TxCallback } from '@polkadot/react-components/Status/
 import { ParsedPost, DiscussionContraints } from '@polkadot/joy-utils/types/proposals';
 import { ThreadId } from '@joystream/types/common';
 import { MemberId } from '@joystream/types/members';
+import { Loading } from '@polkadot/joy-utils/react/components/PromiseComponent';
 
 type OuterProps = {
   post?: ParsedPost;
@@ -86,36 +87,42 @@ const DiscussionPostFormInner = (props: InnerProps) => {
           rows={5}
           placeholder='Content of the post...' />
       </LabelledField>
-      <LabelledField invisibleLabel {...props} flex>
-        <TxButton
-          type='submit'
-          label={isEditForm ? 'Update' : 'Add Post'}
-          isDisabled={isSubmitting || !isValid}
-          params={buildTxParams()}
-          tx={isEditForm ? 'proposalsDiscussion.updatePost' : 'proposalsDiscussion.addPost'}
-          onClick={onSubmit}
-          txFailedCb={onTxFailed}
-          txSuccessCb={onTxSuccess}
-        />
-        { isEditForm ? (
-          <Button
-            type='button'
-            size='large'
-            disabled={isSubmitting}
-            color='red'
-            onClick={() => onSuccess()}
-            content='Cancel'
-          />
-        ) : (
-          <Button
-            type='button'
-            size='large'
-            disabled={isSubmitting}
-            onClick={() => resetForm()}
-            content='Clear'
-          />
-        ) }
-      </LabelledField>
+      {
+        isSubmitting
+          ? <Loading text={'Submitting...'}/>
+          : (
+            <LabelledField invisibleLabel {...props} flex>
+              <TxButton
+                type='submit'
+                label={isEditForm ? 'Update' : 'Add Post'}
+                isDisabled={isSubmitting || !isValid}
+                params={buildTxParams()}
+                tx={isEditForm ? 'proposalsDiscussion.updatePost' : 'proposalsDiscussion.addPost'}
+                onClick={onSubmit}
+                txFailedCb={onTxFailed}
+                txSuccessCb={onTxSuccess}
+              />
+              { isEditForm ? (
+                <Button
+                  type='button'
+                  size='large'
+                  disabled={isSubmitting}
+                  color='red'
+                  onClick={() => onSuccess()}
+                  content='Cancel'
+                />
+              ) : (
+                <Button
+                  type='button'
+                  size='large'
+                  disabled={isSubmitting}
+                  onClick={() => resetForm()}
+                  content='Clear'
+                />
+              ) }
+            </LabelledField>
+          )
+      }
     </Form>
   );
 };

+ 201 - 0
pioneer/packages/joy-tokenomics/LICENSE

@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                    http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+  "License" shall mean the terms and conditions for use, reproduction,
+  and distribution as defined by Sections 1 through 9 of this document.
+
+  "Licensor" shall mean the copyright owner or entity authorized by
+  the copyright owner that is granting the License.
+
+  "Legal Entity" shall mean the union of the acting entity and all
+  other entities that control, are controlled by, or are under common
+  control with that entity. For the purposes of this definition,
+  "control" means (i) the power, direct or indirect, to cause the
+  direction or management of such entity, whether by contract or
+  otherwise, or (ii) ownership of fifty percent (50%) or more of the
+  outstanding shares, or (iii) beneficial ownership of such entity.
+
+  "You" (or "Your") shall mean an individual or Legal Entity
+  exercising permissions granted by this License.
+
+  "Source" form shall mean the preferred form for making modifications,
+  including but not limited to software source code, documentation
+  source, and configuration files.
+
+  "Object" form shall mean any form resulting from mechanical
+  transformation or translation of a Source form, including but
+  not limited to compiled object code, generated documentation,
+  and conversions to other media types.
+
+  "Work" shall mean the work of authorship, whether in Source or
+  Object form, made available under the License, as indicated by a
+  copyright notice that is included in or attached to the work
+  (an example is provided in the Appendix below).
+
+  "Derivative Works" shall mean any work, whether in Source or Object
+  form, that is based on (or derived from) the Work and for which the
+  editorial revisions, annotations, elaborations, or other modifications
+  represent, as a whole, an original work of authorship. For the purposes
+  of this License, Derivative Works shall not include works that remain
+  separable from, or merely link (or bind by name) to the interfaces of,
+  the Work and Derivative Works thereof.
+
+  "Contribution" shall mean any work of authorship, including
+  the original version of the Work and any modifications or additions
+  to that Work or Derivative Works thereof, that is intentionally
+  submitted to Licensor for inclusion in the Work by the copyright owner
+  or by an individual or Legal Entity authorized to submit on behalf of
+  the copyright owner. For the purposes of this definition, "submitted"
+  means any form of electronic, verbal, or written communication sent
+  to the Licensor or its representatives, including but not limited to
+  communication on electronic mailing lists, source code control systems,
+  and issue tracking systems that are managed by, or on behalf of, the
+  Licensor for the purpose of discussing and improving the Work, but
+  excluding communication that is conspicuously marked or otherwise
+  designated in writing by the copyright owner as "Not a Contribution."
+
+  "Contributor" shall mean Licensor and any individual or Legal Entity
+  on behalf of whom a Contribution has been received by Licensor and
+  subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+  this License, each Contributor hereby grants to You a perpetual,
+  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+  copyright license to reproduce, prepare Derivative Works of,
+  publicly display, publicly perform, sublicense, and distribute the
+  Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+  this License, each Contributor hereby grants to You a perpetual,
+  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+  (except as stated in this section) patent license to make, have made,
+  use, offer to sell, sell, import, and otherwise transfer the Work,
+  where such license applies only to those patent claims licensable
+  by such Contributor that are necessarily infringed by their
+  Contribution(s) alone or by combination of their Contribution(s)
+  with the Work to which such Contribution(s) was submitted. If You
+  institute patent litigation against any entity (including a
+  cross-claim or counterclaim in a lawsuit) alleging that the Work
+  or a Contribution incorporated within the Work constitutes direct
+  or contributory patent infringement, then any patent licenses
+  granted to You under this License for that Work shall terminate
+  as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+  Work or Derivative Works thereof in any medium, with or without
+  modifications, and in Source or Object form, provided that You
+  meet the following conditions:
+
+  (a) You must give any other recipients of the Work or
+      Derivative Works a copy of this License; and
+
+  (b) You must cause any modified files to carry prominent notices
+      stating that You changed the files; and
+
+  (c) You must retain, in the Source form of any Derivative Works
+      that You distribute, all copyright, patent, trademark, and
+      attribution notices from the Source form of the Work,
+      excluding those notices that do not pertain to any part of
+      the Derivative Works; and
+
+  (d) If the Work includes a "NOTICE" text file as part of its
+      distribution, then any Derivative Works that You distribute must
+      include a readable copy of the attribution notices contained
+      within such NOTICE file, excluding those notices that do not
+      pertain to any part of the Derivative Works, in at least one
+      of the following places: within a NOTICE text file distributed
+      as part of the Derivative Works; within the Source form or
+      documentation, if provided along with the Derivative Works; or,
+      within a display generated by the Derivative Works, if and
+      wherever such third-party notices normally appear. The contents
+      of the NOTICE file are for informational purposes only and
+      do not modify the License. You may add Your own attribution
+      notices within Derivative Works that You distribute, alongside
+      or as an addendum to the NOTICE text from the Work, provided
+      that such additional attribution notices cannot be construed
+      as modifying the License.
+
+  You may add Your own copyright statement to Your modifications and
+  may provide additional or different license terms and conditions
+  for use, reproduction, or distribution of Your modifications, or
+  for any such Derivative Works as a whole, provided Your use,
+  reproduction, and distribution of the Work otherwise complies with
+  the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+  any Contribution intentionally submitted for inclusion in the Work
+  by You to the Licensor shall be under the terms and conditions of
+  this License, without any additional terms or conditions.
+  Notwithstanding the above, nothing herein shall supersede or modify
+  the terms of any separate license agreement you may have executed
+  with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+  names, trademarks, service marks, or product names of the Licensor,
+  except as required for reasonable and customary use in describing the
+  origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+  agreed to in writing, Licensor provides the Work (and each
+  Contributor provides its Contributions) on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+  implied, including, without limitation, any warranties or conditions
+  of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+  PARTICULAR PURPOSE. You are solely responsible for determining the
+  appropriateness of using or redistributing the Work and assume any
+  risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+  whether in tort (including negligence), contract, or otherwise,
+  unless required by applicable law (such as deliberate and grossly
+  negligent acts) or agreed to in writing, shall any Contributor be
+  liable to You for damages, including any direct, indirect, special,
+  incidental, or consequential damages of any character arising as a
+  result of this License or out of the use or inability to use the
+  Work (including but not limited to damages for loss of goodwill,
+  work stoppage, computer failure or malfunction, or any and all
+  other commercial damages or losses), even if such Contributor
+  has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+  the Work or Derivative Works thereof, You may choose to offer,
+  and charge a fee for, acceptance of support, warranty, indemnity,
+  or other liability obligations and/or rights consistent with this
+  License. However, in accepting such obligations, You may act only
+  on Your own behalf and on Your sole responsibility, not on behalf
+  of any other Contributor, and only if You agree to indemnify,
+  defend, and hold each Contributor harmless for any liability
+  incurred by, or claims asserted against, such Contributor by reason
+  of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+  To apply the Apache License to your work, attach the following
+  boilerplate notice, with the fields enclosed by brackets "[]"
+  replaced with your own identifying information. (Don't include
+  the brackets!)  The text should be enclosed in the appropriate
+  comment syntax for the file format. We also recommend that a
+  file or class name and description of purpose be included on the
+  same "printed page" as the copyright notice for easier
+  identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.

+ 22 - 0
pioneer/packages/joy-tokenomics/README.md

@@ -0,0 +1,22 @@
+# @polkadot/app-123code
+
+A simple template to get started with adding an "app" to this UI. It contains the bare minimum for a nicely hackable app (if you just want to code _somewhere_) and the steps needed to create, add and register a new app that appears in the UI.
+
+## adding an app
+
+If you want to add a new app to the UI, this is the place to start.
+
+1. Duplicate this `app-123code` folder and give it an appropriate name; in this example we will use `app-example` to keep things clear.
+2. Edit the `app-example/package.json` app description, i.e. the name, author and relevant overview.
+
+With the basic app source set up, it is time to get the tooling correct.
+
+3. Add the new app to the root TypeScript config, `tsconfig.json`, i.e. an entry such as `"@polkadot/app-example/*": [ "packages/app-example/src/*" ],`
+
+At this point the app should be buildable, but not quite reachable. The final step is to add it to the actual sidebar in `apps`.
+
+4. In `apps-routing/src` duplicate the `123code.ts` file to `example.ts` and edit it with the appropriate information, including the hash link, name and icon (any icon name from semantic-ui-react/font-awesome 4 should be appropriate).
+5. In the above description file, the `isHidden` field needs to be toggled to make it appear - the base template is hidden by default.
+6. Finally, add the new route to the `apps-routing/src/index.ts` file at the appropriate place for both full and light mode (either is optional); a hedged sketch of such a route module follows this README.
+
+That's it - everything is hooked up. Run `yarn start` and your new (still empty) app should show up. Now start having fun and building something great.
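To make steps 3-6 concrete for the package added in this branch: the `@polkadot/joy-tokenomics` path mappings show up in the `pioneer/tsconfig.json` change further down this diff, while the route module below is only a hedged sketch - the exact `Route` shape is dictated by the local `@polkadot/apps-routing` version, so every field name here is an assumption rather than the file actually committed.

```ts
// Hypothetical route module (e.g. apps-routing/src/joy-tokenomics.ts).
// The concrete Route shape comes from @polkadot/apps-routing and differs
// between versions; treat the field names below as illustrative only.
import Tokenomics from '@polkadot/joy-tokenomics';

export default {
  Component: Tokenomics,      // top-level component exported from joy-tokenomics/src/index.tsx
  display: { needsApi: [] },  // render without requiring particular runtime APIs
  i18n: { defaultValue: 'Tokenomics' },
  icon: 'th',
  name: 'tokenomics'          // becomes the /#/tokenomics hash link in the sidebar
};
```

Registering that default export in `apps-routing/src/index.ts` (step 6) is then a one-line addition.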

+ 16 - 0
pioneer/packages/joy-tokenomics/package.json

@@ -0,0 +1,16 @@
+{
+  "name": "@polkadot/joy-tokenomics",
+  "version": "0.1.1",
+  "description": "Tokenomics page, basic overview of data from the whole website.",
+  "main": "index.js",
+  "scripts": {},
+  "author": "Edvin Dzidic <edvindzidic2000@gmail.com>",
+  "maintainers": [
+    "Edvin Dzidic <edvindzidic2000@gmail.com>"
+  ],
+  "license": "Apache-2.0",
+  "dependencies": {
+    "@babel/runtime": "^7.10.5",
+    "@polkadot/react-components": "0.51.1"
+  }
+}

+ 104 - 0
pioneer/packages/joy-tokenomics/src/Overview/OverviewTable.tsx

@@ -0,0 +1,104 @@
+import React from 'react';
+import { Table, Popup, Icon } from 'semantic-ui-react';
+import styled from 'styled-components';
+
+import { TokenomicsData, StatusServerData } from '@polkadot/joy-utils/src/types/tokenomics';
+
+const StyledTableRow = styled(Table.Row)`
+  .help-icon{
+    position: absolute !important;
+    right: 0rem !important;
+    top: 0 !important;
+    @media (max-width: 767px){
+      right: 1rem !important;
+      top:0.8rem !important;
+    }
+  }
+`;
+
+const OverviewTableRow: React.FC<{item: string; value: string; help?: string}> = ({ item, value, help }) => {
+  return (
+    <StyledTableRow>
+      <Table.Cell>
+        <div style={{ position: 'relative' }}>
+          {item}
+          {help &&
+            <Popup
+              trigger={<Icon className='help-icon' name='help circle' color='grey'/>}
+              content={help}
+              position='right center'
+            />}
+        </div>
+      </Table.Cell>
+      <Table.Cell>{value}</Table.Cell>
+    </StyledTableRow>
+  );
+};
+
+const OverviewTable: React.FC<{data?: TokenomicsData; statusData?: StatusServerData | null}> = ({ data, statusData }) => {
+  const displayStatusData = (val: string, unit: string): string => (
+    statusData === null ? 'Data currently unavailable...' : statusData ? `${val} ${unit}` : 'Loading...'
+  );
+
+  return (
+    <Table style={{ marginBottom: '1.5rem' }} celled>
+      <Table.Header>
+        <Table.Row>
+          <Table.HeaderCell width={10}>Item</Table.HeaderCell>
+          <Table.HeaderCell width={2}>Value</Table.HeaderCell>
+        </Table.Row>
+      </Table.Header>
+
+      <Table.Body>
+        <OverviewTableRow
+          item='Total Issuance'
+          help='The current supply of tokens.'
+          value={data ? `${data.totalIssuance} JOY` : 'Loading...'}
+        />
+        <OverviewTableRow
+          item='Fiat Pool'
+          help='The current value of the Fiat Pool.'
+          value={displayStatusData(statusData?.dollarPool.size.toFixed(2) || '', 'USD')}
+        />
+        <OverviewTableRow
+          item='Currently Staked Tokens'
+          value={data ? `${data.currentlyStakedTokens} JOY` : 'Loading...'}
+          help='All tokens currently staked for active roles.'
+        />
+        <OverviewTableRow
+          item='Currently Staked Value'
+          value={ data ? displayStatusData(`${(data.currentlyStakedTokens * Number(statusData?.price)).toFixed(2)}`, 'USD') : 'Loading...' }
+          help='The value of all tokens currently staked for active roles.'
+        />
+        <OverviewTableRow
+          item='Exchange Rate'
+          value={displayStatusData(`${(Number(statusData?.price) * 1000000).toFixed(2)}`, 'USD/1MJOY')}
+          help='The current exchange rate.'
+        />
+        {/* <OverviewTableRow help='Sum of all tokens burned through exchanges' item='Total Tokens Burned/Exchanged' value={statusData ? `${statusData.burned} JOY` : 'Loading...'}/> */}
+        <OverviewTableRow
+          item='Projected Weekly Token Mint Rate'
+          value={data ? `${Math.round(data.totalWeeklySpending)} JOY` : 'Loading...'}
+          help='Projection of tokens minted over the next week, based on current rewards for all roles.'
+        />
+        <OverviewTableRow
+          item='Projected Weekly Token Inflation Rate'
+          value={data ? `${((data.totalWeeklySpending / data.totalIssuance) * 100).toFixed(2)} %` : 'Loading...'}
+          help={'Based on \'Projected Weekly Token Mint Rate\'. Does not include any deflationary forces (fees, slashes, burns, etc.)'}
+        />
+        <OverviewTableRow
+          item='Projected Weekly Value Of Mint'
+          value={ data ? displayStatusData(`${(data.totalWeeklySpending * Number(statusData?.price)).toFixed(2)}`, 'USD') : 'Loading...'}
+          help={'Based on \'Projected Weekly Token Mint Rate\', and current \'Exchange Rate\'.'}
+        />
+        <OverviewTableRow
+          item='Weekly Top Ups'
+          value={displayStatusData((Number(statusData?.dollarPool.replenishAmount) / 2).toFixed(2) || '', 'USD')}
+          help={'The current weekly \'Fiat Pool\' replenishment amount. Does not include KPIs, or other potential top ups.'}
+        />
+      </Table.Body>
+    </Table>
+  );
+};
+
+export default OverviewTable;

+ 226 - 0
pioneer/packages/joy-tokenomics/src/Overview/SpendingAndStakeDistributionTable.tsx

@@ -0,0 +1,226 @@
+import React from 'react';
+import { Table, Popup, Icon } from 'semantic-ui-react';
+import styled from 'styled-components';
+import { useWindowDimensions } from '../../../joy-utils/src/react/hooks';
+
+import { TokenomicsData, StatusServerData } from '@polkadot/joy-utils/src/types/tokenomics';
+
+const round = (num: number): number => Math.round((num + Number.EPSILON) * 100) / 100;
+
+const applyCss = (columns: number[]): string => {
+  let columnString = '';
+
+  columns.forEach((column, index) => {
+    if (index === 0) {
+      columnString += `td:nth-of-type(${column}), th:nth-of-type(${column})`;
+    } else {
+      columnString += ` ,td:nth-of-type(${column}), th:nth-of-type(${column})`;
+    }
+  });
+
+  return columnString;
+};
+
+const StyledTable = styled(({ divideColumnsAt, ...rest }) => <Table {...rest} />)`
+  border: none !important;
+  width: 70% !important;
+  margin-bottom:1.5rem;
+  @media (max-width: 1400px){
+    width:100% !important;
+  }
+  & tr {
+    td:nth-of-type(1),
+    th:nth-of-type(1),
+    ${(props: { divideColumnsAt: number[]}): string => applyCss(props.divideColumnsAt)} {
+      border-left: 0.12rem solid rgba(20,20,20,0.3) !important;
+    }
+    td:nth-of-type(1){
+      position: relative !important;
+    }
+    td:last-child, th:last-child{
+      border-right: 0.12rem solid rgba(20,20,20,0.3) !important;
+    }
+  }
+  & tr:last-child > td{
+    border-bottom: 0.12rem solid rgba(20,20,20,0.3) !important;
+  }
+  & tr:last-child > td:nth-of-type(1){
+    border-bottom-left-radius: 0.2rem !important;
+  }
+  & tr:last-child > td:last-child{
+    border-bottom-right-radius: 0.2rem !important;
+  }
+  th{
+    border-top: 0.12rem solid rgba(20,20,20,0.3) !important;
+  }
+  & .tableColorBlock{
+    height: 1rem;
+    width:1rem;
+    margin: 0 auto;
+    @media (max-width: 768px){
+      margin: 0;
+    }
+  }
+`;
+
+const StyledTableRow = styled(Table.Row)`
+  .help-icon{
+    position: absolute !important;
+    right: 0.5rem !important;
+    top: 0.8rem !important;
+    @media (max-width: 767px){
+      top:0.8rem !important;
+    }
+  }
+`;
+
+const SpendingAndStakeTableRow: React.FC<{
+  role: string;
+  numberOfActors?: string;
+  groupEarning?: string;
+  groupEarningDollar?: string;
+  earningShare?: string;
+  groupStake?: string;
+  groupStakeDollar?: string;
+  stakeShare?: string;
+  color?: string;
+  active?: boolean;
+  helpContent?: string;
+}> = ({ role, numberOfActors, groupEarning, groupEarningDollar, earningShare, groupStake, groupStakeDollar, stakeShare, color, active, helpContent }) => {
+  const parseData = (data: string | undefined): string | JSX.Element => {
+    if (data && active) {
+      return <em>{data}</em>;
+    } else if (data) {
+      return data;
+    } else {
+      return 'Loading..';
+    }
+  };
+
+  return (
+    <StyledTableRow color={active && 'rgb(150, 150, 150)'}>
+      <Table.Cell>
+        {active ? <strong>{role}</strong> : role}
+        {helpContent && <Popup
+          trigger={<Icon className='help-icon' name='help circle' color='grey'/>}
+          content={helpContent}
+          position='right center'
+        />}
+      </Table.Cell>
+      <Table.Cell>{parseData(numberOfActors)}</Table.Cell>
+      <Table.Cell>{parseData(groupEarning)}</Table.Cell>
+      <Table.Cell>{parseData(groupEarningDollar)}</Table.Cell>
+      <Table.Cell>{parseData(earningShare)}</Table.Cell>
+      <Table.Cell>{parseData(groupStake)}</Table.Cell>
+      <Table.Cell>{parseData(groupStakeDollar)}</Table.Cell>
+      <Table.Cell>{parseData(stakeShare)}</Table.Cell>
+      <Table.Cell><div className='tableColorBlock' style={{ backgroundColor: color }}></div></Table.Cell>
+    </StyledTableRow>
+  );
+};
+
+const SpendingAndStakeDistributionTable: React.FC<{data?: TokenomicsData; statusData?: StatusServerData | null}> = ({ data, statusData }) => {
+  const { width } = useWindowDimensions();
+
+  const displayStatusData = (group: 'validators' | 'council' | 'storageProviders' | 'storageProviderLead' | 'contentCurators', action: 'rewardsPerWeek' | 'totalStake'): string | undefined => {
+    if (group === 'storageProviderLead') {
+      return statusData === null ? 'Data currently unavailable...' : (data && statusData) && `${(data.storageProviders.lead[action] * Number(statusData.price)).toFixed(2)}`;
+    } else {
+      return statusData === null ? 'Data currently unavailable...' : (data && statusData) && `${(data[group][action] * Number(statusData.price)).toFixed(2)}`;
+    }
+  };
+
+  return (
+    <StyledTable divideColumnsAt={[3, 6, 9]} celled>
+      <Table.Header>
+        <Table.Row>
+          <Table.HeaderCell width={4}>Group/Role</Table.HeaderCell>
+          <Table.HeaderCell><div>Actors</div>[Number]</Table.HeaderCell>
+          <Table.HeaderCell><div>Group earning</div> [JOY/Week]</Table.HeaderCell>
+          <Table.HeaderCell><div>Group earning</div> [USD/Week]</Table.HeaderCell>
+          <Table.HeaderCell><div>Share</div> [%]</Table.HeaderCell>
+          <Table.HeaderCell><div>Group Stake</div> [JOY]</Table.HeaderCell>
+          <Table.HeaderCell><div>Group Stake</div> [USD]</Table.HeaderCell>
+          <Table.HeaderCell><div>Share</div> [%]</Table.HeaderCell>
+          <Table.HeaderCell width={1}>Color</Table.HeaderCell>
+        </Table.Row>
+      </Table.Header>
+
+      <Table.Body>
+        <SpendingAndStakeTableRow
+          role={width <= 1050 ? 'Validators' : 'Validators (Nominators)'}
+          helpContent='The current set of active Validators (and Nominators), and the sum of projected rewards and total stakes for the whole set (including Nominators).'
+          numberOfActors={data && `${data.validators.number} (${data.validators.nominators.number})`}
+          groupEarning={data && `${Math.round(data.validators.rewardsPerWeek)}`}
+          groupEarningDollar={displayStatusData('validators', 'rewardsPerWeek')}
+          earningShare={data && `${round(data.validators.rewardsShare * 100)}`}
+          groupStake={data && `${data.validators.totalStake}`}
+          groupStakeDollar={displayStatusData('validators', 'totalStake')}
+          stakeShare={data && `${round(data.validators.stakeShare * 100)}`}
+          color='rgb(246, 109, 68)'
+        />
+        <SpendingAndStakeTableRow
+          role={width <= 1015 ? 'Council' : 'Council Members'}
+          helpContent='The current Council Members, and the sum of their projected rewards and total stakes (including voters/backers).'
+          numberOfActors={data && `${data.council.number}`}
+          groupEarning={data && `${Math.round(data.council.rewardsPerWeek)}`}
+          groupEarningDollar={displayStatusData('council', 'rewardsPerWeek')}
+          earningShare={data && `${round(data.council.rewardsShare * 100)}`}
+          groupStake={data && `${data.council.totalStake}`}
+          groupStakeDollar={displayStatusData('council', 'totalStake')}
+          stakeShare={data && `${round(data.council.stakeShare * 100)}`}
+          color='rgb(254, 174, 101)'
+        />
+        <SpendingAndStakeTableRow
+          role={width <= 1015 ? 'Storage' : 'Storage Providers'}
+          helpContent='The current Storage Providers, and the sum of their projected rewards and stakes.'
+          numberOfActors={data && `${data.storageProviders.number}`}
+          groupEarning={data && `${Math.round(data.storageProviders.rewardsPerWeek)}`}
+          groupEarningDollar={displayStatusData('storageProviders', 'rewardsPerWeek')}
+          earningShare={data && `${round(data.storageProviders.rewardsShare * 100)}`}
+          groupStake={data && `${data.storageProviders.totalStake}`}
+          groupStakeDollar={displayStatusData('storageProviders', 'totalStake')}
+          stakeShare={data && `${round(data.storageProviders.stakeShare * 100)}`}
+          color='rgb(230, 246, 157)'
+        />
+        <SpendingAndStakeTableRow
+          role={width <= 1015 ? 'S. Lead' : width <= 1050 ? 'Storage Lead' : 'Storage Provider Lead'}
+          helpContent='Current Storage Provider Lead, and their projected reward and stake.'
+          numberOfActors={data && `${data.storageProviders.lead.number}`}
+          groupEarning={data && `${Math.round(data.storageProviders.lead.rewardsPerWeek)}`}
+          groupEarningDollar={displayStatusData('storageProviderLead', 'rewardsPerWeek')}
+          earningShare={data && `${round(data.storageProviders.lead.rewardsShare * 100)}`}
+          groupStake={data && `${data.storageProviders.lead.totalStake}`}
+          groupStakeDollar={displayStatusData('storageProviderLead', 'totalStake')}
+          stakeShare={data && `${round(data.storageProviders.lead.stakeShare * 100)}`}
+          color='rgb(170, 222, 167)'
+        />
+        <SpendingAndStakeTableRow
+          role={width <= 1015 ? 'Content' : 'Content Curators'}
+          helpContent='The current Content Curators (and their Lead), and the sum of their projected rewards and stakes.'
+          numberOfActors={data && `${data.contentCurators.number} (${data.contentCurators.contentCuratorLead})`}
+          groupEarning={data && `${Math.round(data.contentCurators.rewardsPerWeek)}`}
+          groupEarningDollar={displayStatusData('contentCurators', 'rewardsPerWeek')}
+          earningShare={data && `${round(data.contentCurators.rewardsShare * 100)}`}
+          groupStake={data && `${data.contentCurators.totalStake}`}
+          groupStakeDollar={displayStatusData('contentCurators', 'totalStake')}
+          stakeShare={data && `${round(data.contentCurators.stakeShare * 100)}`}
+          color='rgb(100, 194, 166)'
+        />
+        <SpendingAndStakeTableRow
+          role='TOTAL'
+          active={true}
+          numberOfActors={data && `${data.totalNumberOfActors}`}
+          groupEarning={data && `${Math.round(data.totalWeeklySpending)}`}
+          groupEarningDollar={statusData === null ? 'Data currently unavailable..' : (data && statusData) && `${round(data.totalWeeklySpending * Number(statusData.price))}`}
+          earningShare={data && '100'}
+          groupStake={data && `${data.currentlyStakedTokens}`}
+          groupStakeDollar={statusData === null ? 'Data currently unavailable..' : (data && statusData) && `${round(data.currentlyStakedTokens * Number(statusData.price))}`}
+          stakeShare={data && '100'}
+        />
+      </Table.Body>
+    </StyledTable>
+  );
+};
+
+export default SpendingAndStakeDistributionTable;

+ 93 - 0
pioneer/packages/joy-tokenomics/src/Overview/TokenomicsCharts.tsx

@@ -0,0 +1,93 @@
+import React from 'react';
+import { Icon, Label } from 'semantic-ui-react';
+import PieChart from '../../../react-components/src/Chart/PieChart';
+import styled from 'styled-components';
+
+import { TokenomicsData } from '@polkadot/joy-utils/src/types/tokenomics';
+
+const StyledPieChart = styled(PieChart)`
+  width:15rem;
+  height:15rem;
+  margin-bottom:1rem;
+  @media (max-width: 1650px){
+    height:12rem;
+    width:12rem;
+  }
+  @media (max-width: 1400px){
+    height:15rem;
+    width:15rem;
+  }
+`;
+
+const ChartContainer = styled('div')`
+  display:flex;
+  flex-direction:column;
+  align-items:center;
+`;
+
+const TokenomicsCharts: React.FC<{data?: TokenomicsData; className?: string}> = ({ data, className }) => {
+  return (
+    <div className={className}>
+      {data ? <ChartContainer>
+        <StyledPieChart
+          values={[{
+            colors: ['rgb(246, 109, 68)'],
+            label: 'Validators',
+            value: data.validators.rewardsShare * 100
+          }, {
+            colors: ['rgb(254, 174, 101)'],
+            label: 'Council',
+            value: data.council.rewardsShare * 100
+          }, {
+            colors: ['rgb(230, 246, 157)'],
+            label: 'Storage Providers',
+            value: data.storageProviders.rewardsShare * 100
+          }, {
+            colors: ['rgb(170, 222, 167)'],
+            label: 'Storage Lead',
+            value: data.storageProviders.lead.rewardsShare * 100
+          }, {
+            colors: ['rgb(100, 194, 166)'],
+            label: 'Content Curators',
+            value: data.contentCurators.rewardsShare * 100
+          }
+          ]} />
+        <Label as='div'>
+          <Icon name='money' />
+          <span style={{ fontWeight: 600 }}>Spending</span>
+        </Label>
+      </ChartContainer> : <Icon name='circle notched' loading/>}
+      {data ? <ChartContainer>
+        <StyledPieChart
+          values={[{
+            colors: ['rgb(246, 109, 68)'],
+            label: 'Validators',
+            value: data.validators.stakeShare * 100
+          }, {
+            colors: ['rgb(254, 174, 101)'],
+            label: 'Council',
+            value: data.council.stakeShare * 100
+          }, {
+            colors: ['rgb(230, 246, 157)'],
+            label: 'Storage Providers',
+            value: data.storageProviders.stakeShare * 100
+          }, {
+            colors: ['rgb(170, 222, 167)'],
+            label: 'Storage Lead',
+            value: data.storageProviders.lead.stakeShare * 100
+          }, {
+            colors: ['rgb(100, 194, 166)'],
+            label: 'Content Curators',
+            value: data.contentCurators.stakeShare * 100
+          }
+          ]} />
+        <Label as='div'>
+          <Icon name='block layout' />
+          <span style={{ fontWeight: 600 }}>Stake</span>
+        </Label>
+      </ChartContainer> : <Icon name='circle notched' loading/>}
+    </div>
+  );
+};
+
+export default TokenomicsCharts;

+ 59 - 0
pioneer/packages/joy-tokenomics/src/Overview/index.tsx

@@ -0,0 +1,59 @@
+import React from 'react';
+import OverviewTable from './OverviewTable';
+import SpendingAndStakeDistributionTable from './SpendingAndStakeDistributionTable';
+import TokenomicsCharts from './TokenomicsCharts';
+import styled from 'styled-components';
+
+import usePromise from '@polkadot/joy-utils/react/hooks/usePromise';
+import { useTransport } from '@polkadot/joy-utils/react/hooks';
+import { StatusServerData } from '@polkadot/joy-utils/src/types/tokenomics';
+
+const SpendingAndStakeContainer = styled('div')`
+  display:flex;
+  justify-content:space-between;
+  @media (max-width: 1400px){
+    flex-direction:column;
+  }
+`;
+
+const Title = styled('h2')`
+  border-bottom: 1px solid #ddd;
+  margin: 0 0 2rem 0;
+`;
+
+const StyledTokenomicsCharts = styled(TokenomicsCharts)`
+  width:30%;
+  display:flex;
+  align-items:center;
+  justify-content:space-evenly;
+  padding: 2rem 0;
+  @media (max-width: 1400px){
+    width:100%;
+  }
+  @media (max-width: 550px){
+    flex-direction:column;
+    & > div {
+      margin-bottom: 1.5rem;
+    }
+  }
+`;
+
+const Overview: React.FC = () => {
+  const transport = useTransport();
+  const [statusDataValue, statusDataError] = usePromise<StatusServerData | undefined>(() => fetch('https://status.joystream.org/status').then((res) => res.json().then((data) => data as StatusServerData)), undefined, []);
+  const [tokenomicsData] = usePromise(() => transport.tokenomics.getTokenomicsData(), undefined, []);
+
+  return (
+    <>
+      <Title> Overview </Title>
+      <OverviewTable data={tokenomicsData} statusData={statusDataError ? null : statusDataValue}/>
+      <Title> Spending and Stake Distribution </Title>
+      <SpendingAndStakeContainer>
+        <SpendingAndStakeDistributionTable data={tokenomicsData} statusData={statusDataError ? null : statusDataValue}/>
+        <StyledTokenomicsCharts data={tokenomicsData} />
+      </SpendingAndStakeContainer>
+    </>
+  );
+};
+
+export default Overview;

+ 34 - 0
pioneer/packages/joy-tokenomics/src/index.tsx

@@ -0,0 +1,34 @@
+import React from 'react';
+import { useTranslation } from './translate';
+import { Route, Switch } from 'react-router';
+import { Tabs } from '@polkadot/react-components';
+import Overview from './Overview';
+import { AppProps } from '@polkadot/react-components/types';
+
+type Props = AppProps
+
+function App ({ basePath }: Props): React.ReactElement<Props> {
+  const { t } = useTranslation();
+
+  return (
+    <main>
+      <header>
+        <Tabs
+          basePath={basePath}
+          items={[
+            {
+              isRoot: true,
+              name: 'overview',
+              text: t('Tokenomics')
+            }
+          ]}
+        />
+      </header>
+      <Switch>
+        <Route component={Overview} />
+      </Switch>
+    </main>
+  );
+}
+
+export default App;

+ 9 - 0
pioneer/packages/joy-tokenomics/src/translate.ts

@@ -0,0 +1,9 @@
+// Copyright 2017-2019 @polkadot/app-123code authors & contributors
+// This software may be modified and distributed under the terms
+// of the Apache-2.0 license. See the LICENSE file for details.
+
+import { useTranslation as useTranslationBase, UseTranslationResponse } from 'react-i18next';
+
+export function useTranslation (): UseTranslationResponse {
+  return useTranslationBase('joy-tokenomics');
+}

+ 5 - 0
pioneer/packages/joy-utils/src/consts/staking.ts

@@ -0,0 +1,5 @@
+// Values based on REWARD_CURVE const in /runtime/src/lib.rs
+export const IDEAL_STAKING_RATE = 0.25;
+export const MIN_INFLATION_RATE = 0.05;
+export const MAX_INFLATION_RATE = 0.75;
+export const FALL_OFF_RATE = 0.05;

+ 32 - 0
pioneer/packages/joy-utils/src/functions/staking.ts

@@ -0,0 +1,32 @@
+import { IDEAL_STAKING_RATE, MIN_INFLATION_RATE, MAX_INFLATION_RATE, FALL_OFF_RATE } from '../consts/staking';
+
+// See: https://github.com/Joystream/helpdesk/tree/master/roles/validators#rewards-on-joystream for reference
+export function calculateValidatorsRewardsPerEra (
+  totalValidatorsStake: number,
+  totalIssuance: number,
+  minutesPerEra = 60
+): number {
+  let validatorsRewardsPerYear = 0;
+  const stakingRate = totalValidatorsStake / totalIssuance;
+  const minutesPerYear = 365.2425 * 24 * 60;
+
+  if (stakingRate > IDEAL_STAKING_RATE) {
+    validatorsRewardsPerYear =
+      totalIssuance * (
+        MIN_INFLATION_RATE + (
+          (MAX_INFLATION_RATE - MIN_INFLATION_RATE) *
+          (2 ** ((IDEAL_STAKING_RATE - stakingRate) / FALL_OFF_RATE))
+        )
+      );
+  } else if (stakingRate === IDEAL_STAKING_RATE) {
+    validatorsRewardsPerYear = totalIssuance * MAX_INFLATION_RATE;
+  } else {
+    validatorsRewardsPerYear =
+      totalIssuance * (
+        MIN_INFLATION_RATE +
+        (MAX_INFLATION_RATE - MIN_INFLATION_RATE) * (stakingRate / IDEAL_STAKING_RATE)
+      );
+  }
+
+  return validatorsRewardsPerYear / minutesPerYear * minutesPerEra;
+}
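A quick worked example of the reward curve above (a sketch, not part of this diff; the figures are illustrative and the import path simply mirrors the location of the new file):

```ts
import { calculateValidatorsRewardsPerEra } from '@polkadot/joy-utils/src/functions/staking';

// Illustrative figures: 100M JOY total issuance with 25M staked, i.e. exactly
// the IDEAL_STAKING_RATE of 25%, using the function's default 60-minute era.
const perEra = calculateValidatorsRewardsPerEra(25_000_000, 100_000_000);

// At the ideal rate the annual mint is totalIssuance * MAX_INFLATION_RATE = 75M JOY,
// so one 1-hour era mints about 75_000_000 * 60 / (365.2425 * 24 * 60) ≈ 8556 JOY.
console.log(perEra.toFixed(0)); // "8556"
```

Above or below the ideal rate the same call returns less, since inflation falls off exponentially past 25% and scales linearly below it.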

+ 1 - 0
pioneer/packages/joy-utils/src/react/hooks/index.ts

@@ -3,3 +3,4 @@ export { default as useMyMembership } from './useMyMembership';
 export { default as usePromise } from './usePromise';
 export { default as useTransport } from './useTransport';
 export { default as useProposalSubscription } from './proposals/useProposalSubscription';
+export { default as useWindowDimensions } from './useWindowDimensions';

+ 26 - 0
pioneer/packages/joy-utils/src/react/hooks/useWindowDimensions.ts

@@ -0,0 +1,26 @@
+import { useState, useEffect } from 'react';
+
+function getWindowDimensions () {
+  const { innerWidth: width, innerHeight: height } = window;
+
+  return {
+    width,
+    height
+  };
+}
+
+export default function useWindowDimensions () {
+  const [windowDimensions, setWindowDimensions] = useState(getWindowDimensions());
+
+  useEffect(() => {
+    function handleResize () {
+      setWindowDimensions(getWindowDimensions());
+    }
+
+    window.addEventListener('resize', handleResize);
+
+    return () => window.removeEventListener('resize', handleResize);
+  }, []);
+
+  return windowDimensions;
+}

+ 3 - 0
pioneer/packages/joy-utils/src/transport/index.ts

@@ -7,6 +7,7 @@ import CouncilTransport from './council';
 import ValidatorsTransport from './validators';
 import WorkingGroupsTransport from './workingGroups';
 import { APIQueryCache } from './APIQueryCache';
+import TokenomicsTransport from './tokenomics';
 
 export default class Transport {
   protected api: ApiPromise;
@@ -19,6 +20,7 @@ export default class Transport {
   public contentWorkingGroup: ContentWorkingGroupTransport;
   public validators: ValidatorsTransport;
   public workingGroups: WorkingGroupsTransport;
+  public tokenomics: TokenomicsTransport
 
   constructor (api: ApiPromise) {
     this.api = api;
@@ -30,5 +32,6 @@ export default class Transport {
     this.contentWorkingGroup = new ContentWorkingGroupTransport(api, this.cacheApi, this.members);
     this.proposals = new ProposalsTransport(api, this.cacheApi, this.members, this.chain, this.council);
     this.workingGroups = new WorkingGroupsTransport(api, this.cacheApi, this.members);
+    this.tokenomics = new TokenomicsTransport(api, this.cacheApi, this.council, this.workingGroups);
   }
 }

+ 344 - 0
pioneer/packages/joy-utils/src/transport/tokenomics.ts

@@ -0,0 +1,344 @@
+import BaseTransport from './base';
+import { ApiPromise } from '@polkadot/api';
+import CouncilTransport from './council';
+import WorkingGroupsTransport from './workingGroups';
+import { APIQueryCache } from './APIQueryCache';
+import { Seats } from '@joystream/types/council';
+import { Option } from '@polkadot/types';
+import { BlockNumber, BalanceOf, Exposure } from '@polkadot/types/interfaces';
+import { WorkerId } from '@joystream/types/working-group';
+import { RewardRelationshipId, RewardRelationship } from '@joystream/types/recurring-rewards';
+import { StakeId, Stake } from '@joystream/types/stake';
+import { CuratorId, Curator, LeadId } from '@joystream/types/content-working-group';
+import { TokenomicsData } from '@polkadot/joy-utils/src/types/tokenomics';
+import { calculateValidatorsRewardsPerEra } from '../functions/staking';
+
+export default class TokenomicsTransport extends BaseTransport {
+  private councilT: CouncilTransport;
+  private workingGroupT: WorkingGroupsTransport;
+
+  constructor (api: ApiPromise, cacheApi: APIQueryCache, councilTransport: CouncilTransport, workingGroups: WorkingGroupsTransport) {
+    super(api, cacheApi);
+    this.councilT = councilTransport;
+    this.workingGroupT = workingGroups;
+  }
+
+  async councilSizeAndStake () {
+    let totalCouncilStake = 0;
+    const activeCouncil = await this.council.activeCouncil() as Seats;
+
+    activeCouncil.map((member) => {
+      let stakeAmount = 0;
+
+      stakeAmount += member.stake.toNumber();
+      member.backers.forEach((backer) => {
+        stakeAmount += backer.stake.toNumber();
+      });
+      totalCouncilStake += stakeAmount;
+    });
+
+    return {
+      numberOfCouncilMembers: activeCouncil.length,
+      totalCouncilStake
+    };
+  }
+
+  private async councilRewardsPerWeek (numberOfCouncilMembers: number) {
+    const payoutInterval = Number((await this.api.query.council.payoutInterval() as Option<BlockNumber>).unwrapOr(0));
+    const amountPerPayout = (await this.api.query.council.amountPerPayout() as BalanceOf).toNumber();
+    const totalCouncilRewardsPerBlock = (amountPerPayout && payoutInterval)
+      ? (amountPerPayout * numberOfCouncilMembers) / payoutInterval
+      : 0;
+
+    const { new_term_duration, voting_period, revealing_period, announcing_period } = await this.councilT.electionParameters();
+    const termDuration = new_term_duration.toNumber();
+    const votingPeriod = voting_period.toNumber();
+    const revealingPeriod = revealing_period.toNumber();
+    const announcingPeriod = announcing_period.toNumber();
+    const weekInBlocks = 100800;
+
+    const councilTermDurationRatio = termDuration / (termDuration + votingPeriod + revealingPeriod + announcingPeriod);
+    const avgCouncilRewardPerBlock = councilTermDurationRatio * totalCouncilRewardsPerBlock;
+    const avgCouncilRewardPerWeek = avgCouncilRewardPerBlock * weekInBlocks;
+
+    return avgCouncilRewardPerWeek;
+  }
+
+  async getCouncilData () {
+    const { numberOfCouncilMembers, totalCouncilStake } = await this.councilSizeAndStake();
+    const totalCouncilRewardsInOneWeek = await this.councilRewardsPerWeek(numberOfCouncilMembers);
+
+    return {
+      numberOfCouncilMembers,
+      totalCouncilRewardsInOneWeek,
+      totalCouncilStake
+    };
+  }
+
+  private async storageProviderSizeAndIds () {
+    const stakeIds: StakeId[] = [];
+    const rewardIds: RewardRelationshipId[] = [];
+    let leadStakeId: StakeId | null = null;
+    let leadRewardId: RewardRelationshipId | null = null;
+    let numberOfStorageProviders = 0;
+    let leadNumber = 0;
+    const allWorkers = await this.workingGroupT.allWorkers('Storage');
+    const currentLeadId = (await this.api.query.storageWorkingGroup.currentLead() as Option<WorkerId>).unwrapOr(null)?.toNumber();
+
+    allWorkers.forEach(([workerId, worker]) => {
+      const stakeId = worker.role_stake_profile.isSome ? worker.role_stake_profile.unwrap().stake_id : null;
+      const rewardId = worker.reward_relationship.unwrapOr(null);
+
+      if (currentLeadId !== undefined && currentLeadId === workerId.toNumber()) {
+        leadStakeId = stakeId;
+        leadRewardId = rewardId;
+        leadNumber += 1;
+      } else {
+        numberOfStorageProviders += 1;
+
+        if (stakeId) {
+          stakeIds.push(stakeId);
+        }
+
+        if (rewardId) {
+          rewardIds.push(rewardId);
+        }
+      }
+    });
+
+    return {
+      numberOfStorageProviders,
+      stakeIds,
+      rewardIds,
+      leadNumber,
+      leadRewardId,
+      leadStakeId
+    };
+  }
+
+  private async storageProviderStakeAndRewards (
+    stakeIds: StakeId[],
+    leadStakeId: StakeId | null,
+    rewardIds: RewardRelationshipId[],
+    leadRewardId: RewardRelationshipId | null
+  ) {
+    let totalStorageProviderStake = 0;
+    let leadStake = 0;
+    let storageProviderRewardsPerBlock = 0;
+    let storageLeadRewardsPerBlock = 0;
+
+    (await this.api.query.stake.stakes.multi<Stake>(stakeIds)).forEach((stake) => {
+      totalStorageProviderStake += stake.value.toNumber();
+    });
+    (await this.api.query.recurringRewards.rewardRelationships.multi<RewardRelationship>(rewardIds)).map((rewardRelationship) => {
+      const amount = rewardRelationship.amount_per_payout.toNumber();
+      const payoutInterval = rewardRelationship.payout_interval.isSome
+        ? rewardRelationship.payout_interval.unwrap().toNumber()
+        : null;
+
+      if (amount && payoutInterval) {
+        storageProviderRewardsPerBlock += amount / payoutInterval;
+      }
+    });
+
+    if (leadStakeId !== null) {
+      leadStake += (await this.api.query.stake.stakes(leadStakeId) as Stake).value.toNumber();
+    }
+
+    if (leadRewardId !== null) {
+      const leadRewardData = (await this.api.query.recurringRewards.rewardRelationships(leadRewardId) as RewardRelationship);
+      const leadAmount = leadRewardData.amount_per_payout.toNumber();
+      const leadRewardInterval = leadRewardData.payout_interval.isSome ? leadRewardData.payout_interval.unwrap().toNumber() : null;
+
+      if (leadAmount && leadRewardInterval) {
+        storageLeadRewardsPerBlock += leadAmount / leadRewardInterval;
+      }
+    }
+
+    return {
+      totalStorageProviderStake,
+      leadStake,
+      storageProviderRewardsPerWeek: storageProviderRewardsPerBlock * 100800,
+      storageProviderLeadRewardsPerWeek: storageLeadRewardsPerBlock * 100800
+    };
+  }
+
+  async getStorageProviderData () {
+    const { numberOfStorageProviders, leadNumber, stakeIds, rewardIds, leadRewardId, leadStakeId } = await this.storageProviderSizeAndIds();
+    const { totalStorageProviderStake, leadStake, storageProviderRewardsPerWeek, storageProviderLeadRewardsPerWeek } =
+      await this.storageProviderStakeAndRewards(stakeIds, leadStakeId, rewardIds, leadRewardId);
+
+    return {
+      numberOfStorageProviders,
+      storageProviderLeadNumber: leadNumber,
+      totalStorageProviderStake,
+      totalStorageProviderLeadStake: leadStake,
+      storageProviderRewardsPerWeek,
+      storageProviderLeadRewardsPerWeek
+    };
+  }
+
+  private async contentCuratorSizeAndIds () {
+    const stakeIds: StakeId[] = []; const rewardIds: RewardRelationshipId[] = []; let numberOfContentCurators = 0;
+    const contentCurators = await this.entriesByIds<CuratorId, Curator>(this.api.query.contentWorkingGroup.curatorById);
+    const currentLeadId = (await this.api.query.contentWorkingGroup.currentLeadId() as Option<LeadId>).unwrapOr(null)?.toNumber();
+
+    contentCurators.forEach(([curatorId, curator]) => {
+      const stakeId = curator.role_stake_profile.isSome ? curator.role_stake_profile.unwrap().stake_id : null;
+      const rewardId = curator.reward_relationship.unwrapOr(null);
+
+      if (curator.is_active) {
+        numberOfContentCurators += 1;
+
+        if (stakeId) {
+          stakeIds.push(stakeId);
+        }
+
+        if (rewardId) {
+          rewardIds.push(rewardId);
+        }
+      }
+    });
+
+    return {
+      stakeIds,
+      rewardIds,
+      numberOfContentCurators,
+      contentCuratorLeadNumber: currentLeadId ? 1 : 0
+    };
+  }
+
+  private async contentCuratorStakeAndRewards (stakeIds: StakeId[], rewardIds: RewardRelationshipId[]) {
+    let totalContentCuratorStake = 0;
+    let contentCuratorRewardsPerBlock = 0;
+
+    (await this.api.query.stake.stakes.multi<Stake>(stakeIds)).forEach((stake) => {
+      totalContentCuratorStake += stake.value.toNumber();
+    });
+    (await this.api.query.recurringRewards.rewardRelationships.multi<RewardRelationship>(rewardIds)).map((rewardRelationship) => {
+      const amount = rewardRelationship.amount_per_payout.toNumber();
+      const payoutInterval = rewardRelationship.payout_interval.isSome
+        ? rewardRelationship.payout_interval.unwrap().toNumber()
+        : null;
+
+      if (amount && payoutInterval) {
+        contentCuratorRewardsPerBlock += amount / payoutInterval;
+      }
+    });
+
+    return {
+      totalContentCuratorStake,
+      contentCuratorRewardsPerBlock
+    };
+  }
+
+  async getContentCuratorData () {
+    const { stakeIds, rewardIds, numberOfContentCurators, contentCuratorLeadNumber } = await this.contentCuratorSizeAndIds();
+    const { totalContentCuratorStake, contentCuratorRewardsPerBlock } = await this.contentCuratorStakeAndRewards(stakeIds, rewardIds);
+
+    return {
+      numberOfContentCurators,
+      contentCuratorLeadNumber,
+      totalContentCuratorStake,
+      contentCuratorRewardsPerWeek: contentCuratorRewardsPerBlock * 100800
+    };
+  }
+
+  async validatorSizeAndStake () {
+    const validatorIds = await this.api.query.session.validators();
+    const currentEra = (await this.api.query.staking.currentEra()).unwrapOr(null);
+    let totalValidatorStake = 0; let numberOfNominators = 0;
+
+    if (currentEra !== null) {
+      const validatorStakeData = await this.api.query.staking.erasStakers.multi<Exposure>(
+        validatorIds.map((validatorId) => [currentEra, validatorId])
+      );
+
+      validatorStakeData.forEach((data) => {
+        if (!data.total.isEmpty) {
+          totalValidatorStake += data.total.toNumber();
+        }
+
+        if (!data.others.isEmpty) {
+          numberOfNominators += data.others.length;
+        }
+      });
+    }
+
+    return {
+      numberOfValidators: validatorIds.length,
+      numberOfNominators,
+      totalValidatorStake
+    };
+  }
+
+  async getValidatorData () {
+    const totalIssuance = (await this.api.query.balances.totalIssuance()).toNumber();
+    const { numberOfValidators, numberOfNominators, totalValidatorStake } = await this.validatorSizeAndStake();
+    const validatorRewardsPerEra = calculateValidatorsRewardsPerEra(totalValidatorStake, totalIssuance);
+
+    return {
+      totalIssuance,
+      numberOfValidators,
+      numberOfNominators,
+      totalValidatorStake,
+      validatorRewardsPerWeek: validatorRewardsPerEra * 168 // Assuming 1 era = 1h
+    };
+  }
+
+  async getTokenomicsData (): Promise<TokenomicsData> {
+    const { numberOfCouncilMembers, totalCouncilRewardsInOneWeek, totalCouncilStake } = await this.getCouncilData();
+    const { numberOfStorageProviders, storageProviderLeadNumber, totalStorageProviderStake, totalStorageProviderLeadStake, storageProviderLeadRewardsPerWeek, storageProviderRewardsPerWeek } = await this.getStorageProviderData();
+    const { numberOfContentCurators, contentCuratorLeadNumber, totalContentCuratorStake, contentCuratorRewardsPerWeek } = await this.getContentCuratorData();
+    const { numberOfValidators, numberOfNominators, totalValidatorStake, validatorRewardsPerWeek, totalIssuance } = await this.getValidatorData();
+    const currentlyStakedTokens = totalCouncilStake + totalStorageProviderStake + totalStorageProviderLeadStake + totalContentCuratorStake + totalValidatorStake;
+    const totalWeeklySpending = totalCouncilRewardsInOneWeek + storageProviderRewardsPerWeek + storageProviderLeadRewardsPerWeek + contentCuratorRewardsPerWeek + validatorRewardsPerWeek;
+    const totalNumberOfActors = numberOfCouncilMembers + numberOfStorageProviders + storageProviderLeadNumber + numberOfContentCurators + contentCuratorLeadNumber + numberOfValidators;
+
+    return {
+      totalIssuance,
+      currentlyStakedTokens,
+      totalWeeklySpending,
+      totalNumberOfActors,
+      validators: {
+        number: numberOfValidators,
+        nominators: {
+          number: numberOfNominators
+        },
+        rewardsPerWeek: validatorRewardsPerWeek,
+        rewardsShare: validatorRewardsPerWeek / totalWeeklySpending,
+        totalStake: totalValidatorStake,
+        stakeShare: totalValidatorStake / currentlyStakedTokens
+      },
+      council: {
+        number: numberOfCouncilMembers,
+        rewardsPerWeek: totalCouncilRewardsInOneWeek,
+        rewardsShare: totalCouncilRewardsInOneWeek / totalWeeklySpending,
+        totalStake: totalCouncilStake,
+        stakeShare: totalCouncilStake / currentlyStakedTokens
+      },
+      storageProviders: {
+        number: numberOfStorageProviders,
+        totalStake: totalStorageProviderStake,
+        stakeShare: totalStorageProviderStake / currentlyStakedTokens,
+        rewardsPerWeek: storageProviderRewardsPerWeek,
+        rewardsShare: storageProviderRewardsPerWeek / totalWeeklySpending,
+        lead: {
+          number: storageProviderLeadNumber,
+          totalStake: totalStorageProviderLeadStake,
+          stakeShare: totalStorageProviderLeadStake / currentlyStakedTokens,
+          rewardsPerWeek: storageProviderLeadRewardsPerWeek,
+          rewardsShare: storageProviderLeadRewardsPerWeek / totalWeeklySpending
+        }
+      },
+      contentCurators: {
+        number: numberOfContentCurators,
+        contentCuratorLead: contentCuratorLeadNumber,
+        rewardsPerWeek: contentCuratorRewardsPerWeek,
+        rewardsShare: contentCuratorRewardsPerWeek / totalWeeklySpending,
+        totalStake: totalContentCuratorStake,
+        stakeShare: totalContentCuratorStake / currentlyStakedTokens
+      }
+    };
+  }
+}
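One property of `getTokenomicsData` worth noting: every group's `rewardsShare` divides by the same `totalWeeklySpending` and every `stakeShare` by the same `currentlyStakedTokens`, so across validators, council, storage providers, the storage lead and content curators each kind of share sums to 1. A minimal sketch (not part of the diff) that checks this against a fetched result:

```ts
import { TokenomicsData } from '@polkadot/joy-utils/src/types/tokenomics';

// Sums the per-group reward shares; the storage lead is reported separately
// from the storage provider group, so it is added as its own term.
function rewardsShareSum (data: TokenomicsData): number {
  return (
    data.validators.rewardsShare +
    data.council.rewardsShare +
    data.storageProviders.rewardsShare +
    data.storageProviders.lead.rewardsShare +
    data.contentCurators.rewardsShare
  );
}

// Usage, assuming a Transport instance wired as in transport/index.ts above:
// const sum = rewardsShareSum(await transport.tokenomics.getTokenomicsData());
// console.log(sum.toFixed(3)); // ~1.000, up to floating-point error
```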

+ 53 - 0
pioneer/packages/joy-utils/src/types/tokenomics.ts

@@ -0,0 +1,53 @@
+export type TokenomicsData = {
+  totalIssuance: number;
+  currentlyStakedTokens: number;
+  totalWeeklySpending: number;
+  totalNumberOfActors: number;
+  validators: {
+    number: number;
+    nominators: {
+      number: number;
+    };
+    rewardsPerWeek: number;
+    rewardsShare: number;
+    totalStake: number;
+    stakeShare: number;
+  };
+  council: {
+    number: number;
+    rewardsPerWeek: number;
+    rewardsShare: number;
+    totalStake: number;
+    stakeShare: number;
+  };
+  storageProviders: {
+    number: number;
+    totalStake: number;
+    stakeShare: number;
+    rewardsPerWeek: number;
+    rewardsShare: number;
+    lead: {
+      number: number;
+      totalStake: number;
+      stakeShare: number;
+      rewardsPerWeek: number;
+      rewardsShare: number;
+    };
+  };
+  contentCurators: {
+    number: number;
+    contentCuratorLead: number;
+    rewardsPerWeek: number;
+    rewardsShare: number;
+    totalStake: number;
+    stakeShare: number;
+  };
+}
+
+export type StatusServerData = {
+  dollarPool: {
+    size: number;
+    replenishAmount: number;
+  };
+  price: string;
+};

+ 6 - 6
pioneer/packages/page-accounts/src/Accounts/index.tsx

@@ -205,11 +205,11 @@ function Overview ({ className = '', onStatusChange }: Props): React.ReactElemen
           label={t<string>('Restore JSON')}
           onClick={toggleImport}
         />
-        <Button
+        {/* <Button
           icon='qrcode'
           label={t<string>('Add via Qr')}
           onClick={toggleQr}
-        />
+        /> */}
         {isLedger() && (
           <>
             <Button
@@ -219,18 +219,18 @@ function Overview ({ className = '', onStatusChange }: Props): React.ReactElemen
             />
           </>
         )}
-        <Button
+        {/* <Button
           icon='plus'
           isDisabled={!(api.tx.multisig || api.tx.utility)}
           label={t<string>('Multisig')}
           onClick={toggleMultisig}
-        />
-        <Button
+        /> */}
+        {/* <Button
           icon='plus'
           isDisabled={!api.tx.proxy}
           label={t<string>('Proxied')}
           onClick={toggleProxy}
-        />
+        /> */}
       </Button.Group>
       <Table
         empty={t<string>("You don't have any accounts. Some features are currently hidden and will only become available once you have accounts.")}

+ 3 - 3
pioneer/packages/page-accounts/src/index.tsx

@@ -7,10 +7,10 @@ import { AppProps as Props } from '@polkadot/react-components/types';
 import React, { useMemo } from 'react';
 import { Route, Switch } from 'react-router';
 import { useAccounts, useIpfs } from '@polkadot/react-hooks';
-import { HelpOverlay, Tabs } from '@polkadot/react-components';
+import { Tabs } from '@polkadot/react-components'; // HelpOverlay
 import { MemoForm } from '@polkadot/joy-utils/react/components/Memo';
 
-import basicMd from './md/basic.md';
+// import basicMd from './md/basic.md';
 import { useTranslation } from './translate';
 import useCounter from './useCounter';
 import Accounts from './Accounts';
@@ -49,7 +49,7 @@ function AccountsApp ({ basePath, onStatusChange }: Props): React.ReactElement<P
 
   return (
     <main className='accounts--App'>
-      <HelpOverlay md={basicMd as string} />
+      {/* <HelpOverlay md={basicMd as string} /> */}
       <header>
         <Tabs
           basePath={basePath}

+ 4 - 1
pioneer/packages/page-js/src/Playground.tsx

@@ -16,6 +16,7 @@ import uiKeyring from '@polkadot/ui-keyring';
 import * as types from '@polkadot/types';
 import * as util from '@polkadot/util';
 import * as hashing from '@polkadot/util-crypto';
+import * as joy from '@joystream/types';
 
 import { STORE_EXAMPLES, STORE_SELECTED, CUSTOM_LABEL } from './constants';
 import makeWrapper from './snippets/wrapping';
@@ -36,6 +37,7 @@ interface Injected {
   setIsRunning: (isRunning: boolean) => void;
   types: typeof types;
   util: typeof util;
+  joy: typeof joy;
   [name: string]: any;
 }
 
@@ -69,7 +71,8 @@ function setupInjected ({ api, isDevelopment }: ApiProps, setIsRunning: (isRunni
     uiKeyring: isDevelopment
       ? uiKeyring
       : null,
-    util
+    util,
+    joy
   };
 }
 

+ 3 - 3
pioneer/packages/page-staking/src/index.tsx

@@ -10,12 +10,12 @@ import React, { useEffect, useMemo, useState } from 'react';
 import { Route, Switch } from 'react-router';
 import { useLocation } from 'react-router-dom';
 import styled from 'styled-components';
-import { HelpOverlay } from '@polkadot/react-components';
+// import { HelpOverlay } from '@polkadot/react-components';
 import Tabs from '@polkadot/react-components/Tabs';
 import { useAccounts, useApi, useCall, useFavorites, useOwnStashInfos, useStashIds } from '@polkadot/react-hooks';
 import { isFunction } from '@polkadot/util';
 
-import basicMd from './md/basic.md';
+// import basicMd from './md/basic.md';
 import Actions from './Actions';
 import Overview from './Overview';
 import Payouts from './Payouts';
@@ -99,7 +99,7 @@ function StakingApp ({ basePath, className = '' }: Props): React.ReactElement<Pr
 
   return (
     <main className={`staking--App ${className}`}>
-      <HelpOverlay md={basicMd as string} />
+      {/* <HelpOverlay md={basicMd as string} /> */}
       <header>
         <Tabs
           basePath={basePath}

+ 67 - 0
pioneer/packages/react-components/src/Chart/PieChart.tsx

@@ -0,0 +1,67 @@
+// Copyright 2017-2019 @polkadot/react-components authors & contributors
+// This software may be modified and distributed under the terms
+// of the Apache-2.0 license. See the LICENSE file for details.
+
+import { BareProps } from '../types';
+
+import BN from 'bn.js';
+import React from 'react';
+import { Pie } from 'react-chartjs-2';
+import { bnToBn } from '@polkadot/util';
+
+interface Value {
+  colors: string[];
+  label: string;
+  value: number | BN;
+}
+
+interface Props extends BareProps {
+  size?: number;
+  values: Value[];
+}
+
+interface Options {
+  colorNormal: string[];
+  colorHover: string[];
+  data: number[];
+  labels: string[];
+}
+
+export default function PieChart ({ className, style, values }: Props): React.ReactElement<Props> {
+  const options: Options = {
+    colorNormal: [],
+    colorHover: [],
+    data: [],
+    labels: []
+  };
+
+  values.forEach(({ colors: [normalColor = '#00f', hoverColor], label, value }): void => {
+    options.colorNormal.push(normalColor);
+    options.colorHover.push(hoverColor || normalColor);
+    options.data.push(bnToBn(value).toNumber());
+    options.labels.push(label);
+  });
+
+  return (
+    <div
+      className={className}
+    >
+      <Pie
+        legend={{
+          display: false
+        }}
+        options={{
+          maintainAspectRatio: false
+        }}
+        data={{
+          labels: options.labels,
+          datasets: [{
+            data: options.data,
+            backgroundColor: options.colorNormal,
+            hoverBackgroundColor: options.colorHover
+          }]
+        }}
+      />
+    </div>
+  );
+}

+ 3 - 1
pioneer/tsconfig.json

@@ -89,7 +89,9 @@
       "@polkadot/react-query": [ "packages/react-query/src" ],
       "@polkadot/react-query/*": [ "packages/react-query/src/*" ],
       "@polkadot/react-signer": [ "packages/react-signer/src" ],
-      "@polkadot/react-signer/*": [ "packages/react-signer/src/*" ]
+      "@polkadot/react-signer/*": [ "packages/react-signer/src/*" ],
+      "@polkadot/joy-tokenomics": [ "packages/joy-tokenomics/src" ],
+      "@polkadot/joy-tokenomics/*": [ "packages/joy-tokenomics/src/*" ]
     },
     "skipLibCheck": true,
     "typeRoots": [

+ 4 - 0
runtime-modules/common/Cargo.toml

@@ -6,6 +6,8 @@ edition = '2018'
 
 [dependencies]
 serde = { version = "1.0.101", optional = true, features = ["derive"] }
+strum = {version = "0.19", optional = true}
+strum_macros = {version = "0.19", optional = true}
 codec = { package = 'parity-scale-codec', version = '1.3.1', default-features = false, features = ['derive'] }
 sp-runtime = { package = 'sp-runtime', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
 frame-support = { package = 'frame-support', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
@@ -16,6 +18,8 @@ pallet-timestamp = { package = 'pallet-timestamp', default-features = false, git
 default = ['std']
 std = [
 	'serde',
+	'strum',
+	'strum_macros',
 	'codec/std',
 	'sp-runtime/std',
 	'frame-support/std',

+ 5 - 1
runtime-modules/common/src/working_group.rs

@@ -1,9 +1,11 @@
 use codec::{Decode, Encode};
 #[cfg(feature = "std")]
 use serde::{Deserialize, Serialize};
+#[cfg(feature = "std")]
+use strum_macros::EnumIter;
 
 /// Defines well-known working groups.
-#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, EnumIter))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Copy, Debug)]
 pub enum WorkingGroup {
     /* Reserved
@@ -12,4 +14,6 @@ pub enum WorkingGroup {
     */
     /// Storage working group: working_group::Instance2.
     Storage,
+    /// Content working group: working_group::Instance3.
+    Content,
 }
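
For illustration, a minimal sketch (not part of this diff) of how the new `EnumIter` derive can be used downstream once the `std` feature is enabled; the helper name is hypothetical:

    use strum::IntoEnumIterator; // brought in by the new optional `strum` dependency

    // Hypothetical helper: collect every defined working group without hard-coding variants.
    fn all_working_groups() -> Vec<WorkingGroup> {
        WorkingGroup::iter().collect()
    }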

+ 30 - 0
runtime-modules/content-directory/Cargo.toml

@@ -0,0 +1,30 @@
+[package]
+name = 'pallet-content-directory'
+version = '3.0.0'
+authors = ['Joystream contributors']
+edition = '2018'
+
+[dependencies]
+sp-std = { package = 'sp-std', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+sp-runtime = { package = 'sp-runtime', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+frame-support = { package = 'frame-support', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+system = { package = 'frame-system', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+sp-arithmetic = { package = 'sp-arithmetic', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+codec = { package = 'parity-scale-codec', version = '1.3.1', default-features = false, features = ['derive'] }
+serde = {version = '1.0.101', features = ['derive'], optional = true}
+
+[dev-dependencies]
+sp-io = { package = 'sp-io', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+sp-core = { package = 'sp-core', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '00768a1f21a579c478fe5d4f51e1fa71f7db9fd4'}
+
+[features]
+default = ['std']
+std = [
+	'sp-std/std',
+	'sp-runtime/std',
+	'frame-support/std',
+	'system/std',
+	'sp-arithmetic/std',
+	'codec/std',
+	'serde',
+]

+ 244 - 0
runtime-modules/content-directory/src/class.rs

@@ -0,0 +1,244 @@
+use super::*;
+
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct Class<T: Trait> {
+    /// Permissions for an instance of a Class.
+    class_permissions: ClassPermissions<T>,
+    /// All properties that have been used on this class across different class schemas.
+    /// Unlikely to be more than roughly 20 properties per class, often less.
+    /// For Person, think "height", "weight", etc.
+    properties: Vec<Property<T>>,
+
+    /// All schemas that are available for this class, think v0.0 Person, v.1.0 Person, etc.
+    schemas: Vec<Schema>,
+
+    name: Vec<u8>,
+
+    description: Vec<u8>,
+
+    /// The maximum number of entities which can be created.
+    maximum_entities_count: T::EntityId,
+
+    /// The current number of entities which exist.
+    current_number_of_entities: T::EntityId,
+
+    /// How many entities a given controller may create at most.
+    default_entity_creation_voucher_upper_bound: T::EntityId,
+}
+
+impl<T: Trait> Default for Class<T> {
+    fn default() -> Self {
+        Self {
+            class_permissions: ClassPermissions::<T>::default(),
+            properties: vec![],
+            schemas: vec![],
+            name: vec![],
+            description: vec![],
+            maximum_entities_count: T::EntityId::default(),
+            current_number_of_entities: T::EntityId::default(),
+            default_entity_creation_voucher_upper_bound: T::EntityId::default(),
+        }
+    }
+}
+
+impl<T: Trait> Class<T> {
+    /// Create new `Class` with provided parameters
+    pub fn new(
+        class_permissions: ClassPermissions<T>,
+        name: Vec<u8>,
+        description: Vec<u8>,
+        maximum_entities_count: T::EntityId,
+        default_entity_creation_voucher_upper_bound: T::EntityId,
+    ) -> Self {
+        Self {
+            class_permissions,
+            properties: vec![],
+            schemas: vec![],
+            name,
+            description,
+            maximum_entities_count,
+            current_number_of_entities: T::EntityId::zero(),
+            default_entity_creation_voucher_upper_bound,
+        }
+    }
+
+    pub fn get_name(&self) -> &[u8] {
+        &self.name
+    }
+
+    pub fn get_description(&self) -> &[u8] {
+        &self.description
+    }
+
+    pub fn set_name(&mut self, name: Vec<u8>) {
+        self.name = name;
+    }
+
+    pub fn set_description(&mut self, description: Vec<u8>) {
+        self.description = description;
+    }
+
+    /// Used to update `Schema` status under given `schema_index`
+    pub fn update_schema_status(&mut self, schema_index: SchemaId, schema_status: bool) {
+        if let Some(schema) = self.schemas.get_mut(schema_index as usize) {
+            schema.set_status(schema_status);
+        };
+    }
+
+    /// Used to update `Class` permissions
+    pub fn update_permissions(&mut self, permissions: ClassPermissions<T>) {
+        self.class_permissions = permissions
+    }
+
+    /// Get Class schemas by mutable reference
+    pub fn get_schemas_mut(&mut self) -> &mut Vec<Schema> {
+        &mut self.schemas
+    }
+
+    /// Get Class schemas by reference
+    pub fn get_schemas(&self) -> &Vec<Schema> {
+        &self.schemas
+    }
+
+    /// Increment number of entities, associated with this class
+    pub fn increment_entities_count(&mut self) {
+        self.current_number_of_entities += T::EntityId::one();
+    }
+
+    /// Decrement number of entities, associated with this class
+    pub fn decrement_entities_count(&mut self) {
+        self.current_number_of_entities -= T::EntityId::one();
+    }
+
+    /// Retrieve `ClassPermissions` by mutable reference
+    pub fn get_permissions_mut(&mut self) -> &mut ClassPermissions<T> {
+        &mut self.class_permissions
+    }
+
+    /// Retrieve `ClassPermissions` by reference
+    pub fn get_permissions_ref(&self) -> &ClassPermissions<T> {
+        &self.class_permissions
+    }
+
+    /// Retrieve `ClassPermissions` by value
+    pub fn get_permissions(self) -> ClassPermissions<T> {
+        self.class_permissions
+    }
+
+    /// Retrieve `Class` properties by value  
+    pub fn get_properties(self) -> Vec<Property<T>> {
+        self.properties
+    }
+
+    /// Replace `Class` properties with updated_class_properties
+    pub fn set_properties(&mut self, updated_class_properties: Vec<Property<T>>) {
+        self.properties = updated_class_properties;
+    }
+
+    /// Get the per-controller `Class`-specific limit
+    pub fn get_default_entity_creation_voucher_upper_bound(&self) -> T::EntityId {
+        self.default_entity_creation_voucher_upper_bound
+    }
+
+    /// Retrieve the maximum number of entities that can be created for the given `Class`
+    pub fn get_maximum_entities_count(&self) -> T::EntityId {
+        self.maximum_entities_count
+    }
+
+    /// Set the per-controller `Class`-specific limit
+    pub fn set_default_entity_creation_voucher_upper_bound(
+        &mut self,
+        new_default_entity_creation_voucher_upper_bound: T::EntityId,
+    ) {
+        self.default_entity_creation_voucher_upper_bound =
+            new_default_entity_creation_voucher_upper_bound;
+    }
+
+    /// Set the maximum number of entities that can be created for the given `Class`
+    pub fn set_maximum_entities_count(&mut self, maximum_entities_count: T::EntityId) {
+        self.maximum_entities_count = maximum_entities_count;
+    }
+
+    /// Ensure `Class` `Schema` under given index exists, and return the corresponding `Schema`
+    pub fn ensure_schema_exists(&self, schema_index: SchemaId) -> Result<&Schema, Error<T>> {
+        self.schemas
+            .get(schema_index as usize)
+            .ok_or(Error::<T>::UnknownClassSchemaId)
+    }
+
+    /// Ensure `schema_id` is a valid index of `Class` schemas vector
+    pub fn ensure_schema_id_exists(&self, schema_id: SchemaId) -> Result<(), Error<T>> {
+        ensure!(
+            schema_id < self.schemas.len() as SchemaId,
+            Error::<T>::UnknownClassSchemaId
+        );
+        Ok(())
+    }
+
+    /// Ensure `Schema`s limit per `Class` not reached
+    pub fn ensure_schemas_limit_not_reached(&self) -> Result<(), Error<T>> {
+        ensure!(
+            (self.schemas.len() as MaxNumber) < T::MaxNumberOfSchemasPerClass::get(),
+            Error::<T>::ClassSchemasLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure properties limit per `Schema` not reached
+    pub fn ensure_properties_limit_not_reached(
+        &self,
+        new_properties: &[Property<T>],
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            T::MaxNumberOfPropertiesPerSchema::get()
+                >= (self.properties.len() + new_properties.len()) as MaxNumber,
+            Error::<T>::SchemaPropertiesLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure `Class` specific entities limit not reached
+    pub fn ensure_maximum_entities_count_limit_not_reached(&self) -> Result<(), Error<T>> {
+        ensure!(
+            self.current_number_of_entities < self.maximum_entities_count,
+            Error::<T>::NumberOfEntitiesPerClassLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure `Property` under given `PropertyId` is unlocked from actor with given `EntityAccessLevel`
+    /// return corresponding `Property` by value
+    pub fn ensure_class_property_type_unlocked_from(
+        &self,
+        in_class_schema_property_id: PropertyId,
+        entity_access_level: EntityAccessLevel,
+    ) -> Result<Property<T>, Error<T>> {
+        // Ensure property values were not locked on Class level
+        self.ensure_property_values_unlocked()?;
+
+        // Get class-level information about this `Property`
+        let class_property = self
+            .properties
+            .get(in_class_schema_property_id as usize)
+            // Throw an error if a property was not found on class
+            // by an in-class index of a property.
+            .ok_or(Error::<T>::ClassPropertyNotFound)?;
+
+        // Ensure Property is unlocked from Actor with given EntityAccessLevel
+        class_property.ensure_unlocked_from(entity_access_level)?;
+
+        Ok(class_property.to_owned())
+    }
+
+    /// Ensure property values were not locked on `Class` level
+    pub fn ensure_property_values_unlocked(&self) -> Result<(), Error<T>> {
+        ensure!(
+            !self
+                .get_permissions_ref()
+                .all_entity_property_values_locked(),
+            Error::<T>::AllPropertiesWereLockedOnClassLevel
+        );
+        Ok(())
+    }
+}
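
For illustration, a hypothetical guard composition (not part of this diff) showing how the `ensure_*` checks above are meant to be chained before a schema is added; `can_add_schema` is an invented name:

    // Sketch only: compose the class-level guards before mutating anything.
    fn can_add_schema<T: Trait>(
        class: &Class<T>,
        new_properties: &[Property<T>],
    ) -> Result<(), Error<T>> {
        // Fail early if the class already holds the maximum number of schemas...
        class.ensure_schemas_limit_not_reached()?;
        // ...or if adding these properties would exceed the property limit.
        class.ensure_properties_limit_not_reached(new_properties)?;
        Ok(())
    }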

+ 202 - 0
runtime-modules/content-directory/src/entity.rs

@@ -0,0 +1,202 @@
+use super::*;
+
+/// Represents `Entity`, related to a specific `Class`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub struct Entity<T: Trait> {
+    /// Permissions for an instance of an Entity.
+    entity_permissions: EntityPermissions<T>,
+
+    /// The class id of this entity.
+    class_id: T::ClassId,
+
+    /// Which schemas of the respective class this entity supports, think
+    /// v2.0 Person schema for John, v3.0 Person schema for John.
+    /// Unlikely to be more than roughly 20ish, assuming schemas for a given class eventually stabilize,
+    /// or that very old schemas are eventually removed.
+    supported_schemas: BTreeSet<SchemaId>, // indices of schema in corresponding class
+
+    /// Values for properties on class that are used by some schema used by this entity
+    /// Length is no more than Class.properties.
+    values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+
+    /// Number of property values referencing current entity
+    reference_counter: InboundReferenceCounter,
+}
+
+impl<T: Trait> Default for Entity<T> {
+    fn default() -> Self {
+        Self {
+            entity_permissions: EntityPermissions::<T>::default(),
+            class_id: T::ClassId::default(),
+            supported_schemas: BTreeSet::new(),
+            values: BTreeMap::new(),
+            reference_counter: InboundReferenceCounter::default(),
+        }
+    }
+}
+
+impl<T: Trait> Entity<T> {
+    /// Create new `Entity` instance, related to a given `class_id`, with provided parameters
+    pub fn new(
+        controller: EntityController<T>,
+        class_id: T::ClassId,
+        supported_schemas: BTreeSet<SchemaId>,
+        values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> Self {
+        Self {
+            entity_permissions: EntityPermissions::<T>::default_with_controller(controller),
+            class_id,
+            supported_schemas,
+            values,
+            reference_counter: InboundReferenceCounter::default(),
+        }
+    }
+
+    /// Get `class_id` of this `Entity`
+    pub fn get_class_id(&self) -> T::ClassId {
+        self.class_id
+    }
+
+    /// Get Entity supported schemas by mutable reference
+    pub fn get_supported_schemas_mut(&mut self) -> &mut BTreeSet<SchemaId> {
+        &mut self.supported_schemas
+    }
+
+    /// Get `Entity` values by value
+    pub fn get_values(self) -> BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        self.values
+    }
+
+    /// Get `Entity` values by reference
+    pub fn get_values_ref(&self) -> &BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        &self.values
+    }
+
+    /// Get `Entity` values by mutable reference
+    pub fn get_values_mut(&mut self) -> &mut BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        &mut self.values
+    }
+
+    /// Replace `Entity` values with provided `new_values`
+    pub fn set_values(&mut self, new_values: BTreeMap<PropertyId, StoredPropertyValue<T>>) {
+        self.values = new_values;
+    }
+
+    /// Get mutable `EntityPermissions` reference, related to given `Entity`
+    pub fn get_permissions_mut(&mut self) -> &mut EntityPermissions<T> {
+        &mut self.entity_permissions
+    }
+
+    /// Get `EntityPermissions` reference, related to given `Entity`
+    pub fn get_permissions_ref(&self) -> &EntityPermissions<T> {
+        &self.entity_permissions
+    }
+
+    /// Get `EntityPermissions`, related to given `Entity` by value
+    pub fn get_permissions(self) -> EntityPermissions<T> {
+        self.entity_permissions
+    }
+
+    /// Update existing `EntityPermissions` with newly provided
+    pub fn update_permissions(&mut self, permissions: EntityPermissions<T>) {
+        self.entity_permissions = permissions
+    }
+
+    /// Ensure `Schema` under given id is not added to given `Entity` yet
+    pub fn ensure_schema_id_is_not_added(&self, schema_id: SchemaId) -> Result<(), Error<T>> {
+        let schema_not_added = !self.supported_schemas.contains(&schema_id);
+        ensure!(schema_not_added, Error::<T>::SchemaAlreadyAddedToTheEntity);
+        Ok(())
+    }
+
+    /// Ensure provided `property_values` are not added to the `Entity` `values` map yet
+    pub fn ensure_property_values_are_not_added(
+        &self,
+        property_values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            property_values
+                .keys()
+                .all(|key| !self.values.contains_key(key)),
+            Error::<T>::EntityAlreadyContainsGivenPropertyId
+        );
+        Ok(())
+    }
+
+    /// Ensure the property value stored under given `in_class_schema_property_id` is a vector
+    pub fn ensure_property_value_is_vec(
+        &self,
+        in_class_schema_property_id: PropertyId,
+    ) -> Result<VecStoredPropertyValue<T>, Error<T>> {
+        self.values
+            .get(&in_class_schema_property_id)
+            // Throw an error if a property was not found on entity
+            // by an in-class index of a property.
+            .ok_or(Error::<T>::UnknownEntityPropertyId)?
+            .as_vec_property_value()
+            .map(|property_value_vec| property_value_vec.to_owned())
+            // Ensure prop value under given class schema property id is vector
+            .ok_or(Error::<T>::PropertyValueUnderGivenIndexIsNotAVector)
+    }
+
+    /// Ensure no `InputPropertyValue` from another entity points to the given `Entity`
+    pub fn ensure_rc_is_zero(&self) -> Result<(), Error<T>> {
+        ensure!(
+            self.reference_counter.is_total_equal_to_zero(),
+            Error::<T>::EntityRcDoesNotEqualToZero
+        );
+        Ok(())
+    }
+
+    /// Ensure no inbound `InputPropertyValue` with `same_owner` flag set points to the given `Entity`
+    pub fn ensure_inbound_same_owner_rc_is_zero(&self) -> Result<(), Error<T>> {
+        ensure!(
+            self.reference_counter.is_same_owner_equal_to_zero(),
+            Error::<T>::EntityInboundSameOwnerRcDoesNotEqualToZero
+        );
+        Ok(())
+    }
+
+    /// Get mutable reference to the `Entity`'s `InboundReferenceCounter` instance
+    pub fn get_reference_counter_mut(&mut self) -> &mut InboundReferenceCounter {
+        &mut self.reference_counter
+    }
+}
+
+/// Structure representing inbound reference counters for each `Entity`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Copy)]
+pub struct InboundReferenceCounter {
+    /// Total number of inbound references from other entities
+    pub total: u32,
+    /// Number of inbound references from other entities with `SameOwner` flag set
+    pub same_owner: u32,
+}
+
+impl InboundReferenceCounter {
+    /// Create simple `InboundReferenceCounter` instance, based on `same_owner` flag provided
+    pub fn new(reference_counter: u32, same_owner: bool) -> Self {
+        if same_owner {
+            Self {
+                total: reference_counter,
+                same_owner: reference_counter,
+            }
+        } else {
+            Self {
+                total: reference_counter,
+                same_owner: 0,
+            }
+        }
+    }
+
+    /// Check if `total` is equal to zero
+    pub fn is_total_equal_to_zero(self) -> bool {
+        self.total == 0
+    }
+
+    /// Check if `same_owner` is equal to zero
+    pub fn is_same_owner_equal_to_zero(self) -> bool {
+        self.same_owner == 0
+    }
+}
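
For illustration, a small sketch (not part of this diff) of how `InboundReferenceCounter` separates total from same-owner inbound references, using only the constructor and checks defined above:

    fn inbound_rc_example() {
        // Three references from entities with the same controller: both counters move together.
        let same_owner_rc = InboundReferenceCounter::new(3, true);
        assert_eq!((same_owner_rc.total, same_owner_rc.same_owner), (3, 3));

        // Two references from entities with a different controller: only `total` is affected.
        let other_owner_rc = InboundReferenceCounter::new(2, false);
        assert_eq!((other_owner_rc.total, other_owner_rc.same_owner), (2, 0));
        assert!(!other_owner_rc.is_total_equal_to_zero());
        assert!(other_owner_rc.is_same_owner_equal_to_zero());
    }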

+ 236 - 0
runtime-modules/content-directory/src/errors.rs

@@ -0,0 +1,236 @@
+use crate::*;
+use frame_support::decl_error;
+
+decl_error! {
+    /// Content directory errors
+    pub enum Error for Module<T: Trait> {
+
+        /// Validation errors
+        /// --------------------------------------
+
+
+        /// Property name is too short
+        PropertyNameTooShort,
+
+        /// Property name is too long
+        PropertyNameTooLong,
+
+        /// Property description is too short
+        PropertyDescriptionTooShort,
+
+        /// Property description is too long
+        PropertyDescriptionTooLong,
+
+        /// Class name is too short
+        ClassNameTooShort,
+
+        /// Class name is too long
+        ClassNameTooLong,
+
+        /// Class description is too short
+        ClassDescriptionTooShort,
+
+        /// Class description is too long
+        ClassDescriptionTooLong,
+
+        /// Maximum number of classes limit reached
+        ClassLimitReached,
+
+        /// Maximum number of given class schemas limit reached
+        ClassSchemasLimitReached,
+
+        /// Maximum number of properties in schema limit reached
+        SchemaPropertiesLimitReached,
+
+        /// Entities creation limit per controller should be less than overall entities creation limit
+        PerControllerEntitiesCreationLimitExceedsOverallLimit,
+
+        /// Number of entities per class is too big
+        EntitiesNumberPerClassConstraintViolated,
+
+        /// Number of class entities per actor constraint violated
+        NumberOfClassEntitiesPerActorConstraintViolated,
+
+        /// Individual number of class entities per actor is too big
+        IndividualNumberOfClassEntitiesPerActorIsTooBig,
+
+        /// Number of operations during atomic batching limit reached
+        NumberOfOperationsDuringAtomicBatchingLimitReached,
+
+        /// Text property is too long
+        TextPropertyTooLong,
+
+        /// Text property to be hashed is too long
+        HashedTextPropertyTooLong,
+
+        /// Vector property is too long
+        VecPropertyTooLong,
+
+        /// Property value vector can't contain more values
+        EntityPropertyValueVectorIsTooLong,
+
+        /// Given property value vector index is out of range
+        EntityPropertyValueVectorIndexIsOutOfRange,
+
+
+        /// Main logic errors
+        /// --------------------------------------
+
+
+        /// Class was not found by id
+        ClassNotFound,
+
+        /// Class property under given index not found
+        ClassPropertyNotFound,
+
+        /// Unknown class schema id
+        UnknownClassSchemaId,
+
+        /// Given class schema is not active
+        ClassSchemaNotActive,
+
+        /// New class schema refers to an unknown property index
+        ClassSchemaRefersUnknownPropertyIndex,
+
+        /// New class schema refers to an unknown class id
+        ClassSchemaRefersUnknownClass,
+
+        /// Cannot add a class schema with an empty list of properties
+        NoPropertiesInClassSchema,
+
+        /// Entity was not found by id
+        EntityNotFound,
+
+        /// Cannot add a schema that is already added to this entity
+        SchemaAlreadyAddedToTheEntity,
+
+        /// Some of the provided property values don't match the expected property type
+        PropertyValueDoNotMatchType,
+
+        /// Property value doesn't match the expected vector property type
+        PropertyValueDoNotMatchVecType,
+
+        /// Property value under given index is not a vector
+        PropertyValueUnderGivenIndexIsNotAVector,
+
+        /// Current property value vector nonce does not equal the provided one
+        PropertyValueVecNoncesDoesNotMatch,
+
+        /// Property name is not unique within its class
+        PropertyNameNotUniqueInAClass,
+
+        /// Some required property was not found when adding schema support to entity
+        MissingRequiredProperty,
+
+        /// Schema under provided schema_id does not contain given property
+        SchemaDoesNotContainProvidedPropertyId,
+
+        /// Some of the provided property ids cannot be found on the current list of property values of this entity
+        UnknownEntityPropertyId,
+
+        /// Entity already contains property under provided index
+        EntityAlreadyContainsGivenPropertyId,
+
+        /// Property value type does not match internal entity vector type
+        PropertyValueTypeDoesNotMatchInternalVectorType,
+
+        /// Provided property references an entity whose class_id does not equal the class_id declared in the corresponding property type
+        ReferencedEntityDoesNotMatchItsClass,
+
+        /// Entity removal can't be completed, as there are some property values pointing to the given entity
+        EntityRcDoesNotEqualToZero,
+
+        /// Entity ownership transfer can't be completed, as there are some property values pointing to the given entity with the same owner flag set
+        EntityInboundSameOwnerRcDoesNotEqualToZero,
+
+        /// Provided entity controller is equal to the current one
+        ProvidedEntityControllerIsEqualToTheCurrentOne,
+
+        /// All ids of new property value references with same owner flag set should match their respective Properties defined on Class level
+        AllProvidedPropertyValueIdsMustBeReferencesWithSameOwnerFlagSet,
+
+        /// Entity was not created in batched transaction
+        EntityNotCreatedByOperation,
+
+        /// Permission errors
+        /// --------------------------------------
+
+        /// Curator group can't be removed, as it currently maintains at least one class
+        CuratorGroupRemovalForbidden,
+
+        /// All property values, related to a given Entity were locked on Class level
+        AllPropertiesWereLockedOnClassLevel,
+
+        /// Curator under provided curator id is not a member of curator group under given id
+        CuratorIsNotAMemberOfGivenCuratorGroup,
+
+        /// Given curator group does not exist
+        CuratorGroupDoesNotExist,
+
+        /// Entity should be referenced from the entity, owned by the same controller
+        SameControllerConstraintViolation,
+
+        /// Given maintainer does not exist
+        MaintainerDoesNotExist,
+
+        /// Given maintainer already exists
+        MaintainerAlreadyExists,
+
+        /// Provided actor can't create entities of given class
+        ActorCanNotCreateEntities,
+
+        /// Maximum numbers of entities per class limit reached
+        NumberOfEntitiesPerClassLimitReached,
+
+        /// Current class entities creation blocked
+        EntitiesCreationBlocked,
+
+        /// Entities voucher limit reached
+        VoucherLimitReached,
+
+        /// Lead authentication failed
+        LeadAuthFailed,
+
+        /// Member authentication failed
+        MemberAuthFailed,
+
+        /// Curator authentication failed
+        CuratorAuthFailed,
+
+        /// Expected root or signed origin
+        BadOrigin,
+
+        /// Entity removal access denied
+        EntityRemovalAccessDenied,
+
+        /// Add entity schema support access denied
+        EntityAddSchemaSupportAccessDenied,
+
+        /// Class access denied
+        ClassAccessDenied,
+
+        /// Entity access denied
+        EntityAccessDenied,
+
+        /// Given entity can't be referenced
+        EntityCanNotBeReferenced,
+
+        /// Given class property type is locked for given actor
+        ClassPropertyTypeLockedForGivenActor,
+
+        /// Number of maintainers per class limit reached
+        ClassMaintainersLimitReached,
+
+        /// Max number of curators per group limit reached
+        CuratorsPerGroupLimitReached,
+
+        /// Curator group is not active
+        CuratorGroupIsNotActive,
+
+        /// Origin cannot be made into raw origin
+        OriginCanNotBeMadeIntoRawOrigin,
+
+        /// Property value should be unique across all Entities of this Class
+        PropertyValueShouldBeUnique
+    }
+}

+ 340 - 0
runtime-modules/content-directory/src/helpers.rs

@@ -0,0 +1,340 @@
+use crate::*;
+use core::ops::{Deref, DerefMut};
+
+/// Wrapper for existing `InputPropertyValue` and its respective `Class` `Property`
+pub struct InputValueForExistingProperty<'a, T: Trait>(&'a Property<T>, &'a InputPropertyValue<T>);
+
+impl<'a, T: Trait> InputValueForExistingProperty<'a, T> {
+    /// Create single instance of `InputValueForExistingProperty` from provided `property` and `value`
+    fn new(property: &'a Property<T>, value: &'a InputPropertyValue<T>) -> Self {
+        Self(property, value)
+    }
+
+    /// Retrieve `Property` reference
+    pub fn get_property(&self) -> &Property<T> {
+        self.0
+    }
+
+    /// Retrieve `InputPropertyValue` reference
+    pub fn get_value(&self) -> &InputPropertyValue<T> {
+        self.1
+    }
+
+    /// Retrieve `Property` and `InputPropertyValue` references
+    pub fn unzip(&self) -> (&Property<T>, &InputPropertyValue<T>) {
+        (self.0, self.1)
+    }
+}
+
+/// Mapping, used to represent `PropertyId` relation to its respective `InputValueForExistingProperty` structure
+pub struct InputValuesForExistingProperties<'a, T: Trait>(
+    BTreeMap<PropertyId, InputValueForExistingProperty<'a, T>>,
+);
+
+impl<'a, T: Trait> Default for InputValuesForExistingProperties<'a, T> {
+    fn default() -> Self {
+        Self(BTreeMap::default())
+    }
+}
+
+impl<'a, T: Trait> Deref for InputValuesForExistingProperties<'a, T> {
+    type Target = BTreeMap<PropertyId, InputValueForExistingProperty<'a, T>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a, T: Trait> DerefMut for InputValuesForExistingProperties<'a, T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl<'a, T: Trait> InputValuesForExistingProperties<'a, T> {
+    /// Create `InputValuesForExistingProperties` helper structure from provided `property_values` and their corresponding `Class` properties.
+    /// Returns an error when the `Class` `Property` under `property_id`, corresponding to the provided `property_value`, is not found
+    pub fn from(
+        properties: &'a [Property<T>],
+        property_values: &'a BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> Result<Self, Error<T>> {
+        let mut values_for_existing_properties = InputValuesForExistingProperties::<T>::default();
+        for (&property_id, property_value) in property_values {
+            let property = properties
+                .get(property_id as usize)
+                .ok_or(Error::<T>::ClassPropertyNotFound)?;
+            values_for_existing_properties.insert(
+                property_id,
+                InputValueForExistingProperty::new(property, property_value),
+            );
+        }
+        Ok(values_for_existing_properties)
+    }
+}
+
+/// Wrapper for existing `StoredPropertyValue` and its respective `Class` `Property`
+pub struct StoredValueForExistingProperty<'a, T: Trait>(
+    &'a Property<T>,
+    &'a StoredPropertyValue<T>,
+);
+
+impl<'a, T: Trait> StoredValueForExistingProperty<'a, T> {
+    /// Create single instance of `StoredValueForExistingProperty` from provided `property` and `value`
+    pub fn new(property: &'a Property<T>, value: &'a StoredPropertyValue<T>) -> Self {
+        Self(property, value)
+    }
+
+    /// Retrieve `Property` reference
+    pub fn get_property(&self) -> &Property<T> {
+        self.0
+    }
+
+    /// Retrieve `StoredPropertyValue` reference
+    pub fn get_value(&self) -> &StoredPropertyValue<T> {
+        self.1
+    }
+
+    /// Retrieve `Property` and `StoredPropertyValue` references
+    pub fn unzip(&self) -> (&Property<T>, &StoredPropertyValue<T>) {
+        (self.0, self.1)
+    }
+
+    /// Check if Property is default and non `required`
+    pub fn is_default(&self) -> bool {
+        let (property, property_value) = self.unzip();
+        !property.required && *property_value == StoredPropertyValue::<T>::default()
+    }
+}
+
+/// Mapping, used to represent `PropertyId` relation to its respective `StoredValuesForExistingProperties` structure
+pub struct StoredValuesForExistingProperties<'a, T: Trait>(
+    BTreeMap<PropertyId, StoredValueForExistingProperty<'a, T>>,
+);
+
+impl<'a, T: Trait> Default for StoredValuesForExistingProperties<'a, T> {
+    fn default() -> Self {
+        Self(BTreeMap::default())
+    }
+}
+
+impl<'a, T: Trait> Deref for StoredValuesForExistingProperties<'a, T> {
+    type Target = BTreeMap<PropertyId, StoredValueForExistingProperty<'a, T>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a, T: Trait> DerefMut for StoredValuesForExistingProperties<'a, T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl<'a, T: Trait> StoredValuesForExistingProperties<'a, T> {
+    /// Create `StoredValuesForExistingProperties` helper structure from provided `property_values` and their corresponding `Class` properties.
+    pub fn from(
+        properties: &'a [Property<T>],
+        property_values: &'a BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> Result<Self, Error<T>> {
+        let mut values_for_existing_properties = StoredValuesForExistingProperties::<T>::default();
+
+        for (&property_id, property_value) in property_values {
+            let property = properties
+                .get(property_id as usize)
+                .ok_or(Error::<T>::ClassPropertyNotFound)?;
+            values_for_existing_properties.insert(
+                property_id,
+                StoredValueForExistingProperty::new(property, property_value),
+            );
+        }
+        Ok(values_for_existing_properties)
+    }
+
+    /// Used to compute hashes from `StoredPropertyValue`s and their respective property ids, whose respective `Properties` have the `unique` flag set
+    /// (skipping `PropertyId`s whose respective `property values` under this `Entity` are default and non `required`)
+    pub fn compute_unique_hashes(&self) -> BTreeMap<PropertyId, T::Hash> {
+        self.iter()
+            .filter(|(_, value_for_property)| {
+                // skip `PropertyId`s, which respective `property values` under this `Entity` are default and non `required`
+                value_for_property.get_property().unique && !value_for_property.is_default()
+            })
+            .map(|(&property_id, property_value)| {
+                (
+                    property_id,
+                    property_value.get_value().compute_unique_hash(property_id),
+                )
+            })
+            .collect()
+    }
+}
+
+/// Length constraint for input validation
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Default, Clone, Copy, PartialEq, Eq)]
+pub struct InputValidationLengthConstraint {
+    /// Minimum length
+    min: u16,
+
+    /// Difference between minimum length and max length.
+    /// While having max would have been more direct, this
+    /// way makes max < min unrepresentable semantically,
+    /// which is safer.
+    max_min_diff: u16,
+}
+
+impl InputValidationLengthConstraint {
+    /// Create new `InputValidationLengthConstraint` constraint
+    pub const fn new(min: u16, max_min_diff: u16) -> Self {
+        Self { min, max_min_diff }
+    }
+
+    /// Helper for computing max
+    pub fn max(self) -> u16 {
+        self.min + self.max_min_diff
+    }
+
+    /// Retrieve min length value
+    pub fn min(self) -> u16 {
+        self.min
+    }
+
+    /// Ensure length is valid
+    pub fn ensure_valid<T: Trait>(
+        self,
+        len: usize,
+        too_short_msg: Error<T>,
+        too_long_msg: Error<T>,
+    ) -> Result<(), Error<T>> {
+        let length = len as u16;
+        if length < self.min {
+            Err(too_short_msg)
+        } else if length > self.max() {
+            Err(too_long_msg)
+        } else {
+            Ok(())
+        }
+    }
+}
+
+/// Enum used to specify which mode of operation should be chosen
+#[derive(Clone, PartialEq, Eq, Copy)]
+pub enum DeltaMode {
+    Increment,
+    Decrement,
+}
+
+impl Default for DeltaMode {
+    fn default() -> Self {
+        Self::Increment
+    }
+}
+
+/// Represents the delta by which the respective `InboundReferenceCounter` should be changed.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Default, PartialEq, Eq)]
+pub struct EntityReferenceCounterSideEffect {
+    /// Delta number of all inbound references from other entities
+    pub total: i32,
+    /// Delta number of inbound references from other entities with `SameOwner` flag set
+    pub same_owner: i32,
+}
+
+impl Clone for EntityReferenceCounterSideEffect {
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+impl Copy for EntityReferenceCounterSideEffect {}
+
+impl EntityReferenceCounterSideEffect {
+    /// Create atomic `EntityReferenceCounterSideEffect` instance, based on `same_owner` flag provided and `DeltaMode`
+    pub fn atomic(same_owner: bool, delta_mode: DeltaMode) -> Self {
+        let counter = if let DeltaMode::Increment = delta_mode {
+            1
+        } else {
+            -1
+        };
+
+        if same_owner {
+            Self {
+                total: counter,
+                same_owner: counter,
+            }
+        } else {
+            Self {
+                total: counter,
+                same_owner: 0,
+            }
+        }
+    }
+}
+
+impl AddAssign for EntityReferenceCounterSideEffect {
+    fn add_assign(&mut self, other: EntityReferenceCounterSideEffect) {
+        *self = Self {
+            total: self.total + other.total,
+            same_owner: self.same_owner + other.same_owner,
+        };
+    }
+}
+
+/// The net side effect on a set of entities from some operations.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub struct ReferenceCounterSideEffects<T: Trait>(
+    BTreeMap<T::EntityId, EntityReferenceCounterSideEffect>,
+);
+
+impl<T: Trait> Deref for ReferenceCounterSideEffects<T> {
+    type Target = BTreeMap<T::EntityId, EntityReferenceCounterSideEffect>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T: Trait> DerefMut for ReferenceCounterSideEffects<T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl<T: Trait> Default for ReferenceCounterSideEffects<T> {
+    fn default() -> Self {
+        Self(BTreeMap::default())
+    }
+}
+
+impl<T: Trait> ReferenceCounterSideEffects<T> {
+    /// Merge the side effects from `other` into `self`, returning the combined result
+    pub fn update(mut self, other: Self) -> Self {
+        // Make a set, that includes both self and other entity_id keys
+        let entity_ids: BTreeSet<T::EntityId> = self.keys().chain(other.keys()).copied().collect();
+
+        for entity_id in entity_ids {
+            // If `self` contains value under provided `entity_id`,
+            // increase it on `EntityReferenceCounterSideEffect` value from `other` if exists,
+            // otherwise update `self` entry under provided `entity_id` with `EntityReferenceCounterSideEffect` from `other`
+            match (self.get_mut(&entity_id), other.get(&entity_id)) {
+                (Some(self_entity_rc_side_effect), Some(other_entity_rc_side_effect)) => {
+                    *self_entity_rc_side_effect += *other_entity_rc_side_effect
+                }
+                (_, Some(other_entity_rc_side_effect)) => {
+                    self.insert(entity_id, *other_entity_rc_side_effect);
+                }
+                _ => (),
+            }
+        }
+        self
+    }
+
+    /// Traverse `ReferenceCounterSideEffects`, updating each `Entity` respective reference counters
+    pub fn update_entities_rcs(&self) {
+        self.iter()
+            .for_each(|(entity_id, inbound_reference_counter_delta)| {
+                Module::<T>::update_entity_rc(*entity_id, *inbound_reference_counter_delta);
+            });
+    }
+}
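
For illustration, a short sketch (not part of this diff) of the arithmetic behind `InputValidationLengthConstraint` and `EntityReferenceCounterSideEffect`, using only the constructors and operators defined in this file:

    fn helpers_example() {
        // min = 1, max_min_diff = 48  =>  valid lengths are 1..=49.
        let name_constraint = InputValidationLengthConstraint::new(1, 48);
        assert_eq!(name_constraint.min(), 1);
        assert_eq!(name_constraint.max(), 49);

        // Adding one same-owner reference while removing one other-owner reference
        // nets out to: total delta 0, same-owner delta +1.
        let mut delta = EntityReferenceCounterSideEffect::atomic(true, DeltaMode::Increment);
        delta += EntityReferenceCounterSideEffect::atomic(false, DeltaMode::Decrement);
        assert_eq!((delta.total, delta.same_owner), (0, 1));
    }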

+ 2710 - 0
runtime-modules/content-directory/src/lib.rs

@@ -0,0 +1,2710 @@
+//! # Content Directory Module
+//!
+//! The content directory is an on-chain index of all content and metadata,
+//! and related concepts - such as channels and playlists.
+//!
+//! - [`substrate_content_directory_module::Trait`](./trait.Trait.html)
+//! - [`Call`](./enum.Call.html)
+//! - [`Module`](./struct.Module.html)
+//!
+//! ## Overview
+//!
+//! The content directory provides functions for:
+//!
+//! - Creating, removing and managing curator groups
+//! - Creating classes and managing their permissions
+//! - Adding schemas to a class
+//! - Creating and removing entities and managing their permissions
+//! - Adding schema support to the respective class entities
+//! - Transferring entity ownership
+//! - Updating entity property values
+//!
+//! ## Terminology
+//!
+//! ### Class
+//!
+//! - **Class Properties:** All properties that have been used on this class across different class schemas.
+//! Unlikely to be more than roughly 20 properties per class, often less.
+//! For Person, think "height", "weight", etc.
+//!
+//! - **Schemas:** All schemas that are available for this class, think v0.0 Person, v1.0 Person, etc.
+//!
+//! ### Entity
+//!
+//! - **Supported Schemas:** Which schemas of the respective class the entity supports, think
+//! v2.0 Person schema for John, v3.0 Person schema for John.
+//! Unlikely to be more than roughly 20ish, assuming schemas for a given class eventually stabilize,
+//! or that very old schemas are eventually removed.
+//!
+//! - **Property Values:**  Values for properties, declared on class level,
+//! that are used in respective Class Entity after adding Schema support.
+//!
+//! ## Interface
+//!
+//! ### Dispatchable Functions
+//!
+//! #### Curator groups
+//!
+//! - `add_curator_group` - Add new curator group to the runtime storage
+//! - `remove_curator_group` - Remove curator group under given `curator_group_id` from runtime storage.
+//! The origin of this call must be the lead.
+//! - `set_curator_group_status` - Set activity status for curator group under given `curator_group_id`
+//! - `add_curator_to_group` - Add curator to curator group under given `curator_group_id`
+//! - `remove_curator_from_group` - Remove curator from a given curator group.
+//!
+//! #### Classes
+//!
+//! - `create_class` - Create new class with provided parameters
+//! - `add_maintainer_to_class` - Add curator group under given curator_group_id as class maintainer
+//! - `remove_maintainer_from_class` - Remove curator group under given curator_group_id from class maintainers set
+//! - `update_class_permissions` - Update class permissions under specific class_id
+//! - `add_class_schema` - Create new class schema from existing property ids and new properties
+//! - `update_class_schema_status` - Update schema status under specific schema_id in class
+//!
+//! #### Entities
+//!
+//! - `create_entity` - Create new entity of respective class
+//! - `remove_entity` - Remove entity under provided entity_id
+//! - `update_entity_permissions` - Update entity permissions
+//! - `add_schema_support_to_entity` - Add schema support to entity under given schema_id and provided property values
+//! - `update_entity_property_values` - Update entity property values with provided ones
+//! - `clear_entity_property_vector` - Clear property value vector under given entity_id & in_class schema property id
+//! - `remove_at_entity_property_vector` - Remove value at given index_in_property_vector
+//! from property values vector under in_class schema property id
+//! - `insert_at_entity_property_vector` - Insert a single input property value at given index
+//! into property values vector under in_class schema property id
+//!
+//! #### Others
+//!
+//! - `update_entity_creation_voucher` - Update/create new entity creation voucher for given entity controller with individual limit
+//! - `transaction` - This extrinsic allows a batch operation, which is atomic, over the following operations:
+//! **Entity creation**
+//! **Adding schema support to the entity**
+//! **Update property values of the entity**
+//!
+//! ## Usage
+//!
+//! The following example shows how to use the content directory module in your custom module.
+//!
+//! ### Prerequisites
+//!
+//! Import the content directory module into your custom module and derive the module configuration
+//! trait from the content directory trait.
+//!
+//! ### Add curator group
+//!
+//! ```
+//! use frame_support::{decl_module, assert_ok};
+//! use system::{self as system, ensure_signed};
+//!
+//! pub trait Trait: pallet_content_directory::Trait {}
+//!
+//! decl_module! {
+//!     pub struct Module<T: Trait> for enum Call where origin: T::Origin {
+//!         #[weight = 10_000_000]
+//!         pub fn add_curator_group(origin) -> Result<(), &'static str> {
+//!             <pallet_content_directory::Module<T>>::add_curator_group(origin)?;
+//!             Ok(())
+//!         }
+//!     }
+//! }
+//! # fn main() {}
+//! ```
+
+// Ensure we're `no_std` when compiling for Wasm.
+#![cfg_attr(not(feature = "std"), no_std)]
+#![recursion_limit = "256"]
+
+#[cfg(test)]
+mod tests;
+
+mod class;
+mod entity;
+mod errors;
+mod helpers;
+mod mock;
+mod operations;
+mod permissions;
+mod schema;
+
+pub use class::*;
+pub use entity::*;
+pub use errors::*;
+pub use helpers::*;
+pub use operations::*;
+pub use permissions::*;
+pub use schema::*;
+
+use core::hash::Hash;
+use core::ops::AddAssign;
+
+use codec::{Codec, Decode, Encode};
+use frame_support::storage::IterableStorageMap;
+
+use frame_support::{
+    decl_event, decl_module, decl_storage, dispatch::DispatchResult, ensure, traits::Get, Parameter,
+};
+#[cfg(feature = "std")]
+pub use serde::{Deserialize, Serialize};
+use sp_arithmetic::traits::{BaseArithmetic, One, Zero};
+use sp_runtime::traits::{MaybeSerializeDeserialize, Member};
+use sp_std::borrow::ToOwned;
+use sp_std::collections::{btree_map::BTreeMap, btree_set::BTreeSet};
+use sp_std::vec;
+use sp_std::vec::Vec;
+use system::ensure_signed;
+
+pub use errors::Error;
+
+use core::debug_assert;
+
+/// Type used in different numeric constraint representations
+pub type MaxNumber = u32;
+
+/// Module configuration trait for this Substrate module.
+pub trait Trait: system::Trait + ActorAuthenticator + Clone {
+    /// The overarching event type.
+    type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
+
+    /// Nonce type is used to avoid data race update conditions, when performing property value vector operations
+    type Nonce: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord
+        + From<u32>;
+
+    /// Type of identifier for classes
+    type ClassId: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + Hash
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord;
+
+    /// Type of identifier for entities
+    type EntityId: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + Hash
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord;
+
+    /// Security/configuration constraints
+
+    /// Type, representing min & max property name length constraints
+    type PropertyNameLengthConstraint: Get<InputValidationLengthConstraint>;
+
+    /// Type, representing min & max property description length constraints
+    type PropertyDescriptionLengthConstraint: Get<InputValidationLengthConstraint>;
+
+    /// Type, representing min & max class name length constraints
+    type ClassNameLengthConstraint: Get<InputValidationLengthConstraint>;
+
+    /// Type, representing min & max class description length constraints
+    type ClassDescriptionLengthConstraint: Get<InputValidationLengthConstraint>;
+
+    /// The maximum number of classes
+    type MaxNumberOfClasses: Get<MaxNumber>;
+
+    /// The maximum number of maintainers per class constraint
+    type MaxNumberOfMaintainersPerClass: Get<MaxNumber>;
+
+    /// The maximum number of curators per group constraint
+    type MaxNumberOfCuratorsPerGroup: Get<MaxNumber>;
+
+    /// The maximum number of schemas per class constraint
+    type MaxNumberOfSchemasPerClass: Get<MaxNumber>;
+
+    /// The maximum number of properties per schema constraint
+    type MaxNumberOfPropertiesPerSchema: Get<MaxNumber>;
+
+    /// The maximum number of operations during single invocation of `transaction`
+    type MaxNumberOfOperationsDuringAtomicBatching: Get<MaxNumber>;
+
+    /// The maximum length of vector property value constraint
+    type VecMaxLengthConstraint: Get<VecMaxLength>;
+
+    /// The maximum length of text property value constraint
+    type TextMaxLengthConstraint: Get<TextMaxLength>;
+
+    /// The maximum length of hashed text property value constraint
+    type HashedTextMaxLengthConstraint: Get<HashedTextMaxLength>;
+
+    /// Entities creation constraint per class
+    type MaxNumberOfEntitiesPerClass: Get<Self::EntityId>;
+
+    /// Entities creation constraint per individual
+    type IndividualEntitiesCreationLimit: Get<Self::EntityId>;
+}
+
+decl_storage! {
+    trait Store for Module<T: Trait> as ContentDirectory {
+
+        /// Map, representing ClassId -> Class relation
+        pub ClassById get(fn class_by_id): map hasher(blake2_128_concat) T::ClassId => Class<T>;
+
+        /// Map, representing EntityId -> Entity relation
+        pub EntityById get(fn entity_by_id): map hasher(blake2_128_concat) T::EntityId => Entity<T>;
+
+        /// Map, representing  CuratorGroupId -> CuratorGroup relation
+        pub CuratorGroupById get(fn curator_group_by_id) config(): map hasher(blake2_128_concat) T::CuratorGroupId => CuratorGroup<T>;
+
+        /// Mapping of class id and its property id to the respective entity id and property value hash.
+        pub UniquePropertyValueHashes get(fn unique_property_value_hashes): double_map hasher(blake2_128_concat) (T::ClassId, PropertyId), hasher(blake2_128_concat) T::Hash => ();
+
+        /// Next runtime storage values used to maintain next id value, used on creation of respective curator groups, classes and entities
+
+        pub NextClassId get(fn next_class_id) config(): T::ClassId;
+
+        pub NextEntityId get(fn next_entity_id) config(): T::EntityId;
+
+        pub NextCuratorGroupId get(fn next_curator_group_id) config(): T::CuratorGroupId;
+
+        // The voucher associated with entity creation for a given class and controller.
+        // Is updated whenever an entity is created in a given class by a given controller.
+        // Constraint is updated by Root, an initial value comes from `ClassPermissions::default_entity_creation_voucher_upper_bound`.
+        pub EntityCreationVouchers get(fn entity_creation_vouchers):
+            double_map hasher(blake2_128_concat) T::ClassId, hasher(blake2_128_concat) EntityController<T> => EntityCreationVoucher<T>;
+    }
+}
+
+decl_module! {
+    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
+
+        // ======
+        // Next set of extrinsics can only be invoked by lead.
+        // ======
+
+        /// Initializing events
+        fn deposit_event() = default;
+
+        /// Add new curator group to runtime storage
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn add_curator_group(
+            origin,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            let curator_group_id = Self::next_curator_group_id();
+
+            // Insert empty curator group with `active` parameter set to false
+            <CuratorGroupById<T>>::insert(curator_group_id, CuratorGroup::<T>::default());
+
+            // Increment the next curator_group_id:
+            <NextCuratorGroupId<T>>::mutate(|n| *n += T::CuratorGroupId::one());
+
+            // Trigger event
+            Self::deposit_event(RawEvent::CuratorGroupAdded(curator_group_id));
+            Ok(())
+        }
+
+        /// Remove curator group under given `curator_group_id` from runtime storage
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn remove_curator_group(
+            origin,
+            curator_group_id: T::CuratorGroupId,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure CuratorGroup under given curator_group_id exists
+            let curator_group = Self::ensure_curator_group_exists(&curator_group_id)?;
+
+            // Ensure the curator_group maintains no classes before it can be removed
+            curator_group.ensure_curator_group_maintains_no_classes()?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+
+            // Remove curator group under given curator group id from runtime storage
+            <CuratorGroupById<T>>::remove(curator_group_id);
+
+            // Trigger event
+            Self::deposit_event(RawEvent::CuratorGroupRemoved(curator_group_id));
+            Ok(())
+        }
+
+        /// Set `is_active` status for curator group under given `curator_group_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn set_curator_group_status(
+            origin,
+            curator_group_id: T::CuratorGroupId,
+            is_active: bool,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure curator group under provided curator_group_id already exists
+            Self::ensure_curator_group_under_given_id_exists(&curator_group_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Set `is_active` status for curator group under given `curator_group_id`
+            <CuratorGroupById<T>>::mutate(curator_group_id, |curator_group| {
+                curator_group.set_status(is_active)
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::CuratorGroupStatusSet(curator_group_id, is_active));
+            Ok(())
+        }
+
+        /// Add curator to curator group under given `curator_group_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn add_curator_to_group(
+            origin,
+            curator_group_id: T::CuratorGroupId,
+            curator_id: T::CuratorId,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure curator group under provided curator_group_id already exists, retrieve corresponding one
+            let curator_group = Self::ensure_curator_group_exists(&curator_group_id)?;
+
+            // Ensure max number of curators per group limit not reached yet
+            curator_group.ensure_max_number_of_curators_limit_not_reached()?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Insert curator_id into curator_group under given curator_group_id
+            <CuratorGroupById<T>>::mutate(curator_group_id, |curator_group| {
+                curator_group.get_curators_mut().insert(curator_id);
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::CuratorAdded(curator_group_id, curator_id));
+            Ok(())
+        }
+
+        /// Remove curator from a given curator group
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn remove_curator_from_group(
+            origin,
+            curator_group_id: T::CuratorGroupId,
+            curator_id: T::CuratorId,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure curator group under provided curator_group_id already exists, and retrieve the corresponding one
+            let curator_group = Self::ensure_curator_group_exists(&curator_group_id)?;
+
+            // Ensure curator under provided curator_id is CuratorGroup member
+            curator_group.ensure_curator_in_group_exists(&curator_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Remove curator_id from curator_group under given curator_group_id
+            <CuratorGroupById<T>>::mutate(curator_group_id, |curator_group| {
+                curator_group.get_curators_mut().remove(&curator_id);
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::CuratorRemoved(curator_group_id, curator_id));
+            Ok(())
+        }
+
+        /// Updates or creates new `EntityCreationVoucher` for given `EntityController` with individual limit
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn update_entity_creation_voucher(
+            origin,
+            class_id: T::ClassId,
+            controller: EntityController<T>,
+            maximum_entities_count: T::EntityId
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under given id exists
+            Self::ensure_known_class_id(class_id)?;
+
+            // Ensure maximum_entities_count does not exceed individual entities creation limit
+            Self::ensure_valid_number_of_class_entities_per_actor_constraint(maximum_entities_count)?;
+
+            // Check voucher existence
+            let voucher_exists = <EntityCreationVouchers<T>>::contains_key(class_id, &controller);
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            if voucher_exists {
+
+                // Set new maximum_entities_count limit for selected voucher
+                let mut entity_creation_voucher = Self::entity_creation_vouchers(class_id, &controller);
+
+                entity_creation_voucher.set_maximum_entities_count(maximum_entities_count);
+
+                <EntityCreationVouchers<T>>::insert(class_id, controller.clone(), entity_creation_voucher.clone());
+
+                // Trigger event
+                Self::deposit_event(RawEvent::EntityCreationVoucherUpdated(controller, entity_creation_voucher))
+            } else {
+                // Create new EntityCreationVoucher instance with provided maximum_entities_count
+                let entity_creation_voucher = EntityCreationVoucher::new(maximum_entities_count);
+
+                // Add newly created `EntityCreationVoucher` into `EntityCreationVouchers`
+                // runtime storage under given `class_id`, `controller` key
+                <EntityCreationVouchers<T>>::insert(class_id, controller.clone(), entity_creation_voucher.clone());
+
+                // Trigger event
+                Self::deposit_event(RawEvent::EntityCreationVoucherCreated(controller, entity_creation_voucher));
+            }
+
+            Ok(())
+        }
+
+        /// Create new `Class` with provided parameters
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn create_class(
+            origin,
+            name: Vec<u8>,
+            description: Vec<u8>,
+            class_permissions: ClassPermissions<T>,
+            maximum_entities_count: T::EntityId,
+            default_entity_creation_voucher_upper_bound: T::EntityId
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure that all entity creation limits, defined for the given Class, are valid
+            Self::ensure_entities_creation_limits_are_valid(maximum_entities_count, default_entity_creation_voucher_upper_bound)?;
+
+            // Ensure max number of classes limit not reached
+            Self::ensure_class_limit_not_reached()?;
+
+            // Ensure ClassNameLengthConstraint conditions satisfied
+            Self::ensure_class_name_is_valid(&name)?;
+
+            // Ensure ClassDescriptionLengthConstraint conditions satisfied
+            Self::ensure_class_description_is_valid(&description)?;
+
+            // Perform required checks to ensure class_maintainers under provided class_permissions are valid
+            let class_maintainers = class_permissions.get_maintainers();
+            Self::ensure_class_maintainers_are_valid(class_maintainers)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Create new Class instance from provided values
+            let class = Class::new(
+                class_permissions, name, description, maximum_entities_count, default_entity_creation_voucher_upper_bound
+            );
+
+            let class_id = Self::next_class_id();
+
+            // Add new `Class` to runtime storage
+            <ClassById<T>>::insert(&class_id, class);
+
+            // Increment the next class id:
+            <NextClassId<T>>::mutate(|n| *n += T::ClassId::one());
+
+            // Trigger event
+            Self::deposit_event(RawEvent::ClassCreated(class_id));
+            Ok(())
+        }
+
+        /// Add curator group under given `curator_group_id` as `Class` maintainer
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn add_maintainer_to_class(
+            origin,
+            class_id: T::ClassId,
+            curator_group_id: T::CuratorGroupId,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under provided class_id exists, and retrieve the corresponding one
+            let class = Self::ensure_known_class_id(class_id)?;
+
+            // Ensure CuratorGroup under provided curator_group_id exists
+            Self::ensure_curator_group_under_given_id_exists(&curator_group_id)?;
+
+            let class_permissions = class.get_permissions_ref();
+
+            // Ensure max number of maintainers per Class constraint satisfied
+            Self::ensure_maintainers_limit_not_reached(class_permissions.get_maintainers())?;
+
+            // Ensure maintainer under provided curator_group_id is not added to the Class maintainers set yet
+            class_permissions.ensure_maintainer_does_not_exist(&curator_group_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Insert `curator_group_id` into `maintainers` set, associated with given `Class`
+            <ClassById<T>>::mutate(class_id, |class|
+                class.get_permissions_mut().get_maintainers_mut().insert(curator_group_id)
+            );
+
+            // Increment the number of classes, curator group under given `curator_group_id` maintains
+            <CuratorGroupById<T>>::mutate(curator_group_id, |curator_group| {
+                curator_group.increment_number_of_classes_maintained_count();
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::MaintainerAdded(class_id, curator_group_id));
+            Ok(())
+        }
+
+        /// Remove curator group under given `curator_group_id` from `Class` maintainers set
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn remove_maintainer_from_class(
+            origin,
+            class_id: T::ClassId,
+            curator_group_id: T::CuratorGroupId,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under given id exists, return corresponding one
+            let class = Self::ensure_known_class_id(class_id)?;
+
+            // Ensure maintainer under provided curator_group_id was previously added
+            // to the maintainers set, associated with corresponding Class
+            class.get_permissions_ref().ensure_maintainer_exists(&curator_group_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Remove `curator_group_id` from `maintainers` set, associated with given `Class`
+            <ClassById<T>>::mutate(class_id, |class|
+                class.get_permissions_mut().get_maintainers_mut().remove(&curator_group_id)
+            );
+
+            // Decrement the number of classes, curator group under given `curator_group_id` maintains
+            <CuratorGroupById<T>>::mutate(curator_group_id, |curator_group| {
+                curator_group.decrement_number_of_classes_maintained_count();
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::MaintainerRemoved(class_id, curator_group_id));
+            Ok(())
+        }
+
+        /// Update `ClassPermissions` under specific `class_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn update_class_permissions(
+            origin,
+            class_id: T::ClassId,
+            updated_any_member: Option<bool>,
+            updated_entity_creation_blocked: Option<bool>,
+            updated_all_entity_property_values_locked: Option<bool>,
+            updated_maintainers: Option<BTreeSet<T::CuratorGroupId>>,
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under given id exists, return corresponding one
+            let class = Self::ensure_known_class_id(class_id)?;
+
+            // Perform required checks to ensure class_maintainers are valid
+            if let Some(ref updated_maintainers) = updated_maintainers {
+                Self::ensure_class_maintainers_are_valid(updated_maintainers)?;
+            }
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            let class_permissions = class.get_permissions();
+
+            // Make updated class_permissions from parameters provided
+            let updated_class_permissions = Self::make_updated_class_permissions(
+                class_permissions, updated_any_member, updated_entity_creation_blocked,
+                updated_all_entity_property_values_locked, updated_maintainers
+            );
+
+            // If class_permissions update has been performed
+            if let Some(updated_class_permissions) = updated_class_permissions {
+
+                // Update `class_permissions` under given class id
+                <ClassById<T>>::mutate(class_id, |class| {
+                    class.update_permissions(updated_class_permissions)
+                });
+
+                // Trigger event
+                Self::deposit_event(RawEvent::ClassPermissionsUpdated(class_id));
+            }
+
+            Ok(())
+        }
+
+        /// Create new class schema from existing property ids and new properties
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn add_class_schema(
+            origin,
+            class_id: T::ClassId,
+            existing_properties: BTreeSet<PropertyId>,
+            new_properties: Vec<Property<T>>
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under given id exists, return corresponding one
+            let class = Self::ensure_known_class_id(class_id)?;
+
+            // Ensure Schemas limit per Class not reached
+            class.ensure_schemas_limit_not_reached()?;
+
+            // Ensure existing and new properties for the future Schema are not both empty
+            Self::ensure_non_empty_schema(&existing_properties, &new_properties)?;
+
+            // Ensure max number of properties per Schema limit not reached
+            class.ensure_properties_limit_not_reached(&new_properties)?;
+
+            // Complete all checks to ensure all provided new_properties are valid
+            Self::ensure_all_properties_are_valid(&new_properties)?;
+
+            // Id of next Class Schema being added
+            let schema_id = class.get_schemas().len() as SchemaId;
+
+            let class_properties = class.get_properties();
+
+            // Ensure all Property names are unique within Class
+            Self::ensure_all_property_names_are_unique(&class_properties, &new_properties)?;
+
+            // Ensure existing_properties are valid indices of properties, corresponding to chosen Class
+            Self::ensure_schema_properties_are_valid_indices(&existing_properties, &class_properties)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Create `Schema` instance from existing and new property ids
+            let schema = Self::create_class_schema(existing_properties, &class_properties, &new_properties);
+
+            // Update class properties after new `Schema` added
+            let updated_class_properties = Self::make_updated_class_properties(class_properties, new_properties);
+
+            // Update Class properties and schemas
+            <ClassById<T>>::mutate(class_id, |class| {
+                class.set_properties(updated_class_properties);
+                class.get_schemas_mut().push(schema);
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::ClassSchemaAdded(class_id, schema_id));
+
+            Ok(())
+        }
+
+        /// Update `schema_status` under specific `schema_id` in `Class`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn update_class_schema_status(
+            origin,
+            class_id: T::ClassId,
+            schema_id: SchemaId,
+            schema_status: bool
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Class under given id exists, return corresponding one
+            let class = Self::ensure_known_class_id(class_id)?;
+
+            // Ensure Class already contains schema under provided schema_id
+            class.ensure_schema_id_exists(schema_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Update class schema status
+            <ClassById<T>>::mutate(class_id, |class| {
+                class.update_schema_status(schema_id, schema_status)
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::ClassSchemaStatusUpdated(class_id, schema_id, schema_status));
+            Ok(())
+        }
+
+        /// Update entity permissions
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn update_entity_permissions(
+            origin,
+            entity_id: T::EntityId,
+            updated_frozen: Option<bool>,
+            updated_referenceable: Option<bool>
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Entity under given id exists, return corresponding one
+            let entity = Self::ensure_known_entity_id(entity_id)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            let entity_permissions = entity.get_permissions();
+
+            // Make updated entity_permissions from parameters provided
+            let updated_entity_permissions =
+                Self::make_updated_entity_permissions(entity_permissions, updated_frozen, updated_referenceable);
+
+            // Update entity permissions under given entity id
+            if let Some(updated_entity_permissions) = updated_entity_permissions {
+
+                <EntityById<T>>::mutate(entity_id, |entity| {
+                    entity.update_permissions(updated_entity_permissions)
+                });
+
+                // Trigger event
+                Self::deposit_event(RawEvent::EntityPermissionsUpdated(entity_id));
+            }
+            Ok(())
+        }
+
+        /// Transfer ownership to new `EntityController` for `Entity` under given `entity_id`
+        /// `new_property_value_references_with_same_owner_flag_set` should be provided manually
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn transfer_entity_ownership(
+            origin,
+            entity_id: T::EntityId,
+            new_controller: EntityController<T>,
+            new_property_value_references_with_same_owner_flag_set: BTreeMap<PropertyId, InputPropertyValue<T>>
+        ) -> DispatchResult {
+
+            // Ensure given origin is lead
+            ensure_is_lead::<T>(origin)?;
+
+            // Ensure Entity under given entity_id exists, retrieve corresponding Entity & Class
+            let (entity, class) = Self::ensure_known_entity_and_class(entity_id)?;
+
+            // Ensure provided new_controller is not equal to the current one
+            entity.get_permissions_ref().ensure_controllers_are_not_equal(&new_controller)?;
+
+            // Ensure no inbound InputPropertyValue::Reference with same_owner flag set points to the given Entity
+            entity.ensure_inbound_same_owner_rc_is_zero()?;
+
+            let class_properties = class.get_properties();
+
+            let class_id = entity.get_class_id();
+
+            let entity_property_values = entity.get_values();
+
+            // Create wrapper structure from provided entity_property_values and their corresponding Class properties
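+            // (a failure here would indicate inconsistent runtime storage, hence the debug_assert below)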
+            let values_for_existing_properties = match StoredValuesForExistingProperties::from(&class_properties, &entity_property_values) {
+                Ok(values_for_existing_properties) => values_for_existing_properties,
+                Err(e) => {
+                    debug_assert!(false, "Should not fail! {:?}", e);
+                    return Err(e.into())
+                }
+            };
+
+            // Filter provided values_for_existing_properties, leaving only `Reference`s with `SameOwner` flag set
+            // Retrieve the set of corresponding property ids
+            let entity_property_id_references_with_same_owner_flag_set =
+                Self::get_property_id_references_with_same_owner_flag_set(values_for_existing_properties);
+
+            // Ensure all ids of provided `new_property_value_references_with_same_owner_flag_set`
+            // correspond to property ids of respective Class Property references with same owner flag set
+            Self::ensure_only_reference_ids_with_same_owner_flag_set_provided(
+                &entity_property_id_references_with_same_owner_flag_set,
+                &new_property_value_references_with_same_owner_flag_set
+            )?;
+
+            // Retrieve ids of all entity property values that are references with same owner flag set
+            // and are not provided in new_property_value_references_with_same_owner_flag_set
+            let unused_property_id_references_with_same_owner_flag_set = Self::compute_unused_property_ids(
+                &new_property_value_references_with_same_owner_flag_set, &entity_property_id_references_with_same_owner_flag_set
+            );
+
+            // Perform checks to ensure all required property_values under the unused property id references are provided
+            Self::ensure_all_required_properties_provided(&class_properties, &unused_property_id_references_with_same_owner_flag_set)?;
+
+            // Create wrapper structure from provided new_property_value_references_with_same_owner_flag_set and their corresponding Class properties
+            let new_values_for_existing_properties = InputValuesForExistingProperties::from(
+                &class_properties, &new_property_value_references_with_same_owner_flag_set
+            )?;
+
+            // Ensure all provided `new_property_value_references_with_same_owner_flag_set` are valid
+            Self::ensure_are_valid_references_with_same_owner_flag_set(
+                new_values_for_existing_properties, &new_controller
+            )?;
+
+            let new_output_property_value_references_with_same_owner_flag_set = Self::make_output_property_values(new_property_value_references_with_same_owner_flag_set);
+
+            // Compute StoredPropertyValues whose respective Properties have the unique flag set
+            // (skip PropertyIds whose respective property values under this Entity are default and not required)
+            let new_output_values_for_existing_properties =
+                StoredValuesForExistingProperties::from(&class_properties, &new_output_property_value_references_with_same_owner_flag_set)?;
+
+            // Compute new unique property value hashes.
+            // Ensure new property value hashes with `unique` flag set are `unique` on `Class` level
+            let new_unique_hashes = Self::ensure_new_property_values_respect_uniquness(
+                class_id, new_output_values_for_existing_properties,
+            )?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Compute old unique hashes that should be substituted with new ones.
+            let old_unique_hashes =
+                Self::compute_old_unique_hashes(&new_output_property_value_references_with_same_owner_flag_set, &entity_property_values);
+
+            // Add property value hashes that should be unique on Class level
+            Self::add_unique_property_value_hashes(class_id, new_unique_hashes);
+
+            // Remove unique hashes that were substituted with new ones.
+            Self::remove_unique_property_value_hashes(class_id, old_unique_hashes);
+
+            // Make updated entity_property_values from parameters provided
+            let entity_property_values_updated =
+                    Self::make_updated_property_value_references_with_same_owner_flag_set(
+                        unused_property_id_references_with_same_owner_flag_set, &entity_property_values,
+                        &new_output_property_value_references_with_same_owner_flag_set,
+                    );
+
+            // Transfer entity ownership
+            let entities_inbound_rcs_delta = if let Some(entity_property_values_updated) = entity_property_values_updated {
+
+                // Calculate entities reference counter side effects for current operation
+                let entities_inbound_rcs_delta =
+                    Self::get_updated_inbound_rcs_delta(
+                        entity_id, class_properties, entity_property_values, new_output_property_value_references_with_same_owner_flag_set
+                    )?;
+
+                // Update InboundReferenceCounter, based on previously calculated ReferenceCounterSideEffects, for each Entity involved
+                Self::update_entities_rcs(&entities_inbound_rcs_delta);
+
+                <EntityById<T>>::mutate(entity_id, |entity| {
+
+                    // Update current Entity property values with updated ones
+                    entity.set_values(entity_property_values_updated);
+
+                    // Set up new controller for the current Entity instance
+                    entity.get_permissions_mut().set_conroller(new_controller.clone());
+                });
+
+                entities_inbound_rcs_delta
+            } else {
+                // Set up new controller for the current Entity instance
+                <EntityById<T>>::mutate(entity_id, |entity| {
+                    entity.get_permissions_mut().set_conroller(new_controller.clone());
+                });
+
+                None
+            };
+
+            // Trigger event
+            Self::deposit_event(RawEvent::EntityOwnershipTransfered(entity_id, new_controller, entities_inbound_rcs_delta));
+
+            Ok(())
+        }
+
+        // ======
+        // The next set of extrinsics can be invoked by anyone who can properly sign for provided value of `Actor<T>`.
+        // ======
+
+        /// Create entity.
+        /// If someone is creating an entity of this class for the first time,
+        /// then a voucher is also added with the class limit as the default limit value.
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn create_entity(
+            origin,
+            class_id: T::ClassId,
+            actor: Actor<T>,
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Ensure Class under given id exists, return corresponding one
+            let class = Self::ensure_class_exists(class_id)?;
+
+            // Ensure maximum entities limit per class not reached
+            class.ensure_maximum_entities_count_limit_not_reached()?;
+
+            let class_permissions = class.get_permissions_ref();
+
+            // Ensure entities creation is not blocked on Class level
+            class_permissions.ensure_entity_creation_not_blocked()?;
+
+            // Ensure actor can create entities
+            class_permissions.ensure_can_create_entities(&account_id, &actor)?;
+
+            let entity_controller = EntityController::from_actor(&actor);
+
+            // Check if entity creation voucher exists
+            let voucher_exists = if <EntityCreationVouchers<T>>::contains_key(class_id, &entity_controller) {
+
+                // Ensure voucher limit not reached
+                Self::entity_creation_vouchers(class_id, &entity_controller).ensure_voucher_limit_not_reached()?;
+                true
+            } else {
+                false
+            };
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Create a new voucher, or update the existing one
+
+            if voucher_exists {
+
+                // Increment created entities count, since the specified voucher already exists
+                <EntityCreationVouchers<T>>::mutate(class_id, &entity_controller, |entity_creation_voucher| {
+                    entity_creation_voucher.increment_created_entities_count()
+                });
+            } else {
+
+                // Create new voucher for given entity creator with default limit
+                let mut entity_creation_voucher = EntityCreationVoucher::new(class.get_default_entity_creation_voucher_upper_bound());
+
+                // Increase created entities count by 1 to maintain a valid entity_creation_voucher state after the following Entity is added
+                entity_creation_voucher.increment_created_entities_count();
+                <EntityCreationVouchers<T>>::insert(class_id, entity_controller.clone(), entity_creation_voucher);
+            }
+
+            // Create new entity
+
+            let entity_id = Self::next_entity_id();
+
+            let new_entity = Entity::<T>::new(
+                entity_controller,
+                class_id,
+                BTreeSet::new(),
+                BTreeMap::new(),
+            );
+
+            // Save newly created entity:
+            EntityById::insert(entity_id, new_entity);
+
+            // Increment the next entity id:
+            <NextEntityId<T>>::mutate(|n| *n += T::EntityId::one());
+
+            // Increment number of entities, associated with this class
+            <ClassById<T>>::mutate(class_id, |class| {
+                class.increment_entities_count();
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::EntityCreated(actor, entity_id));
+            Ok(())
+        }
+
+        /// Remove `Entity` under provided `entity_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn remove_entity(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, access_level) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure actor with given EntityAccessLevel can remove entity
+            EntityPermissions::<T>::ensure_group_can_remove_entity(access_level)?;
+
+            // Ensure no inbound InputPropertyValue::Reference points to the given Entity
+            entity.ensure_rc_is_zero()?;
+
+            let class_properties = class.get_properties();
+
+            let class_id = entity.get_class_id();
+
+            let entity_values = entity.get_values();
+
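+            // Compute hashes of this Entity's unique property values
+            // (a failure here would indicate inconsistent runtime storage, hence the debug_assert below)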
+            let unique_property_value_hashes = match StoredValuesForExistingProperties::from(&class_properties, &entity_values) {
+                Ok(values_for_existing_properties) => values_for_existing_properties.compute_unique_hashes(),
+                Err(e) => {
+                    debug_assert!(false, "Should not fail! {:?}", e);
+                    return Err(e.into())
+                }
+            };
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Remove property value hashes that should be unique on Class level
+            Self::remove_unique_property_value_hashes(class_id, unique_property_value_hashes);
+
+            // Remove entity
+            <EntityById<T>>::remove(entity_id);
+
+            // Decrement class entities counter
+            <ClassById<T>>::mutate(class_id, |class| class.decrement_entities_count());
+
+            let entity_controller = EntityController::<T>::from_actor(&actor);
+
+            // Decrement entity_creation_voucher after entity removal is performed
+            <EntityCreationVouchers<T>>::mutate(class_id, entity_controller, |entity_creation_voucher| {
+                entity_creation_voucher.decrement_created_entities_count();
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::EntityRemoved(actor, entity_id));
+            Ok(())
+        }
+
+        /// Add schema support to entity under given `schema_id` and provided `property_values`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn add_schema_support_to_entity(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+            schema_id: SchemaId,
+            new_property_values: BTreeMap<PropertyId, InputPropertyValue<T>>
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, _) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure Class Schema under given index exists, return corresponding Schema
+            let schema = class.ensure_schema_exists(schema_id)?.to_owned();
+
+            let class_properties = class.get_properties();
+
+            // Create wrapper structure from provided new_property_values and their corresponding Class properties
+            let new_values_for_existing_properties = InputValuesForExistingProperties::from(&class_properties, &new_property_values)?;
+
+            // Ensure Schema under given id is not added to given Entity yet
+            entity.ensure_schema_id_is_not_added(schema_id)?;
+
+            // Ensure provided new_property_values are not added to the Entity values map yet
+            entity.ensure_property_values_are_not_added(&new_property_values)?;
+
+            // Ensure provided schema can be added to the Entity
+            schema.ensure_is_active::<T>()?;
+
+            // Ensure all provided new property values are for properties in the given schema
+            schema.ensure_has_properties(&new_property_values)?;
+
+            // Retrieve Schema property ids, which are not provided in new_property_values
+            let unused_schema_property_ids = Self::compute_unused_property_ids(&new_property_values, schema.get_properties());
+
+            // Perform checks to ensure all required property_values under provided unused_schema_property_ids are provided
+            Self::ensure_all_required_properties_provided(&class_properties, &unused_schema_property_ids)?;
+
+            // Retrieve the Entity controller, used below to validate the provided property values
+            let entity_controller = entity.get_permissions_ref().get_controller();
+
+            // Validate all values, provided in new_values_for_existing_properties,
+            // against the type of its Property and check any additional constraints
+            Self::ensure_property_values_are_valid(&entity_controller, &new_values_for_existing_properties)?;
+
+            let class_id = entity.get_class_id();
+
+            let entity_property_values = entity.get_values();
+
+            let new_output_property_values = Self::make_output_property_values(new_property_values);
+
+            // Compute updated entity values, after new schema support added
+            let entity_values_updated = Self::make_updated_entity_property_values(
+                schema, entity_property_values, &new_output_property_values
+            );
+
+            let new_output_values_for_existing_properties = StoredValuesForExistingProperties::from(&class_properties, &new_output_property_values)?;
+
+            // Retrieve StoredPropertyValues whose respective Properties have the unique flag set
+            // (skip PropertyIds whose respective property values under this Entity are default and not required)
+            let new_unique_property_value_hashes = new_output_values_for_existing_properties.compute_unique_hashes();
+
+            // Ensure all provided Properties with unique flag set are unique on Class level
+            Self::ensure_property_value_hashes_unique_option_satisfied(class_id, &new_unique_property_value_hashes)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Add property value hashes, that should be unique on Class level
+            Self::add_unique_property_value_hashes(class_id, new_unique_property_value_hashes);
+
+            // Calculate entities reference counter side effects for current operation
+            let entities_inbound_rcs_delta = Self::calculate_entities_inbound_rcs_delta(
+                entity_id, new_output_values_for_existing_properties, DeltaMode::Increment
+            );
+
+            // Update InboundReferenceCounter, based on previously calculated entities_inbound_rcs_delta, for each Entity involved
+            Self::update_entities_rcs(&entities_inbound_rcs_delta);
+
+            // Add schema support to `Entity` under given `entity_id`
+            <EntityById<T>>::mutate(entity_id, |entity| {
+
+                // Add a new schema to the list of schemas supported by this entity.
+                entity.get_supported_schemas_mut().insert(schema_id);
+
+                // Update entity values only if new properties have been added.
+                if entity_values_updated.len() > entity.get_values_ref().len() {
+                    entity.set_values(entity_values_updated);
+                }
+            });
+
+            // Trigger event
+            Self::deposit_event(RawEvent::EntitySchemaSupportAdded(actor, entity_id, schema_id, entities_inbound_rcs_delta));
+            Ok(())
+        }
+
+        /// Update `Entity` `InputPropertyValue`'s with provided ones
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn update_entity_property_values(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+            new_property_values: BTreeMap<PropertyId, InputPropertyValue<T>>
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, access_level) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure property values were not locked on Class level
+            class.ensure_property_values_unlocked()?;
+
+            let entity_values_ref = entity.get_values_ref();
+
+            // Filter out new_property_values that are identical to current entity_property_values,
+            // keeping only those `new_property_values` that are not already in `entity_property_values`
+            let new_property_values = Self::try_filter_identical_property_values(entity_values_ref, new_property_values);
+
+            // Ensure all provided new_property_values are already added to the current Entity instance
+            Self::ensure_all_property_values_are_already_added(entity_values_ref, &new_property_values)?;
+
+            let class_properties = class.get_properties();
+
+            // Create wrapper structure from new_property_values and their corresponding Class properties
+            let new_values_for_existing_properties = InputValuesForExistingProperties::from(&class_properties, &new_property_values)?;
+
+            // Ensure all provided property values are unlocked for the actor with given access_level
+            Self::ensure_all_property_values_are_unlocked_from(&new_values_for_existing_properties, access_level)?;
+
+            let entity_controller = entity.get_permissions_ref().get_controller();
+
+            // Validate all values, provided in values_for_existing_properties,
+            // against the type of its Property and check any additional constraints
+            Self::ensure_property_values_are_valid(&entity_controller, &new_values_for_existing_properties)?;
+
+            let class_id = entity.get_class_id();
+
+            // Get current property values of an Entity
+
+            let entity_property_values = entity.get_values();
+
+            let new_output_property_values = Self::make_output_property_values(new_property_values);
+
+            // Compute StoredPropertyValues whose respective Properties have the unique flag set
+            // (skip PropertyIds whose respective property values under this Entity are default and not required)
+            let new_output_values_for_existing_properties =
+                StoredValuesForExistingProperties::from(&class_properties, &new_output_property_values)?;
+
+            // Compute new unique property value hashes.
+            // Ensure new property value hashes with `unique` flag set are `unique` on `Class` level
+            let new_unique_hashes = Self::ensure_new_property_values_respect_uniquness(
+                class_id, new_output_values_for_existing_properties,
+            )?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // Compute old unique hashes that should be substituted with new ones.
+            let old_unique_hashes =
+                Self::compute_old_unique_hashes(&new_output_property_values, &entity_property_values);
+
+            // Add property value hashes that should be unique on Class level
+            Self::add_unique_property_value_hashes(class_id, new_unique_hashes);
+
+            // Remove unique hashes that were substituted with new ones (if any)
+            Self::remove_unique_property_value_hashes(class_id, old_unique_hashes);
+
+            // Make updated entity_property_values from current entity_property_values and new_output_property_values provided
+            let entity_property_values_updated =
+                Self::make_updated_property_values(&entity_property_values, &new_output_property_values);
+
+            // If property values should be updated
+            if let Some(entity_property_values_updated) = entity_property_values_updated {
+
+                // Calculate entities reference counter side effects for current operation (should always be safe)
+                let entities_inbound_rcs_delta =
+                    Self::get_updated_inbound_rcs_delta(entity_id, class_properties, entity_property_values, new_output_property_values)?;
+
+                // Update InboundReferenceCounter, based on previously calculated entities_inbound_rcs_delta, for each Entity involved
+                Self::update_entities_rcs(&entities_inbound_rcs_delta);
+
+                // Update entity property values
+                <EntityById<T>>::mutate(entity_id, |entity| {
+                    entity.set_values(entity_property_values_updated);
+                });
+
+                // Trigger event
+                Self::deposit_event(RawEvent::EntityPropertyValuesUpdated(actor, entity_id, entities_inbound_rcs_delta));
+            }
+
+            Ok(())
+        }
+
+        /// Clear `PropertyValueVec` under given `entity_id` & `in_class_schema_property_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn clear_entity_property_vector(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+            in_class_schema_property_id: PropertyId
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, access_level) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure Property under given PropertyId is unlocked from actor with given EntityAccessLevel
+            // Retrieve corresponding Property by value
+            let property = class.ensure_class_property_type_unlocked_from(
+                in_class_schema_property_id,
+                access_level,
+            )?;
+
+            // Ensure InputPropertyValue under given in_class_schema_property_id is Vector
+            let property_value_vector =
+                entity.ensure_property_value_is_vec(in_class_schema_property_id)?;
+
+            // Calculate side effects for clear_property_vector operation, based on property_value_vector provided and its respective property.
+            let entities_inbound_rcs_delta = Self::make_side_effects_for_clear_property_vector_operation(&property_value_vector, &property);
+
+            // Clear property_value_vector.
+            let empty_property_value_vector = Self::clear_property_vector(property_value_vector.clone());
+
+            let class_id = entity.get_class_id();
+
+            // Compute old and new vec unique property value hash.
+            // Ensure new property value hash with `unique` flag set is `unique` on `Class` level
+            let vec_property_value_hashes = if property.unique {
+                Some(
+                    Self::ensure_vec_property_value_hashes(class_id, in_class_schema_property_id, &empty_property_value_vector, property_value_vector)?
+                )
+            } else {
+                None
+            };
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            if let Some((new_property_value_hash, old_property_value_hash)) = vec_property_value_hashes {
+                // Add the new property value hash, which should be unique on `Class` level
+                Self::add_unique_property_value_hash(class_id, in_class_schema_property_id, new_property_value_hash);
+
+                // Remove the old property value hash, which was unique on `Class` level
+                Self::remove_unique_property_value_hash(class_id, in_class_schema_property_id, old_property_value_hash);
+            }
+
+            // Decrease reference counters of involved entities (if any)
+            Self::update_entities_rcs(&entities_inbound_rcs_delta);
+
+            // Insert empty_property_value_vector into entity_property_values mapping at in_class_schema_property_id.
+            // Retrieve updated entity_property_values
+            let entity_values_updated = Self::insert_at_in_class_schema_property_id(
+                entity.get_values(), in_class_schema_property_id, empty_property_value_vector
+            );
+
+            // Update entity property values
+            <EntityById<T>>::mutate(entity_id, |entity| {
+                entity.set_values(entity_values_updated);
+            });
+
+            // Trigger event
+            Self::deposit_event(
+                RawEvent::VectorCleared(
+                    actor, entity_id, in_class_schema_property_id, entities_inbound_rcs_delta
+                )
+            );
+
+            Ok(())
+        }
+
+        /// Remove value at given `index_in_property_vector`
+        /// from `PropertyValueVec` under `in_class_schema_property_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn remove_at_entity_property_vector(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+            in_class_schema_property_id: PropertyId,
+            index_in_property_vector: VecMaxLength,
+            nonce: T::Nonce
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, access_level) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure Property under given PropertyId is unlocked from actor with given EntityAccessLevel
+            // Retrieve corresponding Property by value
+            let property = class.ensure_class_property_type_unlocked_from(
+                in_class_schema_property_id,
+                access_level,
+            )?;
+
+            // Ensure InputPropertyValue under given in_class_schema_property_id is Vector
+            let property_value_vector =
+                entity.ensure_property_value_is_vec(in_class_schema_property_id)?;
+
+            // Ensure `VecInputPropertyValue` nonce is equal to the provided one.
+            // Used to avoid possible data races when performing vector specific operations
+            property_value_vector.ensure_nonce_equality(nonce)?;
+
+            // Ensure provided index_in_property_vector is a valid index of the VecInputValue
+            property_value_vector
+                .ensure_index_in_property_vector_is_valid(index_in_property_vector)?;
+
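+            // Capture the id of the Entity referenced at the removed index (if the vector holds
+            // references), so its inbound reference counter can be decremented below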
+            let involved_entity_id = property_value_vector
+                .get_vec_value_ref()
+                .get_involved_entities()
+                .and_then(|involved_entities| involved_entities.get(index_in_property_vector as usize).copied());
+
+            // Remove value at in_class_schema_property_id in property value vector
+            // Get VecInputPropertyValue wrapped in InputPropertyValue
+            let property_value_vector_updated = Self::remove_at_index_in_property_vector(
+                property_value_vector.clone(), index_in_property_vector
+            );
+
+            let class_id = entity.get_class_id();
+
+            // Compute old and new vec unique property value hash.
+            // Ensure new property value hash with `unique` flag set is `unique` on `Class` level
+            let vec_property_value_hashes = if property.unique {
+                Some(
+                    Self::ensure_vec_property_value_hashes(class_id, in_class_schema_property_id, &property_value_vector_updated, property_value_vector)?
+                )
+            } else {
+                None
+            };
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            if let Some((new_property_value_hash, old_property_value_hash)) = vec_property_value_hashes {
+                // Add the new property value hash, which should be unique on `Class` level
+                Self::add_unique_property_value_hash(class_id, in_class_schema_property_id, new_property_value_hash);
+
+                // Remove the old property value hash, which was unique on `Class` level
+                Self::remove_unique_property_value_hash(class_id, in_class_schema_property_id, old_property_value_hash);
+            }
+
+            // Insert updated property value into entity_property_values mapping at in_class_schema_property_id.
+            let entity_values_updated = Self::insert_at_in_class_schema_property_id(
+                entity.get_values(), in_class_schema_property_id, property_value_vector_updated
+            );
+
+            let involved_entity_and_side_effect = if let Some(involved_entity_id) = involved_entity_id {
+                // Decrease reference counter of the involved entity
+                let same_controller_status = property.property_type.same_controller_status();
+                let rc_delta = EntityReferenceCounterSideEffect::atomic(same_controller_status, DeltaMode::Decrement);
+
+                // Update InboundReferenceCounter of involved entity, based on previously calculated rc_delta
+                Self::update_entity_rc(involved_entity_id, rc_delta);
+                Some((involved_entity_id, rc_delta))
+            } else {
+                None
+            };
+
+            // Update entity property values
+            <EntityById<T>>::mutate(entity_id, |entity| {
+                entity.set_values(entity_values_updated);
+            });
+
+            // Trigger event
+            Self::deposit_event(
+                RawEvent::RemovedAtVectorIndex(
+                    actor, entity_id, in_class_schema_property_id, index_in_property_vector,
+                    nonce + T::Nonce::one(), involved_entity_and_side_effect
+                )
+            );
+
+            Ok(())
+        }
+
+        /// Insert `SingleInputPropertyValue` at given `index_in_property_vector`
+        /// into `PropertyValueVec` under `in_class_schema_property_id`
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn insert_at_entity_property_vector(
+            origin,
+            actor: Actor<T>,
+            entity_id: T::EntityId,
+            in_class_schema_property_id: PropertyId,
+            index_in_property_vector: VecMaxLength,
+            value: InputValue<T>,
+            nonce: T::Nonce
+        ) -> DispatchResult {
+
+            let account_id = ensure_signed(origin)?;
+
+            // Retrieve Class, Entity and EntityAccessLevel for the actor attempting to perform the operation
+            let (class, entity, access_level) = Self::ensure_class_entity_and_access_level(account_id, entity_id, &actor)?;
+
+            // Ensure Property under given PropertyId is unlocked from actor with given EntityAccessLevel
+            // Retrieve corresponding Property by value
+            let property = class.ensure_class_property_type_unlocked_from(
+                in_class_schema_property_id,
+                access_level,
+            )?;
+
+            // Ensure InputPropertyValue under given in_class_schema_property_id is Vector
+            let property_value_vector =
+                entity.ensure_property_value_is_vec(in_class_schema_property_id)?;
+
+            // Ensure `VecInputPropertyValue` nonce is equal to the provided one.
+            // Used to avoid possible data races when performing vector specific operations
+            property_value_vector.ensure_nonce_equality(nonce)?;
+
+            let entity_controller = entity.get_permissions_ref().get_controller();
+
+            // Ensure property_value type is equal to the property_value_vector type and check all constraints
+            property.ensure_property_value_can_be_inserted_at_property_vector(
+                &value,
+                &property_value_vector,
+                index_in_property_vector,
+                entity_controller,
+            )?;
+
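+            // Capture the Entity referenced by the inserted value (if it is a reference),
+            // so its inbound reference counter can be incremented below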
+            let involved_entity = value.get_involved_entity();
+
+            // Insert SingleInputPropertyValue at in_class_schema_property_id into property value vector
+            // Get VecInputPropertyValue wrapped in InputPropertyValue
+            let property_value_vector_updated = Self::insert_at_index_in_property_vector(
+                property_value_vector.clone(), index_in_property_vector, value
+            );
+
+            let class_id = entity.get_class_id();
+
+            // Compute old and new vec unique property value hash.
+            // Ensure new property value hash with `unique` flag set is `unique` on `Class` level
+            let vec_property_value_hashes = if property.unique {
+                Some(
+                    Self::ensure_vec_property_value_hashes(class_id, in_class_schema_property_id, &property_value_vector_updated, property_value_vector)?
+                )
+            } else {
+                None
+            };
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            if let Some((new_property_value_hash, old_property_value_hash)) = vec_property_value_hashes {
+                // Add the new property value hash, which should be unique on `Class` level
+                Self::add_unique_property_value_hash(class_id, in_class_schema_property_id, new_property_value_hash);
+
+                // Remove the old property value hash, which was unique on `Class` level
+                Self::remove_unique_property_value_hash(class_id, in_class_schema_property_id, old_property_value_hash);
+            }
+
+            // Insert updated property value into entity_property_values mapping at in_class_schema_property_id.
+            // Retrieve updated entity_property_values
+            let entity_values_updated = Self::insert_at_in_class_schema_property_id(
+                entity.get_values(), in_class_schema_property_id, property_value_vector_updated
+            );
+
+            // Increase reference counter of the involved entity (if any)
+            let involved_entity_and_side_effect = if let Some(entity_rc_to_increment) = involved_entity {
+                let same_controller_status = property.property_type.same_controller_status();
+                let rc_delta = EntityReferenceCounterSideEffect::atomic(same_controller_status, DeltaMode::Increment);
+
+                // Update InboundReferenceCounter of involved entity, based on previously calculated ReferenceCounterSideEffect
+                Self::update_entity_rc(entity_rc_to_increment, rc_delta);
+                Some((entity_rc_to_increment, rc_delta))
+            } else {
+                None
+            };
+
+            // Update entity property values
+            <EntityById<T>>::mutate(entity_id, |entity| {
+                entity.set_values(entity_values_updated);
+            });
+
+            // Trigger event
+            Self::deposit_event(
+                RawEvent::InsertedAtVectorIndex(
+                    actor, entity_id, in_class_schema_property_id, index_in_property_vector,
+                    nonce + T::Nonce::one(), involved_entity_and_side_effect
+                )
+            );
+
+            Ok(())
+        }
+
+        /// Batch transaction
+        #[weight = 10_000_000] // TODO: adjust weight
+        pub fn transaction(origin, actor: Actor<T>, operations: Vec<OperationType<T>>) -> DispatchResult {
+
+            // Ensure maximum number of operations during atomic batching limit not reached
+            Self::ensure_number_of_operations_during_atomic_batching_limit_not_reached(&operations)?;
+
+            //
+            // == MUTATION SAFE ==
+            //
+
+            // This BTreeMap maps the index of each `CreateEntity` `Operation` in the batch to the
+            // T::EntityId of the entity it created, so later operations can reference that entity
+            let mut entity_created_in_operation = BTreeMap::new();
+
+            // Create raw origin
+            let raw_origin = origin.into().map_err(|_| Error::<T>::OriginCanNotBeMadeIntoRawOrigin)?;
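+            // Each operation below is dispatched as a regular extrinsic call, re-created from a
+            // clone of this raw origin, so every inner call executes on behalf of the same caller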
+
+            for (index, operation_type) in operations.into_iter().enumerate() {
+                let origin = T::Origin::from(raw_origin.clone());
+                let actor = actor.clone();
+                match operation_type {
+                    OperationType::CreateEntity(create_entity_operation) => {
+                        Self::create_entity(origin, create_entity_operation.class_id, actor)?;
+
+                        // entity id of newly created entity
+                        let entity_id = Self::next_entity_id() - T::EntityId::one();
+                        entity_created_in_operation.insert(index, entity_id);
+                    },
+                    OperationType::AddSchemaSupportToEntity(add_schema_support_to_entity_operation) => {
+                        let entity_id = operations::parametrized_entity_to_entity_id(
+                            &entity_created_in_operation, add_schema_support_to_entity_operation.entity_id
+                        )?;
+                        let schema_id = add_schema_support_to_entity_operation.schema_id;
+                        let property_values = operations::parametrized_property_values_to_property_values(
+                            &entity_created_in_operation, add_schema_support_to_entity_operation.parametrized_property_values
+                        )?;
+                        Self::add_schema_support_to_entity(origin, actor, entity_id, schema_id, property_values)?;
+                    },
+                    OperationType::UpdatePropertyValues(update_property_values_operation) => {
+                        let entity_id = operations::parametrized_entity_to_entity_id(
+                            &entity_created_in_operation, update_property_values_operation.entity_id
+                        )?;
+                        let property_values = operations::parametrized_property_values_to_property_values(
+                            &entity_created_in_operation, update_property_values_operation.new_parametrized_property_values
+                        )?;
+                        Self::update_entity_property_values(origin, actor, entity_id, property_values)?;
+                    },
+                }
+            }
+
+            // Trigger event
+            Self::deposit_event(RawEvent::TransactionCompleted(actor));
+
+            Ok(())
+        }
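+
+        // Example: if operation 0 in the batch is `CreateEntity`, `entity_created_in_operation`
+        // maps index 0 to the id of the entity it produced, so a later
+        // `AddSchemaSupportToEntity` or `UpdatePropertyValues` operation in the same batch can
+        // point at that entity through a parametrized entity id, which
+        // `parametrized_entity_to_entity_id` resolves against this map.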
+    }
+}
+
+impl<T: Trait> Module<T> {
+    /// Updates corresponding `Entity` `reference_counter` by `reference_counter_delta`.
+    fn update_entity_rc(
+        entity_id: T::EntityId,
+        reference_counter_delta: EntityReferenceCounterSideEffect,
+    ) {
+        // Update both `total` and `same owner` number of inbound references for the Entity instance under given `entity_id`
+        <EntityById<T>>::mutate(entity_id, |entity| {
+            let entity_inbound_rc = entity.get_reference_counter_mut();
+            entity_inbound_rc.total =
+                (entity_inbound_rc.total as i32 + reference_counter_delta.total) as u32;
+            entity_inbound_rc.same_owner =
+                (entity_inbound_rc.same_owner as i32 + reference_counter_delta.same_owner) as u32;
+        })
+    }
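+
+    // Worked example for the cast arithmetic above: a stored `total` of 3 combined with a
+    // delta of -1 evaluates as `(3i32 + (-1i32)) as u32 == 2`. The casts assume callers never
+    // apply a delta that would push a counter below zero, since `as u32` would then wrap.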
+
+    /// Add a property value hash that must be unique on `Class` level
+    pub fn add_unique_property_value_hash(
+        class_id: T::ClassId,
+        property_id: PropertyId,
+        hash: T::Hash,
+    ) {
+        <UniquePropertyValueHashes<T>>::insert((class_id, property_id), hash, ());
+    }
+
+    /// Remove a property value hash that must be unique on `Class` level
+    pub fn remove_unique_property_value_hash(
+        class_id: T::ClassId,
+        property_id: PropertyId,
+        hash: T::Hash,
+    ) {
+        <UniquePropertyValueHashes<T>>::remove((class_id, property_id), hash);
+    }
+
+    /// Add property value hashes that must be unique on `Class` level
+    pub fn add_unique_property_value_hashes(
+        class_id: T::ClassId,
+        unique_property_value_hashes: BTreeMap<PropertyId, T::Hash>,
+    ) {
+        unique_property_value_hashes
+            .into_iter()
+            .for_each(|(property_id, hash)| {
+                Self::add_unique_property_value_hash(class_id, property_id, hash);
+            });
+    }
+
+    /// Remove property value hashes that must be unique on `Class` level
+    pub fn remove_unique_property_value_hashes(
+        class_id: T::ClassId,
+        unique_property_value_hashes: BTreeMap<PropertyId, T::Hash>,
+    ) {
+        unique_property_value_hashes
+            .into_iter()
+            .for_each(|(property_id, hash)| {
+                Self::remove_unique_property_value_hash(class_id, property_id, hash);
+            });
+    }
+
+    /// Convert all provided `InputPropertyValue`'s into `StoredPropertyValue`'s
+    pub fn make_output_property_values(
+        input_property_values: BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        input_property_values
+            .into_iter()
+            .map(|(property_id, property_value)| (property_id, property_value.into()))
+            .collect()
+    }
+
+    /// Update `entity_property_values` with `property_values`
+    /// Returns updated `entity_property_values`
+    fn make_updated_entity_property_values(
+        schema: Schema,
+        entity_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        output_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        // Concatenate existing `entity_property_values` with the `property_values` provided when adding `Schema` support.
+        let updated_entity_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>> =
+            entity_property_values
+                .into_iter()
+                .chain(output_property_values.to_owned().into_iter())
+                .collect();
+
+        // Fill in all missing non-required `Schema` `property_values` with `StoredPropertyValue::default()`
+        let non_required_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>> = schema
+            .get_properties()
+            .iter()
+            .filter_map(|property_id| {
+                if !updated_entity_property_values.contains_key(property_id) {
+                    Some((*property_id, StoredPropertyValue::default()))
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        // Extend updated_entity_property_values with given Schema non_required_property_values
+        updated_entity_property_values
+            .into_iter()
+            .chain(non_required_property_values.into_iter())
+            .collect()
+    }
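+
+    // Example: if the schema covers property ids {0, 1, 2}, the entity already stores a value
+    // for 0 and the caller provides a value for 1, the result keeps the stored value at 0,
+    // takes the provided value at 1 (provided values win on duplicate keys, as they are
+    // chained last into the resulting `BTreeMap`), and fills 2 with `StoredPropertyValue::default()`.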
+
+    /// Calculate side effects for clear_property_vector operation, based on `property_value_vector` provided and its respective `property`.
+    /// Returns calculated `ReferenceCounterSideEffects`
+    pub fn make_side_effects_for_clear_property_vector_operation(
+        property_value_vector: &VecStoredPropertyValue<T>,
+        property: &Property<T>,
+    ) -> Option<ReferenceCounterSideEffects<T>> {
+        let entity_ids_to_decrease_rc = property_value_vector
+            .get_vec_value_ref()
+            .get_involved_entities();
+
+        if let Some(entity_ids_to_decrease_rcs) = entity_ids_to_decrease_rc {
+            // Calculate `ReferenceCounterSideEffects`, based on entity_ids involved, same_controller_status and chosen `DeltaMode`
+            let same_controller_status = property.property_type.same_controller_status();
+            let entities_inbound_rcs_delta = Self::perform_entities_inbound_rcs_delta_calculation(
+                ReferenceCounterSideEffects::<T>::default(),
+                entity_ids_to_decrease_rcs,
+                same_controller_status,
+                DeltaMode::Decrement,
+            );
+
+            if !entities_inbound_rcs_delta.is_empty() {
+                Some(entities_inbound_rcs_delta)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+
+    /// Update `inbound_rcs_delta`, based on `involved_entity_ids`, `same_controller_status` provided and chosen `DeltaMode`
+    /// Returns updated `inbound_rcs_delta`
+    fn perform_entities_inbound_rcs_delta_calculation(
+        mut inbound_rcs_delta: ReferenceCounterSideEffects<T>,
+        involved_entity_ids: Vec<T::EntityId>,
+        same_controller_status: bool,
+        delta_mode: DeltaMode,
+    ) -> ReferenceCounterSideEffects<T> {
+        for involved_entity_id in involved_entity_ids {
+            // If inbound_rcs_delta already contains entry for the given involved_entity_id, increment it
+            // with atomic EntityReferenceCounterSideEffect instance, based on same_owner flag provided and DeltaMode,
+            // otherwise create new atomic EntityReferenceCounterSideEffect instance
+            if let Some(inbound_rc_delta) = inbound_rcs_delta.get_mut(&involved_entity_id) {
+                *inbound_rc_delta +=
+                    EntityReferenceCounterSideEffect::atomic(same_controller_status, delta_mode);
+            } else {
+                inbound_rcs_delta.insert(
+                    involved_entity_id,
+                    EntityReferenceCounterSideEffect::atomic(same_controller_status, delta_mode),
+                );
+            }
+        }
+        inbound_rcs_delta
+    }
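+
+    // Illustrative example, assuming `EntityReferenceCounterSideEffect::atomic` yields a ±1
+    // step on `total` (and on `same_owner` only when `same_controller_status` is set): if
+    // entity 5 appears twice in `involved_entity_ids` under `DeltaMode::Increment` with the
+    // flag set, the resulting map holds a net side effect of { total: +2, same_owner: +2 } for it.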
+
+    /// Filter references, pointing to the same `Entity`
+    fn filter_references_to_the_same_entity(
+        current_entity_id: T::EntityId,
+        involved_entity_ids: Vec<T::EntityId>,
+    ) -> Vec<T::EntityId> {
+        involved_entity_ids
+            .into_iter()
+            .filter(|involved_entity_id| current_entity_id != *involved_entity_id)
+            .collect()
+    }
+
+    /// Calculate `ReferenceCounterSideEffects`, based on `values_for_existing_properties` provided and chosen `DeltaMode`
+    /// Returns calculated `ReferenceCounterSideEffects`
+    fn calculate_entities_inbound_rcs_delta(
+        current_entity_id: T::EntityId,
+        values_for_existing_properties: StoredValuesForExistingProperties<T>,
+        delta_mode: DeltaMode,
+    ) -> Option<ReferenceCounterSideEffects<T>> {
+        let entities_inbound_rcs_delta = values_for_existing_properties
+            .values()
+            .map(|value_for_existing_property| value_for_existing_property.unzip())
+            .filter_map(|(property, value)| {
+                let involved_entity_ids =
+                    value.get_involved_entities().map(|involved_entity_ids| {
+                        Self::filter_references_to_the_same_entity(
+                            current_entity_id,
+                            involved_entity_ids,
+                        )
+                    });
+                match involved_entity_ids {
+                    Some(involved_entity_ids) if !involved_entity_ids.is_empty() => Some((
+                        involved_entity_ids,
+                        property.property_type.same_controller_status(),
+                    )),
+                    _ => None,
+                }
+            })
+            // Aggregate all side effects on a single entity into one side effect map
+            .fold(
+                ReferenceCounterSideEffects::default(),
+                |inbound_rcs_delta, (involved_entity_ids, same_controller_status)| {
+                    Self::perform_entities_inbound_rcs_delta_calculation(
+                        inbound_rcs_delta,
+                        involved_entity_ids,
+                        same_controller_status,
+                        delta_mode,
+                    )
+                },
+            );
+
+        if !entities_inbound_rcs_delta.is_empty() {
+            Some(entities_inbound_rcs_delta)
+        } else {
+            None
+        }
+    }
+
+    /// Compute `ReferenceCounterSideEffects`, based on `InputPropertyValue` `Reference`s involved in the update process.
+    /// Returns updated `ReferenceCounterSideEffects`
+    pub fn get_updated_inbound_rcs_delta(
+        current_entity_id: T::EntityId,
+        class_properties: Vec<Property<T>>,
+        entity_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        new_output_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> Result<Option<ReferenceCounterSideEffects<T>>, Error<T>> {
+        // Filter entity_property_values down to only those that will be substituted with new_output_property_values
+        let entity_property_values_to_update: BTreeMap<PropertyId, StoredPropertyValue<T>> =
+            entity_property_values
+                .into_iter()
+                .filter(|(property_id, _)| new_output_property_values.contains_key(property_id))
+                .collect();
+
+        // Calculate entities reference counter side effects for update operation
+
+        let stored_values_for_entity_property_values_to_update =
+            match StoredValuesForExistingProperties::from(
+                &class_properties,
+                &entity_property_values_to_update,
+            ) {
+                Ok(stored_values_for_entity_property_values_to_update) => {
+                    stored_values_for_entity_property_values_to_update
+                }
+                Err(e) => {
+                    debug_assert!(false, "Should not fail! {:?}", e);
+                    return Err(e);
+                }
+            };
+
+        // Calculate entities inbound reference counter delta with Decrement DeltaMode for entity_property_values_to_update,
+        // as involved InputPropertyValue References will be substituted with new ones
+        let decremental_reference_counter_side_effects = Self::calculate_entities_inbound_rcs_delta(
+            current_entity_id,
+            stored_values_for_entity_property_values_to_update,
+            DeltaMode::Decrement,
+        );
+
+        // Calculate entities inbound reference counter delta with Increment DeltaMode for new_property_values,
+        // as involved InputPropertyValue References will substitute the old ones
+        let incremental_reference_counter_side_effects = Self::calculate_entities_inbound_rcs_delta(
+            current_entity_id,
+            StoredValuesForExistingProperties::from(
+                &class_properties,
+                &new_output_property_values,
+            )?,
+            DeltaMode::Increment,
+        );
+
+        // Add up the net decremental_reference_counter_side_effects and incremental_reference_counter_side_effects
+        // to get one net side effect per entity.
+        Ok(Self::calculate_updated_inbound_rcs_delta(
+            decremental_reference_counter_side_effects,
+            incremental_reference_counter_side_effects,
+        ))
+    }
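+
+    // Example: if entity 7 is referenced both by an old value being replaced and by its
+    // replacement, the Decrement pass contributes -1 and the Increment pass +1 for it, so
+    // (assuming `ReferenceCounterSideEffects::update` sums per-entity deltas) the combined
+    // result carries a net zero delta for entity 7.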
+
+    /// Add up net first_reference_counter_side_effects and second_reference_counter_side_effects (if some)
+    /// to get one net side effect per entity.
+    /// Returns updated `ReferenceCounterSideEffects`
+    pub fn calculate_updated_inbound_rcs_delta(
+        first_reference_counter_side_effects: Option<ReferenceCounterSideEffects<T>>,
+        second_reference_counter_side_effects: Option<ReferenceCounterSideEffects<T>>,
+    ) -> Option<ReferenceCounterSideEffects<T>> {
+        match (
+            first_reference_counter_side_effects,
+            second_reference_counter_side_effects,
+        ) {
+            (
+                Some(first_reference_counter_side_effects),
+                Some(second_reference_counter_side_effects),
+            ) => {
+                let reference_counter_side_effects = first_reference_counter_side_effects
+                    .update(second_reference_counter_side_effects);
+                Some(reference_counter_side_effects)
+            }
+            (Some(first_reference_counter_side_effects), _) => {
+                Some(first_reference_counter_side_effects)
+            }
+            (_, Some(second_reference_counter_side_effects)) => {
+                Some(second_reference_counter_side_effects)
+            }
+            _ => None,
+        }
+    }
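+
+    // The match above reduces to: (Some(a), Some(b)) => Some(a.update(b)),
+    // (Some(a), None) => Some(a), (None, Some(b)) => Some(b), (None, None) => None.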
+
+    /// Used to update `class_permissions` with parameters provided.
+    /// Returns updated `class_permissions` if update performed
+    pub fn make_updated_class_permissions(
+        class_permissions: ClassPermissions<T>,
+        updated_any_member: Option<bool>,
+        updated_entity_creation_blocked: Option<bool>,
+        updated_all_entity_property_values_locked: Option<bool>,
+        updated_maintainers: Option<BTreeSet<T::CuratorGroupId>>,
+    ) -> Option<ClassPermissions<T>> {
+        // Used to check if update performed
+        let mut updated_class_permissions = class_permissions.clone();
+
+        if let Some(updated_any_member) = updated_any_member {
+            updated_class_permissions.set_any_member_status(updated_any_member);
+        }
+
+        if let Some(updated_entity_creation_blocked) = updated_entity_creation_blocked {
+            updated_class_permissions.set_entity_creation_blocked(updated_entity_creation_blocked);
+        }
+
+        if let Some(updated_all_entity_property_values_locked) =
+            updated_all_entity_property_values_locked
+        {
+            updated_class_permissions
+                .set_all_entity_property_values_locked(updated_all_entity_property_values_locked);
+        }
+
+        if let Some(updated_maintainers) = updated_maintainers {
+            updated_class_permissions.set_maintainers(updated_maintainers);
+        }
+
+        if updated_class_permissions != class_permissions {
+            Some(updated_class_permissions)
+        } else {
+            None
+        }
+    }
+
+    /// Used to update `entity_permissions` with parameters provided.
+    /// Returns updated `entity_permissions` if update performed
+    pub fn make_updated_entity_permissions(
+        entity_permissions: EntityPermissions<T>,
+        updated_frozen: Option<bool>,
+        updated_referenceable: Option<bool>,
+    ) -> Option<EntityPermissions<T>> {
+        // Used to check if update performed
+        let mut updated_entity_permissions = entity_permissions.clone();
+
+        if let Some(updated_frozen) = updated_frozen {
+            updated_entity_permissions.set_frozen(updated_frozen);
+        }
+
+        if let Some(updated_referenceable) = updated_referenceable {
+            updated_entity_permissions.set_referencable(updated_referenceable);
+        }
+
+        if updated_entity_permissions != entity_permissions {
+            Some(updated_entity_permissions)
+        } else {
+            None
+        }
+    }
+
+    /// Ensure property value hash with `unique` flag set is `unique` on `Class` level
+    pub fn ensure_property_value_hash_unique_option_satisfied(
+        class_id: T::ClassId,
+        property_id: PropertyId,
+        unique_property_value_hash: &T::Hash,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            !<UniquePropertyValueHashes<T>>::contains_key(
+                (class_id, property_id),
+                unique_property_value_hash
+            ),
+            Error::<T>::PropertyValueShouldBeUnique
+        );
+        Ok(())
+    }
+
+    /// Ensure all property value hashes with `unique` flag set are `unique` on `Class` level
+    pub fn ensure_property_value_hashes_unique_option_satisfied(
+        class_id: T::ClassId,
+        unique_property_value_hashes: &BTreeMap<PropertyId, T::Hash>,
+    ) -> Result<(), Error<T>> {
+        for (&property_id, unique_property_value_hash) in unique_property_value_hashes {
+            Self::ensure_property_value_hash_unique_option_satisfied(
+                class_id,
+                property_id,
+                unique_property_value_hash,
+            )?;
+        }
+        Ok(())
+    }
+
+    /// Compute old and new vec unique property value hash.
+    /// Ensure new property value hash with `unique` flag set is `unique` on `Class` level
+    pub fn ensure_vec_property_value_hashes(
+        class_id: T::ClassId,
+        in_class_schema_property_id: PropertyId,
+        property_value_vector_updated: &StoredPropertyValue<T>,
+        property_value_vector: VecStoredPropertyValue<T>,
+    ) -> Result<(T::Hash, T::Hash), Error<T>> {
+        // Compute new hash from unique property value and its respective property id
+        let new_property_value_hash =
+            property_value_vector_updated.compute_unique_hash(in_class_schema_property_id);
+
+        // Ensure `Property` with `unique` flag set is `unique` on `Class` level
+        Self::ensure_property_value_hash_unique_option_satisfied(
+            class_id,
+            in_class_schema_property_id,
+            &new_property_value_hash,
+        )?;
+
+        // Compute old hash from the old unique property value and its respective property id
+        let old_property_value_hash =
+            property_value_vector.compute_unique_hash(in_class_schema_property_id);
+
+        Ok((new_property_value_hash, old_property_value_hash))
+    }
+
+    /// Compute new unique property value hashes.
+    /// Ensure new property value hashes with `unique` flag set are `unique` on `Class` level
+    pub fn ensure_new_property_values_respect_uniquness(
+        class_id: T::ClassId,
+        new_output_values_for_existing_properties: StoredValuesForExistingProperties<T>,
+    ) -> Result<BTreeMap<PropertyId, T::Hash>, Error<T>> {
+        let new_unique_property_value_hashes =
+            new_output_values_for_existing_properties.compute_unique_hashes();
+
+        // Ensure all provided Properties with unique flag set are unique on Class level
+        Self::ensure_property_value_hashes_unique_option_satisfied(
+            class_id,
+            &new_unique_property_value_hashes,
+        )?;
+
+        Ok(new_unique_property_value_hashes)
+    }
+
+    /// Returns the stored `Class` if it exists, an error otherwise.
+    fn ensure_class_exists(class_id: T::ClassId) -> Result<Class<T>, Error<T>> {
+        ensure!(
+            <ClassById<T>>::contains_key(class_id),
+            Error::<T>::ClassNotFound
+        );
+        Ok(Self::class_by_id(class_id))
+    }
+
+    /// Returns `Class` and `Entity` under given id, if exists, and `EntityAccessLevel` corresponding to `origin`, if permitted
+    fn ensure_class_entity_and_access_level(
+        account_id: T::AccountId,
+        entity_id: T::EntityId,
+        actor: &Actor<T>,
+    ) -> Result<(Class<T>, Entity<T>, EntityAccessLevel), Error<T>> {
+        // Ensure Entity under given id exists, retrieve corresponding one
+        let entity = Self::ensure_known_entity_id(entity_id)?;
+
+        // Retrieve corresponding Class
+        let class = Self::class_by_id(entity.get_class_id());
+
+        // Derive EntityAccessLevel for the actor attempting to act.
+        let access_level = EntityAccessLevel::derive(
+            &account_id,
+            entity.get_permissions_ref(),
+            class.get_permissions_ref(),
+            actor,
+        )?;
+
+        Ok((class, entity, access_level))
+    }
+
+    /// Ensure `Entity` under given `entity_id` exists, retrieve corresponding `Entity` & `Class`
+    pub fn ensure_known_entity_and_class(
+        entity_id: T::EntityId,
+    ) -> Result<(Entity<T>, Class<T>), Error<T>> {
+        // Ensure Entity under given id exists, retrieve corresponding one
+        let entity = Self::ensure_known_entity_id(entity_id)?;
+
+        let class = ClassById::get(entity.get_class_id());
+        Ok((entity, class))
+    }
+
+    /// Filter provided `values_for_existing_properties`, leaving only `Reference`s with `SameOwner` flag set
+    /// Returns the set of corresponding property ids
+    pub fn get_property_id_references_with_same_owner_flag_set(
+        values_for_existing_properties: StoredValuesForExistingProperties<T>,
+    ) -> BTreeSet<PropertyId> {
+        values_for_existing_properties
+            // Iterate over the PropertyId's
+            .keys()
+            // Filter provided values_for_existing_properties, leaving only `Reference`'s with `SameOwner` flag set
+            .filter(|property_id| {
+                if let Some(value_for_existing_property) =
+                    values_for_existing_properties.get(property_id)
+                {
+                    value_for_existing_property
+                        .get_property()
+                        .property_type
+                        .same_controller_status()
+                } else {
+                    false
+                }
+            })
+            .copied()
+            .collect()
+    }
+
+    /// Ensure all ids of provided `new_property_value_references_with_same_owner_flag_set`
+    /// correspond to property ids of respective Class Property references with same owner flag set
+    pub fn ensure_only_reference_ids_with_same_owner_flag_set_provided(
+        entity_property_id_references_with_same_owner_flag_set: &BTreeSet<PropertyId>,
+        new_property_value_references_with_same_owner_flag_set: &BTreeMap<
+            PropertyId,
+            InputPropertyValue<T>,
+        >,
+    ) -> Result<(), Error<T>> {
+        let new_property_value_id_references_with_same_owner_flag_set: BTreeSet<PropertyId> =
+            new_property_value_references_with_same_owner_flag_set
+                .keys()
+                .copied()
+                .collect();
+
+        ensure!(
+            new_property_value_id_references_with_same_owner_flag_set
+                .is_subset(entity_property_id_references_with_same_owner_flag_set),
+            Error::<T>::AllProvidedPropertyValueIdsMustBeReferencesWithSameOwnerFlagSet
+        );
+        Ok(())
+    }
+
+    /// Ensure all provided `new_property_value_references_with_same_owner_flag_set` are valid
+    fn ensure_are_valid_references_with_same_owner_flag_set(
+        new_property_value_references_with_same_owner_flag_set: InputValuesForExistingProperties<T>,
+        new_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        for updated_value_for_existing_property in
+            new_property_value_references_with_same_owner_flag_set.values()
+        {
+            let (property, value) = updated_value_for_existing_property.unzip();
+
+            // Perform all required checks to ensure provided property values are valid references
+            property.ensure_property_value_is_valid_reference(value, new_controller)?;
+        }
+        Ok(())
+    }
+
+    /// Used to update entity_property_values with parameters provided.
+    /// Returns updated `entity_property_values`, if update performed
+    pub fn make_updated_property_value_references_with_same_owner_flag_set(
+        unused_property_id_references_with_same_owner_flag_set: BTreeSet<PropertyId>,
+        entity_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        new_property_value_references_with_same_owner_flag_set: &BTreeMap<
+            PropertyId,
+            StoredPropertyValue<T>,
+        >,
+    ) -> Option<BTreeMap<PropertyId, StoredPropertyValue<T>>> {
+        // Used to check if update performed
+        let mut entity_property_values_updated = entity_property_values.clone();
+
+        for (property_id, new_property_value_reference_with_same_owner_flag_set) in
+            new_property_value_references_with_same_owner_flag_set
+        {
+            // Update entity_property_values map at property_id with new_property_value_reference_with_same_owner_flag_set
+            entity_property_values_updated.insert(
+                *property_id,
+                new_property_value_reference_with_same_owner_flag_set.to_owned(),
+            );
+        }
+
+        // Throw away old non-required property value references with the same owner flag set
+        // and replace them with default ones
+        for unused_property_id_reference_with_same_owner_flag_set in
+            unused_property_id_references_with_same_owner_flag_set
+        {
+            entity_property_values_updated.insert(
+                unused_property_id_reference_with_same_owner_flag_set,
+                StoredPropertyValue::default(),
+            );
+        }
+
+        if *entity_property_values != entity_property_values_updated {
+            Some(entity_property_values_updated)
+        } else {
+            None
+        }
+    }
+
+    /// Update InboundReferenceCounter, based on previously calculated entities_inbound_rcs_delta, for each Entity involved
+    pub fn update_entities_rcs(
+        entities_inbound_rcs_delta: &Option<ReferenceCounterSideEffects<T>>,
+    ) {
+        if let Some(entities_inbound_rcs_delta) = entities_inbound_rcs_delta {
+            entities_inbound_rcs_delta.update_entities_rcs();
+        }
+    }
+
+    /// Retrieve `property_ids`, that are not in `property_values`
+    pub fn compute_unused_property_ids(
+        property_values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
+        property_ids: &BTreeSet<PropertyId>,
+    ) -> BTreeSet<PropertyId> {
+        let property_value_indices: BTreeSet<PropertyId> =
+            property_values.keys().cloned().collect();
+
+        property_ids
+            .difference(&property_value_indices)
+            .copied()
+            .collect()
+    }
+
+    /// Used to compute old unique hashes, that should be substituted with new ones.
+    pub fn compute_old_unique_hashes(
+        new_output_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        entity_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> BTreeMap<PropertyId, T::Hash> {
+        entity_values
+            .iter()
+            .filter(|(property_id, _)| new_output_property_values.contains_key(property_id))
+            .map(|(&property_id, property_value)| {
+                (property_id, property_value.compute_unique_hash(property_id))
+            })
+            .collect()
+    }
+
+    /// Ensure that none of the `Class` properties under the provided `unused_schema_property_ids` are required
+    /// (i.e. all required `property_values` were provided)
+    pub fn ensure_all_required_properties_provided(
+        class_properties: &[Property<T>],
+        unused_schema_property_ids: &BTreeSet<PropertyId>,
+    ) -> Result<(), Error<T>> {
+        for &unused_schema_property_id in unused_schema_property_ids {
+            let class_property = &class_properties
+                .get(unused_schema_property_id as usize)
+                .ok_or(Error::<T>::ClassPropertyNotFound)?;
+
+            // All required property values should be provided
+            ensure!(
+                !class_property.required,
+                Error::<T>::MissingRequiredProperty
+            );
+        }
+        Ok(())
+    }
+
+    /// Validate all values, provided in `values_for_existing_properties`, against the type of its `Property`
+    /// and check any additional constraints
+    pub fn ensure_property_values_are_valid(
+        entity_controller: &EntityController<T>,
+        values_for_existing_properties: &InputValuesForExistingProperties<T>,
+    ) -> Result<(), Error<T>> {
+        for value_for_existing_property in values_for_existing_properties.values() {
+            let (property, value) = value_for_existing_property.unzip();
+
+            // Validate new InputPropertyValue against the type of this Property and check any additional constraints
+            property.ensure_property_value_to_update_is_valid(value, entity_controller)?;
+        }
+
+        Ok(())
+    }
+
+    /// Ensure all provided `new_property_values` already exist in the `entity_property_values` map
+    pub fn ensure_all_property_values_are_already_added(
+        entity_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        new_property_values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            new_property_values
+                .keys()
+                .all(|key| entity_property_values.contains_key(key)),
+            Error::<T>::UnknownEntityPropertyId
+        );
+        Ok(())
+    }
+
+    /// Ensure `new_values_for_existing_properties` are accessible for actor with given `access_level`
+    pub fn ensure_all_property_values_are_unlocked_from(
+        new_values_for_existing_properties: &InputValuesForExistingProperties<T>,
+        access_level: EntityAccessLevel,
+    ) -> Result<(), Error<T>> {
+        for value_for_new_property in new_values_for_existing_properties.values() {
+            // Ensure Property is unlocked from Actor with given EntityAccessLevel
+            value_for_new_property
+                .get_property()
+                .ensure_unlocked_from(access_level)?;
+        }
+        Ok(())
+    }
+
+    /// Filter out `new_property_values` identical to the corresponding `entity_property_values`.
+    /// Return only the `new_property_values` that are missing from, or differ from, `entity_property_values`
+    pub fn try_filter_identical_property_values(
+        entity_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        new_property_values: BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> BTreeMap<PropertyId, InputPropertyValue<T>> {
+        new_property_values
+            .into_iter()
+            .filter(|(id, new_property_value)| {
+                if let Some(entity_property_value) = entity_property_values.get(id) {
+                    StoredPropertyValue::<T>::from(new_property_value.to_owned())
+                        != *entity_property_value
+                } else {
+                    true
+                }
+            })
+            .collect()
+    }
+
+    /// Update existing `entity_property_values` with `new_property_values`.
+    /// If an update was performed, returns the updated entity property values
+    pub fn make_updated_property_values(
+        entity_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        new_output_property_values: &BTreeMap<PropertyId, StoredPropertyValue<T>>,
+    ) -> Option<BTreeMap<PropertyId, StoredPropertyValue<T>>> {
+        // Used to check if an update was performed
+        let mut entity_property_values_updated = entity_property_values.to_owned();
+
+        new_output_property_values
+            .iter()
+            .for_each(|(id, new_property_value)| {
+                if let Some(entity_property_value) = entity_property_values_updated.get_mut(&id) {
+                    entity_property_value.update(new_property_value.to_owned());
+                }
+            });
+
+        if entity_property_values_updated != *entity_property_values {
+            Some(entity_property_values_updated)
+        } else {
+            None
+        }
+    }
+
+    /// Insert `InputValue` into `VecStoredPropertyValue` at `index_in_property_vector`.
+    /// Returns `VecStoredPropertyValue` wrapped in `StoredPropertyValue`
+    pub fn insert_at_index_in_property_vector(
+        mut property_value_vector: VecStoredPropertyValue<T>,
+        index_in_property_vector: VecMaxLength,
+        value: InputValue<T>,
+    ) -> StoredPropertyValue<T> {
+        property_value_vector.insert_at(index_in_property_vector, value.into());
+        StoredPropertyValue::Vector(property_value_vector)
+    }
+
+    /// Remove the value at `index_in_property_vector` from `VecStoredPropertyValue`.
+    /// Returns `VecStoredPropertyValue` wrapped in `StoredPropertyValue`
+    pub fn remove_at_index_in_property_vector(
+        mut property_value_vector: VecStoredPropertyValue<T>,
+        index_in_property_vector: VecMaxLength,
+    ) -> StoredPropertyValue<T> {
+        property_value_vector.remove_at(index_in_property_vector);
+        StoredPropertyValue::Vector(property_value_vector)
+    }
+
+    /// Clear `VecStoredPropertyValue`.
+    /// Returns empty `VecStoredPropertyValue` wrapped in `StoredPropertyValue`
+    pub fn clear_property_vector(
+        mut property_value_vector: VecStoredPropertyValue<T>,
+    ) -> StoredPropertyValue<T> {
+        property_value_vector.clear();
+        StoredPropertyValue::Vector(property_value_vector)
+    }
+
+    /// Insert `StoredPropertyValue` into `entity_property_values` mapping at `in_class_schema_property_id`.
+    /// Returns updated `entity_property_values`
+    pub fn insert_at_in_class_schema_property_id(
+        mut entity_property_values: BTreeMap<PropertyId, StoredPropertyValue<T>>,
+        in_class_schema_property_id: PropertyId,
+        property_value: StoredPropertyValue<T>,
+    ) -> BTreeMap<PropertyId, StoredPropertyValue<T>> {
+        entity_property_values.insert(in_class_schema_property_id, property_value);
+        entity_property_values
+    }
+
+    /// Ensure `Class` under given id exists, return corresponding one
+    pub fn ensure_known_class_id(class_id: T::ClassId) -> Result<Class<T>, Error<T>> {
+        ensure!(
+            <ClassById<T>>::contains_key(class_id),
+            Error::<T>::ClassNotFound
+        );
+        Ok(Self::class_by_id(class_id))
+    }
+
+    /// Ensure `Entity` under given id exists, return corresponding one
+    pub fn ensure_known_entity_id(entity_id: T::EntityId) -> Result<Entity<T>, Error<T>> {
+        ensure!(
+            <EntityById<T>>::contains_key(entity_id),
+            Error::<T>::EntityNotFound
+        );
+        Ok(Self::entity_by_id(entity_id))
+    }
+
+    /// Ensure `CuratorGroup` under given id exists
+    pub fn ensure_curator_group_under_given_id_exists(
+        curator_group_id: &T::CuratorGroupId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            <CuratorGroupById<T>>::contains_key(curator_group_id),
+            Error::<T>::CuratorGroupDoesNotExist
+        );
+        Ok(())
+    }
+
+    /// Ensure `CuratorGroup` under given id exists, return corresponding one
+    pub fn ensure_curator_group_exists(
+        curator_group_id: &T::CuratorGroupId,
+    ) -> Result<CuratorGroup<T>, Error<T>> {
+        Self::ensure_curator_group_under_given_id_exists(curator_group_id)?;
+        Ok(Self::curator_group_by_id(curator_group_id))
+    }
+
+    /// Ensure `MaxNumberOfMaintainersPerClass` constraint satisfied
+    pub fn ensure_maintainers_limit_not_reached(
+        curator_groups: &BTreeSet<T::CuratorGroupId>,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            curator_groups.len() < T::MaxNumberOfMaintainersPerClass::get() as usize,
+            Error::<T>::ClassMaintainersLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure all `CuratorGroup`'s under given ids exist
+    pub fn ensure_curator_groups_exist(
+        curator_groups: &BTreeSet<T::CuratorGroupId>,
+    ) -> Result<(), Error<T>> {
+        for curator_group in curator_groups {
+            // Ensure CuratorGroup under given id exists
+            Self::ensure_curator_group_exists(curator_group)?;
+        }
+        Ok(())
+    }
+
+    /// Perform security checks to ensure provided `class_maintainers` are valid
+    pub fn ensure_class_maintainers_are_valid(
+        class_maintainers: &BTreeSet<T::CuratorGroupId>,
+    ) -> Result<(), Error<T>> {
+        // Ensure max number of maintainers per Class constraint satisfied
+        ensure!(
+            class_maintainers.len() <= T::MaxNumberOfMaintainersPerClass::get() as usize,
+            Error::<T>::ClassMaintainersLimitReached
+        );
+
+        // Ensure all provided curator groups already exist in the runtime
+        Self::ensure_curator_groups_exist(class_maintainers)?;
+        Ok(())
+    }
+
+    /// Ensure new `Schema` is not empty
+    pub fn ensure_non_empty_schema(
+        existing_properties: &BTreeSet<PropertyId>,
+        new_properties: &[Property<T>],
+    ) -> Result<(), Error<T>> {
+        // Schema is empty if both existing_properties and new_properties are empty
+        let non_empty_schema = !existing_properties.is_empty() || !new_properties.is_empty();
+        ensure!(non_empty_schema, Error::<T>::NoPropertiesInClassSchema);
+        Ok(())
+    }
+
+    /// Ensure `ClassNameLengthConstraint` conditions satisfied
+    pub fn ensure_class_name_is_valid(text: &[u8]) -> Result<(), Error<T>> {
+        T::ClassNameLengthConstraint::get().ensure_valid(
+            text.len(),
+            Error::<T>::ClassNameTooShort,
+            Error::<T>::ClassNameTooLong,
+        )
+    }
+
+    /// Ensure `ClassDescriptionLengthConstraint` conditions satisfied
+    pub fn ensure_class_description_is_valid(text: &[u8]) -> Result<(), Error<T>> {
+        T::ClassDescriptionLengthConstraint::get().ensure_valid(
+            text.len(),
+            Error::<T>::ClassDescriptionTooShort,
+            Error::<T>::ClassDescriptionTooLong,
+        )
+    }
+
+    /// Ensure `MaxNumberOfClasses` constraint satisfied
+    pub fn ensure_class_limit_not_reached() -> Result<(), Error<T>> {
+        ensure!(
+            (<ClassById<T>>::iter().count() as MaxNumber) < T::MaxNumberOfClasses::get(),
+            Error::<T>::ClassLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure `MaxNumberOfEntitiesPerClass` constraint satisfied
+    pub fn ensure_valid_number_of_entities_per_class(
+        maximum_entities_count: T::EntityId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            maximum_entities_count <= T::MaxNumberOfEntitiesPerClass::get(),
+            Error::<T>::EntitiesNumberPerClassConstraintViolated
+        );
+        Ok(())
+    }
+
+    /// Ensure `IndividualEntitiesCreationLimit` constraint satisfied
+    pub fn ensure_valid_number_of_class_entities_per_actor_constraint(
+        number_of_class_entities_per_actor: T::EntityId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            number_of_class_entities_per_actor <= T::IndividualEntitiesCreationLimit::get(),
+            Error::<T>::NumberOfClassEntitiesPerActorConstraintViolated
+        );
+        Ok(())
+    }
+
+    /// Ensure all entities creation limits, defined for a given `Class`, are valid
+    pub fn ensure_entities_creation_limits_are_valid(
+        maximum_entities_count: T::EntityId,
+        default_entity_creation_voucher_upper_bound: T::EntityId,
+    ) -> Result<(), Error<T>> {
+        // Ensure default_entity_creation_voucher_upper_bound does not exceed maximum_entities_count
+        ensure!(
+            default_entity_creation_voucher_upper_bound <= maximum_entities_count,
+            Error::<T>::PerControllerEntitiesCreationLimitExceedsOverallLimit
+        );
+
+        // Ensure maximum_entities_count does not exceed MaxNumberOfEntitiesPerClass limit
+        Self::ensure_valid_number_of_entities_per_class(maximum_entities_count)?;
+
+        // Ensure default_entity_creation_voucher_upper_bound constraint does not exceed IndividualEntitiesCreationLimit
+        Self::ensure_valid_number_of_class_entities_per_actor_constraint(
+            default_entity_creation_voucher_upper_bound,
+        )
+    }
+
+    /// Ensure maximum number of operations during atomic batching constraint satisfied
+    pub fn ensure_number_of_operations_during_atomic_batching_limit_not_reached(
+        operations: &[OperationType<T>],
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            operations.len() <= T::MaxNumberOfOperationsDuringAtomicBatching::get() as usize,
+            Error::<T>::NumberOfOperationsDuringAtomicBatchingLimitReached
+        );
+        Ok(())
+    }
+
+    /// Complete all checks to ensure each `Property` is valid
+    pub fn ensure_all_properties_are_valid(new_properties: &[Property<T>]) -> Result<(), Error<T>> {
+        for new_property in new_properties.iter() {
+            // Ensure PropertyNameLengthConstraint satisfied
+            new_property.ensure_name_is_valid()?;
+
+            // Ensure PropertyDescriptionLengthConstraint satisfied
+            new_property.ensure_description_is_valid()?;
+
+            // Ensure Type specific constraints satisfied
+            new_property.ensure_property_type_size_is_valid()?;
+
+            // If Property Type is Reference, ensure it refers to an existing class_id
+            new_property.ensure_property_type_reference_is_valid()?;
+        }
+        Ok(())
+    }
+
+    /// Ensure all `Property` names are unique within `Class`
+    pub fn ensure_all_property_names_are_unique(
+        class_properties: &[Property<T>],
+        new_properties: &[Property<T>],
+    ) -> Result<(), Error<T>> {
+        // Used to ensure all property names are unique within class
+        let mut unique_prop_names = BTreeSet::new();
+
+        for property in class_properties.iter() {
+            unique_prop_names.insert(property.name.to_owned());
+        }
+
+        for new_property in new_properties {
+            // Ensure name of a new property is unique within its class.
+            ensure!(
+                !unique_prop_names.contains(&new_property.name),
+                Error::<T>::PropertyNameNotUniqueInAClass
+            );
+
+            unique_prop_names.insert(new_property.name.to_owned());
+        }
+
+        Ok(())
+    }
+
+    /// Ensure provided indices of `existing_properties` are valid indices of `Class` properties
+    pub fn ensure_schema_properties_are_valid_indices(
+        existing_properties: &BTreeSet<PropertyId>,
+        class_properties: &[Property<T>],
+    ) -> Result<(), Error<T>> {
+        let has_unknown_properties = existing_properties
+            .iter()
+            .any(|&prop_id| prop_id >= class_properties.len() as PropertyId);
+        ensure!(
+            !has_unknown_properties,
+            Error::<T>::ClassSchemaRefersUnknownPropertyIndex
+        );
+        Ok(())
+    }
+
+    /// Create new `Schema` from existing and new property ids
+    pub fn create_class_schema(
+        existing_properties: BTreeSet<PropertyId>,
+        class_properties: &[Property<T>],
+        new_properties: &[Property<T>],
+    ) -> Schema {
+        // Calculate new property ids
+        let properties = new_properties
+            .iter()
+            .enumerate()
+            .map(|(i, _)| (class_properties.len() + i) as PropertyId)
+            // Concatenate them with existing ones
+            .chain(existing_properties.into_iter())
+            .collect();
+
+        Schema::new(properties)
+    }
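+
+    // Example: with 3 existing class properties and 2 `new_properties`, the new properties are
+    // assigned ids 3 and 4, which are appended to the caller-provided `existing_properties`
+    // ids to form the resulting `Schema`.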
+
+    /// Update existing `Class` properties with new ones provided, return updated ones
+    pub fn make_updated_class_properties(
+        class_properties: Vec<Property<T>>,
+        new_properties: Vec<Property<T>>,
+    ) -> Vec<Property<T>> {
+        class_properties
+            .into_iter()
+            .chain(new_properties.into_iter())
+            .collect()
+    }
+}
+
+decl_event!(
+    pub enum Event<T>
+    where
+        CuratorGroupId = <T as ActorAuthenticator>::CuratorGroupId,
+        CuratorId = <T as ActorAuthenticator>::CuratorId,
+        ClassId = <T as Trait>::ClassId,
+        EntityId = <T as Trait>::EntityId,
+        EntityController = EntityController<T>,
+        EntityCreationVoucher = EntityCreationVoucher<T>,
+        Status = bool,
+        Actor = Actor<T>,
+        Nonce = <T as Trait>::Nonce,
+        SideEffects = Option<ReferenceCounterSideEffects<T>>,
+        SideEffect = Option<(<T as Trait>::EntityId, EntityReferenceCounterSideEffect)>,
+    {
+        CuratorGroupAdded(CuratorGroupId),
+        CuratorGroupRemoved(CuratorGroupId),
+        CuratorGroupStatusSet(CuratorGroupId, Status),
+        CuratorAdded(CuratorGroupId, CuratorId),
+        CuratorRemoved(CuratorGroupId, CuratorId),
+        MaintainerAdded(ClassId, CuratorGroupId),
+        MaintainerRemoved(ClassId, CuratorGroupId),
+        EntityCreationVoucherUpdated(EntityController, EntityCreationVoucher),
+        EntityCreationVoucherCreated(EntityController, EntityCreationVoucher),
+        ClassCreated(ClassId),
+        ClassPermissionsUpdated(ClassId),
+        ClassSchemaAdded(ClassId, SchemaId),
+        ClassSchemaStatusUpdated(ClassId, SchemaId, Status),
+        EntityPermissionsUpdated(EntityId),
+        EntityCreated(Actor, EntityId),
+        EntityRemoved(Actor, EntityId),
+        EntitySchemaSupportAdded(Actor, EntityId, SchemaId, SideEffects),
+        EntityPropertyValuesUpdated(Actor, EntityId, SideEffects),
+        VectorCleared(Actor, EntityId, PropertyId, SideEffects),
+        RemovedAtVectorIndex(Actor, EntityId, PropertyId, VecMaxLength, Nonce, SideEffect),
+        InsertedAtVectorIndex(Actor, EntityId, PropertyId, VecMaxLength, Nonce, SideEffect),
+        EntityOwnershipTransfered(EntityId, EntityController, SideEffects),
+        TransactionCompleted(Actor),
+    }
+);

+ 1029 - 0
runtime-modules/content-directory/src/mock.rs

@@ -0,0 +1,1029 @@
+#![cfg(test)]
+
+use crate::InputValidationLengthConstraint;
+use crate::*;
+use core::iter::FromIterator;
+use frame_support::traits::{OnFinalize, OnInitialize};
+pub use frame_support::{
+    assert_err, assert_ok, impl_outer_event, impl_outer_origin, parameter_types,
+};
+use sp_core::H256;
+use sp_runtime::{
+    testing::Header,
+    traits::{BlakeTwo256, IdentityLookup},
+    Perbill,
+};
+use std::cell::RefCell;
+
+/// Runtime Types
+
+type ClassId = <Runtime as Trait>::ClassId;
+type EntityId = <Runtime as Trait>::EntityId;
+type Nonce = <Runtime as Trait>::Nonce;
+
+type CuratorId = <Runtime as ActorAuthenticator>::CuratorId;
+pub type CuratorGroupId = <Runtime as ActorAuthenticator>::CuratorGroupId;
+type MemberId = <Runtime as ActorAuthenticator>::MemberId;
+
+/// Origins
+
+pub const LEAD_ORIGIN: u64 = 1;
+
+pub const FIRST_CURATOR_ORIGIN: u64 = 2;
+pub const SECOND_CURATOR_ORIGIN: u64 = 3;
+
+pub const FIRST_MEMBER_ORIGIN: u64 = 4;
+pub const SECOND_MEMBER_ORIGIN: u64 = 5;
+pub const UNKNOWN_ORIGIN: u64 = 7777;
+
+/// Runtime Id's
+
+pub const FIRST_CURATOR_ID: CuratorId = 1;
+pub const SECOND_CURATOR_ID: CuratorId = 2;
+
+pub const FIRST_CURATOR_GROUP_ID: CuratorGroupId = 1;
+pub const SECOND_CURATOR_GROUP_ID: CuratorGroupId = 2;
+
+pub const FIRST_MEMBER_ID: MemberId = 1;
+pub const SECOND_MEMBER_ID: MemberId = 2;
+
+pub const FIRST_CLASS_ID: ClassId = 1;
+pub const SECOND_CLASS_ID: ClassId = 2;
+
+pub const FIRST_ENTITY_ID: EntityId = 1;
+pub const SECOND_ENTITY_ID: EntityId = 2;
+pub const THIRD_ENTITY_ID: EntityId = 3;
+
+pub const UNKNOWN_CLASS_ID: ClassId = 111;
+pub const UNKNOWN_ENTITY_ID: EntityId = 222;
+pub const UNKNOWN_PROPERTY_ID: PropertyId = 333;
+pub const UNKNOWN_SCHEMA_ID: SchemaId = 444;
+
+pub const UNKNOWN_CURATOR_GROUP_ID: CuratorGroupId = 555;
+pub const UNKNOWN_CURATOR_ID: CuratorId = 555;
+pub const UNKNOWN_MEMBER_ID: MemberId = 777;
+
+pub const FIRST_SCHEMA_ID: SchemaId = 0;
+pub const SECOND_SCHEMA_ID: SchemaId = 1;
+
+pub const FIRST_PROPERTY_ID: PropertyId = 0;
+pub const SECOND_PROPERTY_ID: PropertyId = 1;
+
+impl_outer_origin! {
+    pub enum Origin for Runtime {}
+}
+
+// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted.
+#[derive(Clone, Default, PartialEq, Eq, Debug)]
+pub struct Runtime;
+parameter_types! {
+    pub const BlockHashCount: u64 = 250;
+    pub const MaximumBlockWeight: u32 = 1024;
+    pub const MaximumBlockLength: u32 = 2 * 1024;
+    pub const AvailableBlockRatio: Perbill = Perbill::one();
+    pub const MinimumPeriod: u64 = 5;
+}
+
+thread_local! {
+    static PROPERTY_NAME_CONSTRAINT: RefCell<InputValidationLengthConstraint> = RefCell::new(InputValidationLengthConstraint::default());
+    static PROPERTY_DESCRIPTION_CONSTRAINT: RefCell<InputValidationLengthConstraint> = RefCell::new(InputValidationLengthConstraint::default());
+    static CLASS_NAME_CONSTRAINT: RefCell<InputValidationLengthConstraint> = RefCell::new(InputValidationLengthConstraint::default());
+    static CLASS_DESCRIPTION_CONSTRAINT: RefCell<InputValidationLengthConstraint> = RefCell::new(InputValidationLengthConstraint::default());
+    static MAX_NUMBER_OF_CLASSES: RefCell<MaxNumber> = RefCell::new(0);
+    static MAX_NUMBER_OF_MAINTAINERS_PER_CLASS: RefCell<MaxNumber> = RefCell::new(0);
+    static MAX_NUMBER_OF_SCHEMAS_PER_CLASS: RefCell<MaxNumber> = RefCell::new(0);
+    static MAX_NUMBER_OF_PROPERTIES_PER_CLASS: RefCell<MaxNumber> = RefCell::new(0);
+    static MAX_NUMBER_OF_ENTITIES_PER_CLASS: RefCell<EntityId> = RefCell::new(0);
+    static MAX_NUMBER_OF_CURATORS_PER_GROUP: RefCell<MaxNumber> = RefCell::new(0);
+    static MAX_NUMBER_OF_OPERATIONS_DURING_ATOMIC_BATCHING: RefCell<MaxNumber> = RefCell::new(0);
+    static VEC_MAX_LENGTH_CONSTRAINT: RefCell<VecMaxLength> = RefCell::new(0);
+    static TEXT_MAX_LENGTH_CONSTRAINT: RefCell<TextMaxLength> = RefCell::new(0);
+    static HASHED_TEXT_MAX_LENGTH_CONSTRAINT: RefCell<HashedTextMaxLength> = RefCell::new(Some(0));
+    static INDIVIDUAL_ENTITIES_CREATION_LIMIT: RefCell<EntityId> = RefCell::new(0);
+}
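+
+// Each thread-local above backs one of the `Get<_>` parameter types below, so individual tests
+// can override a runtime constraint in isolation (the `ExtBuilder` further down carries one
+// field per constraint for exactly this purpose).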
+
+pub struct PropertyNameLengthConstraint;
+impl Get<InputValidationLengthConstraint> for PropertyNameLengthConstraint {
+    fn get() -> InputValidationLengthConstraint {
+        PROPERTY_NAME_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct PropertyDescriptionLengthConstraint;
+impl Get<InputValidationLengthConstraint> for PropertyDescriptionLengthConstraint {
+    fn get() -> InputValidationLengthConstraint {
+        PROPERTY_DESCRIPTION_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct ClassNameLengthConstraint;
+impl Get<InputValidationLengthConstraint> for ClassNameLengthConstraint {
+    fn get() -> InputValidationLengthConstraint {
+        CLASS_NAME_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct ClassDescriptionLengthConstraint;
+impl Get<InputValidationLengthConstraint> for ClassDescriptionLengthConstraint {
+    fn get() -> InputValidationLengthConstraint {
+        CLASS_DESCRIPTION_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfClasses;
+impl Get<MaxNumber> for MaxNumberOfClasses {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_CLASSES.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfMaintainersPerClass;
+impl Get<MaxNumber> for MaxNumberOfMaintainersPerClass {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_MAINTAINERS_PER_CLASS.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfCuratorsPerGroup;
+impl Get<MaxNumber> for MaxNumberOfCuratorsPerGroup {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_CURATORS_PER_GROUP.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfSchemasPerClass;
+impl Get<MaxNumber> for MaxNumberOfSchemasPerClass {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_SCHEMAS_PER_CLASS.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfPropertiesPerSchema;
+impl Get<MaxNumber> for MaxNumberOfPropertiesPerSchema {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_PROPERTIES_PER_CLASS.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfOperationsDuringAtomicBatching;
+impl Get<MaxNumber> for MaxNumberOfOperationsDuringAtomicBatching {
+    fn get() -> MaxNumber {
+        MAX_NUMBER_OF_OPERATIONS_DURING_ATOMIC_BATCHING.with(|v| *v.borrow())
+    }
+}
+
+pub struct VecMaxLengthConstraint;
+impl Get<VecMaxLength> for VecMaxLengthConstraint {
+    fn get() -> VecMaxLength {
+        VEC_MAX_LENGTH_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct TextMaxLengthConstraint;
+impl Get<TextMaxLength> for TextMaxLengthConstraint {
+    fn get() -> TextMaxLength {
+        TEXT_MAX_LENGTH_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct HashedTextMaxLengthConstraint;
+impl Get<HashedTextMaxLength> for HashedTextMaxLengthConstraint {
+    fn get() -> HashedTextMaxLength {
+        HASHED_TEXT_MAX_LENGTH_CONSTRAINT.with(|v| *v.borrow())
+    }
+}
+
+pub struct MaxNumberOfEntitiesPerClass;
+impl Get<EntityId> for MaxNumberOfEntitiesPerClass {
+    fn get() -> EntityId {
+        MAX_NUMBER_OF_ENTITIES_PER_CLASS.with(|v| *v.borrow())
+    }
+}
+
+pub struct IndividualEntitiesCreationLimit;
+impl Get<EntityId> for IndividualEntitiesCreationLimit {
+    fn get() -> EntityId {
+        INDIVIDUAL_ENTITIES_CREATION_LIMIT.with(|v| *v.borrow())
+    }
+}
+
+impl system::Trait for Runtime {
+    type BaseCallFilter = ();
+    type Origin = Origin;
+    type Call = ();
+    type Index = u64;
+    type BlockNumber = u64;
+    type Hash = H256;
+    type Hashing = BlakeTwo256;
+    type AccountId = u64;
+    type Lookup = IdentityLookup<Self::AccountId>;
+    type Header = Header;
+    type Event = TestEvent;
+    type BlockHashCount = BlockHashCount;
+    type MaximumBlockWeight = MaximumBlockWeight;
+    type DbWeight = ();
+    type BlockExecutionWeight = ();
+    type ExtrinsicBaseWeight = ();
+    type MaximumExtrinsicWeight = ();
+    type MaximumBlockLength = MaximumBlockLength;
+    type AvailableBlockRatio = AvailableBlockRatio;
+    type Version = ();
+    type ModuleToIndex = ();
+    type AccountData = ();
+    type OnNewAccount = ();
+    type OnKilledAccount = ();
+}
+
+mod test_events {
+    pub use crate::Event;
+}
+
+impl_outer_event! {
+    pub enum TestEvent for Runtime {
+        test_events<T>,
+        system<T>,
+    }
+}
+
+impl Trait for Runtime {
+    type Event = TestEvent;
+    type Nonce = u64;
+    type ClassId = u64;
+    type EntityId = u64;
+    type PropertyNameLengthConstraint = PropertyNameLengthConstraint;
+    type PropertyDescriptionLengthConstraint = PropertyDescriptionLengthConstraint;
+    type ClassNameLengthConstraint = ClassNameLengthConstraint;
+    type ClassDescriptionLengthConstraint = ClassDescriptionLengthConstraint;
+    type MaxNumberOfClasses = MaxNumberOfClasses;
+    type MaxNumberOfMaintainersPerClass = MaxNumberOfMaintainersPerClass;
+    type MaxNumberOfSchemasPerClass = MaxNumberOfSchemasPerClass;
+    type MaxNumberOfPropertiesPerSchema = MaxNumberOfPropertiesPerSchema;
+    type MaxNumberOfEntitiesPerClass = MaxNumberOfEntitiesPerClass;
+    type MaxNumberOfCuratorsPerGroup = MaxNumberOfCuratorsPerGroup;
+    type MaxNumberOfOperationsDuringAtomicBatching = MaxNumberOfOperationsDuringAtomicBatching;
+    type VecMaxLengthConstraint = VecMaxLengthConstraint;
+    type TextMaxLengthConstraint = TextMaxLengthConstraint;
+    type HashedTextMaxLengthConstraint = HashedTextMaxLengthConstraint;
+    type IndividualEntitiesCreationLimit = IndividualEntitiesCreationLimit;
+}
+
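+// In this mock, authorization resolves against fixed test origins: `LEAD_ORIGIN` acts as the
+// lead, `FIRST_CURATOR_ORIGIN`/`SECOND_CURATOR_ORIGIN` act as curators with the matching
+// curator ids, and any origin other than `UNKNOWN_ORIGIN` authenticates as a member whose id
+// is below `MaxNumberOfEntitiesPerClass`.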
+impl ActorAuthenticator for Runtime {
+    type CuratorId = u64;
+    type MemberId = u64;
+    type CuratorGroupId = u64;
+
+    // Consider lazy_static crate?
+
+    fn is_lead(account_id: &Self::AccountId) -> bool {
+        let lead_account_id = ensure_signed(Origin::signed(LEAD_ORIGIN)).unwrap();
+        *account_id == lead_account_id
+    }
+
+    fn is_curator(curator_id: &Self::CuratorId, account_id: &Self::AccountId) -> bool {
+        let first_curator_account_id = ensure_signed(Origin::signed(FIRST_CURATOR_ORIGIN)).unwrap();
+        let second_curator_account_id =
+            ensure_signed(Origin::signed(SECOND_CURATOR_ORIGIN)).unwrap();
+        (first_curator_account_id == *account_id && FIRST_CURATOR_ID == *curator_id)
+            || (second_curator_account_id == *account_id && SECOND_CURATOR_ID == *curator_id)
+    }
+
+    fn is_member(member_id: &Self::MemberId, account_id: &Self::AccountId) -> bool {
+        let unknown_member_account_id = ensure_signed(Origin::signed(UNKNOWN_ORIGIN)).unwrap();
+        *member_id < MaxNumberOfEntitiesPerClass::get() && unknown_member_account_id != *account_id
+    }
+}
+
+pub struct ExtBuilder {
+    property_name_constraint: InputValidationLengthConstraint,
+    property_description_constraint: InputValidationLengthConstraint,
+    class_name_constraint: InputValidationLengthConstraint,
+    class_description_constraint: InputValidationLengthConstraint,
+    max_number_of_classes: MaxNumber,
+    max_number_of_maintainers_per_class: MaxNumber,
+    max_number_of_schemas_per_class: MaxNumber,
+    max_number_of_properties_per_class: MaxNumber,
+    max_number_of_entities_per_class: EntityId,
+    max_number_of_curators_per_group: MaxNumber,
+    max_number_of_operations_during_atomic_batching: MaxNumber,
+    vec_max_length_constraint: VecMaxLength,
+    text_max_length_constraint: TextMaxLength,
+    hashed_text_max_length_constraint: HashedTextMaxLength,
+    individual_entities_creation_limit: EntityId,
+}
+
+impl Default for ExtBuilder {
+    fn default() -> Self {
+        Self {
+            property_name_constraint: InputValidationLengthConstraint::new(1, 49),
+            property_description_constraint: InputValidationLengthConstraint::new(1, 500),
+            class_name_constraint: InputValidationLengthConstraint::new(1, 49),
+            class_description_constraint: InputValidationLengthConstraint::new(1, 500),
+            max_number_of_classes: 100,
+            max_number_of_maintainers_per_class: 10,
+            max_number_of_schemas_per_class: 20,
+            max_number_of_properties_per_class: 40,
+            max_number_of_entities_per_class: 400,
+            max_number_of_curators_per_group: 50,
+            max_number_of_operations_during_atomic_batching: 500,
+            vec_max_length_constraint: 200,
+            text_max_length_constraint: 5000,
+            hashed_text_max_length_constraint: Some(25000),
+            individual_entities_creation_limit: 50,
+        }
+    }
+}
+
+impl ExtBuilder {
+    pub fn set_associated_consts(&self) {
+        PROPERTY_NAME_CONSTRAINT.with(|v| *v.borrow_mut() = self.property_name_constraint);
+        PROPERTY_DESCRIPTION_CONSTRAINT
+            .with(|v| *v.borrow_mut() = self.property_description_constraint);
+        CLASS_NAME_CONSTRAINT.with(|v| *v.borrow_mut() = self.class_name_constraint);
+        CLASS_DESCRIPTION_CONSTRAINT.with(|v| *v.borrow_mut() = self.class_description_constraint);
+        MAX_NUMBER_OF_CLASSES.with(|v| *v.borrow_mut() = self.max_number_of_classes);
+        MAX_NUMBER_OF_MAINTAINERS_PER_CLASS
+            .with(|v| *v.borrow_mut() = self.max_number_of_maintainers_per_class);
+        MAX_NUMBER_OF_SCHEMAS_PER_CLASS
+            .with(|v| *v.borrow_mut() = self.max_number_of_schemas_per_class);
+        MAX_NUMBER_OF_PROPERTIES_PER_CLASS
+            .with(|v| *v.borrow_mut() = self.max_number_of_properties_per_class);
+        MAX_NUMBER_OF_ENTITIES_PER_CLASS
+            .with(|v| *v.borrow_mut() = self.max_number_of_entities_per_class);
+        MAX_NUMBER_OF_CURATORS_PER_GROUP
+            .with(|v| *v.borrow_mut() = self.max_number_of_curators_per_group);
+        MAX_NUMBER_OF_OPERATIONS_DURING_ATOMIC_BATCHING
+            .with(|v| *v.borrow_mut() = self.max_number_of_operations_during_atomic_batching);
+        VEC_MAX_LENGTH_CONSTRAINT.with(|v| *v.borrow_mut() = self.vec_max_length_constraint);
+        TEXT_MAX_LENGTH_CONSTRAINT.with(|v| *v.borrow_mut() = self.text_max_length_constraint);
+        HASHED_TEXT_MAX_LENGTH_CONSTRAINT
+            .with(|v| *v.borrow_mut() = self.hashed_text_max_length_constraint);
+        INDIVIDUAL_ENTITIES_CREATION_LIMIT
+            .with(|v| *v.borrow_mut() = self.individual_entities_creation_limit);
+    }
+
+    pub fn build(self, config: GenesisConfig<Runtime>) -> sp_io::TestExternalities {
+        self.set_associated_consts();
+        let mut t = system::GenesisConfig::default()
+            .build_storage::<Runtime>()
+            .unwrap();
+        config.assimilate_storage(&mut t).unwrap();
+        t.into()
+    }
+}
+
+// Builds a genesis storage key/value store according to our desired mockup.
+
+fn default_content_directory_genesis_config() -> GenesisConfig<Runtime> {
+    GenesisConfig {
+        curator_group_by_id: vec![],
+        next_class_id: 1,
+        next_entity_id: 1,
+        next_curator_group_id: 1,
+    }
+}
+
+pub fn with_test_externalities<R, F: FnOnce() -> R>(f: F) -> R {
+    let default_genesis_config = default_content_directory_genesis_config();
+    /*
+        Events are not emitted on block 0.
+        So any dispatchable calls made during genesis block formation will have no events emitted.
+        https://substrate.dev/recipes/2-appetizers/4-events.html
+    */
+    let func = || {
+        run_to_block(1);
+        f()
+    };
+
+    ExtBuilder::default()
+        .build(default_genesis_config)
+        .execute_with(func)
+}
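+
+// Illustrative usage (not part of this module): run a test body on block 1 against the
+// default genesis config, e.g.
+//
+//     with_test_externalities(|| {
+//         assert_eq!(next_class_id(), 1);
+//         assert_eq!(next_entity_id(), 1);
+//     });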
+
+pub fn generate_text(len: usize) -> Vec<u8> {
+    vec![b'x'; len]
+}
+
+impl<T: Trait> Property<T> {
+    pub fn required(mut self) -> Self {
+        self.required = true;
+        self
+    }
+
+    pub fn unique(mut self) -> Self {
+        self.unique = true;
+        self
+    }
+}
+
+// Events
+
+type RawTestEvent = RawEvent<
+    CuratorGroupId,
+    CuratorId,
+    ClassId,
+    EntityId,
+    EntityController<Runtime>,
+    EntityCreationVoucher<Runtime>,
+    bool,
+    Actor<Runtime>,
+    Nonce,
+    Option<ReferenceCounterSideEffects<Runtime>>,
+    Option<(EntityId, EntityReferenceCounterSideEffect)>,
+>;
+
+pub fn get_test_event(raw_event: RawTestEvent) -> TestEvent {
+    TestEvent::test_events(raw_event)
+}
+
+pub fn assert_event_success(tested_event: TestEvent, number_of_events_after_call: usize) {
+    // Ensure the runtime events length is equal to the expected number of events after the call
+    assert_eq!(System::events().len(), number_of_events_after_call);
+
+    // Ensure the last emitted event is equal to the expected one
+    assert!(matches!(
+            System::events()
+                .iter()
+                .last(),
+            Some(last_event) if last_event.event == tested_event
+    ));
+}
+
+pub fn assert_failure(
+    call_result: DispatchResult,
+    expected_error: Error<Runtime>,
+    number_of_events_before_call: usize,
+) {
+    // Ensure the call result is equal to the expected error
+    assert_err!(call_result, expected_error);
+
+    // Ensure no other events were emitted after the call
+    assert_eq!(System::events().len(), number_of_events_before_call);
+}
+
+// Curator groups
+
+pub fn next_curator_group_id() -> CuratorGroupId {
+    TestModule::next_curator_group_id()
+}
+
+pub fn add_curator_group(lead_origin: u64) -> DispatchResult {
+    TestModule::add_curator_group(Origin::signed(lead_origin))
+}
+
+pub fn remove_curator_group(lead_origin: u64, curator_group_id: CuratorGroupId) -> DispatchResult {
+    TestModule::remove_curator_group(Origin::signed(lead_origin), curator_group_id)
+}
+
+pub fn add_curator_to_group(
+    lead_origin: u64,
+    curator_group_id: CuratorGroupId,
+    curator_id: CuratorId,
+) -> DispatchResult {
+    TestModule::add_curator_to_group(Origin::signed(lead_origin), curator_group_id, curator_id)
+}
+
+pub fn remove_curator_from_group(
+    lead_origin: u64,
+    curator_group_id: CuratorGroupId,
+    curator_id: CuratorId,
+) -> DispatchResult {
+    TestModule::remove_curator_from_group(Origin::signed(lead_origin), curator_group_id, curator_id)
+}
+
+pub fn set_curator_group_status(
+    lead_origin: u64,
+    curator_group_id: CuratorGroupId,
+    is_active: bool,
+) -> DispatchResult {
+    TestModule::set_curator_group_status(Origin::signed(lead_origin), curator_group_id, is_active)
+}
+
+pub fn curator_group_by_id(curator_group_id: CuratorGroupId) -> CuratorGroup<Runtime> {
+    TestModule::curator_group_by_id(curator_group_id)
+}
+
+pub fn curator_group_exists(curator_group_id: CuratorGroupId) -> bool {
+    CuratorGroupById::<Runtime>::contains_key(curator_group_id)
+}
+
+// Classes
+
+pub enum ClassType {
+    Valid,
+    NameTooLong,
+    NameTooShort,
+    DescriptionTooLong,
+    DescriptionTooShort,
+    InvalidMaximumEntitiesCount,
+    InvalidDefaultVoucherUpperBound,
+    DefaultVoucherUpperBoundExceedsMaximumEntitiesCount,
+    MaintainersLimitReached,
+    CuratorGroupDoesNotExist,
+}
+
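+// Builds a default class and mutates it according to the selected `ClassType`, so that each
+// variant above exercises a specific `create_class` validation path. Illustrative example:
+// `create_simple_class(LEAD_ORIGIN, ClassType::NameTooLong)` is expected to fail name validation.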
+pub fn create_simple_class(lead_origin: u64, class_type: ClassType) -> DispatchResult {
+    let mut class = create_class_with_default_permissions();
+    match class_type {
+        ClassType::Valid => (),
+        ClassType::NameTooShort => {
+            class.set_name(generate_text(
+                ClassNameLengthConstraint::get().min() as usize - 1,
+            ));
+        }
+        ClassType::NameTooLong => {
+            class.set_name(generate_text(
+                ClassNameLengthConstraint::get().max() as usize + 1,
+            ));
+        }
+        ClassType::DescriptionTooLong => {
+            class.set_description(generate_text(
+                ClassDescriptionLengthConstraint::get().max() as usize + 1,
+            ));
+        }
+        ClassType::DescriptionTooShort => {
+            class.set_description(generate_text(
+                ClassDescriptionLengthConstraint::get().min() as usize - 1,
+            ));
+        }
+        ClassType::InvalidMaximumEntitiesCount => {
+            class.set_maximum_entities_count(MaxNumberOfEntitiesPerClass::get() + 1);
+        }
+        ClassType::InvalidDefaultVoucherUpperBound => {
+            class.set_default_entity_creation_voucher_upper_bound(
+                IndividualEntitiesCreationLimit::get() + 1,
+            );
+        }
+        ClassType::DefaultVoucherUpperBoundExceedsMaximumEntitiesCount => {
+            // Make the default voucher upper bound (5) exceed the maximum entities count (3)
+            class.set_default_entity_creation_voucher_upper_bound(5);
+
+            class.set_maximum_entities_count(3);
+        }
+        ClassType::MaintainersLimitReached => {
+            let mut maintainers = BTreeSet::new();
+            for curator_group_id in 1..=(MaxNumberOfMaintainersPerClass::get() + 1) {
+                maintainers.insert(curator_group_id as CuratorGroupId);
+            }
+            class.get_permissions_mut().set_maintainers(maintainers);
+        }
+        ClassType::CuratorGroupDoesNotExist => {
+            let maintainers = BTreeSet::from_iter(vec![UNKNOWN_CURATOR_GROUP_ID].into_iter());
+            class.get_permissions_mut().set_maintainers(maintainers);
+        }
+    };
+    TestModule::create_class(
+        Origin::signed(lead_origin),
+        class.get_name().to_owned(),
+        class.get_description().to_owned(),
+        class.get_permissions_ref().to_owned(),
+        class.get_maximum_entities_count(),
+        class.get_default_entity_creation_voucher_upper_bound(),
+    )
+}
+
+pub fn create_class_with_default_permissions() -> Class<Runtime> {
+    Class::new(
+        ClassPermissions::default(),
+        generate_text(ClassNameLengthConstraint::get().max() as usize),
+        generate_text(ClassDescriptionLengthConstraint::get().max() as usize),
+        MaxNumberOfEntitiesPerClass::get(),
+        IndividualEntitiesCreationLimit::get(),
+    )
+}
+
+pub fn add_maintainer_to_class(
+    lead_origin: u64,
+    class_id: ClassId,
+    curator_group_id: CuratorGroupId,
+) -> DispatchResult {
+    TestModule::add_maintainer_to_class(Origin::signed(lead_origin), class_id, curator_group_id)
+}
+
+pub fn remove_maintainer_from_class(
+    lead_origin: u64,
+    class_id: ClassId,
+    curator_group_id: CuratorGroupId,
+) -> DispatchResult {
+    TestModule::remove_maintainer_from_class(
+        Origin::signed(lead_origin),
+        class_id,
+        curator_group_id,
+    )
+}
+
+pub fn update_class_permissions(
+    lead_origin: u64,
+    class_id: ClassId,
+    updated_any_member: Option<bool>,
+    updated_entity_creation_blocked: Option<bool>,
+    updated_all_entity_property_values_locked: Option<bool>,
+    updated_maintainers: Option<BTreeSet<CuratorGroupId>>,
+) -> DispatchResult {
+    TestModule::update_class_permissions(
+        Origin::signed(lead_origin),
+        class_id,
+        updated_any_member,
+        updated_entity_creation_blocked,
+        updated_all_entity_property_values_locked,
+        updated_maintainers,
+    )
+}
+
+pub fn add_class_schema(
+    lead_origin: u64,
+    class_id: ClassId,
+    existing_properties: BTreeSet<PropertyId>,
+    new_properties: Vec<Property<Runtime>>,
+) -> DispatchResult {
+    TestModule::add_class_schema(
+        Origin::signed(lead_origin),
+        class_id,
+        existing_properties,
+        new_properties,
+    )
+}
+
+pub fn update_class_schema_status(
+    lead_origin: u64,
+    class_id: ClassId,
+    schema_id: SchemaId,
+    status: bool,
+) -> DispatchResult {
+    TestModule::update_class_schema_status(Origin::signed(lead_origin), class_id, schema_id, status)
+}
+
+pub fn next_class_id() -> ClassId {
+    TestModule::next_class_id()
+}
+
+pub fn class_by_id(class_id: ClassId) -> Class<Runtime> {
+    TestModule::class_by_id(class_id)
+}
+
+pub fn class_exists(class_id: ClassId) -> bool {
+    ClassById::<Runtime>::contains_key(class_id)
+}
+
+// Vouchers
+
+pub fn update_entity_creation_voucher(
+    lead_origin: u64,
+    class_id: ClassId,
+    controller: EntityController<Runtime>,
+    maximum_entities_count: EntityId,
+) -> DispatchResult {
+    TestModule::update_entity_creation_voucher(
+        Origin::signed(lead_origin),
+        class_id,
+        controller,
+        maximum_entities_count,
+    )
+}
+
+pub fn entity_creation_vouchers(
+    class_id: ClassId,
+    entity_controller: &EntityController<Runtime>,
+) -> EntityCreationVoucher<Runtime> {
+    TestModule::entity_creation_vouchers(class_id, entity_controller)
+}
+
+pub fn entity_creation_voucher_exists(
+    class_id: ClassId,
+    entity_controller: &EntityController<Runtime>,
+) -> bool {
+    EntityCreationVouchers::<Runtime>::contains_key(class_id, entity_controller)
+}
+
+// Entities
+
+pub fn entity_exists(entity_id: EntityId) -> bool {
+    EntityById::<Runtime>::contains_key(entity_id)
+}
+
+pub fn entity_by_id(entity_id: EntityId) -> Entity<Runtime> {
+    TestModule::entity_by_id(entity_id)
+}
+
+pub fn next_entity_id() -> EntityId {
+    TestModule::next_entity_id()
+}
+
+pub fn create_entity(origin: u64, class_id: ClassId, actor: Actor<Runtime>) -> DispatchResult {
+    TestModule::create_entity(Origin::signed(origin), class_id, actor)
+}
+
+pub fn remove_entity(origin: u64, actor: Actor<Runtime>, entity_id: EntityId) -> DispatchResult {
+    TestModule::remove_entity(Origin::signed(origin), actor, entity_id)
+}
+
+pub fn update_entity_permissions(
+    lead_origin: u64,
+    entity_id: EntityId,
+    updated_frozen: Option<bool>,
+    updated_referenceable: Option<bool>,
+) -> DispatchResult {
+    TestModule::update_entity_permissions(
+        Origin::signed(lead_origin),
+        entity_id,
+        updated_frozen,
+        updated_referenceable,
+    )
+}
+
+pub fn add_schema_support_to_entity(
+    origin: u64,
+    actor: Actor<Runtime>,
+    entity_id: EntityId,
+    schema_id: SchemaId,
+    new_property_values: BTreeMap<PropertyId, InputPropertyValue<Runtime>>,
+) -> DispatchResult {
+    TestModule::add_schema_support_to_entity(
+        Origin::signed(origin),
+        actor,
+        entity_id,
+        schema_id,
+        new_property_values,
+    )
+}
+
+pub fn update_entity_property_values(
+    origin: u64,
+    actor: Actor<Runtime>,
+    entity_id: EntityId,
+    new_property_values: BTreeMap<PropertyId, InputPropertyValue<Runtime>>,
+) -> DispatchResult {
+    TestModule::update_entity_property_values(
+        Origin::signed(origin),
+        actor,
+        entity_id,
+        new_property_values,
+    )
+}
+
+pub fn clear_entity_property_vector(
+    origin: u64,
+    actor: Actor<Runtime>,
+    entity_id: EntityId,
+    in_class_schema_property_id: PropertyId,
+) -> DispatchResult {
+    TestModule::clear_entity_property_vector(
+        Origin::signed(origin),
+        actor,
+        entity_id,
+        in_class_schema_property_id,
+    )
+}
+
+pub fn insert_at_entity_property_vector(
+    origin: u64,
+    actor: Actor<Runtime>,
+    entity_id: EntityId,
+    in_class_schema_property_id: PropertyId,
+    index_in_property_vector: VecMaxLength,
+    property_value: InputValue<Runtime>,
+    nonce: Nonce,
+) -> DispatchResult {
+    TestModule::insert_at_entity_property_vector(
+        Origin::signed(origin),
+        actor,
+        entity_id,
+        in_class_schema_property_id,
+        index_in_property_vector,
+        property_value,
+        nonce,
+    )
+}
+
+pub fn remove_at_entity_property_vector(
+    origin: u64,
+    actor: Actor<Runtime>,
+    entity_id: EntityId,
+    in_class_schema_property_id: PropertyId,
+    index_in_property_vector: VecMaxLength,
+    nonce: Nonce,
+) -> DispatchResult {
+    TestModule::remove_at_entity_property_vector(
+        Origin::signed(origin),
+        actor,
+        entity_id,
+        in_class_schema_property_id,
+        index_in_property_vector,
+        nonce,
+    )
+}
+
+pub fn transfer_entity_ownership(
+    origin: u64,
+    entity_id: EntityId,
+    new_controller: EntityController<Runtime>,
+    new_property_value_references_with_same_owner_flag_set: BTreeMap<
+        PropertyId,
+        InputPropertyValue<Runtime>,
+    >,
+) -> DispatchResult {
+    TestModule::transfer_entity_ownership(
+        Origin::signed(origin),
+        entity_id,
+        new_controller,
+        new_property_value_references_with_same_owner_flag_set,
+    )
+}
+
+// Transaction
+
+pub fn transaction(
+    origin: u64,
+    actor: Actor<Runtime>,
+    operations: Vec<OperationType<Runtime>>,
+) -> DispatchResult {
+    TestModule::transaction(Origin::signed(origin), actor, operations)
+}
+
+pub enum InvalidPropertyType {
+    NameTooLong,
+    NameTooShort,
+    DescriptionTooLong,
+    DescriptionTooShort,
+    TextIsTooLong,
+    TextHashIsTooLong,
+    VecIsTooLong,
+}
+
+impl<T: Trait> Property<T> {
+    pub fn default_with_name(name_len: usize) -> Self {
+        let name = generate_text(name_len);
+        let description = generate_text(PropertyDescriptionLengthConstraint::get().min() as usize);
+        Self {
+            name,
+            description,
+            ..Property::<T>::default()
+        }
+    }
+
+    pub fn with_name_and_type(
+        name_len: usize,
+        property_type: PropertyType<T>,
+        required: bool,
+        unique: bool,
+    ) -> Self {
+        let name = generate_text(name_len);
+        let description = generate_text(PropertyDescriptionLengthConstraint::get().min() as usize);
+        Self {
+            name,
+            description,
+            property_type,
+            required,
+            unique,
+            ..Property::<T>::default()
+        }
+    }
+
+    pub fn invalid(invalid_property_type: InvalidPropertyType) -> Property<Runtime> {
+        let mut default_property = Property::<Runtime>::default_with_name(
+            PropertyNameLengthConstraint::get().min() as usize,
+        );
+        match invalid_property_type {
+            InvalidPropertyType::NameTooLong => {
+                default_property.name =
+                    generate_text(PropertyNameLengthConstraint::get().max() as usize + 1);
+            }
+            InvalidPropertyType::NameTooShort => {
+                default_property.name =
+                    generate_text(PropertyNameLengthConstraint::get().min() as usize - 1);
+            }
+            InvalidPropertyType::DescriptionTooLong => {
+                default_property.description =
+                    generate_text(PropertyDescriptionLengthConstraint::get().max() as usize + 1);
+            }
+            InvalidPropertyType::DescriptionTooShort => {
+                default_property.description =
+                    generate_text(PropertyDescriptionLengthConstraint::get().min() as usize - 1);
+            }
+            InvalidPropertyType::TextIsTooLong => {
+                default_property.property_type =
+                    PropertyType::<Runtime>::single_text(TextMaxLengthConstraint::get() + 1);
+            }
+            InvalidPropertyType::TextHashIsTooLong => {
+                if let Some(hashed_text_max_len) = HashedTextMaxLengthConstraint::get() {
+                    default_property.property_type =
+                        PropertyType::<Runtime>::single_text_hash(Some(hashed_text_max_len + 1));
+                }
+            }
+            InvalidPropertyType::VecIsTooLong => {
+                default_property.property_type = PropertyType::<Runtime>::vec_reference(
+                    FIRST_CLASS_ID,
+                    true,
+                    VecMaxLengthConstraint::get() + 1,
+                );
+            }
+        };
+        default_property
+    }
+}
+
+impl<T: Trait> PropertyType<T> {
+    pub fn vec_reference(
+        class_id: ClassId,
+        same_controller: bool,
+        max_length: VecMaxLength,
+    ) -> PropertyType<Runtime> {
+        let vec_type = Type::<Runtime>::Reference(class_id, same_controller);
+        let vec_reference = VecPropertyType::<Runtime>::new(vec_type, max_length);
+        PropertyType::<Runtime>::Vector(vec_reference)
+    }
+
+    pub fn vec_text(
+        text_max_len: TextMaxLength,
+        vec_max_length: VecMaxLength,
+    ) -> PropertyType<Runtime> {
+        let vec_type = Type::<Runtime>::Text(text_max_len);
+        let vec_text = VecPropertyType::<Runtime>::new(vec_type, vec_max_length);
+        PropertyType::<Runtime>::Vector(vec_text)
+    }
+
+    pub fn single_text(text_max_len: TextMaxLength) -> PropertyType<Runtime> {
+        let text_type = Type::<Runtime>::Text(text_max_len);
+        PropertyType::<Runtime>::Single(text_type)
+    }
+
+    pub fn single_text_hash(text_hash_max_len: HashedTextMaxLength) -> PropertyType<Runtime> {
+        let text_type = Type::<Runtime>::Hash(text_hash_max_len);
+        PropertyType::<Runtime>::Single(text_type)
+    }
+
+    pub fn vec_text_hash(
+        text_hash_max_len: HashedTextMaxLength,
+        vec_max_length: VecMaxLength,
+    ) -> PropertyType<Runtime> {
+        let vec_type = Type::<Runtime>::Hash(text_hash_max_len);
+        let vec_text_hash = VecPropertyType::<Runtime>::new(vec_type, vec_max_length);
+        PropertyType::<Runtime>::Vector(vec_text_hash)
+    }
+}
+
+impl<T: Trait> InputPropertyValue<T> {
+    pub fn vec_reference(entity_ids: Vec<EntityId>) -> InputPropertyValue<Runtime> {
+        let vec_value = VecInputValue::<Runtime>::Reference(entity_ids);
+        InputPropertyValue::<Runtime>::Vector(vec_value)
+    }
+
+    pub fn vec_text(texts: Vec<Vec<u8>>) -> InputPropertyValue<Runtime> {
+        let vec_value = VecInputValue::<Runtime>::Text(texts);
+        InputPropertyValue::<Runtime>::Vector(vec_value)
+    }
+
+    pub fn vec_text_to_hash(texts: Vec<Vec<u8>>) -> InputPropertyValue<Runtime> {
+        let vec_value = VecInputValue::<Runtime>::TextToHash(texts);
+        InputPropertyValue::<Runtime>::Vector(vec_value)
+    }
+
+    pub fn single_text(text_len: TextMaxLength) -> InputPropertyValue<Runtime> {
+        let text_value = InputValue::<Runtime>::Text(generate_text(text_len as usize));
+        InputPropertyValue::<Runtime>::Single(text_value)
+    }
+
+    pub fn single_text_to_hash(text_len: TextMaxLength) -> InputPropertyValue<Runtime> {
+        let text_value = InputValue::<Runtime>::TextToHash(generate_text(text_len as usize));
+        InputPropertyValue::<Runtime>::Single(text_value)
+    }
+}
+
+impl From<InboundReferenceCounter> for EntityReferenceCounterSideEffect {
+    fn from(inbound_rc: InboundReferenceCounter) -> Self {
+        Self {
+            total: inbound_rc.total as i32,
+            same_owner: inbound_rc.same_owner as i32,
+        }
+    }
+}
+
+impl EntityReferenceCounterSideEffect {
+    pub fn new(total: i32, same_owner: i32) -> Self {
+        Self { total, same_owner }
+    }
+}
+
+impl PropertyLockingPolicy {
+    pub fn new(is_locked_from_maintainer: bool, is_locked_from_controller: bool) -> Self {
+        Self {
+            is_locked_from_maintainer,
+            is_locked_from_controller,
+        }
+    }
+}
+
+// Assign back to type variables so we can make dispatched calls of these modules later.
+pub type System = system::Module<Runtime>;
+pub type TestModule = Module<Runtime>;
+
+// Recommendation from Parity on testing on_finalize
+// https://substrate.dev/docs/en/next/development/module/tests
+pub fn run_to_block(n: u64) {
+    while System::block_number() < n {
+        <System as OnFinalize<u64>>::on_finalize(System::block_number());
+        <TestModule as OnFinalize<u64>>::on_finalize(System::block_number());
+        System::set_block_number(System::block_number() + 1);
+        <System as OnInitialize<u64>>::on_initialize(System::block_number());
+        <TestModule as OnInitialize<u64>>::on_initialize(System::block_number());
+    }
+}

+ 141 - 0
runtime-modules/content-directory/src/operations.rs

@@ -0,0 +1,141 @@
+use crate::{Error, InputPropertyValue, InputValue, PropertyId, SchemaId, Trait, VecInputValue};
+use codec::{Decode, Encode};
+use sp_std::collections::btree_map::BTreeMap;
+use sp_std::prelude::*;
+
+/// Parametrized entity property value
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub enum ParametrizedPropertyValue<T: Trait> {
+    /// Same fields as normal InputPropertyValue
+    InputPropertyValue(InputPropertyValue<T>),
+
+    /// This is the index of an operation creating an entity in the transaction/batch operations
+    InternalEntityJustAdded(u32), // should really be usize but it doesn't have Encode/Decode support
+
+    /// Vector of mix of Entities already existing and just added in a recent operation
+    InternalEntityVec(Vec<ParameterizedEntity<T>>),
+}
+
+/// Parametrized entity
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub enum ParameterizedEntity<T: Trait> {
+    InternalEntityJustAdded(u32),
+    ExistingEntity(T::EntityId),
+}
+
+/// Parametrized class property value
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct ParametrizedClassPropertyValue<T: Trait> {
+    /// Index is into properties vector of class.
+    pub in_class_index: PropertyId,
+
+    /// InputValue of property with index `in_class_index` in a given class.
+    pub value: ParametrizedPropertyValue<T>,
+}
+
+/// Operation, that represents `Entity` creation
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct CreateEntityOperation<T: Trait> {
+    /// Class of an Entity
+    pub class_id: T::ClassId,
+}
+
+/// Operation, that represents property values update
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct UpdatePropertyValuesOperation<T: Trait> {
+    /// Entity id to perform the operation on
+    pub entity_id: ParameterizedEntity<T>,
+    /// Property values, that should be updated
+    pub new_parametrized_property_values: Vec<ParametrizedClassPropertyValue<T>>,
+}
+
+/// Operation, that represents adding `Entity` `Schema` support
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct AddSchemaSupportToEntityOperation<T: Trait> {
+    /// Entity id to perform the operation on
+    pub entity_id: ParameterizedEntity<T>,
+    /// Schema id defined on `Class` level to be added to the `Entity`
+    pub schema_id: SchemaId,
+    /// Property values, that should be added for the underlying schema_id
+    pub parametrized_property_values: Vec<ParametrizedClassPropertyValue<T>>,
+}
+
+/// The type of operation performed
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub enum OperationType<T: Trait> {
+    CreateEntity(CreateEntityOperation<T>),
+    UpdatePropertyValues(UpdatePropertyValuesOperation<T>),
+    AddSchemaSupportToEntity(AddSchemaSupportToEntityOperation<T>),
+}
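+
+// `InternalEntityJustAdded(u32)` indices refer to the position of a `CreateEntity` operation
+// within the same `transaction` batch, allowing later operations to reference entities created
+// earlier in the batch; the helpers below resolve such indices via the `created_entities` map.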
+
+impl<T: Trait> core::fmt::Debug for OperationType<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(formatter, "OperationType {:?}", self)
+    }
+}
+
+/// Retrieve entity_id of parametrized `Entity`
+pub fn parametrized_entity_to_entity_id<T: Trait>(
+    created_entities: &BTreeMap<usize, T::EntityId>,
+    entity: ParameterizedEntity<T>,
+) -> Result<T::EntityId, Error<T>> {
+    match entity {
+        ParameterizedEntity::ExistingEntity(entity_id) => Ok(entity_id),
+        ParameterizedEntity::InternalEntityJustAdded(op_index_u32) => {
+            let op_index = op_index_u32 as usize;
+            Ok(*created_entities
+                .get(&op_index)
+                .ok_or(Error::<T>::EntityNotCreatedByOperation)?)
+        }
+    }
+}
+
+/// Convert parametrized property values into property values
+pub fn parametrized_property_values_to_property_values<T: Trait>(
+    created_entities: &BTreeMap<usize, T::EntityId>,
+    parametrized_property_values: Vec<ParametrizedClassPropertyValue<T>>,
+) -> Result<BTreeMap<PropertyId, InputPropertyValue<T>>, Error<T>> {
+    let mut class_property_values = BTreeMap::new();
+
+    for parametrized_class_property_value in parametrized_property_values.into_iter() {
+        let property_value = match parametrized_class_property_value.value {
+            ParametrizedPropertyValue::InputPropertyValue(value) => value,
+            ParametrizedPropertyValue::InternalEntityJustAdded(
+                entity_created_in_operation_index,
+            ) => {
+                // Verify that the referenced entity was indeed created
+                let op_index = entity_created_in_operation_index as usize;
+                let entity_id = created_entities
+                    .get(&op_index)
+                    .ok_or(Error::<T>::EntityNotCreatedByOperation)?;
+                InputPropertyValue::Single(InputValue::Reference(*entity_id))
+            }
+            ParametrizedPropertyValue::InternalEntityVec(parametrized_entities) => {
+                let mut entities: Vec<T::EntityId> = vec![];
+
+                for parametrized_entity in parametrized_entities.into_iter() {
+                    match parametrized_entity {
+                        ParameterizedEntity::ExistingEntity(id) => entities.push(id),
+                        ParameterizedEntity::InternalEntityJustAdded(
+                            entity_created_in_operation_index,
+                        ) => {
+                            let op_index = entity_created_in_operation_index as usize;
+                            let entity_id = created_entities
+                                .get(&op_index)
+                                .ok_or(Error::<T>::EntityNotCreatedByOperation)?;
+                            entities.push(*entity_id);
+                        }
+                    }
+                }
+                InputPropertyValue::Vector(VecInputValue::Reference(entities))
+            }
+        };
+
+        class_property_values.insert(
+            parametrized_class_property_value.in_class_index,
+            property_value,
+        );
+    }
+
+    Ok(class_property_values)
+}

+ 127 - 0
runtime-modules/content-directory/src/permissions.rs

@@ -0,0 +1,127 @@
+mod class;
+mod curator_group;
+mod entity;
+mod entity_creation_voucher;
+
+pub use class::*;
+pub use curator_group::*;
+pub use entity::*;
+pub use entity_creation_voucher::*;
+
+pub use crate::errors::*;
+use crate::*;
+pub use codec::{Codec, Decode, Encode};
+use core::fmt::Debug;
+use frame_support::{ensure, Parameter};
+#[cfg(feature = "std")]
+pub use serde::{Deserialize, Serialize};
+use sp_arithmetic::traits::BaseArithmetic;
+use sp_runtime::traits::{MaybeSerializeDeserialize, Member};
+
+/// Model of authentication manager.
+pub trait ActorAuthenticator: system::Trait {
+    /// Curator identifier
+    type CuratorId: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord;
+
+    /// Member identifier
+    type MemberId: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord;
+
+    /// Curator group identifier
+    type CuratorGroupId: Parameter
+        + Member
+        + BaseArithmetic
+        + Codec
+        + Default
+        + Copy
+        + Clone
+        + MaybeSerializeDeserialize
+        + Eq
+        + PartialEq
+        + Ord;
+
+    /// Authorize actor as lead
+    fn is_lead(account_id: &Self::AccountId) -> bool;
+
+    /// Authorize actor as curator
+    fn is_curator(curator_id: &Self::CuratorId, account_id: &Self::AccountId) -> bool;
+
+    /// Authorize actor as member
+    fn is_member(member_id: &Self::MemberId, account_id: &Self::AccountId) -> bool;
+}
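+
+// The test mock (`mock.rs`, included earlier in this changeset) implements these checks against
+// fixed test origins; a production runtime is expected to supply its own implementation.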
+
+/// Ensure curator authorization performed successfully
+pub fn ensure_curator_auth_success<T: Trait>(
+    curator_id: &T::CuratorId,
+    account_id: &T::AccountId,
+) -> Result<(), Error<T>> {
+    ensure!(
+        T::is_curator(curator_id, account_id),
+        Error::<T>::CuratorAuthFailed
+    );
+    Ok(())
+}
+
+/// Ensure member authorization performed successfully
+pub fn ensure_member_auth_success<T: Trait>(
+    member_id: &T::MemberId,
+    account_id: &T::AccountId,
+) -> Result<(), Error<T>> {
+    ensure!(
+        T::is_member(member_id, account_id),
+        Error::<T>::MemberAuthFailed
+    );
+    Ok(())
+}
+
+/// Ensure lead authorization performed successfully
+pub fn ensure_lead_auth_success<T: Trait>(account_id: &T::AccountId) -> Result<(), Error<T>> {
+    ensure!(T::is_lead(account_id), Error::<T>::LeadAuthFailed);
+    Ok(())
+}
+
+/// Ensure given `Origin` is lead
+pub fn ensure_is_lead<T: Trait>(origin: T::Origin) -> DispatchResult {
+    let account_id = ensure_signed(origin)?;
+    Ok(ensure_lead_auth_success::<T>(&account_id)?)
+}
+
+/// Enum, representing all possible `Actor`s
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Eq, PartialEq, Clone, Copy)]
+pub enum Actor<T: Trait> {
+    Curator(T::CuratorGroupId, T::CuratorId),
+    Member(T::MemberId),
+    Lead,
+}
+
+impl<T: Trait> Default for Actor<T> {
+    fn default() -> Self {
+        Self::Lead
+    }
+}
+
+impl<T: Trait> core::fmt::Debug for Actor<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(formatter, "Actor {:?}", self)
+    }
+}

+ 158 - 0
runtime-modules/content-directory/src/permissions/class.rs

@@ -0,0 +1,158 @@
+use super::*;
+
+/// Permissions for an instance of a `Class` in the versioned store.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct ClassPermissions<T: Trait> {
+    /// For this permission, the individual member is allowed to create the entity and become controller.
+    any_member: bool,
+
+    /// Whether to prevent everyone from creating an entity.
+    ///
+    /// This could be useful in order to quickly, and possibly temporarily, block new entity creation, without
+    /// having to tear down `can_create_entities`.
+    entity_creation_blocked: bool,
+
+    /// Whether to prevent everyone from updating entity properties.
+    ///
+    /// This could be useful in order to quickly, and probably temporarily, block any editing of entities,
+    /// rather than, for example, having to set and later clear locks on individual properties.
+    all_entity_property_values_locked: bool,
+
+    /// Current class maintainer curator groups
+    maintainers: BTreeSet<T::CuratorGroupId>,
+}
+
+impl<T: Trait> core::fmt::Debug for ClassPermissions<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(formatter, "ClassPermissions {:?}", self)
+    }
+}
+
+impl<T: Trait> Default for ClassPermissions<T> {
+    fn default() -> Self {
+        Self {
+            any_member: false,
+            entity_creation_blocked: false,
+            all_entity_property_values_locked: false,
+            maintainers: BTreeSet::new(),
+        }
+    }
+}
+
+impl<T: Trait> ClassPermissions<T> {
+    /// Retrieve `all_entity_property_values_locked` status
+    pub fn all_entity_property_values_locked(&self) -> bool {
+        self.all_entity_property_values_locked
+    }
+
+    /// Retrieve `any_member` status
+    pub fn any_member_status(&self) -> bool {
+        self.any_member
+    }
+
+    /// Check if given `curator_group_id` is maintainer of current `Class`
+    pub fn is_maintainer(&self, curator_group_id: &T::CuratorGroupId) -> bool {
+        self.maintainers.contains(curator_group_id)
+    }
+
+    /// Get `Class` maintainers by reference
+    pub fn get_maintainers(&self) -> &BTreeSet<T::CuratorGroupId> {
+        &self.maintainers
+    }
+
+    /// Get `Class` maintainers by mutable reference
+    pub fn get_maintainers_mut(&mut self) -> &mut BTreeSet<T::CuratorGroupId> {
+        &mut self.maintainers
+    }
+
+    /// Set `entity_creation_blocked` flag, as provided
+    pub fn set_entity_creation_blocked(&mut self, entity_creation_blocked: bool) {
+        self.entity_creation_blocked = entity_creation_blocked
+    }
+
+    /// Set `all_entity_property_values_locked` flag, as provided
+    pub fn set_all_entity_property_values_locked(
+        &mut self,
+        all_entity_property_values_locked: bool,
+    ) {
+        self.all_entity_property_values_locked = all_entity_property_values_locked
+    }
+
+    /// Set `any_member` flag, as provided
+    pub fn set_any_member_status(&mut self, any_member: bool) {
+        self.any_member = any_member;
+    }
+
+    /// Update `maintainers` set with provided one
+    pub fn set_maintainers(&mut self, maintainers: BTreeSet<T::CuratorGroupId>) {
+        self.maintainers = maintainers
+    }
+
+    /// Ensure provided actor can create entities of current `Class`
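+    /// (the lead can always create; members only when `any_member` is enabled; curators only
+    /// when their group is among the class `maintainers`)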
+    pub fn ensure_can_create_entities(
+        &self,
+        account_id: &T::AccountId,
+        actor: &Actor<T>,
+    ) -> Result<(), Error<T>> {
+        let can_create = match &actor {
+            Actor::Lead => {
+                // Ensure lead authorization performed successfully
+                ensure_lead_auth_success::<T>(account_id)?;
+                true
+            }
+            Actor::Member(member_id) if self.any_member => {
+                // Ensure member authorization performed successfully
+                ensure_member_auth_success::<T>(member_id, account_id)?;
+                true
+            }
+            Actor::Curator(curator_group_id, curator_id)
+                if self.maintainers.contains(curator_group_id) =>
+            {
+                // Authorize curator, performing all checks to ensure curator can act
+                CuratorGroup::<T>::perform_curator_in_group_auth(
+                    curator_id,
+                    curator_group_id,
+                    account_id,
+                )?;
+                true
+            }
+            _ => false,
+        };
+        ensure!(can_create, Error::<T>::ActorCanNotCreateEntities);
+        Ok(())
+    }
+
+    /// Ensure entities creation is not blocked on `Class` level
+    pub fn ensure_entity_creation_not_blocked(&self) -> Result<(), Error<T>> {
+        ensure!(
+            !self.entity_creation_blocked,
+            Error::<T>::EntitiesCreationBlocked
+        );
+        Ok(())
+    }
+
+    /// Ensure the maintainer associated with the given `curator_group_id` is already added to the `maintainers` set
+    pub fn ensure_maintainer_exists(
+        &self,
+        curator_group_id: &T::CuratorGroupId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            self.maintainers.contains(curator_group_id),
+            Error::<T>::MaintainerDoesNotExist
+        );
+        Ok(())
+    }
+
+    /// Ensure the maintainer associated with the given `curator_group_id` is not yet added to the `maintainers` set
+    pub fn ensure_maintainer_does_not_exist(
+        &self,
+        curator_group_id: &T::CuratorGroupId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            !self.maintainers.contains(curator_group_id),
+            Error::<T>::MaintainerAlreadyExists
+        );
+        Ok(())
+    }
+}

+ 116 - 0
runtime-modules/content-directory/src/permissions/curator_group.rs

@@ -0,0 +1,116 @@
+use super::*;
+
+/// A group, that consists of `curators` set
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Eq, PartialEq, Clone)]
+pub struct CuratorGroup<T: Trait> {
+    /// Curators set, associated with a given curator group
+    curators: BTreeSet<T::CuratorId>,
+
+    /// When `false`, curators in a given group are forbidden to act
+    active: bool,
+
+    /// Used to count the number of `Class`(es) a given curator group maintains
+    number_of_classes_maintained: u32,
+}
+
+impl<T: Trait> Default for CuratorGroup<T> {
+    fn default() -> Self {
+        Self {
+            curators: BTreeSet::new(),
+            // default curator group status right after creation
+            active: false,
+            number_of_classes_maintained: 0,
+        }
+    }
+}
+
+impl<T: Trait> CuratorGroup<T> {
+    /// Check if `CuratorGroup` contains curator under given `curator_id`
+    pub fn is_curator(&self, curator_id: &T::CuratorId) -> bool {
+        self.curators.contains(curator_id)
+    }
+
+    /// Check if `CuratorGroup` is active
+    pub fn is_active(&self) -> bool {
+        self.active
+    }
+
+    /// Set `CuratorGroup` status as provided
+    pub fn set_status(&mut self, is_active: bool) {
+        self.active = is_active
+    }
+
+    /// Retrieve set of all curator_ids related to `CuratorGroup` by reference
+    pub fn get_curators(&self) -> &BTreeSet<T::CuratorId> {
+        &self.curators
+    }
+
+    /// Retrieve set of all curator_ids related to `CuratorGroup` by mutable reference
+    pub fn get_curators_mut(&mut self) -> &mut BTreeSet<T::CuratorId> {
+        &mut self.curators
+    }
+
+    /// Increment number of classes `CuratorGroup` maintains
+    pub fn increment_number_of_classes_maintained_count(&mut self) {
+        self.number_of_classes_maintained += 1;
+    }
+
+    /// Decrement number of classes `CuratorGroup` maintains
+    pub fn decrement_number_of_classes_maintained_count(&mut self) {
+        self.number_of_classes_maintained -= 1;
+    }
+
+    /// Ensure curator group does not maintain any `Class`
+    pub fn ensure_curator_group_maintains_no_classes(&self) -> Result<(), Error<T>> {
+        ensure!(
+            self.number_of_classes_maintained == 0,
+            Error::<T>::CuratorGroupRemovalForbidden
+        );
+        Ok(())
+    }
+
+    /// Ensure `MaxNumberOfCuratorsPerGroup` constraint satisfied
+    pub fn ensure_max_number_of_curators_limit_not_reached(&self) -> Result<(), Error<T>> {
+        ensure!(
+            self.curators.len() < T::MaxNumberOfCuratorsPerGroup::get() as usize,
+            Error::<T>::CuratorsPerGroupLimitReached
+        );
+        Ok(())
+    }
+
+    /// Ensure curator under given `curator_id` exists in `CuratorGroup`
+    pub fn ensure_curator_in_group_exists(
+        &self,
+        curator_id: &T::CuratorId,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            self.get_curators().contains(curator_id),
+            Error::<T>::CuratorIsNotAMemberOfGivenCuratorGroup
+        );
+        Ok(())
+    }
+
+    /// Authorize curator, performing all checks to ensure curator can act
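+    /// (the account must authenticate as the given curator, the curator group must be active,
+    /// and the curator must belong to that group)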
+    pub fn perform_curator_in_group_auth(
+        curator_id: &T::CuratorId,
+        curator_group_id: &T::CuratorGroupId,
+        account_id: &T::AccountId,
+    ) -> Result<(), Error<T>> {
+        // Ensure curator authorization performed successfully
+        ensure_curator_auth_success::<T>(curator_id, account_id)?;
+
+        // Retrieve corresponding curator group
+        let curator_group = Module::<T>::curator_group_by_id(curator_group_id);
+
+        // Ensure curator group is active
+        ensure!(
+            curator_group.is_active(),
+            Error::<T>::CuratorGroupIsNotActive
+        );
+
+        // Ensure curator under given curator_id exists in CuratorGroup
+        Self::ensure_curator_in_group_exists(&curator_group, curator_id)?;
+        Ok(())
+    }
+}

+ 176 - 0
runtime-modules/content-directory/src/permissions/entity.rs

@@ -0,0 +1,176 @@
+use super::*;
+
+/// Owner of an `Entity`.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq)]
+pub enum EntityController<T: Trait> {
+    Maintainers,
+    Member(T::MemberId),
+    Lead,
+}
+
+impl<T: Trait> EntityController<T> {
+    /// Create `EntityController` enum representation, using provided `Actor`
+    pub fn from_actor(actor: &Actor<T>) -> Self {
+        match &actor {
+            Actor::Lead => Self::Lead,
+            Actor::Member(member_id) => Self::Member(*member_id),
+            Actor::Curator(_, _) => Self::Maintainers,
+        }
+    }
+}
+
+impl<T: Trait> Default for EntityController<T> {
+    fn default() -> Self {
+        Self::Lead
+    }
+}
+
+impl<T: Trait> core::fmt::Debug for EntityController<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(formatter, "EntityController {:?}", self)
+    }
+}
+
+/// Permissions for a given entity.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub struct EntityPermissions<T: Trait> {
+    /// Current controller, which is initially set based on who created entity
+    pub controller: EntityController<T>,
+
+    /// Forbid groups to mutate any property value.
+    /// Can be useful to use in concert with some curation censorship policy
+    pub frozen: bool,
+
+    /// Whether this entity may be referenced by other entities (including self-references).
+    /// Setting it to `false` can be useful in concert with some curation censorship policy,
+    /// e.g. to block content from being included in some public playlist.
+    pub referenceable: bool,
+}
+
+impl<T: Trait> Default for EntityPermissions<T> {
+    fn default() -> Self {
+        Self {
+            controller: EntityController::<T>::default(),
+            frozen: false,
+            referenceable: true,
+        }
+    }
+}
+
+impl<T: Trait> EntityPermissions<T> {
+    /// Create an instance of `EntityPermissions` with `EntityController` equal to provided one
+    pub fn default_with_controller(controller: EntityController<T>) -> Self {
+        Self {
+            controller,
+            ..EntityPermissions::default()
+        }
+    }
+
+    /// Set current `controller` as provided
+    pub fn set_conroller(&mut self, controller: EntityController<T>) {
+        self.controller = controller
+    }
+
+    /// Check if inner `controller` is equal to the provided one
+    pub fn controller_is_equal_to(&self, new_entity_controller: &EntityController<T>) -> bool {
+        self.controller == *new_entity_controller
+    }
+
+    /// Set `frozen` flag as provided
+    pub fn set_frozen(&mut self, frozen: bool) {
+        self.frozen = frozen
+    }
+
+    /// Set `referenceable` flag as provided
+    pub fn set_referencable(&mut self, referenceable: bool) {
+        self.referenceable = referenceable;
+    }
+
+    /// Retrieve `referenceable` flag
+    pub fn is_referancable(&self) -> bool {
+        self.referenceable
+    }
+
+    /// Get current `controller` by reference
+    pub fn get_controller(&self) -> &EntityController<T> {
+        &self.controller
+    }
+
+    /// Ensure actor with given `EntityAccessLevel` can remove entity
+    pub fn ensure_group_can_remove_entity(access_level: EntityAccessLevel) -> Result<(), Error<T>> {
+        match access_level {
+            EntityAccessLevel::EntityController => Ok(()),
+            EntityAccessLevel::EntityControllerAndMaintainer => Ok(()),
+            _ => Err(Error::<T>::EntityRemovalAccessDenied),
+        }
+    }
+
+    /// Ensure provided new_entity_controller is not equal to current one
+    pub fn ensure_controllers_are_not_equal(
+        &self,
+        new_entity_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            !self.controller_is_equal_to(new_entity_controller),
+            Error::<T>::ProvidedEntityControllerIsEqualToTheCurrentOne
+        );
+        Ok(())
+    }
+}
+
+/// Type derived from a dispatchable call, identifying the caller's access level
+#[derive(Encode, Decode, Eq, PartialEq, Ord, PartialOrd, Clone, Copy)]
+pub enum EntityAccessLevel {
+    /// Caller identified as the entity maintainer
+    EntityMaintainer,
+
+    /// Caller identified as the entity controller
+    EntityController,
+
+    /// Caller, that can act as controller and maintainer simultaneously
+    /// (can be useful when controller and maintainer have features that do not intersect)
+    EntityControllerAndMaintainer,
+}
+
+impl EntityAccessLevel {
+    /// Derives the `EntityAccessLevel` for the actor attempting to act.
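+    /// Lead and member actors obtain `EntityController` access only when they control the entity.
+    /// For curators the result depends on whether their group controls the entity and whether it
+    /// maintains the class: both => `EntityControllerAndMaintainer`, maintainer only =>
+    /// `EntityMaintainer`, otherwise access is denied.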
+    pub fn derive<T: Trait>(
+        account_id: &T::AccountId,
+        entity_permissions: &EntityPermissions<T>,
+        class_permissions: &ClassPermissions<T>,
+        actor: &Actor<T>,
+    ) -> Result<Self, Error<T>> {
+        let controller = EntityController::<T>::from_actor(actor);
+        match actor {
+            Actor::Lead if entity_permissions.controller_is_equal_to(&controller) => {
+                // Ensure lead authorization performed successfully
+                ensure_lead_auth_success::<T>(account_id).map(|_| Self::EntityController)
+            }
+            Actor::Member(member_id) if entity_permissions.controller_is_equal_to(&controller) => {
+                // Ensure member authorization performed successfully
+                ensure_member_auth_success::<T>(member_id, account_id)
+                    .map(|_| Self::EntityController)
+            }
+            Actor::Curator(curator_group_id, curator_id) => {
+                // Authorize curator, performing all checks to ensure curator can act
+                CuratorGroup::<T>::perform_curator_in_group_auth(
+                    curator_id,
+                    curator_group_id,
+                    account_id,
+                )?;
+                match (
+                    entity_permissions.controller_is_equal_to(&controller),
+                    class_permissions.is_maintainer(curator_group_id),
+                ) {
+                    (true, true) => Ok(Self::EntityControllerAndMaintainer),
+                    (false, true) => Ok(Self::EntityMaintainer),
+                    // A curator group that does not maintain the class gets no access, even if it controls the entity
+                    _ => Err(Error::<T>::EntityAccessDenied),
+                }
+            }
+            _ => Err(Error::<T>::EntityAccessDenied),
+        }
+    }
+}

+ 56 - 0
runtime-modules/content-directory/src/permissions/entity_creation_voucher.rs

@@ -0,0 +1,56 @@
+use super::*;
+
+/// A voucher for `Entity` creation
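+/// (vouchers are tracked per `Class` and `EntityController` pair)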
+#[derive(Encode, Decode, Clone, Copy, Debug, PartialEq, Eq)]
+pub struct EntityCreationVoucher<T: Trait> {
+    /// How many are allowed in total
+    pub maximum_entities_count: T::EntityId,
+
+    /// How many have currently been created
+    pub entities_created: T::EntityId,
+}
+
+impl<T: Trait> Default for EntityCreationVoucher<T> {
+    fn default() -> Self {
+        Self {
+            maximum_entities_count: T::EntityId::zero(),
+            entities_created: T::EntityId::zero(),
+        }
+    }
+}
+
+impl<T: Trait> EntityCreationVoucher<T> {
+    /// Create a new instance of `EntityCreationVoucher` with specified limit
+    pub fn new(maximum_entities_count: T::EntityId) -> Self {
+        Self {
+            maximum_entities_count,
+            entities_created: T::EntityId::zero(),
+        }
+    }
+
+    /// Set new `maximum_entities_count` limit
+    pub fn set_maximum_entities_count(&mut self, maximum_entities_count: T::EntityId) {
+        self.maximum_entities_count = maximum_entities_count
+    }
+
+    /// Increase `entities_created` by 1
+    pub fn increment_created_entities_count(&mut self) {
+        self.entities_created += T::EntityId::one();
+    }
+
+    /// Decrease `entities_created` by 1
+    pub fn decrement_created_entities_count(&mut self) {
+        self.entities_created -= T::EntityId::one();
+    }
+
+    /// Check if `entities_created` is less than `maximum_entities_count` limit set to this `EntityCreationVoucher`
+    pub fn limit_not_reached(&self) -> bool {
+        self.entities_created < self.maximum_entities_count
+    }
+
+    /// Ensure voucher limit not reached
+    pub fn ensure_voucher_limit_not_reached(&self) -> Result<(), Error<T>> {
+        ensure!(self.limit_not_reached(), Error::<T>::VoucherLimitReached);
+        Ok(())
+    }
+}

+ 91 - 0
runtime-modules/content-directory/src/schema.rs

@@ -0,0 +1,91 @@
+mod convert;
+mod input;
+mod output;
+mod property;
+
+pub use convert::*;
+pub use input::*;
+pub use output::*;
+pub use property::*;
+
+pub use crate::{permissions::EntityAccessLevel, *};
+pub use codec::{Decode, Encode};
+use core::ops::Deref;
+#[cfg(feature = "std")]
+pub use serde::{Deserialize, Serialize};
+
+/// Type identifier for a schema
+pub type SchemaId = u16;
+
+/// A schema defines what properties describe an entity
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub struct Schema {
+    /// Indices into properties vector for the corresponding class.
+    properties: BTreeSet<PropertyId>,
+    /// If schema can be added to an entity
+    is_active: bool,
+}
+
+impl Default for Schema {
+    fn default() -> Self {
+        Self {
+            properties: BTreeSet::new(),
+            // Default schema status
+            is_active: true,
+        }
+    }
+}
+
+impl Schema {
+    /// Create new schema with provided properties
+    pub fn new(properties: BTreeSet<PropertyId>) -> Self {
+        Self {
+            properties,
+            // Default schema status
+            is_active: true,
+        }
+    }
+
+    /// If `Schema` can be added to an entity
+    pub fn is_active(&self) -> bool {
+        self.is_active
+    }
+
+    /// Ensure schema is in `active` status
+    pub fn ensure_is_active<T: Trait>(&self) -> Result<(), Error<T>> {
+        ensure!(self.is_active, Error::<T>::ClassSchemaNotActive);
+        Ok(())
+    }
+
+    /// Get `Schema` `properties` by reference
+    pub fn get_properties(&self) -> &BTreeSet<PropertyId> {
+        &self.properties
+    }
+
+    /// Ensure keys of provided `property_values` are valid indices of current `Schema`
+    pub fn ensure_has_properties<T: Trait>(
+        &self,
+        property_values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
+    ) -> Result<(), Error<T>> {
+        let property_value_indices: BTreeSet<PropertyId> =
+            property_values.keys().cloned().collect();
+
+        ensure!(
+            property_value_indices.is_subset(&self.properties),
+            Error::<T>::SchemaDoesNotContainProvidedPropertyId
+        );
+
+        Ok(())
+    }
+
+    /// Get `Schema` `properties` by mutable reference
+    pub fn get_properties_mut(&mut self) -> &mut BTreeSet<PropertyId> {
+        &mut self.properties
+    }
+
+    /// Set `Schema`'s `is_active` flag as provided
+    pub fn set_status(&mut self, is_active: bool) {
+        self.is_active = is_active;
+    }
+}
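As a rough illustration of how the `Schema` API above composes, here is a sketch of a hypothetical crate-internal helper (assuming `T: Trait` and the module's `BTreeMap` imports):

```rust
// Hypothetical helper: reject property values that use ids not declared
// by this schema, or that target an inactive schema.
fn check_values_against_schema<T: Trait>(
    schema: &Schema,
    values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
) -> Result<(), Error<T>> {
    schema.ensure_is_active::<T>()?;
    schema.ensure_has_properties::<T>(values)
}
```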

+ 62 - 0
runtime-modules/content-directory/src/schema/convert.rs

@@ -0,0 +1,62 @@
+use super::*;
+use sp_runtime::traits::Hash;
+
+impl<T: Trait> From<InputPropertyValue<T>> for StoredPropertyValue<T> {
+    fn from(input_property_value: InputPropertyValue<T>) -> Self {
+        match input_property_value {
+            InputPropertyValue::Single(input_value) => {
+                StoredPropertyValue::Single(input_value.into())
+            }
+            InputPropertyValue::Vector(vector_input_value) => {
+                let vec_output_property_value =
+                    VecStoredPropertyValue::new(vector_input_value.into(), T::Nonce::default());
+                StoredPropertyValue::Vector(vec_output_property_value)
+            }
+        }
+    }
+}
+
+impl<T: Trait> From<InputValue<T>> for StoredValue<T> {
+    fn from(input_value: InputValue<T>) -> Self {
+        match input_value {
+            InputValue::Bool(value) => StoredValue::Bool(value),
+            InputValue::Uint16(value) => StoredValue::Uint16(value),
+            InputValue::Uint32(value) => StoredValue::Uint32(value),
+            InputValue::Uint64(value) => StoredValue::Uint64(value),
+            InputValue::Int16(value) => StoredValue::Int16(value),
+            InputValue::Int32(value) => StoredValue::Int32(value),
+            InputValue::Int64(value) => StoredValue::Int64(value),
+            InputValue::Text(value) => StoredValue::Text(value),
+
+            InputValue::TextToHash(value) => {
+                let hash_value = value.using_encoded(<T as system::Trait>::Hashing::hash);
+                StoredValue::Hash(hash_value)
+            }
+            InputValue::Reference(value) => StoredValue::Reference(value),
+        }
+    }
+}
+
+impl<T: Trait> From<VecInputValue<T>> for VecStoredValue<T> {
+    fn from(vec_input_value: VecInputValue<T>) -> Self {
+        match vec_input_value {
+            VecInputValue::Bool(vec_value) => VecStoredValue::Bool(vec_value),
+            VecInputValue::Uint16(vec_value) => VecStoredValue::Uint16(vec_value),
+            VecInputValue::Uint32(vec_value) => VecStoredValue::Uint32(vec_value),
+            VecInputValue::Uint64(vec_value) => VecStoredValue::Uint64(vec_value),
+            VecInputValue::Int16(vec_value) => VecStoredValue::Int16(vec_value),
+            VecInputValue::Int32(vec_value) => VecStoredValue::Int32(vec_value),
+            VecInputValue::Int64(vec_value) => VecStoredValue::Int64(vec_value),
+            VecInputValue::Text(vec_value) => VecStoredValue::Text(vec_value),
+
+            VecInputValue::TextToHash(vec_value) => {
+                let hash_vec_value: Vec<_> = vec_value
+                    .into_iter()
+                    .map(|value| value.using_encoded(<T as system::Trait>::Hashing::hash))
+                    .collect();
+                VecStoredValue::Hash(hash_vec_value)
+            }
+            VecInputValue::Reference(value) => VecStoredValue::Reference(value),
+        }
+    }
+}
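The conversions above are consumed through `From`/`Into`; a minimal sketch (hypothetical helper, assuming `T: Trait`):

```rust
// Hypothetical helper: turn a submitted input value into its stored form.
// TextToHash inputs are hashed during the conversion; vector inputs start
// with a default nonce.
fn to_stored<T: Trait>(input: InputPropertyValue<T>) -> StoredPropertyValue<T> {
    input.into()
}
```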

+ 137 - 0
runtime-modules/content-directory/src/schema/input.rs

@@ -0,0 +1,137 @@
+use super::*;
+
+/// Enum, representing either a single `InputValue` or a `VecInputValue`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub enum InputPropertyValue<T: Trait> {
+    Single(InputValue<T>),
+    Vector(VecInputValue<T>),
+}
+
+impl<T: Trait> core::fmt::Debug for InputPropertyValue<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        // Avoid `{:?}` on `self` here: it would call this impl again recursively
+        write!(formatter, "InputPropertyValue")
+    }
+}
+
+impl<T: Trait> InputPropertyValue<T> {
+    pub fn as_single_value(&self) -> Option<&InputValue<T>> {
+        if let InputPropertyValue::Single(single_value) = self {
+            Some(single_value)
+        } else {
+            None
+        }
+    }
+
+    pub fn as_vec_value(&self) -> Option<&VecInputValue<T>> {
+        if let InputPropertyValue::Vector(vec_value) = self {
+            Some(vec_value)
+        } else {
+            None
+        }
+    }
+
+    pub fn as_vec_value_mut(&mut self) -> Option<&mut VecInputValue<T>> {
+        if let InputPropertyValue::Vector(vec_value) = self {
+            Some(vec_value)
+        } else {
+            None
+        }
+    }
+
+    /// Retrieve all involved `entity_id`s, if the current `InputPropertyValue` is a reference
+    pub fn get_involved_entities(&self) -> Option<Vec<T::EntityId>> {
+        match self {
+            InputPropertyValue::Single(single_property_value) => {
+                if let Some(entity_id) = single_property_value.get_involved_entity() {
+                    Some(vec![entity_id])
+                } else {
+                    None
+                }
+            }
+            InputPropertyValue::Vector(vector_property_value) => {
+                vector_property_value.get_involved_entities()
+            }
+        }
+    }
+}
+
+impl<T: Trait> Default for InputPropertyValue<T> {
+    fn default() -> Self {
+        InputPropertyValue::Single(InputValue::default())
+    }
+}
+
+/// InputValue enum representation, related to the `Single` variant of `InputPropertyValue`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub enum InputValue<T: Trait> {
+    Bool(bool),
+    Uint16(u16),
+    Uint32(u32),
+    Uint64(u64),
+    Int16(i16),
+    Int32(i32),
+    Int64(i64),
+    Text(Vec<u8>),
+    // Used to pass a text value whose hash should be stored
+    TextToHash(Vec<u8>),
+    Reference(T::EntityId),
+}
+
+impl<T: Trait> core::fmt::Debug for InputValue<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> sp_std::fmt::Result {
+        // Avoid `{:?}` on `self` here: it would call this impl again recursively
+        write!(formatter, "InputValue")
+    }
+}
+
+impl<T: Trait> Default for InputValue<T> {
+    fn default() -> InputValue<T> {
+        Self::Bool(false)
+    }
+}
+
+impl<T: Trait> InputValue<T> {
+    /// Retrieve the involved `entity_id`, if the current `InputValue` is a reference
+    pub fn get_involved_entity(&self) -> Option<T::EntityId> {
+        if let InputValue::Reference(entity_id) = self {
+            Some(*entity_id)
+        } else {
+            None
+        }
+    }
+}
+
+/// Vector value enum representation
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub enum VecInputValue<T: Trait> {
+    Bool(Vec<bool>),
+    Uint16(Vec<u16>),
+    Uint32(Vec<u32>),
+    Uint64(Vec<u64>),
+    Int16(Vec<i16>),
+    Int32(Vec<i32>),
+    Int64(Vec<i64>),
+    // Used to pass a vector of text values whose hashes should be stored
+    TextToHash(Vec<Vec<u8>>),
+    Text(Vec<Vec<u8>>),
+    Reference(Vec<T::EntityId>),
+}
+
+impl<T: Trait> Default for VecInputValue<T> {
+    fn default() -> Self {
+        Self::Bool(vec![])
+    }
+}
+
+impl<T: Trait> VecInputValue<T> {
+    /// Retrieve all involved `entity_id`s, if the current `VecInputValue` is a reference
+    pub fn get_involved_entities(&self) -> Option<Vec<T::EntityId>> {
+        if let Self::Reference(entity_ids) = self {
+            Some(entity_ids.to_owned())
+        } else {
+            None
+        }
+    }
+}
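A sketch of how the reference accessors above might be used to gather every referenced entity from a batch of input values (hypothetical helper, assuming `T: Trait` and the module's `BTreeMap` imports):

```rust
// Hypothetical helper: collect all entity ids referenced by a set of
// input property values, e.g. for reference-count bookkeeping.
fn referenced_entities<T: Trait>(
    values: &BTreeMap<PropertyId, InputPropertyValue<T>>,
) -> Vec<T::EntityId> {
    values
        .values()
        .filter_map(|value| value.get_involved_entities())
        .flatten()
        .collect()
}
```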

+ 309 - 0
runtime-modules/content-directory/src/schema/output.rs

@@ -0,0 +1,309 @@
+use super::*;
+use sp_runtime::traits::Hash;
+
+/// Enum, representing either `StoredValue` or `VecStoredPropertyValue`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub enum StoredPropertyValue<T: Trait> {
+    Single(StoredValue<T>),
+    Vector(VecStoredPropertyValue<T>),
+}
+
+impl<T: Trait> StoredPropertyValue<T> {
+    /// Returns single property value by reference if `StoredPropertyValue` is Single
+    pub fn as_single_value(&self) -> Option<&StoredValue<T>> {
+        if let StoredPropertyValue::Single(single_value) = self {
+            Some(single_value)
+        } else {
+            None
+        }
+    }
+
+    /// Returns vector property value by reference if `StoredPropertyValue` is Vector
+    pub fn as_vec_property_value(&self) -> Option<&VecStoredPropertyValue<T>> {
+        if let StoredPropertyValue::Vector(vec_property_value) = self {
+            Some(vec_property_value)
+        } else {
+            None
+        }
+    }
+
+    /// Returns vector property value by mutable reference if `StoredPropertyValue` is Vector
+    pub fn as_vec_property_value_mut(&mut self) -> Option<&mut VecStoredPropertyValue<T>> {
+        if let StoredPropertyValue::Vector(vec_property_value) = self {
+            Some(vec_property_value)
+        } else {
+            None
+        }
+    }
+
+    /// Update `Self` with provided `StoredPropertyValue`
+    pub fn update(&mut self, mut new_value: Self) {
+        if let (Some(vec_property_value), Some(new_vec_property_value)) = (
+            self.as_vec_property_value_mut(),
+            new_value.as_vec_property_value_mut(),
+        ) {
+            new_vec_property_value.nonce = vec_property_value.nonce;
+        }
+        *self = new_value
+    }
+
+    /// Retrieve all involved `entity_id`s, if the current `StoredPropertyValue` is a reference
+    pub fn get_involved_entities(&self) -> Option<Vec<T::EntityId>> {
+        match self {
+            StoredPropertyValue::Single(single_property_value) => {
+                if let Some(entity_id) = single_property_value.get_involved_entity() {
+                    Some(vec![entity_id])
+                } else {
+                    None
+                }
+            }
+            StoredPropertyValue::Vector(vector_property_value) => vector_property_value
+                .get_vec_value_ref()
+                .get_involved_entities(),
+        }
+    }
+
+    /// Compute hash from unique property value and its respective property_id
+    pub fn compute_unique_hash(&self, property_id: PropertyId) -> T::Hash {
+        match self {
+            StoredPropertyValue::Single(output_value) => {
+                (property_id, output_value).using_encoded(<T as system::Trait>::Hashing::hash)
+            }
+            StoredPropertyValue::Vector(vector_output_value) => {
+                vector_output_value.compute_unique_hash(property_id)
+            }
+        }
+    }
+}
+
+impl<T: Trait> Default for StoredPropertyValue<T> {
+    fn default() -> Self {
+        StoredPropertyValue::Single(StoredValue::default())
+    }
+}
+
+/// StoredValue enum representation, related to the `Single` variant of `StoredPropertyValue`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Hash, Clone, PartialEq, PartialOrd, Ord, Eq)]
+pub enum StoredValue<T: Trait> {
+    Bool(bool),
+    Uint16(u16),
+    Uint32(u32),
+    Uint64(u64),
+    Int16(i16),
+    Int32(i32),
+    Int64(i64),
+    Text(Vec<u8>),
+    Hash(T::Hash),
+    Reference(T::EntityId),
+}
+
+impl<T: Trait> Default for StoredValue<T> {
+    fn default() -> StoredValue<T> {
+        Self::Bool(false)
+    }
+}
+
+impl<T: Trait> StoredValue<T> {
+    /// Retrieve the involved `entity_id`, if the current `StoredValue` is a reference
+    pub fn get_involved_entity(&self) -> Option<T::EntityId> {
+        if let StoredValue::Reference(entity_id) = self {
+            Some(*entity_id)
+        } else {
+            None
+        }
+    }
+}
+
+/// Consists of a `VecStoredValue` and a `nonce`, used to avoid race conditions when updating vector data
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Default, Clone, PartialEq, Eq)]
+pub struct VecStoredPropertyValue<T: Trait> {
+    vec_value: VecStoredValue<T>,
+    nonce: T::Nonce,
+}
+
+impl<T: Trait> VecStoredPropertyValue<T> {
+    /// Compute hash from unique vec property value and its respective property_id
+    pub fn compute_unique_hash(&self, property_id: PropertyId) -> T::Hash {
+        // Do not hash nonce
+        (property_id, &self.vec_value).using_encoded(<T as system::Trait>::Hashing::hash)
+    }
+
+    /// Increase nonce by 1
+    fn increment_nonce(&mut self) -> T::Nonce {
+        self.nonce += T::Nonce::one();
+        self.nonce
+    }
+
+    /// Create new `VecStoredPropertyValue` from the provided `vec_value` and `nonce`
+    pub fn new(vec_value: VecStoredValue<T>, nonce: T::Nonce) -> Self {
+        Self { vec_value, nonce }
+    }
+
+    /// Retrieve `VecStoredValue`
+    pub fn get_vec_value(self) -> VecStoredValue<T> {
+        self.vec_value
+    }
+
+    /// Retrieve `VecStoredValue` by reference
+    pub fn get_vec_value_ref(&self) -> &VecStoredValue<T> {
+        &self.vec_value
+    }
+
+    fn len(&self) -> usize {
+        match &self.vec_value {
+            VecStoredValue::Bool(vec) => vec.len(),
+            VecStoredValue::Uint16(vec) => vec.len(),
+            VecStoredValue::Uint32(vec) => vec.len(),
+            VecStoredValue::Uint64(vec) => vec.len(),
+            VecStoredValue::Int16(vec) => vec.len(),
+            VecStoredValue::Int32(vec) => vec.len(),
+            VecStoredValue::Int64(vec) => vec.len(),
+            VecStoredValue::Text(vec) => vec.len(),
+            VecStoredValue::Hash(vec) => vec.len(),
+            VecStoredValue::Reference(vec) => vec.len(),
+        }
+    }
+
+    /// Clear current `vec_value`
+    pub fn clear(&mut self) {
+        match &mut self.vec_value {
+            VecStoredValue::Bool(vec) => *vec = vec![],
+            VecStoredValue::Uint16(vec) => *vec = vec![],
+            VecStoredValue::Uint32(vec) => *vec = vec![],
+            VecStoredValue::Uint64(vec) => *vec = vec![],
+            VecStoredValue::Int16(vec) => *vec = vec![],
+            VecStoredValue::Int32(vec) => *vec = vec![],
+            VecStoredValue::Int64(vec) => *vec = vec![],
+            VecStoredValue::Text(vec) => *vec = vec![],
+            VecStoredValue::Hash(vec) => *vec = vec![],
+            VecStoredValue::Reference(vec) => *vec = vec![],
+        }
+    }
+
+    /// Perform removal at given `index_in_property_vec`, increment `nonce`
+    pub fn remove_at(&mut self, index_in_property_vec: VecMaxLength) {
+        fn remove_at_checked<T>(vec: &mut Vec<T>, index_in_property_vec: VecMaxLength) {
+            if (index_in_property_vec as usize) < vec.len() {
+                vec.remove(index_in_property_vec as usize);
+            }
+        }
+
+        match &mut self.vec_value {
+            VecStoredValue::Bool(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Uint16(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Uint32(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Uint64(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Int16(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Int32(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Int64(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Text(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Hash(vec) => remove_at_checked(vec, index_in_property_vec),
+            VecStoredValue::Reference(vec) => remove_at_checked(vec, index_in_property_vec),
+        }
+
+        self.increment_nonce();
+    }
+
+    /// Insert provided `StoredValue` at given `index_in_property_vec`, increment `nonce`
+    pub fn insert_at(&mut self, index_in_property_vec: VecMaxLength, single_value: StoredValue<T>) {
+        fn insert_at<T>(vec: &mut Vec<T>, index_in_property_vec: VecMaxLength, value: T) {
+            if (index_in_property_vec as usize) < vec.len() {
+                vec.insert(index_in_property_vec as usize, value);
+            }
+        }
+
+        match (&mut self.vec_value, single_value) {
+            (VecStoredValue::Bool(vec), StoredValue::Bool(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Uint16(vec), StoredValue::Uint16(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Uint32(vec), StoredValue::Uint32(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Uint64(vec), StoredValue::Uint64(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Int16(vec), StoredValue::Int16(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Int32(vec), StoredValue::Int32(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            (VecStoredValue::Int64(vec), StoredValue::Int64(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+
+            // Match by move once https://github.com/rust-lang/rust/issues/68354 stabilizes
+            (VecStoredValue::Text(vec), StoredValue::Text(ref value)) => {
+                insert_at(vec, index_in_property_vec, value.to_owned())
+            }
+            (VecStoredValue::Reference(vec), StoredValue::Reference(value)) => {
+                insert_at(vec, index_in_property_vec, value)
+            }
+            _ => return,
+        }
+
+        self.increment_nonce();
+    }
+
+    /// Ensure `VecStoredPropertyValue` nonce is equal to the provided one.
+    /// Used to avoid possible data races when performing vector-specific operations
+    pub fn ensure_nonce_equality(&self, new_nonce: T::Nonce) -> Result<(), Error<T>> {
+        ensure!(
+            self.nonce == new_nonce,
+            Error::<T>::PropertyValueVecNoncesDoesNotMatch
+        );
+        Ok(())
+    }
+
+    /// Ensure the provided `index_in_property_vec` is a valid index of `VecStoredValue`
+    pub fn ensure_index_in_property_vector_is_valid(
+        &self,
+        index_in_property_vec: VecMaxLength,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            (index_in_property_vec as usize) <= self.len(),
+            Error::<T>::EntityPropertyValueVectorIndexIsOutOfRange
+        );
+
+        Ok(())
+    }
+}
+
+/// Vector value enum representation, related to corresponding `VecStoredPropertyValue` structure
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub enum VecStoredValue<T: Trait> {
+    Bool(Vec<bool>),
+    Uint16(Vec<u16>),
+    Uint32(Vec<u32>),
+    Uint64(Vec<u64>),
+    Int16(Vec<i16>),
+    Int32(Vec<i32>),
+    Int64(Vec<i64>),
+    Hash(Vec<T::Hash>),
+    Text(Vec<Vec<u8>>),
+    Reference(Vec<T::EntityId>),
+}
+
+impl<T: Trait> Default for VecStoredValue<T> {
+    fn default() -> Self {
+        Self::Bool(vec![])
+    }
+}
+
+impl<T: Trait> VecStoredValue<T> {
+    /// Retrieve all involved `entity_id`s, if the current `VecStoredValue` is a reference
+    pub fn get_involved_entities(&self) -> Option<Vec<T::EntityId>> {
+        if let Self::Reference(entity_ids) = self {
+            Some(entity_ids.to_owned())
+        } else {
+            None
+        }
+    }
+}
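The nonce-guarded vector operations above are typically combined as follows (a sketch only; `remove_vec_element_at` is a hypothetical helper, assuming `T: Trait`):

```rust
// Hypothetical helper: remove one element from a stored vector value,
// but only if the caller saw the current nonce (optimistic concurrency check).
fn remove_vec_element_at<T: Trait>(
    value: &mut StoredPropertyValue<T>,
    expected_nonce: T::Nonce,
    index: VecMaxLength,
) -> Result<(), Error<T>> {
    let vec_value = value
        .as_vec_property_value_mut()
        .ok_or(Error::<T>::PropertyValueTypeDoesNotMatchInternalVectorType)?;
    vec_value.ensure_nonce_equality(expected_nonce)?;
    vec_value.ensure_index_in_property_vector_is_valid(index)?;
    vec_value.remove_at(index); // remove_at also bumps the nonce
    Ok(())
}
```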

+ 646 - 0
runtime-modules/content-directory/src/schema/property.rs

@@ -0,0 +1,646 @@
+use super::*;
+
+/// Type identifier for property ids
+pub type PropertyId = u16;
+
+/// Type representing max length of vector property type
+pub type VecMaxLength = u16;
+
+/// Type representing max length of text property type
+pub type TextMaxLength = u16;
+
+/// Type representing the optional max length of a text property that will subsequently be hashed
+pub type HashedTextMaxLength = Option<u16>;
+
+/// Used to force property values to reference only entities owned by the same controller
+type SameController = bool;
+
+/// Locking policy, representing `Property` locking status for both controller and maintainer
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Default, Decode, Clone, Copy, PartialEq, Eq)]
+pub struct PropertyLockingPolicy {
+    /// If property is locked from maintainer
+    pub is_locked_from_maintainer: bool,
+    /// If property is locked from controller
+    pub is_locked_from_controller: bool,
+}
+
+/// Enum, used for `PropertyType` representation
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq)]
+pub enum Type<T: Trait> {
+    Bool,
+    Uint16,
+    Uint32,
+    Uint64,
+    Int16,
+    Int32,
+    Int64,
+    /// Max length of text item.
+    Text(TextMaxLength),
+    Hash(HashedTextMaxLength),
+    /// Can reference only specific class id entities
+    Reference(T::ClassId, SameController),
+}
+
+impl<T: Trait> Default for Type<T> {
+    fn default() -> Self {
+        Self::Bool
+    }
+}
+
+impl<T: Trait> Type<T> {
+    /// Ensure `Type` specific `TextMaxLengthConstraint` or `HashedTextMaxLengthConstraint` satisfied
+    pub fn ensure_property_type_size_is_valid(&self) -> Result<(), Error<T>> {
+        if let Type::Text(text_max_len) = self {
+            ensure!(
+                *text_max_len <= T::TextMaxLengthConstraint::get(),
+                Error::<T>::TextPropertyTooLong
+            );
+        }
+
+        if let Type::Hash(hashed_text_max_len) = self {
+            ensure!(
+                *hashed_text_max_len <= T::HashedTextMaxLengthConstraint::get(),
+                Error::<T>::HashedTextPropertyTooLong
+            );
+        }
+
+        Ok(())
+    }
+}
+
+/// Vector property type representation
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq)]
+pub struct VecPropertyType<T: Trait> {
+    vec_type: Type<T>,
+    /// Max length of vector, corresponding to a given type
+    max_length: VecMaxLength,
+}
+
+impl<T: Trait> Default for VecPropertyType<T> {
+    fn default() -> Self {
+        Self {
+            vec_type: Type::default(),
+            max_length: 0,
+        }
+    }
+}
+
+impl<T: Trait> VecPropertyType<T> {
+    /// Create new `VecPropertyType` from provided `type` and `max_length`
+    pub fn new(vec_type: Type<T>, max_length: VecMaxLength) -> Self {
+        Self {
+            vec_type,
+            max_length,
+        }
+    }
+
+    /// Ensure `Type` specific `TextMaxLengthConstraint` & `VecMaxLengthConstraint` satisfied
+    fn ensure_property_type_size_is_valid(&self) -> Result<(), Error<T>> {
+        // Ensure Type specific TextMaxLengthConstraint or HashedTextMaxLengthConstraint satisfied
+        self.vec_type.ensure_property_type_size_is_valid()?;
+
+        ensure!(
+            self.max_length <= T::VecMaxLengthConstraint::get(),
+            Error::<T>::VecPropertyTooLong
+        );
+        Ok(())
+    }
+
+    fn get_vec_type(&self) -> &Type<T> {
+        &self.vec_type
+    }
+
+    fn get_max_len(&self) -> VecMaxLength {
+        self.max_length
+    }
+}
+
+/// Enum, representing either `Type` or `VecPropertyType`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
+#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq)]
+pub enum PropertyType<T: Trait> {
+    Single(Type<T>),
+    Vector(VecPropertyType<T>),
+}
+
+impl<T: Trait> Default for PropertyType<T> {
+    fn default() -> Self {
+        Self::Single(Type::default())
+    }
+}
+
+impl<T: Trait> PropertyType<T> {
+    fn as_single_value_type(&self) -> Option<&Type<T>> {
+        if let PropertyType::Single(single_value_property_type) = self {
+            Some(single_value_property_type)
+        } else {
+            None
+        }
+    }
+
+    pub fn as_vec_type(&self) -> Option<&VecPropertyType<T>> {
+        if let PropertyType::Vector(vec_value_property_type) = self {
+            Some(vec_value_property_type)
+        } else {
+            None
+        }
+    }
+
+    fn get_inner_type(&self) -> &Type<T> {
+        match self {
+            PropertyType::Single(single_property_type) => single_property_type,
+            PropertyType::Vector(vec_property_type) => vec_property_type.get_vec_type(),
+        }
+    }
+
+    /// Retrieves the `same_controller` flag.
+    /// Always returns false if `Type` is not a reference.
+    pub fn same_controller_status(&self) -> SameController {
+        if let Type::Reference(_, same_controller) = self.get_inner_type() {
+            *same_controller
+        } else {
+            false
+        }
+    }
+}
+
+/// `Property` representation, related to a given `Class`
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Encode, Decode, Clone, PartialEq, Eq)]
+pub struct Property<T: Trait> {
+    /// The type of `Property`
+    pub property_type: PropertyType<T>,
+    /// If property value can be skipped, when adding entity schema support
+    pub required: bool,
+    /// Used to enforce uniqueness of a property across all entities that have this property
+    pub unique: bool,
+    /// Property name
+    pub name: Vec<u8>,
+    /// Property description
+    pub description: Vec<u8>,
+    /// Locking policy, representing `Property` locking status for both controller and maintainer
+    pub locking_policy: PropertyLockingPolicy,
+}
+
+impl<T: Trait> Default for Property<T> {
+    fn default() -> Self {
+        Self {
+            property_type: PropertyType::<T>::default(),
+            required: false,
+            unique: false,
+            name: vec![],
+            description: vec![],
+            locking_policy: PropertyLockingPolicy::default(),
+        }
+    }
+}
+
+impl<T: Trait> core::fmt::Debug for Property<T> {
+    fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        // Avoid `{:?}` on `self` here: it would call this impl again recursively
+        write!(formatter, "Property")
+    }
+}
+
+impl<T: Trait> Property<T> {
+    /// Check if property is locked from actor with provided `EntityAccessLevel`
+    pub fn is_locked_from(&self, access_level: EntityAccessLevel) -> bool {
+        let is_locked_from_controller = self.locking_policy.is_locked_from_controller;
+        let is_locked_from_maintainer = self.locking_policy.is_locked_from_maintainer;
+        match access_level {
+            EntityAccessLevel::EntityControllerAndMaintainer => {
+                is_locked_from_controller && is_locked_from_maintainer
+            }
+            EntityAccessLevel::EntityController => is_locked_from_controller,
+            EntityAccessLevel::EntityMaintainer => is_locked_from_maintainer,
+        }
+    }
+
+    /// Ensure `Property` is unlocked from `Actor` with given `EntityAccessLevel`
+    pub fn ensure_unlocked_from(&self, access_level: EntityAccessLevel) -> Result<(), Error<T>> {
+        ensure!(
+            !self.is_locked_from(access_level),
+            Error::<T>::ClassPropertyTypeLockedForGivenActor
+        );
+        Ok(())
+    }
+
+    /// Validate new `InputPropertyValue` against the type of this `Property`
+    /// and check any additional constraints
+    pub fn ensure_property_value_to_update_is_valid(
+        &self,
+        value: &InputPropertyValue<T>,
+        current_entity_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        // Ensure provided InputPropertyValue matches its Type
+        self.ensure_property_value_matches_its_type(value)?;
+
+        // Perform all required checks to ensure provided InputPropertyValue is valid, when current PropertyType is Reference
+        self.ensure_property_value_is_valid_reference(value, current_entity_controller)?;
+
+        // Ensure text property does not exceed its max length
+        self.validate_max_len_if_text_property(value)?;
+
+        // Ensure vector property does not exceed its max length
+        self.validate_max_len_if_vec_property(value)?;
+        Ok(())
+    }
+
+    /// Ensure the property vector length remains valid after a value is inserted
+    fn validate_property_vector_length_after_value_insert<V>(
+        vec: &[V],
+        max_len: VecMaxLength,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            vec.len() < max_len as usize,
+            Error::<T>::EntityPropertyValueVectorIsTooLong
+        );
+        Ok(())
+    }
+
+    /// Ensure the provided single `InputValue` matches the element type of this vector property
+    /// and check all related constraints
+    pub fn ensure_property_value_can_be_inserted_at_property_vector(
+        &self,
+        single_value: &InputValue<T>,
+        vec_value: &VecStoredPropertyValue<T>,
+        index_in_property_vec: VecMaxLength,
+        current_entity_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        // Ensure the provided index_in_property_vec is a valid index of the stored vector value
+        vec_value.ensure_index_in_property_vector_is_valid(index_in_property_vec)?;
+
+        let property_type_vec = self
+            .property_type
+            .as_vec_type()
+            .ok_or(Error::<T>::PropertyValueTypeDoesNotMatchInternalVectorType)?;
+
+        let max_vec_len = property_type_vec.get_max_len();
+
+        match (
+            single_value,
+            vec_value.get_vec_value_ref(),
+            property_type_vec.get_vec_type(),
+        ) {
+            // Single values
+            (InputValue::Bool(_), VecStoredValue::Bool(vec), Type::Bool) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Uint16(_), VecStoredValue::Uint16(vec), Type::Uint16) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Uint32(_), VecStoredValue::Uint32(vec), Type::Uint32) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Uint64(_), VecStoredValue::Uint64(vec), Type::Uint64) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Int16(_), VecStoredValue::Int16(vec), Type::Int16) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Int32(_), VecStoredValue::Int32(vec), Type::Int32) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Int64(_), VecStoredValue::Int64(vec), Type::Int64) => {
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (InputValue::Text(text_item), VecStoredValue::Text(vec), Type::Text(text_max_len)) => {
+                Self::validate_max_len_of_text(text_item, *text_max_len)?;
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (
+                InputValue::TextToHash(text_item),
+                VecStoredValue::Hash(vec),
+                Type::Hash(text_max_len),
+            ) => {
+                if let Some(text_max_len) = text_max_len {
+                    Self::validate_max_len_of_text_to_be_hashed(text_item, *text_max_len)?;
+                }
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            (
+                InputValue::Reference(entity_id),
+                VecStoredValue::Reference(vec),
+                Type::Reference(class_id, same_controller_status),
+            ) => {
+                // Ensure the Entity under the provided entity_id belongs to the Class
+                // whose class_id is declared in the corresponding PropertyType,
+                // and retrieve that Entity
+                let entity = Self::ensure_referenced_entity_match_its_class(*entity_id, *class_id)?;
+                // Ensure Entity can be referenced.
+                Self::ensure_entity_can_be_referenced(
+                    entity,
+                    *same_controller_status,
+                    current_entity_controller,
+                )?;
+                Self::validate_property_vector_length_after_value_insert(vec, max_vec_len)
+            }
+            _ => Err(Error::<T>::PropertyValueTypeDoesNotMatchInternalVectorType),
+        }
+    }
+
+    /// Ensure text property does not exceed its max len
+    pub fn validate_max_len_if_text_property(
+        &self,
+        value: &InputPropertyValue<T>,
+    ) -> Result<(), Error<T>> {
+        let single_value = value.as_single_value();
+
+        match (single_value, &self.property_type.as_single_value_type()) {
+            (Some(InputValue::Text(text)), Some(Type::Text(text_max_len))) => {
+                Self::validate_max_len_of_text(text, *text_max_len)
+            }
+            (
+                Some(InputValue::TextToHash(text_to_be_hashed)),
+                Some(Type::Hash(Some(text_to_be_hashed_max_len))),
+            ) => Self::validate_max_len_of_text_to_be_hashed(
+                text_to_be_hashed,
+                *text_to_be_hashed_max_len,
+            ),
+            _ => Ok(()),
+        }
+    }
+
+    fn validate_max_len_of_text(text: &[u8], text_max_len: TextMaxLength) -> Result<(), Error<T>> {
+        ensure!(
+            text.len() <= text_max_len as usize,
+            Error::<T>::TextPropertyTooLong
+        );
+        Ok(())
+    }
+
+    fn validate_max_len_of_text_to_be_hashed(
+        text_to_be_hashed: &[u8],
+        text_to_be_hashed_max_len: u16,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            text_to_be_hashed.len() <= text_to_be_hashed_max_len as usize,
+            Error::<T>::HashedTextPropertyTooLong
+        );
+        Ok(())
+    }
+
+    fn validate_vec_len<V>(vec: &[V], max_len: VecMaxLength) -> Result<(), Error<T>> {
+        ensure!(
+            vec.len() <= max_len as usize,
+            Error::<T>::VecPropertyTooLong
+        );
+        Ok(())
+    }
+
+    /// Ensure `VecInputValue` does not exceed its max len
+    pub fn validate_max_len_if_vec_property(
+        &self,
+        value: &InputPropertyValue<T>,
+    ) -> Result<(), Error<T>> {
+        let (vec_value, vec_property_type) = if let (Some(vec_value), Some(vec_property_type)) =
+            (value.as_vec_value(), self.property_type.as_vec_type())
+        {
+            (vec_value, vec_property_type)
+        } else {
+            return Ok(());
+        };
+
+        let max_len = vec_property_type.get_max_len();
+
+        match vec_value {
+            VecInputValue::Bool(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Uint16(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Uint32(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Uint64(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Int16(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Int32(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::Int64(vec) => Self::validate_vec_len(vec, max_len),
+            VecInputValue::TextToHash(vec) => {
+                Self::validate_vec_len(vec, max_len)?;
+                if let Type::Hash(Some(text_to_be_hashed_max_len)) =
+                    vec_property_type.get_vec_type()
+                {
+                    for text_to_be_hashed_item in vec.iter() {
+                        Self::validate_max_len_of_text_to_be_hashed(
+                            text_to_be_hashed_item,
+                            *text_to_be_hashed_max_len,
+                        )?;
+                    }
+                }
+                Ok(())
+            }
+            VecInputValue::Text(vec) => {
+                Self::validate_vec_len(vec, max_len)?;
+                if let Type::Text(text_max_len) = vec_property_type.get_vec_type() {
+                    for text_item in vec.iter() {
+                        Self::validate_max_len_of_text(text_item, *text_max_len)?;
+                    }
+                }
+                Ok(())
+            }
+            VecInputValue::Reference(vec) => Self::validate_vec_len(vec, max_len),
+        }
+    }
+
+    /// Ensure provided `InputPropertyValue` matches its `Type`
+    pub fn ensure_property_value_matches_its_type(
+        &self,
+        value: &InputPropertyValue<T>,
+    ) -> Result<(), Error<T>> {
+        ensure!(
+            self.does_prop_value_match_type(value),
+            Error::<T>::PropertyValueDoNotMatchType
+        );
+        Ok(())
+    }
+
+    /// Check if provided `InputPropertyValue` matches its `Type`
+    pub fn does_prop_value_match_type(&self, value: &InputPropertyValue<T>) -> bool {
+        // A non-required property can be updated to Bool(false):
+        if !self.required && *value == InputPropertyValue::default() {
+            return true;
+        }
+        match (value, &self.property_type) {
+            (
+                InputPropertyValue::Single(single_property_value),
+                PropertyType::Single(ref single_property_type),
+            ) => match (single_property_value, single_property_type.deref()) {
+                (InputValue::Bool(_), Type::Bool)
+                | (InputValue::Uint16(_), Type::Uint16)
+                | (InputValue::Uint32(_), Type::Uint32)
+                | (InputValue::Uint64(_), Type::Uint64)
+                | (InputValue::Int16(_), Type::Int16)
+                | (InputValue::Int32(_), Type::Int32)
+                | (InputValue::Int64(_), Type::Int64)
+                | (InputValue::Text(_), Type::Text(_))
+                | (InputValue::TextToHash(_), Type::Hash(_))
+                | (InputValue::Reference(_), Type::Reference(_, _)) => true,
+                _ => false,
+            },
+            (
+                InputPropertyValue::Vector(vec_value),
+                PropertyType::Vector(ref vec_property_type),
+            ) => match (vec_value, vec_property_type.get_vec_type()) {
+                (VecInputValue::Bool(_), Type::Bool)
+                | (VecInputValue::Uint16(_), Type::Uint16)
+                | (VecInputValue::Uint32(_), Type::Uint32)
+                | (VecInputValue::Uint64(_), Type::Uint64)
+                | (VecInputValue::Int16(_), Type::Int16)
+                | (VecInputValue::Int32(_), Type::Int32)
+                | (VecInputValue::Int64(_), Type::Int64)
+                | (VecInputValue::Text(_), Type::Text(_))
+                | (VecInputValue::TextToHash(_), Type::Hash(_))
+                | (VecInputValue::Reference(_), Type::Reference(_, _)) => true,
+                _ => false,
+            },
+            _ => false,
+        }
+    }
+
+    /// Perform all required checks to ensure provided `InputPropertyValue` is valid,
+    /// when current `PropertyType` is `Reference`
+    pub fn ensure_property_value_is_valid_reference(
+        &self,
+        value: &InputPropertyValue<T>,
+        current_entity_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        match (value, &self.property_type) {
+            (
+                InputPropertyValue::Single(single_property_value),
+                PropertyType::Single(single_property_type),
+            ) => {
+                if let (
+                    InputValue::Reference(entity_id),
+                    Type::Reference(class_id, same_controller_status),
+                ) = (single_property_value, single_property_type.deref())
+                {
+                    // Ensure the Entity under the provided entity_id belongs to the Class
+                    // whose class_id is declared in the corresponding PropertyType,
+                    // and retrieve that Entity
+                    let entity =
+                        Self::ensure_referenced_entity_match_its_class(*entity_id, *class_id)?;
+
+                    // Ensure Entity can be referenced.
+                    Self::ensure_entity_can_be_referenced(
+                        entity,
+                        *same_controller_status,
+                        current_entity_controller,
+                    )?;
+                }
+            }
+            (InputPropertyValue::Vector(vec_value), PropertyType::Vector(vec_property_type)) => {
+                if let (
+                    VecInputValue::Reference(entity_ids),
+                    Type::Reference(class_id, same_controller_status),
+                ) = (vec_value, vec_property_type.get_vec_type())
+                {
+                    for entity_id in entity_ids.iter() {
+                        // Ensure the Entity under the provided entity_id belongs to the Class
+                        // whose class_id is declared in the corresponding PropertyType,
+                        // and retrieve that Entity
+                        let entity =
+                            Self::ensure_referenced_entity_match_its_class(*entity_id, *class_id)?;
+
+                        // Ensure Entity can be referenced.
+                        Self::ensure_entity_can_be_referenced(
+                            entity,
+                            *same_controller_status,
+                            current_entity_controller,
+                        )?;
+                    }
+                }
+            }
+            _ => (),
+        }
+        Ok(())
+    }
+
+    /// Ensure the `Entity` under the provided `entity_id` belongs to the `Class` with the `class_id`
+    /// declared in the corresponding `PropertyType`.
+    /// Returns the corresponding `Entity` instance
+    pub fn ensure_referenced_entity_match_its_class(
+        entity_id: T::EntityId,
+        class_id: T::ClassId,
+    ) -> Result<Entity<T>, Error<T>> {
+        // Ensure Entity under given id exists
+        Module::<T>::ensure_known_entity_id(entity_id)?;
+
+        let entity = Module::<T>::entity_by_id(entity_id);
+        ensure!(
+            entity.get_class_id() == class_id,
+            Error::<T>::ReferencedEntityDoesNotMatchItsClass
+        );
+        Ok(entity)
+    }
+
+    /// Ensure `Entity` can be referenced.
+    pub fn ensure_entity_can_be_referenced(
+        entity: Entity<T>,
+        same_controller_status: bool,
+        current_entity_controller: &EntityController<T>,
+    ) -> Result<(), Error<T>> {
+        let entity_permissions = entity.get_permissions();
+
+        // Ensure Entity is referencable
+        ensure!(
+            entity_permissions.is_referancable(),
+            Error::<T>::EntityCanNotBeReferenced
+        );
+
+        if same_controller_status {
+            // Ensure Entity controller is equal to the provided one
+            ensure!(
+                entity_permissions.controller_is_equal_to(current_entity_controller),
+                Error::<T>::SameControllerConstraintViolation
+            );
+        }
+        Ok(())
+    }
+
+    /// Ensure `PropertyNameLengthConstraint` satisfied
+    pub fn ensure_name_is_valid(&self) -> Result<(), Error<T>> {
+        T::PropertyNameLengthConstraint::get().ensure_valid(
+            self.name.len(),
+            Error::<T>::PropertyNameTooShort,
+            Error::<T>::PropertyNameTooLong,
+        )
+    }
+
+    /// Ensure `PropertyDescriptionLengthConstraint` satisfied
+    pub fn ensure_description_is_valid(&self) -> Result<(), Error<T>> {
+        T::PropertyDescriptionLengthConstraint::get().ensure_valid(
+            self.description.len(),
+            Error::<T>::PropertyDescriptionTooShort,
+            Error::<T>::PropertyDescriptionTooLong,
+        )
+    }
+
+    /// Ensure `Type` specific constraints satisfied
+    pub fn ensure_property_type_size_is_valid(&self) -> Result<(), Error<T>> {
+        match &self.property_type {
+            PropertyType::Single(single_property_type) => {
+                // Ensure Type specific TextMaxLengthConstraint satisfied
+                single_property_type.ensure_property_type_size_is_valid()
+            }
+            PropertyType::Vector(vec_property_type) => {
+                // Ensure Type specific TextMaxLengthConstraint & VecMaxLengthConstraint satisfied
+                vec_property_type.ensure_property_type_size_is_valid()
+            }
+        }
+    }
+
+    /// Ensure the referenced `class_id` exists, if this `Property`'s `Type` is `Reference`
+    pub fn ensure_property_type_reference_is_valid(&self) -> Result<(), Error<T>> {
+        let has_unknown_reference =
+            if let Type::Reference(other_class_id, _) = self.property_type.get_inner_type() {
+                !<ClassById<T>>::contains_key(other_class_id)
+            } else {
+                false
+            };
+
+        ensure!(
+            !has_unknown_reference,
+            Error::<T>::ClassSchemaRefersUnknownClass
+        );
+
+        Ok(())
+    }
+}
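Putting the checks above together, the validation path for a new value might look like this (a sketch only; `validate_new_value` is a hypothetical helper, assuming `T: Trait` plus `EntityAccessLevel` and `EntityController` from the permissions module):

```rust
// Hypothetical helper: everything a new property value must pass
// before it is written for an entity.
fn validate_new_value<T: Trait>(
    property: &Property<T>,
    value: &InputPropertyValue<T>,
    access_level: EntityAccessLevel,
    controller: &EntityController<T>,
) -> Result<(), Error<T>> {
    // Reject actors locked out by the property's locking policy
    property.ensure_unlocked_from(access_level)?;
    // Type match, reference validity, and max-length checks in one call
    property.ensure_property_value_to_update_is_valid(value, controller)
}
```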

Some files were not shown because too many files changed in this diff