Browse files

Merge branch 'olympia' into olympia-playground

Mokhtar Naamani 3 years ago
parent
commit
2d51001169

+ 7 - 5
.env

@@ -22,6 +22,7 @@ TYPEORM_LOGGING=error
 BLOCK_HEIGHT=0
 
 # Query node GraphQL server port
+# Remember to change it in COLOSSUS_QUERY_NODE_URL and DISTRIBUTOR_QUERY_NODE_URL as well
 GRAPHQL_SERVER_PORT=8081
 
 # Query node playground endpoint
@@ -36,6 +37,7 @@ GRAPHQL_PLAYGROUND_ENDPOINT=""
 GRAPHQL_PLAYGROUND_CDN_URL=""
 
 # Hydra indexer gateway GraphQL server port
+# Remember to change it in PROCESSOR_INDEXER_GATEWAY as well
 HYDRA_INDEXER_GATEWAY_PORT=4000
 
 # Default GraphQL server host. It is required during "query-node config:dev"
@@ -45,13 +47,13 @@ GRAPHQL_SERVER_HOST=localhost
 JOYSTREAM_NODE_WS=ws://joystream-node:9944/
 
 # Query node which colossus will use
-COLOSSUS_QUERY_NODE_URL=http://graphql-server:${GRAPHQL_SERVER_PORT}/graphql
+COLOSSUS_QUERY_NODE_URL=http://graphql-server:8081/graphql
 
 # Query node which distributor will use
-DISTRIBUTOR_QUERY_NODE_URL=http://graphql-server:${GRAPHQL_SERVER_PORT}/graphql
+DISTRIBUTOR_QUERY_NODE_URL=http://graphql-server:8081/graphql
 
 # Indexer gateway used by processor. If you don't use the local indexer set this to a remote gateway
-PROCESSOR_INDEXER_GATEWAY=http://hydra-indexer-gateway:${HYDRA_INDEXER_GATEWAY_PORT}/graphql
+PROCESSOR_INDEXER_GATEWAY=http://hydra-indexer-gateway:4000/graphql
 
 # Colossus services identities
 COLOSSUS_1_WORKER_ID=0
@@ -64,10 +66,10 @@ COLOSSUS_2_TRANSACTOR_URI=//Colossus2
 
 # Distributor node services identities
 DISTRIBUTOR_1_WORKER_ID=0
-DISTRIBUTOR_1_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_1_WORKER_ID}
+DISTRIBUTOR_1_ACCOUNT_URI=//testing//worker//Distribution//0
 
 DISTRIBUTOR_2_WORKER_ID=1
-DISTRIBUTOR_2_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_2_WORKER_ID}
+DISTRIBUTOR_2_ACCOUNT_URI=//testing//worker//Distribution//1
 
 # Membership Faucet
 SCREENING_AUTHORITY_SEED=//Alice
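
Note: the URLs above now hardcode the port values instead of interpolating `${GRAPHQL_SERVER_PORT}` and `${HYDRA_INDEXER_GATEWAY_PORT}`, hence the added reminder comments. A minimal consistency check could look like the sketch below (a hypothetical script, not part of this commit):

```bash
#!/usr/bin/env bash
# Hypothetical sanity check (not part of this commit): warn when the hardcoded
# URLs in .env drift from GRAPHQL_SERVER_PORT / HYDRA_INDEXER_GATEWAY_PORT.
set -a; . ./.env; set +a

for url_var in COLOSSUS_QUERY_NODE_URL DISTRIBUTOR_QUERY_NODE_URL; do
  [[ "${!url_var}" == *":${GRAPHQL_SERVER_PORT}/"* ]] ||
    echo "WARNING: ${url_var} does not use port ${GRAPHQL_SERVER_PORT}"
done

[[ "${PROCESSOR_INDEXER_GATEWAY}" == *":${HYDRA_INDEXER_GATEWAY_PORT}/"* ]] ||
  echo "WARNING: PROCESSOR_INDEXER_GATEWAY does not use port ${HYDRA_INDEXER_GATEWAY_PORT}"
```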

+ 1 - 1
.github/workflows/deploy-node-network.yml

@@ -126,7 +126,7 @@ jobs:
           echo -e "[rpc]\n${{ steps.deploy_stack.outputs.RPCPublicIp }}" >> inventory
           cat inventory
 
-      - name: Run playbook to compile joystream-node on build server
+      - name: Run playbook to setup all hosts and compile joystream-node
         uses: dawidd6/action-ansible-playbook@v2
         # Build binaries if AMI not specified or a custom proposals parameter is passed
         if: steps.myoutputs.outputs.ec2AMI == '' || steps.myoutputs.outputs.proposalParameters != ''

+ 30 - 0
.github/workflows/joystream-apps-docker.yml

@@ -0,0 +1,30 @@
+name: Build joystream/apps and publish to Docker Hub
+
+on:
+  workflow_dispatch:
+    inputs:
+      tag_suffix:
+        description: 'Tag suffix'
+        required: true
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          file: apps.Dockerfile
+          push: true
+          tags: joystream/apps:${{ steps.extract_branch.outputs.branch }}-${{ github.event.inputs.tag_suffix }}
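The new workflow only runs on manual dispatch and tags the image as `<branch>-<tag_suffix>`. For reference, a hedged example of triggering it with the GitHub CLI; the branch name and suffix below are placeholders:

```bash
# Example dispatch via the GitHub CLI (assumes gh is installed and authenticated).
# In this example it would build apps.Dockerfile on the olympia-playground branch
# and push joystream/apps:olympia-playground-test1 to Docker Hub.
gh workflow run joystream-apps-docker.yml --ref olympia-playground -f tag_suffix=test1
```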

File diffs are limited because there are too many changes
+ 441 - 296
cli/src/graphql/generated/schema.ts


+ 14 - 5
devops/aws/build-code.yml

@@ -1,7 +1,7 @@
 ---
 
-- name: Get latest Joystream code, build it and copy binary to local
-  hosts: build
+- name: Get latest Joystream code and run setup on all hosts
+  hosts: build,rpc
   gather_facts: no
   tasks:
     - name: Get code from local or git repo
@@ -9,10 +9,19 @@
         name: common
         tasks_from: "{{ 'get-code-local' if build_local_code|bool else 'get-code-git' }}"
 
-    - name: Run setup and build
+    - name: Run setup
+      include_role:
+        name: common
+        tasks_from: run-setup
+
+- name: Build joystream-node on build host and copy binary to local
+  hosts: build
+  gather_facts: no
+  tasks:
+    - name: Run build
       include_role:
         name: common
-        tasks_from: run-setup-build
+        tasks_from: run-build
 
     - name: Copy joystream-node binary from build to local
       fetch:
@@ -20,7 +29,7 @@
         dest: "{{ data_path }}/joystream-node"
         flat: yes
 
-- name: Copy binary to remote servers
+- name: Copy joystream-node binary to all hosts
   hosts: all
   gather_facts: no
   tasks:

+ 7 - 2
devops/aws/create-joystream-node-ami-playbook.yml

@@ -10,10 +10,15 @@
         name: common
         tasks_from: get-code-git
 
-    - name: Run setup and build
+    - name: Run setup
       include_role:
         name: common
-        tasks_from: run-setup-build
+        tasks_from: run-setup
+
+    - name: Run build
+      include_role:
+        name: common
+        tasks_from: run-build
 
     - name: Install subkey
       include_role:

+ 3 - 0
devops/aws/deploy-infra.sample.cfg

@@ -9,6 +9,9 @@ DEFAULT_EC2_INSTANCE_TYPE=t2.medium
 VALIDATOR_EC2_INSTANCE_TYPE=t2.medium
 BUILD_EC2_INSTANCE_TYPE=t2.large
 RPC_EC2_INSTANCE_TYPE=t2.medium
+# Validator volume size
+VOLUME_SIZE=120
+RPC_VOLUME_SIZE=120
 
 # prebuilt AMI with joystream-node, chain-spec and subkey already built
 EC2_AMI_ID="ami-0ce5f13e91397239a"

+ 3 - 1
devops/aws/deploy-infra.sh

@@ -39,7 +39,9 @@ aws cloudformation deploy \
     BuildEC2InstanceType=$BUILD_EC2_INSTANCE_TYPE \
     KeyName=$AWS_KEY_PAIR_NAME \
     EC2AMI=$EC2_AMI_ID \
-    NumberOfValidators=$NUMBER_OF_VALIDATORS
+    NumberOfValidators=$NUMBER_OF_VALIDATORS \
+    VolumeSize=$VOLUME_SIZE \
+    RPCVolumeSize=$RPC_VOLUME_SIZE
 
 # If the deploy succeeded, get the IP, create inventory and configure the created instances
 if [ $? -eq 0 ]; then

+ 0 - 10
devops/aws/roles/build-server/tasks/main.yml

@@ -1,16 +1,6 @@
 ---
 # Configure build server to be able to create chain-spec file and subkey commands
 
-- name: Copy bash_profile content
-  shell: cat ~/.bash_profile
-  register: bash_data
-
-- name: Copy bash_profile content to bashrc for non-interactive sessions
-  blockinfile:
-    block: '{{ bash_data.stdout }}'
-    path: ~/.bashrc
-    insertbefore: BOF
-
 - name: Get dependencies for subkey
   shell: curl https://getsubstrate.io -sSf | bash -s -- --fast
 

+ 13 - 1
devops/aws/roles/common/tasks/chain-spec-node-keys.yml

@@ -51,11 +51,23 @@
 
 - name: Save output of chain spec to local file
   copy:
-    content: '{{ chain_spec_output.stdout | regex_replace("\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]", "") }}'
+    content: '{{ chain_spec_output.stdout }}'
     dest: '{{ remote_data_path }}/chain_spec_output.txt'
   delegate_to: '{{ build_instance }}'
   run_once: true
 
+- name: Format chain spec output
+  set_fact:
+    chain_spec_output_formatted: '{{ chain_spec_output.stdout | regex_replace("=", ": ") | from_yaml }}'
+  run_once: true
+
+- name: Extract keys from chain spec output
+  set_fact:
+    sudo_key: '{{ chain_spec_output_formatted.sudo }}'
+    endowed_key: '{{ chain_spec_output_formatted.endowed_0 }}'
+  delegate_to: '{{ build_instance }}'
+  run_once: true
+
 - name: Change chain spec name, id, protocolId
   json_modify:
     chain_spec_path: '{{ chain_spec_path }}'
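
The two new `set_fact` tasks assume the chain-spec helper prints `key=value` pairs (at least `sudo=...` and `endowed_0=...`); replacing `=` with `: ` turns that output into a YAML mapping that `from_yaml` can parse. An illustrative shell equivalent, with made-up addresses:

```bash
# Illustration only: emulate the regex_replace("=", ": ") | from_yaml step.
# The output lines below are hypothetical; the real script prints the
# generated sudo and endowed account addresses.
chain_spec_output='sudo=5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY
endowed_0=5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty'

echo "$chain_spec_output" | sed 's/=/: /'
# -> sudo: 5GrwvaEF...
# -> endowed_0: 5FHneW46...
```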

+ 1 - 9
devops/aws/roles/common/tasks/run-setup-build.yml → devops/aws/roles/common/tasks/run-build.yml

@@ -1,13 +1,5 @@
 ---
-# Run setup and build code
-
-- name: Create bash profile file
-  command: 'touch /home/ubuntu/.bash_profile'
-
-- name: Run setup script
-  command: ./setup.sh
-  args:
-    chdir: '{{ remote_code_path }}'
+# Build code
 
 - name: Build joystream node
   shell: . ~/.bash_profile && yarn cargo-build

+ 20 - 0
devops/aws/roles/common/tasks/run-setup.yml

@@ -0,0 +1,20 @@
+---
+# Run setup
+
+- name: Create bash profile file
+  command: 'touch /home/ubuntu/.bash_profile'
+
+- name: Run setup script
+  command: ./setup.sh
+  args:
+    chdir: '{{ remote_code_path }}'
+
+- name: Copy bash_profile content
+  shell: cat ~/.bash_profile
+  register: bash_data
+
+- name: Copy bash_profile content to bashrc for non-interactive sessions
+  blockinfile:
+    block: '{{ bash_data.stdout }}'
+    path: ~/.bashrc
+    insertbefore: BOF

+ 8 - 2
devops/aws/roles/rpc/tasks/main.yml

@@ -41,6 +41,10 @@
   # Check for the status every 100s
   delay: 100
 
+- name: Make sure docker is running
+  command: systemctl start docker
+  become: yes
+
 - name: Run service containers
   command: yarn start
   environment:
@@ -50,9 +54,11 @@
     DISTRIBUTOR_1_URL: 'https://{{ inventory_hostname }}.nip.io/distributor-1/'
     SKIP_NODE: 'true'
     SKIP_CHAIN_SETUP: '{{ skip_chain_setup }}'
+    TREASURY_ACCOUNT_URI: '{{ endowed_key }}'
+    SUDO_ACCOUNT_URI: '{{ sudo_key }}'
   args:
     chdir: '{{ remote_code_path }}'
-  async: 1800
+  async: 3600
   poll: 0
   register: compose_result
 
@@ -62,7 +68,7 @@
   register: job_result
   until: job_result.finished
   # Max number of times to check for status
-  retries: 18
+  retries: 36
   # Check for the status every 100s
   delay: 100
 

File diffs are limited because there are too many changes
+ 441 - 296
distributor-node/src/services/networking/query-node/generated/schema.ts


+ 16 - 0
query-node/README.md

@@ -46,3 +46,19 @@ You can stop the query-node and remove all associated docker containers without
 ```bash
 yarn workspace query-node-root kill
 ```
+
+- Database connection settings: DB_NAME, DB_HOST, DB_PORT, DB_USER, DB_PASS
+- Chain RPC endpoint: WS_PROVIDER_ENDPOINT_URI
+- If the Substrate runtime uses non-standard types, map the type definitions (in JSON format) as an external volume
+
+Follow the links for more information about the [indexer](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer) service and [indexer-api-gateway](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer-gateway).
+
+## GraphQL Playground assets url
+The query node's user interface, GraphQL Playground, expects to be served at `/graphql`.
+If you are serving the files on a path like `/query/server/graphql` (via an nginx proxy, aliasing, etc.), you will need to provide
+the base url to the query node server via the `GRAPHQL_PLAYGROUND_CDN` environment variable.
+
+```
+# use the following when serving playground at `/query/server/graphql`
+GRAPHQL_PLAYGROUND_CDN="query/server" yarn workspace query-node-root query-node:start:dev 
+```
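
For the indexer settings listed above, a hedged `docker run` sketch follows; the image name, the `TYPES_JSON` variable, and the container path for the types file are assumptions here, while the `DB_*` and `WS_PROVIDER_ENDPOINT_URI` names are the ones from the list.

```bash
# Hedged sketch only: run a hydra indexer against a local database and chain.
# Image name, TYPES_JSON and the container path are assumptions; adjust them to
# the actual indexer image in use.
docker run -d \
  -e DB_NAME=query_node_indexer \
  -e DB_HOST=db -e DB_PORT=5432 \
  -e DB_USER=postgres -e DB_PASS=postgres \
  -e WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/ \
  -e TYPES_JSON=types.json \
  -v "$(pwd)/types.json:/home/hydra/packages/hydra-indexer/types.json" \
  joystream/hydra-indexer:latest
```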

+ 4 - 0
query-node/schemas/bounty.graphql

@@ -207,6 +207,10 @@ type BountyContribution @entity {
   "Member making the contribution (if a member and not the council)"
   contributor: Membership
 
+  # This exposes the internal Hydra value related to `contributor`
+  "The id of the contributor"
+  contributorId: ID
+
   "Amount of the contribution"
   amount: BigInt!
 

+ 0 - 4
start.sh

@@ -36,10 +36,6 @@ docker-compose up -d orion
 
 ## Init the chain with some state
 if [[ $SKIP_CHAIN_SETUP != 'true' ]]; then
-  set -a
-  . ./.env
-  set +a
-
   export SKIP_MOCK_CONTENT=true
   export SKIP_QUERY_NODE_CHECKS=true
   HOST_IP=$(tests/network-tests/get-host-ip.sh)

File diffs are limited because there are too many changes
+ 441 - 296
storage-node/src/services/queryNode/generated/schema.ts


+ 9 - 4
tests/network-tests/.env

@@ -6,6 +6,14 @@ QUERY_NODE_URL=http://127.0.0.1:8081/graphql
 TREASURY_ACCOUNT_URI=//Alice
 # Sudo Account
 SUDO_ACCOUNT_URI=//Alice
+# to be able to use all accounts generated in a prior scenario run against the same chain
+START_KEY_ID=0
+# Mini-secret or mnemonic used in SURI for deterministic key derivation
+SURI_MINI_SECRET=""
+
+## TODO: Do the below settings really make sense as env variables?
+## Probably configure them in flows or fixtures, or maybe in a general test-config.yml file
+
 # Amount of members able to buy membership in membership creation test.
 MEMBERSHIP_CREATION_N=2
 # Amount of members able to invite in members invite test.
@@ -20,7 +28,4 @@ APPLICATION_STATUS_WITHDRAW_N=3
 WORKER_ACTIONS_WORKERS_N=6
 # Amount of workers to terminate in workerActions flow
 WORKER_ACTIONS_TERMINATE_N=3
-# to be able to use all accounts generated in a prior scenario run against the same chain
-START_KEY_ID=0
-# Mini-secret or mnemonic used in SURI for deterministic key derivation
-SURI_MINI_SECRET=""
+
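
Moving `START_KEY_ID` and `SURI_MINI_SECRET` to the top groups the settings that let a new scenario run reuse accounts generated in a prior run against the same chain: with the same mini secret, key derivation is deterministic. A rough illustration with `subkey` (the derivation scheme shown is an assumption, not necessarily the framework's actual path):

```bash
# Illustration only: with the same mini secret and key id, the derived account
# is reproducible across runs. The "//<id>" derivation path is an assumption.
SURI_MINI_SECRET="bottom drive obey lake curtain smoke basket hold race lonely fit walk"
KEY_ID=0
subkey inspect "${SURI_MINI_SECRET}//${KEY_ID}"
```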

+ 0 - 4
tests/network-tests/run-full-tests.sh

@@ -4,10 +4,6 @@ set -e
 SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
 cd $SCRIPT_PATH
 
-set -a
-. ../../.env
-set +a
-
 # Clean start
 docker-compose -f ../../docker-compose.yml down -v
 

+ 0 - 7
tests/network-tests/run-tests.sh

@@ -29,13 +29,6 @@ fi
 
 # Execute tests
 
-# We can load env config used to start docker services and pass them on to the
-# tests. This could be useful to capture keys used or URLs.
-# We just have to watchout for clashing env var names.
-set -a
-. ../../.env
-set +a
-
 if [ "${NO_STORAGE}" != true ]
 then
   ./start-storage.sh

+ 6 - 2
tests/network-tests/src/Scenario.ts

@@ -11,6 +11,7 @@ import { ResourceManager } from './Resources'
 import fetch from 'cross-fetch'
 import fs, { existsSync, readFileSync } from 'fs'
 import { KeyGenInfo, FaucetInfo } from './types'
+import path from 'path'
 
 export type ScenarioProps = {
   env: NodeJS.ProcessEnv
@@ -48,8 +49,11 @@ function writeOutput(api: Api, miniSecret: string) {
 }
 
 export async function scenario(label: string, scene: (props: ScenarioProps) => Promise<void>): Promise<void> {
-  // Load env variables
-  config()
+  // Load env variables - test framework specific
+  config({ path: path.join(__dirname, '../.env') })
+  // root workspace .env used by docker-compose services
+  config({ path: path.join(__dirname, '../../../.env') })
+
   const env = process.env
 
   // Connect api to the chain

+ 174 - 0
tests/network-tests/src/graphql/generated/schema.ts

@@ -4115,6 +4115,141 @@ export type BudgetSpendingEventWhereUniqueInput = {
   id: Scalars['ID']
 }
 
+export type BudgetUpdatedEvent = Event &
+  BaseGraphQlObject & {
+    /** Hash of the extrinsic which caused the event to be emitted */
+    inExtrinsic?: Maybe<Scalars['String']>
+    /** Blocknumber of the block in which the event was emitted. */
+    inBlock: Scalars['Int']
+    /** Network the block was produced in */
+    network: Network
+    /** Index of event in block from which it was emitted. */
+    indexInBlock: Scalars['Int']
+    /** Filtering options for interface implementers */
+    type?: Maybe<EventTypeOptions>
+    id: Scalars['ID']
+    createdAt: Scalars['DateTime']
+    createdById: Scalars['String']
+    updatedAt?: Maybe<Scalars['DateTime']>
+    updatedById?: Maybe<Scalars['String']>
+    deletedAt?: Maybe<Scalars['DateTime']>
+    deletedById?: Maybe<Scalars['String']>
+    version: Scalars['Int']
+    group: WorkingGroup
+    groupId: Scalars['String']
+    /** Amount subtracted from / added to the current budget */
+    budgetChangeAmount: Scalars['BigInt']
+  }
+
+export type BudgetUpdatedEventConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<BudgetUpdatedEventEdge>
+  pageInfo: PageInfo
+}
+
+export type BudgetUpdatedEventCreateInput = {
+  inExtrinsic?: Maybe<Scalars['String']>
+  inBlock: Scalars['Float']
+  network: Network
+  indexInBlock: Scalars['Float']
+  group: Scalars['ID']
+  budgetChangeAmount: Scalars['String']
+}
+
+export type BudgetUpdatedEventEdge = {
+  node: BudgetUpdatedEvent
+  cursor: Scalars['String']
+}
+
+export enum BudgetUpdatedEventOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  InExtrinsicAsc = 'inExtrinsic_ASC',
+  InExtrinsicDesc = 'inExtrinsic_DESC',
+  InBlockAsc = 'inBlock_ASC',
+  InBlockDesc = 'inBlock_DESC',
+  NetworkAsc = 'network_ASC',
+  NetworkDesc = 'network_DESC',
+  IndexInBlockAsc = 'indexInBlock_ASC',
+  IndexInBlockDesc = 'indexInBlock_DESC',
+  GroupAsc = 'group_ASC',
+  GroupDesc = 'group_DESC',
+  BudgetChangeAmountAsc = 'budgetChangeAmount_ASC',
+  BudgetChangeAmountDesc = 'budgetChangeAmount_DESC',
+}
+
+export type BudgetUpdatedEventUpdateInput = {
+  inExtrinsic?: Maybe<Scalars['String']>
+  inBlock?: Maybe<Scalars['Float']>
+  network?: Maybe<Network>
+  indexInBlock?: Maybe<Scalars['Float']>
+  group?: Maybe<Scalars['ID']>
+  budgetChangeAmount?: Maybe<Scalars['String']>
+}
+
+export type BudgetUpdatedEventWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  inExtrinsic_eq?: Maybe<Scalars['String']>
+  inExtrinsic_contains?: Maybe<Scalars['String']>
+  inExtrinsic_startsWith?: Maybe<Scalars['String']>
+  inExtrinsic_endsWith?: Maybe<Scalars['String']>
+  inExtrinsic_in?: Maybe<Array<Scalars['String']>>
+  inBlock_eq?: Maybe<Scalars['Int']>
+  inBlock_gt?: Maybe<Scalars['Int']>
+  inBlock_gte?: Maybe<Scalars['Int']>
+  inBlock_lt?: Maybe<Scalars['Int']>
+  inBlock_lte?: Maybe<Scalars['Int']>
+  inBlock_in?: Maybe<Array<Scalars['Int']>>
+  network_eq?: Maybe<Network>
+  network_in?: Maybe<Array<Network>>
+  indexInBlock_eq?: Maybe<Scalars['Int']>
+  indexInBlock_gt?: Maybe<Scalars['Int']>
+  indexInBlock_gte?: Maybe<Scalars['Int']>
+  indexInBlock_lt?: Maybe<Scalars['Int']>
+  indexInBlock_lte?: Maybe<Scalars['Int']>
+  indexInBlock_in?: Maybe<Array<Scalars['Int']>>
+  budgetChangeAmount_eq?: Maybe<Scalars['BigInt']>
+  budgetChangeAmount_gt?: Maybe<Scalars['BigInt']>
+  budgetChangeAmount_gte?: Maybe<Scalars['BigInt']>
+  budgetChangeAmount_lt?: Maybe<Scalars['BigInt']>
+  budgetChangeAmount_lte?: Maybe<Scalars['BigInt']>
+  budgetChangeAmount_in?: Maybe<Array<Scalars['BigInt']>>
+  group?: Maybe<WorkingGroupWhereInput>
+  AND?: Maybe<Array<BudgetUpdatedEventWhereInput>>
+  OR?: Maybe<Array<BudgetUpdatedEventWhereInput>>
+}
+
+export type BudgetUpdatedEventWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
 export type BuyNowCanceledEvent = Event &
   BaseGraphQlObject & {
     /** Hash of the extrinsic which caused the event to be emitted. */
@@ -7864,6 +7999,7 @@ export enum EventTypeOptions {
   BudgetRefillPlannedEvent = 'BudgetRefillPlannedEvent',
   BudgetSetEvent = 'BudgetSetEvent',
   BudgetSpendingEvent = 'BudgetSpendingEvent',
+  BudgetUpdatedEvent = 'BudgetUpdatedEvent',
   BuyNowCanceledEvent = 'BuyNowCanceledEvent',
   CandidacyNoteSetEvent = 'CandidacyNoteSetEvent',
   CandidacyStakeReleaseEvent = 'CandidacyStakeReleaseEvent',
@@ -17025,6 +17161,9 @@ export type Query = {
   budgetSpendingEvents: Array<BudgetSpendingEvent>
   budgetSpendingEventByUniqueInput?: Maybe<BudgetSpendingEvent>
   budgetSpendingEventsConnection: BudgetSpendingEventConnection
+  budgetUpdatedEvents: Array<BudgetUpdatedEvent>
+  budgetUpdatedEventByUniqueInput?: Maybe<BudgetUpdatedEvent>
+  budgetUpdatedEventsConnection: BudgetUpdatedEventConnection
   buyNowCanceledEvents: Array<BuyNowCanceledEvent>
   buyNowCanceledEventByUniqueInput?: Maybe<BuyNowCanceledEvent>
   buyNowCanceledEventsConnection: BuyNowCanceledEventConnection
@@ -18082,6 +18221,26 @@ export type QueryBudgetSpendingEventsConnectionArgs = {
   orderBy?: Maybe<Array<BudgetSpendingEventOrderByInput>>
 }
 
+export type QueryBudgetUpdatedEventsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<BudgetUpdatedEventWhereInput>
+  orderBy?: Maybe<Array<BudgetUpdatedEventOrderByInput>>
+}
+
+export type QueryBudgetUpdatedEventByUniqueInputArgs = {
+  where: BudgetUpdatedEventWhereUniqueInput
+}
+
+export type QueryBudgetUpdatedEventsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<BudgetUpdatedEventWhereInput>
+  orderBy?: Maybe<Array<BudgetUpdatedEventOrderByInput>>
+}
+
 export type QueryBuyNowCanceledEventsArgs = {
   offset?: Maybe<Scalars['Int']>
   limit?: Maybe<Scalars['Int']>
@@ -28357,6 +28516,7 @@ export type WorkingGroup = BaseGraphQlObject & {
   appliedonopeningeventgroup?: Maybe<Array<AppliedOnOpeningEvent>>
   budgetseteventgroup?: Maybe<Array<BudgetSetEvent>>
   budgetspendingeventgroup?: Maybe<Array<BudgetSpendingEvent>>
+  budgetupdatedeventgroup?: Maybe<Array<BudgetUpdatedEvent>>
   leaderseteventgroup?: Maybe<Array<LeaderSetEvent>>
   leaderunseteventgroup?: Maybe<Array<LeaderUnsetEvent>>
   newmissedrewardlevelreachedeventgroup?: Maybe<Array<NewMissedRewardLevelReachedEvent>>
@@ -28774,6 +28934,8 @@ export type WorkingGroupOpeningMetadata = BaseGraphQlObject & {
   version: Scalars['Int']
   /** Whether the originally provided metadata was valid */
   originallyValid: Scalars['Boolean']
+  /** Opening title */
+  title?: Maybe<Scalars['String']>
   /** Opening short description */
   shortDescription?: Maybe<Scalars['String']>
   /** Opening description (md-formatted) */
@@ -28797,6 +28959,7 @@ export type WorkingGroupOpeningMetadataConnection = {
 
 export type WorkingGroupOpeningMetadataCreateInput = {
   originallyValid: Scalars['Boolean']
+  title?: Maybe<Scalars['String']>
   shortDescription?: Maybe<Scalars['String']>
   description?: Maybe<Scalars['String']>
   hiringLimit?: Maybe<Scalars['Float']>
@@ -28818,6 +28981,8 @@ export enum WorkingGroupOpeningMetadataOrderByInput {
   DeletedAtDesc = 'deletedAt_DESC',
   OriginallyValidAsc = 'originallyValid_ASC',
   OriginallyValidDesc = 'originallyValid_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
   ShortDescriptionAsc = 'shortDescription_ASC',
   ShortDescriptionDesc = 'shortDescription_DESC',
   DescriptionAsc = 'description_ASC',
@@ -28832,6 +28997,7 @@ export enum WorkingGroupOpeningMetadataOrderByInput {
 
 export type WorkingGroupOpeningMetadataUpdateInput = {
   originallyValid?: Maybe<Scalars['Boolean']>
+  title?: Maybe<Scalars['String']>
   shortDescription?: Maybe<Scalars['String']>
   description?: Maybe<Scalars['String']>
   hiringLimit?: Maybe<Scalars['Float']>
@@ -28866,6 +29032,11 @@ export type WorkingGroupOpeningMetadataWhereInput = {
   deletedById_in?: Maybe<Array<Scalars['ID']>>
   originallyValid_eq?: Maybe<Scalars['Boolean']>
   originallyValid_in?: Maybe<Array<Scalars['Boolean']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
   shortDescription_eq?: Maybe<Scalars['String']>
   shortDescription_contains?: Maybe<Scalars['String']>
   shortDescription_startsWith?: Maybe<Scalars['String']>
@@ -29106,6 +29277,9 @@ export type WorkingGroupWhereInput = {
   budgetspendingeventgroup_none?: Maybe<BudgetSpendingEventWhereInput>
   budgetspendingeventgroup_some?: Maybe<BudgetSpendingEventWhereInput>
   budgetspendingeventgroup_every?: Maybe<BudgetSpendingEventWhereInput>
+  budgetupdatedeventgroup_none?: Maybe<BudgetUpdatedEventWhereInput>
+  budgetupdatedeventgroup_some?: Maybe<BudgetUpdatedEventWhereInput>
+  budgetupdatedeventgroup_every?: Maybe<BudgetUpdatedEventWhereInput>
   leaderseteventgroup_none?: Maybe<LeaderSetEventWhereInput>
   leaderseteventgroup_some?: Maybe<LeaderSetEventWhereInput>
   leaderseteventgroup_every?: Maybe<LeaderSetEventWhereInput>

+ 0 - 5
tests/network-tests/start-storage.sh

@@ -1,11 +1,6 @@
 TMP=$0
 THIS_DIR=`dirname $TMP`
 
-# make sure env variables are loaded before calling this script
-#set -a
-#. ../../.env
-#set +a
-
 HOST_IP=`$THIS_DIR/get-host-ip.sh`
 export COLOSSUS_1_URL="http://${HOST_IP}:3333"
 export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"

Some files were not shown because too many files were changed