Browse Source

fee-analysis: include CLI tool

conectado 4 years ago
parent
commit
ad5879f16b

+ 1 - 0
analyses/fee-analysis/.gitignore

@@ -2,3 +2,4 @@
 .vim/
 __pycache__/
 .venv/
+*.csv

+ 22 - 1
analyses/fee-analysis/README.md

@@ -1,4 +1,9 @@
-# Notebook with fee analysis
+# Fee analysis
+
+This directory contains a CLI that outputs the weight analysis as a `CSV` file, as well as a more interactive notebook.
+
+
+
 ## Requirements
 
 * python3
@@ -10,5 +15,21 @@
 * `pip install -r requirements.txt`
 * `jupyter notebook`
 * a browser tab should launch with the files in this directory
+
+## To run the CLI
+* `python analysis_cli --help` will output the help
+
+You will need to provide a path to a directory containing the weight files or the path to a single
+weight file.
+
+For the CSV to include the analysis (i.e. the calculated fees in tokens and their prices), pass the parameter `-p`.
+
+The `config.json` file in `analysis_cli` contains the configuration for said analysis, such as the assumed
+parameters and lengths for a given extrinsic, the coefficients, the issuance and the market caps.
+
+**Note:** The output csv file already adds the `EXTRINSIC_BASE_WEIGHT` to the weight column
+
+## To run the notebook
+
 * open `main_notebook.ipynb`
 * The notebook has the information on how to proceed

+ 61 - 0
analyses/fee-analysis/analysis_cli/__main__.py

@@ -0,0 +1,61 @@
+from parser import parse_weights
+from pathlib import Path
+import pathlib
+import argparse
+from os import listdir
+import os
+from analysis import get_weight_info
+import json
+
+
def main():
    """CLI entry point: parse weight files and dump the fee analysis to CSV.

    Takes a positional path to a weight file (or a directory of weight
    files), loads the JSON config, and writes either the raw weights or,
    with -p, the full analysis (tokens and min/max prices) to the output
    CSV.
    """
    arg_parser = argparse.ArgumentParser(description="Fee analysis")
    arg_parser.add_argument(
        'weight_path', type=str, help='Path to weight files or directory with weights'
    )
    arg_parser.add_argument('-o', '--output', type=str,
                            help='Path for csv file defaults to output.csv', default="output.csv")
    arg_parser.add_argument('-c', '--config', type=str,
                            help='Path of a config file', default=Path(__file__).parent / "config.json")
    arg_parser.add_argument('-p', '--process-data', action='store_true',
                            help='Process data given a config if not used only weights will be dumped into the csv output')

    args = arg_parser.parse_args()
    weight_path = args.weight_path
    output_file = args.output
    config_file = args.config
    process_data = args.process_data

    with open(config_file) as f:
        config = json.load(f)

    # Missing keys fall back to 0 / empty so a partial config still works.
    weight_coeff = config.get("weight_coefficient", 0)
    issuance = config.get("issuance", 0)
    length_coeff = config.get("length_coefficient", 0)
    min_market_cap = config.get("min_market_cap", 0)
    max_market_cap = config.get("max_market_cap", 0)
    lengths = config.get("lengths", {})
    params = config.get("params", {})

    w = {}

    if os.path.isdir(weight_path):
        for entry in listdir(weight_path):
            # BUG FIX: join with the OS separator instead of naive string
            # concatenation, which produced a wrong path whenever the
            # directory argument did not end with a trailing slash.
            path = os.path.join(weight_path, entry)
            if os.path.isfile(path):
                w |= parse_weights(path)
    elif os.path.isfile(weight_path):
        w = parse_weights(weight_path)
    else:
        # BUG FIX: bail out instead of falling through and writing a CSV
        # built from no weights at all.
        print("Error: ", weight_path, " is not a valid directory or file")
        return

    df = get_weight_info(w, weight_coeff, issuance,
                         length_coeff, min_market_cap, max_market_cap, params, lengths)
    if process_data:
        df.to_csv(output_file, index=False)
    else:
        # Without -p only the raw weight columns are dumped.
        df.to_csv(output_file, index=False, columns=["Extrinsic", "Weight"])


if __name__ == '__main__':
    main()

+ 172 - 0
analyses/fee-analysis/analysis_cli/analysis.py

@@ -0,0 +1,172 @@
+import pandas as pd
+from constants import *
+
+
def weight_to_fee(weight, coeff):
    """Convert a raw weight into a fee in tokens via a linear coefficient."""
    return weight * coeff
+
+
def length_to_fee(length, coeff):
    """Convert an encoded-length (bytes) into a fee via a linear coefficient."""
    return length * coeff
+
+
def token_to_price(token, market_cap, issuance):
    """Price of `token` tokens, given a total market cap and token issuance."""
    price_per_token = market_cap / issuance
    return price_per_token * token
+
+
def price_weight_function(x, weight_coefficient, market_cap, issuance):
    """Price of the weight-proportional fee component for weight `x`."""
    fee_tokens = weight_to_fee(x, weight_coefficient)
    return token_to_price(fee_tokens, market_cap, issuance)
+
+
def price_length_function(x, length_coefficient, market_cap, issuance):
    """Price of the length-proportional fee component for length `x`."""
    fee_tokens = length_to_fee(x, length_coefficient)
    return token_to_price(fee_tokens, market_cap, issuance)
+
+
def print_var_err(var, extrn):
    """Warn that scaling variable `var` has no configured value for `extrn`."""
    message = "WARNING: the parameter {} isn't defined in the calculation for extrinsic: {}"
    print(message.format(var[0], extrn))
+
+
def calc_vars_weight(weight, extrinsic, params):
    """Weight contributed by an extrinsic's scaling variables.

    `weight` is a parsed weight entry (see parser.parse_weights) whose
    *VARS lists hold (variable name, factor) tuples; `params` maps
    extrinsic name -> {variable name -> assumed value}. Variables without
    a configured value are skipped with a warning.
    """
    total = 0
    if extrinsic in params:
        for var in weight[VARS]:
            if var[0] in params[extrinsic]:
                total += params[extrinsic][var[0]] * var[1]
            else:
                print_var_err(var, extrinsic)
        for var in weight[DB_READS][DB_VARS]:
            if var[0] in params[extrinsic]:
                total += params[extrinsic][var[0]] * var[1] * READ_WEIGHT
            else:
                print_var_err(var, extrinsic)
        for var in weight[DB_WRITES][DB_VARS]:
            if var[0] in params[extrinsic]:
                # BUG FIX: index the configured value with var[0] (the name)
                # and scale by var[1] (the per-unit write count), mirroring
                # the db_reads branch above. The original indexed with the
                # whole tuple (guaranteed KeyError after the `var[0] in`
                # check passed) and dropped the var[1] factor.
                total += params[extrinsic][var[0]] * var[1] * WRITE_WEIGHT
            else:
                print_var_err(var, extrinsic)
    return total
+
+
def calc_weight(weight, extrinsic, params):
    """Total weight of an extrinsic.

    Sums the variable part, the base weight, the static db read/write
    weights, and the per-extrinsic base overhead.
    """
    reads = weight[DB_READS][BASE_DB] * READ_WEIGHT
    writes = weight[DB_WRITES][BASE_DB] * WRITE_WEIGHT
    total = calc_vars_weight(weight, extrinsic, params)
    total += weight[BASE_WEIGHT]
    total += reads + writes
    return total + EXTRINSIC_BASE_WEIGHT
+
+
def calc_total_price_given_params(extrinsic, weight_coeff, market_cap, issuance, length_coeff, params, lengths, weights):
    """Total price of an extrinsic at a given market cap: weight part + length part."""
    total_weight = calc_weight(weights[extrinsic], extrinsic, params)
    weight_part = price_weight_function(
        total_weight, weight_coeff, market_cap, issuance)
    length_part = price_length_function(
        lengths.get(extrinsic, 0), length_coeff, market_cap, issuance)
    return weight_part + length_part
+
+
def calc_total_fee(extrinsic, weight_coeff, length_coeff, params, lengths, weights):
    """Total fee in tokens for an extrinsic: weight fee plus length fee.

    BUG FIX: the original added EXTRINSIC_BASE_WEIGHT on top of
    calc_weight(), which already includes it, double-counting the
    per-extrinsic overhead in the token fee (calc_total_price_given_params
    does not add it, so the two disagreed).
    """
    total_weight = calc_weight(weights[extrinsic], extrinsic, params)
    return weight_to_fee(total_weight, weight_coeff) + \
        length_to_fee(lengths.get(extrinsic, 0), length_coeff)
+
+
def get_computed_values(
    extrinsic,
    weight_model,
    weight_coeff,
    min_market_cap,
    max_market_cap,
    issuance,
    length_coeff,
    params,
    lengths,
    weights
):
    """Compute (weight, fee in tokens, min price, max price) for one extrinsic.

    The min/max prices are the same fee converted under the two market-cap
    scenarios.
    """
    weight = calc_weight(weight_model, extrinsic, params)
    tokens = calc_total_fee(extrinsic, weight_coeff,
                            length_coeff, params, lengths, weights)
    # Same price computation at each market-cap bound.
    min_price, max_price = (
        calc_total_price_given_params(
            extrinsic,
            weight_coeff,
            cap,
            issuance,
            length_coeff,
            params,
            lengths,
            weights,
        )
        for cap in (min_market_cap, max_market_cap)
    )
    return weight, tokens, min_price, max_price
+
+
def calc_all_price(weight_coeff, issuance, length_coeff, min_market_cap, max_market_cap, weights, params, lengths):
    """Tabulate weight, token fee and min/max price for every extrinsic.

    Returns (DataFrame, min_prices, max_prices); the two price lists are
    also embedded as DataFrame columns.
    """
    rows = []
    for extrinsic, model in weights.items():
        computed = get_computed_values(
            extrinsic,
            model,
            weight_coeff,
            min_market_cap,
            max_market_cap,
            issuance,
            length_coeff,
            params,
            lengths,
            weights
        )
        rows.append((extrinsic,) + computed)

    names = [row[0] for row in rows]
    computed_weights = [row[1] for row in rows]
    computed_tokens = [row[2] for row in rows]
    min_prices = [row[3] for row in rows]
    max_prices = [row[4] for row in rows]

    df = pd.DataFrame({
        "Extrinsic": names,
        "Weight": computed_weights,
        "Tokens(JOY)": computed_tokens,
        "Min Price(¢)": min_prices,
        "Max Price(¢)": max_prices
    })

    return df, min_prices, max_prices
+
+
def get_weight_info(weights, weight_coeff=1, issuance=1, length_coeff=1, min_market_cap=1, max_market_cap=1, params=None,
                    lengths=None):
    """Return a DataFrame with the fee analysis of every parsed weight.

    A synthetic entry for the runtime-upgrade proposal, costed at the full
    block weight with no db ops, is appended before the analysis runs.

    BUG FIX: `params`/`lengths` were mutable default arguments ({}), shared
    across calls; they now default to None. The input `weights` mapping is
    also copied so the caller's dict is no longer mutated as a side effect.
    """
    params = {} if params is None else params
    lengths = {} if lengths is None else lengths

    weights = dict(weights)
    weights[RUNTIME_UPGRADE] = {
        BASE_WEIGHT: MAX_BLOCK_WEIGHT,
        DB_READS: {
            BASE_DB: 0,
            DB_VARS: []
        },
        DB_WRITES: {
            BASE_DB: 0,
            DB_VARS: []
        },
        VARS: []
    }

    df, _, _ = calc_all_price(
        weight_coeff,
        issuance,
        length_coeff,
        min_market_cap,
        max_market_cap,
        weights,
        params,
        lengths,
    )

    return df

+ 15 - 0
analyses/fee-analysis/analysis_cli/config.json

@@ -0,0 +1,15 @@
+{
+    "weight_coefficient": 2,
+    "issuance": 250000000,
+    "length_coefficient": 1,
+    "min_market_cap": 1250000,
+    "max_market_cap": 100000000000,
+    "lengths": {
+        "runtime_upgrade": 0
+    },
+    "params": {
+        "proposals_discussion::add_post": {
+            "i": 0
+        }
+    }
+}

+ 14 - 0
analyses/fee-analysis/analysis_cli/constants.py

@@ -0,0 +1,14 @@
# Key under which the synthetic runtime-upgrade proposal weight is stored
# (see get_weight_info in analysis.py).
RUNTIME_UPGRADE = "proposal_codex::runtime_upgrade"
# Dict keys of a parsed weight entry (see parser.parse_weights).
BASE_WEIGHT = "base_weight"
DB_READS = "db_reads"
DB_WRITES = "db_writes"
BASE_DB = "base"
DB_VARS = "vars"
VARS = "vars"
# Weight cost of one db write / read.
# NOTE(review): these look like Substrate's default RocksDB per-op weights
# (100µs write, 25µs read) — confirm against the runtime's DbWeight config.
WRITE_WEIGHT = 100 * 1_000_000
READ_WEIGHT = 25 * 1_000_000
# Weight-unit conversions: 10^12 weight units per second of execution.
WEIGHT_PER_SECOND = 1_000_000_000_000
WEIGHT_PER_MILLIS = WEIGHT_PER_SECOND / 1000  # 1_000_000_000
WEIGHT_PER_MICROS = WEIGHT_PER_MILLIS / 1000  # 1_000_000
# Maximum weight of a full block (2 seconds of execution).
MAX_BLOCK_WEIGHT = 2 * 1_000_000_000_000
# Fixed overhead charged to every extrinsic, added to each weight column
# in the output (see calc_weight and the README note).
EXTRINSIC_BASE_WEIGHT = 125 * WEIGHT_PER_MICROS

+ 96 - 0
analyses/fee-analysis/analysis_cli/parser.py

@@ -0,0 +1,96 @@
+import re
+from constants import *
+
+
# Matches from an opening parenthesis to end of string; used to turn
# "fn_name(...)" into "fn_name".
match_parenthesis = r'\(.*'
# Captures an underscore-separated integer literal right after "(",
# e.g. "(123_456_000 as Weight)" -> "123_456_000".
match_base_weight = r'\(((\d+_{0,1})+)'
re_match_base_weight = re.compile(match_base_weight)
# Static db op counts, e.g. "DbWeight::get().reads(3 as Weight)" -> "3".
match_db_ops_reads = r'DbWeight::get\(\).reads\((\d+) as Weight\)'
match_db_ops_writes = r'DbWeight::get\(\).writes\((\d+) as Weight\)'
re_match_db_ops_reads = re.compile(match_db_ops_reads)
re_match_db_ops_writes = re.compile(match_db_ops_writes)
# A single non-digit character used as a scaling variable,
# e.g. "(i as Weight)" -> "i".
match_scaling_var = r'\((\D) as Weight\)'
re_match_scaling_var = re.compile(match_scaling_var)
+
+
def parse_weights(weight_file):
    """Parse a generated Rust weights file into a dict of weight models.

    Returns {"pallet::fn_name": {BASE_WEIGHT, DB_READS: {BASE_DB, DB_VARS},
    DB_WRITES: {BASE_DB, DB_VARS}, VARS}} where each *VARS list holds
    (variable name, factor) tuples for terms that scale with an input.

    Implemented as a line-oriented state machine: `start_reading` turns on
    at the pallet's `impl` line, `reading_func` while inside a weight `fn`
    body. NOTE(review): assumes the file follows the layout of
    benchmark-generated Substrate weight files — confirm for hand-written
    ones.
    """
    weights = {}
    with open(weight_file) as f:
        start_reading = False
        reading_func = False
        function_name = ""
        # Accumulators for the function currently being read; reset after
        # each closing "}".
        weight = 0
        db_reads_base = 0
        db_reads = []
        db_writes_base = 0
        db_writes = []
        variables = []
        pallet_name = ""
        for line in f:
            words = line.strip().split(" ")
            if words[0] == "impl":
                start_reading = True
                # e.g. "impl pallet_name::WeightInfo for ..." -> "pallet_name"
                pallet_name = words[1].split("::")[0]

            if reading_func:
                # A lone "}" token closes the current fn: store its model
                # and reset every accumulator for the next one.
                if reading_func and "}" in words:
                    reading_func = False
                    weights[function_name] = {
                        BASE_WEIGHT: weight,
                        DB_READS: {
                            BASE_DB: db_reads_base,
                            DB_VARS: db_reads
                        },
                        DB_WRITES: {
                            BASE_DB: db_writes_base,
                            DB_VARS: db_writes,
                        },
                        VARS: variables
                    }
                    weight = 0
                    db_reads_base = 0
                    db_writes_base = 0
                    variables = []
                    db_reads = []
                    db_writes = []

                if "DbWeight::get()" in line:
                    if "reads" in line:
                        # A scaling var on the line means the read count
                        # scales with an input; otherwise it is the static
                        # read count.
                        if re.search(re_match_scaling_var, line):
                            var = re.search(
                                re_match_scaling_var, line).group(1)
                            weight_factor = re.search(
                                re_match_base_weight, line).group(1)
                            db_reads.append((var, int(weight_factor)))
                        else:
                            db_reads_base = int(
                                re.search(re_match_db_ops_reads, line).group(1))

                    if "writes" in line:
                        # Same split for writes: scaling vs static count.
                        if re.search(re_match_scaling_var, line):
                            var = re.search(
                                re_match_scaling_var, line).group(1)
                            weight_factor = re.search(
                                re_match_base_weight, line).group(1)
                            db_writes.append((var, int(weight_factor)))
                        else:
                            db_writes_base = int(
                                re.search(re_match_db_ops_writes, line).group(1))
                else:
                    # A numeric literal starting the line is the base weight.
                    if re.match(re_match_base_weight, words[0]) is not None:
                        match = re.match(re_match_base_weight, words[0])
                        weight = int(match.group(1))

                    # A non-db scaling term, e.g.
                    # ".saturating_add((123_000 as Weight).saturating_mul(i as Weight))".
                    if re.search(re_match_scaling_var, line):
                        var = re.search(
                            re_match_scaling_var, line).group(1)
                        weight_factor = re.search(
                            re_match_base_weight, line).group(1)
                        variables.append((var, int(weight_factor)))

            # Checked after the body above so the "fn" line itself is not
            # parsed as part of the function body.
            if start_reading and words[0] == "fn":
                reading_func = True
                function_name = re.sub(match_parenthesis, '', words[1])
                function_name = pallet_name + "::" + function_name

    return weights

File diff suppressed because it is too large
+ 0 - 686
analyses/fee-analysis/main_notebook.ipynb


Some files were not shown because too many files changed in this diff