diff --git a/.github/workflows/develop.yaml b/.github/workflows/develop.yaml index 1a729a5c..bf2ee73a 100644 --- a/.github/workflows/develop.yaml +++ b/.github/workflows/develop.yaml @@ -39,7 +39,7 @@ jobs: platforms: linux/amd64 push: true build-args: | - "CHIA_BRANCH=1.2.2" + "CHIA_BRANCH=1.2.3" "FLAX_BRANCH=main" tags: | ${{ secrets.DOCKERHUB_USERNAME }}/machinaris:develop diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index 5bf38741..8e2b05d3 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -37,10 +37,10 @@ jobs: uses: docker/build-push-action@v2 with: context: . - platforms: linux/amd64 + platforms: linux/amd64,linux/arm64 push: true build-args: | - "CHIA_BRANCH=1.2.2" + "CHIA_BRANCH=1.2.3" "FLAX_BRANCH=main" tags: | ${{ secrets.DOCKERHUB_USERNAME }}/machinaris:latest diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 537e8a00..0e597a99 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -1,10 +1,9 @@ name: test -on: - workflow_dispatch: - inputs: - version: - description: 'Test Version' +on: + push: + branches: + - 'integration' jobs: docker: @@ -37,10 +36,10 @@ jobs: uses: docker/build-push-action@v2 with: context: . - platforms: linux/amd64 + platforms: linux/amd64,linux/arm64 push: true build-args: | - "CHIA_BRANCH=1.2.2" + "CHIA_BRANCH=1.2.3" "FLAX_BRANCH=main" tags: | ${{ secrets.DOCKERHUB_USERNAME }}/machinaris:test diff --git a/CHANGELOG.md b/CHANGELOG.md index 76736ef6..84e769e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,11 +2,24 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.5.2] - 2021-08-13 + +- Machinaris - Docker images now available for [Apple M1](https://github.com/guydavis/machinaris/issues/43) and [Raspberry Pi OS](https://github.com/guydavis/machinaris/issues/155) architectures. +- Chiadog - Update to new v0.7.0 to [support for parsing partials and solo blocks](https://github.com/martomi/chiadog/pull/268). +- Chia - Update to patch release of 1.2.3. See their [changelog for details](https://github.com/Chia-Network/chia-blockchain/releases/tag/1.2.3). +- Flax - Update to version 0.1.1. See their [changelog for details](https://github.com/Flax-Network/flax-blockchain/releases/tag/0.1.1). +- TrueNAS - Support for Machinaris deployment via helm chart. [Issue #78](https://github.com/guydavis/machinaris/issues/78) - Big thanks to @kmoore134 for this! +- Machinaris - Daily Farming Summary now available on Farming page for both Chia and Flax. Add new plot type column. +- Machinaris - Pools - Show each Pool's status including link to your pool provider. List pool point events on Summary page. +- Machinaris - Workers - Use hostname for Worker display name, even when using IP addresses behind the scenes. Also show versions. Automated harvester setup. +- Machinaris - Connections page has link to test your router port forward for farming. +- Machinaris - New [public website](http://www.machinaris.app) with launch Wizard for generating first Docker run/compose of Machinaris. + ## [0.5.1] - 2021-07-22 - Wizard on Workers page to create a Docker run/compose based on your settings. [Issue #97](https://github.com/guydavis/machinaris/issues/97) -- Update to patch release of Chia 1.2.2, including a fix for harvester cache updates. See their [changelog for details](https://github.com/Chia-Network/chia-blockchain/releases/tag/1.2.2). - Latest Madmax plotter with support for n_buckets3 and n_rmulti2 settings in Plotman. +- Update to patch release of Chia 1.2.2, including a fix for harvester cache updates. 
See their [changelog for details](https://github.com/Chia-Network/chia-blockchain/releases/tag/1.2.2). ## [0.5.0] - 2021-07-09 diff --git a/CREDITS.md b/CREDITS.md index 80f911e7..c7d53e12 100644 --- a/CREDITS.md +++ b/CREDITS.md @@ -52,6 +52,7 @@ A big thanks to all that contributed with dev and test including: * barkcollar * dartec * elexx +* danicraftscz ## Trademark Notice diff --git a/README.md b/README.md index e7541327..e71addd9 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # machinaris -A Docker image for plotting and farming the Chia™ cryptocurrency on [one computer](https://github.com/guydavis/machinaris/wiki/Docker) or across [many](https://github.com/guydavis/machinaris/wiki/Workers). Now with [official Pool support](https://github.com/guydavis/machinaris/wiki/Pooling)! +A Docker image for plotting and farming the Chia™ cryptocurrency on [one computer](https://github.com/guydavis/machinaris/wiki/Docker) or across [many](https://github.com/guydavis/machinaris/wiki/Workers). Try the easy install using [Launch Wizard](https://machinaris.app). ![Home](https://raw.githubusercontent.com/guydavis/machinaris-unraid/master/docs/img/machinaris_home.png) -To get started with Machinaris, follow an install guide for your platform: [Windows](https://github.com/guydavis/machinaris/wiki/Windows), [Linux](https://github.com/guydavis/machinaris/wiki/Linux), [Macintosh](https://github.com/guydavis/machinaris/wiki/MacOS), [Unraid](https://github.com/guydavis/machinaris/wiki/Unraid), and [others](https://github.com/guydavis/machinaris/wiki/Docker). +For details, see your particular platform: [Windows](https://github.com/guydavis/machinaris/wiki/Windows), [Linux](https://github.com/guydavis/machinaris/wiki/Linux), [Macintosh](https://github.com/guydavis/machinaris/wiki/MacOS), [Unraid](https://github.com/guydavis/machinaris/wiki/Unraid), and [others](https://github.com/guydavis/machinaris/wiki/Docker). 
## Plotting View diff --git a/VERSION b/VERSION index 5d4294b9..2411653a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.5.1 \ No newline at end of file +0.5.2 \ No newline at end of file diff --git a/api/commands/chia_cli.py b/api/commands/chia_cli.py index 801213a6..9bb52171 100644 --- a/api/commands/chia_cli.py +++ b/api/commands/chia_cli.py @@ -2,6 +2,7 @@ # CLI interactions with the chia binary. # +import asyncio import datetime import os import pexpect @@ -313,3 +314,11 @@ def check_plots(first_load): ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') proc = Popen(['tail', '-n', str(MAX_LOG_LINES), output_file], stdout=PIPE) return class_escape.sub('', ansi_escape.sub('', proc.stdout.read().decode("utf-8"))) + +def get_pool_login_link(launcher_id): + try: + stream = os.popen("chia plotnft get_login_link -l {0}".format(launcher_id)) + return stream.read() + except Exception as ex: + app.logger.info("Failed to get_login_link: {0}".format(str(ex))) + return "" diff --git a/api/commands/log_parser.py b/api/commands/log_parser.py index 22a96365..c9225b90 100644 --- a/api/commands/log_parser.py +++ b/api/commands/log_parser.py @@ -27,6 +27,9 @@ # Roughly 1 minutes worth of challenges CHALLENGES_TO_LOAD = 8 +# Most recent partial proofs, actually double as 2 log lines per partial +PARTIALS_TO_LOAD = 50 + # When reading tail of a log, only send this many lines MAX_LOG_LINES = 250 @@ -56,6 +59,34 @@ def recent_challenges(blockchain): # app.logger.debug(challenges) return challenges +def recent_partials(blockchain): + log_file = CHIA_LOG + if blockchain == 'flax': + log_file = FLAX_LOG + if not os.path.exists(log_file): + app.logger.debug( + "Skipping partials parsing as no such log file: {0}".format(log_file)) + return [] + rotated_log_file = '' + if os.path.exists(log_file + '.1'): + rotated_log_file = log_file + '.1' + proc = Popen("grep -h --text -C1 -i partial {0} {1} | tail -n {2}".format(rotated_log_file, log_file, PARTIALS_TO_LOAD), + 
stdout=PIPE, stderr=PIPE, shell=True) + try: + outs, errs = proc.communicate(timeout=90) + except TimeoutExpired: + proc.kill() + proc.communicate() + abort(500, description="The timeout is expired!") + if errs: + app.logger.error(errs.decode('utf-8')) + abort(500, description=errs.decode('utf-8')) + cli_stdout = outs.decode('utf-8') + #app.logger.debug("Partials grep: {0}".format(cli_stdout)) + partials = log.Partials(cli_stdout.splitlines()) + # app.logger.debug(partials) + return partials + def find_plotting_job_log(plot_id): dir_path = '/root/.chia/plotman/logs' diff --git a/api/gunicorn.conf.py b/api/gunicorn.conf.py index f6819ddf..25e968b4 100644 --- a/api/gunicorn.conf.py +++ b/api/gunicorn.conf.py @@ -9,32 +9,37 @@ def on_starting(server): from api import app from api.schedules import status_worker, status_farm, status_plotting, \ status_plots, status_challenges, status_wallets, status_blockchains, \ - status_connections, status_keys, status_alerts, status_controller, status_plotnfts + status_connections, status_keys, status_alerts, status_controller, \ + status_plotnfts, status_points, status_pools, status_partials from api.schedules import stats_disk, stats_farm scheduler = BackgroundScheduler() # Statistics gathering locally - scheduler.add_job(func=stats_farm.collect, trigger='cron', minute=0) # Hourly - scheduler.add_job(func=stats_disk.collect, trigger='cron', minute="*/5") # Every 5 minutes + scheduler.add_job(func=stats_farm.collect, name="stats_farm", trigger='cron', minute=0) # Hourly + scheduler.add_job(func=stats_disk.collect, name="stats_disk", trigger='cron', minute="*/5") # Every 5 minutes # Testing only #scheduler.add_job(func=stats_farm.collect, trigger='interval', seconds=10) # Test immediately #scheduler.add_job(func=stats_disk.collect, trigger='interval', seconds=10) # Test immediately # Status gathering - reported via API - scheduler.add_job(func=status_challenges.update, trigger='interval', seconds=5) - 
scheduler.add_job(func=status_worker.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_controller.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_farm.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_plotting.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_plots.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_wallets.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_plotnfts.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_blockchains.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_connections.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_keys.update, trigger='interval', seconds=60, jitter=30) - scheduler.add_job(func=status_alerts.update, trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_challenges.update, name="challenges", trigger='interval', seconds=5) + scheduler.add_job(func=status_worker.update, name="workers", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_controller.update, name="controller", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_farm.update, name="farms", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_plotting.update, name="plottings", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_plots.update, name="plots", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_wallets.update, name="wallets", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_plotnfts.update, name="plotnfts", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_blockchains.update, name="blockchains", trigger='interval', seconds=60, jitter=30) + 
scheduler.add_job(func=status_connections.update, name="connections", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_keys.update, name="keys", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_alerts.update, name="alerts", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_pools.update, name="pools", trigger='interval', seconds=60, jitter=30) + scheduler.add_job(func=status_partials.update, name="partials", trigger='interval', seconds=60, jitter=30) + + #scheduler.add_job(func=status_points.update, name="points", trigger='interval', seconds=10, jitter=0) app.logger.debug("Starting background scheduler...") scheduler.start() diff --git a/api/migrations/README b/api/migrations/README index 2d5a97b2..e39f6839 100644 --- a/api/migrations/README +++ b/api/migrations/README @@ -5,7 +5,8 @@ As developer, modify models, then run this to generate a new migration and commi ``` cd /code/machinaris/api -FLASK_APP=__init__.py flask db migrate +FLASK_APP=__init__.py flask db migrate +chown -R 1000.users . ``` This creates migration based on current model. diff --git a/api/migrations/versions/5ea0ec1d8711_.py b/api/migrations/versions/5ea0ec1d8711_.py new file mode 100644 index 00000000..4d29c011 --- /dev/null +++ b/api/migrations/versions/5ea0ec1d8711_.py @@ -0,0 +1,62 @@ +"""empty message + +Revision ID: 5ea0ec1d8711 +Revises: bdfe8db75307 +Create Date: 2021-07-29 17:21:19.447778 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '5ea0ec1d8711' +down_revision = 'bdfe8db75307' +branch_labels = None +depends_on = None + + +def upgrade(engine_name): + globals()["upgrade_%s" % engine_name]() + + +def downgrade(engine_name): + globals()["downgrade_%s" % engine_name]() + + + + + +def upgrade_(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('pools', + sa.Column('unique_id', sa.String(length=255), nullable=False), + sa.Column('hostname', sa.String(length=255), nullable=False), + sa.Column('blockchain', sa.String(length=64), nullable=True), + sa.Column('launcher_id', sa.String(length=255), nullable=False), + sa.Column('pool_state', sa.String(), nullable=False), + sa.Column('updated_at', sa.String(length=64), nullable=False), + sa.PrimaryKeyConstraint('unique_id') + ) + op.add_column('workers', sa.Column('displayname', sa.String(length=255), nullable=True)) + # ### end Alembic commands ### + + +def downgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('workers', 'displayname') + op.drop_table('pools') + # ### end Alembic commands ### + + +def upgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + diff --git a/api/migrations/versions/ce840f016302_.py b/api/migrations/versions/ce840f016302_.py new file mode 100644 index 00000000..e359795a --- /dev/null +++ b/api/migrations/versions/ce840f016302_.py @@ -0,0 +1,52 @@ +"""empty message + +Revision ID: ce840f016302 +Revises: f529fc25f0d6 +Create Date: 2021-08-11 10:15:42.883128 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ce840f016302' +down_revision = 'f529fc25f0d6' +branch_labels = None +depends_on = None + + +def upgrade(engine_name): + globals()["upgrade_%s" % engine_name]() + + +def downgrade(engine_name): + globals()["downgrade_%s" % engine_name]() + + + + + +def upgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('pools', sa.Column('login_link', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade_(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('pools', 'login_link') + # ### end Alembic commands ### + + +def upgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + diff --git a/api/migrations/versions/f529fc25f0d6_.py b/api/migrations/versions/f529fc25f0d6_.py new file mode 100644 index 00000000..581da917 --- /dev/null +++ b/api/migrations/versions/f529fc25f0d6_.py @@ -0,0 +1,61 @@ +"""empty message + +Revision ID: f529fc25f0d6 +Revises: 5ea0ec1d8711 +Create Date: 2021-07-30 13:38:56.345029 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'f529fc25f0d6' +down_revision = '5ea0ec1d8711' +branch_labels = None +depends_on = None + + +def upgrade(engine_name): + globals()["upgrade_%s" % engine_name]() + + +def downgrade(engine_name): + globals()["downgrade_%s" % engine_name]() + + + + + +def upgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('partials', + sa.Column('unique_id', sa.String(length=255), nullable=False), + sa.Column('hostname', sa.String(length=255), nullable=False), + sa.Column('blockchain', sa.String(length=64), nullable=False), + sa.Column('launcher_id', sa.String(length=255), nullable=False), + sa.Column('pool_url', sa.String(length=255), nullable=False), + sa.Column('pool_response', sa.String(), nullable=False), + sa.Column('created_at', sa.String(length=64), nullable=False), + sa.PrimaryKeyConstraint('unique_id') + ) + # ### end Alembic commands ### + + +def downgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('partials') + # ### end Alembic commands ### + + +def upgrade_stats(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### + + +def downgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + diff --git a/api/migrations/versions/fc3cc23ea246_.py b/api/migrations/versions/fc3cc23ea246_.py new file mode 100644 index 00000000..fa2904b1 --- /dev/null +++ b/api/migrations/versions/fc3cc23ea246_.py @@ -0,0 +1,52 @@ +"""empty message + +Revision ID: fc3cc23ea246 +Revises: ce840f016302 +Create Date: 2021-08-11 16:39:10.975245 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'fc3cc23ea246' +down_revision = 'ce840f016302' +branch_labels = None +depends_on = None + + +def upgrade(engine_name): + globals()["upgrade_%s" % engine_name]() + + +def downgrade(engine_name): + globals()["downgrade_%s" % engine_name]() + + + + + +def upgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('plots', sa.Column('type', sa.String(length=32), nullable=True)) + # ### end Alembic commands ### + + +def downgrade_(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('plots', 'type') + # ### end Alembic commands ### + + +def upgrade_stats(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade_stats(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### + diff --git a/api/models/log.py b/api/models/log.py index 7ab6756e..d0abe622 100644 --- a/api/models/log.py +++ b/api/models/log.py @@ -10,7 +10,7 @@ class Challenges: - # Parse the provided most recent 5 lines of grepped output for challenges + # Parse the provided most recent lines of grepped output for challenges def __init__(self, cli_stdout): self.columns = [ 'challenge_id', 'plots_past_filter', 'proofs_found', 'time_taken', 'created_at'] self.rows = [] @@ -29,3 +29,29 @@ def __init__(self, cli_stdout): app.logger.info("Failed to parse challenge line: {0}".format(line)) app.logger.info(traceback.format_exc()) self.rows.reverse() + +class Partials: + + # Parse the provided most recent lines for partials. Grep grabs 2 lines (partial submit and response) per. + def __init__(self, cli_stdout): + self.columns = [ 'challenge_id', 'plots_past_filter', 'proofs_found', 'time_taken', 'created_at'] + self.rows = [] + for line in cli_stdout: + try: + if "Submitting partial" in line: + app.logger.debug(line) + created_at = line.split()[0].replace('T', ' ') + launcher_id = re.search('partial for (\w+) to', line, re.IGNORECASE).group(1) + pool_url = re.search('to (.*)$', line, re.IGNORECASE).group(1) + elif "Pool response" in line: + pool_response = line[line.index('{'):] + self.rows.append({ + 'launcher_id': launcher_id, + 'pool_url': pool_url.strip(), + 'pool_response': pool_response, + 'created_at': created_at + }) + except: + app.logger.info("Failed to parse partial line: {0}".format(line)) + app.logger.info(traceback.format_exc()) + self.rows.reverse() diff --git a/api/rpc/chia.py b/api/rpc/chia.py new file mode 100644 index 00000000..294a4790 --- /dev/null +++ b/api/rpc/chia.py @@ -0,0 +1,100 @@ +# +# RPC interactions with Chia +# + +import asyncio +import datetime + +from chia.rpc.full_node_rpc_client import FullNodeRpcClient +from chia.rpc.farmer_rpc_client import FarmerRpcClient +from chia.util.default_root import 
DEFAULT_ROOT_PATH +from chia.util.ints import uint16 +from chia.util.config import load_config as load_chia_config + +from api import app + +# Unused as I am getting signage points from debug.log as this API returns no dates +async def get_signage_points(blockchain): + config = load_chia_config(DEFAULT_ROOT_PATH, 'config.yaml') + farmer_rpc_port = config["farmer"]["rpc_port"] + farmer = await FarmerRpcClient.create( + 'localhost', uint16(farmer_rpc_port), DEFAULT_ROOT_PATH, config + ) + points = await farmer.get_signage_points() + farmer.close() + await farmer.await_closed() + config = load_chia_config(DEFAULT_ROOT_PATH, 'config.yaml') + full_node_rpc_port = config["full_node"]["rpc_port"] + fullnode = await FullNodeRpcClient.create( + 'localhost', uint16(full_node_rpc_port), DEFAULT_ROOT_PATH, config + ) + for point in points: + sp = point['signage_point'] + signage_point = await fullnode.get_recent_signage_point_or_eos( + sp_hash=sp['challenge_chain_sp'], + challenge_hash=sp['challenge_hash']) + app.logger.info(signage_point) + fullnode.close() + await fullnode.await_closed() + return points + +# Used on Pools page to display each pool's state +async def get_pool_state(blockchain): + pools = [] + try: + config = load_chia_config(DEFAULT_ROOT_PATH, 'config.yaml') + farmer_rpc_port = config["farmer"]["rpc_port"] + farmer = await FarmerRpcClient.create( + 'localhost', uint16(farmer_rpc_port), DEFAULT_ROOT_PATH, config + ) + result = await farmer.get_pool_state() + farmer.close() + await farmer.await_closed() + if 'pool_state' in result: + for pool in result["pool_state"]: + pools.append(pool) + except Exception as ex: + app.logger.info("Error getting {0} blockchain pool states: {1}".format(blockchain, str(ex))) + return pools + +# Used to load plot type (solo or portable) via RPC +plots_via_rpc = None +last_plots_via_rpc = None +def get_all_plots(): + global plots_via_rpc + global last_plots_via_rpc + if plots_via_rpc and last_plots_via_rpc >= 
(datetime.datetime.now() - datetime.timedelta(minutes=3)): + return plots_via_rpc + #app.logger.info("Reloading all plots on all harvesters via RPC") + plots_via_rpc = asyncio.run(load_all_plots()) + last_plots_via_rpc = datetime.datetime.now() + return plots_via_rpc + +async def load_all_plots(): + all_plots = [] + try: + config = load_chia_config(DEFAULT_ROOT_PATH, 'config.yaml') + farmer_rpc_port = config["farmer"]["rpc_port"] + farmer = await FarmerRpcClient.create( + 'localhost', uint16(farmer_rpc_port), DEFAULT_ROOT_PATH, config + ) + result = await farmer.get_harvesters() + farmer.close() + await farmer.await_closed() + for harvester in result["harvesters"]: + host = harvester["connection"]["host"] + plots = harvester["plots"] + for plot in plots: + all_plots.append({ + "hostname": host, + "type": "solo" if (plot["pool_contract_puzzle_hash"] is None) else "portable", + "plot_id": plot['plot_id'], + "file_size": plot['file_size'], # bytes + "filename": plot['filename'], # full path and name + "plot_public_key": plot['plot_public_key'], + "pool_contract_puzzle_hash": plot['pool_contract_puzzle_hash'], + "pool_public_key": plot['pool_public_key'], + }) + except Exception as ex: + app.logger.info("Error getting plots via RPC: {0}".format(str(ex))) + return all_plots diff --git a/api/schedules/stats_disk.py b/api/schedules/stats_disk.py index 2e6f06b2..9b44e481 100644 --- a/api/schedules/stats_disk.py +++ b/api/schedules/stats_disk.py @@ -3,6 +3,7 @@ # import datetime +import os import shutil import sqlite3 import socket @@ -53,6 +54,9 @@ def store_disk_stats(db, current_datetime, disk_type): cur = db.cursor() disks = globals.get_disks(disk_type) for disk in disks: + if not os.path.exists(disk): + app.logger.info("Skipping disk stat collection for non-existant path: {0}".format(disk)) + continue try: total, used, free = shutil.disk_usage(disk) cur.execute("INSERT INTO stat_{0}_disk_used (hostname, path, value, created_at) VALUES (?,?,?,?)".format(disk_type), diff 
--git a/api/schedules/status_challenges.py b/api/schedules/status_challenges.py index a78920df..20938449 100644 --- a/api/schedules/status_challenges.py +++ b/api/schedules/status_challenges.py @@ -19,7 +19,7 @@ def update(): return with app.app_context(): try: - hostname = utils.get_hostname() + hostname = utils.get_displayname() blockchains = ['chia'] if globals.flax_enabled(): blockchains.append('flax') diff --git a/api/schedules/status_controller.py b/api/schedules/status_controller.py index 2d264559..26b79936 100644 --- a/api/schedules/status_controller.py +++ b/api/schedules/status_controller.py @@ -35,13 +35,13 @@ def update(): app.logger.info("Failed to load and send worker status.") def ping_workers(workers): - tz = pytz.timezone('Etc/UTC') for worker in workers: try: #app.logger.info("Pinging worker api endpoint: {0}".format(worker.hostname)) utils.send_get(worker, "/ping/", timeout=3, debug=False) worker.latest_ping_result = "Responding" - worker.ping_success_at = datetime.datetime.now(tz=tz) + worker.updated_at = datetime.datetime.now() + worker.ping_success_at = datetime.datetime.now() except requests.exceptions.ConnectTimeout as ex: app.logger.info(str(ex)) worker.latest_ping_result = "Connection Timeout" diff --git a/api/schedules/status_partials.py b/api/schedules/status_partials.py new file mode 100644 index 00000000..88fd199f --- /dev/null +++ b/api/schedules/status_partials.py @@ -0,0 +1,44 @@ +# +# Performs a REST call to controller (possibly localhost) of latest blockchain partials. 
+# + +import os +import traceback + +from flask import g + +from common.config import globals +from common.utils import converters +from api import app +from api.commands import log_parser +from api import utils + +def update(): + if not globals.farming_enabled() and not globals.harvesting_enabled(): + #app.logger.info("Skipping recent partials collection on plotting-only instance.") + return + with app.app_context(): + try: + hostname = utils.get_hostname() + blockchains = ['chia'] + if globals.flax_enabled(): + blockchains.append('flax') + payload = [] + for blockchain in blockchains: + recent_partials = log_parser.recent_partials(blockchain) + for partial in recent_partials.rows: + app.logger.debug(partial) + payload.append({ + "unique_id": hostname + '_' + partial['launcher_id'] + '_' + partial['created_at'], + "hostname": hostname, + "blockchain": blockchain, + "launcher_id": partial['launcher_id'], + "pool_url": partial['pool_url'], + "pool_response": partial['pool_response'], + "created_at": partial['created_at'], + }) + app.logger.debug(payload) + utils.send_post('/partials/', payload, debug=False) + except: + app.logger.info("Failed to load recent partials and send.") + app.logger.info(traceback.format_exc()) diff --git a/api/schedules/status_plots.py b/api/schedules/status_plots.py index 5f93ed97..c78e6974 100644 --- a/api/schedules/status_plots.py +++ b/api/schedules/status_plots.py @@ -37,7 +37,7 @@ def update(): "dir": plot['dir'], "file": plot['file'], "created_at": plot['created_at'], - "size": plot['size'], + "size": plot['size'] }) if len(payload) > 0: utils.send_post('/plots/', payload, debug=False) diff --git a/api/schedules/status_points.py b/api/schedules/status_points.py new file mode 100644 index 00000000..25434212 --- /dev/null +++ b/api/schedules/status_points.py @@ -0,0 +1,45 @@ +# +# Performs a REST call to controller (possibly localhost) of latest points status. 
+# + +import asyncio +import datetime +import http +import json +import os +import requests +import socket +import sqlite3 +import traceback + +from flask import g + +from common.config import globals +from api.rpc import chia +from api import app +from api import utils + +def update(): + if not globals.farming_enabled(): + #app.logger.info("Skipping recent signage points collection on non-farming instance.") + return + with app.app_context(): + try: + blockchains = ['chia'] + # Flax doesn't support this yet. + #if globals.flax_enabled(): + # blockchains.append('flax') + for blockchain in blockchains: + hostname = utils.get_hostname() + points = asyncio.run(chia.get_signage_points(blockchain)) + payload = { + "hostname": hostname, + "blockchain": blockchain, + "details": points, + } + for point in points: + app.logger.info(point) + #utils.send_post('/plotnfts/', payload, debug=False) + except: + app.logger.info("Failed to load and send recent signage points.") + app.logger.info(traceback.format_exc()) diff --git a/api/schedules/status_pools.py b/api/schedules/status_pools.py new file mode 100644 index 00000000..7ae92132 --- /dev/null +++ b/api/schedules/status_pools.py @@ -0,0 +1,56 @@ +# +# Performs a REST call to controller (possibly localhost) of latest pools status. +# + +import asyncio +import datetime +import http +import json +import os +import requests +import socket +import sqlite3 +import traceback + +from flask import g + +from common.config import globals +from api.commands import chia_cli +from api.rpc import chia +from api import app +from api import utils + +def update(): + if not globals.farming_enabled(): + #app.logger.info("Skipping recent pools state collection on non-farming instance.") + return + with app.app_context(): + try: + blockchains = ['chia'] + # Flax doesn't support this yet. 
+ #if globals.flax_enabled(): + # blockchains.append('flax') + for blockchain in blockchains: + payload = [] + hostname = utils.get_hostname() + pools = asyncio.run(chia.get_pool_state(blockchain)) + for pool in pools: + launcher_id = pool['pool_config']['launcher_id'] + login_link = chia_cli.get_pool_login_link(launcher_id) + if launcher_id.startswith('0x'): + launcher_id = launcher_id[2:] + payload.append({ + "unique_id": hostname + '_' + blockchain + '_' + launcher_id, + "hostname": hostname, + "blockchain": blockchain, + "launcher_id": launcher_id, + "login_link": login_link, + "pool_state": json.dumps(pool), + "updated_at": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + }) + #app.logger.info(payload) + response = utils.send_post('/pools/', payload, debug=False) + #app.logger.info(response.content) + except: + app.logger.info("Failed to load and send pools state.") + app.logger.info(traceback.format_exc()) diff --git a/api/schedules/status_worker.py b/api/schedules/status_worker.py index 7c926fa8..aacc1a48 100644 --- a/api/schedules/status_worker.py +++ b/api/schedules/status_worker.py @@ -23,12 +23,15 @@ def update(): with app.app_context(): try: hostname = utils.get_hostname() + displayname = utils.get_displayname() + config = globals.load() payload = { "hostname": hostname, + "displayname": displayname, "mode": os.environ['mode'], "services": gather_services_status(), "url": utils.get_remote_url(), - "config": json.dumps(globals.load()), + "config": json.dumps(config), } utils.send_post('/workers/', payload, debug=False) except: diff --git a/api/utils.py b/api/utils.py index 3d3abaa9..a3b278a2 100644 --- a/api/utils.py +++ b/api/utils.py @@ -56,5 +56,8 @@ def get_hostname(): hostname = socket.gethostname() return hostname +def get_displayname(): + return socket.gethostname() + def is_controller(): return app.config['CONTROLLER_HOST'] == "localhost" diff --git a/api/views/__init__.py b/api/views/__init__.py index d62166c4..3ccd68dd 100644 --- 
a/api/views/__init__.py +++ b/api/views/__init__.py @@ -2,16 +2,19 @@ from . import analysis from . import alerts from . import blockchains +from . import certificates from . import challenges from . import configs from . import connections from . import farms from . import keys from . import logs +from . import partials from . import ping from . import plotnfts from . import plots from . import plottings +from . import pools from . import wallets from . import workers @@ -22,15 +25,18 @@ alerts, blockchains, challenges, + certificates, configs, connections, farms, keys, logs, + partials, ping, plotnfts, plots, plottings, + pools, wallets, workers, ) diff --git a/api/views/certificates/__init__.py b/api/views/certificates/__init__.py new file mode 100644 index 00000000..172d2b79 --- /dev/null +++ b/api/views/certificates/__init__.py @@ -0,0 +1 @@ +from .resources import blp # noqa \ No newline at end of file diff --git a/api/views/certificates/resources.py b/api/views/certificates/resources.py new file mode 100644 index 00000000..d6e8ace4 --- /dev/null +++ b/api/views/certificates/resources.py @@ -0,0 +1,54 @@ +import datetime +import json +import os +import re +import shutil +import time +import traceback + +from flask import request, Response, abort +from flask.views import MethodView + +from api import app +from api.extensions.api import Blueprint + +from api.commands import chia_cli, plotman_cli + +blp = Blueprint( + 'Certificates', + __name__, + url_prefix='/certificates', + description="Certificates to perform" +) + + +@blp.route('/') +class Certificates(MethodView): + + def get(self): + if not self.allow_download(): + abort(401) + blockchain = request.args.get('type') + dir = "/root/.{0}/mainnet/config/ssl/ca".format(blockchain) + zip = "/tmp/certs".format(blockchain) + zipname = "{0}.zip".format(zip) + try: + os.remove(zipname) + except: + pass + shutil.make_archive(zip, 'zip', dir) + with open(zipname, 'rb') as f: + data = f.readlines() + 
os.remove(zipname) + return Response(data, headers={ + 'Content-Type': 'application/zip', + 'Content-Disposition': 'attachment; filename=certs.zip;' + }) + + def allow_download(self): + worker_setup_marker = "/root/.chia/machinaris/tmp/worker_launch.tmp" + if os.path.exists(worker_setup_marker): + last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(worker_setup_marker)) + fifteen_minutes_ago = datetime.datetime.now() - datetime.timedelta(minutes=15) + return last_modified_date >= fifteen_minutes_ago + return False \ No newline at end of file diff --git a/api/views/partials/__init__.py b/api/views/partials/__init__.py new file mode 100644 index 00000000..172d2b79 --- /dev/null +++ b/api/views/partials/__init__.py @@ -0,0 +1 @@ +from .resources import blp # noqa \ No newline at end of file diff --git a/api/views/partials/resources.py b/api/views/partials/resources.py new file mode 100644 index 00000000..804929a7 --- /dev/null +++ b/api/views/partials/resources.py @@ -0,0 +1,73 @@ +import datetime as dt + +from flask.views import MethodView + +from api import app +from api.extensions.api import Blueprint, SQLCursorPage +from common.extensions.database import db +from common.models import Partial + +from .schemas import PartialSchema, PartialQueryArgsSchema, BatchOfPartialSchema, BatchOfPartialQueryArgsSchema + + +blp = Blueprint( + 'Partial', + __name__, + url_prefix='/partials', + description="Operations on all partials recorded on farmer" +) + + +@blp.route('/') +class Partials(MethodView): + + @blp.etag + @blp.arguments(BatchOfPartialQueryArgsSchema, location='query') + @blp.response(200, PartialSchema(many=True)) + @blp.paginate(SQLCursorPage) + def get(self, args): + ret = Partial.query.filter_by(**args) + return ret + + @blp.etag + @blp.arguments(BatchOfPartialSchema) + @blp.response(201, PartialSchema(many=True)) + def post(self, new_items): + if len(new_items) == 0: + return "No partials provided.", 400 + 
db.session.query(Partial).filter(Partial.hostname==new_items[0]['hostname']).delete() + items = [] + for new_item in new_items: + item = Partial(**new_item) + items.append(item) + db.session.add(item) + db.session.commit() + return items + + +@blp.route('/<hostname>/<blockchain>') +class PartialByHostname(MethodView): + + @blp.etag + @blp.response(200, PartialSchema) + def get(self, hostname, blockchain): + return db.session.query(Partial).filter(Partial.hostname==hostname, Partial.blockchain==blockchain) + + @blp.etag + @blp.arguments(BatchOfPartialSchema) + @blp.response(200, PartialSchema(many=True)) + def put(self, new_items, hostname, blockchain): + db.session.query(Partial).filter(Partial.hostname==hostname, Partial.blockchain==blockchain).delete() + items = [] + for new_item in new_items: + item = Partial(**new_item) + items.append(item) + db.session.add(item) + db.session.commit() + return items + + @blp.etag + @blp.response(204) + def delete(self, hostname, blockchain): + db.session.query(Partial).filter(Partial.hostname==hostname, Partial.blockchain==blockchain).delete() + db.session.commit() diff --git a/api/views/partials/schemas.py b/api/views/partials/schemas.py new file mode 100644 index 00000000..ab7689b8 --- /dev/null +++ b/api/views/partials/schemas.py @@ -0,0 +1,27 @@ +import marshmallow as ma +from marshmallow_sqlalchemy import field_for +from marshmallow_toplevel import TopLevelSchema + +from api.extensions.api import Schema, AutoSchema +from common.models.partials import Partial + +class PartialSchema(AutoSchema): + unique_id = field_for(Partial, "unique_id") + + class Meta(AutoSchema.Meta): + table = Partial.__table__ + + +class PartialQueryArgsSchema(Schema): + unique_id = ma.fields.Str() + hostname = ma.fields.Str() + +class BatchOfPartialSchema(TopLevelSchema): + _toplevel = ma.fields.Nested( + PartialSchema, + required=True, + many=True + ) + +class BatchOfPartialQueryArgsSchema(Schema): + hostname = ma.fields.Str() diff --git a/api/views/plots/resources.py 
b/api/views/plots/resources.py index 6df021c6..13e0b39e 100644 --- a/api/views/plots/resources.py +++ b/api/views/plots/resources.py @@ -1,15 +1,16 @@ +import asyncio import datetime as dt from flask.views import MethodView from api import app +from api.rpc import chia from api.extensions.api import Blueprint, SQLCursorPage from common.extensions.database import db from common.models import Plot from .schemas import PlotSchema, PlotQueryArgsSchema, BatchOfPlotSchema, BatchOfPlotQueryArgsSchema - blp = Blueprint( 'Plot', __name__, @@ -17,7 +18,6 @@ description="Operations on all plots on farmer" ) - @blp.route('/') class Plots(MethodView): @@ -33,11 +33,18 @@ def get(self, args): @blp.arguments(BatchOfPlotSchema) @blp.response(201, PlotSchema(many=True)) def post(self, new_items): + # Get plot info via RPC to determine type: solo or portable + plots_via_rpc = chia.get_all_plots() # Now delete all old plots by hostname of first new plotting db.session.query(Plot).filter(Plot.hostname==new_items[0]['hostname']).delete() items = [] for new_item in new_items: item = Plot(**new_item) + item.type = "" + for plot_rpc in plots_via_rpc: + if plot_rpc['plot_id'].startswith("0x{0}".format(item.plot_id)) and 'type' in plot_rpc: + item.type = plot_rpc['type'] + #app.logger.info("Found type: {0}".format(item.type)) db.session.add(item) items.append(item) db.session.commit() diff --git a/api/views/pools/__init__.py b/api/views/pools/__init__.py new file mode 100644 index 00000000..172d2b79 --- /dev/null +++ b/api/views/pools/__init__.py @@ -0,0 +1 @@ +from .resources import blp # noqa \ No newline at end of file diff --git a/api/views/pools/resources.py b/api/views/pools/resources.py new file mode 100644 index 00000000..eb416ece --- /dev/null +++ b/api/views/pools/resources.py @@ -0,0 +1,73 @@ +import datetime as dt + +from flask.views import MethodView + +from api import app +from api.extensions.api import Blueprint, SQLCursorPage +from common.extensions.database import db +from 
common.models import Pool + +from .schemas import PoolSchema, PoolQueryArgsSchema, BatchOfPoolSchema, BatchOfPoolQueryArgsSchema + + +blp = Blueprint( + 'Pool', + __name__, + url_prefix='/pools', + description="Operations on all pools recorded on farmer" +) + + +@blp.route('/') +class Pools(MethodView): + + @blp.etag + @blp.arguments(BatchOfPoolQueryArgsSchema, location='query') + @blp.response(200, PoolSchema(many=True)) + @blp.paginate(SQLCursorPage) + def get(self, args): + ret = Pool.query.filter_by(**args) + return ret + + @blp.etag + @blp.arguments(BatchOfPoolSchema) + @blp.response(201, PoolSchema(many=True)) + def post(self, new_items): + if len(new_items) == 0: + return "No pools provided.", 400 + db.session.query(Pool).filter(Pool.hostname==new_items[0]['hostname']).delete() + items = [] + for new_item in new_items: + item = Pool(**new_item) + items.append(item) + db.session.add(item) + db.session.commit() + return items + + +@blp.route('/<hostname>/<blockchain>') +class PoolByHostname(MethodView): + + @blp.etag + @blp.response(200, PoolSchema) + def get(self, hostname, blockchain): + return db.session.query(Pool).filter(Pool.hostname==hostname, Pool.blockchain==blockchain) + + @blp.etag + @blp.arguments(BatchOfPoolSchema) + @blp.response(200, PoolSchema(many=True)) + def put(self, new_items, hostname, blockchain): + db.session.query(Pool).filter(Pool.hostname==hostname, Pool.blockchain==blockchain).delete() + items = [] + for new_item in new_items: + item = Pool(**new_item) + items.append(item) + db.session.add(item) + db.session.commit() + return items + + @blp.etag + @blp.response(204) + def delete(self, hostname, blockchain): + db.session.query(Pool).filter(Pool.hostname==hostname, Pool.blockchain==blockchain).delete() + db.session.commit() diff --git a/api/views/pools/schemas.py b/api/views/pools/schemas.py new file mode 100644 index 00000000..f58addbc --- /dev/null +++ b/api/views/pools/schemas.py @@ -0,0 +1,27 @@ +import marshmallow as ma +from marshmallow_sqlalchemy 
import field_for +from marshmallow_toplevel import TopLevelSchema + +from api.extensions.api import Schema, AutoSchema +from common.models.pools import Pool + +class PoolSchema(AutoSchema): + unique_id = field_for(Pool, "unique_id") + + class Meta(AutoSchema.Meta): + table = Pool.__table__ + + +class PoolQueryArgsSchema(Schema): + unique_id = ma.fields.Str() + hostname = ma.fields.Str() + +class BatchOfPoolSchema(TopLevelSchema): + _toplevel = ma.fields.Nested( + PoolSchema, + required=True, + many=True + ) + +class BatchOfPoolQueryArgsSchema(Schema): + hostname = ma.fields.Str() diff --git a/api/views/workers/resources.py b/api/views/workers/resources.py index 540ed323..dffd5f38 100644 --- a/api/views/workers/resources.py +++ b/api/views/workers/resources.py @@ -1,4 +1,6 @@ import datetime as dt +import pytz +import os from flask.views import MethodView @@ -33,6 +35,8 @@ def get(self, args): @blp.response(201, WorkerSchema) def post(self, new_item): item = Worker.query.get(new_item['hostname']) + if not 'displayname' in new_item: # Old clients use hostname + new_item['displayname'] = new_item['hostname'] if item: # update new_item['created_at'] = item.created_at new_item['updated_at'] = dt.datetime.now() diff --git a/common/config/globals.py b/common/config/globals.py index 21a63733..92391aa3 100644 --- a/common/config/globals.py +++ b/common/config/globals.py @@ -194,8 +194,8 @@ def load_plotman_version(): last_plotman_version = outs.decode('utf-8').strip() if last_plotman_version.startswith('plotman'): last_plotman_version = last_plotman_version[len('plotman'):].strip() - if last_plotman_version.endswith('+dev'): - last_plotman_version = last_plotman_version[:-len('+dev')].strip() + #if last_plotman_version.endswith('+dev'): + # last_plotman_version = last_plotman_version[:-len('+dev')].strip() last_plotman_version_load_time = datetime.datetime.now() return last_plotman_version @@ -223,6 +223,8 @@ def load_chiadog_version(): last_chiadog_version = 
outs.decode('utf-8').strip() if last_chiadog_version.startswith('v'): last_chiadog_version = last_chiadog_version[len('v'):].strip() + if '-' in last_chiadog_version: + last_chiadog_version = last_chiadog_version.split('-')[0] + '+dev' last_chiadog_version_load_time = datetime.datetime.now() return last_chiadog_version @@ -305,7 +307,7 @@ def load_flax_version(): if last_flax_version and last_flax_version_load_time >= \ (datetime.datetime.now() - datetime.timedelta(days=RELOAD_MINIMUM_DAYS)): return last_flax_version - proc = Popen("{0} version".format(CHIA_BINARY), + proc = Popen("{0} version".format(FLAX_BINARY), stdout=PIPE, stderr=PIPE, shell=True) try: outs, errs = proc.communicate(timeout=90) diff --git a/common/models/__init__.py b/common/models/__init__.py index 8d5723ef..b494b893 100644 --- a/common/models/__init__.py +++ b/common/models/__init__.py @@ -4,9 +4,11 @@ from .connections import Connection from .farms import Farm from .keys import Key +from .partials import Partial from .plots import Plot from .plotnfts import Plotnft from .plottings import Plotting +from .pools import Pool from .stats import StatPlotCount, StatPlotsSize, StatTotalChia, StatNetspaceSize, StatTimeToWin, \ StatPlotsTotalUsed, StatPlotsDiskUsed, StatPlotsDiskFree, StatPlottingTotalUsed, \ StatPlottingDiskUsed, StatPlottingDiskFree diff --git a/common/models/partials.py b/common/models/partials.py new file mode 100644 index 00000000..d35700b6 --- /dev/null +++ b/common/models/partials.py @@ -0,0 +1,19 @@ +import datetime as dt +import sqlalchemy as sa + +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship, backref + +from common.extensions.database import db + +class Partial(db.Model): + __tablename__ = "partials" + + unique_id = sa.Column(sa.String(length=255), primary_key=True) + hostname = sa.Column(sa.String(length=255), nullable=False) + blockchain = sa.Column(sa.String(length=64), nullable=False) + launcher_id = sa.Column(sa.String(length=255), 
nullable=False) + pool_url = sa.Column(sa.String(length=255), nullable=False) + pool_response = sa.Column(sa.String, nullable=False) + created_at = sa.Column(sa.String(length=64), nullable=False) + \ No newline at end of file diff --git a/common/models/plots.py b/common/models/plots.py index d1ddb8aa..28c90727 100644 --- a/common/models/plots.py +++ b/common/models/plots.py @@ -11,6 +11,7 @@ class Plot(db.Model): hostname = sa.Column(sa.String(length=255), primary_key=True) plot_id = sa.Column(sa.String(length=16), primary_key=True) + type = sa.Column(sa.String(length=32), nullable=True) dir = sa.Column(sa.String(length=255), nullable=False) file = sa.Column(sa.String(length=255), nullable=False) size = sa.Column(sa.Integer, nullable=False) diff --git a/common/models/pools.py b/common/models/pools.py new file mode 100644 index 00000000..b8920aba --- /dev/null +++ b/common/models/pools.py @@ -0,0 +1,19 @@ +import datetime as dt +import sqlalchemy as sa + +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship, backref + +from common.extensions.database import db + +class Pool(db.Model): + __tablename__ = "pools" + + unique_id = sa.Column(sa.String(length=255), primary_key=True) + hostname = sa.Column(sa.String(length=255), nullable=False) + blockchain = sa.Column(sa.String(length=64), nullable=True) + launcher_id = sa.Column(sa.String(length=255), nullable=False) + login_link = sa.Column(sa.String, nullable=True) + pool_state = sa.Column(sa.String, nullable=False) + updated_at = sa.Column(sa.String(length=64), nullable=False) + \ No newline at end of file diff --git a/common/models/workers.py b/common/models/workers.py index bd426f71..d4bbbf60 100644 --- a/common/models/workers.py +++ b/common/models/workers.py @@ -11,6 +11,7 @@ class Worker(db.Model): __tablename__ = "workers" hostname = sa.Column(sa.String(length=255), primary_key=True) + displayname = sa.Column(sa.String(length=255), nullable=True) mode = sa.Column(sa.String(length=64), 
nullable=False) services = sa.Column(sa.String, nullable=False) url = sa.Column(sa.String, nullable=False) @@ -18,7 +19,7 @@ class Worker(db.Model): latest_ping_result = sa.Column(sa.String) # Holds status of most recent ping ping_success_at = sa.Column(sa.DateTime()) # Time of last successful ping created_at = sa.Column(sa.DateTime(), server_default=func.now()) - updated_at = sa.Column(sa.DateTime(), onupdate=func.now()) + updated_at = sa.Column(sa.DateTime()) def farming_status(self): return j.loads(self.services)['chia_farm_status'] diff --git a/config/plotman.sample.yaml b/config/plotman.sample.yaml index adc98c3d..576d3971 100644 --- a/config/plotman.sample.yaml +++ b/config/plotman.sample.yaml @@ -96,7 +96,7 @@ plotting: farmer_pk: REPLACE_WITH_THE_REAL_VALUE # ONLY FOR OLD SOLO PLOTS, COMMENT OUT IF PORTABLE PLOTTING!!! pool_pk: REPLACE_WITH_THE_REAL_VALUE - # See 'Settings | Pools' page, for 'P2 singleton address' value, UNCOMMENT IF PORTABLE PLOTTING!!! + # See 'Settings | Pools' page, for 'Pool contract address' value, UNCOMMENT IF PORTABLE PLOTTING!!! 
#pool_contract_address: REPLACE_WITH_THE_REAL_VALUE # If you enable 'chia', plot in *parallel* with higher tmpdir_max_jobs and global_max_jobs @@ -106,7 +106,7 @@ plotting: chia: # The stock plotter: https://github.com/Chia-Network/chia-blockchain k: 32 # k-size of plot, leave at 32 most of the time - e: False # Use -e plotting option + e: False # Disable bitfield back sorting (default is True) n_threads: 2 # Threads per job n_buckets: 128 # Number of buckets to split data into job_buffer: 3389 # Per job memory diff --git a/dockerfile b/dockerfile index b0ea979b..2ee0503d 100644 --- a/dockerfile +++ b/dockerfile @@ -52,7 +52,6 @@ RUN \ FROM package_stage # Base install of official Chia binaries at the given branch ARG CHIA_BRANCH -ARG PATCH_CHIAPOS ARG FLAX_BRANCH # copy local files @@ -64,7 +63,6 @@ WORKDIR /chia-blockchain # Install Chia, Plotman, Chiadog, Madmax, Flax, Machinaris, etc RUN \ /usr/bin/bash /machinaris/scripts/chia_install.sh ${CHIA_BRANCH} \ - && /usr/bin/bash /machinaris/scripts/patch_chiapos.sh ${PATCH_CHIAPOS} \ && /usr/bin/bash /machinaris/scripts/chiadog_install.sh \ && /usr/bin/bash /machinaris/scripts/plotman_install.sh \ && /usr/bin/bash /machinaris/scripts/madmax_install.sh \ @@ -104,7 +102,6 @@ ENV controller_api_port=8927 ENV PATH="${PATH}:/chia-blockchain/venv/bin:/flax-blockchain/venv/bin" ENV TZ=Etc/UTC ENV FLASK_ENV=production -ENV FLASK_APP=/machinaris/main.py ENV XDG_CONFIG_HOME=/root/.chia ENV AUTO_PLOT=false diff --git a/scripts/chia_launch.sh b/scripts/chia_launch.sh index f6d75625..6295f4e8 100644 --- a/scripts/chia_launch.sh +++ b/scripts/chia_launch.sh @@ -46,10 +46,18 @@ elif [[ ${mode} =~ ^harvester.* ]]; then echo "A farmer peer address and port are required." exit else - if [ -d /root/.chia/farmer_ca ]; then + if [ ! 
-f /root/.chia/farmer_ca/chia_ca.crt ]; then + mkdir -p /root/.chia/farmer_ca + response=$(curl --write-out '%{http_code}' --silent http://${controller_host}:8927/certificates/?type=chia --output /tmp/certs.zip) + if [ $response == '200' ]; then + unzip /tmp/certs.zip -d /root/.chia/farmer_ca + fi + rm -f /tmp/certs.zip + fi + if [ -f /root/.chia/farmer_ca/chia_ca.crt ]; then chia init -c /root/.chia/farmer_ca 2>&1 > /root/.chia/mainnet/log/init.log else - echo "Did not find your farmer's ca folder at /root/.chia/farmer_ca." + echo "Did not find your farmer's certificates within /root/.chia/farmer_ca." echo "See: https://github.com/guydavis/machinaris/wiki/Workers#harvester" fi chia configure --set-farmer-peer ${farmer_address}:${farmer_port} diff --git a/scripts/chiadog_install.sh b/scripts/chiadog_install.sh index 114f8b0d..33ab466a 100644 --- a/scripts/chiadog_install.sh +++ b/scripts/chiadog_install.sh @@ -3,17 +3,13 @@ # Installs Chiadog for log monitoring and alerting # +CHIADOG_BRANCH=main + echo 'Installing Chiadog...' cd / -git clone https://github.com/martomi/chiadog.git - -# Temporary patch for spam about partial proofs -# https://github.com/martomi/chiadog/issues/252#issuecomment-877416135 - -cd /chiadog/src/chia_log/handlers/ -sed -i 's/FoundProofs(),//g' harvester_activity_handler.py +git clone --branch ${CHIADOG_BRANCH} https://github.com/martomi/chiadog.git cd /chia-blockchain/ diff --git a/scripts/dev/start-api.sh b/scripts/dev/start-api.sh index 73678dd1..db730e1f 100644 --- a/scripts/dev/start-api.sh +++ b/scripts/dev/start-api.sh @@ -7,15 +7,23 @@ echo 'Starting Machinaris...' 
mkdir -p /root/.chia/machinaris/logs cd /code/machinaris -LOG_LEVEL='info' -RELOAD='--reload' + +if [ $FLASK_ENV == "development" ]; +then + LOG_LEVEL='debug' + RELOAD='--reload' +else + LOG_LEVEL='info' + RELOAD='--preload' +fi # To enable SSL, use the Chia self-signed cert #--certfile=/root/.chia/mainnet/config/ssl/ca/chia_ca.crt \ #--keyfile=/root/.chia/mainnet/config/ssl/ca/chia_ca.key \ /chia-blockchain/venv/bin/gunicorn ${RELOAD} \ - --bind 0.0.0.0:8927 --timeout 90 \ + --bind 0.0.0.0:8927 \ + --timeout 90 \ --log-level=${LOG_LEVEL} \ --workers=2 \ --config api/gunicorn.conf.py \ diff --git a/scripts/dev/start-web.sh b/scripts/dev/start-web.sh index 1cbc9402..1d119688 100644 --- a/scripts/dev/start-web.sh +++ b/scripts/dev/start-web.sh @@ -8,10 +8,18 @@ echo 'Starting Machinaris...' mkdir -p /root/.chia/machinaris/logs cd /code/machinaris -/chia-blockchain/venv/bin/gunicorn \ - --reload \ +if [ $FLASK_ENV == "development" ]; +then + LOG_LEVEL='debug' + RELOAD='--reload' +else + LOG_LEVEL='info' + RELOAD='--preload' +fi + +/chia-blockchain/venv/bin/gunicorn ${RELOAD} \ --bind 0.0.0.0:8926 \ --timeout 90 \ - --log-level=info \ + --log-level=$LOG_LEVEL \ --workers=2 \ web:app diff --git a/scripts/flax_launch.sh b/scripts/flax_launch.sh index fe3bb064..168765c7 100644 --- a/scripts/flax_launch.sh +++ b/scripts/flax_launch.sh @@ -56,10 +56,18 @@ elif [[ ${mode} =~ ^harvester.* ]]; then echo "A farmer peer address and port are required." exit else - if [ -d /root/.flax/farmer_ca ]; then + if [ ! 
-f /root/.flax/farmer_ca/flax_ca.crt ]; then + mkdir -p /root/.flax/farmer_ca + response=$(curl --write-out '%{http_code}' --silent http://${controller_host}:8927/certificates/?type=flax --output /tmp/certs.zip) + if [ $response == '200' ]; then + unzip /tmp/certs.zip -d /root/.flax/farmer_ca + fi + rm -f /tmp/certs.zip + fi + if [ -f /root/.flax/farmer_ca/flax_ca.crt ]; then flax init -c /root/.flax/farmer_ca 2>&1 > /root/.flax/mainnet/log/init.log else - echo "Did not find your farmer's ca folder at /root/.flax/farmer_ca." + echo "Did not find your farmer's certificates within /root/.flax/farmer_ca." echo "See: https://github.com/guydavis/machinaris/wiki/Workers#harvester" fi flax configure --set-farmer-peer ${farmer_address}:${flax_farmer_port} diff --git a/scripts/flaxdog_install.sh b/scripts/flaxdog_install.sh index 459b7a5e..b49d5aa5 100644 --- a/scripts/flaxdog_install.sh +++ b/scripts/flaxdog_install.sh @@ -9,6 +9,11 @@ cd / git clone https://github.com/langhorst/flaxdog.git +# Temporary patch for spam about partial proofs +# https://github.com/martomi/chiadog/issues/252#issuecomment-877416135 +#cd /flaxdog/src/flax_log/handlers/ +#sed -i 's/FoundProofs(),//g' harvester_activity_handler.py + cd /flax-blockchain/ # Chia-blockchain needs PyYAML=5.4.1 but Chiadog wants exactly 5.4 diff --git a/scripts/patch_chiapos.sh b/scripts/patch_chiapos.sh deleted file mode 100644 index cfc4781c..00000000 --- a/scripts/patch_chiapos.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/env bash -# -# Only if PATCH_CHIAPOS is set, when building the image tagged with ':chiapos' -# Patch for faster plotting, on some systems with newer/fast CPUs -# See https://github.com/guydavis/machinaris/wiki/Releases#chiapos -# - -PATCH_CHIAPOS=$1 -echo "PATCH_CHIAPOS=${PATCH_CHIAPOS}" -if [[ $PATCH_CHIAPOS = 'true' ]]; then - echo 'Patching with Chiapos...' 
- cd /chia-blockchain - curl -o install_multithreaded_chiapos.sh https://gist.githubusercontent.com/SippieCup/8420c831ffcd74f4c4c3c756d1bda912/raw/4be54e136f3f7c070f320e935e883e5ef4c7141d/install_multithreaded_chiapos.sh - chmod a+x install_multithreaded_chiapos.sh - ./install_multithreaded_chiapos.sh /chia-blockchain -else - echo 'Not patching for chiapos. Leaving Chia binaries as stock for main release.' -fi \ No newline at end of file diff --git a/scripts/setup_databases.sh b/scripts/setup_databases.sh index 86c8c5c2..e46cf35b 100644 --- a/scripts/setup_databases.sh +++ b/scripts/setup_databases.sh @@ -11,7 +11,6 @@ mkdir -p /root/.chia/chiadog/dbs if [[ $1 == "reset" ]]; then mv /root/.chia/machinaris/dbs/machinaris.db /root/.chia/machinaris/dbs/machinaris.db.bak mv /root/.chia/machinaris/dbs/stats.db /root/.chia/machinaris/dbs/stats.db.bak - mv /root/.chia/chiadog/dbs/chiadog.db /root/.chia/chiadog/dbs/chiadog.db.bak fi # If old databases not managed by flask-migrate yet @@ -20,10 +19,6 @@ if [ ! -f /root/.chia/machinaris/dbs/.managed ] && [ -f /root/.chia/machinaris/d rm -f machinaris.db mv stats.db stats.db.old fi -if [ ! -f /root/.chia/chiadog/dbs/.managed ] && [ -f /root/.chia/chiadog/dbs/chiadog.db ]; then - cd /root/.chia/chiadog/dbs - mv chiadog.db chiadog.db.old -fi # Perform database migration, if any cd /machinaris/api @@ -48,10 +43,5 @@ EOF fi touch /root/.chia/machinaris/dbs/.managed -if [ ! 
-f /root/.chia/chiadog/dbs/.managed ] && [ -f /root/.chia/chiadog/dbs/chiadog.db.old ]; then - sqlite3 /root/.chia/chiadog/dbs/chiadog.db.old < {1}".format(value, value.strftime(format))) return value.strftime(format) else: return "" @@ -64,4 +63,10 @@ def plotnameshortener(value): match.group(7)[:20]) return value -app.jinja_env.filters['plotnameshortener'] = plotnameshortener \ No newline at end of file +app.jinja_env.filters['plotnameshortener'] = plotnameshortener + +def launcheridshortener(value): + #app.logger.info("Shorten: {0}".format(value)) + return value[:12] + '...' + +app.jinja_env.filters['launcheridshortener'] = launcheridshortener \ No newline at end of file diff --git a/web/actions/chia.py b/web/actions/chia.py index 0bf1d18f..976d6a74 100644 --- a/web/actions/chia.py +++ b/web/actions/chia.py @@ -4,12 +4,14 @@ import datetime import os +import pexpect import psutil import re import requests import signal import shutil import socket +import sys import time import traceback import urllib @@ -22,10 +24,11 @@ from web import app, db, utils from common.models import farms as f, plots as p, challenges as c, wallets as w, \ - blockchains as b, connections as co, keys as k, plotnfts as pn + blockchains as b, connections as co, keys as k, plotnfts as pn, pools as po, \ + partials as pr from common.config import globals from web.models.chia import FarmSummary, FarmPlots, BlockchainChallenges, Wallets, \ - Blockchains, Connections, Keys, Plotnfts + Blockchains, Connections, Keys, Plotnfts, Pools, Partials from . 
import worker as wk CHIA_BINARY = '/chia-blockchain/venv/bin/chia' @@ -40,10 +43,14 @@ def load_plots_farming(): return FarmPlots(plots) def recent_challenges(): - minute_ago = (datetime.datetime.now() - datetime.timedelta(seconds=80)).strftime("%Y-%m-%d %H:%M:%S.000") - challenges = db.session.query(c.Challenge).filter(c.Challenge.created_at >= minute_ago).order_by(c.Challenge.created_at.desc()) + five_minutes_ago = (datetime.datetime.now() - datetime.timedelta(minutes=5)).strftime("%Y-%m-%d %H:%M:%S.000") + challenges = db.session.query(c.Challenge).filter(c.Challenge.created_at >= five_minutes_ago).order_by(c.Challenge.created_at.desc()).limit(20) return BlockchainChallenges(challenges) +def load_partials(): + partials = db.session.query(pr.Partial).order_by(pr.Partial.created_at.desc()).limit(10) + return Partials(partials) + def load_wallets(): wallets = db.session.query(w.Wallet).all() return Wallets(wallets) @@ -64,6 +71,11 @@ def load_plotnfts(): plotnfts = db.session.query(pn.Plotnft).all() return Plotnfts(plotnfts) +def load_pools(): + plotnfts = db.session.query(pn.Plotnft).all() + pools = db.session.query(po.Pool).all() + return Pools(pools, plotnfts) + def load_farmers(): worker_summary = wk.load_worker_summary() farmers = [] @@ -71,11 +83,13 @@ def load_farmers(): if farmer in worker_summary.farmers: farmers.append({ 'hostname': farmer.hostname, + 'displayname': farmer.displayname, 'farming_status': farmer.farming_status().lower() }) elif farmer in worker_summary.harvesters: farmers.append({ 'hostname': farmer.hostname, + 'displayname': farmer.displayname, 'farming_status': 'harvesting' }) return farmers @@ -312,25 +326,24 @@ def process_pool_save(choice, pool_url, current_pool_url): return False return process_pool_join(choice, pool_url, pool_wallet_id) -def process_pool_leave(choice, wallet_id): - app.logger.info("Attempting to leave pool.") - proc = Popen("{0} plotnft leave -y -i {1}".format(CHIA_BINARY, wallet_id), stdout=PIPE, stderr=PIPE, 
shell=True) +def process_pool_leave(choice, wallet_index): + cmd = "{0} plotnft leave -y -i {1}".format(CHIA_BINARY, wallet_index) + app.logger.info("Attempting to leave pool: {0}".format(cmd)) + result = "" try: - outs, errs = proc.communicate(timeout=90) - except TimeoutExpired: - proc.kill() - proc.communicate() - app.logger.info(traceback.format_exc()) - flash('Timed out while leaving Chia pool!', 'danger') - flash(str(ex), 'warning') - return False - if errs: - app.logger.info("{0}".format(errs.decode('utf-8'))) - flash('Error while leaving Chia pool.', 'danger') - flash(errs.decode('utf-8'), 'warning') - return False - if outs: # Chia outputs their errors to stdout, not stderr, so must check. - stdout_lines = outs.decode('utf-8').splitlines() + child = pexpect.spawn(cmd) + child.logfile = sys.stdout.buffer + while True: + i = child.expect(["Choose wallet key:.*\r\n", pexpect.EOF]) + if i == 0: + app.logger.info("plotnft got index prompt so selecting #{0}".format(wallet_index)) + child.sendline("{0}".format(wallet_index)) + elif i==1: + app.logger.info("plotnft end of output...") + result += child.before.decode("utf-8") + child.read().decode("utf-8") + break + if result: # Chia outputs their errors to stdout, not stderr, so must check. 
+ stdout_lines = result.splitlines() out_file = '/root/.chia/mainnet/log/plotnft.log' with open(out_file, 'a') as f: f.write("\nchia plotnft plotnft leave -y -i 1 --> Executed at: {0}\n".format(time.strftime("%Y%m%d-%H%M%S"))) @@ -342,6 +355,11 @@ def process_pool_leave(choice, wallet_id): flash('Error while leaving Chia pool.', 'danger') flash(line, 'warning') return False + except Exception as ex: + app.logger.info(traceback.format_exc()) + print(str(child)) + flash(str(ex), 'danger') + return False time.sleep(15) try: # Trigger a status update requests.get("http://localhost:8927/plotnfts/", timeout=5) @@ -356,6 +374,8 @@ def process_pool_join(choice, pool_url, pool_wallet_id): try: if not pool_url.strip(): raise Exception("Empty pool URL provided.") + if not pool_url.startswith('https://') and not pool_url.startswith('http://'): + pool_url = "https://" + pool_url result = urllib.parse.urlparse(pool_url) if result.scheme != 'https': raise Exception("Non-HTTPS scheme provided.") @@ -367,37 +387,42 @@ def process_pool_join(choice, pool_url, pool_wallet_id): return False if pool_wallet_id: # Just joining a pool with existing NFT cmd = "{0} plotnft join -y -u {1} -i {2}".format(CHIA_BINARY, pool_url, pool_wallet_id) + wallet_index = pool_wallet_id else: # Both creating NFT and joining pool in one setp cmd = "{0} plotnft create -y -u {1} -s pool".format(CHIA_BINARY, pool_url) + wallet_index = 1 app.logger.info("Executing: {0}".format(cmd)) - proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) + result = "" try: - outs, errs = proc.communicate(timeout=90) - except TimeoutExpired: - proc.kill() - proc.communicate() + child = pexpect.spawn(cmd) + child.logfile = sys.stdout.buffer + while True: + i = child.expect(["Choose wallet key:.*\r\n", pexpect.EOF]) + if i == 0: + app.logger.info("plotnft got index prompt so selecting #{0}".format(wallet_index)) + child.sendline("{0}".format(wallet_index)) + elif i==1: + app.logger.info("plotnft end of output...") + result += 
child.before.decode("utf-8") + child.read().decode("utf-8") + break + if result: # Chia outputs their errors to stdout, not stderr, so must check. + stdout_lines = result.splitlines() + out_file = '/root/.chia/mainnet/log/plotnft.log' + with open(out_file, 'a') as f: + f.write("\n{0} --> Executed at: {1}\n".format(cmd, time.strftime("%Y%m%d-%H%M%S"))) + for line in stdout_lines: + f.write(line) + f.write("\n**********************************************************************\n") + for line in stdout_lines: + if "Error" in line: + flash('Error while joining Chia pool. Please double-check pool URL: {0}'.format(pool_url), 'danger') + flash(line, 'warning') + return False + except Exception as ex: app.logger.info(traceback.format_exc()) - flash('Timed out while joining Chia pool!', 'danger') - flash(str(ex), 'warning') + print(str(child)) + flash(str(ex), 'danger') return False - if errs: - app.logger.info("{0}".format(errs.decode('utf-8'))) - flash('Error while joining Chia pool. Please double-check pool URL: {0}'.format(pool_url), 'danger') - flash(errs.decode('utf-8'), 'warning') - return False - if outs: # Chia outputs their errors to stdout, not stderr, so must check. - stdout_lines = outs.decode('utf-8').splitlines() - out_file = '/root/.chia/mainnet/log/plotnft.log' - with open(out_file, 'a') as f: - f.write("\n{0} --> Executed at: {1}\n".format(cmd, time.strftime("%Y%m%d-%H%M%S"))) - for line in stdout_lines: - f.write(line) - f.write("\n**********************************************************************\n") - for line in stdout_lines: - if "Error" in line: - flash('Error while joining Chia pool. 
Please double-check pool URL: {0}'.format(pool_url), 'danger') - flash(line, 'warning') - return False time.sleep(15) try: # Trigger a status update requests.get("http://localhost:8927/plotnfts/", timeout=5) @@ -408,26 +433,23 @@ def process_self_pool(): - app.logger.info("Attempting to create NFT for self-pooling.") cmd = "{0} plotnft create -y -s local".format(CHIA_BINARY) - app.logger.info("Executing: {0}".format(cmd)) - proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) + app.logger.info("Attempting to create NFT for self-pooling. {0}".format(cmd)) + result = "" try: - outs, errs = proc.communicate(timeout=90) - except TimeoutExpired: - proc.kill() - proc.communicate() - app.logger.info(traceback.format_exc()) - flash('Timed out while creating NFT!', 'danger') - flash(str(ex), 'warning') - return False - if errs: - app.logger.info("{0}".format(errs.decode('utf-8'))) - flash('Error while creating NFT.', 'danger') - flash(errs.decode('utf-8'), 'warning') - return False - if outs: # Chia outputs their errors to stdout, not stderr, so must check. - stdout_lines = outs.decode('utf-8').splitlines() + child = pexpect.spawn(cmd) + child.logfile = sys.stdout.buffer + while True: + i = child.expect(["Choose wallet key:.*\r\n", pexpect.EOF]) + if i == 0: + app.logger.info("plotnft got index prompt so selecting #1") + child.sendline("1") + elif i==1: + app.logger.info("plotnft end of output...") + result += child.before.decode("utf-8") + child.read().decode("utf-8") + break + if result: # Chia outputs their errors to stdout, not stderr, so must check.
+ stdout_lines = result.splitlines() out_file = '/root/.chia/mainnet/log/plotnft.log' with open(out_file, 'a') as f: f.write("\n{0} --> Executed at: {1}\n".format(cmd, time.strftime("%Y%m%d-%H%M%S"))) @@ -439,6 +461,11 @@ def process_self_pool(): flash('Error while creating self-pooling NFT', 'danger') flash(line, 'warning') return False + except Exception as ex: + app.logger.info(traceback.format_exc()) + print(str(child)) + flash(str(ex), 'danger') + return False time.sleep(15) try: # Trigger a status update requests.get("http://localhost:8927/plotnfts/", timeout=5) diff --git a/web/actions/chiadog.py b/web/actions/chiadog.py index 21f8f9ad..f5d621f3 100644 --- a/web/actions/chiadog.py +++ b/web/actions/chiadog.py @@ -17,6 +17,7 @@ from common.models import alerts as a from web import app, db, utils +from web.models.chiadog import Alerts from . import worker as wk def load_config(farmer, blockchain): @@ -29,6 +30,7 @@ def load_farmers(): if (farmer in worker_summary.farmers) or (farmer in worker_summary.harvesters): farmers.append({ 'hostname': farmer.hostname, + 'displayname': farmer.displayname, 'monitoring_status': farmer.monitoring_status().lower() }) return farmers @@ -49,7 +51,8 @@ def save_config(farmer, blockchain, config): flash('Nice! 
Chiadog\'s config.yaml validated and saved successfully.', 'success') def get_notifications(): - return db.session.query(a.Alert).order_by(a.Alert.created_at.desc()).all() + alerts = db.session.query(a.Alert).order_by(a.Alert.created_at.desc()).all() + return Alerts(alerts) def remove_alerts(unique_ids): app.logger.info("Removing {0} alerts: {1}".format(len(unique_ids), unique_ids)) diff --git a/web/actions/plotman.py b/web/actions/plotman.py index aacb1486..7e03cf66 100644 --- a/web/actions/plotman.py +++ b/web/actions/plotman.py @@ -35,6 +35,7 @@ def load_plotters(): for plotter in w.load_worker_summary().plotters: plotters.append({ 'hostname': plotter.hostname, + 'displayname': plotter.displayname, 'plotting_status': plotter.plotting_status(), 'archiving_status': plotter.archiving_status(), 'archiving_enabled': plotter.archiving_enabled() @@ -145,11 +146,11 @@ def load_key_pk(type): def load_pool_contract_address(): plotnfts = c.load_plotnfts() if len(plotnfts.rows) == 1: - m = re.search('P2 singleton address .*: (\w+)'.format(type), plotnfts.rows[0]['details']) + m = re.search('Pool contract address .*: (\w+)'.format(type), plotnfts.rows[0]['details']) if m: return m.group(1) elif len(plotnfts.rows) > 1: - app.logger.info("Did not find a unique P2 singleton address as multiple plotnfts exist. Not replacing in plotman.yaml.") + app.logger.info("Did not find a unique Pool contract address as multiple plotnfts exist. Not replacing in plotman.yaml.") return None def load_config_replacements(): diff --git a/web/actions/stats.py b/web/actions/stats.py new file mode 100644 index 00000000..0aee2143 --- /dev/null +++ b/web/actions/stats.py @@ -0,0 +1,120 @@ +# +# Access to statistics and calculated values. 
+# + +import datetime + +from common.utils import converters +from common.models.alerts import Alert +from common.models.stats import StatPlotCount, StatPlotsSize, StatTotalChia, StatNetspaceSize, StatTimeToWin, \ + StatPlotsTotalUsed, StatPlotsDiskUsed, StatPlotsDiskFree, StatPlottingTotalUsed, \ + StatPlottingDiskUsed, StatPlottingDiskFree +from web import app, db, utils + +def load_daily_diff(): + summary = {} + # initialize defaults + since_date = datetime.datetime.now() - datetime.timedelta(hours=24) + since_str = since_date.strftime("%Y%m%d%H%M%S") + summary['plot_count'] = plot_count_diff(since_str) + summary['plots_size'] = plots_size_diff(since_str) + summary['total_chia'] = total_coin_diff(since_str, 'chia') + summary['total_flax'] = total_coin_diff(since_str, 'flax') + summary['netspace_chia'] = netspace_size_diff(since_str, 'chia') + summary['netspace_flax'] = netspace_size_diff(since_str, 'flax') + return summary + +def plot_count_diff(since): + result = '' + try: + latest = db.session.query(StatPlotCount).order_by(StatPlotCount.created_at.desc()).limit(1).first() + #app.logger.info(latest.value) + before = db.session.query(StatPlotCount).filter(StatPlotCount.created_at <= since).order_by(StatPlotCount.created_at.desc()).limit(1).first() + #app.logger.info(before.value) + if (latest.value - before.value) != 0: + result = "%+0g in last day." 
% (latest.value - before.value) + except Exception as ex: + app.logger.info("Failed to query for day diff of plot_count because {0}".format(str(ex))) + #app.logger.info("Result is: {0}".format(result)) + return result + +def plots_size_diff(since): + result = '' + try: + latest = db.session.query(StatPlotsSize).order_by(StatPlotsSize.created_at.desc()).limit(1).first() + #app.logger.info(latest.value) + before = db.session.query(StatPlotsSize).filter(StatPlotsSize.created_at <= since).order_by(StatPlotsSize.created_at.desc()).limit(1).first() + #app.logger.info(before.value) + gibs = (latest.value - before.value) + fmtted = converters.gib_to_fmt(gibs) + if fmtted == "0.000 B": + result = "" + elif not fmtted.startswith('-'): + result = "+{0} in last day.".format(fmtted) + else: + result = fmtted + except Exception as ex: + app.logger.info("Failed to query for day diff of plots_size because {0}".format(str(ex))) + #app.logger.info("Result is: {0}".format(result)) + return result + +def total_coin_diff(since, blockchain): + result = '' + try: + latest = db.session.query(StatTotalChia).filter(StatTotalChia.blockchain==blockchain).order_by(StatTotalChia.created_at.desc()).limit(1).first() + #app.logger.info(latest.value) + before = db.session.query(StatTotalChia).filter(StatTotalChia.blockchain==blockchain, StatTotalChia.created_at <= since).order_by(StatTotalChia.created_at.desc()).limit(1).first() + #app.logger.info(before.value) + if (latest.value - before.value) != 0: + result = "%+6g in last day." 
% (latest.value - before.value) + except Exception as ex: + app.logger.info("Failed to query for day diff of total_chia because {0}".format(str(ex))) + #app.logger.info("Result is: {0}".format(result)) + return result + +def netspace_size_diff(since, blockchain): + result = '' + try: + latest = db.session.query(StatNetspaceSize).filter(StatNetspaceSize.blockchain==blockchain).order_by(StatNetspaceSize.created_at.desc()).limit(1).first() + #app.logger.info(latest.value) + before = db.session.query(StatNetspaceSize).filter(StatNetspaceSize.blockchain==blockchain, StatNetspaceSize.created_at <= since).order_by(StatNetspaceSize.created_at.desc()).limit(1).first() + #app.logger.info(before.value) + gibs = (latest.value - before.value) + fmtted = converters.gib_to_fmt(gibs) + if fmtted == "0.000 B": + result = "" + elif not fmtted.startswith('-'): + result = "+{0} in last day.".format(fmtted) + else: + result = "{0} in last day.".format(fmtted) + except Exception as ex: + app.logger.info("Failed to query for day diff of netspace_size because {0}".format(str(ex))) + #app.logger.info("Result is: {0}".format(result)) + return result + +def load_daily_notifications(): + summary = {} + # initialize defaults + since_date = datetime.datetime.now() - datetime.timedelta(hours=24) + summary['daily_summary_chia'] = daily_notifications(since_date, 'chia') + summary['daily_summary_flax'] = daily_notifications(since_date, 'flax') + #app.logger.info(summary) + return summary + +def daily_notifications(since, blockchain): + result = [] + try: + #app.logger.info(since) + dailys = db.session.query(Alert).filter( + Alert.blockchain==blockchain, + Alert.created_at >= since, + Alert.priority == "LOW", + Alert.service == "DAILY" + ).order_by(Alert.created_at.desc()).all() + for daily in dailys: + #app.logger.info("{0} at {1}".format(daily.hostname, daily.created_at)) + result.append(daily) + except Exception as ex: + app.logger.info("Failed to query for latest daily summary because 
{0}".format(str(ex))) + result.sort(key=lambda daily: daily.hostname, reverse=False) + return result \ No newline at end of file diff --git a/web/actions/worker.py b/web/actions/worker.py index d976f03f..f450afd1 100644 --- a/web/actions/worker.py +++ b/web/actions/worker.py @@ -27,8 +27,10 @@ 'connections', 'farms', 'keys', + 'plotnfts', 'plots', 'plottings', + 'pools', 'wallets', 'workers' ] @@ -44,6 +46,8 @@ def get_worker_by_hostname(hostname): def prune_workers_status(hostnames): for hostname in hostnames: + worker = get_worker_by_hostname(hostname) for table in ALL_TABLES_BY_HOSTNAME: - db.session.execute("DELETE FROM " + table + " WHERE hostname = :hostname", {"hostname":hostname}) + db.session.execute("DELETE FROM " + table + " WHERE hostname = :hostname OR hostname = :displayname", + {"hostname":hostname, "displayname":worker.displayname}) db.session.commit() diff --git a/web/default_settings.py b/web/default_settings.py index 8b3e3535..9f19d0d4 100644 --- a/web/default_settings.py +++ b/web/default_settings.py @@ -4,6 +4,9 @@ class DefaultConfig: API_TITLE = "Machinaris WEB" SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_DATABASE_URI = 'sqlite:////root/.chia/machinaris/dbs/machinaris.db' + SQLALCHEMY_BINDS = { + 'stats': 'sqlite:////root/.chia/machinaris/dbs/stats.db' + } SQLALCHEMY_ECHO = True if 'FLASK_ENV' in os.environ and os.environ['FLASK_ENV'] == "development" else False CONTROLLER_SCHEME = 'http' CONTROLLER_HOST = os.environ['controller_host'] if 'controller_host' in os.environ else 'localhost' diff --git a/web/models/chia.py b/web/models/chia.py index 16cdb304..b307e887 100644 --- a/web/models/chia.py +++ b/web/models/chia.py @@ -1,3 +1,4 @@ +import json import os import traceback @@ -12,17 +13,17 @@ class FarmSummary: def __init__(self, farms): - self.status = "Unknown" + self.status = "-" self.plot_count = 0 self.plots_size = 0 self.total_chia = 0 self.total_flax = 0 self.netspace_size = 0 self.flax_netspace_size = 0 - 
self.netspace_display_size = "?" - self.flax_netspace_display_size = "?" - self.expected_time_to_win = "Unknown" - self.flax_expected_time_to_win = "Unknown" + self.netspace_display_size = "-" + self.flax_netspace_display_size = "-" + self.expected_time_to_win = "-" + self.flax_expected_time_to_win = "-" fullnode_plots_size = 0 for farm in farms: self.plot_count += farm.plot_count @@ -38,6 +39,7 @@ def __init__(self, farms): self.flax_netspace_display_size = '?' if not farm.flax_netspace_size else converters.gib_to_fmt(farm.flax_netspace_size) self.flax_netspace_size = farm.flax_netspace_size self.flax_expected_time_to_win = farm.flax_expected_time_to_win + app.logger.debug("ETW: {0}".format(self.expected_time_to_win)) self.plots_display_size = converters.gib_to_fmt(self.plots_size) self.calc_status(self.status) @@ -59,12 +61,12 @@ def calc_entire_farm_flax_etw(self, fullnode_plots_size, expected_time_to_win, t self.flax_expected_time_to_win = converters.format_minutes(int(total_farm_etw_mins)) except: app.logger.debug("Failed to calculate ETW for entire farm due to: {0}".format(traceback.format_exc())) - self.expected_time_to_win = "Unknown" + self.flax_expected_time_to_win = "-" class FarmPlots: def __init__(self, plots): - self.columns = ['worker', 'plot_id', 'dir', 'plot', 'create_date', 'size'] + self.columns = ['worker', 'plot_id', 'dir', 'plot', 'type', 'create_date', 'size' ] self.rows = [] plots_by_id = {} for plot in plots: @@ -80,7 +82,8 @@ def __init__(self, plots): 'dir': plot.dir, \ 'plot': plot.file, \ 'create_date': plot.created_at, \ - 'size': plot.size }) + 'size': plot.size, \ + 'type': plot.type if plot.type else "" }) class BlockchainChallenges: @@ -133,7 +136,7 @@ def __init__(self, keys): class Blockchains: def __init__(self, blockchains): - self.columns = ['hostname', 'details', 'updated_at'] + self.columns = ['hostname', 'blockchain', 'details', 'updated_at'] self.rows = [] for blockchain in blockchains: self.rows.append({ @@ -141,6 +144,20 
@@ def __init__(self, blockchains): 'blockchain': blockchain.blockchain, 'details': blockchain.details, 'updated_at': blockchain.updated_at }) + +class Partials: + + def __init__(self, partials): + self.columns = ['hostname', 'blockchain', 'launcher_id', 'pool_url', 'pool_response', 'created_at'] + self.rows = [] + for partial in partials: + self.rows.append({ + 'hostname': partial.hostname, + 'blockchain': partial.blockchain, + 'launcher_id': partial.launcher_id, + 'pool_url': partial.pool_url, + 'pool_response': partial.pool_response, + 'created_at': partial.created_at }) class Connections: @@ -149,7 +166,8 @@ def __init__(self, connections): for connection in connections: self.rows.append({ 'hostname': connection.hostname, - 'blockchain': connection.blockchain, + 'blockchain': connection.blockchain, + 'protocol_port': '8444' if connection.blockchain == 'chia' else '6888', 'details': connection.details }) @@ -206,3 +224,44 @@ def get_current_pool_url(self): elif "Target state: SELF_POOLING" in line: return None # Switching back to self-pooling, no pool_url return pool_url + +class Pools: + + def __init__(self, pools, plotnfts): + self.columns = ['hostname', 'blockchain', 'pool_state', 'updated_at'] + self.rows = [] + for pool in pools: + launcher_id = pool.launcher_id + plotnft = self.find_plotnft(plotnfts, launcher_id) + updated_at = pool.updated_at or datetime.now() + pool_state = json.loads(pool.pool_state) + if plotnft: + status = self.extract_plotnft_value(plotnft, "Current state:") + points_successful_last_24h = self.extract_plotnft_value(plotnft, "Percent Successful Points (24h)") + else: + status = "-" + pool_errors_24h = len(pool_state['pool_errors_24h']) + points_found_24h = len(pool_state['points_found_24h']) + points_successful_last_24h = "%.2f"% ( (points_found_24h - pool_errors_24h) / points_found_24h * 100) if points_found_24h else "-" + self.rows.append({ + 'hostname': pool.hostname, + 'launcher_id': pool.launcher_id, + 'login_link': pool.login_link, + 'blockchain':
pool.blockchain, + 'pool_state': pool_state, + 'updated_at': pool.updated_at, + 'status': status, + 'points_successful_last_24h': points_successful_last_24h + }) + + def find_plotnft(self, plotnfts, launcher_id): + for plotnft in plotnfts: + if launcher_id in plotnft.details: + return plotnft + return None + + def extract_plotnft_value(self, plotnft, key): + for line in plotnft.details.splitlines(): + if line.startswith(key): + return line[line.index(':')+1:].strip() + return None \ No newline at end of file diff --git a/web/models/chiadog.py b/web/models/chiadog.py new file mode 100644 index 00000000..baab0dbb --- /dev/null +++ b/web/models/chiadog.py @@ -0,0 +1,26 @@ +import traceback + +from web import app +from web.actions import worker + +class Alerts: + + def __init__(self, alerts): + self.rows = [] + for alert in alerts: + displayname = alert.hostname + try: + w = worker.get_worker_by_hostname(alert.hostname) + displayname = w.displayname + except: + app.logger.info("Failed to find worker for hostname: {0}".format(alert.hostname)) + self.rows.append({ + 'unique_id': alert.unique_id, + 'hostname': alert.hostname, + 'worker': displayname, + 'blockchain': alert.blockchain, + 'service': alert.service, + 'message': alert.message, + 'priority': alert.priority, + 'created_at': alert.created_at + }) \ No newline at end of file diff --git a/web/models/worker.py b/web/models/worker.py index 5057fa20..69e79b8b 100644 --- a/web/models/worker.py +++ b/web/models/worker.py @@ -1,8 +1,11 @@ +import json import os import traceback from datetime import datetime +from common.config import globals + from web import app class WorkerSummary: @@ -25,6 +28,30 @@ def __init__(self, workers): self.farmers_harvesters.append(worker) if worker.mode == "fullnode": self.fullnodes.append(worker) + config = json.loads(worker.config) + worker.versions = {} + if 'machinaris_version' in config: + worker.versions['machinaris'] = config['machinaris_version'] + other_versions = "" + if 
'chia_version' in config: + other_versions += "Chia: " + config['chia_version'] + "
" + if 'chiadog_version' in config: + other_versions += "Chiadog: " + config['chiadog_version'] + "
" + gc = globals.load() + if gc['flax_enabled']: + if 'flax_version' in config: + other_versions += "Flax: " + config['flax_version'] + "
" + if 'flaxdog_version' in config: + other_versions += "Flaxdog: " + config['flaxdog_version'] + "
" + if 'madmax_version' in config: + other_versions += "Madmax: " + config['madmax_version'] + "
" + if 'plotman_version' in config: + other_versions += "Plotman: " + config['plotman_version'] + worker.versions['components'] = other_versions + if 'now' in config: + worker.time_on_worker = config['now'] + else: + worker.time_on_worker = '?' def set_ping_response(self, response): self.ping_response = response diff --git a/web/routes.py b/web/routes.py index 2a1711b2..7c25e66c 100644 --- a/web/routes.py +++ b/web/routes.py @@ -1,4 +1,5 @@ +import pathlib import pytz import os import time @@ -9,7 +10,7 @@ from common.config import globals from web import app, utils -from web.actions import chia, plotman, chiadog, worker, log_handler +from web.actions import chia, plotman, chiadog, worker, log_handler, stats @app.route('/') def landing(): @@ -29,8 +30,11 @@ def index(): farming = chia.load_farm_summary() plotting = plotman.load_plotting_summary() challenges = chia.recent_challenges() + partials = chia.load_partials() + daily_diff = stats.load_daily_diff() return render_template('index.html', reload_seconds=60, farming=farming.__dict__, \ - plotting=plotting.__dict__, challenges=challenges, workers=workers, global_config=gc) + plotting=plotting.__dict__, challenges=challenges, workers=workers, + daily_diff=daily_diff, partials=partials, global_config=gc) @app.route('/views/challenges') def views_challenges(): @@ -102,8 +106,9 @@ def farming(): farmers = chia.load_farmers() farming = chia.load_farm_summary() plots = chia.load_plots_farming() + daily_notifications = stats.load_daily_notifications() return render_template('farming.html', farming=farming, plots=plots, - farmers=farmers, global_config=gc) + farmers=farmers, daily_notifications=daily_notifications, global_config=gc) @app.route('/plots_check') def plots_check(): @@ -272,9 +277,17 @@ def logfile(): @app.route('/worker_launch') def worker_launch(): [farmer_pk, pool_pk, pool_contract_address] = plotman.load_plotting_keys() + pathlib.Path('/root/.chia/machinaris/tmp/').mkdir(parents=True, exist_ok=True) + 
pathlib.Path('/root/.chia/machinaris/tmp/worker_launch.tmp').touch() return render_template('worker_launch.html', farmer_pk=farmer_pk, pool_pk=pool_pk, pool_contract_address=pool_contract_address) +@app.route('/pools') +def pools(): + gc = globals.load() + pools = chia.load_pools() + return render_template('pools.html', pools=pools, global_config=gc) + @app.route('/favicon.ico') def favicon(): return send_from_directory(os.path.join(app.root_path, 'static'), diff --git a/web/templates/alerts.html b/web/templates/alerts.html index 117ca23b..200ecb58 100644 --- a/web/templates/alerts.html +++ b/web/templates/alerts.html @@ -29,7 +29,7 @@
+ + {% endif %} + {% endblock %} {% block scripts %} {% if global_config.farming_enabled %} {% endif %} {% endblock %} \ No newline at end of file diff --git a/web/templates/network/connections.html b/web/templates/network/connections.html index 7ddd8b90..74e6182f 100644 --- a/web/templates/network/connections.html +++ b/web/templates/network/connections.html @@ -48,6 +48,11 @@ {% if connections.rows|length > 0 %} {% for connection in connections.rows %}
+ + + Check Port Forwarding + +

{{connection.hostname}} - {{connection.blockchain}}

{{connection.details}}
diff --git a/web/templates/plotting.html b/web/templates/plotting.html index 9b2abdf3..164e9e45 100644 --- a/web/templates/plotting.html +++ b/web/templates/plotting.html @@ -56,7 +56,7 @@