diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5737055..a9953cb 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -2,7 +2,6 @@ version: 2 updates: - - package-ecosystem: "github-actions" directory: "/" schedule: diff --git a/.github/workflows/cd-release.yml b/.github/workflows/CD_Release.yml similarity index 69% rename from .github/workflows/cd-release.yml rename to .github/workflows/CD_Release.yml index 5996b2b..1c17ddf 100644 --- a/.github/workflows/cd-release.yml +++ b/.github/workflows/CD_Release.yml @@ -1,9 +1,9 @@ -# Continious Deployment - Production +# Continuous Deployment - Production name: Release - Version & Deploy on: - push: - branches: [ release ] + release: + types: [published] workflow_dispatch: @@ -14,7 +14,7 @@ jobs: steps: - name: Release Webhook - uses: distributhor/workflow-webhook@v1 + uses: distributhor/workflow-webhook@v3 env: webhook_url: "https://recursion.space/webhooks/github/" webhook_secret: "Y0uR5ecr3t" diff --git a/.github/workflows/CD_dev-release.yml b/.github/workflows/CD_dev-release.yml new file mode 100644 index 0000000..dba6175 --- /dev/null +++ b/.github/workflows/CD_dev-release.yml @@ -0,0 +1,102 @@ +name: CD | Dev Release + +# Performs the task of merging from master to the dev-release branch. +# Pings the server to pull an update once the dev-release branch has been merged. + +on: + push: + branches: [master] + + workflow_dispatch: + +jobs: + + merge: + name: master -> master-ci + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Wait on check + uses: fountainhead/action-wait-for-check@v1.1.0 + id: wait-for-build + + with: + token: ${{ secrets.GITHUB_TOKEN }} + checkName: build + ref: ${{ github.event.pull_request.head.sha || github.sha }} + + - name: Create Pull Request + + if: steps.wait-for-build.outputs.conclusion == 'success' + uses: repo-sync/pull-request@v2 + with: + destination_branch: "master-ci" + github_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Merge master -> master-ci + + if: steps.wait-for-build.outputs.conclusion == 'success' + uses: devmasx/merge-branch@1.4.0 + with: + type: now + target_branch: master-ci + github_token: ${{ secrets.GITHUB_TOKEN }} + + + + # master-ci -> dev-release + sanitize: + name: master-ci -> dev-release + runs-on: ubuntu-latest + + steps: + - name: Checkout master-ci + uses: actions/checkout@v3 + with: + ref: master-ci + fetch-depth: 0 + + - name: Wait on Merge + uses: fountainhead/action-wait-for-check@v1.1.0 + id: wait-for-merge + with: + token: ${{ secrets.GITHUB_TOKEN }} + checkName: master -> master-ci + ref: ${{ github.event.pull_request.head.sha || github.sha }} + + - name: Checkout dev-release + uses: actions/checkout@v3 + with: + ref: dev-release + fetch-depth: 0 + + - name: Remove Bloat + id: remove-bloat + if: steps.wait-for-merge.outputs.conclusion == 'success' + run: | + git config user.name github-actions + git config user.email github-actions@github.com + + git checkout dev-release + git pull origin dev-release + + git merge origin/master-ci + + rm -rf LICENSE + rm -rf README.md + rm -rf docs + rm -rf tests + + git add . 
+ + git diff-index --quiet HEAD || git commit -m "Merge master-ci into dev-release and remove unnecessary files" + + git push origin dev-release + + - name: Webhook + uses: distributhor/workflow-webhook@v3 + env: + webhook_url: "https://dev.recursion.space/webhooks/github/" + webhook_secret: "Y0uR5ecr3t" diff --git a/.github/workflows/CI_E2E.yml b/.github/workflows/CI_E2E.yml new file mode 100644 index 0000000..4abcce2 --- /dev/null +++ b/.github/workflows/CI_E2E.yml @@ -0,0 +1,46 @@ +name: CI | E2E Integration + +on: + push: + branches: + - '**' + - '!master-ci' + - '!release' + + pull_request: + branches: + - master + + workflow_dispatch: + +jobs: + setup: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - uses: webfactory/ssh-agent@v0.7.0 + with: + ssh-private-key: ${{ secrets.RECURSION_SPACE_ACCESS_TOKEN}} + + - name: Setup Server + run: | + sudo apt-get update -y && sudo apt-get upgrade -y + sudo apt install software-properties-common -y + sudo add-apt-repository ppa:deadsnakes/ppa + sudo apt install python3.11 -y + cd /opt/ + + git clone --single-branch --branch release git@github.com:RecursionSpace/RecursionSpace.git + + sudo apt-get install python3.11-venv -y + sudo python3.11 -m venv /opt/RecursionSpace/env + source /opt/RecursionSpace/env/bin/activate + + /opt/RecursionSpace/env/bin/python3.11 -m pip install --upgrade pip + + sudo pip install --no-input -U -r /opt/RecursionSpace/requirements.txt --no-cache-dir + + cd /opt/RecursionSpace/ + sudo python manage.py migrate --noinput diff --git a/.github/workflows/pylint.yml b/.github/workflows/CI_Pylint.yml similarity index 65% rename from .github/workflows/pylint.yml rename to .github/workflows/CI_Pylint.yml index 3770517..761c2d4 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/CI_Pylint.yml @@ -1,4 +1,4 @@ -name: Code Quality +name: CI | Pylint on: [push] @@ -8,11 +8,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v1 + - uses: actions/checkout@v3 + - name: Set up Python 3.11 + uses: actions/setup-python@v4.5.0 with: - python-version: 3.8 + python-version: 3.11 - name: Install dependencies run: | @@ -21,9 +21,8 @@ jobs: pip install pylint-exit if [ -f requirements.txt ]; then pip3 install -r requirements.txt; fi - - name: Analysing the code with pylint + - name: Analyzing the code with Pylint run: | - pylint `ls -r|grep .py$|xargs` --disable=import-error,E1101 || pylint-exit --error-fail --warn-fail --convention-fail $? pylint openpod `ls -r|grep .py$|xargs` --disable=import-error,E1101 || pylint-exit --error-fail --warn-fail --convention-fail $? pylint tests `ls -r|grep .py$|xargs` --disable=import-error,E1101,R0801 || pylint-exit --error-fail --warn-fail --convention-fail $? 
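The CI_Pylint job ends each pylint call with `|| pylint-exit ... $?` because pylint exits non-zero whenever it emits any message at all, which would otherwise fail the step unconditionally; pylint-exit reads the bit flags packed into that exit status and fails the step only for selected categories. A minimal Python sketch of the decoding idea (illustrative only, not pylint-exit's actual implementation; the default categories are chosen to mirror the `--error-fail --warn-fail --convention-fail` flags used above):

```python
# Sketch: interpreting pylint's bit-flag exit status (what pylint-exit automates).
# Pylint ORs these flags into its exit code when the corresponding messages occur.
import sys

PYLINT_FLAGS = {
    1: "fatal message issued",
    2: "error message issued",
    4: "warning message issued",
    8: "refactor message issued",
    16: "convention message issued",
    32: "usage error",
}


def should_fail(exit_status: int, fail_on=(1, 2, 4, 16)) -> bool:
    """Report which categories fired and decide pass/fail.

    fail_on roughly mirrors --error-fail --warn-fail --convention-fail,
    with fatal (1) always treated as a failure.
    """
    for bit, name in PYLINT_FLAGS.items():
        if exit_status & bit:
            print(name)
    return any(exit_status & bit for bit in fail_on)


if __name__ == "__main__":
    # Usage: python pylint_exit_sketch.py $?   (run right after a pylint invocation)
    sys.exit(1 if should_fail(int(sys.argv[1])) else 0)
```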
diff --git a/.github/workflows/ShellCheck.yml b/.github/workflows/CI_ShellCheck.yml similarity index 54% rename from .github/workflows/ShellCheck.yml rename to .github/workflows/CI_ShellCheck.yml index 3296ecc..0666ae4 100644 --- a/.github/workflows/ShellCheck.yml +++ b/.github/workflows/CI_ShellCheck.yml @@ -1,19 +1,26 @@ -name: Script Check +name: CI | Script Check on: push: + branches: + - '**' + - 'master' + - '!master-ci' + - '!release' + + pull_request: branches: - master - - main workflow_dispatch: + jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run Shellcheck uses: azohra/shell-linter@latest diff --git a/.github/workflows/CI_TestInstaller.yml b/.github/workflows/CI_TestInstaller.yml new file mode 100644 index 0000000..a91ff44 --- /dev/null +++ b/.github/workflows/CI_TestInstaller.yml @@ -0,0 +1,16 @@ +name: CI | Installer + +on: [push] + +jobs: + install: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Run Installer + run: | + sudo chmod +x installer.sh + sudo ./installer.sh diff --git a/.github/workflows/RecursionTests.yml b/.github/workflows/CI_Tests.yml similarity index 86% rename from .github/workflows/RecursionTests.yml rename to .github/workflows/CI_Tests.yml index be6c2c6..ea44a2f 100644 --- a/.github/workflows/RecursionTests.yml +++ b/.github/workflows/CI_Tests.yml @@ -1,7 +1,7 @@ # This workflow will install Python dependencies, run tests and lint with a single version of Python # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions -name: Recursion.Space Tests +name: CI | Build & Test on: push: branches: [ master ] @@ -14,11 +14,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - name: Set up Python 3.11 + uses: actions/setup-python@v4.5.0 with: - python-version: 3.9 + python-version: 3.11 - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/CI_VerifyInstaller.yml b/.github/workflows/CI_VerifyInstaller.yml new file mode 100644 index 0000000..cf7cba2 --- /dev/null +++ b/.github/workflows/CI_VerifyInstaller.yml @@ -0,0 +1,35 @@ +name: CI | Verify Bash Installer + +on: + push: + branches: + - '**' + - 'master' + - '!master-ci' + - '!release' + + pull_request: + branches: + - master + + workflow_dispatch: + + +jobs: + install: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Run Bash Installer + run: | + sudo /bin/bash installer.sh + sudo systemd-analyze verify openpod.service + + # - name: Validate JSON + # uses: limitusus/json-syntax-check@v1 + # env: + # BASE: '/opt/OpenPod' + # with: + # pattern: "\\.json$" diff --git a/.github/workflows/TestInstaller.yml b/.github/workflows/TestInstaller.yml deleted file mode 100644 index f8b0674..0000000 --- a/.github/workflows/TestInstaller.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Test Installer - -on: [push] - -jobs: - install: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v1 - with: - python-version: 3.8 - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install python-crontab - - - name: Run Installer - run: | - sudo python installer.py diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 9ba7e79..5074934 100644 --- 
a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -38,11 +38,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -53,7 +53,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v1 + uses: github/codeql-action/autobuild@v2 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -67,4 +67,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/merge_to-dev-release.yml b/.github/workflows/merge_to-dev-release.yml deleted file mode 100644 index 9f46817..0000000 --- a/.github/workflows/merge_to-dev-release.yml +++ /dev/null @@ -1,52 +0,0 @@ -# Performs the task of merging from master to the dev-release branch. -# Pings the server to pull an update once the dev-release branch has been merged. - -name: Dev Merge & Deploy -on: - push: - branches: [ master ] - - workflow_dispatch: - -jobs: - - merge: - name: Merge To dev-release - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Wait on check - uses: fountainhead/action-wait-for-check@v1.0.0 - id: wait-for-build - - with: - token: ${{ secrets.GITHUB_TOKEN }} - checkName: build - ref: ${{ github.event.pull_request.head.sha || github.sha }} - - - name: Create Pull Request - - if: steps.wait-for-build.outputs.conclusion == 'success' - uses: repo-sync/pull-request@v2 - with: - destination_branch: "dev-release" - github_token: ${{ secrets.GITHUB_TOKEN }} - - - - name: Merge master -> dev-release - - if: steps.wait-for-build.outputs.conclusion == 'success' - uses: devmasx/merge-branch@1.4.0 - with: - type: now - target_branch: dev-release - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Webhook - if: steps.wait-for-build.outputs.conclusion == 'success' - uses: distributhor/workflow-webhook@v1 - env: - webhook_url: "https://dev.recursion.space/webhooks/github/" - webhook_secret: "Y0uR5ecr3t" diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000..0182ace --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1 @@ +disable=SC1091 diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 0a28162..0000000 --- a/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Recursion.Space - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/README.md b/README.md deleted file mode 100644 index fe18e51..0000000 --- a/README.md +++ /dev/null @@ -1,143 +0,0 @@ -
-OpenPod
- -[![Code Quality](https://github.com/RecursionSpace/OpenPod/actions/workflows/pylint.yml/badge.svg)](https://github.com/RecursionSpace/OpenPod/actions/workflows/pylint.yml) -  -[![Script Check](https://github.com/RecursionSpace/OpenPod/actions/workflows/ShellCheck.yml/badge.svg)](https://github.com/RecursionSpace/OpenPod/actions/workflows/ShellCheck.yml) -  -[![Recursion.Space Tests](https://github.com/RecursionSpace/OpenPod/actions/workflows/RecursionTests.yml/badge.svg)](https://github.com/RecursionSpace/OpenPod/actions/workflows/RecursionTests.yml) -  -[![Test Installer](https://github.com/RecursionSpace/OpenPod/actions/workflows/TestInstaller.yml/badge.svg)](https://github.com/RecursionSpace/OpenPod/actions/workflows/TestInstaller.yml) - -
- -## Table of Contents - -- [What is OpenPod?](#what-is-openpod) -- [Directory Structure](#directory-structure) - - -## What is OpenPod? - -The “Pod” is the physical extension of the Recursion.Space system for a facility. The Pod allows for door/egress and equipment control nodes. Additionally, the Pod acts as a local backup in an internet outage for users to continue accessing their facility. The Pod has a direct internet connection then uses a wireless method to communicate with nodes. A single facility can have multiple Pods, but each Pod can only be linked to one facility. - -1) Assume the Pod is plug and play internet connection. (DHCP) -2) Pods can self-generate ID and register it with the central system. -3) Users enter an ID to link the physical Pod with the web interface. - -Pod communicates to the web API via hook notifications, and a return confirmation is sent back. - -* Records Door/Equipment access -* Registers new nodes and Pods -* Pulls info dumps - -The web interface communicates with the Pod via MQTT. - -* Alerts for new users -* User updates -* Remote control (door unlock) - -Each Pod is on its MQTT topic, and the topic is created once the Pod registers with the central system. - -# Software Design - -The embedded code manages the communication between the API server and the node communication, acting as a middleman. There needs to be minimal factory configuration to make deployable units quickly. - -## OS - -Each Pod is installed with the latest version of Ubuntu, and the systems are tested daily for compatibility with the latest releases. An image of the OS with the code ready for deployment can make a quick installation. 32-bit for Pi3 and 64-bit for 4+ - -# Installation - -To install on a new Pod, the HUB_Installer.py file and the latest 0_1_0 folder are all that is needed. Run the HUB_Installer to complete the Pod setup. - -# Development - -Pod software development is done on a DigitalOcean hosted server. It is then transferred to the physical hardware running the system and tested before going into production. - -# Initialization - -Pod uses an initializer or “launcher” to configure the system before running the main code. The launcher performs the following tasks before running the main program: - -* Update & Upgrade the OS -* Sync Clock -* Installs program required packages -* Check for required files, create them if missing -* Configure start on boot file -* Creates serial if needed -* Registers the Pod with the API server -* Initializes the main program - -To launch the software, when the device boots, the launcher configures the system first to run the software in the background and then creates a script that executes the launch on startup. Working version with a cronjob. - -```bash -(@reboot (cd /home/ubuntu/RecursionHub && sudo python3 HUB_Launcher.py &)) -``` - -## Required Packages - -Required packages are stored under “Requirements” in the settings.py file. - -Pypubsub is used for internal flags and alerts and has a broker-type system that can be subscribed to. - -# MQTT - -The MQTT protocol is used for server to Pod communication, allowing the Pod to listen in real-time for incoming instruction. - -Communication to Pods is accomplished from quick commands represented by a hexadecimal number. 
- -| Comand Number | Command | -|---------------|-----------------------| -| AA (170) | Facility ID Available | -| BA (186) | Pull User Dump | -| CA (202) | Update Available | -| DA(218) | Time Zone Change | -| | | -| FA (250) | Zip & Send Logs | - -# Updates - -The end-user triggers updates via the webserver to run them with minimal interruption. An MQTT message is sent to the Pod to initiate the updating process. The update is a zip file downloaded from the server. - -Download zip file from server and store in the update folder. -Unzip contents of a zip file -Run Launcher - -First, commit and push the latest changes to git to prepare an update. Using WinSCP, download the 0_1_0 folder with the newest code. Rename the downloaded folder to match the latest version number. Zip the folder's CONTENTS, so there is no second directory folder with a matching name inside the zip file folder. Clean up all extra copies of the old folder. Update the latest version name using the recursion admin panel, then update their devices. - -# Logging - -The Recursion System uses several logging points to be used both for troubleshooting as well as security and auditing. There are two log files created for the Pod, RecursionLog.log, and TransactionLog.log; each one is used as follows: - -## RecursionLog.log - -This log is used to record system events. - -* Coming online -* Updating -* New Nodes being added -* Nodes coming online - -## System.Snapshot - -This file contains a JSON summary of the Pod for debugging purposes. The information available in this file must also be readily accessible from the server for troubleshooting purposes. - -``` - -[ - - “local_ip” = “xx.xx.xx.xx”, - -] -``` - -## Directory Structure - -```default -. -├── .github # CI/CD using GitHub Actions and other functions. -├── tests # Contains unit testing files. -└── openpod # Contains OpenPod functionality. - └── modules # Independent core functions. -``` diff --git a/boot.sh b/boot.sh deleted file mode 100644 index 3aa7f67..0000000 --- a/boot.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -#Called by the @reboot conjob, will remain running at all times. - -sleep 30 #Delay before running to allow all the services to startup. - -while : -do - ethChecks=0 #Variable to store the number of delays waiting on an internet connection. - until [[ ( "$ethChecks" -gt 6 ) ]]; #Check to see if the eth adapter has started running yet or 6 attempts have been made. - do - if grep -q 'eth0' ifconfig -s; then - break - fi - (( ethChecks++ )) - sleep 10 - done - - cd /opt/OpenPod/ || exit #Makesure that everthing is being refrenced from the executable folder. - - if [ ! -f system.json ]; then - Token=false - else - Token=$(jq -e '.Token' system.json | xargs) #Read in needed API token, results in null of key is not set. - fi - - if [[ $Token != false && $Token != null ]]; then - CurrentVersion=$(jq '.CurrentVersion' system.json | xargs) #Gets the current version of the program that is available localy. - LatestVersion=$(curl --header "Authorization: Token $Token" https://api.recursion.space/v1/info/hub_version/) #Gets the latest version number available from the server via API. - LatestVersion=$(jq '.[0] | .current_production_version' <<< "$LatestVersion" | xargs) #Isolates just the version number. - if [[ $CurrentVersion != "$LatestVersion" ]]; then #Only pulls new version if it is diffrent from the current version on the system. 
- curl --header "Authorization: Token $Token" -O --remote-header-name https://recursion.space/updatehub/ #Downloads the latest zip file. - unzip "$LatestVersion".zip -d "$LatestVersion" #Unpacks the zip into a folder of the same name. - rm "$LatestVersion".zip #Clean up and delete the zip file. - python3 "$LatestVersion"/HUB_Launcher.py "$LatestVersion" #Run Launcher from new version. - else - python3 "$LatestVersion"/HUB_Launcher.py "$LatestVersion" - fi - else - echo "Token Not Found" - (cd /opt/OpenPod/ && sudo python3 0_1_0/HUB_Launcher.py) #Updated after last upgrade, the default will be latest version form git. - fi - - status=$? - - if [ $status -ne 0 ]; then - python3 "$CurrentVersion"/HUB_Launcher.py #Run the launcher program from the previous version as a fall back. - fi - - sleep 10 #Delay to prevent constant polling. -done diff --git a/installer.py b/installer.py deleted file mode 100644 index 21ddf39..0000000 --- a/installer.py +++ /dev/null @@ -1,215 +0,0 @@ -#!/usr/bin/env python3 - -'''This file is ran when setting up a new Pi.''' -#Program execution is handled by the HUB_Launcher.py within the latest version folder. - -#need to setup for local time -#https://linuxize.com/post/how-to-set-or-change-timezone-on-ubuntu-18-04/ - -import uuid -import json -import subprocess - -from os import path - - -instalation_log = {} - - -# --------------------------- Set System Time Zone --------------------------- # -try: - subprocess.call(['sudo', 'timedatectl', 'set-timezone', 'UTC']) - - instalation_log['SysTimeZone'] = 'SUCESS - System time zone changed to UTC' -except RuntimeError as err: - instalation_log['SysTimeZone'] = f'FAILED - {err}' - - -# ---------------------------- Update system time ---------------------------- # -try: - subprocess.call(['sudo', 'apt-get', 'install', 'chrony', '-y']) - subprocess.call(['sudo', 'chronyd', '-q']) - - instalation_log['SysTime'] = 'SUCESS - System time synchronized.' -except RuntimeError as err: - instalation_log['SysTime'] = f'FAILED - {err}' - - -# --------------------- Update OS and Package Directories -------------------- # -try: - subprocess.call(['sudo', 'apt-get', 'update', '-y']) - - instalation_log['SysUpdate'] = 'SUCESS - Sytem has now been updated.' -except RuntimeError as err: - instalation_log['SysUpdate'] = f'FAILED - {err}' - - -try: - subprocess.call(['sudo', 'apt-get', 'upgrade', '-y']) - - instalation_log['SysUpgrade'] = 'SUCESS - Sytem has now been upgrded.' -except RuntimeError as err: - instalation_log['SysUpgrade'] = f'FAILED - {err}' - - -# -------------------------------- Install PIP ------------------------------- # -try: - subprocess.call(['sudo', 'apt-get', 'install', 'python3-pip', '-y']) - - instalation_log['GetPip'] = 'SUCESS - pip now installed.' -except RuntimeError as err: - instalation_log['GetPip'] = f'FAILED - {err}' - - -# ------------- Creates execution location at /opt/OpenPod/ ------------- # -try: - subprocess.call(['mkdir', '/opt/OpenPod']) - - instalation_log['MakeDirectory'] = 'SUCESS - OpenPod directory created.' -except RuntimeError as err: - instalation_log['MakeDirectory'] = f'FAILED - {err}' - - -# ----------------------- Creates the system.json file ----------------------- # -try: - subprocess.call(['sudo', 'touch', '/opt/OpenPod/system.json']) - - data = { - "serial" : f"{uuid.uuid4().hex}", #Self assigned aidentification number. - "timezone" : "UTC", - "XBEE_KY" : f"{uuid.uuid4().hex}", #Network key be assigned to the nodes. - "XBEE_OP" : False, #False -> XBee not configured. 
- "CurrentVersion" : "0_1_0", #Program fallback. - } - - with open('/opt/OpenPod/system.json', 'w', encoding="utf-8") as outfile: - json.dump(data, outfile) - - instalation_log['system.json'] = 'SUCESS - Suscessfulyl created system json file.' -except RuntimeError as err: - instalation_log['system.json'] = f'FAILED - {err}' - - -# ---------------------------------------------------------------------------- # -# Install Bash Requirements # -# ---------------------------------------------------------------------------- # - -# ------- jq - parse json bash https://stedolan.github.io/jq/download/ ------- # -try: - subprocess.call(['sudo', 'apt-get', 'install', 'jq', '-y']) - - instalation_log['BashReq_jq'] = 'SUCESS - jq now installed.' -except RuntimeError as err: - instalation_log['BashReq_jq'] = f'FAILED - {err}' - - -# -------------------- unzip to manage zipfiles from bash -------------------- # -try: - subprocess.call(['sudo', 'apt-get', 'install', 'unzip', '-y']) - - instalation_log['BashReq_unzip'] = 'SUCESS - unzip now installed.' -except RuntimeError as err: - instalation_log['BashReq_unzip'] = f'FAILED - {err}' - - -# ----------------- ifconfig to check network adapter status ----------------- # -#Depreciated use https://askubuntu.com/questions/1031640/ifconfig-missing-after-ubuntu-18-04-install -try: - subprocess.call(['sudo', 'apt-get', 'install', 'net-tools', '-y']) - - instalation_log['Install_net-tools'] = 'SUCESS - Installed net-tools.' -except RuntimeError as err: - instalation_log['pip_requiremetns'] = f'FAILED - Could not install net-tools.{err}' - - -# ------------------ pip requiremetns from requirements.txt ------------------ # -try: - subprocess.call([ - 'sudo', - 'pip3', - 'install', - '--no-input', - '-U', - '-r', - 'requirements.txt', - '--no-cache-dir', - '--no-dependencies' - ]) - - instalation_log['pip_requiremetns'] = 'SUCESS - Installed all pip requirements.' -except RuntimeError as err: - instalation_log['pip_requiremetns'] = f'FAILED - {err}' - - -# ---------------------- Create Folders and Directories ---------------------- # -try: - subprocess.call(['mkdir', '/opt/OpenPod/logs']) - subprocess.call(['mkdir', '/opt/OpenPod/data']) - subprocess.call(['touch', '/opt/OpenPod/logs/RecursionLog.log']) - subprocess.call(['touch', '/opt/OpenPod/logs/System.Snapshot']) - - instalation_log['folders_directories'] = 'SUCESS - Log folder and files prepared' -except RuntimeError as err: - instalation_log['folders_directories'] = f'FAILED - {err}' - - -# ------------------------- Create Data Storage Files ------------------------ # -try: - if path.exists("/opt/OpenPod/data/dump.json") is False: - subprocess.call(['sudo', 'touch', '/opt/OpenPod/data/dump.json']) - if path.exists("/opt/OpenPod/data/nodes.json") is False: - subprocess.call(['sudo', 'touch', '/opt/OpenPod/data/nodes.json']) - if path.exists("/opt/OpenPod/data/owners.json") is False: - subprocess.call(['sudo', 'touch', '/opt/OpenPod/data/owners.json']) - if path.exists("/opt/OpenPod/data/permissions.json") is False: - subprocess.call(['sudo', 'touch', '/opt/OpenPod/data/permissions.json']) - - instalation_log['storage_files'] = 'SUCESS - Suscessfulyl created data files.' -except RuntimeError as err: - instalation_log['storage_files'] = f'FAILED - {err}' - - -# ------------------ Move boot.sh to /opt/OpenPod/ ------------------ # -try: - subprocess.call(['sudo', 'cp', 'boot.sh', '/opt/OpenPod/HUB_Boot.sh']) - - instalation_log['move_boot.sh'] = 'SUCESS - boot.sh file is not in the /opt/ directory.' 
-except RuntimeError as err: - instalation_log['move_boot.sh'] = f'FAILED - {err}' - - -# ----------------------------- Copy 0_1_0 Folder ---------------------------- # -try: - subprocess.call(['sudo', 'cp', '-r', '0_1_0', '/opt/OpenPod/0_1_0']) - - instalation_log['copy0_1_0'] = 'SUCESS - 0_1_0/ copied into the /opt/ directory.' -except RuntimeError as err: - instalation_log['copy0_1_0'] = f'FAILED - {err}' - - -# --------------------------- Set file permissions --------------------------- # -try: - subprocess.call(['sudo', 'chmod', '+x', '/opt/OpenPod/HUB_Boot.sh']) - - instalation_log['chmod'] = 'SUCESS - Permissions for HUB_Boot.sh updated.' -except RuntimeError as err: - instalation_log['chmod'] = f'FAILED - {err}' - - -# ----------------------------- Configure crontab ---------------------------- # -try: - from crontab import CronTab # Needs to be here since it is installed by the requirements file. - cron = CronTab(user='root') - job = cron.new(command='(cd /opt/OpenPod && sudo ./HUB_Boot.sh &)') - job.every_reboot() - cron.write() - - instalation_log['configure_python-crontab'] = 'SUCCESS - Crontab sucessfully created.' -except ImportError as err: - instalation_log['configure_python-crontab'] = f'FAILED - {err}' -except RuntimeError as err: - instalation_log['configure_python-crontab'] = f'FAILED - {err}' - -for log_item, result in instalation_log.items(): - print(f"{log_item} -> {result}") -print("Recursion HUB Installation Program Complete. Restart System.") diff --git a/installer.sh b/installer.sh new file mode 100644 index 0000000..ce50343 --- /dev/null +++ b/installer.sh @@ -0,0 +1,269 @@ +#!/bin/bash + +# Installer for OpenPod, for more information see https://github.com/blokbot-io/OpenBlok/blob/master/install.sh + + +# ---------------------------------------------------------------------------- # +# Disable Prompts # +# ---------------------------------------------------------------------------- # +export DEBIAN_FRONTEND=noninteractive +sed -i "/#\$nrconf{restart} = 'i';/s/.*/\$nrconf{restart} = 'a';/" /etc/needrestart/needrestart.conf + + +# ---------------------------------------------------------------------------- # +# Help # +# ---------------------------------------------------------------------------- # +Help() +{ + # Display Help + echo "OpenPod instalation script" + echo + echo "h Display this help" + echo "b Set custom branch for OpenPod" + echo "u Set custom URL for OpenPod" + echo "d Enable debug mode" +} + +# TO FIX: ARGS VS OPTS + +# ---------------------------------------------------------------------------- # +# Defaults # +# ---------------------------------------------------------------------------- # +DEBUG=flase # -d +REPO='https://github.com/RecursionSpace/OpenPod' + +# ---------------------------------------------------------------------------- # +# Options # +# ---------------------------------------------------------------------------- # +while getopts ":hbdu" flags; do + case "${flags}" in + h) # display Help + Help + exit;; + b) # Custom branch + BRANCH="${OPTARG}";; + d) # Enable debug mode + DEBUG=true ;; + u) # Custom URL endpoint + URL="${OPTARG}";; + \?) echo "Invalid option: -${OPTARG}" >&2; + exit 1 ;; + esac +done + +if [ $DEBUG ]; then + BRANCH='dev-release' + URL='dev.recursion.space' + API_URL='dev.api.recursion.space' +elif [ ! 
$DEBUG ]; then + BRANCH='release' + URL='recursion.space' + API_URL='api.recursion.space' +fi + +echo "Installing OpenPod with the following options:" +echo "Debug | $DEBUG" +echo "Repo | $REPO" +echo "Branch | $BRANCH" +echo "URL | $URL" +echo "API_URL | $API_URL" + +# -------------------------------- Verify Root ------------------------------- # +if [ "$EUID" -ne 0 ] + then echo "Please run as root with sudo." + exit +fi + +# --------------------------------- SSH User --------------------------------- # +echo "Verifying SSH user 'openpod'" + +if ! id -u "openpod" >/dev/null 2>&1; then + echo "Creating user 'openpod'" + useradd -m -s /bin/bash openpod + usermod -aG sudo openpod + mkdir -p ~openpod/.ssh/ && touch ~openpod/.ssh/authorized_keys + echo "openpod ALL=(ALL) NOPASSWD:ALL" | sudo tee -a /etc/sudoers > /dev/null +else + echo "User 'openpod' already exists, skipping..." + mkdir -p ~openpod/.ssh/ && touch ~openpod/.ssh/authorized_keys +fi + +# ---------------------------- Update System Time ---------------------------- # +sudo timedatectl set-timezone UTC + + +# ---------------------------------------------------------------------------- # +# Dependencies # +# ---------------------------------------------------------------------------- # + +# ------------------------------ build-essential ----------------------------- # +# Required to install RPi.GPIO - https://github.com/ynsta/steamcontroller/issues/42 +REQUIRED_PKG="build-essential" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install build-essential -y +else + echo "build-essential already installed, skipping..." +fi + +# ---------------------------------- chrony ---------------------------------- # +REQUIRED_PKG="chrony" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install chrony -y + sudo chronyd -q +else + echo "chrony already installed, skipping..." +fi + +# ----------------------------------- unzip ---------------------------------- # +REQUIRED_PKG="unzip" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install unzip -y +else + echo "unzip already installed, skipping..." +fi + +# ------------------------------------ jq ------------------------------------ # +REQUIRED_PKG="jq" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install jq -y +else + echo "jq already installed, skipping..." 
+fi + +# -------------------------------- Python 3.11 ------------------------------- # +pytohn_version=$(python -c 'import sys; print(".".join(map(str, sys.version_info[0:2])))') +if [ "$pytohn_version" != "3.11" ]; then + sudo apt install software-properties-common -y + yes '' | sudo add-apt-repository ppa:deadsnakes/ppa + sudo apt-get install python3.11 -y + sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 + sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1 +else + echo "Python 3.11 already installed" +fi + +# -------------------------------- Python-Dev -------------------------------- # +REQUIRED_PKG="python3.11-dev" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install python3.11-dev -y +else + echo "python3.11-dev already installed, skipping..." +fi + +# ------------------------ Python Virtual Environment ------------------------ # +REQUIRED_PKG="python3.11-venv" +PKG_OK=$(dpkg-query -W --showformat='${Status}\n' $REQUIRED_PKG|grep "install ok installed") +if [ "" = "$PKG_OK" ]; then + echo "No $REQUIRED_PKG. Setting up $REQUIRED_PKG..." + sudo apt-get install python3.11-venv -y +else + echo "python3.11-venv already installed, skipping..." +fi + +# ---------------------------------------------------------------------------- # +# OpenPod # +# ---------------------------------------------------------------------------- # + +# -------------------------- Clear Previous Install -------------------------- # +sudo rm -rf /opt/OpenPod +sudo systemctl stop openpod.service + +# ------------------------------- Clone OpenPod ------------------------------ # +set -e # Exit when any command fails. 
+sudo mkdir -p /opt +cd /opt +sudo git clone --single-branch --branch $BRANCH "${REPO}".git +cd OpenPod + +# ----------------------------- Setup Enviroment ----------------------------- # +sudo python3.11 -m venv /opt/OpenPod/env +source /opt/OpenPod/env/bin/activate +pip install --no-input -U -r /opt/OpenPod/requirements.txt + +# ---------------------------- Create Directories ---------------------------- # +sudo mkdir -p /opt/OpenPod/logs +sudo mkdir -p /opt/OpenPod/data + +# ------------------------------- Create Files ------------------------------- # +# Log Location +sudo touch /opt/OpenPod/logs/RecursionLog.log +sudo touch /opt/OpenPod/logs/System.Snapshot + +# Data Location +sudo touch /opt/OpenPod/data/dump.json +sudo touch /opt/OpenPod/data/nodes.json +sudo touch /opt/OpenPod/data/owners.json +sudo touch /opt/OpenPod/data/permissions.json + +# -------------------------------- system.json ------------------------------- # +sudo touch /opt/OpenPod/system.json +serial_uuid=$(cat /proc/sys/kernel/random/uuid) +serial=${serial_uuid//-} +xbee_uuid=$(cat /proc/sys/kernel/random/uuid) +openpod_version=$(git rev-parse HEAD) +echo '{ + "uuid": "'"$serial_uuid"'", + "debug": '$DEBUG', + "serial": "'"$serial"'", + "timezone": "UTC", + "url": "'"$URL"'", + "api_url": "'"$API_URL"'", + "XBEE": { + "KY": "'"$xbee_uuid"'", + "OP": false + }, + "GPIO": { + "LED_IO": 23, + "LED_STAT": 17 + }, + "version": "'"$openpod_version"'", + "OpenPod": { + "repo": "'"$REPO"'", + "branch": "'"$BRANCH"'", + "commit": "'"$openpod_version"'" + } +}' > /opt/OpenPod/system.json + +# --------------------------- Create Version Folder -------------------------- # +mkdir -p /opt/OpenPod/versions/"$openpod_version" +sudo cp -a /opt/OpenPod/openpod/. /opt/OpenPod/versions/"$openpod_version"/ +sudo rm -rf /opt/OpenPod/openpod + +# --------------------------- Setup OpenPod Service -------------------------- # +cat < /etc/systemd/system/openpod.service +[Unit] +Description=OpenPod | Recursion.Space +After=network.target +StartLimitIntervalSec=0 + +[Service] +Type=simple +User=root +WorkingDirectory=/opt/OpenPod + +ExecStart = /bin/bash -c "exec /opt/OpenPod/env/bin/python3.11 \\ + /opt/OpenPod/versions/\$(jq '.version' /opt/OpenPod/system.json | xargs)/pod.py" + +Restart=always +RestartSec=10 + +[Install] +WantedBy=multi-user.target +EOF + +sudo systemctl enable --now openpod.service +sudo systemctl daemon-reload + +echo "- OpenPod is now installed -" +echo "Serial: $serial" +exit 0 diff --git a/openpod/launcher.py b/openpod/launcher.py deleted file mode 100644 index a39d5b8..0000000 --- a/openpod/launcher.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python3 -""" -- Initiates the LED visuals - -- Installs new requirements as part of an update -(if needed, might want to have a seprate process for this) - -- Checks for connection to server, -- If no connection is found then LED indication is set. -- Pulls configuration files from server. 
-- Launches main program -""" -import sys -import json -import threading -import subprocess - -# ---------------------------------------------------------------------------- # -# Check Requirements # -# ---------------------------------------------------------------------------- # -try: - import config # pylint: disable=W0611 -except ImportError : - subprocess.call(['sudo', 'pip3', 'install', 'config']) - - - -from modules import rec_lan, rec_log -from modules.rec_log import exception_log - -from settings import LED_STAT -import settings - -if settings.Pi: - try: - from modules import rec_xbee # pylint: disable=C0412 - except ModuleNotFoundError as err: - exception_log.error("%s", err) - - try: - from modules import rec_gpio # pylint: disable=C0412 - except ModuleNotFoundError as err: - exception_log.error("%s", err) - - -# ---------------------------------------------------------------------------- # -# Program Start Visual # -# ---------------------------------------------------------------------------- # -if settings.Pi: - led_stat_thread = threading.Thread(target = rec_gpio.led_stat_thread) - led_stat_thread.start() - - rec_gpio.state(LED_STAT, 1, 0) - - -# ---------------------------------------------------------------------------- # -# Check Network Connection # -# ---------------------------------------------------------------------------- # -try: - rec_lan.monitor_network() #Monitors the network connection while the program is running. - -except RuntimeError as err: - exception_log.error("FATAL - Start Network Monitoring - Error: %s", err) - -finally: - exception_log.debug("Launcher - Exiting Check Network Connection") - -# ---------------------------------------------------------------------------- # -# Update version in system.json # -# ---------------------------------------------------------------------------- # -#Only updates if called by bash script and environmental variable was passed in. -if len(sys.argv) > 1: - with open("system.json", "r+", encoding="utf-8") as file: - data = json.load(file) - data.update( {"CurrentVersion":sys.argv[1]} ) - file.seek(0) - json.dump(data, file) - file.truncate() - - -# ---------------------------------------------------------------------------- # -# Logs Settings For Debugging # -# ---------------------------------------------------------------------------- # -try: - public, local = rec_lan.get_ip() - rec_log.snapshot(public, local) - -except RuntimeError as err: - exception_log.error("FAITIAL - Generate Snapshot - Error: %s", err) - -finally: - exception_log.debug("Launcher - Exiting Settings for Debugging") - -# ---------------------------------------------------------------------------- # -# XBee Configuration # -# ---------------------------------------------------------------------------- # -if settings.Pi: - with open('system.json', 'r', encoding="utf-8") as file: - system_data = json.load(file) - - if system_data.get('XBEE_OP', False) is False: - try: - rec_xbee.xbee_info() - except UnboundLocalError as err: - exception_log.error("Unable to capture XBee info - Error: %s", err) - -# ---------------------------------------------------------------------------- # -# Main HUB Program # -# ---------------------------------------------------------------------------- # -try: - import pod # pylint: disable=W0611 - -except RuntimeError as err: - exception_log.error("Could not start hub.py with error: %s", err) - - # Performs Seppuku to triger bash script and perform update pull (student). 
- subprocess.call(['pkill', '-f', 'hub.py']) - - # Performs Seppuku to triger bash script and perform update pull - # (can't kill the master before the student). - subprocess.call(['pkill', '-f', 'HUB_Launcher.py']) diff --git a/openpod/modules/op_config.py b/openpod/modules/op_config.py new file mode 100644 index 0000000..1c59360 --- /dev/null +++ b/openpod/modules/op_config.py @@ -0,0 +1,68 @@ +''' +Configuration loader for OpenPod + +Expected JSON format: +{ + "uuid": str, + "debug": bool, + "serial": str, + "timezone": str, + "url": str, + "api_url": str, + "version": str, + "api_token": str, +} +''' + +import json + +SYSTEM_FILE = '/opt/OpenPod/system.json' + + +def get(key, default=None): + ''' + Get a value from the system configuration file. + ''' + try: + with open(SYSTEM_FILE, 'r', encoding="UTF-8") as system_file: + system = json.load(system_file) + except FileNotFoundError: + system = {} + + return system.get(key, default) + + +def set_value(key, value): + ''' + Set a value in the system configuration file. + ''' + try: + with open(SYSTEM_FILE, 'r', encoding="UTF-8") as system_file: + system = json.load(system_file) + except FileNotFoundError: + system = {} + + system[key] = value + + with open(SYSTEM_FILE, 'w', encoding="UTF-8") as system_file: + system_file.seek(0) + json.dump(system, system_file, indent=4) + system_file.truncate() + + +def set_nested_value(keys, value): + ''' + Set a nested value in the system configuration file. + ''' + try: + with open(SYSTEM_FILE, 'r', encoding="UTF-8") as system_file: + system = json.load(system_file) + except FileNotFoundError: + system = {} + + system[keys[0]][keys[1]] = value + + with open(SYSTEM_FILE, 'w', encoding="UTF-8") as system_file: + system_file.seek(0) + json.dump(system, system_file, indent=4) + system_file.truncate() diff --git a/openpod/modules/op_gpio.py b/openpod/modules/op_gpio.py new file mode 100644 index 0000000..ffbc40d --- /dev/null +++ b/openpod/modules/op_gpio.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 + +''' +Module manages raspberry pi gpio functionality. +''' + +from time import sleep + +import config + +from modules import op_config + +try: + from RPi import GPIO + GPIO_AVAILABLE = True +except (RuntimeError, ModuleNotFoundError): + GPIO_AVAILABLE = False + + +LED_IO = op_config.get('GPIO', {}).get('LED_IO', 23) +LED_STAT = op_config.get('GPIO', {}).get('LED_STAT', 17) + + +if GPIO_AVAILABLE: + GPIO.setmode(GPIO.BCM) # Setup for LED indicators. + GPIO.setwarnings(False) + + GPIO.setup(LED_IO, GPIO.OUT) + GPIO.setup(LED_STAT, GPIO.OUT) + + GPIO.output(LED_IO, GPIO.LOW) + GPIO.output(LED_STAT, GPIO.LOW) + +config.LED_IO_ON = 0 +config.LED_IO_OFF = 0 + +config.LED_STAT_ON = 0 +config.LED_STAT_OFF = 0 + + +# ---------------------------------------------------------------------------- # +# Indicator Presets # +# ---------------------------------------------------------------------------- # +def initializing(): + ''' + Indicates that the system is initializing. + LED_IO - Solid on + LED_STAT - Off + ''' + state(LED_IO, 1, 0) + + +def unregistered(): + ''' + Indicates that the system is not registered. + LED_IO - 1 Hz + LED_STAT - N/A + ''' + state(LED_IO, 1, 1) + + +def ready(): + ''' + Indicates that the system is ready. + LED_IO - Solid on + LED_STAT - N/A + ''' + state(LED_IO, 1, 0) + + +def incoming_data(): + ''' + Indicates that there is incoming XBee data. + LED_IO - 4 Hz + LED_STAT - N/A + ''' + state(LED_IO, 0.125, 0.125) + + +def no_network(): + ''' + Indicates that there is no network connection. 
+ LED_IO - N/A + LED_STAT - 4 Hz + ''' + state(LED_STAT, 0.125, 0.125) + + +def no_internet(): + ''' + Indicates that there is no internet connection. + LED_IO - N/A + LED_STAT - 2 Hz + ''' + state(LED_STAT, 0.25, 0.25) + + +def no_recursion(): + ''' + Indicates that a connection to recursion.space could not be established. + LED_IO - N/A + LED_STAT - 1 Hz + ''' + state(LED_STAT, 1, 1) + + +def networked(): + ''' + Indicates that the system is connected to the network. + LED_IO - N/A + LED_STAT - Off + ''' + state(LED_STAT, 0, 0) + +# ---------------------------------------------------------------------------- # +# Set LED Patterns # +# ---------------------------------------------------------------------------- # + + +def state(io_pin, on_time=0, off_time=0): + """ + Sets the io_pin state to on or off. + """ + if io_pin == LED_IO: + led_io_on_off(on_time, off_time) + if io_pin == LED_STAT: + led_io_stat_on_off(on_time, off_time) + + +def led_io_on_off(on_time=0, off_time=0): + ''' + Controls the LED used to indicate input/output actions + ''' + config.LED_IO_ON = on_time + config.LED_IO_OFF = off_time + + +def led_io_stat_on_off(on_time=0, off_time=0): + ''' + Controls the LED used to indicate general system status + ''' + config.LED_STAT_ON = on_time + config.LED_STAT_OFF = off_time + + +# ---------------------------------------------------------------------------- # +# LED Threads # +# ---------------------------------------------------------------------------- # + +def led_io_thread(): + ''' + Process occurring in the thread for the I/O LED + ''' + while True: + if config.LED_IO_ON > 0: + GPIO.output(LED_IO, GPIO.HIGH) + sleep(config.LED_IO_ON) + if config.LED_IO_OFF > 0: + GPIO.output(LED_IO, GPIO.LOW) + sleep(config.LED_IO_OFF) + if config.LED_IO_ON == 0 and config.LED_IO_OFF == 0: + GPIO.output(LED_IO, GPIO.LOW) + sleep(.1) # Allows CPU time for other threads. + + +def led_stat_thread(): + ''' + Process occurring in thread for the System Status LED + ''' + while True: + if config.LED_STAT_ON > 0: + GPIO.output(LED_STAT, GPIO.HIGH) + sleep(config.LED_STAT_ON) + if config.LED_STAT_OFF > 0: + GPIO.output(LED_STAT, GPIO.LOW) + sleep(config.LED_STAT_OFF) + if config.LED_STAT_ON == 0 and config.LED_STAT_OFF == 0: + GPIO.output(LED_STAT, GPIO.LOW) + sleep(.1) # Allows CPU time for other threads. diff --git a/openpod/modules/op_ssh.py b/openpod/modules/op_ssh.py new file mode 100644 index 0000000..1f971fc --- /dev/null +++ b/openpod/modules/op_ssh.py @@ -0,0 +1,34 @@ +''' +openpod | modules | op_ssh.py +Configuration for SSH +''' + +import os +import requests + +from modules import op_config +from modules.rec_log import log_api + + +def update_keys(): + ''' + Requests the keys from the server and updates the local keys. 
+ Request URL: /v1/pod/ssh_pub_keys + ''' + try: + keys = requests.get( + f'https://{op_config.get("api_url")}/v1/pod/ssh_pub_keys/{op_config.get("uuid")}', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 + ) + except requests.exceptions.RequestException as error: + log_api.error('SSH Keys Update Failed: %s', error) + return False + + key_file_path = os.path.expanduser('~openpod/.ssh/authorized_keys') + with open(key_file_path, 'w', encoding="UTF-8") as key_file: + for key in keys.json(): + key_file.write(key['key']) + + log_api.info('SSH Keys Updated') + return True diff --git a/openpod/modules/rec_api.py b/openpod/modules/rec_api.py index cdeb8d5..41209ec 100644 --- a/openpod/modules/rec_api.py +++ b/openpod/modules/rec_api.py @@ -2,72 +2,67 @@ ''' Handles API calls with Recursion.Space +Performs all API calls to the server, functions should be used as a thread. ''' import json import threading import requests -import settings - +from modules import op_config, op_gpio from modules.rec_log import log_api, hash_data -if settings.IS_PI: - from modules import rec_gpio - from settings import LED_IO - -#Performs all API calls to the server, functions should be used as a thread. # ---------------------------------------------------------------------------- # # Request Update For All Information # # ---------------------------------------------------------------------------- # def pull_data_dump(): ''' - Request updated infromation from the server. + Request updated information from the server. ''' - with open('system.json', 'r+', encoding="utf-8") as file: - system_data = json.load(file) - # ----------------------------- Pull Member Data ----------------------------- # - with open("/opt/RecursionHub/data/dump.json", "w", encoding="utf-8") as file: - member_info = requests.get(f'{settings.RECURSION_API_URL}/v1/members', headers={ - 'Authorization' : f'Token {system_data["Token"]}' - }) + with open("/opt/OpenPod/data/dump.json", "w", encoding="utf-8") as file: + member_info = requests.get(f'https://{op_config.get("api_url")}/v1/members', headers={ + 'Authorization': f'Token {op_config.get("api_token")}' + }, timeout=10) - responce = member_info.json() - json.dump(responce, file) + response = member_info.json() + json.dump(response, file) # --------------------------- Pull Operator(s) Data -------------------------- # - with open("/opt/RecursionHub/data/owners.json", "w", encoding="utf-8") as file: - operators_info = requests.get(f'{settings.RECURSION_API_URL}/v1/operators', headers={ - 'Authorization' : f'Token {system_data["Token"]}' - }) + with open("/opt/OpenPod/data/owners.json", "w", encoding="utf-8") as file: + operators_info = requests.get(f'https://{op_config.get("api_url")}/v1/operators', headers={ + 'Authorization': f'Token {op_config.get("api_token")}' + }, timeout=10) - responce = operators_info.json() - json.dump(responce, file) + response = operators_info.json() + json.dump(response, file) # -------------------------------- Nodes Data -------------------------------- # - with open("/opt/RecursionHub/data/nodes.json", "w", encoding="utf-8") as file: + with open("/opt/OpenPod/data/nodes.json", "w", encoding="utf-8") as file: nodes_info = requests.get( - f'{settings.RECURSION_API_URL}/v1/nodes', - headers={'Authorization' : f'Token {system_data["Token"]}'} - ) + f'https://{op_config.get("api_url")}/v1/nodes', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 + ) - responce = nodes_info.json() - json.dump(responce, file) + 
response = nodes_info.json() + json.dump(response, file) # ----------------------------- Pull Permissions ----------------------------- # - with open("/opt/RecursionHub/data/permissions.json", "w", encoding="utf-8") as file: + with open("/opt/OpenPod/data/permissions.json", "w", encoding="utf-8") as file: permissions_info = requests.get( - f'{settings.RECURSION_API_URL}/v1/permissions', - headers={'Authorization' : f'Token {system_data["Token"]}'} - ) + f'https://{op_config.get("api_url")}/v1/permissions', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 + ) - responce = permissions_info.json() - json.dump(responce, file) + response = permissions_info.json() + json.dump(response, file) return True + # ---------------------------------------------------------------------------- # # Set or Update Timezone # # ---------------------------------------------------------------------------- # @@ -75,46 +70,37 @@ def update_time_zone(): ''' API call to set the HUB timezone with the user selected option. ''' - with open("system.json", "r+", encoding="utf-8") as file: - system_data = json.load(file) - spaces_info = requests.get( - f'{settings.RECURSION_API_URL}/v1/spaces', - headers={'Authorization' : f'Token {system_data["Token"]}'} - ) - - responce = spaces_info.json() + spaces_info = requests.get( + f'https://{op_config.get("api_url")}/v1/spaces', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 + ) - system_data.update( {"timezone":responce[0]["timezone"]} ) - file.seek(0) - json.dump(system_data, file) - file.truncate() + response = spaces_info.json()[0] - log_api.info("Facility time zone set to: %s", responce[0]["timezone"]) + op_config.set_value("timezone", response["timezone"]) + log_api.info("Facility time zone set to: %s", response["timezone"]) # ---------------------------------------------------------------------------- # # Register Hub With Recursion # # ---------------------------------------------------------------------------- # -def register_hub(): #Needs updated! +def register_pod(): # Needs updated! ''' - API to register the hub. + API to register the pod. ''' - with open('system.json', 'r', encoding="utf-8") as system_file: - system_config = json.load(system_file) - - url = settings.RecursionURL+'/hubs/' - payload_tuples = {'serial':f"{system_config['serial']}"} - output = requests.post(url, payload_tuples, auth=('OwnerA', 'Password@1')) - responce = output.json() + url = f'https://{op_config.get("url")}/hubs/' + payload_tuples = { + 'uuid': f"{op_config.get('uuid')}", + 'serial': f"{op_config.get('serial')}" + } + output = requests.post(url, payload_tuples, auth=('OwnerA', 'Password@1'), timeout=10) + response = output.json() - log_api.info("Hub registration responce: %s", responce) + log_api.info(f"Pod registration response: {response}") - with open("/opt/RecursionHub/system.json", "r+", encoding="utf-8") as file: - data = json.load(file) - data.update( {"HUBid":responce["id"]} ) - file.seek(0) - json.dump(data, file) - log_api.info("Hub registered and assigned HUBid: %s", responce["id"]) + op_config.set_value("pod_id", response["id"]) + log_api.info(f'Pod registered and assigned pod_id: {response["id"]}') # ---------------------------------------------------------------------------- # @@ -122,25 +108,21 @@ def register_hub(): #Needs updated! # ---------------------------------------------------------------------------- # def link_hub(): ''' - Assosiate the hub with a space. + Associate the Pod with a space. 
''' try: - with open("system.json", "r+", encoding="utf-8") as file: - system_data = json.load(file) + hubs_info = requests.get( + f'https://{op_config.get("api_url")}/v1/hubs', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 + ) - hubs_info = requests.get(f'{settings.RECURSION_API_URL}/v1/hubs', headers={ - 'Authorization' : f'Token {system_data["Token"]}' - }) + response = hubs_info.json() - responce = hubs_info.json() + op_config.set_value("space", response[0]["facility"]) - system_data.update( {"facility":responce[0]["facility"]} ) - file.seek(0) - json.dump(system_data, file) - file.truncate() + op_gpio.ready() - if settings.IS_PI: - rec_gpio.state(LED_IO, 1) except OSError as err: log_api.error("link_hub - Unable to open file system.json - %s", err) @@ -152,20 +134,17 @@ def pair_node(node_mac): ''' Link a new node with the hub. ''' - with open('system.json', 'r+', encoding="utf-8") as system_file: - system_config = json.load(system_file) - post_content = [ ('mac', node_mac), - ('hub', system_config['HUBid']), - ('facility', system_config['facility']) + ('hub', op_config.get('pod_id')), + ('facility', op_config.get("space")) ] # ------------------------------ API /v1/nodes/ ------------------------------ # requests.post( - f'{settings.RECURSION_API_URL}/v1/nodes', + f'https://{op_config.get("api_url")}/v1/nodes', data=post_content, - headers={'Authorization' : f'Token {system_config["Token"]}'} + headers={'Authorization': f'Token {op_config.get("api_token")}'}, timeout=10 ) pull_data_dump() @@ -176,27 +155,26 @@ def pair_node(node_mac): # ---------------------------------------------------------------------------- # def keepalive(): ''' - Pings Recursion.Space as an indicator that the hub is wtill active. + Pings Recursion.Space as an indicator that the hub is still active. 
''' try: - with open('system.json', 'r', encoding="utf-8") as system_file: - system_data = json.load(system_file) - - if 'facility' in system_data: + if op_config.get("space", False): requests.get( - f'''{settings.RecursionURL}/hub/keepalive/''' - f'''{system_data["serial"]}/''' - f'''{system_data["CurrentVersion"]}/''' + f'''https://{op_config.get("url")}/hub/keepalive/''' + f'''{op_config.get("serial")}/''' + f'''{op_config.get("version")}/''' f'''{hash_data()["combined"]}/''', - headers={'Authorization' : f'Token {system_data["Token"]}'} + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 ) else: requests.get( - f'''{settings.RecursionURL}/hub/keepalive/''' - f'''{system_data["serial"]}/''' - f'''{system_data["CurrentVersion"]}/''', - headers={'Authorization' : f'Token {system_data["Token"]}'} + f'''https://{op_config.get("url")}/hub/keepalive/''' + f'''{op_config.get("serial")}/''' + f'''{op_config.get("version")}/''', + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 ) except requests.exceptions.RequestException as err: @@ -205,16 +183,11 @@ def keepalive(): except OSError as err: log_api.error("Keepalive OSError: %s", err) - finally: - - try: - log_api.debug('Keepalive check again in 30 seconds from now.') # DEBUG POINT - threading.Timer(30.0, keepalive).start() - - except RuntimeError as err: - log_api.error("Keepalive thread RuntimeError: %s", err) - + log_api.debug('Heartbeat check again in 30 seconds from now.') # DEBUG POINT + heartbeat_thread = threading.Timer(30.0, keepalive) + heartbeat_thread.daemon = True + heartbeat_thread.start() # ---------------------------------------------------------------------------- # @@ -225,9 +198,6 @@ def access_log(card_number, action, result, node, facility): Access request logging to Recursion.Space ''' try: - with open('system.json', 'r+', encoding="utf-8") as system_file: - system_config = json.load(system_file) - payload = [ ('cardNumber', card_number), ('action', action), @@ -237,9 +207,10 @@ def access_log(card_number, action, result, node, facility): ] requests.post( - f'{settings.RecursionURL}/accesslog/', + f'https://{op_config.get("url")}/accesslog/', data=payload, - headers={'Authorization' : f'Token {system_config["Token"]}'} + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 ) except RuntimeError as err: diff --git a/openpod/modules/rec_gpio.py b/openpod/modules/rec_gpio.py deleted file mode 100644 index a905855..0000000 --- a/openpod/modules/rec_gpio.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 - -''' -Modlue manages raspbery pi gpio functionality. -''' - -from time import sleep - -import config - -from RPi import GPIO - -from settings import LED_IO, LED_STAT - -GPIO.setmode(GPIO.BCM) #Setup for LED indicators. 
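For reference, the reworked keepalive above follows a self-rescheduling daemon-timer pattern: the request is attempted, and the next timer is armed regardless of the outcome so one failed ping never stops the heartbeat. A minimal standalone sketch of that pattern, assuming a placeholder URL (the 30-second interval mirrors the timer used in the diff):

import threading
import requests

HEARTBEAT_URL = "https://example.invalid/keepalive"  # placeholder, not the project's endpoint
INTERVAL = 30.0  # seconds, same cadence as the timer in the diff


def keepalive_sketch():
    """Ping the server, then re-arm a daemon timer whether or not the ping worked."""
    try:
        requests.get(HEARTBEAT_URL, timeout=10)
    except requests.exceptions.RequestException as err:
        print(f"keepalive failed: {err}")
    # Re-arm outside the except block so a failure never breaks the loop.
    timer = threading.Timer(INTERVAL, keepalive_sketch)
    timer.daemon = True  # daemon timers do not keep the interpreter alive on shutdown
    timer.start()


if __name__ == "__main__":
    # In the real module the main program keeps running; here the process exits
    # after the first ping because only a daemon timer remains.
    keepalive_sketch()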
-GPIO.setwarnings(False) - -GPIO.setup(LED_IO, GPIO.OUT) -GPIO.setup(LED_STAT, GPIO.OUT) - -GPIO.output(LED_IO,GPIO.LOW) -GPIO.output(LED_STAT,GPIO.LOW) - -LED_IO_ON = 0 -LED_IO_OFF = 0 -LED_STAT_ON = 0 -LED_STAT_OFF = 0 - -config.LED_IO_ON = 0 -config.LED_IO_OFF = 0 - -config.LED_STAT_ON = 0 -config.LED_STAT_OFF = 0 - -# def Blink(io_pin, BlinkTime, BlinkCount): -# for i in range (BlinkCount): -# sleep (BlinkTime) -# GPIO.output (io_pin, GPIO.HIGH) -# sleep (BlinkTime) -# GPIO.output (io_pin, GPIO.LOW) -# return - -# ---------------------------------------------------------------------------- # -# Set LED Patterns # -# ---------------------------------------------------------------------------- # - -def state(io_pin, on_time=0, off_time=0): - """ - Sets the io_pin state to on or off. - """ - if io_pin == LED_IO: - led_io_on_off(on_time, off_time) - if io_pin == LED_STAT: - led_io_stat_on_off(on_time, off_time) - - -def led_io_on_off(on_time=0, off_time=0): - ''' - Controls the LED used to indicate input/output actions - ''' - # global LED_IO_ON - # global LED_IO_OFF - config.LED_IO_ON = on_time - config.LED_IO_OFF = off_time - - -def led_io_stat_on_off(on_time=0, off_time=0): - ''' - Controls the LED used to indicate general system status - ''' - # global LED_STAT_ON - # global LED_STAT_OFF - config.LED_STAT_ON = on_time - config.LED_STAT_OFF = off_time - -# ---------------------------------------------------------------------------- # -# LED Threads # -# ---------------------------------------------------------------------------- # - -def led_io_thread(): - ''' - Process occuring in the thread for the I/O LED - ''' - while True: - # global LED_IO_ON - # global LED_IO_OFF - if config.LED_IO_ON > 0: - GPIO.output(LED_IO, GPIO.HIGH) - sleep (config.LED_IO_ON) - if config.LED_IO_OFF > 0: - GPIO.output(LED_IO, GPIO.LOW) - sleep (config.LED_IO_OFF) - if config.LED_IO_ON == 0 and config.LED_IO_OFF == 0: - GPIO.output(LED_IO, GPIO.LOW) - sleep(.1) #Allows CPU time for other threads. - - -def led_stat_thread(): - ''' - Process occuring in thethread for the System Status LED - ''' - while True: - # global LED_STAT_ON - # global LED_STAT_OFF - if config.LED_STAT_ON > 0: - GPIO.output(LED_STAT, GPIO.HIGH) - sleep (config.LED_STAT_ON) - if config.LED_STAT_OFF>0: - GPIO.output(LED_STAT, GPIO.LOW) - sleep (config.LED_STAT_OFF) - if config.LED_STAT_ON == 0 and config.LED_STAT_OFF == 0: - GPIO.output(LED_STAT, GPIO.LOW) - sleep(.1) #Allows CPU time for other threads. diff --git a/openpod/modules/rec_lan.py b/openpod/modules/rec_lan.py index 0624cea..b44314e 100644 --- a/openpod/modules/rec_lan.py +++ b/openpod/modules/rec_lan.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 ''' -Handles all netwrork related activities for the hub. +Handles all network related activities for the hub. DOES NOT PERFORM KEEPALIVE - SEE rec_api ''' @@ -10,19 +10,16 @@ import settings +from modules import op_gpio from modules.rec_log import network_log -if settings.IS_PI: - from modules import rec_gpio - - # ------------ Triggers visual indicators based on network status. ----------- # def monitor_network(last_network_status=5, thread_delay=30.0): ''' Threaded: Yes Checks network connection, then updates visual indicators. - Thread delay is extended to up 600 seconds if error occurs. Resets to 10 seconds on sucess. + Thread delay is extended to up 600 seconds if error occurs. Resets to 10 seconds on success. 
''' try: current_network_status = test_network() @@ -34,16 +31,16 @@ def monitor_network(last_network_status=5, thread_delay=30.0): if settings.IS_PI: if network_status == 0: - rec_gpio.state(settings.LED_STAT, .125, .125) + op_gpio.no_network() if network_status == 1: - rec_gpio.state(settings.LED_STAT, .25, .25) + op_gpio.no_internet() if network_status == 2: - rec_gpio.state(settings.LED_STAT, 1, 1) + op_gpio.no_recursion() if network_status == 3: - rec_gpio.state(settings.LED_STAT, 0, 0) + op_gpio.networked() thread_delay = 10 @@ -61,18 +58,19 @@ def monitor_network(last_network_status=5, thread_delay=30.0): raise RuntimeError('Network monitoring thread has failed.') from err finally: - network_log.debug('Thread timer set for %s second from now.', thread_delay) # DEBUG POINT + network_log.debug('Thread timer set for %s second from now.', thread_delay) # DEBUG POINT network_watch_thread = threading.Timer( thread_delay, monitor_network, [current_network_status, thread_delay] ) - network_watch_thread.setName('network_watch_thread') + network_watch_thread.name = 'network_watch_thread' network_watch_thread.start() return True + def test_network(): ''' Performs tired network tests. @@ -81,7 +79,7 @@ def test_network(): if networked() is False: return 0 - # Recived a IP address that is not 127.0.0.1, but was unable to access the internet. + # Received a IP address that is not 127.0.0.1, but was unable to access the internet. if internet_on() is False: network_log.warning('LAN Check Fail') return 1 @@ -90,7 +88,7 @@ def test_network(): if recursion_connection() is False: return 2 - #All checks passed and Recursion server is reachable + # All checks passed and Recursion server is reachable # network_log.info('LAN Check Pass') # This would be called every 10 seconds return 3 @@ -104,7 +102,7 @@ def networked(): ''' local_ip = get_ip()[1] - if local_ip == "127.0.0.1": + if local_ip == "127.0.0.1": return False return True @@ -115,17 +113,17 @@ def internet_on(): Performs requests to known external servers. ''' try: - if requests.get('https://recursion.space').status_code == requests.codes.ok: + if requests.get('https://recursion.space', timeout=10).status_code == requests.codes.ok: return True except requests.exceptions.RequestException: try: - if requests.get('https://google.com').status_code == requests.codes.ok: + if requests.get('https://google.com', timeout=10).status_code == requests.codes.ok: return True except requests.exceptions.RequestException: try: - if requests.get('https://amazon.com').status_code == requests.codes.ok: + if requests.get('https://amazon.com', timeout=10).status_code == requests.codes.ok: return True except requests.exceptions.RequestException: @@ -139,7 +137,7 @@ def recursion_connection(): Checks if Recursion.Space is reachable. ''' try: - req = requests.get('https://recursion.space') + req = requests.get('https://recursion.space', timeout=10) if req.status_code == requests.codes.ok: return True @@ -155,28 +153,34 @@ def get_ip(): ''' if internet_on() is True: try: - public_ip = requests.get('https://ip.42.pl/raw', verify=False).text + public_ip = requests.get('https://api.ipify.org', timeout=30).text except requests.exceptions.RequestException as err: public_ip = f'Failed to get public IP with error: {err}' network_log.error(public_ip) - except requests.ConnectionResetError as err: - public_ip = f'Failed to get public IP with error: {err}' - network_log.error(public_ip) - else: public_ip = "WLAN not available." 
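The monitor_network/test_network pair above implements a tiered connectivity check: 0 means no usable LAN address, 1 means LAN but no internet, 2 means the internet works but Recursion.Space is unreachable, and 3 means fully connected. A hedged, self-contained sketch of that tiered check with explicit timeouts, using the same hosts the module probes (the UDP connect to 8.8.8.8:53 only discovers the local address; no traffic is sent):

import socket
import requests


def tiered_network_status() -> int:
    """Return 0 (no LAN), 1 (no internet), 2 (service unreachable), or 3 (all good)."""
    # Tier 0: can we even get a non-loopback local address?
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
            sock.connect(("8.8.8.8", 53))
            local_ip = sock.getsockname()[0]
    except OSError:
        return 0
    if local_ip.startswith("127."):
        return 0

    # Tier 1: LAN is up, but a well-known external site is unreachable.
    try:
        requests.get("https://google.com", timeout=10)
    except requests.exceptions.RequestException:
        return 1

    # Tier 2: internet works, but the application server does not answer.
    try:
        if requests.get("https://recursion.space", timeout=10).status_code != 200:
            return 2
    except requests.exceptions.RequestException:
        return 2

    return 3  # every tier passed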
try: - local_ip = ([l for l in ([ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] + hostname = socket.gethostname() + + ip_address = socket.gethostbyname_ex(hostname)[2] + + local_ip_address = [ip for ip in ip_address if not ip.startswith("127.")][:1] + + sock = [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)] + + socket_connections = [ + [ + (s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in sock + ][0][1] + ] - if not ip.startswith("127.")][:1], [[(s.connect(('8.8.8.8', 53)), - s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, - socket.SOCK_DGRAM)]][0][1]]) if l][0][0]) + local_ip = ([l for l in (local_ip_address, socket_connections) if l][0][0]) # network_log.info("Hub's local IP address: {0}".format(local_ip)) - #Prevent constant log writting since now in loop + # Prevent constant log writing since now in loop except OSError as err: network_log.error('Unable to get local IP address with error: %s', err) diff --git a/openpod/modules/rec_log.py b/openpod/modules/rec_log.py index 831e91e..7d773e7 100644 --- a/openpod/modules/rec_log.py +++ b/openpod/modules/rec_log.py @@ -12,9 +12,9 @@ from zipfile import ZipFile import requests import simplejson as json -#https://stackoverflow.com/questions/21663800/python-make-a-list-generator-json-serializable +# https://stackoverflow.com/questions/21663800/python-make-a-list-generator-json-serializable -# from modules import rec_lan +from modules import op_config import settings @@ -28,7 +28,7 @@ # ---------------------------------------------------------------------------- # standard_format = logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%y-%m-%d %H:%M:%S') logging.basicConfig() -logging.getLogger().setLevel(logging.INFO) # Sets default level to INFO for all logs +logging.getLogger().setLevel(logging.INFO) # Sets default level to INFO for all logs console = logging.StreamHandler() console.setFormatter(standard_format) @@ -44,9 +44,9 @@ exception_log.setLevel(logging.DEBUG) try: - exception_log_file = logging.FileHandler('/opt/RecursionHub/logs/exception.log', mode ='a') + exception_log_file = logging.FileHandler('/opt/OpenPod/logs/exception.log', mode='a') except FileNotFoundError: - exception_log_file = logging.FileHandler('tests/exception.log', mode ='a') # CI Testing + exception_log_file = logging.FileHandler('tests/exception.log', mode='a') # CI Testing exception_log_file.setFormatter(standard_format) exception_log.addHandler(exception_log_file) @@ -59,9 +59,9 @@ log_api.setLevel(logging.DEBUG) try: - log_api_file = logging.FileHandler('/opt/RecursionHub/logs/api.log', mode ='a') + log_api_file = logging.FileHandler('/opt/OpenPod/logs/api.log', mode='a') except FileNotFoundError: - log_api_file = logging.FileHandler('tests/api.log', mode ='a') # CI Testing + log_api_file = logging.FileHandler('tests/api.log', mode='a') # CI Testing log_api_file.setFormatter(standard_format) log_api.addHandler(log_api_file) @@ -74,9 +74,9 @@ network_log.setLevel(logging.DEBUG) try: - network_log_file = logging.FileHandler('/opt/RecursionHub/logs/network.log', mode ='a') + network_log_file = logging.FileHandler('/opt/OpenPod/logs/network.log', mode='a') except FileNotFoundError: - network_log_file = logging.FileHandler('tests/network.log', mode ='a') # CI Testing + network_log_file = logging.FileHandler('tests/network.log', mode='a') # CI Testing network_log_file.setFormatter(standard_format) network_log.addHandler(network_log_file) @@ -89,9 +89,9 @@ log_xbee.setLevel(logging.DEBUG) try: - 
xbee_log_file = logging.FileHandler('/opt/RecursionHub/logs/xbee.log', mode ='a') + xbee_log_file = logging.FileHandler('/opt/OpenPod/logs/xbee.log', mode='a') except FileNotFoundError: - xbee_log_file = logging.FileHandler('tests/xbee.log', mode ='a') # CI Testing + xbee_log_file = logging.FileHandler('tests/xbee.log', mode='a') # CI Testing xbee_log_file.setFormatter(standard_format) log_xbee.addHandler(xbee_log_file) @@ -104,23 +104,23 @@ mqtt_log.setLevel(logging.DEBUG) try: - mqtt_log_file = logging.FileHandler('/opt/RecursionHub/logs/mqtt.log', mode ='a') + mqtt_log_file = logging.FileHandler('/opt/OpenPod/logs/mqtt.log', mode='a') except FileNotFoundError: - mqtt_log_file = logging.FileHandler('tests/mqtt.log', mode ='a') # CI Testing + mqtt_log_file = logging.FileHandler('tests/mqtt.log', mode='a') # CI Testing mqtt_log_file.setFormatter(standard_format) mqtt_log.addHandler(mqtt_log_file) mqtt_log.addHandler(console) -#Logging configurations, use logfile for critial events, use transaction as print alternative +# Logging configurations, use logfile for critial events, use transaction as print alternative # Console handler decides what information to also "print" based on logging level logfile = logging.getLogger('standardlog') try: - fileHandler = logging.FileHandler('/opt/RecursionHub/logs/RecursionLog.log', mode ='a') + fileHandler = logging.FileHandler('/opt/OpenPod/logs/RecursionLog.log', mode='a') except FileNotFoundError: - fileHandler = logging.FileHandler('tests/RecursionLog.log', mode ='a') #For CI + fileHandler = logging.FileHandler('tests/RecursionLog.log', mode='a') # For CI fileHandler.setFormatter(standard_format) @@ -134,15 +134,16 @@ transaction = logging.getLogger('transaction') try: - fileH = logging.FileHandler('/opt/RecursionHub/logs/TransactionLog.log', mode ='a') + fileH = logging.FileHandler('/opt/OpenPod/logs/TransactionLog.log', mode='a') except FileNotFoundError: - fileH = logging.FileHandler('tests/TransactionLog.log', mode ='a') #For CI + fileH = logging.FileHandler('tests/TransactionLog.log', mode='a') # For CI fileH.setFormatter(standard_format) transaction.setLevel(logging.DEBUG) transaction.addHandler(console) transaction.addHandler(fileH) + def publog(level, note): ''' Sets the log level. @@ -172,32 +173,31 @@ def transaction_log(level, note): # ------ Captures wide range of system settings for debugging purposes. ------ # -def snapshot(public, local): +def snapshot(public_ip, local_ip): ''' Create a JSON summary of system settings and status. 
''' system_data = {} - # public, local = rec_lan.get_ip() - with open('/opt/RecursionHub/system.json', 'r', encoding="UTF-8") as system_file: + with open('/opt/OpenPod/system.json', 'r', encoding="UTF-8") as system_file: system_json_file = json.load(system_file) system_data["system_json"] = system_json_file - system_data["DEBUG"] = f"{settings.DEBUG}" + system_data["PI"] = f"{settings.IS_PI}" - system_data["PI"] = f"{settings.Pi}" + system_data['ip'] = {} - system_data["local_ip"] = local + system_data['ip']["local"] = local_ip - system_data["public_ip"] = public + system_data['ip']["public"] = public_ip - if 'facility' in system_json_file: + if 'space' in system_json_file: system_data["DataHash"] = hash_data() system_data['pip'] = freeze.freeze() - with open('/opt/RecursionHub/logs/System.Snapshot', 'w', encoding="UTF-8") as snapshot_file: + with open('/opt/OpenPod/logs/System.Snapshot', 'w', encoding="UTF-8") as snapshot_file: snapshot_file.seek(0) json.dump(system_data, snapshot_file, iterable_as_array=True) snapshot_file.truncate() @@ -207,45 +207,44 @@ def snapshot(public, local): # ---------------------------------------------------------------------------- # # Send Diagnostics # # ---------------------------------------------------------------------------- # + + def dump_diagnostics(): ''' - Send the summary of setting to Recurson.Space + Send the summary of setting to Recursion.Space ''' - with open('system.json', 'r+', encoding="UTF-8") as system_file: - system_config = json.load(system_file) - - with open('/opt/RecursionHub/logs/System.Snapshot', 'r', encoding="UTF-8") as snapshot_file: - if 'Token' in system_config: + with open('/opt/OpenPod/logs/System.Snapshot', 'r', encoding="UTF-8") as snapshot_file: + if op_config.get('api_token', False): payload = json.load(snapshot_file) try: - requests.put(f'{settings.RecursionURL}/v1/diagnostics/', - json={"snapshot":payload}, - headers={'Authorization' : f"Token {system_config['Token']}"} - ) + requests.put(f'https://{op_config.get("url")}/v1/diagnostics/', + json={"snapshot": payload}, + headers={'Authorization': f"Token {op_config.get('api_token')}"}, + timeout=10 + ) except requests.exceptions.RequestException as err: exception_log.error('Unable to submit diagnostics. 
Error: %s', err) # --------------------------- Zip & Send Log Files --------------------------- # + + def zip_send(): ''' - Zip all log files togeather and send to Recursion.Space + Zip all log files together and send to Recursion.Space ''' try: - with open('system.json', 'r', encoding="UTF-8") as file: - system_data = json.load(file) - - zip_file = f'/opt/RecursionHub/logs/{system_data["serial"]}_logs.zip' + zip_file = f'/opt/OpenPod/logs/{op_config.get("serial")}_logs.zip' with ZipFile(zip_file, 'w') as zip_logs: - zip_logs.write('/opt/RecursionHub/logs/System.Snapshot', 'system_snapshot.txt') - zip_logs.write('/opt/RecursionHub/logs/network.log', 'network.log') - zip_logs.write('/opt/RecursionHub/logs/xbee.log', 'xbee.log') - zip_logs.write('/opt/RecursionHub/logs/mqtt.log', 'mqtt.log') - zip_logs.write('/opt/RecursionHub/logs/exception.log', 'exception.log') - zip_logs.write('/opt/RecursionHub/logs/RecursionLog.log', 'RecursionLog.log') - zip_logs.write('/opt/RecursionHub/logs/TransactionLog.log', 'TransactionLog.log') + zip_logs.write('/opt/OpenPod/logs/System.Snapshot', 'system_snapshot.txt') + zip_logs.write('/opt/OpenPod/logs/network.log', 'network.log') + zip_logs.write('/opt/OpenPod/logs/xbee.log', 'xbee.log') + zip_logs.write('/opt/OpenPod/logs/mqtt.log', 'mqtt.log') + zip_logs.write('/opt/OpenPod/logs/exception.log', 'exception.log') + zip_logs.write('/opt/OpenPod/logs/RecursionLog.log', 'RecursionLog.log') + zip_logs.write('/opt/OpenPod/logs/TransactionLog.log', 'TransactionLog.log') zip_logs.close() @@ -256,9 +255,10 @@ def zip_send(): with open(zip_file, 'rb') as zip_file_logs: requests.post( - f'{settings.RecursionURL}/files/upload/external/hublogs/', - files={"file":zip_file_logs}, - headers={'Authorization':f'Token {system_data["Token"]}'} + f'https://{op_config.get("url")}/files/upload/external/hublogs/', + files={"file": zip_file_logs}, + headers={'Authorization': f'Token {op_config.get("api_token")}'}, + timeout=10 ) else: @@ -271,7 +271,6 @@ def zip_send(): exception_log.error("zip_send TypeError: %s", err) - # ---------------------------------------------------------------------------- # # Hash Stored Data # # ---------------------------------------------------------------------------- # @@ -280,22 +279,22 @@ def hash_data(): Produce a hash of the available data to compare with the data on Recursion.Space ''' try: - with open("/opt/RecursionHub/data/dump.json", "rb") as dump_file: + with open("/opt/OpenPod/data/dump.json", "rb") as dump_file: dump_hash = hashlib.md5( dump_file.read() ).hexdigest() - with open("/opt/RecursionHub/data/nodes.json", "rb") as nodes_file: + with open("/opt/OpenPod/data/nodes.json", "rb") as nodes_file: nodes_hash = hashlib.md5( nodes_file.read() ).hexdigest() - with open("/opt/RecursionHub/data/owners.json", "rb") as owners_file: + with open("/opt/OpenPod/data/owners.json", "rb") as owners_file: owners_hash = hashlib.md5( - owners_file.read() + owners_file.read() ).hexdigest() - with open("/opt/RecursionHub/data/permissions.json", "rb") as perm_file: + with open("/opt/OpenPod/data/permissions.json", "rb") as perm_file: permissions_hash = hashlib.md5( perm_file.read() ).hexdigest() @@ -304,11 +303,11 @@ def hash_data(): f"{dump_hash}, {nodes_hash}, {owners_hash}, {permissions_hash}".encode()) return { - 'combined':combined_hash.hexdigest(), - 'dumpHash':dump_hash, - 'nodesHash':nodes_hash, - 'ownersHash':owners_hash, - 'permissionsHash':permissions_hash + 'combined': combined_hash.hexdigest(), + 'dumpHash': dump_hash, + 'nodesHash': 
nodes_hash, + 'ownersHash': owners_hash, + 'permissionsHash': permissions_hash } except FileNotFoundError as err: diff --git a/openpod/modules/rec_lookup.py b/openpod/modules/rec_lookup.py index 471be02..b062815 100644 --- a/openpod/modules/rec_lookup.py +++ b/openpod/modules/rec_lookup.py @@ -15,6 +15,8 @@ from modules.rec_log import exception_log # --------------------------- Count Matching Nodes --------------------------- # + + def count_matching_mac(rx_source): ''' Checks to see if the mac address being paired has been seen before by the system. @@ -27,7 +29,7 @@ def count_matching_mac(rx_source): match_counter = 0 try: - with open("/opt/RecursionHub/data/nodes.json", "r", encoding="utf-8") as node_file: + with open("/opt/OpenPod/data/nodes.json", "r", encoding="utf-8") as node_file: for data in json.load(node_file): if data['mac'] == rx_source: match_counter += 1 @@ -46,18 +48,20 @@ def count_matching_mac(rx_source): # ---------------------------------------------------------------------------- # # Convert MAC to Node ID # # ---------------------------------------------------------------------------- # + + def mac_to_id(mac_address): ''' Returns the id for the node that belongs to the mac address. ''' - with open('/opt/RecursionHub/data/nodes.json', 'r', encoding="utf-8") as nodes_file: + with open('/opt/OpenPod/data/nodes.json', 'r', encoding="utf-8") as nodes_file: nodes_dump = json.load(nodes_file) for node in nodes_dump: if node['mac'] == mac_address: return node['id'] - #Defaults if the nodes has not been added yet. + # Defaults if the nodes has not been added yet. return mac_address @@ -69,7 +73,7 @@ def is_owner(lookup_id): Checks if the ID matches to an owner. Returns True or False ''' - with open("/opt/RecursionHub/data/owners.json", "r", encoding="utf-8") as owner_file: + with open("/opt/OpenPod/data/owners.json", "r", encoding="utf-8") as owner_file: owner_file = json.load(owner_file) for owner in owner_file: @@ -86,7 +90,7 @@ def get_details(card_id): ''' Checks if user exsists, if yes, returns details. ''' - with open("/opt/RecursionHub/data/dump.json", "r", encoding="utf-8") as member_file: + with open("/opt/OpenPod/data/dump.json", "r", encoding="utf-8") as member_file: member_file = json.load(member_file) for user in member_file: @@ -108,7 +112,7 @@ def get_group_details(access_group_id): Returns the detils of group for a user. 
access_group_id is an integer ''' - with open("/opt/RecursionHub/data/permissions.json", "r", encoding="utf-8") as permissions_file: + with open("/opt/OpenPod/data/permissions.json", "r", encoding="utf-8") as permissions_file: permissions_file = json.load(permissions_file) for group in permissions_file: @@ -152,9 +156,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Owner Allowed", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Owner Allowed", reference_node, system_info['facility'] + ) ).start() return 1 @@ -171,9 +175,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "User not found", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "User not found", reference_node, system_info['facility'] + ) ).start() return 2 @@ -182,9 +186,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Manually Restricted", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Manually Restricted", reference_node, system_info['facility'] + ) ).start() return 2 @@ -194,9 +198,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Group Not Found", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Group Not Found", reference_node, system_info['facility'] + ) ).start() return 2 @@ -204,8 +208,8 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 if reference_node not in group["allowedNodes"]: # Node Not Allowed threading.Thread( - target=rec_api.access_log, - args=( + target=rec_api.access_log, + args=( requested_id, "Requested access", "No node permission", reference_node, system_info['facility'] ) @@ -214,13 +218,13 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 # 24/7 Override if group.get("twenty_four_seven", False): - print ("Group has 24/7 access.") + print("Group has 24/7 access.") threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Allowed", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Allowed", reference_node, system_info['facility'] + ) ).start() return 1 @@ -230,9 +234,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Now Allowed Day", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Now Allowed Day", reference_node, system_info['facility'] + ) ).start() return 2 @@ -245,9 +249,9 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Allowed", reference_node, system_info['facility'] - ) + requested_id, "Requested access", + "Allowed", reference_node, system_info['facility'] + ) ).start() return 1 @@ -255,8 +259,8 @@ def access_request(requested_id, request_node): # pylint: disable=R0911 threading.Thread( target=rec_api.access_log, args=( - requested_id, "Requested access", - "Not Allowed Time", reference_node, system_info['facility'] - 
) + requested_id, "Requested access", + "Not Allowed Time", reference_node, system_info['facility'] + ) ).start() return 2 diff --git a/openpod/modules/rec_mqtt.py b/openpod/modules/rec_mqtt.py index 82e0d29..56d3fc9 100644 --- a/openpod/modules/rec_mqtt.py +++ b/openpod/modules/rec_mqtt.py @@ -9,12 +9,13 @@ import subprocess import paho.mqtt.client as mqtt -from modules import rec_api, rec_xbee, rec_lookup +from modules import op_config, op_ssh, rec_api, rec_xbee, rec_lookup from modules.rec_log import mqtt_log, exception_log, zip_send -import settings import updater # The callback for when the client receives a CONNACK response from the server. + + def on_connect(client, userdata, flags, return_code): ''' Action taken once a connection has been established. @@ -32,13 +33,14 @@ def on_connect(client, userdata, flags, return_code): def on_message(client, userdata, message): ''' - Handles messeges coming in via MQTT. - 170 - Pairing un-paired Hub - 186 - Pull New Data - 202 - Install System Update - 218 - Timezone Change - 234 - Reboot Hub (Soft restart) - 250 - Zip & Send Logs + Handles messages coming in via MQTT. + 170 (AA) - Pairing un-paired Hub + 171 (AB) - Pull SSH Keys + 186 (BA) - Pull New Data + 202 (CA) - Install System Update + 218 (DA) - Timezone Change + 234 (EA) - Reboot Hub (Soft restart) + 250 (FA) - Zip & Send Logs Node Command - xxxxxxxxxxxxxxxx_## ''' @@ -49,6 +51,7 @@ def on_message(client, userdata, message): try: mqtt_actions = { 170: rec_api.link_hub, + 171: op_ssh.update_keys, 186: rec_api.pull_data_dump, 202: mqtt_start_update, 218: rec_api.update_time_zone, @@ -84,13 +87,15 @@ def on_message(client, userdata, message): mqtt_log.error("MQTT did not match action codes. Payload: %s", message.payload) + return False + def mqtt_rx(): ''' - Esablish a connection to the Recursion.Space MQTT broker + Establish a connection to the Recursion.Space MQTT broker Define what happens when the following actions occur: - Connection to the MQTT broker is made. - - A message is recived. + - A message is received. ''' mqtt_log.info('Connecting to MQTT broker') @@ -98,23 +103,23 @@ def mqtt_rx(): client.on_connect = on_connect client.on_message = on_message - client.connect(f"{settings.RECURSION_DOMAIN}", 1883, 60) + client.connect(f"{op_config.get('url')}", 1883, 60) client.loop_forever() + def mqtt_start_update(): ''' Called by the MQTT function handler to start an update. ''' mqtt_log.info("UPDATE AVAILABLE - Triggered by the user.") try: - updater.update_hub() + updater.update_pod() except RuntimeError as err: exception_log.error("Error while updating, atempting as subprocess. 
%s", err) - with open('system.json', 'r+', encoding="UTF-8") as file: - system_data = json.load(file) - update_location = f'/opt/RecursionHub/{system_data.CurrentVersion}/updater.py' - with subprocess.Popen(['sudo', 'python3', f'{update_location}']) as script: - print(script) + update_location = f'/opt/OpenPod/{op_config.get("version")}/updater.py' + with subprocess.Popen(['sudo', 'python3', f'{update_location}']) as script: + print(script) + def mqtt_restart_system(): ''' diff --git a/openpod/modules/rec_xbee.py b/openpod/modules/rec_xbee.py index 7315b3d..aac9bf6 100644 --- a/openpod/modules/rec_xbee.py +++ b/openpod/modules/rec_xbee.py @@ -3,25 +3,19 @@ Recursion.Space - XBee Module ''' -import json import binascii import time -# import threading + from time import sleep import serial from pubsub import pub -import settings - -from modules import rec_lookup, rec_api +from modules import rec_lookup, rec_api, op_config from modules.rec_log import log_xbee -#Not sure if LED indicators are being used here. -# if settings.Pi: -# from modules import rec_gpio - #Serial information needed to detect and use XBee. - # https://www.reddit.com/r/Python/comments/6jtzua/pyserial_minimum_time_for_timeout/ +# Serial information needed to detect and use XBee. +# https://www.reddit.com/r/Python/comments/6jtzua/pyserial_minimum_time_for_timeout/ try: ser = serial.Serial("/dev/ttyUSB0", baudrate=9600, timeout=.3) except serial.serialutil.SerialException as serial_err: @@ -29,14 +23,15 @@ # from settings import LED_IO, LED_STAT + def receive(): ''' Processes incoming xbee serial data. ''' - rx_data = ser.readline() #Read data that is currently waiting. + rx_data = ser.readline() # Read data that is currently waiting. sleep(.05) - data_remaining = ser.inWaiting() #Read any data that was not origionally read. - rx_data += ser.read(data_remaining) #Add any data that was left out in first pass. + data_remaining = ser.inWaiting() # Read any data that was not origionally read. + rx_data += ser.read(data_remaining) # Add any data that was left out in first pass. rx_data = binascii.b2a_hex(rx_data).strip().decode('utf-8') @@ -46,33 +41,34 @@ def receive(): log_xbee.info("The cordinator has started.") return (0, 0, 5) - if rx_data[41:(len(rx_data)-2)].isdigit() is False and rx_data != '7e00028a066f': + if rx_data[41:(len(rx_data)-2)].isdigit() is False and rx_data != '7e00028a066f': rx_source = rx_data[8:24] log_xbee.info('Device %s is connecting to network', rx_source) # rx_data = rx_data[30:(len(rx_data)-2)] - lookup_responce = rec_lookup.count_matching_mac(rx_source) - #for Responce in MySQL_Responce: - log_xbee.info('Returned %s matches for MAC addressed.', lookup_responce) - if lookup_responce == 0: + lookup_response = rec_lookup.count_matching_mac(rx_source) + # for Responce in MySQL_Responce: + log_xbee.info('Returned %s matches for MAC addressed.', lookup_response) + if lookup_response == 0: if len(rx_source) == 16: rec_api.pair_node(rx_source) log_xbee.info('%s Connected To XBee Network', rx_source) - return (0,0,5) + return (0, 0, 5) log_xbee.warning("%s is not a valid MAC length, not paired", rx_source) - return (0,0,0) # (Placeholder) Need to set a return that an error has occured. + return (0, 0, 0) # (Placeholder) Need to set a return that an error has occured. 
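The on_message handler above routes numeric MQTT payloads through a dispatch table (170 pair pod, 171 pull SSH keys, 186 pull data, and so on) instead of a chain of if/elif branches. A minimal sketch of that dispatch-table pattern with paho-mqtt, using stand-in handlers and a placeholder broker rather than the project's real functions:

import paho.mqtt.client as mqtt


def pair_pod():   # stand-in for rec_api.link_hub
    print("pairing pod")


def pull_data():  # stand-in for rec_api.pull_data_dump
    print("pulling data dump")


# Dispatch table keyed by the numeric action codes documented in the docstring above.
ACTIONS = {
    170: pair_pod,   # 0xAA - pair an un-paired pod
    186: pull_data,  # 0xBA - pull new data
}


def on_message(client, userdata, message):
    """Look the payload up in the dispatch table and run the matching handler."""
    try:
        code = int(message.payload)
    except ValueError:
        print(f"unrecognised payload: {message.payload!r}")
        return
    handler = ACTIONS.get(code)
    if handler:
        handler()
    else:
        print(f"no action registered for code {code}")


if __name__ == "__main__":
    client = mqtt.Client()
    client.on_message = on_message
    client.connect("broker.example.invalid", 1883, 60)  # placeholder broker address
    client.loop_forever()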
- if rx_data[42:(len(rx_data)-2)].isdigit() is True and rx_data != '7e00028a066f': + if rx_data[42:(len(rx_data)-2)].isdigit() is True and rx_data != '7e00028a066f': rx_source = rx_data[8:24] rx_data = rx_data[42:(len(rx_data)-2)] log_xbee.info('User %s requesting access from node %s', rx_data, rx_source) serial_lookup = rec_lookup.access_request(rx_data, rx_source) - return rx_source,rx_data,serial_lookup + return rx_source, rx_data, serial_lookup + + return (0, 0, 0) # (Placeholder) Need to set a return that an error has occured. - return (0,0,0) # (Placeholder) Need to set a return that an error has occured. def transmit(destination, data): ''' @@ -83,23 +79,23 @@ def transmit(destination, data): # BlinkThread.start() dest_16bit = 'FFFE' #data_hex = binascii.hexlify(data) - data_hex = data + data_hex = data hex_len = hex(14 + (len(data_hex)//2)) - hex_len = hex_len.replace('x','00') + hex_len = hex_len.replace('x', '00') checksum = 17 - for i in range(0,len(destination),2): - checksum = checksum + int(destination[i:i+2],16) - for i in range(0,len(dest_16bit),2): - checksum = checksum + int(dest_16bit[i:i+2],16) - for i in range(0,len(data_hex),2): - checksum = checksum + int(data_hex[i:i+2],16) - checksum = checksum%256 + for i in range(0, len(destination), 2): + checksum = checksum + int(destination[i:i+2], 16) + for i in range(0, len(dest_16bit), 2): + checksum = checksum + int(dest_16bit[i:i+2], 16) + for i in range(0, len(data_hex), 2): + checksum = checksum + int(data_hex[i:i+2], 16) + checksum = checksum % 256 checksum = 256 - checksum checksum = format(checksum, '02x') #checksum = hex(checksum) #checksum = checksum[-2:] tx_req = ("7E" + hex_len + "10" + "00" + destination - + dest_16bit + "00" + "00" + data_hex + checksum) + + dest_16bit + "00" + "00" + data_hex + checksum) transmission = binascii.unhexlify(tx_req) log_xbee.info("Transmitting: %s", transmission) ser.write(transmission) @@ -107,46 +103,48 @@ def transmit(destination, data): # ---------------------------------------------------------------------------- # # New XBee Setup Procedure # # ---------------------------------------------------------------------------- # + + def configure_xbee(): ''' Configure Hub XBee modules for first time use. ''' log_xbee.info("Configuring XBee for first time use") - with open('system.json', 'r', encoding="UTF-8") as file: - system_data = json.load(file) - - #Sequence of commands to configure a new XBee, sent in transparent mode. - param_config = [ - b'ATCE1\r', #Enable Cordinator - b'ATAP1\r', #Enable API Mode - b'ATAO1\r', #Set Output Mode To Explicit - b'ATEE1\r', #Enable Security - b'ATEO2\r', #Enable Trust Center - f'ATKY{system_data["XBEE_KY"]}'.encode(), #Set Encryption Key - b'ATAC\r', #Apply Queued Changes - b'ATCN\r', #Exit Command Mode - ] - - ser.write(b'+++') #Enter Command Mode - sleep(1) - print(ser.readline()) - for command in param_config: - ser.write(command) - rx_data = ser.read_until(expected='\r').decode() #Reads buffer until carriage return. - rx_data = str(rx_data.rstrip()) - log_xbee.info("Initial XBee Configuration TX: %s - RX: %s", command, rx_data) + xbee_config = op_config.get('XBEE') + + # Sequence of commands to configure a new XBee, sent in transparent mode. 
+ param_config = [ + b'ATCE1\r', # Enable Cordinator + b'ATAP1\r', # Enable API Mode + b'ATAO1\r', # Set Output Mode To Explicit + b'ATEE1\r', # Enable Security + b'ATEO2\r', # Enable Trust Center + f'ATKY{xbee_config.get("KY")}'.encode(), # Set Encryption Key + b'ATAC\r', # Apply Queued Changes + b'ATCN\r', # Exit Command Mode + ] + + ser.write(b'+++') # Enter Command Mode + sleep(1) + print(ser.readline()) + for command in param_config: + ser.write(command) + rx_data = ser.read_until(expected='\r').decode() # Reads buffer until carriage return. + rx_data = str(rx_data.rstrip()) + log_xbee.info("Initial XBee Configuration TX: %s - RX: %s", command, rx_data) - sleep(10) + sleep(10) def listing(): - '''Function to handling incoming serial info''' + ''' + Function to handling incoming serial info + ''' while True: - if settings.Pi: - if ser.in_waiting != 0: - pub.sendMessage('xbee_rx') - sleep(.01) #Allows CPU time for other threads. + if ser.in_waiting != 0: + pub.sendMessage('xbee_rx') + sleep(.01) # Allows CPU time for other threads. # def mqtt_xbee(firstName, lastName, cardNumber, action, result, timestamp, space): @@ -179,7 +177,7 @@ def xbee_info(): ser.write(b'ATAP\r') # Checks for API mode sleep(.1) - rx_data = 1 # TEMP PATCH + rx_data = 1 # TEMP PATCH try: rx_data = ser.read_until(expected='\r').decode() # Read buffer until carriage return rx_data = str(rx_data.rstrip()) @@ -190,32 +188,25 @@ def xbee_info(): if rx_data != 1: configure_xbee() + sleep(1) # No characters sent for 1 second (Guard Times) + ser.write(b'+++') # Enters AT Command Mode + sleep(1) # No characters sent for 1 second (Guard Times) - with open("system.json", "r+", encoding="UTF-8") as file: - data = json.load(file) - - sleep(1) # No characters sent for 1 second (Guard Times) - ser.write(b'+++') # Enters AT Command Mode - sleep(1) # No characters sent for 1 second (Guard Times) + ser.readline() # Clear buffer - ser.readline() # Clear buffer + ser.write(b'ATOP\r') # Read the operating 64-bit PAN ID. - ser.write(b'ATOP\r') # Read the operating 64-bit PAN ID. - - sleep(.1) - try: - rx_data = ser.read_until(expected='\r').decode() # Read buffer until carriage return - rx_data = str(rx_data.rstrip()) - except TypeError as err: - log_xbee.error("XBee ATOP command error: %s", err) + sleep(.1) + try: + rx_data = ser.read_until(expected='\r').decode() # Read buffer until carriage return + rx_data = str(rx_data.rstrip()) + except TypeError as err: + log_xbee.error("XBee ATOP command error: %s", err) - data.update( {"XBEE_OP":rx_data} ) - file.seek(0) - json.dump(data, file) - file.truncate() + op_config.set_nested_value(['XBEE', 'OP'], rx_data) - ser.write(b'ATCN\r') # Exit Command Mode - ser.readline() # Clear buffer + ser.write(b'ATCN\r') # Exit Command Mode + ser.readline() # Clear buffer xbee_network_discovery() diff --git a/openpod/pod.py b/openpod/pod.py index 5f79d17..086bf13 100644 --- a/openpod/pod.py +++ b/openpod/pod.py @@ -6,109 +6,122 @@ """ import sys -import os.path #Allows modules to access from directory above. -import json +import os.path # Allows modules to access from directory above. 
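Much of this patch replaces direct edits of system.json with calls into a new op_config helper (get, set_value, set_nested_value), whose implementation is not part of the diff. Purely as an illustration of the interface those calls assume, here is a minimal JSON-backed sketch; the file path and behaviour are guesses, not the project's actual module:

import json

CONFIG_PATH = "/opt/OpenPod/pod.json"  # assumed location, not confirmed by this diff


def _load() -> dict:
    try:
        with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
            return json.load(fp)
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


def _save(data: dict) -> None:
    with open(CONFIG_PATH, "w", encoding="utf-8") as fp:
        json.dump(data, fp, indent=4)


def get(key, default=None):
    """Return a top-level value, e.g. get('serial')."""
    return _load().get(key, default)


def set_value(key, value) -> None:
    """Set a top-level value, e.g. set_value('api_token', token)."""
    data = _load()
    data[key] = value
    _save(data)


def set_nested_value(keys, value) -> None:
    """Set a nested value, e.g. set_nested_value(['XBEE', 'OP'], pan_id)."""
    data = _load()
    node = data
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    _save(data)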
import threading from time import sleep import config import requests from pubsub import pub -import settings - -from modules import rec_log, rec_mqtt, rec_xbee, rec_api +from modules import op_config, op_gpio, op_ssh, rec_log, rec_mqtt, rec_xbee, rec_api, rec_lan from modules.rec_log import exception_log, zip_send -if settings.Pi: - try: - from modules import rec_gpio - rec_gpio.state(settings.LED_IO, 1, 0) - except ModuleNotFoundError as err: - exception_log.error("%s", err) +# --------------------------- Visualization Threads --------------------------- # +threading.Thread(target=op_gpio.led_stat_thread).start() +threading.Thread(target=op_gpio.led_io_thread).start() + +op_gpio.initializing() + + +# ---------------------------------------------------------------------------- # +# Check Network Connection # +# ---------------------------------------------------------------------------- # +try: + rec_lan.monitor_network() # Monitors the network connection while the program is running. +except RuntimeError as err: + exception_log.error(f"FATAL - Start Network Monitoring - Error: {err}") # Not sure if the next section is required. sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))) -with open('/opt/RecursionHub/system.json', 'r+', encoding="utf-8") as system_file: - systemConfig = json.load(system_file) -Version = systemConfig['CurrentVersion'] +Version = op_config.get('version', None) -#Inserts path to refrence then starts importing modules. +# Inserts path to reference then starts importing modules. sys.path.insert(0, f"./{Version}") sys.path.insert(1, f"./{Version}/modules") sys.path.append("..") config.XBEE_FLAG = False + + def incoming_xbee_data(): ''' Required functions to handle events and related status checking activities. 
''' config.XBEE_FLAG = True + pub.subscribe(incoming_xbee_data, 'xbee_rx') -begin_xbee = threading.Thread(target = rec_xbee.listing) +begin_xbee = threading.Thread(target=rec_xbee.listing) begin_xbee.start() -#Register HUB with Recursion -if not systemConfig.get('HUBid', False): - rec_api.register_hub() +# Register HUB with Recursion +if not op_config.get('pod_id', False): + rec_api.register_pod() # ------------------------------- TEMP SOLUTION ------------------------------ # try: - URL = f'{settings.RecursionURL}/obtaintoken/{systemConfig["serial"]}/' - r = requests.get(URL) - - if r.status_code == 201: - print (f"Hub Toekn: {r.text}") - with open("/opt/RecursionHub/system.json", "r+", encoding="UTF-8") as file: - data = json.load(file) - data.update( {"Token":f"{r.text}"} ) - file.seek(0) - json.dump(data, file) - file.truncate() + URL = f'https://{op_config.get("url")}/pod/obtaintoken/{op_config.get("serial")}/' + response = requests.get(URL, timeout=10) + + if response.status_code == 201: + op_config.set_value("api_token", response.text) + except Exception as err: # pylint: disable=W0703 print(err) # ------------------------------- TEMP SOLUTION ------------------------------ # -if settings.DEBUG: +if op_config.get('debug', False): rec_log.publog("debug", "*** DEBUG Enabled ***") -if settings.Pi: - rec_log.publog("debug", "*** PI TRUE ***") -if not settings.Pi: - rec_log.publog("debug", "*** PI FALSE ***") rec_log.publog("info", f"Version: {Version}") rec_api.keepalive() -MQTTlisten = threading.Thread(target = rec_mqtt.mqtt_rx) +MQTTlisten = threading.Thread(target=rec_mqtt.mqtt_rx) MQTTlisten.start() -if settings.Pi: - led_io_thread = threading.Thread(target = rec_gpio.led_io_thread) - led_io_thread.start() - -#Only pull info if hub has already been paired with a facility. +# Only pull info if hub has already been paired with a facility. try: - if 'facility' in systemConfig: + if op_config.get('space', False): rec_log.publog("info", "Pulling any missing data.") rec_api.pull_data_dump() rec_api.update_time_zone() + op_ssh.update_keys() else: rec_log.publog("info", "Facility connection not found, no data to pull.") - if settings.Pi: - rec_gpio.state(settings.LED_IO, 1, 1) # Slow blink ready to pair to a facility. + op_gpio.unregistered() # Slow blink ready to pair to a facility. - zip_send() # Send latest log files on boot. + zip_send() # Send latest log files on boot. except Exception as err: # pylint: disable=W0703 - rec_log.publog("error", f"Error occured when pulling data: {err}") + rec_log.publog("error", f"Error occurred when pulling data: {err}") + + +# ---------------------------------------------------------------------------- # +# XBee Configuration # +# ---------------------------------------------------------------------------- # +if op_config.get('XBEE').get('OP', False) is False: + try: + rec_xbee.xbee_info() + except UnboundLocalError as err: + exception_log.error("Unable to capture XBee info - Error: %s", err) + + +# ---------------------------------------------------------------------------- # +# Logs Settings For Debugging # +# ---------------------------------------------------------------------------- # +try: + public_ip, local_ip = rec_lan.get_ip() + rec_log.snapshot(public_ip, local_ip) +except RuntimeError as err: + exception_log.error("FATAL - Generate Snapshot - Error: %s", err) rec_log.publog("info", "Recursion system has successfully initiated.") @@ -118,25 +131,26 @@ def process_xbee_data(): ''' Triggered when there is available XBee data. 
''' - xbee_frame_info = rec_xbee.receive() #Reads in recived XBee data. - rec_gpio.state(settings.LED_IO, 1) + xbee_frame_info = rec_xbee.receive() # Reads in received XBee data. + op_gpio.ready() # Pod is ready. if xbee_frame_info[2] == 0: - rec_xbee.transmit(xbee_frame_info[0],"30") + rec_xbee.transmit(xbee_frame_info[0], "30") elif xbee_frame_info[2] == 1: - rec_xbee.transmit(xbee_frame_info[0],"31") + rec_xbee.transmit(xbee_frame_info[0], "31") elif xbee_frame_info[2] == 2: - rec_xbee.transmit(xbee_frame_info[0],"32") + rec_xbee.transmit(xbee_frame_info[0], "32") elif xbee_frame_info[2] == 5: pass else: - rec_xbee.transmit(xbee_frame_info[0],"32") + rec_xbee.transmit(xbee_frame_info[0], "32") + -#https://stackoverflow.com/questions/10926328/efficient-and-fast-python-while-loop-while-using-sleep -#https://stackoverflow.com/questions/17553543/pyserial-non-blocking-read-loop +# https://stackoverflow.com/questions/10926328/efficient-and-fast-python-while-loop-while-using-sleep +# https://stackoverflow.com/questions/17553543/pyserial-non-blocking-read-loop while True: if config.XBEE_FLAG: - rec_gpio.state(settings.LED_IO, .125, .125) #Indicate incoming XBee data. - DataProcessing = threading.Thread(target = process_xbee_data) + op_gpio.incoming_data() # Pod is receiving XBee data. + DataProcessing = threading.Thread(target=process_xbee_data) DataProcessing.start() sleep(.05) config.XBEE_FLAG = False diff --git a/openpod/updater.py b/openpod/updater.py index 0786d01..96ef436 100644 --- a/openpod/updater.py +++ b/openpod/updater.py @@ -1,120 +1,72 @@ #!/usr/bin/env python3 -""" -Recursion.Space -hub_updater.py +''' +OpenPod | updater.py -Call the function 'update_hub' to pull the latest update. -""" +Grabs the latest version of OpenPod from GitHub and updates the current version. +''' -#Triggered by the user from the web interface to update the current version. +# Triggered by the user from the web interface to update the current version. -import re -import sys -import json +import os +import shutil import zipfile -import subprocess import urllib.request import requests -import settings - +from modules import op_config from modules.rec_log import exception_log -def current_hub_version(): - ''' - Reades the curent version number from the system file. - ''' - with open('system.json', 'r', encoding="utf-8") as system_file: - system_data = json.load(system_file) - - return system_data['CurrentVersion'] - -def update_version_name(): +def update_pod(): ''' - Fetches the new version number available from the server. + Steps through the update process. + 1) Gets the latest version info from /pod/openpod/version/ + 2) Downloads the latest version zip. + 3) Extracts the zip file. + 4) Copies the files to the root directory. + 5) Cleans up. ''' - with open('system.json', 'r', encoding="utf-8") as system_file: - system_data = json.load(system_file) - request_response= requests.get( - f'{settings.RecursionURL}/updatehub/', - headers={'Authorization' : f"Token {system_data['Token']}"} + try: + latest_version = requests.get( + f"https://{op_config.get('url')}/pod/openpod/version/", + timeout=10 ) - response_data = request_response.headers['content-disposition'] - return re.findall("filename=(.+)", response_data)[0] - - -def download_update(): - ''' - This will request the file and download it. - - Fiest gets the name of the file for the update. 
- - https://stackoverflow.com/questions/45247983/urllib-urlretrieve-with-custom-header - ''' - with open('system.json', 'r+', encoding="utf-8") as system_file: - system_data = json.load(system_file) - - fname = update_version_name() - opener = urllib.request.build_opener() - opener.addheaders = [('Authorization', f"Token {system_data['Token']}")] - urllib.request.install_opener(opener) - urllib.request.urlretrieve(f'{settings.RecursionURL}/updatehub/', fname) - - - - # exception_log.info("Update version pulled: %s", re.findall(r"(.+?)(\.[^.]*$|$)", fname)[0][0]) - return re.findall(r"(.+?)(\.[^.]*$|$)", fname)[0][0] + latest_version = latest_version.json() + # Download the latest version zip. + zip_url = f"{op_config.get('OpenPod').get('repo')}/archive/{latest_version['hash']}.zip" + urllib.request.urlretrieve(zip_url, f"{latest_version['hash']}.zip") -def unzip_update(): - ''' - Extracts the contents of the zip file. - Removed the .zip file. - ''' - new_version = download_update() - - with zipfile.ZipFile(f'{new_version}.zip', 'r') as zip_ref: - zip_ref.extractall(f'{new_version}/') - - subprocess.call(['rm', f'{new_version}.zip']) #Cleaning up downloaded file. + # Extract the zip file. + with zipfile.ZipFile(f"{latest_version['hash']}.zip", 'r') as zip_ref: + zip_ref.extractall() - return new_version + # Copy the files to the root directory. + os.makedirs(f"/opt/OpenPod/versions/{latest_version['hash']}/", exist_ok=True) - -def update_hub(): - ''' - Main code called to update the hub. - ''' - try: - exception_log.info("Update Started") - new_version = unzip_update() + shutil.copytree( + f"OpenPod-{latest_version['hash']}/openpod", + f"/opt/OpenPod/versions/{latest_version['hash']}", + dirs_exist_ok=True + ) except RuntimeError as err: exception_log.error("Unable to pull update with error: %s", err) - new_version = current_hub_version() #If unable to update, just run the current version. - - finally: - with open("system.json", "r+", encoding="utf-8") as file: - data = json.load(file) - data.update( {"CurrentVersion":new_version} ) - file.seek(0) - json.dump(data, file) - file.truncate() + except FileNotFoundError as err: + exception_log.error("Unable to find file with error: %s", err) + else: + # Update the version number in the config file. + op_config.set_value('version', latest_version['version']) + op_config.set_nested_value(['OpenPod', 'commit'], latest_version['hash']) - #Relaunch with new program if update was sucessful. - try: - #Kill process that should be triggered to re-open by bash script. - subprocess.call(['pkill', '-f', 'hub.py']) - subprocess.call(['pkill', '-f', 'HUB_Launcher.py']) - except RuntimeError as err: - exception_log.error("Could not kill program, trying to launch new version. 
Error: %s", err) + finally: + # Clean Up + if os.path.exists(f"{latest_version['hash']}.zip"): + os.remove(f"{latest_version['hash']}.zip") - launch_location = f'/opt/RecursionHub/{new_version}/HUB_Launcher.py' - with subprocess.Popen(['nohup', 'python3', '-u', f'{launch_location}', '&'])as script: - print(script) + shutil.rmtree(f"OpenPod-{latest_version['hash']}/", ignore_errors=True) - finally: - sys.exit() + os.system("sudo systemctl restart openpod.service") diff --git a/requirements.txt b/requirements.txt index b269b0e..6b0a80d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,43 +1,12 @@ -asgiref>=3.2.7 -asn1crypto>=0.24.0 -attrs>=17.4.0 -blinker>=1.4 -certifi>=2020.6.20 -chardet>=3.0.4 -configobj>=5.0.6 -constantly>=15.1.0 -config>=0.5.0 -Deprecated>=1.2.10 -gitdb>=4.0.5 -GitPython>=3.1.8 -hyperlink>=17.3.1 -idna>=2.10 -incremental>=16.10.1 -Interactive>=1.0.1 -jsonpatch>=1.16 -jsonpointer>=1.10 -jsonschema>=2.6.0 -keyring>=10.6.0 -keyrings.alt>=3.0 -MarkupSafe>=1.0 -netifaces>=0.10.4 -oauthlib>=2.0.6 -paho-mqtt -paramiko>=2.7.1 -pubsub>=0.1.2 -pyasn1>=0.4.2 -pyasn1-modules>=0.2.1 -PyJWT>=1.7.1 -pyOpenSSL>=17.5.0 -Pypubsub>=4.0.3 -pyserial>=3.4 -python-crontab>=2.5.1 -python-dateutil>=2.8.1 -pytimeparse>=1.1.8 -pytz>=2020.1 -requests>=2.24.0 -requests-unixsocket>=0.1.5 -RPi.GPIO>=0.7.0 -SecretStorage>=2.3.1 -service-identity>=16.0.0 -simplejson==3.17.2 +config==0.5.1 +paho-mqtt==1.6.1 +pubsub==0.1.2 +PyJWT==2.6.0 +Pypubsub==4.0.3 +pyserial==3.5 +python-dateutil==2.8.2 +pytimeparse==1.1.8 +pytz==2022.6 +requests==2.28.1 +RPi.GPIO==0.7.1 +simplejson==3.18.0 diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index dd48f90..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -''' Required for tests to see folder and perform tests. ''' diff --git a/tests/modules/__init__.py b/tests/modules/__init__.py deleted file mode 100644 index 224cd58..0000000 --- a/tests/modules/__init__.py +++ /dev/null @@ -1 +0,0 @@ -''' Required for tests to recognize folder and perform tests. 
''' diff --git a/tests/modules/test_api.py b/tests/modules/test_api.py deleted file mode 100644 index c02bd14..0000000 --- a/tests/modules/test_api.py +++ /dev/null @@ -1,103 +0,0 @@ -''' Tests for rec_api.py ''' - -import sys -import unittest - -from io import StringIO -from unittest.mock import patch - -from modules import rec_api - -sys.path.insert(0, "0_1_0/") - - -class TestAPI(unittest.TestCase): - '''Collection of tests for the api module''' - - def test_pull_data_dump(self): - '''Test the pull_data_dump function''' - - system_json = StringIO("""{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }""") - - testdata2 = StringIO("""{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }""") - - testdata3 = StringIO("""{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }""") - - testdata4 = StringIO("""{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }""") - - testdata5 = StringIO("""{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }""") - - testreturn = [{ - "cardNumber": "3132323637373936", - "access_group": 7, - "phone_number": "2403426671", - "address": "123 America Ln", - "city": "USA City", - "state": "PA", - "zip_code": " 1234567", - "username": "GenericMember2", - "first_name": "Gener", - "last_name": "Mem", - "email": "member@email.com", - "restricted_nodes": [] - }, { - "cardNumber": "33", - "access_group": 5, - "phone_number": "2403426671", - "address": "", - "city": "", - "state": "", - "zip_code": "", - "username": "GenericMember3", - "first_name": "Generic", - "last_name": "Member3", - "email": "generic@email.com", - "restricted_nodes": ["123", "shdfhethetbe"] - }] - - with patch('modules.rec_api.open') as mock_open: - mock_open.side_effect = [system_json, testdata2, testdata3, testdata4, testdata5] - - with patch("modules.rec_api.requests.get") as mocked_requests: - mocked_requests.return_value.json.return_value = testreturn - self.assertTrue(rec_api.pull_data_dump()) - mock_open.assert_called() - mocked_requests.assert_called() - -if __name__ == '__main__': - unittest.main() diff --git a/tests/modules/test_lan.py b/tests/modules/test_lan.py deleted file mode 100644 index d7edc33..0000000 --- a/tests/modules/test_lan.py +++ /dev/null @@ -1,106 +0,0 @@ -''' Unit testing for rec_lan.py ''' - -import sys -import unittest - -from unittest.mock import patch - -from modules import rec_lan - -sys.path.insert(0, "openpod/") - - -class TestLan(unittest.TestCase): - ''' Tests for the lan module ''' - - def test_monitor_network(self): - ''' - Confirm that the network monitor is running. 
- ''' - with patch('modules.rec_lan.test_network') as mocked_test_network: - mocked_test_network.return_value = 0 - - with patch('modules.rec_lan.threading') as mocked_threading: - self.assertTrue(rec_lan.monitor_network()) - mocked_threading.Timer.assert_called() - - def test_test_network(self): - ''' - Confirms all tests are called properly. - ''' - with patch('modules.rec_lan.networked') as mocked_networked: - mocked_networked.return_value = False - self.assertEqual(rec_lan.test_network(), 0) - - mocked_networked.return_value = True - self.assertNotEqual(rec_lan.test_network(), 0) - - with patch('modules.rec_lan.internet_on') as mocked_internet_on: - mocked_internet_on.return_value = False - self.assertEqual(rec_lan.test_network(), 1) - - mocked_internet_on.return_value = True - self.assertNotEqual(rec_lan.test_network(), 1) - - with patch('modules.rec_lan.recursion_connection') as mocked_recursion_connection: - mocked_recursion_connection.return_value = False - self.assertEqual(rec_lan.test_network(), 2) - - mocked_recursion_connection.return_value = True - self.assertEqual(rec_lan.test_network(), 3) - - self.assertTrue(mocked_networked.called) - self.assertTrue(mocked_internet_on.called) - self.assertTrue(mocked_recursion_connection.called) - - def test_networked(self): - ''' - Confirm that the network is active. - ''' - with patch('modules.rec_lan.get_ip') as mocked_get_ip: - mocked_get_ip.return_value = ("127.0.0.1", "127.0.0.1") - self.assertFalse(rec_lan.networked()) - - mocked_get_ip.return_value = ("192.168.1.1", "192.168.1.1") - self.assertTrue(rec_lan.networked()) - - def test_internet_on(self): - ''' - Confirms active internet connection. - ''' - self.assertTrue(rec_lan.internet_on()) - - with patch('modules.rec_lan.requests.get') as mocked_requests: - mocked_requests.return_value.status_code = None - self.assertFalse(rec_lan.internet_on()) - - def test_recursion_connection(self): - ''' - Confirms that the recursion server is reachable. - ''' - self.assertTrue(rec_lan.recursion_connection()) - - with patch('modules.rec_lan.requests.get') as mocked_requests: - mocked_requests.return_value.status_code = None - self.assertFalse(rec_lan.internet_on()) - - def test_get_ip(self): - ''' - Verify that the ip address is obtained. 
- ''' - with patch('modules.rec_lan.internet_on') as mocked_internet_on: - mocked_internet_on.return_value = True - - with patch('modules.rec_lan.requests.get') as mocked_requests: - mocked_requests.return_value.text = "0.0.0.0" - public_ip, local_ip = rec_lan.get_ip() - - self.assertEqual((rec_lan.get_ip())[0], '0.0.0.0') - self.assertEqual(public_ip, '0.0.0.0') - self.assertNotEqual(local_ip, '127.0.0.1') - - mocked_internet_on.return_value = False - self.assertNotEqual((rec_lan.get_ip())[0], '0.0.0.0') - -if __name__ == '__main__': - unittest.main() diff --git a/tests/modules/test_log.py b/tests/modules/test_log.py deleted file mode 100644 index b3005e6..0000000 --- a/tests/modules/test_log.py +++ /dev/null @@ -1,31 +0,0 @@ -''' Tests for rec_log.py ''' - -# import sys -# import logging -# import unittest - -# #from modules import rec_lan, rec_api, rec_xbee, rec_log - -# sys.path.insert(0, "openpod/") - -# class Testlog(unittest.TestCase): -# ''' Tests for the log module ''' - -# def __init__(self, *args, **kwargs): -# self.reset() -# logging.Handler.__init__(self, *args, **kwargs) - -# def emit(self, record): -# self.messages[record.levelname.lower()].append(record.getMessage()) - -# def reset(self): -# self.messages = { -# 'debug': [], -# 'info': [], -# 'warning': [], -# 'error': [], -# 'critical': [], -# } - -# if __name__ == '__main__': -# unittest.main() diff --git a/tests/modules/test_lookup.py b/tests/modules/test_lookup.py deleted file mode 100644 index 27932ee..0000000 --- a/tests/modules/test_lookup.py +++ /dev/null @@ -1,383 +0,0 @@ -''' -Tests for the lookup module. -''' - -import sys -import unittest - -from io import StringIO -from unittest.mock import patch - -from modules import rec_lookup - -sys.path.insert(0, "0_1_0/") - - -class TestLookup(unittest.TestCase): - '''Lokup Tests''' - - def setUp(self): - self.nodes_json = StringIO( - '''[ - { - "id": 1, - "name": "Test Node", - "mac": "0011223344556677", - "tool": false, - "door": false, - "qr_toggle": false, - "hub": 1, - "facility": "7659e76b-470c-4d5f-bff4-fcc120f08848", - "qr_code": null - } - ]''' - ) - - self.nodes_alternative_json = StringIO( - '''[ - { - "id": 1, - "name": "Test Node", - "mac": "0000000000000000", - "tool": false, - "door": false, - "qr_toggle": false, - "hub": 1, - "facility": "7659e76b-470c-4d5f-bff4-fcc120f08848", - "qr_code": null - } - ]''' - ) - - self.nodes_empty_json = StringIO('') - - def test_count_matching_mac(self): - ''' - Confirms MAC conflection is counted correctly. 
- ''' - count_result = rec_lookup.count_matching_mac("12345678") - self.assertEqual(count_result, 0) - - self.assertEqual( - rec_lookup.count_matching_mac("0011223344556677"), - 0 - ) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.nodes_json - - count_result = rec_lookup.count_matching_mac("0011223344556677") - - mock_open.assert_called() - self.assertEqual(count_result, 1) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.nodes_alternative_json - - count_result = rec_lookup.count_matching_mac("0011223344556677") - - mock_open.assert_called() - self.assertEqual(count_result, 0) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.nodes_empty_json - - self.assertEqual( - rec_lookup.count_matching_mac("0011223344556677"), - 0 - ) - - mock_open.assert_called() - -class TestLookUpAccessRequest(unittest.TestCase): # pylint: disable=R0904 - '''Access Request Tests''' - - def setUp(self): - ''' - Collection of JSON used for testing. - ''' - self.system_json = StringIO( - '''{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }''' - ) - - self.nodes_json = StringIO( - '''[ - { - "id": 1, - "name": "Test Node", - "mac": "0011223344556677", - "tool": false, - "door": false, - "qr_toggle": false, - "hub": 1, - "facility": "7659e76b-470c-4d5f-bff4-fcc120f08848", - "qr_code": null - } - ]''' - ) - - self.members_json = StringIO( - '''[ - { - "cardNumber": "313233343536373839", - "access_group": 123, - "phone_number": "1234567890", - "address": "1331 12th ave", - "city": "Altoona", - "state": "PA", - "zip_code": "16601", - "username": "BestName", - "first_name": "John", - "last_name": "Doe", - "email": "email@email.com", - "restricted_nodes": [0,9,8] - } - ]''' - ) - - self.owners_json = StringIO( - '''[ - { - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032", - "cardNumber": "30393837363534333231", - "phone_number": null, - "address": null, - "city": null, - "state": null, - "zip_code": null, - "username": "OwnerUserName", - "first_name": "Jim", - "last_name": "John", - "email": "email@email.com" - } - ]''' - ) - - self.permissions_json = StringIO( - '''[ - { - "id": 1, - "name": "General Access", - "startTime": "20:20:20", - "endTime": "23:23:23", - "monday": true, - "tuesday": true, - "wednesday": true, - "thursday": true, - "friday": true, - "saturday": true, - "sunday": true, - "twenty_four_seven": false, - "default_fallback": true, - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032", - "allowedNodes": [1, 4, 6] - } - ]''' - ) - - # ----------------------------------- _alt ----------------------------------- # - - self.system_json = StringIO( - '''{ - "serial": "536780dfe639468e8e23fc568006950d", - "timezone": "America/New_York", - "CurrentVersion": "0_0_0", - "HUBid": 40, - "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" - }''' - ) - - self.nodes_json_alt = StringIO( - '''[ - { - "id": 1, - "name": "Test Node", - "mac": "0011223344556677", - "tool": false, - "door": false, - "qr_toggle": false, - "hub": 1, - "facility": "7659e76b-470c-4d5f-bff4-fcc120f08848", - "qr_code": null - } - ]''' - ) - - self.members_json_alt = StringIO( - '''[ - { - "cardNumber": "313233343536373839", - "access_group": 123, - "phone_number": "1234567890", - "address": 
"1331 12th ave", - "city": "Altoona", - "state": "PA", - "zip_code": "16601", - "username": "BestName", - "first_name": "John", - "last_name": "Doe", - "email": "email@email.com", - "restricted_nodes": [0,9,8] - } - ]''' - ) - - self.owners_json_alt = StringIO( - '''[ - { - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032", - "cardNumber": "30393837363534333231", - "phone_number": null, - "address": null, - "city": null, - "state": null, - "zip_code": null, - "username": "OwnerUserName", - "first_name": "Jim", - "last_name": "John", - "email": "email@email.com" - } - ]''' - ) - - self.permissions_json_alt = StringIO( - '''[ - { - "id": 1, - "name": "General Access", - "startTime": "20:20:20", - "endTime": "23:23:23", - "monday": true, - "tuesday": true, - "wednesday": true, - "thursday": true, - "friday": true, - "saturday": true, - "sunday": true, - "twenty_four_seven": false, - "default_fallback": true, - "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032", - "allowedNodes": [1, 4, 6] - } - ]''' - ) - - def test_files_opened(self): - ''' - Confirms that all the files are correctly opened and read. - ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.side_effect = [ - self.system_json, - self.nodes_json, # Opened from conversion function. - self.owners_json, # Opened from owner check function. - self.members_json, # Opened from get_details function. - self.permissions_json, # Opened from get_group_details function. - ] - - self.assertAlmostEqual(rec_lookup.access_request(313131, '0011223344556677'), 2) - mock_open.assert_called() - - def test_mac_to_id(self): - ''' - Confirms that the mac address is converted to the node id. - ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.nodes_json - - node_id = rec_lookup.mac_to_id('0011223344556677') - mock_open.assert_called() - self.assertEqual(node_id, 1) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.nodes_json_alt - - node_id = rec_lookup.mac_to_id('9911223344556677') - mock_open.assert_called() - self.assertEqual(node_id, '9911223344556677') - - def test_is_owner(self): - ''' - Confirms that the owner check function returns the correct value. - ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.owners_json - - owner = rec_lookup.is_owner('30393837363534333231') - mock_open.assert_called() - self.assertTrue(owner) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.owners_json_alt - - owner = rec_lookup.is_owner('99393837363534333231') - mock_open.assert_called() - self.assertFalse(owner) - - def test_get_details(self): - ''' - Verifies that the correct details are returned. - ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.members_json - - user = rec_lookup.get_details('313233343536373839') - mock_open.assert_called() - self.assertTrue(user['found']) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.members_json_alt - - user = rec_lookup.get_details('993233343536373839') - mock_open.assert_called() - self.assertFalse(user['found']) - - def test_get_group_details(self): - ''' - Verifies that the correct details are returned. 
- ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.permissions_json - - group = rec_lookup.get_group_details(1) - mock_open.assert_called() - self.assertTrue(group['found']) - - with patch('modules.rec_lookup.open') as mock_open: - mock_open.return_value = self.permissions_json_alt - - group = rec_lookup.get_group_details(69) - mock_open.assert_called() - self.assertFalse(group['found']) - - def test_access_request_combinations(self): - ''' - Checks that the access request function returns the correct values. - ''' - with patch('modules.rec_lookup.open') as mock_open: - mock_open.side_effect = [ - self.system_json, - self.nodes_json, # Opened from conversion function. - self.owners_json, # Opened from owner check function. - self.members_json, # Opened from get_details function. - self.permissions_json, # Opened from get_group_details function. - ] - - self.assertEqual( - rec_lookup.access_request(313131, '0011223344556677'), - 2 - ) - - mock_open.assert_called() - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/modules/test_mqtt.py b/tests/modules/test_mqtt.py deleted file mode 100644 index 813860b..0000000 --- a/tests/modules/test_mqtt.py +++ /dev/null @@ -1,49 +0,0 @@ -''' -Tests MQTT functionality. -''' -import unittest -from unittest.mock import patch - -from modules import rec_mqtt - - -class DummyMessage: # pylint: disable=R0903 - '''Creating a mock response''' - - def __init__(self, command): - self.payload=command - - topic='test_hub' - -class TestOnMessage(unittest.TestCase): - '''unit tests for the MQTT module''' - - def test_function_calls(self): - ''' - Checks that the function calls are correct. - ''' - with patch('modules.rec_mqtt.rec_api.link_hub') as mock_link: - self.assertTrue( - rec_mqtt.on_message('test_client', None, DummyMessage('170')) - ) - mock_link.assert_called() - - with patch('modules.rec_mqtt.rec_api.pull_data_dump') as mock_pull: - rec_mqtt.on_message('test_client', None, DummyMessage('186')) - mock_pull.assert_called() - - with patch('modules.rec_mqtt.mqtt_start_update') as mock_update: - rec_mqtt.on_message('test_client', None, DummyMessage('202')) - mock_update.assert_called() - - with patch('modules.rec_mqtt.rec_api.update_time_zone') as mock_timezone: - rec_mqtt.on_message('test_client', None, DummyMessage('218')) - mock_timezone.assert_called() - - with patch('modules.rec_mqtt.mqtt_restart_system') as mock_restart: - rec_mqtt.on_message('test_client', None, DummyMessage('234')) - mock_restart.assert_called() - - with patch('modules.rec_mqtt.zip_send') as mock_zip: - rec_mqtt.on_message('test_client', None, DummyMessage('250')) - mock_zip.assert_called() diff --git a/tests/modules/test_xbee.py b/tests/modules/test_xbee.py deleted file mode 100644 index 51d8fb5..0000000 --- a/tests/modules/test_xbee.py +++ /dev/null @@ -1,40 +0,0 @@ -# import os -# import pty -# import json -# import logging -# import unittest - -# from io import StringIO -# from unittest.mock import patch, mock_open, Mock - -# import sys -# sys.path.insert(0, "0_1_0/") - -# from modules.rec_xbee import configure_xbee - -# class TestXbee(unittest.TestCase): -# def test_confXBee(self): -# systemJSON = StringIO("""{ -# "serial" : "536780dfe639468e8e23fc568006950d", -# "XBEE_KY" : "11111111111111111111111111111111", -# "timezone": "America/New_York", -# "CurrentVersion": "0_0_0", -# "HUBid": 40, -# "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979", -# "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032" -# }""") - -# master, slave 
= pty.openpty()
-#         s_name = os.ttyname(slave)
-
-#         with patch('modules.rec_xbee.open') as mock_open:
-#             mock_open.side_effect = [systemJSON]
-
-#             with patch('modules.rec_xbee.serial.Serial.write') as mock_write:
-#                 configure_xbee() #Call the function to run.
-#                 mock_open.assert_called() #TEST - systemJSON was called
-
-
-
-# if __name__ == '__main__':
-#     unittest.main()
diff --git a/tests/test_installer.py b/tests/test_installer.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_pod.py b/tests/test_pod.py
deleted file mode 100644
index aaf5721..0000000
--- a/tests/test_pod.py
+++ /dev/null
@@ -1,21 +0,0 @@
-''' Unit test for pod.py '''
-
-import sys
-# import unittest
-
-sys.path.insert(0, "openpod/")
-
-# import hub
-
-# class TestHub(unittest.TestCase):
-#     '''
-#     General tests for the hub.py file
-#     '''
-#     def test_xbee_flag_set_true(self):
-#         '''
-#         Check if the xbee flag is set to true.
-#         '''
-#         global XBEE_FLAG
-#         XBEE_FLAG = False
-#         hub.incoming_xbee_data()
-#         self.assertTrue(XBEE_FLAG)
diff --git a/tests/test_updater.py b/tests/test_updater.py
deleted file mode 100644
index c0bc464..0000000
--- a/tests/test_updater.py
+++ /dev/null
@@ -1,38 +0,0 @@
-'''
-Tests for update.py
-'''
-
-import sys
-import unittest
-
-from io import StringIO
-from unittest.mock import patch
-
-import updater # OpenPod Updater
-
-sys.path.insert(0, "openpod/")
-
-class TestUpdater(unittest.TestCase):
-    '''Collection of tests.'''
-
-    def test_get_current_versions(self):
-        '''
-        Verify that the current versions are returned.
-        '''
-        system_json = StringIO("""{
-            "serial": "536780dfe639468e8e23fc568006950d",
-            "timezone": "America/New_York",
-            "CurrentVersion": "0_0_0",
-            "HUBid": 40,
-            "Token": "5a12ff36eed2f0647a48af62e635eb8cfd4c5979",
-            "facility": "3b9fdc97-9649-4c80-8b48-10df647bd032"
-        }""")
-
-        with patch('updater.open') as mock_open:
-            mock_open.side_effect = [system_json, system_json]
-            self.assertEqual(updater.current_hub_version(), "0_0_0")
-            mock_open.assert_called()
-
-
-if __name__ == '__main__':
-    unittest.main()
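The updater.py hunk above drops the nohup relaunch of HUB_Launcher.py in favor of a cleanup-and-restart step. Below is a minimal, self-contained sketch of how that new finally-block is expected to behave; finish_update, apply_update, and the example hash are hypothetical stand-ins and only the cleanup lines mirror the diff.

import os
import shutil


def apply_update(latest_version):
    ''' Hypothetical stand-in for the real download/unpack logic in updater.py. '''
    print(f"applying OpenPod release {latest_version['hash']}")


def finish_update(latest_version):
    ''' Apply an update, then clean up downloaded artifacts and restart the service. '''
    try:
        apply_update(latest_version)
    finally:
        # Clean Up: remove the downloaded archive and the extracted tree,
        # then restart the systemd unit so the new code is loaded.
        archive = f"{latest_version['hash']}.zip"
        if os.path.exists(archive):
            os.remove(archive)
        shutil.rmtree(f"OpenPod-{latest_version['hash']}/", ignore_errors=True)
        os.system("sudo systemctl restart openpod.service")


if __name__ == '__main__':
    finish_update({'hash': 'abc123'})  # example hash, illustration only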