init
commit 5a6237c452
6 changed files with 155 additions and 0 deletions
10  common.py  Normal file

@@ -0,0 +1,10 @@
def read_file_to_list(filename):
    try:
        lines = []
        with open(filename, 'r') as f:
            for line in f.readlines():
                lines.append(line.strip())
        return lines
    except FileNotFoundError:
        return []
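A quick usage sketch for the helper above (not part of the commit); the file path passed in is hypothetical:

# sketch: read_file_to_list returns a list of stripped lines, or [] if the file is missing
from common import read_file_to_list

lines = read_file_to_list("./data/logs/VOID.log")  # hypothetical path
for line in lines:
    print(line)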
1  data/abi/SHIO.json  Normal file
File diff suppressed because one or more lines are too long
33  logs.py  Normal file

@@ -0,0 +1,33 @@
import logging
import os
from logging.handlers import TimedRotatingFileHandler

from dotenv import load_dotenv

load_dotenv()
logging.basicConfig(
    format='[%(asctime)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.INFO,
    handlers=[
        TimedRotatingFileHandler("{}/app.log".format(os.getenv('DATA_FOLDER')), when="midnight", backupCount=7),
    ]
)
loggers = {}


def set_channel_logger(channel_name, level=logging.INFO):
    os.makedirs("{}/logs/".format(os.getenv("DATA_FOLDER")), exist_ok=True)
    log_file = "{}/logs/{}.log".format(os.getenv("DATA_FOLDER"), channel_name)
    handler = TimedRotatingFileHandler(log_file, when="midnight", interval=1, utc=True)
    handler.setFormatter(logging.Formatter("[%(asctime)s] %(message)s", "%Y-%m-%d %H:%M:%S"))
    logger = logging.getLogger(channel_name)
    logger.setLevel(level)
    logger.addHandler(handler)
    return logger


def log_message(channel_name, message):
    if channel_name not in loggers.keys():
        loggers[channel_name] = set_channel_logger(channel_name)
    loggers[channel_name].info(message)
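A minimal usage sketch for the logging helpers above (not part of the commit). It assumes DATA_FOLDER is set, since logs.py calls load_dotenv() at import time and builds every log path from that variable; the "./data" value here just mirrors sample.env:

import os

os.environ.setdefault("DATA_FOLDER", "./data")          # assumption: same variable sample.env defines
os.makedirs(os.environ["DATA_FOLDER"], exist_ok=True)   # logs.py opens <DATA_FOLDER>/app.log at import

from logs import log_message

log_message("VOID", "hello")  # first call creates <DATA_FOLDER>/logs/VOID.log via set_channel_logger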
104  main.py  Normal file

@@ -0,0 +1,104 @@
import asyncio
import json
import os
import warnings
from datetime import datetime, timezone
from json import JSONDecodeError

from web3 import Web3, HTTPProvider

from common import *
from logs import *

web3 = Web3(HTTPProvider(os.getenv('RPC_SERVER')))
log_contract = web3.eth.contract(
    os.getenv('LOG_CONTRACT_ADDRESS'),
    abi=json.load(open("./data/abi/{}".format(os.getenv('LOG_CONTRACT_ABI_FILE'))))
)
# cache of blocks already fetched, keyed by block number (read and filled by output_line)
block_cache = {}


async def keep_chat_updated(event_filter, poll_interval):
    while True:
        for event in event_filter.get_new_entries():
            output_line(event)
        await asyncio.sleep(poll_interval)


def preload_chat(earliest_block, latest_block):
    event_logs = web3.eth.get_logs({
        "fromBlock": int(earliest_block),
        "toBlock": int(latest_block),
        "topics": ["0x6b81130c485ac9b98332fa40c2e57900867815b0fe1497e1a168caf930fc9c9d"],
        "address": os.getenv('LOG_CONTRACT_ADDRESS')
    })
    warnings.filterwarnings("ignore")
    for log in event_logs:
        output_line(log)


def load_chat(latest_block):
    event_filter = log_contract.events.LogEvent.create_filter(fromBlock=latest_block)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        asyncio.run(keep_chat_updated(event_filter, 2))
    except KeyboardInterrupt:
        pass


def output_line(log):
    try:
        tx_receipt = web3.eth.get_transaction_receipt(log.transactionHash)
        decoded_data = log_contract.events.LogEvent().process_receipt(tx_receipt)
    except Exception:
        return
    if decoded_data[0]['args']['Aura'] and decoded_data[0]['args']['Soul']:
        # save latest block number
        rooms = json.load(open(room_file := "{}/rooms.json".format(os.getenv('DATA_FOLDER')), 'r'))
        rooms['VOID']['last_block'] = tx_receipt['blockNumber']
        open(room_file, 'w').write(json.dumps(rooms))

        # get the block and save it to cache
        block_number = tx_receipt['blockNumber']
        if block_number not in block_cache:
            block_cache[block_number] = web3.eth.get_block(block_number)

        # log the message and output to console
        log_message("VOID", decoded_data[0]['args']['LogLine'])
        timestamp = datetime.fromtimestamp(block_cache[block_number]['timestamp'], tz=timezone.utc)
        print("[{}] {}".format(timestamp.strftime("%Y-%m-%d %H:%M:%S"), decoded_data[0]['args']['LogLine']))


if __name__ == '__main__':
    # create the log and rooms folder if it doesn't exist
    os.makedirs("{}/logs/".format(os.getenv('DATA_FOLDER')), exist_ok=True)
    os.makedirs("{}/rooms/".format(os.getenv('DATA_FOLDER')), exist_ok=True)

    # create the rooms file if it doesn't exist
    try:
        room = json.load(open(room_file := "{}/rooms/{}.json".format(os.getenv('DATA_FOLDER'), os.getenv('CHANNEL_NAME')), 'r'))  # TODO: change this to target an address
    except (JSONDecodeError, FileNotFoundError):
        room = {
            "label": os.getenv('CHANNEL_NAME'),
            "last_block": 0,
            "preloaded": False,
        }
        open(room_file, 'w').write(json.dumps(room))
    log_file = "{}/logs/{}.log".format(os.getenv('DATA_FOLDER'), "VOID")
    # grab the entire log if the log file doesn't exist
    if not room['preloaded']:
        if os.path.exists(log_file):  # the log file may not exist yet on a fresh data folder
            os.remove(log_file)
        latest_block = web3.eth.get_block('latest')
        block_cache = {latest_block.number: latest_block}
        earliest_block_number = 21220693
        preload_chat(earliest_block_number, latest_block.number)
        room['preloaded'] = True
        room['last_block'] = latest_block.number
    else:
        # print past log to console
        logs = read_file_to_list(log_file)
        for log in logs:
            print(log)
    open(room_file, 'w').write(json.dumps(room))
    # check for new messages
    load_chat(room['last_block'])
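A small pre-flight sketch (not part of the commit) that can help before running main.py: it checks that the RPC endpoint and contract address from the environment resolve. It assumes web3 v6+ naming (is_connected, to_checksum_address) and the same variable names as sample.env:

import os

from dotenv import load_dotenv
from web3 import Web3, HTTPProvider

load_dotenv()
w3 = Web3(HTTPProvider(os.getenv('RPC_SERVER')))
print("connected:", w3.is_connected())
print("latest block:", w3.eth.block_number)
address = Web3.to_checksum_address(os.getenv('LOG_CONTRACT_ADDRESS'))
print("contract has code:", len(w3.eth.get_code(address)) > 0)  # empty bytecode would mean a wrong address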
2  requirements.txt  Normal file

@@ -0,0 +1,2 @@
web3
python-dotenv
5  sample.env  Normal file

@@ -0,0 +1,5 @@
DATA_FOLDER=./data
RPC_SERVER=https://rpc.pulsechain.com
LOG_CONTRACT_ADDRESS=0x7aE73C498A308247BE73688c09c96B3fd06dDB84
LOG_CONTRACT_ABI_FILE=SHIO.json
CHANNEL_NAME=VOID
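Note: python-dotenv loads variables from a file named .env by default, so sample.env is presumably meant to be copied to .env (and adjusted) before running main.py.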