refactor: switch from Google Sheets to ODS file output

Replaced the Google Sheets integration for data storage with locally managed ODS files. Data is now read and written with pandas via the ODF engine, which removes the pygsheets dependency (and its service-account credentials) and keeps data handling self-contained and easier to maintain.
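For reference, the new read/write pattern looks roughly like the following. This is a minimal sketch, assuming the odfpy package is installed and an .ods file with a "p2pool" sheet; the real path comes from settings.DATA_FILE, and the update shown is purely illustrative:

import pandas as pd

DATA_FILE = "data.ods"  # placeholder; the project takes this from settings.DATA_FILE

# Reading a named sheet of an ODS file requires the "odf" engine (backed by odfpy).
df = pd.read_excel(DATA_FILE, sheet_name="p2pool", engine="odf")

# Update a cell positionally (assumes the sheet has at least one data row).
df.iloc[-1, 0] = "2024-11-13"

# Write the sheet back; pandas picks the ODS writer from the .ods extension.
df.to_excel(DATA_FILE, sheet_name="p2pool", index=False)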
Kumi 2024-11-13 12:16:21 +01:00
parent a9f1a887e9
commit f95782b093
Signed by: kumi
GPG key ID: ECBCC9082395383F


@@ -6,10 +6,11 @@ import datetime
 from datetime import date, timedelta
 from .models import Coin, Social, P2Pool
 import requests
-import pygsheets
 from django.conf import settings
+import pandas as pd
 
 BASE_DIR = settings.BASE_DIR
+DATA_FILE = settings.DATA_FILE
 
 
 ####################################################################################
@@ -377,7 +378,7 @@ async def update_social_data(symbol):
 
 
 ####################################################################################
-# Asynchronous get p2pool and p2poolmini data and then save to google sheets
+# Asynchronous get p2pool and p2poolmini data and then save to .ods
 ####################################################################################
 async def get_p2pool_data(session, mini):
     today = date.today()
@@ -426,33 +427,27 @@ async def get_p2pool_data(session, mini):
         p2pool_stat.mini = False
         p2pool_stat.save()
         print("p2pool saved!")
-        gc = pygsheets.authorize(
-            service_file="service_account_credentials.json"
-        )
-        sh = gc.open("zcash_bitcoin")
-        wks = sh.worksheet_by_title("p2pool")
-        values_mat = wks.get_values(
-            start=(3, 1), end=(9999, 3), returnas="matrix"
-        )
+
+        df = pd.read_excel(DATA_FILE, sheet_name="p2pool", engine="odf")
+        start_row, end_row = 2, 9999
+        start_col, end_col = 0, 2
+        values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
         k = len(values_mat)
         date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
         date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
         date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
         if date_aux < date_aux2:
-            cell = "F" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.totalblocksfound)
-            cell = "E" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.totalhashes)
-            cell = "D" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.percentage)
-            cell = "C" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.hashrate)
-            cell = "B" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.miners)
-            cell = "A" + str(k + 3)
-            wks.update_value(
-                cell, datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
-            )
+            values_mat[k][5] = p2pool_stat.totalblocksfound
+            values_mat[k][4] = p2pool_stat.totalhashes
+            values_mat[k][3] = p2pool_stat.percentage
+            values_mat[k][2] = p2pool_stat.hashrate
+            values_mat[k][1] = p2pool_stat.miners
+            values_mat[k][0] = datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
+
+            df.iloc[start_row:end_row, start_col:end_col] = values_mat
+            df.to_excel(DATA_FILE, sheet_name="p2pool", index=False)
             print("spreadsheet updated")
         else:
             print("spreadsheet already with the latest data")
@@ -473,33 +468,27 @@ async def get_p2pool_data(session, mini):
         p2pool_stat.mini = True
         p2pool_stat.save()
         print("p2pool_mini saved!")
-        gc = pygsheets.authorize(
-            service_file="service_account_credentials.json"
-        )
-        sh = gc.open("zcash_bitcoin")
-        wks = sh.worksheet_by_title("p2poolmini")
-        values_mat = wks.get_values(
-            start=(3, 1), end=(9999, 3), returnas="matrix"
-        )
+
+        df = pd.read_excel(DATA_FILE, sheet_name="p2poolmini", engine="odf")
+        start_row, end_row = 2, 9999
+        start_col, end_col = 0, 2
+        values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
        k = len(values_mat)
         date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
         date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
         date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
         if date_aux < date_aux2:
-            cell = "F" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.totalblocksfound)
-            cell = "E" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.totalhashes)
-            cell = "D" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.percentage)
-            cell = "C" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.hashrate)
-            cell = "B" + str(k + 3)
-            wks.update_value(cell, p2pool_stat.miners)
-            cell = "A" + str(k + 3)
-            wks.update_value(
-                cell, datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
-            )
+            values_mat[k][5] = p2pool_stat.totalblocksfound
+            values_mat[k][4] = p2pool_stat.totalhashes
+            values_mat[k][3] = p2pool_stat.percentage
+            values_mat[k][2] = p2pool_stat.hashrate
+            values_mat[k][1] = p2pool_stat.miners
+            values_mat[k][0] = datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
+
+            df.iloc[start_row:end_row, start_col:end_col] = values_mat
+            df.to_excel(DATA_FILE, sheet_name="p2poolmini", index=False)
             print("spreadsheet updated")
         else:
             print("spreadsheet already with the latest data")