moneropro/app/charts/synchronous.py

import pandas as pd
import json
import requests
import datetime
from datetime import date, timedelta
from .models import (
Coin,
Social,
P2Pool,
Dominance,
Rank,
Sfmodel,
DailyData,
Withdrawal,
)
from requests import Session
from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
import pytz
from django.conf import settings
DATA_FILE = settings.DATA_FILE
BASE_DIR = settings.BASE_DIR
####################################################################################
# Reddit api
####################################################################################
# TODO: Fix this...
# api = PushshiftAPI() # When it's working
api = False # When it's not
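# With the Pushshift client disabled, the helpers below return 0 and the
# Reddit posts/comments-per-hour statistics fall back to zero.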
# Get daily post on Reddit
def data_prep_posts(subreddit, start_time, end_time, filters, limit):
if len(filters) == 0:
filters = [
"id",
"author",
"created_utc",
"domain",
"url",
"title",
"num_comments",
]
if api:
posts = list(
api.search_submissions(
subreddit=subreddit,
after=start_time,
before=end_time,
filter=filters,
limit=limit,
)
)
return pd.DataFrame(posts)
return 0
# Get daily comments on Reddit
def data_prep_comments(term, start_time, end_time, filters, limit):
if len(filters) == 0:
filters = ["id", "author", "created_utc", "body", "permalink", "subreddit"]
if api:
comments = list(
api.search_comments(
q=term, after=start_time, before=end_time, filter=filters, limit=limit
)
)
return pd.DataFrame(comments)
return 0
####################################################################################
# Other useful functions
####################################################################################
# Get data from a coin
def get_history_function(symbol, start_time=None, end_time=None):
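    # Backfill daily metrics for `symbol` from the configured provider between
    # start_time and end_time (defaults cover the whole history). Existing Coin
    # rows for a day are replaced, missing fields are carried forward from the
    # most recent known value, pagination follows "next_page_token", and the
    # number of rows written is returned.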
update = True
count = 0
priceusd = 0
inflation = 0
pricebtc = 0
stocktoflow = 0
supply = 0
fee = 0
revenue = 0
hashrate = 0
transactions = 0
blocksize = 0
difficulty = 0
    with open(BASE_DIR / "settings.json") as file:
        data = json.load(file)
if not (start_time and end_time):
start_time = "2000-01-01"
end_time = "2100-01-01"
url = (
data["metrics_provider"][0]["metrics_url_new"]
+ symbol
+ "/"
+ start_time
+ "/"
+ end_time
)
print(url)
coins = Coin.objects.filter(name=symbol).order_by("-date")
for coin in coins:
if coin.supply > 0:
supply = coin.supply
break
for coin in coins:
if coin.inflation > 0:
inflation = coin.inflation
break
for coin in coins:
if coin.hashrate > 0:
hashrate = coin.hashrate
break
for coin in coins:
if coin.transactions > 0:
transactions = coin.transactions
break
for coin in coins:
if coin.priceusd > 0:
priceusd = coin.priceusd
break
while update:
response = requests.get(url)
data_aux = json.loads(response.text)
data_aux2 = data_aux["data"]
for item in data_aux2:
day, hour = str(item["time"]).split("T")
day = datetime.datetime.strptime(day, "%Y-%m-%d")
day = datetime.datetime.strftime(day, "%Y-%m-%d")
coin = Coin.objects.filter(name=symbol).filter(date=day)
if coin:
coin.delete()
try:
coin = Coin()
coin.name = symbol
coin.date = day
try:
coin.priceusd = float(item["PriceUSD"])
priceusd = coin.priceusd
except (ValueError, KeyError, TypeError):
coin.priceusd = priceusd
try:
coin.pricebtc = float(item["PriceBTC"])
pricebtc = coin.pricebtc
except (ValueError, KeyError, TypeError):
coin.pricebtc = pricebtc
try:
coin.inflation = float(item["IssContPctAnn"])
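                    # stock-to-flow proxy from annualised issuance:
                    # S2F = (100 / inflation_pct) ** 1.65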
coin.stocktoflow = (100 / coin.inflation) ** 1.65
inflation = coin.inflation
stocktoflow = coin.stocktoflow
except (ValueError, KeyError, TypeError):
coin.inflation = inflation
coin.stocktoflow = stocktoflow
try:
if symbol == "xmr":
if float(item["SplyCur"]) < 18000000:
coin.supply = float(item["SplyCur"]) + 497108
else:
coin.supply = float(item["SplyCur"])
supply = coin.supply
else:
coin.supply = float(item["SplyCur"])
supply = coin.supply
except (ValueError, KeyError, TypeError):
coin.supply = supply
try:
coin.fee = float(item["FeeTotNtv"])
fee = coin.fee
except (ValueError, KeyError, TypeError):
coin.fee = fee
try:
coin.revenue = float(item["RevNtv"])
revenue = coin.revenue
except (ValueError, KeyError, TypeError):
coin.revenue = revenue
try:
coin.hashrate = float(item["HashRate"])
hashrate = coin.hashrate
except (ValueError, KeyError, TypeError):
coin.hashrate = hashrate
try:
coin.transactions = float(item["TxCnt"])
transactions = coin.transactions
except (ValueError, KeyError, TypeError):
coin.transactions = transactions
try:
coin.blocksize = float(item["BlkSizeMeanByte"])
blocksize = coin.blocksize
except (ValueError, KeyError, TypeError):
coin.blocksize = blocksize
try:
coin.difficulty = float(item["DiffLast"])
difficulty = coin.difficulty
except (ValueError, KeyError, TypeError):
coin.difficulty = difficulty
coin.save()
count += 1
print(coin.name + " " + str(coin.date) + " " + str(item["SplyCur"]))
except (ValueError, KeyError, TypeError):
pass
try:
url = (
data["metrics_provider"][0]["metrics_url_new"]
+ symbol
+ "/"
+ start_time
+ "/"
+ end_time
+ "/"
+ data_aux["next_page_token"]
)
except (ValueError, KeyError):
update = False
break
return count
# Get most recent metrics from a data provider of your choice for 'symbol'
def get_latest_metrics(symbol, url):
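    # Fetch the latest daily metrics for `symbol` from `url` and upsert them
    # into Coin, zeroing any field the provider omits. Pagination follows
    # "next_page_url"; returns the number of rows written.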
update = True
count = 0
while update:
response = requests.get(url)
data = json.loads(response.text)
data_aux = data["data"]
for item in data_aux:
day, hour = str(item["time"]).split("T")
day = datetime.datetime.strptime(day, "%Y-%m-%d")
day = datetime.datetime.strftime(day, "%Y-%m-%d")
try:
coin = Coin.objects.filter(name=symbol).get(date=day)
except Coin.DoesNotExist:
coin = Coin()
try:
coin.name = symbol
coin.date = day
try:
coin.priceusd = float(item["PriceUSD"])
except (ValueError, KeyError):
coin.priceusd = 0
try:
coin.pricebtc = float(item["PriceBTC"])
except (ValueError, KeyError):
coin.pricebtc = 0
try:
coin.inflation = float(item["IssContPctAnn"])
coin.stocktoflow = (100 / coin.inflation) ** 1.65
except (ValueError, KeyError):
coin.inflation = 0
coin.stocktoflow = 0
try:
coin.supply = float(item["SplyCur"])
except (ValueError, KeyError):
coin.supply = 0
try:
coin.fee = float(item["FeeTotNtv"])
except (ValueError, KeyError):
coin.fee = 0
try:
coin.revenue = float(item["RevNtv"])
except (ValueError, KeyError):
coin.revenue = 0
try:
coin.hashrate = float(item["HashRate"])
except (ValueError, KeyError):
coin.hashrate = 0
try:
coin.transactions = float(item["TxCnt"])
except (ValueError, KeyError):
coin.transactions = 0
coin.save()
count += 1
print(str(symbol) + " " + str(coin.date))
except Exception:
pass
try:
url = data["next_page_url"]
update = True
except KeyError:
update = False
break
return count
# Get binance withdrawal state
def get_binance_withdrawal(symbol):
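    # Scrape the Binance network page for the "withdrawEnable" flag that follows
    # the first occurrence of `symbol` and record the result in Withdrawal.
    # A new row is written at most once per hour, or immediately when
    # withdrawals flip from enabled to disabled.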
url = "https://www.binance.com/en/network"
    withdrawal = Withdrawal.objects.order_by("-date").first()
    if withdrawal is None:
        withdrawal = Withdrawal()
        withdrawal.state = True
        withdrawal.save()
        return True
current_date = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
response = requests.get(url)
result = response.text
position = result.find(symbol)
result = result[position : position + 400]
position = result.find("withdrawEnable")
result = result[position : position + 25]
try:
result.index("true")
        if (current_date - withdrawal.date).total_seconds() > 3600:
new_withdrawal = Withdrawal()
new_withdrawal.state = True
new_withdrawal.save()
return True
except Exception:
try:
result.index("false")
            if ((current_date - withdrawal.date).total_seconds() > 3600) or withdrawal.state:
new_withdrawal = Withdrawal()
new_withdrawal.state = False
new_withdrawal.save()
return False
except Exception:
return None
# Get latest price data for Monero
def get_latest_price(symbol):
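    # Query the configured price provider (a CoinMarketCap-style API, judging by
    # the cmc_rank field) for the latest quote of `symbol`. Returns the parsed
    # JSON payload, or False when the request fails or the payload is malformed.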
with open(BASE_DIR / "settings.json") as file:
data = json.load(file)
url = data["metrics_provider"][0]["price_url_old"] + symbol
print(url)
parameters = {
"convert": "USD",
}
headers = {
"Accepts": "application/json",
data["metrics_provider"][0]["api_key_name"]: data["metrics_provider"][0][
"api_key_value"
],
}
session = Session()
session.headers.update(headers)
try:
response = session.get(url, params=parameters)
data = json.loads(response.text)
print("getting latest data")
try:
if data["data"][symbol.upper()]["cmc_rank"]:
print("new data received")
pass
else:
print("problem with the data provider")
data = False
except KeyError:
data = False
except (ConnectionError, Timeout, TooManyRedirects):
data = False
return data
# Get latest dominance value and update
def update_dominance(data):
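    # Persist today's XMR market-cap dominance from the provider payload and
    # append it to the "Sheet7" tab of DATA_FILE when the spreadsheet is behind.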
if not (data):
print("error updating dominance")
return False
else:
dominance = Dominance()
dominance.name = "xmr"
dominance.date = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
dominance.dominance = float(
data["data"]["XMR"]["quote"]["USD"]["market_cap_dominance"]
)
dominance.save()
df = pd.read_excel(DATA_FILE, engine="odf", sheet_name="Sheet7")
start_row, end_row = 2, 9999
start_col, end_col = 0, 2
values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
k = len(values_mat)
date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
if date_aux < date_aux2:
values_mat[k][1] = dominance.dominance
values_mat[k][0] = dominance.date
df.iloc[start_row:end_row, start_col:end_col] = values_mat
df.to_excel(DATA_FILE, sheet_name="Sheet7", index=False)
print("spreadsheet updated")
else:
print("spreadsheet already with the latest data")
return False
return data
# Get latest rank value and update
def update_rank(data=None):
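    # Persist today's XMR market-cap rank (fetching the provider payload first
    # if none was passed) and append it to the "Sheet8" tab of DATA_FILE when
    # the spreadsheet is behind.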
if not (data):
data = get_latest_price("xmr")
if not (data):
print("error updating rank")
return False
else:
rank = Rank()
rank.name = "xmr"
rank.date = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
rank.rank = int(data["data"]["XMR"]["cmc_rank"])
rank.save()
df = pd.read_excel(DATA_FILE, engine="odf", sheet_name="Sheet8")
start_row, end_row = 2, 9999
start_col, end_col = 0, 2
values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
k = len(values_mat)
date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
if date_aux < date_aux2:
values_mat[k][1] = rank.rank
values_mat[k][0] = rank.date
df.iloc[start_row:end_row, start_col:end_col] = values_mat
df.to_excel(DATA_FILE, sheet_name="Sheet8", index=False)
print("spreadsheet updated")
else:
print("spreadsheet already with the latest data")
return data
return data
# Load Reddit api to check if there are new followers
def check_new_social(symbol):
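    # Create yesterday's Social row for the given subreddit if it is missing:
    # the subscriber count comes from reddit.com/r/<symbol>/about.json, while
    # posts/comments per hour rely on the (currently disabled) Pushshift helpers
    # above and therefore fall back to 0.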
yesterday = datetime.datetime.strftime(date.today() - timedelta(1), "%Y-%m-%d")
socials = Social.objects.filter(name=symbol).filter(date=yesterday)
if symbol == "Bitcoin":
timeframe = 14400
hours = 86400 / timeframe
timeframe2 = 3600
hours2 = 86400 / timeframe2
elif symbol == "Monero":
timeframe = 43200
hours = 86400 / timeframe
timeframe2 = 43200
hours2 = 86400 / timeframe2
elif symbol == "Cryptocurrency":
timeframe = 14400
hours = 86400 / timeframe
timeframe2 = 1800
hours2 = 86400 / timeframe2
if not (socials):
print("new social")
request = "https://www.reddit.com/r/" + symbol + "/about.json"
response = requests.get(
request, headers={"User-agent": "Checking new social data"}
)
data = json.loads(response.content)
data = data["data"]
subscribers = data["subscribers"]
social = Social()
social.name = symbol
social.date = yesterday
social.subscriberCount = subscribers
date_aux = date.today()
date_aux = datetime.datetime.strftime(date_aux, "%Y-%m-%d")
date_aux = datetime.datetime.strptime(date_aux, "%Y-%m-%d")
timestamp1 = int(datetime.datetime.timestamp(date_aux))
timestamp2 = int(timestamp1 - timeframe)
limit = 1000
filters = []
data = data_prep_posts(symbol, timestamp2, timestamp1, filters, limit)
        # data is a DataFrame when the Pushshift API is available, 0 otherwise
        if isinstance(data, pd.DataFrame):
social.postsPerHour = len(data) / hours
else:
social.postsPerHour = 0
timestamp2 = int(timestamp1 - timeframe2)
limit = 1000
data = data_prep_comments(symbol, timestamp2, timestamp1, filters, limit)
        if isinstance(data, pd.DataFrame):
social.commentsPerHour = len(data) / hours2
else:
social.commentsPerHour = 0
social.save()
print("getting new data - " + str(social.name) + " - " + str(social.date))
return True
# Update database DailyData with most recent coin data
def update_database(date_from=None, date_to=None):
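    # Rebuild the Sfmodel and DailyData rows for every day in [date_from,
    # date_to] (default: the last five days) from the per-coin Coin and Social
    # tables. Returns the number of days processed; stops early if a required
    # Coin row is missing or still has zero inflation.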
date_zero = "2014-05-20"
if not (date_from) or not (date_to):
date_to = date.today()
date_from = date_to - timedelta(5)
amount = date_from - datetime.datetime.strptime(date_zero, "%Y-%m-%d").date()
else:
print(str(date_from) + " to " + str(date_to))
        date_from = datetime.datetime.strptime(date_from, "%Y-%m-%d").date()
        date_to = datetime.datetime.strptime(date_to, "%Y-%m-%d").date()
        amount = date_from - datetime.datetime.strptime(date_zero, "%Y-%m-%d").date()
count = 0
date_aux = date_from
while date_aux <= date_to:
date_aux = date_from + timedelta(count)
date_aux2 = date_aux - timedelta(1)
try:
coin_xmr = Coin.objects.filter(name="xmr").get(date=date_aux)
coin_xmr2 = Coin.objects.filter(name="xmr").get(date=date_aux2)
coin_btc = Coin.objects.filter(name="btc").get(date=date_aux)
coin_btc2 = Coin.objects.filter(name="btc").get(date=date_aux2)
try:
coin_dash = Coin.objects.filter(name="dash").get(date=date_aux)
except Coin.DoesNotExist:
coin_dash = Coin()
try:
coin_zcash = Coin.objects.filter(name="zec").get(date=date_aux)
except Coin.DoesNotExist:
coin_zcash = Coin()
try:
coin_grin = Coin.objects.filter(name="grin").get(date=date_aux)
except Coin.DoesNotExist:
coin_grin = Coin()
if coin_btc.inflation == 0 or coin_xmr.inflation == 0:
return count
count_aux = 0
found = False
while count_aux < 100 and not (found):
try:
date_aux3 = date_aux - timedelta(count_aux)
social_btc = Social.objects.filter(name="Bitcoin").get(
date=date_aux3
)
social_xmr = Social.objects.filter(name="Monero").get(
date=date_aux3
)
social_crypto = Social.objects.filter(name="Cryptocurrency").get(
date=date_aux3
)
found = True
except Social.DoesNotExist:
found = False
count_aux += 1
except Coin.DoesNotExist:
return count
try:
data = Sfmodel.objects.get(date=coin_xmr.date)
except Sfmodel.DoesNotExist:
data = Sfmodel()
data.priceusd = 0
data.pricebtc = 0
data.stocktoflow = 0
data.greyline = 0
data.color = 0
data.date = coin_xmr.date
data.pricebtc = coin_xmr.pricebtc
data.priceusd = coin_xmr.priceusd
if data.stocktoflow == 0 and coin_xmr.supply > 0:
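            # Recompute stock-to-flow from Monero's emission rule: the block
            # reward is (2**64 - 1 - supply_in_atomic_units) >> 19, floored at
            # the 0.6 XMR tail emission, annualised with 720 blocks/day * 365.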
supply = int(coin_xmr.supply) * 10**12
reward = (2**64 - 1 - supply) >> 19
if reward < 0.6 * (10**12):
reward = 0.6 * (10**12)
inflation = 100 * reward * 720 * 365 / supply
data.stocktoflow = (100 / (inflation)) ** 1.65
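        # Colour scale for the chart: BTC price relative to a baseline that
        # grows linearly from 0.002 to 0.015 BTC over the first six years
        # after date_zero (v0 + days_elapsed * delta).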
v0 = 0.002
delta = (0.015 - 0.002) / (6 * 365)
data.color = 30 * coin_xmr.pricebtc / ((amount.days) * delta + v0)
amount += timedelta(1)
data.save()
try:
data = DailyData.objects.get(date=coin_xmr.date)
except DailyData.DoesNotExist:
data = DailyData()
# Date field
data.date = coin_xmr.date
# Basic information
data.btc_priceusd = 0
data.xmr_priceusd = 0
data.xmr_pricebtc = 0
# Marketcap charts
data.btc_marketcap = 0
data.xmr_marketcap = 0
data.dash_marketcap = 0
data.grin_marketcap = 0
data.zcash_marketcap = 0
# Transactions charts
data.xmr_transacpercentage = 0
data.btc_transactions = 0
data.zcash_transactions = 0
data.dash_transactions = 0
data.grin_transactions = 0
data.xmr_transactions = 0
data.btc_supply = 0
data.xmr_supply = 0
# Issuance charts
data.btc_inflation = 0
data.xmr_inflation = 0
data.dash_inflation = 0
data.grin_inflation = 0
data.zcash_inflation = 0
data.xmr_metcalfebtc = 0
data.xmr_metcalfeusd = 0
data.btc_return = 0
data.xmr_return = 0
data.btc_emissionusd = 0
data.btc_emissionntv = 0
data.xmr_emissionusd = 0
data.xmr_emissionntv = 0
# Mining charts
data.btc_minerrevntv = 0
data.xmr_minerrevntv = 0
data.btc_minerrevusd = 0
data.xmr_minerrevusd = 0
data.btc_minerfeesntv = 0
data.xmr_minerfeesntv = 0
data.btc_minerfeesusd = 0
data.xmr_minerfeesusd = 0
data.btc_transcostntv = 0
data.xmr_transcostntv = 0
data.btc_transcostusd = 0
data.xmr_transcostusd = 0
data.xmr_minerrevcap = 0
data.btc_minerrevcap = 0
data.btc_commitntv = 0
data.xmr_commitntv = 0
data.btc_commitusd = 0
data.xmr_commitusd = 0
data.btc_blocksize = 0
data.xmr_blocksize = 0
data.btc_difficulty = 0
data.xmr_difficulty = 0
# Reddit charts
data.btc_subscriberCount = 0
data.btc_commentsPerHour = 0
data.btc_postsPerHour = 0
data.xmr_subscriberCount = 0
data.xmr_commentsPerHour = 0
data.xmr_postsPerHour = 0
data.crypto_subscriberCount = 0
data.crypto_commentsPerHour = 0
data.crypto_postsPerHour = 0
# Date field
data.date = coin_xmr.date
# Basic information
data.btc_priceusd = coin_btc.priceusd
data.xmr_priceusd = coin_xmr.priceusd
data.xmr_pricebtc = coin_xmr.pricebtc
# Marketcap charts
data.btc_marketcap = float(coin_btc.priceusd) * float(coin_btc.supply)
data.xmr_marketcap = float(coin_xmr.priceusd) * float(coin_xmr.supply)
data.dash_marketcap = float(coin_dash.priceusd) * float(coin_dash.supply)
data.grin_marketcap = float(coin_grin.priceusd) * float(coin_grin.supply)
data.zcash_marketcap = float(coin_zcash.priceusd) * float(coin_zcash.supply)
# Transactions charts
try:
data.xmr_transacpercentage = coin_xmr.transactions / coin_btc.transactions
except ZeroDivisionError:
pass
data.xmr_transactions = coin_xmr.transactions
data.btc_transactions = coin_btc.transactions
data.zcash_transactions = coin_zcash.transactions
data.dash_transactions = coin_dash.transactions
data.grin_transactions = coin_grin.transactions
data.btc_supply = coin_btc.supply
data.xmr_supply = coin_xmr.supply
# Issuance charts
data.btc_inflation = coin_btc.inflation
data.xmr_inflation = coin_xmr.inflation
data.dash_inflation = coin_dash.inflation
data.grin_inflation = coin_grin.inflation
data.zcash_inflation = coin_zcash.inflation
try:
data.xmr_metcalfebtc = (
coin_xmr.transactions
* coin_xmr.supply
/ (coin_btc.supply * coin_btc.transactions)
)
data.xmr_metcalfeusd = (
coin_btc.priceusd
* coin_xmr.transactions
* coin_xmr.supply
/ (coin_btc.supply * coin_btc.transactions)
)
except ZeroDivisionError:
pass
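        # "Return" series: price relative to fixed reference prices
        # (30 USD for BTC, 5.01 USD for XMR) used as chart baselines.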
data.btc_return = coin_btc.priceusd / 30
data.xmr_return = coin_xmr.priceusd / 5.01
data.btc_emissionusd = (coin_btc.supply - coin_btc2.supply) * coin_btc.priceusd
data.btc_emissionntv = coin_btc.supply - coin_btc2.supply
data.xmr_emissionusd = (coin_xmr.supply - coin_xmr2.supply) * coin_xmr.priceusd
data.xmr_emissionntv = coin_xmr.supply - coin_xmr2.supply
# Mining charts
data.btc_minerrevntv = coin_btc.revenue
data.xmr_minerrevntv = coin_xmr.revenue
data.btc_minerrevusd = coin_btc.revenue * coin_btc.priceusd
data.xmr_minerrevusd = coin_xmr.revenue * coin_xmr.priceusd
data.btc_minerfeesntv = coin_btc.revenue - coin_btc.supply + coin_btc2.supply
data.xmr_minerfeesntv = coin_xmr.revenue - coin_xmr.supply + coin_xmr2.supply
data.btc_minerfeesusd = (
coin_btc.revenue - coin_btc.supply + coin_btc2.supply
) * coin_btc.priceusd
data.xmr_minerfeesusd = (
coin_xmr.revenue - coin_xmr.supply + coin_xmr2.supply
) * coin_xmr.priceusd
try:
data.btc_transcostntv = coin_btc.fee / coin_btc.transactions
data.xmr_transcostntv = coin_xmr.fee / coin_xmr.transactions
data.btc_transcostusd = (
coin_btc.priceusd * coin_btc.fee / coin_btc.transactions
)
data.xmr_transcostusd = (
coin_xmr.priceusd * coin_xmr.fee / coin_xmr.transactions
)
except ZeroDivisionError:
pass
try:
data.xmr_minerrevcap = 365 * 100 * coin_xmr.revenue / coin_xmr.supply
data.btc_minerrevcap = 365 * 100 * coin_btc.revenue / coin_btc.supply
except ZeroDivisionError:
pass
try:
data.btc_commitntv = coin_btc.hashrate / (coin_btc.revenue)
data.xmr_commitntv = coin_xmr.hashrate / (coin_xmr.revenue)
data.btc_commitusd = coin_btc.hashrate / (
coin_btc.revenue * coin_btc.priceusd
)
data.xmr_commitusd = coin_xmr.hashrate / (
coin_xmr.revenue * coin_xmr.priceusd
)
except ZeroDivisionError:
pass
try:
data.btc_blocksize = coin_btc.blocksize
data.xmr_blocksize = coin_xmr.blocksize
data.btc_difficulty = coin_btc.difficulty
data.xmr_difficulty = coin_xmr.difficulty
except ZeroDivisionError:
pass
# Reddit charts
try:
data.btc_subscriberCount = social_btc.subscriberCount
data.btc_commentsPerHour = social_btc.commentsPerHour
data.btc_postsPerHour = social_btc.postsPerHour
data.xmr_subscriberCount = social_xmr.subscriberCount
data.xmr_commentsPerHour = social_xmr.commentsPerHour
data.xmr_postsPerHour = social_xmr.postsPerHour
data.crypto_subscriberCount = social_crypto.subscriberCount
data.crypto_commentsPerHour = social_crypto.commentsPerHour
data.crypto_postsPerHour = social_crypto.postsPerHour
except (Social.DoesNotExist, UnboundLocalError):
data.btc_subscriberCount = 0
data.btc_commentsPerHour = 0
data.btc_postsPerHour = 0
data.xmr_subscriberCount = 0
data.xmr_commentsPerHour = 0
data.xmr_postsPerHour = 0
data.crypto_subscriberCount = 0
data.crypto_commentsPerHour = 0
data.crypto_postsPerHour = 0
data.save()
try:
print(
str(coin_xmr.supply)
+ " xmr "
+ str(data.xmr_subscriberCount)
+ " - "
+ str(social_xmr.subscriberCount)
+ " = "
+ str(int(data.xmr_marketcap))
+ " => "
+ str(coin_xmr.inflation)
)
except (Social.DoesNotExist, UnboundLocalError):
pass
count += 1
return count
# Get latest P2Pool data
def update_p2pool():
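    # Refresh today's P2Pool and P2Pool-mini statistics from p2pool.io, store
    # them in the P2Pool table (percentage is relative to yesterday's XMR
    # network hashrate) and append a row to the "p2pool"/"p2poolmini" tabs of
    # DATA_FILE when the spreadsheet is behind.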
today = date.today()
yesterday = date.today() - timedelta(1)
try:
p2pool_stat = P2Pool.objects.filter(mini=False).get(date=today)
print("Found P2Pool of today")
if p2pool_stat.percentage > 0:
print("Percentage > 0")
update = False
else:
print("Percentage < 0")
p2pool_stat.delete()
try:
coin = Coin.objects.filter(name="xmr").get(date=yesterday)
print("Found coin of yesterday")
if coin.hashrate > 0:
update = True
else:
update = False
except Coin.DoesNotExist:
print("Didn't find coin of yesterday")
update = False
except P2Pool.DoesNotExist:
print("Didn't find P2Pool of today")
try:
coin = Coin.objects.filter(name="xmr").get(date=yesterday)
if coin.hashrate > 0:
update = True
else:
update = False
except Coin.DoesNotExist:
update = False
if update:
p2pool_stat = P2Pool()
p2pool_stat.date = today
response = requests.get("https://p2pool.io/api/pool/stats")
data = json.loads(response.text)
p2pool_stat.hashrate = data["pool_statistics"]["hashRate"]
p2pool_stat.percentage = (
100 * data["pool_statistics"]["hashRate"] / coin.hashrate
)
p2pool_stat.miners = data["pool_statistics"]["miners"]
p2pool_stat.totalhashes = data["pool_statistics"]["totalHashes"]
p2pool_stat.totalblocksfound = data["pool_statistics"]["totalBlocksFound"]
p2pool_stat.mini = False
p2pool_stat.save()
print("p2pool saved!")
df = pd.read_excel(DATA_FILE, engine="odf", sheet_name="p2pool")
start_row, end_row = 2, 9999
        # six columns are written below (date, miners, hashrate, percentage,
        # total hashes, total blocks found), so the slice must cover all of them
        start_col, end_col = 0, 6
values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
k = len(values_mat)
date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
if date_aux < date_aux2:
values_mat[k][5] = p2pool_stat.totalblocksfound
values_mat[k][4] = p2pool_stat.totalhashes
values_mat[k][3] = p2pool_stat.percentage
values_mat[k][2] = p2pool_stat.hashrate
values_mat[k][1] = p2pool_stat.miners
values_mat[k][0] = datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
df.iloc[start_row:end_row, start_col:end_col] = values_mat
df.to_excel(DATA_FILE, sheet_name="p2pool", index=False)
print("spreadsheet updated")
        else:
            print("spreadsheet already with the latest data")
            # fall through so the P2Pool-mini section below still runs
today = date.today()
yesterday = date.today() - timedelta(1)
try:
p2pool_stat = P2Pool.objects.filter(mini=True).get(date=today)
print("Found P2PoolMini of today")
if p2pool_stat.percentage > 0:
print("Percentage > 0")
update = False
else:
print("Percentage < 0")
p2pool_stat.delete()
try:
coin = Coin.objects.filter(name="xmr").get(date=yesterday)
print("Found coin of yesterday")
if coin.hashrate > 0:
update = True
else:
update = False
except Coin.DoesNotExist:
print("Didn't find coin of yesterday")
update = False
except P2Pool.DoesNotExist:
print("Didn't find P2PoolMini of today")
try:
coin = Coin.objects.filter(name="xmr").get(date=yesterday)
if coin.hashrate > 0:
update = True
else:
update = False
except Coin.DoesNotExist:
update = False
if update:
p2pool_stat = P2Pool()
p2pool_stat.date = today
response = requests.get("https://p2pool.io/mini/api/pool/stats")
data = json.loads(response.text)
p2pool_stat.hashrate = data["pool_statistics"]["hashRate"]
p2pool_stat.percentage = (
100 * data["pool_statistics"]["hashRate"] / coin.hashrate
)
p2pool_stat.miners = data["pool_statistics"]["miners"]
p2pool_stat.totalhashes = data["pool_statistics"]["totalHashes"]
p2pool_stat.totalblocksfound = data["pool_statistics"]["totalBlocksFound"]
p2pool_stat.mini = True
p2pool_stat.save()
print("p2pool_mini saved!")
df = pd.read_excel(DATA_FILE, engine="odf", sheet_name="p2poolmini")
start_row, end_row = 2, 9999
        # six columns are written below (date, miners, hashrate, percentage,
        # total hashes, total blocks found), so the slice must cover all of them
        start_col, end_col = 0, 6
values_mat = df.iloc[start_row:end_row, start_col:end_col].to_numpy()
k = len(values_mat)
date_aux = datetime.datetime.strptime(values_mat[k - 1][0], "%Y-%m-%d")
date_aux2 = datetime.datetime.strftime(date.today(), "%Y-%m-%d")
date_aux2 = datetime.datetime.strptime(date_aux2, "%Y-%m-%d")
if date_aux < date_aux2:
values_mat[k][5] = p2pool_stat.totalblocksfound
values_mat[k][4] = p2pool_stat.totalhashes
values_mat[k][3] = p2pool_stat.percentage
values_mat[k][2] = p2pool_stat.hashrate
values_mat[k][1] = p2pool_stat.miners
values_mat[k][0] = datetime.datetime.strftime(p2pool_stat.date, "%Y-%m-%d")
df.iloc[start_row:end_row, start_col:end_col] = values_mat
df.to_excel(DATA_FILE, sheet_name="p2poolmini", index=False)
print("spreadsheet updated")
else:
print("spreadsheet already with the latest data")
return data
return True