2022-09-17 03:04:08 +00:00
|
|
|
from psaw import PushshiftAPI
|
|
|
|
import pandas as pd
|
|
|
|
import json
|
|
|
|
import requests
|
|
|
|
import datetime
|
|
|
|
from datetime import date, timedelta
|
2022-12-25 17:59:02 +00:00
|
|
|
from .models import Coin, Social, P2Pool, Dominance, Rank, Sfmodel, DailyData, Withdrawal
|
2022-09-17 03:04:08 +00:00
|
|
|
from requests import Session
|
|
|
|
from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
|
|
|
|
import pygsheets
|
|
|
|
|
|
|
|
####################################################################################
# Reddit api
####################################################################################

# The Pushshift service has been unreliable. When it is up, bind the live
# client; when it is down, leave `api` as the sentinel False. The
# data_prep_posts / data_prep_comments helpers below check this flag and
# return 0 instead of querying when the client is disabled.
#api = PushshiftAPI() # When it's working
api = False # When it's not
|
2022-09-17 03:04:08 +00:00
|
|
|
|
|
|
|
# Get daily post on Reddit
|
|
|
|
def data_prep_posts(subreddit, start_time, end_time, filters, limit):
    """Return a DataFrame of Pushshift submissions for *subreddit*.

    Queries submissions created between *start_time* and *end_time*
    (epoch seconds), restricted to *filters* fields and capped at
    *limit* rows. When the module-level Pushshift client is disabled
    (``api`` is False) the function returns the int 0 instead.
    """
    # Fall back to a default field set when the caller passes none.
    if not filters:
        filters = ['id', 'author', 'created_utc', 'domain', 'url', 'title', 'num_comments']

    # Pushshift disabled: signal "no data" with 0 (callers test for this).
    if api == False:
        return 0

    submissions = api.search_submissions(subreddit=subreddit, after=start_time, before=end_time, filter=filters, limit=limit)
    return pd.DataFrame(list(submissions))
|
2022-09-17 03:04:08 +00:00
|
|
|
|
|
|
|
# Get daily comments on Reddit
|
|
|
|
def data_prep_comments(term, start_time, end_time, filters, limit):
    """Return a DataFrame of Pushshift comments matching *term*.

    Searches comments created between *start_time* and *end_time*
    (epoch seconds), restricted to *filters* fields and capped at
    *limit* rows. When the module-level Pushshift client is disabled
    (``api`` is False) the function returns the int 0 instead.
    """
    # Fall back to a default field set when the caller passes none.
    if not filters:
        filters = ['id', 'author', 'created_utc','body', 'permalink', 'subreddit']

    # Pushshift disabled: signal "no data" with 0 (callers test for this).
    if api == False:
        return 0

    found = api.search_comments(q=term, after=start_time, before=end_time, filter=filters, limit=limit)
    return pd.DataFrame(list(found))
|
2022-09-17 03:04:08 +00:00
|
|
|
|
|
|
|
####################################################################################
|
|
|
|
# Other useful functions
|
|
|
|
####################################################################################
|
|
|
|
# Get most recent metrics from a data provider of your choice for 'symbol'
|
|
|
|
def get_latest_metrics(symbol, url):
    """Fetch daily chain metrics for *symbol* from a paginated provider API.

    Walks the provider's pages starting at *url* (each response is expected
    to carry a 'data' list of per-day items and, while more pages remain, a
    'next_page_url'), upserting one Coin row per day. Returns the number of
    rows saved.
    """
    update = True
    count = 0

    while update:
        response = requests.get(url)
        data = json.loads(response.text)
        data_aux = data['data']
        for item in data_aux:
            # Item timestamps look like 'YYYY-MM-DDTHH:MM:SS'; keep the date part.
            day, hour = str(item['time']).split('T')
            # Round-trip through strptime/strftime normalizes the date string.
            day = datetime.datetime.strptime(day, '%Y-%m-%d')
            day = datetime.datetime.strftime(day, '%Y-%m-%d')
            try:
                # Update the existing row for this coin/day if there is one.
                coin = Coin.objects.filter(name=symbol).get(date=day)
            except:
                coin = Coin()

            try:
                coin.name = symbol
                coin.date = day
                # Each metric defaults to 0 when missing or non-numeric in
                # the payload (bare excepts deliberately swallow bad fields).
                try:
                    coin.priceusd = float(item['PriceUSD'])
                except:
                    coin.priceusd = 0
                try:
                    coin.pricebtc = float(item['PriceBTC'])
                except:
                    coin.pricebtc = 0
                try:
                    coin.inflation = float(item['IssContPctAnn'])
                    # Stock-to-flow proxy derived from annualized inflation.
                    coin.stocktoflow = (100/coin.inflation)**1.65
                except:
                    coin.inflation = 0
                    coin.stocktoflow = 0
                try:
                    coin.supply = float(item['SplyCur'])
                except:
                    coin.supply = 0
                try:
                    coin.fee = float(item['FeeTotNtv'])
                except:
                    coin.fee = 0
                try:
                    coin.revenue = float(item['RevNtv'])
                except:
                    coin.revenue = 0
                try:
                    coin.hashrate = float(item['HashRate'])
                except:
                    coin.hashrate = 0
                try:
                    coin.transactions = float(item['TxCnt'])
                except:
                    coin.transactions = 0

                coin.save()
                count += 1
                print(str(symbol) + ' ' + str(coin.date))

            except:
                # NOTE(review): this bare except silently drops any item
                # whose save fails — consider logging the failure.
                pass

        try:
            # Follow pagination until the provider stops sending a next page.
            url = data['next_page_url']
            update = True
        except:
            update = False
            break

    return count
|
|
|
|
|
2022-12-25 17:59:02 +00:00
|
|
|
# Get binance withdrawal state
|
|
|
|
def get_binance_withdrawal(symbol):
    """Scrape Binance's network page for *symbol*'s withdrawal status.

    Compares the scraped 'withdrawEnable' flag against the most recent
    stored Withdrawal row and records a new row whenever the state flips.
    Returns True when withdrawals are enabled, False when disabled, and
    None when the flag cannot be found on the page.
    """
    url = 'https://www.binance.com/en/network'

    # Most recent known state; bootstrap an "enabled" record if none exists.
    history = Withdrawal.objects.order_by('-date')
    if len(history) > 0:
        withdrawal = history[0]
    else:
        withdrawal = Withdrawal()
        withdrawal.state = True
        withdrawal.save()
        return True

    # Narrow the raw HTML down to the ~25 chars holding the flag for the coin.
    page = requests.get(url).text
    idx = page.find(symbol)
    snippet = page[idx:idx+400]
    idx = snippet.find('withdrawEnable')
    snippet = snippet[idx:idx+25]

    try:
        snippet.index('true')
        print(snippet.index('true'))
        print('Enabled')
        # Record the transition only when the stored state disagrees.
        if not(withdrawal.state):
            new_withdrawal = Withdrawal()
            new_withdrawal.state = True
            new_withdrawal.save()
        return True
    except:
        try:
            snippet.index('false')
            print(snippet.index('false'))
            print('Disabled')
            if withdrawal.state:
                new_withdrawal = Withdrawal()
                new_withdrawal.state = False
                new_withdrawal.save()
            return False
        except:
            # Neither 'true' nor 'false' found — page layout changed.
            return None
|
|
|
|
|
2022-09-17 03:04:08 +00:00
|
|
|
# Get latest price data for Monero
|
|
|
|
def get_latest_price(symbol):
    """Return the latest quote payload for *symbol* from the configured provider.

    Reads the provider URL and API-key header from settings.json, issues the
    request, and returns the decoded JSON payload. Returns False when the
    request fails or when the payload lacks a cmc_rank for the symbol (used
    as a sanity check that real data came back).
    """
    # `with` closes the file; the old explicit file.close() after the block
    # was redundant. Settings get their own name so they are not shadowed
    # by the provider response below.
    with open("settings.json") as file:
        settings = json.load(file)

    url = settings["metrics_provider"][0]["price_url_old"] + symbol
    print(url)
    parameters = {
        'convert':'USD',
    }
    headers = {
        'Accepts': 'application/json',
        settings["metrics_provider"][0]["api_key_name"]: settings["metrics_provider"][0]["api_key_value"],
    }

    session = Session()
    session.headers.update(headers)

    try:
        response = session.get(url, params=parameters)
        data = json.loads(response.text)
        print('getting latest data')
        try:
            # A present cmc_rank is treated as proof the provider returned
            # usable data for this symbol.
            if data['data'][symbol.upper()]['cmc_rank']:
                print('new data received')
            else:
                print('problem with the data provider')
                data = False
        except Exception:
            # Missing keys / malformed payload — report failure, don't raise.
            data = False
    except (ConnectionError, Timeout, TooManyRedirects):
        data = False

    return data
|
|
|
|
|
|
|
|
# Get latest dominance value and update
|
|
|
|
def update_dominance(data):
    """Save today's XMR market-cap dominance and mirror it to the spreadsheet.

    *data* is the provider payload from get_latest_price (or False on
    failure). Returns the payload on success, False on failure or when the
    spreadsheet already holds today's row.
    """
    # Guard clause: nothing to record without a payload.
    if not(data):
        print('error updating dominance')
        return False

    dominance = Dominance()
    dominance.name = 'xmr'
    dominance.date = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
    dominance.dominance = float(data['data']['XMR']['quote']['USD']['market_cap_dominance'])
    dominance.save()

    # Mirror the value to the tracking spreadsheet.
    gc = pygsheets.authorize(service_file='service_account_credentials.json')
    book = gc.open('zcash_bitcoin')
    wks = book.worksheet_by_title('Sheet7')

    rows = wks.get_values(start=(3,1), end=(9999,2), returnas='matrix')
    n = len(rows)
    last_seen = datetime.datetime.strptime(rows[n-1][0], '%Y-%m-%d')
    today_str = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
    today_dt = datetime.datetime.strptime(today_str, '%Y-%m-%d')

    if last_seen < today_dt:
        # Data rows start at sheet row 3, hence the +3 offset.
        wks.update_value('B' + str(n + 3), dominance.dominance)
        wks.update_value('A' + str(n + 3), dominance.date)
        print('spreadsheet updated')
    else:
        print('spreadsheet already with the latest data')
        return False
    return data
|
|
|
|
|
|
|
|
# Get latest rank value and update
|
|
|
|
def update_rank(data=None):
    """Save today's XMR CoinMarketCap rank and mirror it to the spreadsheet.

    When *data* is not supplied, fetches a fresh payload via
    get_latest_price('xmr'). Returns the payload, or False when no payload
    could be obtained.
    """
    if not(data):
        data = get_latest_price('xmr')
        if not(data):
            print('error updating rank')
            return False
        else:
            rank = Rank()
            rank.name = 'xmr'
            rank.date = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
            rank.rank = int(data['data']['XMR']['cmc_rank'])
            rank.save()

            # Mirror the value to the tracking spreadsheet.
            gc = pygsheets.authorize(service_file='service_account_credentials.json')
            book = gc.open('zcash_bitcoin')
            wks = book.worksheet_by_title('Sheet8')

            rows = wks.get_values(start=(3,1), end=(9999,2), returnas='matrix')
            n = len(rows)
            last_seen = datetime.datetime.strptime(rows[n-1][0], '%Y-%m-%d')
            today_str = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
            today_dt = datetime.datetime.strptime(today_str, '%Y-%m-%d')

            if last_seen < today_dt:
                # Data rows start at sheet row 3, hence the +3 offset.
                wks.update_value('B' + str(n + 3), rank.rank)
                wks.update_value('A' + str(n + 3), rank.date)
                print('spreadsheet updated')
            else:
                print('spreadsheet already with the latest data')
            return data

    return data
|
|
|
|
|
|
|
|
# Load Reddit api to check if there are new followers
|
|
|
|
def check_new_social(symbol):
    """Record yesterday's Reddit stats for r/<symbol> if not already stored.

    Fetches the subscriber count from Reddit's about.json and, when the
    Pushshift client is available, posts-per-hour (12 h window) and
    comments-per-hour (1 h window) via the data_prep_* helpers.
    Always returns True.
    """
    yesterday = datetime.datetime.strftime(date.today()-timedelta(1), '%Y-%m-%d')
    socials = Social.objects.filter(name=symbol).filter(date=yesterday)
    if not(socials):
        print('new social')
        request = 'https://www.reddit.com/r/'+ symbol +'/about.json'
        response = requests.get(request, headers = {'User-agent': 'Checking new social data'})
        data = json.loads(response.content)
        data = data['data']
        subscribers = data['subscribers']
        social = Social()
        social.name = symbol
        social.date = yesterday
        social.subscriberCount = subscribers

        # Midnight today as an epoch timestamp (normalize via str round-trip).
        date_aux = date.today()
        date_aux = datetime.datetime.strftime(date_aux, '%Y-%m-%d')
        date_aux = datetime.datetime.strptime(date_aux, '%Y-%m-%d')
        timestamp1 = int(datetime.datetime.timestamp(date_aux))

        # Posts over the preceding 12 hours (43200 s).
        timestamp2 = int(timestamp1 - 43200)
        limit = 1000
        filters = []
        data = data_prep_posts(symbol, timestamp2, timestamp1, filters, limit)
        # BUG FIX: data_prep_posts returns a DataFrame on success and the
        # int 0 when Pushshift is disabled. The old `if data != 0:` raises
        # ValueError on a DataFrame (ambiguous truth value); test the type.
        if isinstance(data, pd.DataFrame):
            social.postsPerHour = len(data)/12
        else:
            social.postsPerHour = 0

        # Comments over the preceding hour (3600 s).
        timestamp2 = int(timestamp1 - 3600)
        limit = 1000
        data = data_prep_comments(symbol, timestamp2, timestamp1, filters, limit)
        if isinstance(data, pd.DataFrame):
            social.commentsPerHour = len(data)/1
        else:
            social.commentsPerHour = 0

        social.save()
        print('getting new data - ' + str(social.name) + ' - ' + str(social.date))
    return True
|
|
|
|
|
|
|
|
# Update database DailyData with most recent coin data
|
|
|
|
def update_database(date_from=None, date_to=None):
    """Rebuild Sfmodel and DailyData rows from stored Coin/Social rows.

    With no arguments, processes the last 5 days; otherwise *date_from* and
    *date_to* are 'YYYY-MM-DD' strings. Returns the number of days processed
    (returning early as soon as a day is missing core coin data).

    NOTE(review): in the no-argument branch date_from/date_to are
    datetime.date while the explicit branch produces datetime.datetime —
    mixing these in `date - datetime` / `date_aux <= date_to` looks
    type-inconsistent; confirm which branch is actually exercised.
    """
    # Reference epoch for the stock-to-flow "greyline" day counter.
    date_zero = '2014-05-20'

    if not(date_from) or not(date_to):
        date_to = date.today()
        date_from = date_to - timedelta(5)
        # Days elapsed since date_zero, advanced once per processed day below.
        amount = date_from - datetime.datetime.strptime(date_zero, '%Y-%m-%d')
    else:
        print(str(date_from) + ' to ' + str(date_to))
        date_from = datetime.datetime.strptime(date_from, '%Y-%m-%d')
        date_to = datetime.datetime.strptime(date_to, '%Y-%m-%d')
        amount = date_from - datetime.datetime.strptime(date_zero, '%Y-%m-%d')

    count = 0
    date_aux = date_from
    while date_aux <= date_to:
        date_aux = date_from + timedelta(count)
        # Previous day, used for supply deltas (emission/fees).
        date_aux2 = date_aux - timedelta(1)
        try:
            # Core rows: today's and yesterday's xmr/btc data must exist.
            coin_xmr = Coin.objects.filter(name='xmr').get(date=date_aux)
            coin_xmr2 = Coin.objects.filter(name='xmr').get(date=date_aux2)
            coin_btc = Coin.objects.filter(name='btc').get(date=date_aux)
            coin_btc2 = Coin.objects.filter(name='btc').get(date=date_aux2)
            # Secondary coins are optional: fall back to an empty Coin().
            try:
                coin_dash = Coin.objects.filter(name='dash').get(date=date_aux)
            except:
                coin_dash = Coin()
            try:
                coin_zcash = Coin.objects.filter(name='zec').get(date=date_aux)
            except:
                coin_zcash = Coin()
            try:
                coin_grin = Coin.objects.filter(name='grin').get(date=date_aux)
            except:
                coin_grin = Coin()

            # Zero inflation means the metrics feed hasn't caught up yet.
            if coin_btc.inflation == 0 or coin_xmr.inflation == 0:
                return count

            # Walk back up to 100 days to find the nearest Social rows.
            count_aux = 0
            found = False
            while count_aux < 100 and not(found):
                try:
                    date_aux3 = date_aux - timedelta(count_aux)
                    social_btc = Social.objects.filter(name='Bitcoin').get(date=date_aux3)
                    social_xmr = Social.objects.filter(name='Monero').get(date=date_aux3)
                    social_crypto = Social.objects.filter(name='Cryptocurrency').get(date=date_aux3)
                    found = True
                except:
                    found = False
                count_aux += 1
        except:
            # Missing core coin row — stop here.
            return count

        # ---- Sfmodel (stock-to-flow chart) row for this day ----
        try:
            data = Sfmodel.objects.get(date=coin_xmr.date)
        except:
            data = Sfmodel()
            data.priceusd = 0
            data.pricebtc = 0
            data.stocktoflow = 0
            data.greyline = 0
            data.color = 0
        data.date = coin_xmr.date
        data.pricebtc = coin_xmr.pricebtc
        data.priceusd = coin_xmr.priceusd
        if data.stocktoflow == 0 and coin_xmr.supply > 0:
            # Recompute s2f from Monero's emission rule: supply in atomic
            # units (10^12 per XMR); block reward = (2^64-1 - supply) >> 19,
            # floored at the 0.6 XMR tail emission.
            supply = int(coin_xmr.supply)*10**12
            reward = (2**64 -1 - supply) >> 19
            if reward < 0.6*(10**12):
                reward = 0.6*(10**12)
            # Annualized inflation at 720 blocks/day.
            inflation = 100*reward*720*365/supply
            data.stocktoflow = (100/(inflation))**1.65
        # Color index: price/btc scaled by a line running from v0 to 0.015
        # over 6 years of days since date_zero.
        v0 = 0.002
        delta = (0.015 - 0.002)/(6*365)
        data.color = 30*coin_xmr.pricebtc/((amount.days)*delta + v0)
        amount += timedelta(1)
        data.save()

        # ---- DailyData row for this day ----
        try:
            data = DailyData.objects.get(date=coin_xmr.date)
        except:
            # New row: initialize every field to zero first.
            data = DailyData()
            # Date field
            data.date = coin_xmr.date
            # Basic information
            data.btc_priceusd = 0
            data.xmr_priceusd = 0
            data.xmr_pricebtc = 0
            # Marketcap charts
            data.btc_marketcap = 0
            data.xmr_marketcap = 0
            data.dash_marketcap = 0
            data.grin_marketcap = 0
            data.zcash_marketcap = 0
            # Transactions charts
            data.xmr_transacpercentage = 0
            data.btc_transactions = 0
            data.zcash_transactions = 0
            data.dash_transactions = 0
            data.grin_transactions = 0
            data.xmr_transactions = 0
            data.btc_supply = 0
            data.xmr_supply = 0
            # Issuance charts
            data.btc_inflation = 0
            data.xmr_inflation = 0
            data.dash_inflation = 0
            data.grin_inflation = 0
            data.zcash_inflation = 0
            data.xmr_metcalfebtc = 0
            data.xmr_metcalfeusd = 0
            data.btc_return = 0
            data.xmr_return = 0
            data.btc_emissionusd = 0
            data.btc_emissionntv = 0
            data.xmr_emissionusd = 0
            data.xmr_emissionntv = 0
            # Mining charts
            data.btc_minerrevntv = 0
            data.xmr_minerrevntv = 0
            data.btc_minerrevusd = 0
            data.xmr_minerrevusd = 0
            data.btc_minerfeesntv = 0
            data.xmr_minerfeesntv = 0
            data.btc_minerfeesusd = 0
            data.xmr_minerfeesusd = 0
            data.btc_transcostntv = 0
            data.xmr_transcostntv = 0
            data.btc_transcostusd = 0
            data.xmr_transcostusd = 0
            data.xmr_minerrevcap = 0
            data.btc_minerrevcap = 0
            data.btc_commitntv = 0
            data.xmr_commitntv = 0
            data.btc_commitusd = 0
            data.xmr_commitusd = 0
            data.btc_blocksize = 0
            data.xmr_blocksize = 0
            data.btc_difficulty = 0
            data.xmr_difficulty = 0
            # Reddit charts
            data.btc_subscriberCount = 0
            data.btc_commentsPerHour = 0
            data.btc_postsPerHour = 0
            data.xmr_subscriberCount = 0
            data.xmr_commentsPerHour = 0
            data.xmr_postsPerHour = 0
            data.crypto_subscriberCount = 0
            data.crypto_commentsPerHour = 0
            data.crypto_postsPerHour = 0

        # Now overwrite with the real values for the day.
        # Date field
        data.date = coin_xmr.date
        # Basic information
        data.btc_priceusd = coin_btc.priceusd
        data.xmr_priceusd = coin_xmr.priceusd
        data.xmr_pricebtc = coin_xmr.pricebtc
        # Marketcap charts
        data.btc_marketcap = float(coin_btc.priceusd)*float(coin_btc.supply)
        data.xmr_marketcap = float(coin_xmr.priceusd)*float(coin_xmr.supply)
        data.dash_marketcap = float(coin_dash.priceusd)*float(coin_dash.supply)
        data.grin_marketcap = float(coin_grin.priceusd)*float(coin_grin.supply)
        data.zcash_marketcap = float(coin_zcash.priceusd)*float(coin_zcash.supply)

        # Transactions charts
        try:
            # Guard against division by zero when btc has no transactions.
            data.xmr_transacpercentage = coin_xmr.transactions/coin_btc.transactions
        except:
            pass
        data.xmr_transactions = coin_xmr.transactions
        data.btc_transactions = coin_btc.transactions
        data.zcash_transactions = coin_zcash.transactions
        data.dash_transactions = coin_dash.transactions
        data.grin_transactions = coin_grin.transactions
        data.btc_supply = coin_btc.supply
        data.xmr_supply = coin_xmr.supply
        # Issuance charts
        data.btc_inflation = coin_btc.inflation
        data.xmr_inflation = coin_xmr.inflation
        data.dash_inflation = coin_dash.inflation
        data.grin_inflation = coin_grin.inflation
        data.zcash_inflation = coin_zcash.inflation
        try:
            # Metcalfe-style valuation relative to btc activity.
            data.xmr_metcalfebtc = coin_xmr.transactions*coin_xmr.supply/(coin_btc.supply*coin_btc.transactions)
            data.xmr_metcalfeusd = coin_btc.priceusd*coin_xmr.transactions*coin_xmr.supply/(coin_btc.supply*coin_btc.transactions)
        except:
            pass
        # Returns normalized to fixed base prices (30 USD btc, 5.01 USD xmr).
        data.btc_return = coin_btc.priceusd/30
        data.xmr_return = coin_xmr.priceusd/5.01
        # Daily emission = supply delta vs yesterday.
        data.btc_emissionusd = (coin_btc.supply - coin_btc2.supply)*coin_btc.priceusd
        data.btc_emissionntv = coin_btc.supply - coin_btc2.supply
        data.xmr_emissionusd = (coin_xmr.supply - coin_xmr2.supply)*coin_xmr.priceusd
        data.xmr_emissionntv = coin_xmr.supply - coin_xmr2.supply
        # Mining charts
        data.btc_minerrevntv = coin_btc.revenue
        data.xmr_minerrevntv = coin_xmr.revenue
        data.btc_minerrevusd = coin_btc.revenue*coin_btc.priceusd
        data.xmr_minerrevusd = coin_xmr.revenue*coin_xmr.priceusd
        # Fees = total miner revenue minus the day's emission.
        data.btc_minerfeesntv = coin_btc.revenue - coin_btc.supply + coin_btc2.supply
        data.xmr_minerfeesntv = coin_xmr.revenue - coin_xmr.supply + coin_xmr2.supply
        data.btc_minerfeesusd = (coin_btc.revenue - coin_btc.supply + coin_btc2.supply)*coin_btc.priceusd
        data.xmr_minerfeesusd = (coin_xmr.revenue - coin_xmr.supply + coin_xmr2.supply)*coin_xmr.priceusd
        try:
            # Average cost per transaction (guard: zero transactions).
            data.btc_transcostntv = coin_btc.fee/coin_btc.transactions
            data.xmr_transcostntv = coin_xmr.fee/coin_xmr.transactions
            data.btc_transcostusd = coin_btc.priceusd*coin_btc.fee/coin_btc.transactions
            data.xmr_transcostusd = coin_xmr.priceusd*coin_xmr.fee/coin_xmr.transactions
        except:
            pass
        try:
            # Annualized miner revenue as % of supply (guard: zero supply).
            data.xmr_minerrevcap = 365*100*coin_xmr.revenue/coin_xmr.supply
            data.btc_minerrevcap = 365*100*coin_btc.revenue/coin_btc.supply
        except:
            pass
        try:
            # Hashrate committed per unit of miner revenue (guard: zero revenue).
            data.btc_commitntv = coin_btc.hashrate/(coin_btc.revenue)
            data.xmr_commitntv = coin_xmr.hashrate/(coin_xmr.revenue)
            data.btc_commitusd = coin_btc.hashrate/(coin_btc.revenue*coin_btc.priceusd)
            data.xmr_commitusd = coin_xmr.hashrate/(coin_xmr.revenue*coin_xmr.priceusd)
        except:
            pass
        try:
            # Blocksize/difficulty may be absent on the Coin model rows.
            data.btc_blocksize = coin_btc.blocksize
            data.xmr_blocksize = coin_xmr.blocksize
            data.btc_difficulty = coin_btc.difficulty
            data.xmr_difficulty = coin_xmr.difficulty
        except:
            pass
        # Reddit charts
        try:
            data.btc_subscriberCount = social_btc.subscriberCount
            data.btc_commentsPerHour = social_btc.commentsPerHour
            data.btc_postsPerHour = social_btc.postsPerHour
            data.xmr_subscriberCount = social_xmr.subscriberCount
            data.xmr_commentsPerHour = social_xmr.commentsPerHour
            data.xmr_postsPerHour = social_xmr.postsPerHour
            data.crypto_subscriberCount = social_crypto.subscriberCount
            data.crypto_commentsPerHour = social_crypto.commentsPerHour
            data.crypto_postsPerHour = social_crypto.postsPerHour
        except:
            # No Social rows within the 100-day lookback — zero them out.
            data.btc_subscriberCount = 0
            data.btc_commentsPerHour = 0
            data.btc_postsPerHour = 0
            data.xmr_subscriberCount = 0
            data.xmr_commentsPerHour = 0
            data.xmr_postsPerHour = 0
            data.crypto_subscriberCount = 0
            data.crypto_commentsPerHour = 0
            data.crypto_postsPerHour = 0

        data.save()
        # NOTE(review): social_xmr may be unbound here if the 100-day Social
        # lookback never succeeded — this print would then raise NameError.
        print(str(coin_xmr.date) + ' xmr ' + str(data.xmr_subscriberCount) + ' - ' + str(social_xmr.subscriberCount) + ' = ' + str(int(data.xmr_marketcap)) + ' => ' + str(coin_xmr.inflation))

        count += 1

    return count
|
|
|
|
|
|
|
|
# Get latest P2Pool data
|
|
|
|
def update_p2pool():
    """Refresh today's P2Pool (and P2Pool-mini) stats and mirror them to sheets.

    Each half fetches pool statistics from p2pool.io, saves a P2Pool row
    (percentage of network hashrate computed against yesterday's xmr Coin
    row), and appends a row to the tracking spreadsheet.

    NOTE(review): when the main-pool half performs an update it returns its
    payload immediately, so the mini half only runs when the main half was
    already up to date — confirm this early return is intentional.
    """
    # ---- Main P2Pool ----
    today = date.today()
    yesterday = date.today() - timedelta(1)
    try:
        p2pool_stat = P2Pool.objects.filter(mini=False).get(date=today)
        print('achou p2pool de hoje')
        if p2pool_stat.percentage > 0:
            # Today's row already has a valid percentage — nothing to do.
            print('porcentagem > 0')
            update = False
        else:
            # Broken row: delete it and retry if yesterday's hashrate exists.
            print('porcentagem < 0')
            p2pool_stat.delete()
            try:
                coin = Coin.objects.filter(name='xmr').get(date=yesterday)
                print('achou coin de ontem')
                if coin.hashrate > 0:
                    update = True
                else:
                    update = False
            except:
                print('nao achou coin de ontem')
                update = False
    except:
        # No row for today yet: update only if yesterday's hashrate exists
        # (needed as the denominator for the percentage below).
        print('nao achou p2pool de hoje')
        try:
            coin = Coin.objects.filter(name='xmr').get(date=yesterday)
            if coin.hashrate > 0:
                update = True
            else:
                update = False
        except:
            update = False

    if update:
        p2pool_stat = P2Pool()
        p2pool_stat.date = today
        response = requests.get('https://p2pool.io/api/pool/stats')

        data = json.loads(response.text)
        p2pool_stat.hashrate = data['pool_statistics']['hashRate']
        # Share of the whole network, using yesterday's xmr hashrate.
        p2pool_stat.percentage = 100*data['pool_statistics']['hashRate']/coin.hashrate
        p2pool_stat.miners = data['pool_statistics']['miners']
        p2pool_stat.totalhashes = data['pool_statistics']['totalHashes']
        p2pool_stat.totalblocksfound = data['pool_statistics']['totalBlocksFound']
        p2pool_stat.mini = False
        p2pool_stat.save()
        print('p2pool saved!')

        # Mirror to the 'p2pool' worksheet (data rows start at sheet row 3).
        gc = pygsheets.authorize(service_file='service_account_credentials.json')
        sh = gc.open('zcash_bitcoin')
        wks = sh.worksheet_by_title('p2pool')

        values_mat = wks.get_values(start=(3,1), end=(9999,3), returnas='matrix')

        k = len(values_mat)
        date_aux = datetime.datetime.strptime(values_mat[k-1][0], '%Y-%m-%d')
        date_aux2 = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
        date_aux2 = datetime.datetime.strptime(date_aux2, '%Y-%m-%d')
        if date_aux < date_aux2:
            cell = 'F' + str(k + 3)
            wks.update_value(cell, p2pool_stat.totalblocksfound)
            cell = 'E' + str(k + 3)
            wks.update_value(cell, p2pool_stat.totalhashes)
            cell = 'D' + str(k + 3)
            wks.update_value(cell, p2pool_stat.percentage)
            cell = 'C' + str(k + 3)
            wks.update_value(cell, p2pool_stat.hashrate)
            cell = 'B' + str(k + 3)
            wks.update_value(cell, p2pool_stat.miners)
            cell = 'A' + str(k + 3)
            wks.update_value(cell, datetime.datetime.strftime(p2pool_stat.date, '%Y-%m-%d'))
            print('spreadsheet updated')
        else:
            print('spreadsheet already with the latest data')
        return data

    # ---- P2Pool mini (same flow against the /mini endpoint) ----
    today = date.today()
    yesterday = date.today() - timedelta(1)
    try:
        p2pool_stat = P2Pool.objects.filter(mini=True).get(date=today)
        print('achou p2pool_mini de hoje')
        if p2pool_stat.percentage > 0:
            print('porcentagem > 0')
            update = False
        else:
            print('porcentagem < 0')
            p2pool_stat.delete()
            try:
                coin = Coin.objects.filter(name='xmr').get(date=yesterday)
                print('achou coin de ontem')
                if coin.hashrate > 0:
                    update = True
                else:
                    update = False
            except:
                print('nao achou coin de ontem')
                update = False
    except:
        print('nao achou p2pool_mini de hoje')
        try:
            coin = Coin.objects.filter(name='xmr').get(date=yesterday)
            if coin.hashrate > 0:
                update = True
            else:
                update = False
        except:
            update = False

    if update:
        p2pool_stat = P2Pool()
        p2pool_stat.date = today
        response = requests.get('https://p2pool.io/mini/api/pool/stats')

        data = json.loads(response.text)
        p2pool_stat.hashrate = data['pool_statistics']['hashRate']
        p2pool_stat.percentage = 100*data['pool_statistics']['hashRate']/coin.hashrate
        p2pool_stat.miners = data['pool_statistics']['miners']
        p2pool_stat.totalhashes = data['pool_statistics']['totalHashes']
        p2pool_stat.totalblocksfound = data['pool_statistics']['totalBlocksFound']
        p2pool_stat.mini = True
        p2pool_stat.save()
        print('p2pool_mini saved!')

        # Mirror to the 'p2poolmini' worksheet.
        gc = pygsheets.authorize(service_file='service_account_credentials.json')
        sh = gc.open('zcash_bitcoin')
        wks = sh.worksheet_by_title('p2poolmini')

        values_mat = wks.get_values(start=(3,1), end=(9999,3), returnas='matrix')

        k = len(values_mat)
        date_aux = datetime.datetime.strptime(values_mat[k-1][0], '%Y-%m-%d')
        date_aux2 = datetime.datetime.strftime(date.today(), '%Y-%m-%d')
        date_aux2 = datetime.datetime.strptime(date_aux2, '%Y-%m-%d')
        if date_aux < date_aux2:
            cell = 'F' + str(k + 3)
            wks.update_value(cell, p2pool_stat.totalblocksfound)
            cell = 'E' + str(k + 3)
            wks.update_value(cell, p2pool_stat.totalhashes)
            cell = 'D' + str(k + 3)
            wks.update_value(cell, p2pool_stat.percentage)
            cell = 'C' + str(k + 3)
            wks.update_value(cell, p2pool_stat.hashrate)
            cell = 'B' + str(k + 3)
            wks.update_value(cell, p2pool_stat.miners)
            cell = 'A' + str(k + 3)
            wks.update_value(cell, datetime.datetime.strftime(p2pool_stat.date, '%Y-%m-%d'))
            print('spreadsheet updated')
        else:
            print('spreadsheet already with the latest data')
        return data

    return True
|