Connection refused by Google Sheets API because daily quota reached - google-drive-api

gspread.exceptions.APIError: {'code': 429, 'message': "Quota exceeded for quota group 'ReadGroup' and limit 'Read requests per user per 100 seconds' of service 'sheets.googleapis.com' for consumer 'project_number:changedthis'.", 'status': 'RESOURCE_EXHAUSTED', 'details': [{'#type': 'type.googleapis.com/google.rpc.Help', 'links': [{'description': 'Google developer console API key', 'url': 'https://console.developers.google.com/project/changedthis/apiui/credential'}]}]}
My code sends data every 60 seconds, and it only fills in 3 cells. Now I get the error right at the start of my program. Do I wait for a few days, or is this a permanent block?

This is related to the usage limits.
As the Sheets documentation says, there is no daily usage limit, only a limit on requests per 100 seconds. If you are the owner of a billing account you can increase this quota; otherwise the limits are 500 requests per 100 seconds per project and 100 requests per 100 seconds per user.
As a workaround, make use of different service accounts in order to reach 500 requests per 100 seconds.
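For example, a minimal sketch of that workaround (the creds-*.json file names are hypothetical), rotating requests across several service accounts so each one stays under its own per-user limit:
# Sketch only: cycle through multiple service-account credentials.
import itertools
import gspread

ACCOUNT_FILES = ["creds-1.json", "creds-2.json", "creds-3.json"]  # assumed paths
clients = itertools.cycle([gspread.service_account(filename=f) for f in ACCOUNT_FILES])

def open_sheet(key):
    """Open the spreadsheet with the next service account in the rotation."""
    return next(clients).open_by_key(key).sheet1
Keep in mind that the spreadsheet has to be shared with each service account's email address for this to work.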
Edit
Looking at your code, these are the changes that are currently working (take a look at the scopes: some of them are not necessary in this case, although I didn't modify them):
import gspread
import time
import datetime
from google.oauth2 import service_account

SCOPES = ["https://spreadsheets.google.com/feeds",
          "https://www.googleapis.com/auth/spreadsheets",
          "https://www.googleapis.com/auth/drive.file",
          "https://www.googleapis.com/auth/drive"]
SERVICE_ACCOUNT_FILE = 'creds.json'

# creds = ServiceAccountCredentials.from_json_keyfile_name("creds.json", scope)
credentials = service_account.Credentials.from_service_account_file(
    SERVICE_ACCOUNT_FILE, scopes=SCOPES)
creds = credentials.with_subject('user@yourdomain.com')
client = gspread.authorize(creds)

sheet = client.open_by_key("ID").sheet1  # Open the spreadsheet

# Find the first empty row in column A
i = 1
while True:
    cell = sheet.cell(i, 1).value
    i += 1
    if not cell:  # or some other way to check whether the cell is empty
        print("Empty row has been found at row", (i - 1))
        break
    # Without this bound, if there is no empty cell, the loop would keep
    # going until sheet.cell(i, 1).value fails, I think
    if i > 1000:
        print("Did not find any empty rows")
        break

while True:
    x = str(datetime.datetime.now())
    sheet.update_cell(i, 1, x)  # Update one cell
    time.sleep(60)
In order to make this work, you should enable domain-wide delegation of authority for the service account.
References
Sheets API Usage Limits
Resolve a 429 error: Too many requests

Related

How do I concatenate json zip files in Python?

I have a folder containing 61 zip files, each of which contains 17280 zip files.
The data consists of sensor readings for the months of April and May: the 61 zip files hold the sensor data per day for those months, and the 17280 zip files inside each contain the data for that day at 5-second intervals.
I wrote code to open the data per day in a Jupyter Notebook, but I want to open it per month. Here is my code for opening the data per day:
import os
import zipfile
import pandas as pd

path = 'test_tudelft'
data = pd.DataFrame()  # collection of all the data
for zip_filename in os.listdir(path):  # loop over the zip files
    with zipfile.ZipFile(os.path.join(path, zip_filename)) as zf:  # open a zip file
        for file in zf.filelist:  # loop over the archived files
            # data = pd.concat((data, pd.read_json(zf.open(file), lines=True)))  # read the new data and merge it
            data_new = pd.read_json(zf.read(file).decode('utf8')[2:-1], orient='index').T
            data = pd.concat((data, data_new))  # read the new data and merge it
data = data.reset_index(drop=True)  # unique index per file
This code works for making plots of the data per day, but I would like to plot the data per month. How can I change my code to open the zip files nested inside the zip files?
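One possible approach (a sketch only, assuming every entry of the outer archives is itself a zip file) is to read each inner archive into memory with io.BytesIO and open it with zipfile again:
import io
import os
import zipfile
import pandas as pd

path = 'test_tudelft'
frames = []
for zip_filename in os.listdir(path):  # one outer zip file per day
    with zipfile.ZipFile(os.path.join(path, zip_filename)) as outer:
        for inner_name in outer.namelist():  # one inner zip file per 5 seconds
            with zipfile.ZipFile(io.BytesIO(outer.read(inner_name))) as inner:
                for file in inner.filelist:
                    raw = inner.read(file).decode('utf8')[2:-1]
                    frames.append(pd.read_json(raw, orient='index').T)
data = pd.concat(frames, ignore_index=True)  # unique index per file
Collecting the frames in a list and concatenating once at the end is also much faster than calling pd.concat inside the loop.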

Is there a way to solve this insufficient funds for gas when deploying a signed transaction to ganache?

Good day, everyone! I'm currently taking a 16-hour freeCodeCamp course on Solidity, Blockchain, and Smart Contracts, and I'm having trouble sending a simple signed transaction to Ganache. I keep getting this ValueError message: "ValueError: {'message': 'insufficient funds for gas * price + value', 'stack': 'Error: insufficient funds for gas * price + value\n at TransactionPool.prepareTransaction (/home/fingergod/.nvm/versions/node/v17.8.0/lib/node_modules/ganache/dist/node/1.js:2:131154)', 'code': -32003}".
P.S. I've already set my gas price while building the transaction: "gasPrice": w3.eth.gas_price
from solcx import compile_standard, install_solc
import json
from web3 import Web3
import os
from dotenv import load_dotenv

load_dotenv()
install_solc("0.8.13")

with open("./SimpleStorage.sol", "r") as file:
    simple_storage_file = file.read()

# compile solidity file
Compiled_solFile = compile_standard(
    {
        "language": "Solidity",
        "sources": {"SimpleStorage.sol": {"content": simple_storage_file}},
        "settings": {
            "outputSelection": {
                "*": {"*": ["abi", "metadata", "evm.bytecode", "evm.sourceMap"]}
            }
        },
    },
    solc_version="0.8.13",
)
# print(Compiled_solFile)

with open("compiled_code.json", "w") as file:
    json.dump(Compiled_solFile, file)

# get bytecode
bytecode = Compiled_solFile["contracts"]["SimpleStorage.sol"]["simpleStorage"][
    "evm"
]["bytecode"]["object"]

# get abi
abi = Compiled_solFile["contracts"]["SimpleStorage.sol"]["simpleStorage"]["abi"]
# print(abi)

# for connecting to ganache
url = "http://127.0.0.1:8545"
w3 = Web3(Web3.HTTPProvider(url))
chain_id = 1337
my_address = "0x15f029FEB462294b117AD56b1736c551c64a4D80"
private_key = os.getenv("PRIVATE_KEY")
print(private_key)

# Create the contract in python
SimpleStorage = w3.eth.contract(abi=abi, bytecode=bytecode)
print(SimpleStorage)

# get the nonce / latest transaction count
nonce = w3.eth.getTransactionCount(my_address)
print(nonce)

# 1. Build the transaction (needs: chainId, address, nonce)
# 2. Sign the transaction (needs: transaction, private key)
# 3. Send the signed transaction

# 1.
transaction = SimpleStorage.constructor().buildTransaction(
    {
        "chainId": chain_id,
        "gasPrice": w3.eth.gas_price,
        "from": my_address,
        "nonce": nonce,
    }
)
print(transaction)

# 2.
signed_txn = w3.eth.account.signTransaction(transaction, private_key=private_key)
print(signed_txn)

# 3.
tx_hash = w3.eth.send_raw_transaction(signed_txn.rawTransaction)
Any assistance would be highly appreciated. I've been stranded here for the past two days.
I think this has to do with your gasLimit value: gas * price is the amount of gas you are sending, not how much the operation costs. You might have seen this in the course you are taking, but as a reminder: when you send a transaction to a contract, you send the amount of gas specified in the gasLimit attribute, independent of how much the operation will actually cost. That means you are actually sending Wei equal to gasLimit * gasPrice + value. Try setting the gasLimit attribute (in hex format for consistency) in the transaction object to a value lower than your network default, and maybe the gasPrice as well.
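For instance, a minimal sketch of pinning both fields in the transaction object (the numeric values are placeholders, not recommendations):
# Sketch only: cap the gas limit and gas price explicitly so that
# gasLimit * gasPrice + value stays within the account balance.
# In web3.py the gas-limit field is named "gas".
transaction = SimpleStorage.constructor().buildTransaction(
    {
        "chainId": chain_id,
        "from": my_address,
        "nonce": nonce,
        "gas": 500000,                    # assumed value, below the network default
        "gasPrice": w3.toWei(2, "gwei"),  # assumed value
    }
)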
This should be an issue of insufficient account balance. Try sending the same transaction using a high-level function like transact() to see if it succeeds. If so, you can replicate the gasLimit and gasPrice from the tx details in your build_transaction().
Another note: you can omit the "gasPrice": w3.eth.gas_price line. It assigns the network default value, which happens automatically if you omit it, so there is no point to it unless you are going to set another custom value.
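A minimal sketch of that check (it assumes the sending account is node-managed and unlocked, which Ganache's default accounts are):
# Sketch only: deploy through the high-level transact() call first,
# then read back the gas values the node chose, for reuse later.
tx_hash = SimpleStorage.constructor().transact({"from": my_address})
tx = w3.eth.getTransaction(tx_hash)
print(tx["gas"], tx["gasPrice"])  # candidate values for buildTransaction()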

NFT: trying to run create_collectibles scripts throws execution reverted error : This is from Patrick Collins Youtube tutorial

Below is a snippet of the script (using Brownie in VS Code).
Error: "Gas estimation failed: 'execution reverted'. This transaction will likely revert. If you wish to broadcast, you must set the gas limit manually."
from brownie import AdvancedCollectible, accounts, config
from scripts.helpful_scripts import get_breed
import time

STATIC_SEED = 123

def main():
    dev = accounts.add(config["wallets"]["from_key"])
    advanced_collectible = AdvancedCollectible[len(AdvancedCollectible) - 1]
    transaction = advanced_collectible.createCollectible(
        STATIC_SEED, "None", {"from": dev, "gas_limit": 50000}
    )
    print("Waiting on second transaction...")
    # wait for the 2nd transaction
    transaction.wait(1)
    time.sleep(35)
    requestId = transaction.events["requestedCollectible"]["requestId"]
    token_id = advanced_collectible.requestIdToTokenId(requestId)
    breed = get_breed(advanced_collectible.tokenIdToBreed(token_id))
    print("Dog breed of tokenId {} is {}".format(token_id, breed))
I think this has already been answered here. To summarize: you probably have a vrf_coordinator version error. Try the Rinkeby values from the official docs.
I had the same issue, but Patrick is correct: I did not have any LINK tokens inside my newly created contract on the Rinkeby network. So I commented out most of the lines in create_collectible.py in order to import and apply the fund_advanced_collectible() function once more on my contract:
from helpfulscripts import fund_advanced_collectible

def main():
    dev = accounts.add(config['wallets']['from_key'])
    advanced_collectible = AdvancedCollectible[len(AdvancedCollectible) - 1]
    # transaction = advanced_collectible.createCollectible(STATIC_SEED, "None", {"from": dev})
    # transaction.wait(1)
    # time.sleep(35)
    # requestID = transaction.events["requestedCollectible"]["requestID"]
    # tokenID = advanced_collectible.requestIDToTokenID(requestID)
    # breed = get_breed(advanced_collectible.tokenIDToBreed(tokenID))
    # print('Dog breed of {} is {}.'.format(tokenID, breed))
    fund_advanced_collectible(advanced_collectible)
As a reminder, here is the definition of fund_advanced_collectible from helpfulscripts.py:
def fund_advanced_collectible(nft_contract):
    dev = accounts.add(config['wallets']['from_key'])
    link_token = interface.LinkTokenInterface(config['networks'][network.show_active()]['link_token'])
    link_token.transfer(nft_contract, 100000000000000000, {"from": dev})  # 0.1 LINK
Once the transaction was confirmed, I could verify at https://rinkeby.etherscan.io/address that my contract held 0.1 LINK, and when executing your code again, the error disappeared.

How to efficiently parse JSON data with multiple keys in Python 2.7?

I'm writing a script that will check the CVS COVID vaccine availability for cities in my state of VA. I have been successful in getting the data I'm looking for, but my code is hard-coded in some areas. I'm specifically asking for help improving my code in areas 1 and 2 below:
The JSON file can be found here:
https://www.cvs.com//immunizations/covid-19-vaccine.vaccine-status.VA.json?vaccineinfo
I'm trying to access the data in the responsePayloadData key. The only way I could figure out how to do this is to make it the only key. For that reason, I deleted the other key responseMetaData:
#remove the key that we don't need
del obj['responseMetaData']
I'm also not sure how to dynamically loop through the VA items without hard coding the number of cities I know are there in the data:
for x, y in obj.items():
    for a in range(34):
Here's the full code:
import requests
import json
import time
from datetime import datetime
import urllib2
try:
    import indigo
except ImportError:
    pass

strAvail = "False"
strAvailCity = "None"

try:
    # download raw json object from CVS Virginia website
    url = "https://www.cvs.com//immunizations/covid-19-vaccine.vaccine-status.VA.json?vaccineinfo"
    data = urllib2.urlopen(url).read().decode()
except urllib2.HTTPError, err:
    # note: 'return' is only valid inside a function, so raise instead
    raise SystemExit({"error": err.reason, "error_code": err.code})

# parse json object
obj = json.loads(data)

# remove the key that we don't need
del obj['responseMetaData']

# loop through the JSON dictionary and check availability
# status options: {"Fully Booked", "Available"}
for x, y in obj.items():
    for a in range(34):
        # print('City: ' + y['data']['VA'][a]['city'])
        # print('Total Available: ' + y['data']['VA'][a]['totalAvailable'])
        # print('Percent Available: ' + y['data']['VA'][a]['pctAvailable'])
        # print('Status: ' + y['data']['VA'][a]['status'])
        # print("------------------------------")
        # If there is availability anywhere in the state, take some action.
        if y['data']['VA'][a]['status'] == "Available":
            strAvail = True
            strAvailCity = y['data']['VA'][a]['city']

# Log a timestamp for this check
now = datetime.now()
strDateTime = now.strftime("%m/%d/%Y %I:%M %p")
EDIT: Since the JSON is not available outside the US, I've pasted it below:
{"responsePayloadData":{"currentTime":"2021-02-11T14:55:00.470","data":{"VA":[{"totalAvailable":"1","city":"ABINGDON","state":"VA","pctAvailable":"0.19%","status":"Fully Booked"},{"totalAvailable":"0","city":"ALEXANDRIA","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"ARLINGTON","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"BEDFORD","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"BLACKSBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"CHARLOTTESVILLE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"CHATHAM","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"CHESAPEAKE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"1","city":"DANVILLE","state":"VA","pctAvailable":"0.19%","status":"Fully Booked"},{"totalAvailable":"2","city":"DUBLIN","state":"VA","pctAvailable":"0.39%","status":"Fully Booked"},{"totalAvailable":"0","city":"FAIRFAX","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"FREDERICKSBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"GAINESVILLE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"HAMPTON","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"HARRISONBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"LEESBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"LYNCHBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"MARTINSVILLE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"MECHANICSVILLE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"MIDLOTHIAN","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},
{"totalAvailable":"0","city":"NEWPORT NEWS","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"NORFOLK","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"PETERSBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"PORTSMOUTH","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"RICHMOND","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"ROANOKE","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},
{"totalAvailable":"0","city":"ROCKY MOUNT","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"STAFFORD","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"SUFFOLK","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},
{"totalAvailable":"0","city":"VIRGINIA BEACH","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"WARRENTON","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"WILLIAMSBURG","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"WINCHESTER","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"},{"totalAvailable":"0","city":"WOODSTOCK","state":"VA","pctAvailable":"0.00%","status":"Fully Booked"}]}},"responseMetaData":{"statusDesc":"Success","conversationId":"Id-beb5f68730b34e6aa3bbc1fd927ea12b","refId":"Id-b4a7256078789eb59b8912b4","operation":"getInventorybyCity","statusCode":"0000"}}
Regarding problem 1, you can just access the data by key. You don't need to delete the other key:
payload = obj['responsePayloadData']
For the second problem, you can just iterate over the items in the list at payload['data']['VA']:
for city in payload['data']['VA']:
    print(city)
{'city': 'ABINGDON',
'pctAvailable': '0.19%',
'state': 'VA',
'status': 'Fully Booked',
'totalAvailable': '1'}
{'city': 'ALEXANDRIA',
'pctAvailable': '0.00%',
'state': 'VA',
'status': 'Fully Booked',
'totalAvailable': '0'}
...
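Putting both fixes together, here is a short sketch that collects the available cities without deleting any keys or hard-coding the city count (this syntax also works on Python 2.7):
# Sketch only: no key deletion, no hard-coded range(34).
payload = obj['responsePayloadData']
available = [c['city'] for c in payload['data']['VA'] if c['status'] == "Available"]
strAvail = bool(available)
strAvailCity = available[0] if available else "None"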

Zabbix API - Is there a way to request reduced number of 'trend' or 'history' records for a specific time range

I have been working on a project for a while which needs to convert Zabbix 'trends' and 'history' data to various types of charts, such as line charts or pie charts.
The problem is that there might be too much data (time-value pairs), especially in the case of 'history' data. Of course, I do not want to send 10,000+ points to the frontend, so I want to reduce the number of points while keeping the result representative of that specific time range.
Of course, one way to solve this is to implement it on the server side, but, if not necessary, I do not want to burden my resources (CPU, network, etc.).
I have searched through the Zabbix API documentation for 'history' and 'trends', but I have not found what I needed.
I would like to know whether there is any way to request a reduced number of 'history' or 'trend' points from the Zabbix API for a specific time period, such that the result still remains representative of all the data.
Zabbix API version: 4.0
from datetime import datetime
import math
import sys
import time

from pyzabbix import ZabbixAPI

def n_sized_chunks(lst, n):
    """Yield successive n-sized chunks from 'lst'."""
    for i in range(0, len(lst), n):
        yield lst[i:i+n]

# The hostname at which the Zabbix web interface is available
ZABBIX_SERVER = '<zabbix-server>'
MAX_POINTS = 300

zapi = ZabbixAPI(ZABBIX_SERVER)
# Log in to the Zabbix API
zapi.login('<username>', '<password>')

item_id = '<item-id>'

# Create a time range
time_till = time.mktime(datetime.now().timetuple())
time_from = time_till - 60 * 60 * 24 * 7  # 1 week

# Query the item's history (integer) data
history = zapi.history.get(itemids=[item_id],
                           time_from=time_from,
                           time_till=time_till,
                           output='extend',
                           )

length = len(history)
print(f"Before: {length}")  # ~10097

###################################################################
# Can Zabbix API do the following (or something similar) for me?  #
###################################################################
if length <= MAX_POINTS:
    sys.exit(0)

chunk_size = math.ceil(length / MAX_POINTS)
x = list(map(lambda point: float(point['clock']), history))
y = list(map(lambda point: float(point['value']), history))
x_chunks = list(n_sized_chunks(lst=x, n=chunk_size))
y_chunks = list(n_sized_chunks(lst=y, n=chunk_size))

history = []
for x, y in zip(x_chunks, y_chunks):
    history.append({'clock': (x[0] + x[-1]) / 2, 'value': sum(y) / len(y)})
######################################################################

print(f"After: {len(history)}")  # ~297
This is not currently possible. You might want to vote on https://support.zabbix.com/browse/ZBXNEXT-656.
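As a middle ground, you could query the pre-aggregated 'trends' (hourly min/avg/max values) with trend.get, which the API exposes as of Zabbix 4.0; for week-long ranges this already returns far fewer points than raw history. A minimal sketch, reusing the variables from the question's script:
# Sketch only: trends carry at most one row per hour per item,
# so a one-week range yields at most 168 points.
trends = zapi.trend.get(itemids=[item_id],
                        time_from=time_from,
                        time_till=time_till,
                        output=['itemid', 'clock', 'value_min', 'value_avg', 'value_max'])
points = [{'clock': float(t['clock']), 'value': float(t['value_avg'])} for t in trends]
print(len(points))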