"""Fetch Ethereum block metadata from the public BigQuery dataset and dump it to CSV."""
import csv
import os

from google.cloud import bigquery

# Point the client at the service-account key file shipped alongside the script.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "etharbskey.json"
client = bigquery.Client()


# Columns pulled from the public Ethereum blocks table.
FIELDS_TO_GRAB = 'number,timestamp,gas_limit,gas_used,miner,extra_data'
# Split once; reused for the header row and for every data row below.
FIELD_LIST = FIELDS_TO_GRAB.split(',')

query = """SELECT """ + FIELDS_TO_GRAB + """ FROM `bigquery-public-data.ethereum_blockchain.blocks`;"""


# newline='' is required by the csv module; without it the writer emits
# blank rows between records on Windows.
with open('data/block_data.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=',',
                            quotechar='"', quoting=csv.QUOTE_MINIMAL)

    # Header row mirrors the SELECT column order.
    spamwriter.writerow(FIELD_LIST)

    job_config = bigquery.QueryJobConfig()
    query_job = client.query(
        query,
        # Location must match that of the dataset(s) referenced in the query.
        location='US',
        job_config=job_config)  # API request - starts the query

    # Iterating the job blocks until results are ready, then streams rows.
    for item in query_job:
        spamwriter.writerow([item[x] for x in FIELD_LIST])


# `assert` is stripped under `python -O`; raise explicitly so an unfinished
# job can never be silently ignored.
if query_job.state != 'DONE':
    raise RuntimeError("BigQuery job did not complete: state=%s" % query_job.state)
print("[database fetcher] Wrote all block data")