github.com/pdaian/flashboys2@v0.0.0-20190718175736-b101c35361f0/get_bq_relayers.py (about)

import csv, os
from google.cloud import bigquery

# Point the BigQuery client at the service-account key shipped alongside the
# script.  NOTE(review): hard-coded relative path — the script must be run
# from the repository root for this to resolve.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "etharbskey.json"
client = bigquery.Client()


# Parameterized query: for every log in the public Ethereum dataset, return
# the distinct emitting contract addresses whose topics intersect the
# @topics array parameter (bound per-exchange in the loop below).
query = """SELECT DISTINCT logs.address FROM `bigquery-public-data.ethereum_blockchain.logs` AS logs JOIN UNNEST(topics) AS topic WHERE topic IN UNNEST(@topics)"""
     9  
    10  
    11  for exchange in (('bancor', ['0x276856b36cbc45526a0ba64f44611557a2a8b68662c5388e9fe6d72e86e1c8cb']), ('kyber', ['0xd30ca399cb43507ecec6a629a35cf45eb98cda550c27696dcb0d8c4a3873ce6c']), ('uniswap', ['0x7f4091b46c33e918a0f3aa42307641d17bb67029427a5369e54b353984238705', '0xcd60aa75dea3072fbc07ae6d7d856b5dc5f4eee88854f5b4abf7b680ef8bc50f'])):
    12      outfile = 'data/' + exchange[0] + '_relayers'
    13      open(outfile, 'w').write('')
    14      topics = set(exchange[1])
    15      aqp = bigquery.ArrayQueryParameter('topics', 'STRING', topics)
    16      query_params = [aqp]
    17      job_config = bigquery.QueryJobConfig()
    18      job_config.query_parameters = query_params
    19      query_job = client.query(
    20          query,
    21          # Location must match that of the dataset(s) referenced in the query.
    22          location='US',
    23          job_config=job_config)  # API request - starts the query
    24  
    25  
    26      for item in query_job:
    27          open(outfile, 'a').write(item['address'] + '\n')
    28  
    29      assert query_job.state == 'DONE'
    30      print("[database fetcher] Wrote all %s relayers" % (exchange[0]))
    31