I have an Azure Function app running Python code. The app authenticates to BigQuery with a service account JSON key file, reads some data, and then writes the result out as a JSON file.
I have tried everything that seemed sensible, but the Azure ecosystem is strangely convoluted.
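For context, here is my requirements.txt. This package list is my best understanding of what is needed; pandas is in there because to_dataframe() requires it:

azure-functions
google-cloud-bigquery
pandas

And the function itself: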
from google.cloud import bigquery
from google.oauth2 import service_account
import pandas as pd  # required by to_dataframe()
import json
import datetime
import logging
import azure.functions as func


def main(mytimer: func.TimerRequest) -> None:
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()
    if mytimer.past_due:
        logging.info('The timer is past due!')
    logging.info('Python timer trigger function ran at %s', utc_timestamp)

    # RE01
    # Load the service account key and build a BigQuery client from it.
    key_path = "bigquery-key.json"
    credentials = service_account.Credentials.from_service_account_file(
        key_path,
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = bigquery.Client(
        credentials=credentials,
        project=credentials.project_id,
    )

    project_id = "test-database"
    sql = """
        SELECT *
        FROM test.dummy_table
    """

    # Run the query into a dataframe and keep the first 10 values of column1.
    df = client.query(sql).to_dataframe()
    top_ten = df["column1"][:10].to_list()
    with open('top_10.json', 'w') as json_file:
        json.dump(top_ten, json_file)
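To try to find the file myself, I added this logging at the end of main(). This is my own diagnostic, not something from the docs:

import logging
import os

# My own diagnostic: log where the process thinks it is running, since
# open('top_10.json', 'w') resolves against this working directory.
logging.info("Working directory: %s", os.getcwd())
logging.info("Files here: %s", os.listdir(os.getcwd()))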
Can someone tell me where my JSON file is being saved? I cannot seem to get this function to run.
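One thing I did try, in case relative paths are the problem, was resolving everything to absolute paths and writing into the temp directory. The use of __file__ and tempfile here is my guess, not something I have confirmed works on Azure:

import json
import os
import tempfile

# My guess: the key file should be resolved against this function's
# folder, not whatever the process working directory happens to be.
key_path = os.path.join(os.path.dirname(__file__), "bigquery-key.json")

# Placeholder result, just to exercise the write path.
top_ten = [1, 2, 3]

# My guess: the temp directory is writable even if the app content is not.
output_path = os.path.join(tempfile.gettempdir(), "top_10.json")
with open(output_path, "w") as json_file:
    json.dump(top_ten, json_file)
print("Wrote output to", output_path)

Even if that works, it still does not tell me where the original top_10.json ended up.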