python-3.x google-bigquery google-cloud-platform google-cloud-functions
I have a daily CSV file that lands in a bucket on Google Cloud Storage, and I have built a Cloud Function that loads that CSV and appends it to a table in BigQuery. However, I would like the function to add a new column, an id (context["id"]), to the CSV before the data is loaded into BigQuery.
Is that possible?
Thanks in advance!
from google.cloud import bigquery


def TimeTableToBigQuery(data, context):
    # Metadata about the uploaded file: execution id, bucket, object name and creation time
    execution_id = context['event_id']
    bucketname = data['bucket']
    filename = data['name']
    timeCreated = data['timeCreated']
    pathtofile = data['id'].rsplit('/', 2)
    # parent_folder = data['id'].rsplit('/', 3)
    file = str(pathtofile[1])
    name = file.split('---')
    dates = name[0].split('_', 1)
    arrivedat = str(dates[1])
    path = pathtofile[0]
    # parent_folder = parent_folder[1]

    # Configure the load job before sending it to BigQuery
    client = bigquery.Client()
    dataset_id = 'nature_bi'
    dataset_ref = client.dataset(dataset_id)
    job_config = bigquery.LoadJobConfig()
    job_config.skip_leading_rows = 1
    job_config.field_delimiter = ';'
    job_config.allow_jagged_rows = True
    job_config.allow_quoted_newlines = True
    job_config.write_disposition = 'WRITE_TRUNCATE'
    job_config.source_format = bigquery.SourceFormat.CSV
    job_config.schema = [
        bigquery.SchemaField('Anstallningsnummer', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Datum', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Kod', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Kostnadsstalle', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Tidkod', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('OB_tidkod', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Dagsschema', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Schemalagd_arbetstid', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Summa_narvaro', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Summa_franvaro', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Datum_for_klarmarkering', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Datum_for_attestering', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Frislappsdatum', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Export_klockslag', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('Vecka', 'STRING', mode='NULLABLE'),
        bigquery.SchemaField('RowHashKey', 'STRING', mode='NULLABLE', description='to be written in BQ'),
        bigquery.SchemaField('MergeState', 'INTEGER', mode='NULLABLE', description='for merging data in BQ'),
        bigquery.SchemaField('SourceName', 'STRING', mode='NULLABLE', description='Path to file'),
        bigquery.SchemaField('SourceScript', 'STRING', mode='NULLABLE', description='Path to file'),
        bigquery.SchemaField('ArriveDateTime', 'STRING', mode='NULLABLE', description='Path to file'),
        bigquery.SchemaField('InsertDateTime', 'STRING', mode='NULLABLE', description='Path to file'),
        bigquery.SchemaField('ExecutionID', 'STRING', mode='NULLABLE', description='Path to file')
    ]
    uri = 'gs://%s/%s' % (bucketname, filename)
    print('Received file "%s" at %s.' % (uri, timeCreated))

    tablename = 'employee_time'
    table_id = dataset_ref.table(tablename)

    # Send the configured load job to BigQuery
    load_job = client.load_table_from_uri(
        uri,
        table_id,
        job_config=job_config)

    # Log some information so we can track the run
    print('Starting job with ID {}'.format(load_job.job_id))
    print('File: {}'.format(data['name']))
    load_job.result()  # wait for the table load to complete
    print('Job finished.')

    destination_table = client.get_table(dataset_ref.table(tablename))
    print('Loaded {} rows.'.format(destination_table.num_rows))
You have 3 ways to achieve this:

1. Rework the CSV inside the Cloud Function before loading it: read the file from the bucket, append the extra column(s), and load the modified content (see the first sketch below).

2. Load the file as-is into a temporary table, then run insert into <your table> select *, CURRENT_TIMESTAMP() AS InsertDateTime, <your executionId> AS ExecutionId FROM <temp table> (see the second sketch below).

3. If you perform a WRITE_TRUNCATE (as in your code example), you can do something smarter: load each file into its own table named nature_bi_<insertDate>_<executionId> and query them all through a wildcard table, recovering the metadata from the table name:

SELECT *,
       (SELECT table_id
        FROM `<project>.<dataset>.__TABLES_SUMMARY__`
        WHERE table_id LIKE 'nature_bi%')
FROM `<project>.<dataset>.nature_bi*`
LIMIT 1000
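For the first option, here is a minimal sketch of how the function could append the extra column in memory and load the result with load_table_from_file instead of load_table_from_uri. It assumes the google-cloud-storage client, a semicolon-delimited file as in your job_config, and that you line the appended values up with the columns of your target schema; treat it as a starting point rather than a drop-in implementation.

import csv
import io

from google.cloud import bigquery, storage


def load_csv_with_execution_id(data, context, table_id, job_config):
    # Download the uploaded object (bucket and name come from the trigger payload).
    storage_client = storage.Client()
    blob = storage_client.bucket(data['bucket']).blob(data['name'])
    raw = blob.download_as_string().decode('utf-8')

    reader = csv.reader(io.StringIO(raw), delimiter=';')
    out = io.StringIO()
    writer = csv.writer(out, delimiter=';')

    header = next(reader)
    writer.writerow(header + ['ExecutionID'])
    for row in reader:
        # With the full schema from the question you would also pad the intermediate
        # columns (RowHashKey, MergeState, ...) so ExecutionID ends up in the last position.
        writer.writerow(row + [context['event_id']])

    # Load the modified CSV from memory instead of from the GCS URI.
    client = bigquery.Client()
    load_job = client.load_table_from_file(
        io.BytesIO(out.getvalue().encode('utf-8')),
        table_id,
        job_config=job_config)
    load_job.result()
    return load_job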
All the solutions are valid; which one to choose depends on your constraints and the size of your files.
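For the temporary-table route (option 2), here is a minimal sketch of the follow-up query job. The project, dataset and staging-table names (my-project, nature_bi.employee_time_tmp) are placeholders, and the SELECT list is simplified: in practice it must match the column order of the destination table, which is why the timestamp is cast to STRING to fit the all-STRING schema from the question.

from google.cloud import bigquery


def copy_staging_to_final(execution_id):
    client = bigquery.Client()

    # The existing load job writes the raw CSV into the staging table first;
    # this INSERT adds the metadata columns while copying rows into the final table.
    sql = """
        INSERT INTO `my-project.nature_bi.employee_time`
        SELECT
            *,
            CAST(CURRENT_TIMESTAMP() AS STRING) AS InsertDateTime,
            @execution_id AS ExecutionID
        FROM `my-project.nature_bi.employee_time_tmp`
    """
    job_config = bigquery.QueryJobConfig(
        query_parameters=[
            bigquery.ScalarQueryParameter('execution_id', 'STRING', execution_id),
        ]
    )
    query_job = client.query(sql, job_config=job_config)
    query_job.result()  # wait for the INSERT to finish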