@@ -124,9 +124,9 @@ def _still_in_use(bad_request):
124124 for doomed in self .to_delete :
125125 if isinstance (doomed , Bucket ):
126126 retry_409 (doomed .delete )(force = True )
127- elif isinstance (doomed , Dataset ):
127+ elif isinstance (doomed , ( Dataset , bigquery . DatasetReference ) ):
128128 retry_in_use (Config .CLIENT .delete_dataset )(doomed )
129- elif isinstance (doomed , Table ):
129+ elif isinstance (doomed , ( Table , bigquery . TableReference ) ):
130130 retry_in_use (Config .CLIENT .delete_table )(doomed )
131131 else :
132132 doomed .delete ()
@@ -327,7 +327,7 @@ def _fetch_single_page(table, selected_fields=None):
327327 page = six .next (iterator .pages )
328328 return list (page )
329329
330- def test_create_rows_then_dump_table (self ):
330+ def test_insert_rows_then_dump_table (self ):
331331 NOW_SECONDS = 1448911495.484366
332332 NOW = datetime .datetime .utcfromtimestamp (
333333 NOW_SECONDS ).replace (tzinfo = UTC )
@@ -339,7 +339,7 @@ def test_create_rows_then_dump_table(self):
339339 ]
340340 ROW_IDS = range (len (ROWS ))
341341
342- dataset = self .temp_dataset (_make_dataset_id ('create_rows_then_dump ' ))
342+ dataset = self .temp_dataset (_make_dataset_id ('insert_rows_then_dump ' ))
343343 TABLE_ID = 'test_table'
344344 schema = [
345345 bigquery .SchemaField ('full_name' , 'STRING' , mode = 'REQUIRED' ),
@@ -352,7 +352,7 @@ def test_create_rows_then_dump_table(self):
352352 self .to_delete .insert (0 , table )
353353 self .assertTrue (_table_exists (table ))
354354
355- errors = Config .CLIENT .create_rows (table , ROWS , row_ids = ROW_IDS )
355+ errors = Config .CLIENT .insert_rows (table , ROWS , row_ids = ROW_IDS )
356356 self .assertEqual (len (errors ), 0 )
357357
358358 rows = ()
@@ -1315,7 +1315,7 @@ def test_query_external_table(self):
13151315 self .assertEqual (sorted (row_tuples , key = by_age ),
13161316 sorted (ROWS , key = by_age ))
13171317
1318- def test_create_rows_nested_nested (self ):
1318+ def test_insert_rows_nested_nested (self ):
13191319 # See #2951
13201320 SF = bigquery .SchemaField
13211321 schema = [
@@ -1342,14 +1342,14 @@ def test_create_rows_nested_nested(self):
13421342 table = retry_403 (Config .CLIENT .create_table )(table_arg )
13431343 self .to_delete .insert (0 , table )
13441344
1345- Config .CLIENT .create_rows (table , to_insert )
1345+ Config .CLIENT .insert_rows (table , to_insert )
13461346
13471347 retry = RetryResult (_has_rows , max_tries = 8 )
13481348 rows = retry (self ._fetch_single_page )(table )
13491349 row_tuples = [r .values () for r in rows ]
13501350 self .assertEqual (row_tuples , to_insert )
13511351
1352- def test_create_rows_nested_nested_dictionary (self ):
1352+ def test_insert_rows_nested_nested_dictionary (self ):
13531353 # See #2951
13541354 SF = bigquery .SchemaField
13551355 schema = [
@@ -1376,7 +1376,7 @@ def test_create_rows_nested_nested_dictionary(self):
13761376 table = retry_403 (Config .CLIENT .create_table )(table_arg )
13771377 self .to_delete .insert (0 , table )
13781378
1379- Config .CLIENT .create_rows (table , to_insert )
1379+ Config .CLIENT .insert_rows (table , to_insert )
13801380
13811381 retry = RetryResult (_has_rows , max_tries = 8 )
13821382 rows = retry (self ._fetch_single_page )(table )
@@ -1402,7 +1402,7 @@ def test_create_table_rows_fetch_nested_schema(self):
14021402 for line in rows_file :
14031403 to_insert .append (json .loads (line ))
14041404
1405- errors = Config .CLIENT .create_rows_json (table , to_insert )
1405+ errors = Config .CLIENT .insert_rows_json (table , to_insert )
14061406 self .assertEqual (len (errors ), 0 )
14071407
14081408 retry = RetryResult (_has_rows , max_tries = 8 )
@@ -1467,19 +1467,24 @@ def test_nested_table_to_dataframe(self):
14671467 'nested_record' : {'nested_nested_string' : 'some deep insight' },
14681468 }
14691469 to_insert = [
1470- ( ' Some value' , record )
1470+ { 'string_col' : ' Some value' , 'record_col' : record },
14711471 ]
1472+ rows = [json .dumps (row ) for row in to_insert ]
1473+ body = six .StringIO ('{}\n ' .format ('\n ' .join (rows )))
14721474 table_id = 'test_table'
14731475 dataset = self .temp_dataset (_make_dataset_id ('nested_df' ))
1474- table_arg = Table (dataset .table (table_id ), schema = schema )
1475- table = retry_403 (Config .CLIENT .create_table )(table_arg )
1476+ table = dataset .table (table_id )
14761477 self .to_delete .insert (0 , table )
1477- Config .CLIENT .create_rows (table , to_insert )
1478- QUERY = 'SELECT * from `{}.{}.{}`' .format (
1479- Config .CLIENT .project , dataset .dataset_id , table_id )
1480-
1481- retry = RetryResult (_has_rows , max_tries = 8 )
1482- df = retry (self ._fetch_dataframe )(QUERY )
1478+ job_config = bigquery .LoadJobConfig ()
1479+ job_config .write_disposition = 'WRITE_TRUNCATE'
1480+ job_config .source_format = 'NEWLINE_DELIMITED_JSON'
1481+ job_config .schema = schema
1482+ # Load a table using a local JSON file from memory.
1483+ Config .CLIENT .load_table_from_file (
1484+ body , table , job_config = job_config ).result ()
1485+
1486+ df = Config .CLIENT .list_rows (
1487+ table , selected_fields = schema ).to_dataframe ()
14831488
14841489 self .assertIsInstance (df , pandas .DataFrame )
14851490 self .assertEqual (len (df ), 1 ) # verify the number of rows