1212# See the License for the specific language governing permissions and
1313# limitations under the License.
1414
15+ import copy
1516import re
16- import mock
17- import six
1817from concurrent import futures
1918
19+ import mock
2020import pytest
21+ import six
2122
2223try :
2324 import pandas
@@ -63,6 +64,26 @@ def ipython_interactive(request, ipython):
6364 yield ipython
6465
6566
# Canned BigQuery API resources shared by the magics tests below.  Tests
# deep-copy these and override the fields (project ID, query text) they care
# about instead of rebuilding the whole payload inline.
JOB_REFERENCE_RESOURCE = {"projectId": "its-a-project-eh", "jobId": "some-random-id"}
TABLE_REFERENCE_RESOURCE = {
    "projectId": "its-a-project-eh",
    "datasetId": "ds",
    "tableId": "persons",
}
# A completed query job resource; references the two dicts above.
QUERY_RESOURCE = {
    "jobReference": JOB_REFERENCE_RESOURCE,
    "configuration": {
        "query": {
            "destinationTable": TABLE_REFERENCE_RESOURCE,
            "query": "SELECT 42 FROM `life.the_universe.and_everything`;",
            "queryParameters": [],
            "useLegacySql": False,
        }
    },
    "status": {"state": "DONE"},
}
85+
86+
6687def test_context_credentials_auto_set_w_application_default_credentials ():
6788 """When Application Default Credentials are set, the context credentials
6889 will be created the first time it is called
@@ -117,22 +138,13 @@ def test_context_connection_can_be_overriden():
117138 default_patch = mock .patch (
118139 "google.auth.default" , return_value = (credentials_mock , project )
119140 )
141+ job_reference = copy .deepcopy (JOB_REFERENCE_RESOURCE )
142+ job_reference ["projectId" ] = project
120143
121144 query = "select * from persons"
122- job_reference = {"projectId" : project , "jobId" : "some-random-id" }
123- table = {"projectId" : project , "datasetId" : "ds" , "tableId" : "persons" }
124- resource = {
125- "jobReference" : job_reference ,
126- "configuration" : {
127- "query" : {
128- "destinationTable" : table ,
129- "query" : query ,
130- "queryParameters" : [],
131- "useLegacySql" : False ,
132- }
133- },
134- "status" : {"state" : "DONE" },
135- }
145+ resource = copy .deepcopy (QUERY_RESOURCE )
146+ resource ["jobReference" ] = job_reference
147+ resource ["configuration" ]["query" ]["query" ] = query
136148 data = {"jobReference" : job_reference , "totalRows" : 0 , "rows" : []}
137149
138150 conn = magics .context ._connection = make_connection (resource , data )
@@ -170,22 +182,13 @@ def test_context_no_connection():
170182 default_patch = mock .patch (
171183 "google.auth.default" , return_value = (credentials_mock , project )
172184 )
185+ job_reference = copy .deepcopy (JOB_REFERENCE_RESOURCE )
186+ job_reference ["projectId" ] = project
173187
174188 query = "select * from persons"
175- job_reference = {"projectId" : project , "jobId" : "some-random-id" }
176- table = {"projectId" : project , "datasetId" : "ds" , "tableId" : "persons" }
177- resource = {
178- "jobReference" : job_reference ,
179- "configuration" : {
180- "query" : {
181- "destinationTable" : table ,
182- "query" : query ,
183- "queryParameters" : [],
184- "useLegacySql" : False ,
185- }
186- },
187- "status" : {"state" : "DONE" },
188- }
189+ resource = copy .deepcopy (QUERY_RESOURCE )
190+ resource ["jobReference" ] = job_reference
191+ resource ["configuration" ]["query" ]["query" ] = query
189192 data = {"jobReference" : job_reference , "totalRows" : 0 , "rows" : []}
190193
191194 conn_mock = make_connection (resource , data , data , data )
@@ -549,7 +552,7 @@ def test_bigquery_magic_without_bqstorage(monkeypatch):
549552
550553
551554@pytest .mark .usefixtures ("ipython_interactive" )
552- def test_maximum_bytes_billed_w_int_magic ():
555+ def test_bigquery_magic_w_maximum_bytes_billed ():
553556 ip = IPython .get_ipython ()
554557 ip .extension_manager .load_extension ("google.cloud.bigquery" )
555558 magics .context ._project = None
@@ -576,15 +579,17 @@ def test_maximum_bytes_billed_w_int_magic():
576579 query_job_mock .to_dataframe .return_value = result
577580 with run_query_patch as run_query_mock , default_patch :
578581 run_query_mock .return_value = query_job_mock
579- return_value = ip .run_cell_magic ("bigquery" , "--maximum_bytes_billed=123456789" , sql )
582+ return_value = ip .run_cell_magic (
583+ "bigquery" , "--maximum_bytes_billed=123456789" , sql
584+ )
580585
581586 bqstorage_mock .assert_not_called ()
582587 query_job_mock .to_dataframe .assert_called_once_with (bqstorage_client = None )
583588 assert isinstance (return_value , pandas .DataFrame )
584589
585590
586591@pytest .mark .usefixtures ("ipython_interactive" )
587- def test_maximum_bytes_billed_w_string_params ():
592+ def test_bigquery_magic_w_maximum_bytes_billed_invalid ():
588593 ip = IPython .get_ipython ()
589594 ip .extension_manager .load_extension ("google.cloud.bigquery" )
590595 magics .context ._project = None
@@ -595,39 +600,80 @@ def test_maximum_bytes_billed_w_string_params():
595600 ip .run_cell_magic ("bigquery" , "--maximum_bytes_billed=abc" , sql )
596601
597602
@pytest.mark.parametrize(
    "param_value,expected", [("987654321", "987654321"), ("None", "0")]
)
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_maximum_bytes_billed_overrides_context(param_value, expected):
    """The ``--maximum_bytes_billed`` magic argument overrides the value set
    on ``context.default_query_job_config`` (``"None"`` maps to ``"0"``,
    i.e. no limit, in the request sent to the API).
    """
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    # Set the default maximum bytes billed, so we know it's overridable by the param.
    magics.context.default_query_job_config.maximum_bytes_billed = 1234567

    project = "test-project"
    job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
    job_reference["projectId"] = project
    query = "SELECT 17 AS num"
    resource = copy.deepcopy(QUERY_RESOURCE)
    resource["jobReference"] = job_reference
    resource["configuration"]["query"]["query"] = query
    data = {"jobReference": job_reference, "totalRows": 0, "rows": []}
    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, "general-project")
    )
    conn = magics.context._connection = make_connection(resource, data)
    # Short-circuit fetching query results; we only inspect the job request.
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.client.Client.list_rows",
        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
    )
    with list_rows_patch, default_patch:
        ip.run_cell_magic(
            "bigquery", "--maximum_bytes_billed={}".format(param_value), query
        )

    # The first API request is the job insert; verify the limit that was sent.
    _, req = conn.api_request.call_args_list[0]
    sent_config = req["data"]["configuration"]["query"]
    assert sent_config["maximumBytesBilled"] == expected
643+
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_maximum_bytes_billed_w_context():
    """Without a ``--maximum_bytes_billed`` argument, the limit configured on
    ``context.default_query_job_config`` is sent with the query job request.
    """
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    magics.context.default_query_job_config.maximum_bytes_billed = 1234567

    project = "test-project"
    job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
    job_reference["projectId"] = project
    query = "SELECT 17 AS num"
    resource = copy.deepcopy(QUERY_RESOURCE)
    resource["jobReference"] = job_reference
    resource["configuration"]["query"]["query"] = query
    data = {"jobReference": job_reference, "totalRows": 0, "rows": []}
    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, "general-project")
    )
    conn = magics.context._connection = make_connection(resource, data)
    # Short-circuit fetching query results; we only inspect the job request.
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.client.Client.list_rows",
        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
    )
    with list_rows_patch, default_patch:
        ip.run_cell_magic("bigquery", "", query)

    # The first API request is the job insert; verify the context's limit
    # was serialized into the request payload.
    _, req = conn.api_request.call_args_list[0]
    sent_config = req["data"]["configuration"]["query"]
    assert sent_config["maximumBytesBilled"] == "1234567"
631677
632678
633679@pytest .mark .usefixtures ("ipython_interactive" )
@@ -734,23 +780,3 @@ def test_bigquery_magic_with_improperly_formatted_params():
734780
735781 with pytest .raises (SyntaxError ):
736782 ip .run_cell_magic ("bigquery" , "--params {17}" , sql )
737-
738-
739- def test_maximum_bytes_billed_set_value ():
740- """When Application Default Credentials are set, the context credentials
741- will be created the first time it is called
742- """
743-
744- from google .cloud .bigquery import QueryJobConfig
745- job_config = QueryJobConfig ()
746- magics .context .maximum_bytes_billed = 1234567489
747- assert job_config .maximum_bytes_billed == magics .context .maximum_bytes_billed
748-
749-
750- def test_maximum_bytes_billed_set_string ():
751- """When Application Default Credentials are set, the context credentials
752- will be created the first time it is called
753- """
754- with pytest .raises (ValueError ):
755- magics .context .maximum_bytes_billed = "abc"
756-
0 commit comments