diff --git a/google/cloud/bigquery/job.py b/google/cloud/bigquery/job.py index 70db69e71..766db1d42 100644 --- a/google/cloud/bigquery/job.py +++ b/google/cloud/bigquery/job.py @@ -35,6 +35,7 @@ from google.cloud.bigquery.external_config import ExternalConfig from google.cloud.bigquery.external_config import HivePartitioningOptions from google.cloud.bigquery import _helpers +from google.cloud.bigquery.model import ModelReference from google.cloud.bigquery.query import _query_param_from_api_repr from google.cloud.bigquery.query import ArrayQueryParameter from google.cloud.bigquery.query import ScalarQueryParameter @@ -47,8 +48,9 @@ from google.cloud.bigquery.table import _EmptyRowIterator from google.cloud.bigquery.table import RangePartitioning from google.cloud.bigquery.table import _table_arg_to_table_ref -from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.table import Table +from google.cloud.bigquery.table import TableListItem +from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.table import TimePartitioning _DONE_STATE = "DONE" @@ -461,11 +463,11 @@ def created(self): Optional[datetime.datetime]: the creation time (None until set from the server). """ - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("creationTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) + millis = _helpers._get_sub_prop( + self._properties, ["statistics", "creationTime"] + ) + if millis is not None: + return _helpers._datetime_from_microseconds(millis * 1000.0) @property def started(self): @@ -475,11 +477,9 @@ def started(self): Optional[datetime.datetime]: the start time (None until set from the server). 
""" - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("startTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) + millis = _helpers._get_sub_prop(self._properties, ["statistics", "startTime"]) + if millis is not None: + return _helpers._datetime_from_microseconds(millis * 1000.0) @property def ended(self): @@ -489,11 +489,9 @@ def ended(self): Optional[datetime.datetime]: the end time (None until set from the server). """ - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("endTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) + millis = _helpers._get_sub_prop(self._properties, ["statistics", "endTime"]) + if millis is not None: + return _helpers._datetime_from_microseconds(millis * 1000.0) def _job_statistics(self): """Helper for job-type specific statistics-based properties.""" @@ -535,14 +533,6 @@ def state(self): if status is not None: return status.get("state") - def _scrub_local_properties(self, cleaned): - """Helper: handle subclass properties in cleaned.""" - pass - - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - raise NotImplementedError("Abstract") - def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` @@ -550,7 +540,6 @@ def _set_properties(self, api_response): api_response (Dict): response returned from an API call. """ cleaned = api_response.copy() - self._scrub_local_properties(cleaned) statistics = cleaned.get("statistics", {}) if "creationTime" in statistics: @@ -560,25 +549,24 @@ def _set_properties(self, api_response): if "endTime" in statistics: statistics["endTime"] = float(statistics["endTime"]) + # Save configuration to keep reference same in self._configuration. 
+ cleaned_config = cleaned.pop("configuration", {}) + configuration = self._properties.pop("configuration", {}) self._properties.clear() self._properties.update(cleaned) - self._copy_configuration_properties(cleaned.get("configuration", {})) + self._properties["configuration"] = configuration + self._properties["configuration"].update(cleaned_config) # For Future interface self._set_future_result() @classmethod - def _get_resource_config(cls, resource): + def _check_resource_config(cls, resource): """Helper for :meth:`from_api_repr` Args: resource (Dict): resource for the job. - Returns: - (str, Dict): - tuple (string, dict), where the first element is the - job ID and the second contains job-specific configuration. - Raises: KeyError: If the resource has no identifier, or @@ -589,7 +577,6 @@ def _get_resource_config(cls, resource): "Resource lacks required identity information: " '["jobReference"]["jobId"]' ) - job_id = resource["jobReference"]["jobId"] if ( "configuration" not in resource or cls._JOB_TYPE not in resource["configuration"] @@ -598,7 +585,6 @@ def _get_resource_config(cls, resource): "Resource lacks required configuration: " '["configuration"]["%s"]' % cls._JOB_TYPE ) - return job_id, resource["configuration"] def to_api_repr(self): """Generate a resource for the job.""" @@ -1002,15 +988,15 @@ def from_api_repr(cls, resource): Args: resource (Dict): - An extract job configuration in the same representation as is - returned from the API. + A job configuration in the same representation as is returned + from the API. Returns: google.cloud.bigquery.job._JobConfig: Configuration parsed from ``resource``. 
""" - config = cls() - config._properties = copy.deepcopy(resource) - return config + job_config = cls() + job_config._properties = resource + return job_config class LoadJobConfig(_JobConfig): @@ -1450,12 +1436,23 @@ class LoadJob(_AsyncJob): def __init__(self, job_id, source_uris, destination, client, job_config=None): super(LoadJob, self).__init__(job_id, client) - if job_config is None: + if not job_config: job_config = LoadJobConfig() - self.source_uris = source_uris - self._destination = destination self._configuration = job_config + self._properties["configuration"] = job_config._properties + + if source_uris is not None: + _helpers._set_sub_prop( + self._properties, ["configuration", "load", "sourceUris"], source_uris + ) + + if destination is not None: + _helpers._set_sub_prop( + self._properties, + ["configuration", "load", "destinationTable"], + destination.to_api_repr(), + ) @property def destination(self): @@ -1464,7 +1461,20 @@ def destination(self): See: https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_table """ - return self._destination + dest_config = _helpers._get_sub_prop( + self._properties, ["configuration", "load", "destinationTable"] + ) + return TableReference.from_api_repr(dest_config) + + @property + def source_uris(self): + """Optional[Sequence[str]]: URIs of data files to be loaded. See + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_uris + for supported URI formats. None for jobs that load from a file. 
+ """ + return _helpers._get_sub_prop( + self._properties, ["configuration", "load", "sourceUris"] + ) @property def allow_jagged_rows(self): @@ -1687,24 +1697,12 @@ def output_rows(self): def to_api_repr(self): """Generate a resource for :meth:`_begin`.""" - configuration = self._configuration.to_api_repr() - if self.source_uris is not None: - _helpers._set_sub_prop( - configuration, ["load", "sourceUris"], self.source_uris - ) - _helpers._set_sub_prop( - configuration, ["load", "destinationTable"], self.destination.to_api_repr() - ) - + # Exclude statistics, if set. return { "jobReference": self._properties["jobReference"], - "configuration": configuration, + "configuration": self._properties["configuration"], } - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) - @classmethod def from_api_repr(cls, resource, client): """Factory: construct a job given its API representation @@ -1724,16 +1722,9 @@ def from_api_repr(cls, resource, client): Returns: google.cloud.bigquery.job.LoadJob: Job parsed from ``resource``. """ - config_resource = resource.get("configuration", {}) - config = LoadJobConfig.from_api_repr(config_resource) - # A load job requires a destination table. - dest_config = config_resource["load"]["destinationTable"] - ds_ref = DatasetReference(dest_config["projectId"], dest_config["datasetId"]) - destination = TableReference(ds_ref, dest_config["tableId"]) - # sourceUris will be absent if this is a file upload. 
- source_uris = _helpers._get_sub_prop(config_resource, ["load", "sourceUris"]) + cls._check_resource_config(resource) job_ref = _JobReference._from_api_repr(resource["jobReference"]) - job = cls(job_ref, source_uris, destination, client, config) + job = cls(job_ref, None, None, client) job._set_properties(resource) return job @@ -1824,12 +1815,59 @@ class CopyJob(_AsyncJob): def __init__(self, job_id, sources, destination, client, job_config=None): super(CopyJob, self).__init__(job_id, client) - if job_config is None: + if not job_config: job_config = CopyJobConfig() - self.destination = destination - self.sources = sources self._configuration = job_config + self._properties["configuration"] = job_config._properties + + if destination: + _helpers._set_sub_prop( + self._properties, + ["configuration", "copy", "destinationTable"], + destination.to_api_repr(), + ) + + if sources: + source_resources = [source.to_api_repr() for source in sources] + _helpers._set_sub_prop( + self._properties, + ["configuration", "copy", "sourceTables"], + source_resources, + ) + + @property + def destination(self): + """google.cloud.bigquery.table.TableReference: Table into which data + is to be loaded. + """ + return TableReference.from_api_repr( + _helpers._get_sub_prop( + self._properties, ["configuration", "copy", "destinationTable"], + ) + ) + + @property + def sources(self): + """List[google.cloud.bigquery.table.TableReference]: Table(s) from + which data is to be loaded. 
+ """ + source_configs = _helpers._get_sub_prop( + self._properties, ["configuration", "copy", "sourceTables"] + ) + if source_configs is None: + single = _helpers._get_sub_prop( + self._properties, ["configuration", "copy", "sourceTable"] + ) + if single is None: + raise KeyError("Resource missing 'sourceTables' / 'sourceTable'") + source_configs = [single] + + sources = [] + for source_config in source_configs: + table_ref = TableReference.from_api_repr(source_config) + sources.append(table_ref) + return sources @property def create_disposition(self): @@ -1860,40 +1898,15 @@ def destination_encryption_configuration(self): def to_api_repr(self): """Generate a resource for :meth:`_begin`.""" - - source_refs = [ - { - "projectId": table.project, - "datasetId": table.dataset_id, - "tableId": table.table_id, - } - for table in self.sources - ] - - configuration = self._configuration.to_api_repr() - _helpers._set_sub_prop(configuration, ["copy", "sourceTables"], source_refs) - _helpers._set_sub_prop( - configuration, - ["copy", "destinationTable"], - { - "projectId": self.destination.project, - "datasetId": self.destination.dataset_id, - "tableId": self.destination.table_id, - }, - ) - + # Exclude statistics, if set. return { "jobReference": self._properties["jobReference"], - "configuration": configuration, + "configuration": self._properties["configuration"], } - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) - @classmethod def from_api_repr(cls, resource, client): - """Factory: construct a job given its API representation + """Factory: construct a job given its API representation .. 
note: @@ -1902,7 +1915,6 @@ def from_api_repr(cls, resource, client): Args: resource (Dict): dataset job representation returned from the API - client (google.cloud.bigquery.client.Client): Client which holds credentials and project configuration for the dataset. @@ -1910,22 +1922,9 @@ def from_api_repr(cls, resource, client): Returns: google.cloud.bigquery.job.CopyJob: Job parsed from ``resource``. """ - job_id, config_resource = cls._get_resource_config(resource) - config = CopyJobConfig.from_api_repr(config_resource) - # Copy required fields to the job. - copy_resource = config_resource["copy"] - destination = TableReference.from_api_repr(copy_resource["destinationTable"]) - sources = [] - source_configs = copy_resource.get("sourceTables") - if source_configs is None: - single = copy_resource.get("sourceTable") - if single is None: - raise KeyError("Resource missing 'sourceTables' / 'sourceTable'") - source_configs = [single] - for source_config in source_configs: - table_ref = TableReference.from_api_repr(source_config) - sources.append(table_ref) - job = cls(job_id, sources, destination, client=client, job_config=config) + cls._check_resource_config(resource) + job_ref = _JobReference._from_api_repr(resource["jobReference"]) + job = cls(job_ref, None, None, client=client) job._set_properties(resource) return job @@ -2038,10 +2037,61 @@ def __init__(self, job_id, source, destination_uris, client, job_config=None): if job_config is None: job_config = ExtractJobConfig() - self.source = source - self.destination_uris = destination_uris + self._properties["configuration"] = job_config._properties self._configuration = job_config + if source: + source_ref = { + "projectId": source.project, + "datasetId": source.dataset_id, + } + + if isinstance(source, (Table, TableListItem, TableReference)): + source_ref["tableId"] = source.table_id + source_key = "sourceTable" + else: + source_ref["modelId"] = source.model_id + source_key = "sourceModel" + + 
_helpers._set_sub_prop( + self._properties, ["configuration", "extract", source_key], source_ref + ) + + if destination_uris: + _helpers._set_sub_prop( + self._properties, + ["configuration", "extract", "destinationUris"], + destination_uris, + ) + + @property + def source(self): + """Union[ \ + google.cloud.bigquery.table.TableReference, \ + google.cloud.bigquery.model.ModelReference \ + ]: Table or Model from which data is to be loaded or extracted. + """ + source_config = _helpers._get_sub_prop( + self._properties, ["configuration", "extract", "sourceTable"] + ) + if source_config: + return TableReference.from_api_repr(source_config) + else: + source_config = _helpers._get_sub_prop( + self._properties, ["configuration", "extract", "sourceModel"] + ) + return ModelReference.from_api_repr(source_config) + + @property + def destination_uris(self): + """List[str]: URIs describing where the extracted data will be + written in Cloud Storage, using the format + ``gs://<bucket_name>/<object_name_or_glob>``. + """ + return _helpers._get_sub_prop( + self._properties, ["configuration", "extract", "destinationUris"] + ) + @property def compression(self): """See @@ -2092,34 +2142,12 @@ def destination_uri_file_counts(self): def to_api_repr(self): """Generate a resource for :meth:`_begin`.""" - - configuration = self._configuration.to_api_repr() - source_ref = { - "projectId": self.source.project, - "datasetId": self.source.dataset_id, - } - - source = "sourceTable" - if isinstance(self.source, TableReference): - source_ref["tableId"] = self.source.table_id - else: - source_ref["modelId"] = self.source.model_id - source = "sourceModel" - - _helpers._set_sub_prop(configuration, ["extract", source], source_ref) - _helpers._set_sub_prop( - configuration, ["extract", "destinationUris"], self.destination_uris - ) - + # Exclude statistics, if set. 
return { "jobReference": self._properties["jobReference"], - "configuration": configuration, + "configuration": self._properties["configuration"], } - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) - @classmethod def from_api_repr(cls, resource, client): """Factory: construct a job given its API representation @@ -2139,30 +2167,9 @@ def from_api_repr(cls, resource, client): Returns: google.cloud.bigquery.job.ExtractJob: Job parsed from ``resource``. """ - job_id, config_resource = cls._get_resource_config(resource) - config = ExtractJobConfig.from_api_repr(config_resource) - source_config = _helpers._get_sub_prop( - config_resource, ["extract", "sourceTable"] - ) - if source_config: - dataset = DatasetReference( - source_config["projectId"], source_config["datasetId"] - ) - source = dataset.table(source_config["tableId"]) - else: - source_config = _helpers._get_sub_prop( - config_resource, ["extract", "sourceModel"] - ) - dataset = DatasetReference( - source_config["projectId"], source_config["datasetId"] - ) - source = dataset.model(source_config["modelId"]) - - destination_uris = _helpers._get_sub_prop( - config_resource, ["extract", "destinationUris"] - ) - - job = cls(job_id, source, destination_uris, client=client, job_config=config) + cls._check_resource_config(resource) + job_ref = _JobReference._from_api_repr(resource["jobReference"]) + job = cls(job_ref, None, None, client=client) job._set_properties(resource) return job @@ -2631,11 +2638,14 @@ def __init__(self, job_id, query, client, job_config=None): if job_config.use_legacy_sql is None: job_config.use_legacy_sql = False - _helpers._set_sub_prop( - self._properties, ["configuration", "query", "query"], query - ) - + self._properties["configuration"] = job_config._properties self._configuration = job_config + + if query: + _helpers._set_sub_prop( + 
self._properties, ["configuration", "query", "query"], query + ) + self._query_results = None self._done_timeout = None self._transport_timeout = None @@ -2799,19 +2809,13 @@ def schema_update_options(self): def to_api_repr(self): """Generate a resource for :meth:`_begin`.""" + # Use to_api_repr to allow for some configuration properties to be set + # automatically. configuration = self._configuration.to_api_repr() - - resource = { + return { "jobReference": self._properties["jobReference"], "configuration": configuration, } - configuration["query"]["query"] = self.query - - return resource - - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) @classmethod def from_api_repr(cls, resource, client): @@ -2827,9 +2831,9 @@ def from_api_repr(cls, resource, client): Returns: google.cloud.bigquery.job.QueryJob: Job parsed from ``resource``. """ - job_id, config = cls._get_resource_config(resource) - query = _helpers._get_sub_prop(config, ["query", "query"]) - job = cls(job_id, query, client=client) + cls._check_resource_config(resource) + job_ref = _JobReference._from_api_repr(resource["jobReference"]) + job = cls(job_ref, None, client=client) job._set_properties(resource) return job diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 52e00d7c7..bc2658961 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -4266,7 +4266,7 @@ def test_load_table_from_uri(self): self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(list(job.source_uris), [SOURCE_URI]) - self.assertIs(job.destination, destination) + self.assertEqual(job.destination, destination) conn = client._connection = make_connection(RESOURCE) @@ -4275,7 +4275,7 @@ def test_load_table_from_uri(self): self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(list(job.source_uris), 
[SOURCE_URI]) - self.assertIs(job.destination, destination) + self.assertEqual(job.destination, destination) def test_load_table_from_uri_w_explicit_project(self): job_id = "this-is-a-job-id" @@ -4576,16 +4576,67 @@ def test_copy_table(self): self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(list(job.sources), [source]) - self.assertIs(job.destination, destination) + self.assertEqual(job.destination, destination) - conn = client._connection = make_connection(RESOURCE) - source2 = dataset.table(SOURCE + "2") - job = client.copy_table([source, source2], destination, job_id=JOB) + def test_copy_table_w_multiple_sources(self): + from google.cloud.bigquery.job import CopyJob + from google.cloud.bigquery.table import TableReference + + job_id = "job_name" + source_id = "my-project.my_dataset.source_table" + source_id2 = "my-project.my_dataset.source_table2" + destination_id = "my-other-project.another_dataset.destination_table" + expected_resource = { + "jobReference": {"projectId": self.PROJECT, "jobId": job_id}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": "my-project", + "datasetId": "my_dataset", + "tableId": "source_table", + }, + { + "projectId": "my-project", + "datasetId": "my_dataset", + "tableId": "source_table2", + }, + ], + "destinationTable": { + "projectId": "my-other-project", + "datasetId": "another_dataset", + "tableId": "destination_table", + }, + } + }, + } + returned_resource = expected_resource.copy() + returned_resource["statistics"] = {} + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection(returned_resource) + + job = client.copy_table([source_id, source_id2], destination_id, job_id=job_id) + + # Check that copy_table actually starts the job. 
+ conn.api_request.assert_called_once_with( + method="POST", + path="/projects/%s/jobs" % self.PROJECT, + data=expected_resource, + timeout=None, + ) self.assertIsInstance(job, CopyJob) self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(list(job.sources), [source, source2]) - self.assertIs(job.destination, destination) + self.assertEqual(job.job_id, job_id) + self.assertEqual( + list(sorted(job.sources, key=lambda tbl: tbl.table_id)), + [ + TableReference.from_string(source_id), + TableReference.from_string(source_id2), + ], + ) + self.assertEqual(job.destination, TableReference.from_string(destination_id)) def test_copy_table_w_explicit_project(self): job_id = "this-is-a-job-id" diff --git a/tests/unit/test_job.py b/tests/unit/test_job.py index d21489616..75212ae95 100644 --- a/tests/unit/test_job.py +++ b/tests/unit/test_job.py @@ -455,28 +455,9 @@ def test_state(self): status["state"] = state self.assertEqual(job.state, state) - def test__scrub_local_properties(self): - before = {"foo": "bar"} - resource = before.copy() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._scrub_local_properties(resource) # no raise - self.assertEqual(resource, before) - - def test__copy_configuration_properties(self): - before = {"foo": "bar"} - resource = before.copy() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - with self.assertRaises(NotImplementedError): - job._copy_configuration_properties(resource) - self.assertEqual(resource, before) - def _set_properties_job(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._scrub_local_properties = mock.Mock() - job._copy_configuration_properties = mock.Mock() job._set_future_result = mock.Mock() job._properties = { "jobReference": job._properties["jobReference"], @@ -493,9 +474,6 @@ def test__set_properties_no_stats(self): self.assertEqual(job._properties, 
resource) - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - def test__set_properties_w_creation_time(self): now, millis = self._datetime_and_millis() config = {"test": True} @@ -509,9 +487,6 @@ def test__set_properties_w_creation_time(self): cleaned["statistics"]["creationTime"] = float(millis) self.assertEqual(job._properties, cleaned) - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - def test__set_properties_w_start_time(self): now, millis = self._datetime_and_millis() config = {"test": True} @@ -525,9 +500,6 @@ def test__set_properties_w_start_time(self): cleaned["statistics"]["startTime"] = float(millis) self.assertEqual(job._properties, cleaned) - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - def test__set_properties_w_end_time(self): now, millis = self._datetime_and_millis() config = {"test": True} @@ -541,38 +513,35 @@ def test__set_properties_w_end_time(self): cleaned["statistics"]["endTime"] = float(millis) self.assertEqual(job._properties, cleaned) - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - - def test__get_resource_config_missing_job_ref(self): + def test__check_resource_config_missing_job_ref(self): resource = {} klass = self._make_derived_class() with self.assertRaises(KeyError): - klass._get_resource_config(resource) + klass._check_resource_config(resource) - def test__get_resource_config_missing_job_id(self): + def test__check_resource_config_missing_job_id(self): resource = {"jobReference": {}} klass = self._make_derived_class() with self.assertRaises(KeyError): - klass._get_resource_config(resource) + klass._check_resource_config(resource) - def test__get_resource_config_missing_configuration(self): + def 
test__check_resource_config_missing_configuration(self): resource = {"jobReference": {"jobId": self.JOB_ID}} klass = self._make_derived_class() with self.assertRaises(KeyError): - klass._get_resource_config(resource) + klass._check_resource_config(resource) - def test__get_resource_config_missing_config_type(self): + def test__check_resource_config_missing_config_type(self): resource = {"jobReference": {"jobId": self.JOB_ID}, "configuration": {}} klass = self._make_derived_class() with self.assertRaises(KeyError): - klass._get_resource_config(resource) + klass._check_resource_config(resource) - def test__get_resource_config_ok(self): + def test__check_resource_config_ok(self): derived_config = {"foo": "bar"} resource = { "jobReference": {"jobId": self.JOB_ID}, @@ -580,10 +549,8 @@ def test__get_resource_config_ok(self): } klass = self._make_derived_class() - job_id, config = klass._get_resource_config(resource) - - self.assertEqual(job_id, self.JOB_ID) - self.assertEqual(config, {"derived": derived_config}) + # Should not throw. 
+ klass._check_resource_config(resource) def test__build_resource(self): client = _make_client(project=self.PROJECT) @@ -2093,7 +2060,7 @@ def _verifyResourceProperties(self, job, resource): def test_ctor(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - self.assertIs(job.destination, self.TABLE_REF) + self.assertEqual(job.destination, self.TABLE_REF) self.assertEqual(list(job.source_uris), [self.SOURCE1]) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) @@ -2907,7 +2874,7 @@ def test_ctor(self): source = self._table_ref(self.SOURCE_TABLE) destination = self._table_ref(self.DESTINATION_TABLE) job = self._make_one(self.JOB_ID, [source], destination, client) - self.assertIs(job.destination, destination) + self.assertEqual(job.destination, destination) self.assertEqual(job.sources, [source]) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) @@ -3041,8 +3008,9 @@ def test_from_api_repr_wo_sources(self): }, } klass = self._get_target_class() + job = klass.from_api_repr(RESOURCE, client=client) with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) + _ = job.sources def test_from_api_repr_w_properties(self): from google.cloud.bigquery.job import CreateDisposition