diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 3ce498ac28ea..7188f2b1ad2b 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -14,9 +14,11 @@ """Client for interacting with the Google Cloud Logging API.""" - from gcloud.client import JSONClient from gcloud.logging.connection import Connection +from gcloud.logging.connection import _LoggingAPI +from gcloud.logging.connection import _MetricsAPI +from gcloud.logging.connection import _SinksAPI from gcloud.logging.entries import ProtobufEntry from gcloud.logging.entries import StructEntry from gcloud.logging.entries import TextEntry @@ -47,6 +49,41 @@ class Client(JSONClient): """ _connection_class = Connection + _logging_api = _sinks_api = _metrics_api = None + + @property + def logging_api(self): + """Helper for logging-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + """ + if self._logging_api is None: + self._logging_api = _LoggingAPI(self.connection) + return self._logging_api + + @property + def sinks_api(self): + """Helper for log sink-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + """ + if self._sinks_api is None: + self._sinks_api = _SinksAPI(self.connection) + return self._sinks_api + + @property + def metrics_api(self): + """Helper for log metric-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + """ + if self._metrics_api is None: + self._metrics_api = _MetricsAPI(self.connection) + return self._metrics_api def logger(self, name): """Creates a logger bound to the current client. @@ -120,22 +157,9 @@ def list_entries(self, projects=None, filter_=None, order_by=None, if projects is None: projects = [self.project] - params = {'projectIds': projects} - - if filter_ is not None: - params['filter'] = filter_ - - if order_by is not None: - params['orderBy'] = order_by - - if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - - resp = self.connection.api_request(method='POST', path='/entries:list', - data=params) + resp = self.logging_api.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) loggers = {} entries = [self._entry_from_resource(resource, loggers) for resource in resp.get('entries', ())] @@ -181,17 +205,7 @@ def list_sinks(self, page_size=None, page_token=None): more sinks can be retrieved with another call (pass that value as ``page_token``). """ - params = {} - - if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - - path = '/projects/%s/sinks' % (self.project,) - resp = self.connection.api_request(method='GET', path=path, - query_params=params) + resp = self.sinks_api.list_sinks(self.project, page_size, page_token) sinks = [Sink.from_api_repr(resource, self) for resource in resp.get('sinks', ())] return sinks, resp.get('nextPageToken') @@ -235,17 +249,8 @@ def list_metrics(self, page_size=None, page_token=None): more metrics can be retrieved with another call (pass that value as ``page_token``). 
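The three new properties above share one lazily-created, cached-helper idiom: the class attribute starts as None, the first access builds the helper around the client's connection, and every later access returns the same object. A minimal standalone sketch of that idiom (the ExampleClient and _EntriesAPI names are illustrative, not part of this change):

    class _EntriesAPI(object):
        """Stand-in for a per-resource helper such as _LoggingAPI."""

        def __init__(self, connection):
            self._connection = connection


    class ExampleClient(object):
        """Illustrates the cached-property idiom used by Client above."""

        _entries_api = None

        def __init__(self, connection):
            self.connection = connection

        @property
        def entries_api(self):
            # Build the helper on first access, then hand back the cached one.
            if self._entries_api is None:
                self._entries_api = _EntriesAPI(self.connection)
            return self._entries_api


    client = ExampleClient(connection=object())
    assert client.entries_api is client.entries_api  # cached, as the new tests assert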
""" - params = {} - - if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - - path = '/projects/%s/metrics' % (self.project,) - resp = self.connection.api_request(method='GET', path=path, - query_params=params) + resp = self.metrics_api.list_metrics( + self.project, page_size, page_token) metrics = [Metric.from_api_repr(resource, self) for resource in resp.get('metrics', ())] return metrics, resp.get('nextPageToken') diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py index 1c330a28529e..83e1eadcd74c 100644 --- a/gcloud/logging/connection.py +++ b/gcloud/logging/connection.py @@ -46,3 +46,390 @@ class Connection(base_connection.JSONConnection): 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/cloud-platform') """The scopes required for authenticating as a Cloud Logging consumer.""" + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type connection: :class:`gcloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_entries(self, projects, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entry resources. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more entries can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {'projectIds': projects} + + if filter_ is not None: + params['filter'] = filter_ + + if order_by is not None: + params['orderBy'] = order_by + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + resp = self._connection.api_request( + method='POST', path='/entries:list', data=params) + + return resp.get('entries', ()), resp.get('nextPageToken') + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + """API call: log an entry resource via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type entries: sequence of mapping + :param entries: the log entry resources to log. + + :type logger_name: string + :param logger_name: name of default logger to which to log the entries; + individual entries may override. + + :type resource: mapping + :param resource: default resource to associate with entries; + individual entries may override. 
+ + :type labels: mapping + :param labels: default labels to associate with entries; + individual entries may override. + """ + data = {'entries': list(entries)} + + if logger_name is not None: + data['logName'] = logger_name + + if resource is not None: + data['resource'] = resource + + if labels is not None: + data['labels'] = labels + + self._connection.api_request(method='POST', path='/entries:write', + data=data) + + def logger_delete(self, project, logger_name): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type project: string + :param project: ID of project containing the log entries to delete + + :type logger_name: string + :param logger_name: name of logger containing the log entries to delete + """ + path = '/projects/%s/logs/%s' % (project, logger_name) + self._connection.api_request(method='DELETE', path=path) + + +class _SinksAPI(object): + """Helper mapping sink-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type connection: :class:`gcloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_sinks(self, project, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type project: string + :param project: ID of the project whose sinks are to be listed. + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more sinks can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/sinks' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + sinks = resp.get('sinks', ()) + return sinks, resp.get('nextPageToken') + + def sink_create(self, project, sink_name, filter_, destination): + """API call: create a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type project: string + :param project: ID of the project in which to create the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + target = '/projects/%s/sinks' % (project,) + data = { + 'name': sink_name, + 'filter': filter_, + 'destination': destination, + } + self._connection.api_request(method='POST', path=target, data=data) + + def sink_get(self, project, sink_name): + """API call: retrieve a sink resource. 
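write_entries applies the optional logger_name, resource, and labels arguments as request-wide defaults; an entry that carries its own value overrides them. A hypothetical direct call against the helper, assuming a configured client (the project and log names are illustrative):

    entries = [
        {'textPayload': 'job started'},                 # inherits all three defaults
        {'jsonPayload': {'rows': 1250},
         'labels': {'stage': 'load'}},                  # overrides only the labels
    ]
    client.logging_api.write_entries(
        entries,
        logger_name='projects/my-project/logs/app',
        resource={'type': 'global'},
        labels={'env': 'test'},
    )
    # One POST to /entries:write whose body carries the defaults
    # alongside the 'entries' list.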
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + return self._connection.api_request(method='GET', path=target) + + def sink_update(self, project, sink_name, filter_, destination): + """API call: update a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + data = { + 'name': sink_name, + 'filter': filter_, + 'destination': destination, + } + self._connection.api_request(method='PUT', path=target, data=data) + + def sink_delete(self, project, sink_name): + """API call: delete a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + self._connection.api_request(method='DELETE', path=target) + + +class _MetricsAPI(object): + """Helper mapping sink-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + + :type connection: :class:`gcloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_metrics(self, project, page_size=None, page_token=None): + """List metrics for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type project: string + :param project: ID of the project whose metrics are to be listed. + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more metrics can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/metrics' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + metrics = resp.get('metrics', ()) + return metrics, resp.get('nextPageToken') + + def metric_create(self, project, metric_name, filter_, description=None): + """API call: create a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type project: string + :param project: ID of the project in which to create the metric. 
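Like list_entries and list_sinks, list_metrics hands back the raw resources together with the opaque nextPageToken and leaves paging to the caller. A small sketch of draining every page, assuming a configured client (the helper function itself is not part of this change):

    def all_metrics(client, project, page_size=100):
        """Follow nextPageToken until the listing is exhausted."""
        metrics = []
        token = None
        while True:
            page, token = client.metrics_api.list_metrics(
                project, page_size=page_size, page_token=token)
            metrics.extend(page)
            if token is None:
                return metrics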
+ + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics' % (project,) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='POST', path=target, data=data) + + def metric_get(self, project, metric_name): + """API call: retrieve a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + return self._connection.api_request(method='GET', path=target) + + def metric_update(self, project, metric_name, filter_, description): + """API call: update a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='PUT', path=target, data=data) + + def metric_delete(self, project, metric_name): + """API call: delete a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type project: string + :param project: ID of the project containing the metric. 
+ + :type metric_name: string + :param metric_name: the name of the metric + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + self._connection.api_request(method='DELETE', path=target) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index f7bb50ee4a80..913e32eb07a7 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -183,10 +183,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request) - data = {'entries': [entry_resource]} - - client.connection.api_request( - method='POST', path='/entries:write', data=data) + client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, severity=None, http_request=None): @@ -219,10 +216,7 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request) - data = {'entries': [entry_resource]} - - client.connection.api_request( - method='POST', path='/entries:write', data=data) + client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, severity=None, http_request=None): @@ -255,10 +249,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request) - data = {'entries': [entry_resource]} - - client.connection.api_request( - method='POST', path='/entries:write', data=data) + client.logging_api.write_entries([entry_resource]) def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request @@ -271,7 +262,7 @@ def delete(self, client=None): ``client`` stored on the current logger. """ client = self._require_client(client) - client.connection.api_request(method='DELETE', path=self.path) + client.logging_api.logger_delete(self.project, self.name) def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): @@ -419,14 +410,14 @@ def commit(self, client=None): if client is None: client = self.client - data = { - 'logName': self.logger.path, + kwargs = { + 'logger_name': self.logger.path, 'resource': {'type': 'global'}, } if self.logger.labels is not None: - data['labels'] = self.logger.labels + kwargs['labels'] = self.logger.labels - entries = data['entries'] = [] + entries = [] for entry_type, entry, labels, iid, severity, http_req in self.entries: if entry_type == 'text': info = {'textPayload': entry} @@ -448,6 +439,5 @@ def commit(self, client=None): info['httpRequest'] = http_req entries.append(info) - client.connection.api_request( - method='POST', path='/entries:write', data=data) + client.logging_api.write_entries(entries, **kwargs) del self.entries[:] diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 34fa343ff53f..3a751b339f05 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -138,14 +138,8 @@ def create(self, client=None): ``client`` stored on the current metric. 
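Batch.commit now flushes everything in a single write_entries call, sending the log name, the {'type': 'global'} resource, and any default labels as request-level arguments instead of repeating them inside each entry. A hypothetical batched round trip, assuming a configured client (the logger name is illustrative):

    logger = client.logger('app')
    batch = logger.batch()
    batch.log_text('backfill started')
    batch.log_struct({'rows': 1250, 'ok': True})
    batch.commit()  # one logging_api.write_entries() call covering both entries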
""" client = self._require_client(client) - target = '/projects/%s/metrics' % (self.project,) - data = { - 'name': self.name, - 'filter': self.filter_, - } - if self.description: - data['description'] = self.description - client.connection.api_request(method='POST', path=target, data=data) + client.metrics_api.metric_create( + self.project, self.name, self.filter_, self.description) def exists(self, client=None): """API call: test for the existence of the metric via a GET request @@ -160,7 +154,7 @@ def exists(self, client=None): client = self._require_client(client) try: - client.connection.api_request(method='GET', path=self.path) + client.metrics_api.metric_get(self.project, self.name) except NotFound: return False else: @@ -177,7 +171,7 @@ def reload(self, client=None): ``client`` stored on the current metric. """ client = self._require_client(client) - data = client.connection.api_request(method='GET', path=self.path) + data = client.metrics_api.metric_get(self.project, self.name) self.description = data.get('description', '') self.filter_ = data['filter'] @@ -192,10 +186,8 @@ def update(self, client=None): ``client`` stored on the current metric. """ client = self._require_client(client) - data = {'name': self.name, 'filter': self.filter_} - if self.description: - data['description'] = self.description - client.connection.api_request(method='PUT', path=self.path, data=data) + client.metrics_api.metric_update( + self.project, self.name, self.filter_, self.description) def delete(self, client=None): """API call: delete a metric via a DELETE request @@ -208,4 +200,4 @@ def delete(self, client=None): ``client`` stored on the current metric. """ client = self._require_client(client) - client.connection.api_request(method='DELETE', path=self.path) + client.metrics_api.metric_delete(self.project, self.name) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index 49f651bfe905..ff4376712cfe 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -137,13 +137,8 @@ def create(self, client=None): ``client`` stored on the current sink. """ client = self._require_client(client) - target = '/projects/%s/sinks' % (self.project,) - data = { - 'name': self.name, - 'filter': self.filter_, - 'destination': self.destination, - } - client.connection.api_request(method='POST', path=target, data=data) + client.sinks_api.sink_create( + self.project, self.name, self.filter_, self.destination) def exists(self, client=None): """API call: test for the existence of the sink via a GET request @@ -158,7 +153,7 @@ def exists(self, client=None): client = self._require_client(client) try: - client.connection.api_request(method='GET', path=self.path) + client.sinks_api.sink_get(self.project, self.name) except NotFound: return False else: @@ -175,7 +170,7 @@ def reload(self, client=None): ``client`` stored on the current sink. """ client = self._require_client(client) - data = client.connection.api_request(method='GET', path=self.path) + data = client.sinks_api.sink_get(self.project, self.name) self.filter_ = data['filter'] self.destination = data['destination'] @@ -190,12 +185,8 @@ def update(self, client=None): ``client`` stored on the current sink. 
""" client = self._require_client(client) - data = { - 'name': self.name, - 'filter': self.filter_, - 'destination': self.destination, - } - client.connection.api_request(method='PUT', path=self.path, data=data) + client.sinks_api.sink_update( + self.project, self.name, self.filter_, self.destination) def delete(self, client=None): """API call: delete a sink via a DELETE request @@ -208,4 +199,4 @@ def delete(self, client=None): ``client`` stored on the current sink. """ client = self._require_client(client) - client.connection.api_request(method='DELETE', path=self.path) + client.sinks_api.sink_delete(self.project, self.name) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index 2ac27234ad6e..123ee537cb49 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -38,6 +38,39 @@ def test_ctor(self): client = self._makeOne(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) + def test_logging_api(self): + from gcloud.logging.connection import _LoggingAPI + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + api = client.logging_api + self.assertTrue(isinstance(api, _LoggingAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.logging_api + self.assertTrue(again is api) + + def test_sinks_api(self): + from gcloud.logging.connection import _SinksAPI + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + api = client.sinks_api + self.assertTrue(isinstance(api, _SinksAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.sinks_api + self.assertTrue(again is api) + + def test_metrics_api(self): + from gcloud.logging.connection import _MetricsAPI + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + api = client.metrics_api + self.assertTrue(isinstance(api, _MetricsAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.metrics_api + self.assertTrue(again is api) + def test_logger(self): from gcloud.logging.logger import Logger creds = _Credentials() @@ -57,26 +90,17 @@ def test__entry_from_resource_unknown_type(self): client._entry_from_resource({'unknownPayload': {}}, loggers) def test_list_entries_defaults(self): - from datetime import datetime - from gcloud._helpers import UTC from gcloud.logging.entries import TextEntry - from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - IID1 = 'IID1' + IID = 'IID' TEXT = 'TEXT' - SENT = { - 'projectIds': [self.PROJECT], - } TOKEN = 'TOKEN' RETURNED = { 'entries': [{ 'textPayload': TEXT, - 'insertId': IID1, + 'insertId': IID, 'resource': { 'type': 'global', }, - 'timestamp': TIMESTAMP, 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }], @@ -84,39 +108,34 @@ def test_list_entries_defaults(self): } creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) - conn = client.connection = _Connection(RETURNED) + api = client._logging_api = _DummyLoggingAPI() + api._list_entries_response = RETURNED + entries, token = client.list_entries() + self.assertEqual(len(entries), 1) entry = entries[0] self.assertTrue(isinstance(entry, TextEntry)) - self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.payload, 
TEXT) - self.assertEqual(entry.timestamp, NOW) logger = entry.logger self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) self.assertEqual(token, TOKEN) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:list') - self.assertEqual(req['data'], SENT) + + self.assertEqual( + api._list_entries_called_with, + ([self.PROJECT], None, None, None, None)) def test_list_entries_explicit(self): - # pylint: disable=too-many-statements - from datetime import datetime - from gcloud._helpers import UTC from gcloud.logging import DESCENDING from gcloud.logging.entries import ProtobufEntry from gcloud.logging.entries import StructEntry from gcloud.logging.logger import Logger - from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' FILTER = 'logName:LOGNAME' - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) IID1 = 'IID1' IID2 = 'IID2' PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} @@ -124,13 +143,6 @@ def test_list_entries_explicit(self): PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' TOKEN = 'TOKEN' PAGE_SIZE = 42 - SENT = { - 'projectIds': [PROJECT1, PROJECT2], - 'filter': FILTER, - 'orderBy': DESCENDING, - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, - } RETURNED = { 'entries': [{ 'jsonPayload': PAYLOAD, @@ -138,7 +150,6 @@ def test_list_entries_explicit(self): 'resource': { 'type': 'global', }, - 'timestamp': TIMESTAMP, 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }, { @@ -147,14 +158,14 @@ def test_list_entries_explicit(self): 'resource': { 'type': 'global', }, - 'timestamp': TIMESTAMP, 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }], } - creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - conn = client.connection = _Connection(RETURNED) + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + api = client._logging_api = _DummyLoggingAPI() + api._list_entries_response = RETURNED + entries, token = client.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) @@ -164,7 +175,6 @@ def test_list_entries_explicit(self): self.assertTrue(isinstance(entry, StructEntry)) self.assertEqual(entry.insert_id, IID1) self.assertEqual(entry.payload, PAYLOAD) - self.assertEqual(entry.timestamp, NOW) logger = entry.logger self.assertTrue(isinstance(logger, Logger)) self.assertEqual(logger.name, self.LOGGER_NAME) @@ -175,7 +185,6 @@ def test_list_entries_explicit(self): self.assertTrue(isinstance(entry, ProtobufEntry)) self.assertEqual(entry.insert_id, IID2) self.assertEqual(entry.payload, PROTO_PAYLOAD) - self.assertEqual(entry.timestamp, NOW) logger = entry.logger self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) @@ -184,11 +193,9 @@ def test_list_entries_explicit(self): self.assertTrue(entries[0].logger is entries[1].logger) self.assertEqual(token, None) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:list') - self.assertEqual(req['data'], SENT) + self.assertEqual( + api._list_entries_called_with, + ([PROJECT1, PROJECT2], FILTER, DESCENDING, PAGE_SIZE, TOKEN)) def test_sink(self): from 
gcloud.logging.sink import Sink @@ -205,101 +212,78 @@ def test_sink(self): def test_list_sinks_no_paging(self): from gcloud.logging.sink import Sink PROJECT = 'PROJECT' - CREDS = _Credentials() - - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - + TOKEN = 'TOKEN' SINK_NAME = 'sink_name' FILTER = 'logName:syslog AND severity>=ERROR' SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) - RETURNED = { 'sinks': [{ 'name': SINK_PATH, 'filter': FILTER, 'destination': self.DESTINATION_URI, }], + 'nextPageToken': TOKEN, } - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = RETURNED + + sinks, token = client.list_sinks() - # Execute request. - sinks, next_page_token = CLIENT_OBJ.list_sinks() - # Test values are correct. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertTrue(isinstance(sink, Sink)) self.assertEqual(sink.name, SINK_NAME) self.assertEqual(sink.filter_, FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(next_page_token, None) - self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) - self.assertEqual(req['query_params'], {}) + + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, None, None)) def test_list_sinks_with_paging(self): from gcloud.logging.sink import Sink PROJECT = 'PROJECT' - CREDS = _Credentials() - - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - SINK_NAME = 'sink_name' FILTER = 'logName:syslog AND severity>=ERROR' SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 + TOKEN = 'TOKEN' + PAGE_SIZE = 42 RETURNED = { 'sinks': [{ 'name': SINK_PATH, 'filter': FILTER, 'destination': self.DESTINATION_URI, }], - 'nextPageToken': TOKEN2, } - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = RETURNED + + sinks, token = client.list_sinks(PAGE_SIZE, TOKEN) - # Execute request. - sinks, next_page_token = CLIENT_OBJ.list_sinks(SIZE, TOKEN1) - # Test values are correct. 
self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertTrue(isinstance(sink, Sink)) self.assertEqual(sink.name, SINK_NAME) self.assertEqual(sink.filter_, FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertEqual(next_page_token, TOKEN2) - self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) - self.assertEqual(req['query_params'], - {'pageSize': SIZE, 'pageToken': TOKEN1}) + self.assertEqual(token, None) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) def test_list_sinks_missing_key(self): PROJECT = 'PROJECT' - CREDS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = {} - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + sinks, token = client.list_sinks() - RETURNED = {} - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) - - # Execute request. - sinks, next_page_token = CLIENT_OBJ.list_sinks() - # Test values are correct. self.assertEqual(len(sinks), 0) - self.assertEqual(next_page_token, None) - self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/projects/%s/sinks' % PROJECT) - self.assertEqual(req['query_params'], {}) + self.assertEqual(token, None) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, None, None)) def test_metric(self): from gcloud.logging.metric import Metric @@ -318,59 +302,49 @@ def test_metric(self): def test_list_metrics_no_paging(self): from gcloud.logging.metric import Metric PROJECT = 'PROJECT' - CREDS = _Credentials() - - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - + TOKEN = 'TOKEN' RETURNED = { 'metrics': [{ 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }], + 'nextPageToken': TOKEN, } - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = RETURNED + + metrics, token = client.list_metrics() - # Execute request. - metrics, next_page_token = CLIENT_OBJ.list_metrics() - # Test values are correct. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertTrue(isinstance(metric, Metric)) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) - self.assertEqual(next_page_token, None) - self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) - self.assertEqual(req['query_params'], {}) + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, None, None)) def test_list_metrics_with_paging(self): from gcloud.logging.metric import Metric PROJECT = 'PROJECT' - CREDS = _Credentials() - - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 + TOKEN = 'TOKEN' + PAGE_SIZE = 42 RETURNED = { 'metrics': [{ 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }], - 'nextPageToken': TOKEN2, } - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = RETURNED # Execute request. - metrics, next_page_token = CLIENT_OBJ.list_metrics(SIZE, TOKEN1) + metrics, token = client.list_metrics(PAGE_SIZE, TOKEN) # Test values are correct. self.assertEqual(len(metrics), 1) metric = metrics[0] @@ -378,32 +352,22 @@ def test_list_metrics_with_paging(self): self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) - self.assertEqual(next_page_token, TOKEN2) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) - self.assertEqual(req['query_params'], - {'pageSize': SIZE, 'pageToken': TOKEN1}) + self.assertEqual(token, None) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) def test_list_metrics_missing_key(self): PROJECT = 'PROJECT' - CREDS = _Credentials() - - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = {} - RETURNED = {} - # Replace the connection on the client with one of our own. - CLIENT_OBJ.connection = _Connection(RETURNED) + metrics, token = client.list_metrics() - # Execute request. - metrics, next_page_token = CLIENT_OBJ.list_metrics() - # Test values are correct. 
self.assertEqual(len(metrics), 0) - self.assertEqual(next_page_token, None) - self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) - req = CLIENT_OBJ.connection._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) - self.assertEqual(req['query_params'], {}) + self.assertEqual(token, None) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, None, None)) class _Credentials(object): @@ -419,13 +383,23 @@ def create_scoped(self, scope): return self -class _Connection(object): +class _DummyLoggingAPI(object): + + def list_entries(self, projects, filter_, order_by, page_size, page_token): + self._list_entries_called_with = ( + projects, filter_, order_by, page_size, page_token) + return self._list_entries_response + + +class _DummySinksAPI(object): + + def list_sinks(self, project, page_size, page_token): + self._list_sinks_called_with = (project, page_size, page_token) + return self._list_sinks_response + - def __init__(self, *responses): - self._responses = responses - self._requested = [] +class _DummyMetricsAPI(object): - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response + def list_metrics(self, project, page_size, page_token): + self._list_metrics_called_with = (project, page_size, page_token) + return self._list_metrics_response diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py index 2939b683305e..e63642fdd7b2 100644 --- a/gcloud/logging/test_connection.py +++ b/gcloud/logging/test_connection.py @@ -17,6 +17,9 @@ class TestConnection(unittest2.TestCase): + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + def _getTargetClass(self): from gcloud.logging.connection import Connection return Connection @@ -31,6 +34,571 @@ def test_default_url(self): self.assertEqual(conn.credentials._scopes, klass.SCOPE) +class Test_LoggingAPI(unittest2.TestCase): + + PROJECT = 'project' + LIST_ENTRIES_PATH = 'entries:list' + WRITE_ENTRIES_PATH = 'entries:write' + LOGGER_NAME = 'LOGGER_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + + def _getTargetClass(self): + from gcloud.logging.connection import _LoggingAPI + return _LoggingAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + api = self._makeOne(connection) + self.assertTrue(api._connection is connection) + + @staticmethod + def _make_timestamp(): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + NOW = datetime.utcnow().replace(tzinfo=UTC) + return _datetime_to_rfc3339_w_nanos(NOW) + + def test_list_entries_no_paging(self): + TIMESTAMP = self._make_timestamp() + IID = 'IID' + TEXT = 'TEXT' + SENT = { + 'projectIds': [self.PROJECT], + } + TOKEN = 'TOKEN' + RETURNED = { + 'entries': [{ + 'textPayload': TEXT, + 'insertId': IID, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries([self.PROJECT]) + + self.assertEqual(entries, RETURNED['entries']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + 
self.assertEqual(conn._called_with['data'], SENT) + + def test_list_entries_w_paging(self): + from gcloud.logging import DESCENDING + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + TIMESTAMP = self._make_timestamp() + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SENT = { + 'projectIds': [PROJECT1, PROJECT2], + 'filter': self.FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + } + RETURNED = { + 'entries': [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries( + projects=[PROJECT1, PROJECT2], filter_=self.FILTER, + order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(entries, RETURNED['entries']) + self.assertEqual(token, None) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_write_entries_single(self): + TEXT = 'TEXT' + ENTRY = { + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + } + SENT = { + 'entries': [ENTRY], + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.write_entries([ENTRY]) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_write_entries_multiple(self): + TEXT = 'TEXT' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + RESOURCE = { + 'type': 'global', + } + LABELS = { + 'baz': 'qux', + 'spam': 'eggs', + } + ENTRY1 = { + 'textPayload': TEXT, + } + ENTRY2 = { + 'jsonPayload': {'foo': 'bar'}, + } + SENT = { + 'logName': LOG_NAME, + 'resource': RESOURCE, + 'labels': LABELS, + 'entries': [ENTRY1, ENTRY2], + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_logger_delete(self): + path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn = _Connection({}) + api = self._makeOne(conn) + + api.logger_delete(self.PROJECT, self.LOGGER_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + self.assertEqual(conn._called_with['path'], path) + + +class Test_SinksAPI(unittest2.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + LIST_SINKS_PATH = 'projects/%s/sinks' % (PROJECT,) + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from gcloud.logging.connection import _SinksAPI + return _SinksAPI + + def 
_makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + api = self._makeOne(connection) + self.assertTrue(api._connection is connection) + + def test_list_sinks_no_paging(self): + TOKEN = 'TOKEN' + RETURNED = { + 'sinks': [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + sinks, token = api.list_sinks(self.PROJECT) + + self.assertEqual(sinks, RETURNED['sinks']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SINKS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], {}) + + def test_list_sinks_w_paging(self): + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + RETURNED = { + 'sinks': [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + sinks, token = api.list_sinks( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(sinks, RETURNED['sinks']) + self.assertEqual(token, None) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SINKS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], + {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + + def test_sink_create_conflict(self): + from gcloud.exceptions import Conflict + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection() + conn._raise_conflict = True + api = self._makeOne(conn) + + with self.assertRaises(Conflict): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_create_ok(self): + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_get_miss(self): + from gcloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_get_hit(self): + RESPONSE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection(RESPONSE) + api = self._makeOne(conn) + + response = api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(response, RESPONSE) + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_update_miss(self): + from gcloud.exceptions import NotFound + SENT = { + 'name': 
self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_update_hit(self): + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_delete_miss(self): + from gcloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_delete_hit(self): + conn = _Connection({}) + api = self._makeOne(conn) + + api.sink_delete(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + +class Test_MetricsAPI(unittest2.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + LIST_METRICS_PATH = 'projects/%s/metrics' % (PROJECT,) + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.connection import _MetricsAPI + return _MetricsAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_list_metrics_no_paging(self): + TOKEN = 'TOKEN' + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + metrics, token = api.list_metrics(self.PROJECT) + + self.assertEqual(metrics, RETURNED['metrics']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_METRICS_PATH,) + self.assertEqual(conn._called_with['path'], path) + + def test_list_metrics_w_paging(self): + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + metrics, token = api.list_metrics( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(metrics, RETURNED['metrics']) + self.assertEqual(token, None) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_METRICS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], + {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + + def test_metric_create_conflict(self): + from gcloud.exceptions import Conflict + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': 
self.DESCRIPTION, + } + conn = _Connection() + conn._raise_conflict = True + api = self._makeOne(conn) + + with self.assertRaises(Conflict): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_create_ok(self): + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_get_miss(self): + from gcloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_get_hit(self): + RESPONSE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection(RESPONSE) + api = self._makeOne(conn) + + response = api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(response, RESPONSE) + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_update_miss(self): + from gcloud.exceptions import NotFound + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_update_hit(self): + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_delete_miss(self): + from gcloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_delete_hit(self): + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = 
'/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + class _Credentials(object): _scopes = None @@ -42,3 +610,24 @@ def create_scoped_required(): def create_scoped(self, scope): self._scopes = scope return self + + +class _Connection(object): + + _called_with = None + _raise_conflict = False + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + from gcloud.exceptions import Conflict + from gcloud.exceptions import NotFound + self._called_with = kw + if self._raise_conflict: + raise Conflict('oops') + try: + response, self._responses = self._responses[0], self._responses[1:] + except IndexError: + raise NotFound('miss') + return response diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 069ad2f47d2e..1ddcbaa4f6f3 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -28,7 +28,7 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor_defaults(self): - conn = _Connection() + conn = object() client = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client) self.assertEqual(logger.name, self.LOGGER_NAME) @@ -42,7 +42,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): LABELS = {'foo': 'bar', 'baz': 'qux'} - conn = _Connection() + conn = object() client = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS) self.assertEqual(logger.name, self.LOGGER_NAME) @@ -56,7 +56,7 @@ def test_ctor_explicit(self): def test_batch_w_bound_client(self): from gcloud.logging.logger import Batch - conn = _Connection() + conn = object() client = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client) batch = logger.batch() @@ -66,8 +66,8 @@ def test_batch_w_bound_client(self): def test_batch_w_alternate_client(self): from gcloud.logging.logger import Batch - conn1 = _Connection() - conn2 = _Connection() + conn1 = object() + conn2 = object() client1 = _Client(self.PROJECT, conn1) client2 = _Client(self.PROJECT, conn2) logger = self._makeOne(self.LOGGER_NAME, client=client1) @@ -78,50 +78,44 @@ def test_batch_w_alternate_client(self): def test_log_text_w_str_implicit_client(self): TEXT = 'TEXT' - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_text(TEXT) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - }, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_text_w_default_labels(self): TEXT = 'TEXT' DEFAULT_LABELS = {'foo': 'spam'} - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = 
client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) + logger.log_text(TEXT) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - }, - 'labels': DEFAULT_LABELS, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' @@ -137,79 +131,70 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } - conn = _Connection({}) - client1 = _Client(self.PROJECT, object()) - client2 = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) + logger.log_text(TEXT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - }, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_struct_w_implicit_client(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_struct(STRUCT) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} DEFAULT_LABELS = {'foo': 'spam'} - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client, 
labels=DEFAULT_LABELS) + logger.log_struct(STRUCT) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - 'labels': DEFAULT_LABELS, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_struct_w_explicit_client_labels_severity_httpreq(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} @@ -225,58 +210,52 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } - conn = _Connection({}) - client1 = _Client(self.PROJECT, object()) - client2 = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) + logger.log_struct(STRUCT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_proto(message) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_proto_w_default_labels(self): import json @@ -284,27 +263,24 @@ def test_log_proto_w_default_labels(self): from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} - conn = _Connection({}) - client = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, 
self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) + logger.log_proto(message) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - 'labels': DEFAULT_LABELS, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_log_proto_w_explicit_client_labels_severity_httpreq(self): import json @@ -323,58 +299,51 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } - conn = _Connection({}) - client1 = _Client(self.PROJECT, object()) - client2 = _Client(self.PROJECT, conn) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) + logger.log_proto(message, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - SENT = { - 'entries': [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - }], - } - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) def test_delete_w_bound_client(self): - PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - conn = _Connection({}) - CLIENT = _Client(project=self.PROJECT, connection=conn) - logger = self._makeOne(self.LOGGER_NAME, client=CLIENT) + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.delete() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'DELETE') - self.assertEqual(req['path'], '/%s' % PATH) + + self.assertEqual(api._logger_delete_called_with, + (self.PROJECT, self.LOGGER_NAME)) def test_delete_w_alternate_client(self): - PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - conn1 = _Connection({}) - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({}) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - logger = self._makeOne(self.LOGGER_NAME, client=CLIENT1) - logger.delete(client=CLIENT2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'DELETE') - 
self.assertEqual(req['path'], '/%s' % PATH) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client1) + + logger.delete(client=client2) + + self.assertEqual(api._logger_delete_called_with, + (self.PROJECT, self.LOGGER_NAME)) def test_list_entries_defaults(self): LISTED = { @@ -385,8 +354,7 @@ def test_list_entries_defaults(self): 'page_token': None, } TOKEN = 'TOKEN' - conn = _Connection() - client = _Client(self.PROJECT, conn) + client = _Client(self.PROJECT) client._token = TOKEN logger = self._makeOne(self.LOGGER_NAME, client=client) entries, token = logger.list_entries() @@ -408,8 +376,7 @@ def test_list_entries_explicit(self): 'page_size': PAGE_SIZE, 'page_token': TOKEN, } - conn = _Connection() - client = _Client(self.PROJECT, conn) + client = _Client(self.PROJECT) logger = self._makeOne(self.LOGGER_NAME, client=client) entries, token = logger.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, @@ -432,20 +399,18 @@ def _makeOne(self, *args, **kwargs): def test_ctor_defaults(self): logger = _Logger() - CLIENT = _Client(project=self.PROJECT) - batch = self._makeOne(logger, CLIENT) + client = _Client(project=self.PROJECT) + batch = self._makeOne(logger, client) self.assertTrue(batch.logger is logger) - self.assertTrue(batch.client is CLIENT) + self.assertTrue(batch.client is client) self.assertEqual(len(batch.entries), 0) def test_log_text_defaults(self): TEXT = 'This is the entry text' - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_text(TEXT) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('text', TEXT, None, None, None, None)]) @@ -462,24 +427,20 @@ def test_log_text_explicit(self): 'requestUrl': URI, 'status': STATUS, } - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_struct(STRUCT) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('struct', STRUCT, None, None, None, None)]) @@ -496,25 +457,21 @@ def test_log_struct_explicit(self): 'requestUrl': URI, 'status': STATUS, } - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, severity=SEVERITY, 
http_request=REQUEST) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_proto(message) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('proto', message, None, None, None, None)]) @@ -532,21 +489,18 @@ def test_log_proto_explicit(self): 'requestUrl': URI, 'status': STATUS, } - connection = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=connection) + client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) - self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() - conn = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=conn) - batch = self._makeOne(logger, CLIENT) + client = _Client(project=self.PROJECT, connection=object()) + batch = self._makeOne(logger, client) batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) with self.assertRaises(ValueError): batch.commit() @@ -561,32 +515,28 @@ def test_commit_w_bound_client(self): IID1 = 'IID1' IID2 = 'IID2' IID3 = 'IID3' - conn = _Connection({}) - CLIENT = _Client(project=self.PROJECT, connection=conn) - logger = _Logger() - SENT = { - 'logName': logger.path, - 'resource': { - 'type': 'global', - }, - 'entries': [ - {'textPayload': TEXT, 'insertId': IID1}, - {'jsonPayload': STRUCT, 'insertId': IID2}, - {'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3}, - ], + RESOURCE = { + 'type': 'global', } - batch = self._makeOne(logger, client=CLIENT) + ENTRIES = [ + {'textPayload': TEXT, 'insertId': IID1}, + {'jsonPayload': STRUCT, 'insertId': IID2}, + {'protoPayload': json.loads(MessageToJson(message)), + 'insertId': IID3}, + ] + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_text(TEXT, insert_id=IID1) batch.log_struct(STRUCT, insert_id=IID2) batch.log_proto(message, insert_id=IID3) batch.commit() + self.assertEqual(list(batch.entries), []) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.path, RESOURCE, None)) def test_commit_w_alternate_client(self): import json @@ -597,7 +547,10 @@ def test_commit_w_alternate_client(self): STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} + LABELS = { + 'foo': 'bar', + 'baz': 'qux', + } SEVERITY = 'CRITICAL' METHOD = 'POST' URI = 'https://api.example.com/endpoint' @@ -607,34 +560,27 @@ def 
test_commit_w_alternate_client(self): 'requestUrl': URI, 'status': STATUS, } - conn1 = _Connection() - conn2 = _Connection({}) - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - logger = Logger('logger_name', CLIENT1, labels=DEFAULT_LABELS) - SENT = { - 'logName': logger.path, - 'resource': {'type': 'global'}, - 'labels': DEFAULT_LABELS, - 'entries': [ - {'textPayload': TEXT, 'labels': LABELS}, - {'jsonPayload': STRUCT, 'severity': SEVERITY}, - {'protoPayload': json.loads(MessageToJson(message)), - 'httpRequest': REQUEST}, - ], - } - batch = self._makeOne(logger, client=CLIENT1) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = Logger('logger_name', client1, labels=DEFAULT_LABELS) + RESOURCE = {'type': 'global'} + ENTRIES = [ + {'textPayload': TEXT, 'labels': LABELS}, + {'jsonPayload': STRUCT, 'severity': SEVERITY}, + {'protoPayload': json.loads(MessageToJson(message)), + 'httpRequest': REQUEST}, + ] + batch = self._makeOne(logger, client=client1) + batch.log_text(TEXT, labels=LABELS) batch.log_struct(STRUCT, severity=SEVERITY) batch.log_proto(message, http_request=REQUEST) - batch.commit(client=CLIENT2) + batch.commit(client=client2) + self.assertEqual(list(batch.entries), []) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS)) def test_context_mgr_success(self): import json @@ -655,23 +601,19 @@ def test_context_mgr_success(self): 'requestUrl': URI, 'status': STATUS, } - conn = _Connection({}) - CLIENT = _Client(project=self.PROJECT, connection=conn) - logger = Logger('logger_name', CLIENT, labels=DEFAULT_LABELS) - SENT = { - 'logName': logger.path, - 'resource': { - 'type': 'global', - }, - 'labels': DEFAULT_LABELS, - 'entries': [ - {'textPayload': TEXT, 'httpRequest': REQUEST}, - {'jsonPayload': STRUCT, 'labels': LABELS}, - {'protoPayload': json.loads(MessageToJson(message)), - 'severity': SEVERITY}, - ], + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = Logger('logger_name', client, labels=DEFAULT_LABELS) + RESOURCE = { + 'type': 'global', } - batch = self._makeOne(logger, client=CLIENT) + ENTRIES = [ + {'textPayload': TEXT, 'httpRequest': REQUEST}, + {'jsonPayload': STRUCT, 'labels': LABELS}, + {'protoPayload': json.loads(MessageToJson(message)), + 'severity': SEVERITY}, + ] + batch = self._makeOne(logger, client=client) with batch as other: other.log_text(TEXT, http_request=REQUEST) @@ -679,11 +621,8 @@ def test_context_mgr_success(self): other.log_proto(message, severity=SEVERITY) self.assertEqual(list(batch.entries), []) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/entries:write') - self.assertEqual(req['data'], SENT) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS)) def test_context_mgr_failure(self): from google.protobuf.struct_pb2 import Struct, Value @@ -701,15 +640,15 @@ def test_context_mgr_failure(self): 'status': STATUS, } message = Struct(fields={'foo': Value(bool_value=True)}) - conn = _Connection({}) - CLIENT = 
_Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() logger = _Logger() UNSENT = [ ('text', TEXT, None, IID, None, None), ('struct', STRUCT, None, None, SEVERITY, None), ('proto', message, LABELS, None, None, REQUEST), ] - batch = self._makeOne(logger, client=CLIENT) + batch = self._makeOne(logger, client=client) try: with batch as other: @@ -721,7 +660,7 @@ def test_context_mgr_failure(self): pass self.assertEqual(list(batch.entries), UNSENT) - self.assertEqual(len(conn._requested), 0) + self.assertEqual(api._write_entries_called_with, None) class _Logger(object): @@ -732,16 +671,17 @@ def __init__(self, name="NAME", project="PROJECT"): self.path = '/projects/%s/logs/%s' % (project, name) -class _Connection(object): +class _DummyLoggingAPI(object): + + _write_entries_called_with = None - def __init__(self, *responses): - self._responses = responses - self._requested = [] + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + self._write_entries_called_with = ( + entries, logger_name, resource, labels) - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response + def logger_delete(self, project, logger_name): + self._logger_delete_called_with = (project, logger_name) class _Client(object): diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 4fdbae59d297..24c2933560cc 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -63,8 +63,7 @@ def _makeOne(self, *args, **kw): def test_ctor_defaults(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn = _Connection() - client = _Client(self.PROJECT, conn) + client = _Client(self.PROJECT) metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) @@ -76,8 +75,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn = _Connection() - client = _Client(self.PROJECT, conn) + client = _Client(self.PROJECT) metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION) self.assertEqual(metric.name, self.METRIC_NAME) @@ -89,23 +87,23 @@ def test_ctor_explicit(self): self.assertEqual(metric.path, '/%s' % (FULL,)) def test_from_api_repr_minimal(self): - CLIENT = _Client(project=self.PROJECT) + client = _Client(project=self.PROJECT) FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) RESOURCE = { 'name': self.METRIC_NAME, 'filter': self.FILTER, } klass = self._getTargetClass() - metric = klass.from_api_repr(RESOURCE, client=CLIENT) + metric = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, '') - self.assertTrue(metric._client is CLIENT) + self.assertTrue(metric._client is client) self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) def test_from_api_repr_w_description(self): - CLIENT = _Client(project=self.PROJECT) + client = _Client(project=self.PROJECT) FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) DESCRIPTION = 'DESCRIPTION' RESOURCE = { @@ -114,203 +112,172 @@ def test_from_api_repr_w_description(self): 'description': DESCRIPTION, } klass = 
self._getTargetClass() - metric = klass.from_api_repr(RESOURCE, client=CLIENT) + metric = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, DESCRIPTION) - self.assertTrue(metric._client is CLIENT) + self.assertTrue(metric._client is client) self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) def test_create_w_bound_client(self): - TARGET = 'projects/%s/metrics' % (self.PROJECT,) - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - } - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric.create() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/%s' % TARGET) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) def test_create_w_alternate_client(self): - TARGET = 'projects/%s/metrics' % (self.PROJECT,) - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - } - conn1 = _Connection() - client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - client2 = _Client(project=self.PROJECT, connection=conn2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION) + metric.create(client=client2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/%s' % TARGET) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) def test_exists_miss_w_bound_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=conn) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + self.assertFalse(metric.exists()) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) def test_exists_hit_w_alternate_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({'name': FULL}) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) - self.assertTrue(metric.exists(client=CLIENT2)) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + RESOURCE = { + 
'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + self.assertTrue(metric.exists(client=client2)) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) def test_reload_w_bound_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - DESCRIPTION = 'DESCRIPTION' NEW_FILTER = 'logName:syslog AND severity>=INFO' RESOURCE = { 'name': self.METRIC_NAME, 'filter': NEW_FILTER, } - conn = _Connection(RESOURCE) - CLIENT = _Client(project=self.PROJECT, connection=conn) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT, - description=DESCRIPTION) + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client, + description=self.DESCRIPTION) + metric.reload() + self.assertEqual(metric.filter_, NEW_FILTER) self.assertEqual(metric.description, '') - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) def test_reload_w_alternate_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - DESCRIPTION = 'DESCRIPTION' NEW_FILTER = 'logName:syslog AND severity>=INFO' RESOURCE = { 'name': self.METRIC_NAME, - 'description': DESCRIPTION, + 'description': self.DESCRIPTION, 'filter': NEW_FILTER, } - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) - metric.reload(client=CLIENT2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + metric.reload(client=client2) + self.assertEqual(metric.filter_, NEW_FILTER) - self.assertEqual(metric.description, DESCRIPTION) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) def test_update_w_bound_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - } - conn = _Connection(RESOURCE) - CLIENT = _Client(project=self.PROJECT, connection=conn) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric.update() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._metric_update_called_with, + 
(self.PROJECT, self.METRIC_NAME, self.FILTER, '')) def test_update_w_alternate_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - DESCRIPTION = 'DESCRIPTION' - RESOURCE = { - 'name': self.METRIC_NAME, - 'description': DESCRIPTION, - 'filter': self.FILTER, - } - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1, - description=DESCRIPTION) - metric.update(client=CLIENT2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) - self.assertEqual(req['data'], RESOURCE) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + + metric.update(client=client2) + + self.assertEqual( + api._metric_update_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) def test_delete_w_bound_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn = _Connection({}) - CLIENT = _Client(project=self.PROJECT, connection=conn) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric.delete() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'DELETE') - self.assertEqual(req['path'], '/%s' % FULL) + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) def test_delete_w_alternate_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({}) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) - metric.delete(client=CLIENT2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'DELETE') - self.assertEqual(req['path'], '/%s' % FULL) - - -class _Connection(object): - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - - def api_request(self, **kw): - from gcloud.exceptions import NotFound - self._requested.append(kw) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) - try: - response, self._responses = self._responses[0], self._responses[1:] - except: - raise NotFound('miss') - else: - return response + metric.delete(client=client2) + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) class _Client(object): - def __init__(self, project, connection=None): + def __init__(self, project): self.project = project - self.connection = connection + + +class _DummyMetricsAPI(object): + + def metric_create(self, project, metric_name, filter_, description): + self._metric_create_called_with = ( + project, metric_name, filter_, 
description) + + def metric_get(self, project, metric_name): + from gcloud.exceptions import NotFound + self._metric_get_called_with = (project, metric_name) + try: + return self._metric_get_response + except AttributeError: + raise NotFound('miss') + + def metric_update(self, project, metric_name, filter_, description): + self._metric_update_called_with = ( + project, metric_name, filter_, description) + + def metric_delete(self, project, metric_name): + self._metric_delete_called_with = (project, metric_name) diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index 8ef6c9c4559d..9c8badf33975 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -63,8 +63,7 @@ def _makeOne(self, *args, **kw): def test_ctor(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn = _Connection() - client = _Client(self.PROJECT, conn) + client = _Client(self.PROJECT) sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) self.assertEqual(sink.name, self.SINK_NAME) @@ -76,7 +75,7 @@ def test_ctor(self): self.assertEqual(sink.path, '/%s' % (FULL,)) def test_from_api_repr_minimal(self): - CLIENT = _Client(project=self.PROJECT) + client = _Client(project=self.PROJECT) FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) RESOURCE = { 'name': FULL, @@ -84,16 +83,16 @@ def test_from_api_repr_minimal(self): 'destination': self.DESTINATION_URI, } klass = self._getTargetClass() - sink = klass.from_api_repr(RESOURCE, client=CLIENT) + sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(sink._client is CLIENT) + self.assertTrue(sink._client is client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) def test_from_api_repr_w_description(self): - CLIENT = _Client(project=self.PROJECT) + client = _Client(project=self.PROJECT) FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) RESOURCE = { 'name': FULL, @@ -101,18 +100,18 @@ def test_from_api_repr_w_description(self): 'destination': self.DESTINATION_URI, } klass = self._getTargetClass() - sink = klass.from_api_repr(RESOURCE, client=CLIENT) + sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(sink._client is CLIENT) + self.assertTrue(sink._client is client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) def test_from_api_repr_with_mismatched_project(self): PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' - CLIENT = _Client(project=PROJECT1) + client = _Client(project=PROJECT1) FULL = 'projects/%s/sinks/%s' % (PROJECT2, self.SINK_NAME) RESOURCE = { 'name': FULL, @@ -121,76 +120,63 @@ def test_from_api_repr_with_mismatched_project(self): } klass = self._getTargetClass() self.assertRaises(ValueError, klass.from_api_repr, - RESOURCE, client=CLIENT) + RESOURCE, client=client) def test_create_w_bound_client(self): - TARGET = 'projects/%s/sinks' % (self.PROJECT,) - RESOURCE = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, 
self.FILTER, self.DESTINATION_URI, client=client) + sink.create() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/%s' % TARGET) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) def test_create_w_alternate_client(self): - TARGET = 'projects/%s/sinks' % (self.PROJECT,) - RESOURCE = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - conn1 = _Connection() - client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - client2 = _Client(project=self.PROJECT, connection=conn2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) + api = client2.sinks_api = _DummySinksAPI() + sink.create(client=client2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/%s' % TARGET) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) def test_exists_miss_w_bound_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn = _Connection() - CLIENT = _Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT) + client=client) + self.assertFalse(sink.exists()) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) def test_exists_hit_w_alternate_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({'name': FULL}) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT1) - self.assertTrue(sink.exists(client=CLIENT2)) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + client=client1) + + self.assertTrue(sink.exists(client=client2)) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) def test_reload_w_bound_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) NEW_FILTER = 'logName:syslog AND severity>=INFO' NEW_DESTINATION_URI = 'faux.googleapis.com/other' RESOURCE = { @@ -198,20 +184,20 @@ def test_reload_w_bound_client(self): 'filter': NEW_FILTER, 'destination': NEW_DESTINATION_URI, } - conn = _Connection(RESOURCE) - CLIENT = _Client(project=self.PROJECT, connection=conn) + client = 
_Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT) + client=client) + sink.reload() + self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.destination, NEW_DESTINATION_URI) - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) def test_reload_w_alternate_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) NEW_FILTER = 'logName:syslog AND severity>=INFO' NEW_DESTINATION_URI = 'faux.googleapis.com/other' RESOURCE = { @@ -219,108 +205,92 @@ def test_reload_w_alternate_client(self): 'filter': NEW_FILTER, 'destination': NEW_DESTINATION_URI, } - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT1) - sink.reload(client=CLIENT2) + client=client1) + + sink.reload(client=client2) + self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.destination, NEW_DESTINATION_URI) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) def test_update_w_bound_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - RESOURCE = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - conn = _Connection(RESOURCE) - CLIENT = _Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT) + client=client) + sink.update() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) - self.assertEqual(req['data'], RESOURCE) + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) def test_update_w_alternate_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - RESOURCE = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT1) - sink.update(client=CLIENT2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) - self.assertEqual(req['data'], RESOURCE) + client=client1) + + 
sink.update(client=client2) + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) def test_delete_w_bound_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn = _Connection({}) - CLIENT = _Client(project=self.PROJECT, connection=conn) + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT) + client=client) + sink.delete() - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'DELETE') - self.assertEqual(req['path'], '/%s' % FULL) + + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) def test_delete_w_alternate_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn1 = _Connection() - CLIENT1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({}) - CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=CLIENT1) - sink.delete(client=CLIENT2) - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'DELETE') - self.assertEqual(req['path'], '/%s' % FULL) + client=client1) + sink.delete(client=client2) -class _Connection(object): + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) - def __init__(self, *responses): - self._responses = responses - self._requested = [] - def api_request(self, **kw): - from gcloud.exceptions import NotFound - self._requested.append(kw) +class _Client(object): + def __init__(self, project): + self.project = project + + +class _DummySinksAPI(object): + + def sink_create(self, project, sink_name, filter_, destination): + self._sink_create_called_with = ( + project, sink_name, filter_, destination) + + def sink_get(self, project, sink_name): + from gcloud.exceptions import NotFound + self._sink_get_called_with = (project, sink_name) try: - response, self._responses = self._responses[0], self._responses[1:] - except: + return self._sink_get_response + except AttributeError: raise NotFound('miss') - else: - return response - -class _Client(object): + def sink_update(self, project, sink_name, filter_, destination): + self._sink_update_called_with = ( + project, sink_name, filter_, destination) - def __init__(self, project, connection=None): - self.project = project - self.connection = connection + def sink_delete(self, project, sink_name): + self._sink_delete_called_with = (project, sink_name)
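
The pattern applied throughout these test rewrites is the same in every file: the old `_Connection` transport fake (which replayed canned responses to `api_request` and recorded HTTP method/path/payload) is replaced by small per-API test doubles that only record the arguments they were called with, and each assertion checks that recorded tuple. Responsibility for URL construction and HTTP verbs moves into the `_LoggingAPI`/`_SinksAPI`/`_MetricsAPI` helpers, which keep their own method/path/data assertions in test_connection.py. The following is a minimal, self-contained sketch of that testing pattern; the names `_StubSinksAPI`, `_FakeClient`, and `ToySink` are illustrative stand-ins, not part of the library.

import unittest


class _StubSinksAPI(object):
    """Test double that records the arguments of each call."""

    _sink_delete_called_with = None

    def sink_delete(self, project, sink_name):
        self._sink_delete_called_with = (project, sink_name)


class _FakeClient(object):
    """Stand-in for a client object carrying only a project ID."""

    def __init__(self, project):
        self.project = project


class ToySink(object):
    """Minimal sink-like resource that delegates to the client's sinks_api."""

    def __init__(self, name, client):
        self.name = name
        self._client = client

    def delete(self, client=None):
        # Mirror the delegation the rewritten tests exercise: resolve the
        # client, then hand the work to its sinks_api helper.
        client = self._client if client is None else client
        client.sinks_api.sink_delete(client.project, self.name)


class TestToySink(unittest.TestCase):

    def test_delete_records_call(self):
        client = _FakeClient(project='PROJECT')
        api = client.sinks_api = _StubSinksAPI()
        sink = ToySink('SINK_NAME', client=client)

        sink.delete()

        self.assertEqual(api._sink_delete_called_with,
                         ('PROJECT', 'SINK_NAME'))


if __name__ == '__main__':
    unittest.main()

Asserting on recorded call tuples keeps the resource-level tests independent of request details, which is why the rewritten tests above no longer inspect `req['method']`, `req['path']`, or `req['data']` except where the API helpers themselves are under test.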