I have a program that is deployed on Google Cloud Platform (GCP), and I use the Logs Explorer to monitor its logs. For that I need the google.cloud.logging library, which is necessary for logs to show up with the correct severity level in the Logs Explorer.
In my unit tests I am trying to patch the call to the google.cloud.logging library; however, the tests all still end with a 403 error when run locally, which suggests that the library has not actually been patched.
cloud_logger.py
import logging

import google.cloud.logging


def get_logger():
    # Creates a real Cloud Logging client and attaches its handler to the
    # root logger, so records are shipped to GCP with the right severity.
    client = google.cloud.logging.Client()
    client.get_default_handler()  # return value is unused here
    client.setup_logging()
    logger = logging.getLogger(__name__)
    return logger
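For completeness: I know I could make get_logger itself test-aware, e.g. by guarding the client creation behind an environment flag (a rough sketch; the USE_CLOUD_LOGGING variable is something I made up), but I would much rather keep the production code unchanged and patch from the tests:

import logging
import os

import google.cloud.logging


def get_logger():
    # Hypothetical USE_CLOUD_LOGGING flag: skip the real client entirely in tests.
    if os.getenv("USE_CLOUD_LOGGING", "1") == "1":
        client = google.cloud.logging.Client()
        client.setup_logging()
    return logging.getLogger(__name__)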
rss_crawler.py, which utilizes cloud_logger.get_logger
from cloud_logger import get_logger

logger = get_logger()


def crawl_rss_source(source_crawling_details):
    brand_name = source_crawling_details[constants.BRAND_NAME]
    source_name = source_crawling_details[constants.SOURCE_NAME]
    initial_agent_settings = source_crawling_details[constants.INITIAL_AGENT_SETTINGS]
    logger.info(f"Started crawling {brand_name}-{source_name}")
    source = source_crawling_details["source"]
    entry_points_list = source[constants.ENTRY]
    source_crawling_details.update({constants.ENTRY: entry_points_list})
    source_crawling_details.update({constants.AGENT: initial_agent_settings})
    content = get_content(source_crawling_details)
    logger.info("Getting links present in rss feed entry")
    entry_points = rss_entry_points(content)
    source_crawling_details.update({constants.ENTRY_POINTS: entry_points})
    candidate_urls = start_content_tasks(source_crawling_details)
    if not candidate_urls:
        raise CustomException("There are no links to scrape")
    # filtered urls found with crawl rules; next step: get scrape candidates based on scrape rules
    scrape_rules = source[constants.SCRAPE]
    scrape_candidates = get_scrape_candidates(scrape_rules, candidate_urls)
    if not scrape_candidates:
        raise CustomException(
            f"Could not find any links for scraping; please check scrape/crawl rules, or possibly the depth level for brand {brand_name}, source {source_name}"
        )
    return scrape_candidates
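Note that logger = get_logger() runs at import time, so as far as I can tell the real Client() is created the moment the test module imports crawl_rss_source, before any patch decorator takes effect. My understanding of the order of events:

# Order of events when the test module is loaded (my understanding):
# 1. "from rss_crawler import crawl_rss_source" executes rss_crawler.py,
# 2. which executes "logger = get_logger()" at module level,
# 3. which creates a real google.cloud.logging.Client() and calls setup_logging();
# 4. only later, when the test runs, does @patch("rss_crawler.logger") apply.
from rss_crawler import crawl_rss_source  # steps 1-3 happen here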
test_rss_crawler.py
@patch("start_crawl.fetch_source_crawling_fields")
@patch("rss_crawler.logger")
def test_crawl_rss_source_raises_exception(
self, mocked_logger, mocked_source_fetcher
):
mocked_logger.logger.return_value = logging.getLogger(__name__)
self.test_source[constants.SCRAPE] = {
"white_list": ["https://buffer.com/blog/(\\w|\\d|\\-)+/$"]
}
details = set_content_source_details(
self.brand_name,
self.source_name,
self.agent_args,
self.source,
**self.key_word_argument,
)
# test to see if exception is raised if scrape rule is not matching
self.assertRaises(CustomException, crawl_rss_source, details)
self.test_source[constants.CRAWL] = {"white_list": ["https://buffer.com/blog/"]}
details = set_content_source_details(
self.brand_name,
self.source_name,
self.agent_args,
self.source,
**self.key_word_argument,
)
# test to see if exception is raised if crawl rule is not matching
self.assertRaises(CustomException, crawl_rss_source, details)
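For what it's worth, my understanding (which may be wrong) is that @patch("rss_crawler.logger") only swaps the module attribute, while the handler that setup_logging() attached to the root logger keeps shipping records from its background thread. A minimal sketch of what I mean (the logger name is arbitrary):

import logging

# setup_logging() hangs a Cloud Logging handler on the root logger, so any
# logger that propagates (the default) still feeds records to the GCP
# transport, regardless of the mocked rss_crawler.logger attribute:
plain_logger = logging.getLogger("anything")
plain_logger.info("this record still reaches the root logger's handlers")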
However, when I run these tests, I still get these warning messages, even after patching:
Traceback (most recent call last):
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/cloud/logging_v2/handlers/transports/background_thread.py", line 115, in _safely_commit_batch
    batch.commit()
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/cloud/logging_v2/logger.py", line 385, in commit
    client.logging_api.write_entries(entries, **kwargs)
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/cloud/logging_v2/_gapic.py", line 149, in write_entries
    self._gapic_api.write_log_entries(request=request)
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/cloud/logging_v2/services/logging_service_v2/client.py", line 592, in write_log_entries
    response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/api_core/gapic_v1/method.py", line 145, in __call__
    return wrapped_func(*args, **kwargs)
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/api_core/retry.py", line 286, in retry_wrapped_func
    return retry_target(
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/api_core/retry.py", line 189, in retry_target
    return target()
  File "/Users/reydon227/Concured/crawler/env/lib/python3.8/site-packages/google/api_core/grpc_helpers.py", line 69, in error_remapped_callable
    six.raise_from(exceptions.from_grpc_error(exc), exc)
  File "<string>", line 3, in raise_from
google.api_core.exceptions.PermissionDenied: 403 The caller does not have permission
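Is my patch simply happening too late? What I imagined might work instead is making the import itself safe, along these lines (an untested sketch; the "test" logger name is arbitrary):

import importlib
import logging
from unittest.mock import patch

# Sketch: replace get_logger before rss_crawler's module-level call runs, then
# (re)import the module so "logger = get_logger()" executes under the patch.
with patch("cloud_logger.get_logger", return_value=logging.getLogger("test")):
    import rss_crawler
    importlib.reload(rss_crawler)

What is the correct way to patch google.cloud.logging (or cloud_logger.get_logger) in these tests so that no real call to GCP is ever made locally?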