|
| 1 | +from __future__ import annotations |
| 2 | + |
| 3 | +from dataclasses import dataclass |
| 4 | + |
| 5 | +import pytest |
| 6 | +from scrapy import Request, Spider |
| 7 | + |
| 8 | +from apify.scrapy.utils import to_scrapy_request |
| 9 | + |
| 10 | + |
class DummySpider(Spider):
    """Minimal Scrapy spider used only to satisfy the `spider` argument of `to_scrapy_request`."""

    name = 'dummy_spider'
| 14 | + |
@pytest.fixture()
def spider() -> DummySpider:
    """Provide a fresh DummySpider instance for each test."""
    dummy = DummySpider()
    return dummy
| 20 | + |
@dataclass(frozen=True)
class TestCase:
    """A single `to_scrapy_request` scenario: the input plus its expected outcome.

    In the scenarios below, valid inputs set `expected_scrapy_request` and leave
    `expected_exception` as None; invalid inputs do the opposite.
    """

    # This is test *data*, not a test class: the name starts with "Test", so
    # without this flag pytest tries to collect it and emits a
    # PytestCollectionWarning (classes with an __init__ cannot be collected).
    __test__ = False

    # Raw request dict in the shape stored in the Apify Request Queue.
    apify_request: dict
    # The Scrapy Request the conversion should produce (None when an error is expected).
    expected_scrapy_request: Request | None
    # Exception type the conversion should raise (None on the happy path).
    expected_exception: type[Exception] | None
| 27 | + |
# Table-driven cases for to_scrapy_request: each entry pairs an Apify request
# dict with either the Scrapy Request it should convert to, or the exception
# type the conversion should raise.
test_cases = [
    # Valid Apify request without 'userData' (directly from Request Queue)
    TestCase(
        apify_request={'url': 'https://apify.com/', 'method': 'GET', 'uniqueKey': 'https://apify.com/', 'id': 'fvwscO2UJLdr10B'},
        expected_scrapy_request=Request(
            url='https://apify.com/',
            method='GET',
            meta={'apify_request_id': 'fvwscO2UJLdr10B', 'apify_request_unique_key': 'https://apify.com/'},
        ),
        expected_exception=None,
    ),
    # Valid Apify request with 'userData' (reconstruction from encoded Scrapy request)
    # NOTE: 'scrapy_request' below is a base64-encoded pickled Scrapy Request; it must
    # stay byte-identical for the round-trip decode to succeed.
    TestCase(
        apify_request={
            'url': 'https://apify.com',
            'method': 'GET',
            'id': 'fvwscO2UJLdr10B',
            'uniqueKey': 'https://apify.com',
            'userData': {
                'scrapy_request': 'gASVJgIAAAAAAAB9lCiMA3VybJSMEWh0dHBzOi8vYXBpZnkuY29tlIwIY2FsbGJhY2uUTowHZXJy\nYmFja5ROjAdoZWFkZXJzlH2UKEMGQWNjZXB0lF2UQz90ZXh0L2h0bWwsYXBwbGljYXRpb24veGh0\nbWwreG1sLGFwcGxpY2F0aW9uL3htbDtxPTAuOSwqLyo7cT0wLjiUYUMPQWNjZXB0LUxhbmd1YWdl\nlF2UQwJlbpRhQwpVc2VyLUFnZW50lF2UQyNTY3JhcHkvMi4xMS4wICgraHR0cHM6Ly9zY3JhcHku\nb3JnKZRhQw9BY2NlcHQtRW5jb2RpbmeUXZRDDWd6aXAsIGRlZmxhdGWUYXWMBm1ldGhvZJSMA0dF\nVJSMBGJvZHmUQwCUjAdjb29raWVzlH2UjARtZXRhlH2UKIwQYXBpZnlfcmVxdWVzdF9pZJSMD2Z2\nd3NjTzJVSkxkcjEwQpSMGGFwaWZ5X3JlcXVlc3RfdW5pcXVlX2tleZSMEWh0dHBzOi8vYXBpZnku\nY29tlIwQZG93bmxvYWRfdGltZW91dJRHQGaAAAAAAACMDWRvd25sb2FkX3Nsb3SUjAlhcGlmeS5j\nb22UjBBkb3dubG9hZF9sYXRlbmN5lEc/tYIIAAAAAHWMCGVuY29kaW5nlIwFdXRmLTiUjAhwcmlv\ncml0eZRLAIwLZG9udF9maWx0ZXKUiYwFZmxhZ3OUXZSMCWNiX2t3YXJnc5R9lHUu\n',  # noqa: E501
            },
        },
        expected_scrapy_request=Request(
            url='https://apify.com',
            method='GET',
            meta={'apify_request_id': 'fvwscO2UJLdr10B', 'apify_request_unique_key': 'https://apify.com'},
        ),
        expected_exception=None,
    ),
    # Invalid Apify request (missing 'url' key)
    TestCase(
        apify_request={'method': 'GET', 'id': 'invalid123', 'uniqueKey': 'https://invalid.com'},
        expected_scrapy_request=None,
        expected_exception=ValueError,
    ),
    # Invalid Apify request (missing 'id' key)
    TestCase(
        apify_request={'url': 'https://example.com', 'method': 'GET', 'uniqueKey': 'invalid123'},
        expected_scrapy_request=None,
        expected_exception=ValueError,
    ),
    # Invalid Apify request (non-string 'userData.scrapy_request')
    TestCase(
        apify_request={
            'url': 'https://example.com',
            'method': 'GET',
            'id': 'invalid123',
            'uniqueKey': 'https://example.com',
            'userData': {'scrapy_request': 123},
        },
        expected_scrapy_request=None,
        expected_exception=TypeError,
    ),
]
| 83 | + |
@pytest.mark.parametrize('tc', test_cases)
def test__to_scrapy_request(spider: Spider, tc: TestCase) -> None:
    """Check that to_scrapy_request builds the expected Request or raises the expected error."""
    if tc.expected_exception:
        with pytest.raises(tc.expected_exception):
            to_scrapy_request(tc.apify_request, spider)
        return

    converted = to_scrapy_request(tc.apify_request, spider)
    expected = tc.expected_scrapy_request

    assert isinstance(converted, Request)
    assert expected is not None
    assert converted.url == expected.url
    assert converted.method == expected.method

    # The Apify request identifiers must be carried over into the Scrapy meta.
    assert converted.meta.get('apify_request_id') == expected.meta.get('apify_request_id')
    assert converted.meta.get('apify_request_unique_key') == expected.meta.get('apify_request_unique_key')

    # When the Apify request embeds an encoded Scrapy request, the reconstructed
    # request's meta must be refreshed with the queue-side id and unique key.
    if 'scrapy_request' in tc.apify_request.get('userData', {}):
        assert converted.meta['apify_request_id'] == tc.apify_request['id']
        assert converted.meta['apify_request_unique_key'] == tc.apify_request['uniqueKey']
0 commit comments