"""
Various encoding handling related tests.

"""
import pytest
import responses
from charset_normalizer.constant import TOO_SMALL_SEQUENCE

from httpie.cli.constants import PRETTY_MAP
from httpie.encoding import UTF8

from .utils import http, HTTP_OK, DUMMY_URL, MockEnvironment
from .fixtures import UNICODE


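# Shared (charset, text) samples for the charset tests below. Each text is
# longer than charset_normalizer's TOO_SMALL_SEQUENCE (so detection can
# meaningfully be attempted), and the non-UTF-8 samples do not decode cleanly
# as UTF-8; test_charset_text_pairs() verifies both properties.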
CHARSET_TEXT_PAIRS = [
    ('big5', '卷首卷首卷首卷首卷卷首卷首卷首卷首卷首卷首卷首卷首卷首卷首卷首卷首卷首'),
    ('windows-1250', 'Všichni lidé jsou si rovni. Všichni lidé jsou si rovni.'),
    (UTF8, 'Všichni lidé jsou si rovni. Všichni lidé jsou si rovni.'),
]


def test_charset_text_pairs():
    # Verify our test data is legit.
    for charset, text in CHARSET_TEXT_PAIRS:
        assert len(text) > TOO_SMALL_SEQUENCE
        if charset != UTF8:
            # Non-UTF-8 samples must not be decodable as UTF-8.
            with pytest.raises(UnicodeDecodeError):
                text.encode(charset).decode(UTF8)


def test_unicode_headers(httpbin):
    # httpbin doesn't interpret UTF-8 headers
    r = http(httpbin.url + '/headers', f'Test:{UNICODE}')
    assert HTTP_OK in r


def test_unicode_headers_verbose(httpbin):
    # httpbin doesn't interpret UTF-8 headers
    r = http('--verbose', httpbin.url + '/headers', f'Test:{UNICODE}')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_raw(httpbin):
    r = http('--raw', f'test {UNICODE}', 'POST', httpbin.url + '/post')
    assert HTTP_OK in r
    assert r.json['data'] == f'test {UNICODE}'


def test_unicode_raw_verbose(httpbin):
    r = http('--verbose', '--raw', f'test {UNICODE}',
             'POST', httpbin.url + '/post')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_form_item(httpbin):
    r = http('--form', 'POST', httpbin.url + '/post', f'test={UNICODE}')
    assert HTTP_OK in r
    assert r.json['form'] == {'test': UNICODE}


def test_unicode_form_item_verbose(httpbin):
    r = http('--verbose', '--form',
             'POST', httpbin.url + '/post', f'test={UNICODE}')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_json_item(httpbin):
    r = http('--json', 'POST', httpbin.url + '/post', f'test={UNICODE}')
    assert HTTP_OK in r
    assert r.json['json'] == {'test': UNICODE}


def test_unicode_json_item_verbose(httpbin):
    r = http('--verbose', '--json',
             'POST', httpbin.url + '/post', f'test={UNICODE}')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_raw_json_item(httpbin):
    r = http('--json', 'POST', httpbin.url + '/post',
             f'test:={{ "{UNICODE}" : [ "{UNICODE}" ] }}')
    assert HTTP_OK in r
    assert r.json['json'] == {'test': {UNICODE: [UNICODE]}}


def test_unicode_raw_json_item_verbose(httpbin):
    r = http('--verbose', '--json', 'POST', httpbin.url + '/post',
             f'test:={{ "{UNICODE}" : [ "{UNICODE}" ] }}')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_url_query_arg_item(httpbin):
    r = http(httpbin.url + '/get', f'test=={UNICODE}')
    assert HTTP_OK in r
    assert r.json['args'] == {'test': UNICODE}, r


def test_unicode_url_query_arg_item_verbose(httpbin):
    r = http('--verbose', httpbin.url + '/get', f'test=={UNICODE}')
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_url(httpbin):
    r = http(f'{httpbin.url}/get?test={UNICODE}')
    assert HTTP_OK in r
    assert r.json['args'] == {'test': UNICODE}


def test_unicode_url_verbose(httpbin):
    r = http('--verbose', f'{httpbin.url}/get?test={UNICODE}')
    assert HTTP_OK in r
    assert r.json['args'] == {'test': UNICODE}


def test_unicode_basic_auth(httpbin):
    # it doesn't really authenticate us because httpbin
    # doesn't interpret the UTF-8-encoded auth
    http('--verbose', '--auth', f'test:{UNICODE}',
         f'{httpbin.url}/basic-auth/test/{UNICODE}')


def test_unicode_digest_auth(httpbin):
    # it doesn't really authenticate us because httpbin
    # doesn't interpret the UTF-8-encoded auth
    http('--auth-type=digest',
         '--auth', f'test:{UNICODE}',
         f'{httpbin.url}/digest-auth/auth/test/{UNICODE}')


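# The tests below mock the server side with the `responses` library, so the
# exact response body bytes and the Content-Type charset are fully controlled.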
@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
@responses.activate
def test_terminal_output_response_charset_detection(charset, text):
    responses.add(
        method=responses.POST,
        url=DUMMY_URL,
        body=text.encode(charset),
        content_type='text/plain',
    )
    r = http('--form', 'POST', DUMMY_URL)
    assert text in r


@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
@responses.activate
def test_terminal_output_response_content_type_charset(charset, text):
    responses.add(
        method=responses.POST,
        url=DUMMY_URL,
        body=text.encode(charset),
        content_type=f'text/plain; charset={charset}',
    )
    r = http('--form', 'POST', DUMMY_URL)
    assert text in r


@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
@pytest.mark.parametrize('pretty', PRETTY_MAP.keys())
@responses.activate
def test_terminal_output_response_content_type_charset_with_stream(charset, text, pretty):
    responses.add(
        method=responses.GET,
        url=DUMMY_URL,
        body=f'<?xml version="1.0"?>\n<c>{text}</c>'.encode(charset),
        stream=True,
        content_type=f'text/xml; charset={charset.upper()}',
    )
    r = http('--pretty', pretty, '--stream', DUMMY_URL)
    assert text in r


@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
@pytest.mark.parametrize('pretty', PRETTY_MAP.keys())
@responses.activate
def test_terminal_output_response_charset_override(charset, text, pretty):
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=text.encode(charset),
        content_type='text/plain; charset=utf-8',
    )
    args = ['--pretty', pretty, DUMMY_URL]
    if charset != UTF8:
        # Content-Type charset wrong -> garbled text expected.
        r = http(*args)
        assert text not in r
    r = http('--response-charset', charset, *args)
    assert text in r


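# The request-side tests below feed encoded bytes through a mocked stdin and
# use --offline, so only HTTPie's rendering of the request is checked — no
# server is involved.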
@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
def test_terminal_output_request_content_type_charset(charset, text):
    r = http(
        '--offline',
        DUMMY_URL,
        f'Content-Type: text/plain; charset={charset.upper()}',
        env=MockEnvironment(
            stdin=text.encode(charset),
            stdin_isatty=False,
        ),
    )
    assert text in r


@pytest.mark.parametrize('charset, text', CHARSET_TEXT_PAIRS)
def test_terminal_output_request_charset_detection(charset, text):
    r = http(
        '--offline',
        DUMMY_URL,
        'Content-Type: text/plain',
        env=MockEnvironment(
            stdin=text.encode(charset),
            stdin_isatty=False,
        ),
    )
    assert text in r