import base64
import urllib.parse
from datetime import date
from datetime import datetime
from datetime import timedelta
from datetime import timezone

import pytest

from werkzeug import datastructures
from werkzeug import http
from werkzeug._internal import _wsgi_encoding_dance
from werkzeug.datastructures import Authorization
from werkzeug.datastructures import WWWAuthenticate
from werkzeug.test import create_environ
def test_accept(self):
    """Accept header parsing keeps quality order and supports lookup."""
    a = http.parse_accept_header("en-us,ru;q=0.5")
    assert list(a.values()) == ["en-us", "ru"]
    assert a.best == "en-us"
    assert a.find("ru") == 1
    # index() is strict and raises for values not in the header
    pytest.raises(ValueError, a.index, "de")
    assert a.to_header() == "en-us,ru;q=0.5"
def test_accept_parameter_with_space(self):
    """A quoted parameter value containing a space survives parsing."""
    a = http.parse_accept_header('application/x-special; z="a b";q=0.5')
    assert a['application/x-special; z="a b"'] == 0.5
def test_mime_accept(self):
    """MIMEAccept matches exact types, wildcards, and parameters."""
    a = http.parse_accept_header(
        "text/xml,application/xml,"
        "application/xhtml+xml,"
        "application/foo;quiet=no; bar=baz;q=0.6,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    )
    pytest.raises(ValueError, lambda: a["missing"])
    assert a["image/png"] == 1
    assert a["text/plain"] == 0.8
    # unknown types fall back to the */* wildcard quality
    assert a["foo/bar"] == 0.5
    assert a["application/foo;quiet=no; bar=baz"] == 0.6
    assert a[a.find("foo/bar")] == ("*/*", 0.5)
def test_accept_matches(self):
    """best_match prefers the highest-quality acceptable offer."""
    a = http.parse_accept_header(
        "text/xml,application/xml,application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    )
    assert (
        a.best_match(["text/html", "application/xhtml+xml"])
        == "application/xhtml+xml"
    )
    assert a.best_match(["text/html"]) == "text/html"
    assert a.best_match(["foo/bar"]) is None
    assert a.best_match(["foo/bar", "bar/foo"], default="foo/bar") == "foo/bar"
    assert a.best_match(["application/xml", "text/xml"]) == "application/xml"
def test_accept_mime_specificity(self):
    """More specific media types win over wildcards of equal quality."""
    a = http.parse_accept_header(
        "text/*, text/html, text/html;level=1, */*", datastructures.MIMEAccept
    )
    assert a.best_match(["text/html; version=1", "text/html"]) == "text/html"
    assert a.best_match(["text/html", "text/html; level=1"]) == "text/html; level=1"
def test_charset_accept(self):
    """Charset lookup normalizes codec aliases; * matches anything."""
    a = http.parse_accept_header(
        "ISO-8859-1,utf-8;q=0.7,*;q=0.7", datastructures.CharsetAccept
    )
    assert a["iso-8859-1"] == a["iso8859-1"]
    assert a["iso-8859-1"] == 1
    assert a["UTF8"] == 0.7
    # unknown charsets match the * entry
    assert a["ebcdic"] == 0.7
def test_language_accept(self):
    """Language tags match case-insensitively and with _/- variants."""
    a = http.parse_accept_header(
        "de-AT,de;q=0.8,en;q=0.5", datastructures.LanguageAccept
    )
    assert a.best == "de-AT"
    # NOTE(review): two assert lines were lost from this copy; restored
    # from the upstream suite — confirm against the original file.
    assert "de_AT" in a
    assert "en" in a
    assert a["de-at"] == 1
    assert a["en"] == 0.5
def test_set_header(self):
    """HeaderSet: case-insensitive membership, original order kept."""
    hs = http.parse_set_header('foo, Bar, "Blah baz", Hehe')
    assert "blah baz" in hs
    assert "foobar" not in hs
    assert "foo" in hs
    assert list(hs) == ["foo", "Bar", "Blah baz", "Hehe"]
    assert hs.to_header() == 'foo, Bar, "Blah baz", Hehe'
@pytest.mark.parametrize(
    ("value", "expect"),
    [
        ("a b, c", ["a b", "c"]),
        ('a b, "c, d"', ["a b", "c, d"]),
        ('"a\\"b", c', ['a"b', "c"]),
    ],
)
def test_list_header(self, value, expect):
    """List parsing honors quoted commas and escaped quotes."""
    assert http.parse_list_header(value) == expect
def test_dict_header(self):
    """Dict header values parse with quoted and unquoted items."""
    d = http.parse_dict_header('foo="bar baz", blah=42')
    assert d == {"foo": "bar baz", "blah": "42"}
def test_cache_control_header(self):
    """CacheControl parses directives and serializes mutations."""
    cc = http.parse_cache_control_header("max-age=0, no-cache")
    assert cc.max_age == 0
    assert cc.no_cache
    cc = http.parse_cache_control_header(
        'private, community="UCI"', None, datastructures.ResponseCacheControl
    )
    assert cc.private
    assert cc["community"] == "UCI"

    # NOTE(review): the mutation lines below were lost from this copy;
    # restored from the upstream suite — confirm against the original.
    c = datastructures.ResponseCacheControl()
    assert c.no_cache is None
    assert c.private is None
    c.no_cache = True
    assert c.no_cache == "*"
    c.private = True
    assert c.private == "*"
    del c.private
    assert c.private is None
    # max_age is an int, other types are converted
    c.max_age = 3.1
    assert c.max_age == 3
    del c.max_age
    c.s_maxage = 3.1
    assert c.s_maxage == 3
    del c.s_maxage
    assert c.to_header() == "no-cache"
def test_csp_header(self):
    """Content-Security-Policy parses directives; empty ones are None."""
    csp = http.parse_csp_header(
        "default-src 'self'; script-src 'unsafe-inline' *; img-src"
    )
    assert csp.default_src == "'self'"
    assert csp.script_src == "'unsafe-inline' *"
    # img-src has no value, so it is dropped
    assert csp.img_src is None
def test_authorization_header(self):
    """Basic and Digest Authorization headers parse into attributes."""
    a = Authorization.from_header("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
    assert a.type == "basic"
    assert a.username == "Aladdin"
    assert a.password == "open sesame"

    # non-ASCII credentials decode as UTF-8
    a = Authorization.from_header("Basic 0YDRg9GB0YHQutC40IE60JHRg9C60LLRiw==")
    assert a.type == "basic"
    assert a.username == "русскиЁ"
    assert a.password == "Буквы"

    a = Authorization.from_header("Basic 5pmu6YCa6K+dOuS4reaWhw==")
    assert a.type == "basic"
    assert a.username == "普通话"
    assert a.password == "中文"

    a = Authorization.from_header(
        'Digest username="Mufasa",'
        ' realm="testrealm@host.invalid",'
        ' nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093",'
        ' uri="/dir/index.html",'
        " qop=auth, nc=00000001,"
        ' cnonce="0a4f113b",'
        ' response="6629fae49393a05397450978507c4ef1",'
        ' opaque="5ccc069c403ebaf9f0171e9517f40e41"'
    )
    assert a.type == "digest"
    assert a.username == "Mufasa"
    assert a.realm == "testrealm@host.invalid"
    assert a.nonce == "dcd98b7102dd2f0e8b11d0f600bfb0c093"
    assert a.uri == "/dir/index.html"
    assert a.qop == "auth"
    assert a.nc == "00000001"
    assert a.cnonce == "0a4f113b"
    assert a.response == "6629fae49393a05397450978507c4ef1"
    assert a.opaque == "5ccc069c403ebaf9f0171e9517f40e41"

    # Digest without the optional qop/nc/cnonce parameters
    a = Authorization.from_header(
        'Digest username="Mufasa",'
        ' realm="testrealm@host.invalid",'
        ' nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093",'
        ' uri="/dir/index.html",'
        ' response="e257afa1414a3340d93d30955171dd0e",'
        ' opaque="5ccc069c403ebaf9f0171e9517f40e41"'
    )
    assert a.type == "digest"
    assert a.username == "Mufasa"
    assert a.realm == "testrealm@host.invalid"
    assert a.nonce == "dcd98b7102dd2f0e8b11d0f600bfb0c093"
    assert a.uri == "/dir/index.html"
    assert a.response == "e257afa1414a3340d93d30955171dd0e"
    assert a.opaque == "5ccc069c403ebaf9f0171e9517f40e41"

    assert Authorization.from_header("") is None
    assert Authorization.from_header(None) is None
    assert Authorization.from_header("foo").type == "foo"
def test_authorization_token_padding(self):
    """Token credentials ending in base64 padding round-trip intact."""
    # one trailing = of padding
    token = base64.b64encode(b"This has base64 padding").decode()
    a = Authorization.from_header(f"Token {token}")
    assert a.type == "token"
    assert a.token == token

    # two trailing = of padding
    token = base64.b64encode(b"This has base64 padding..").decode()
    a = Authorization.from_header(f"Token {token}")
    assert a.type == "token"
    assert a.token == token
def test_authorization_basic_incorrect_padding(self):
    """A Basic value that is not valid base64 yields None."""
    assert Authorization.from_header("Basic foo") is None
def test_bad_authorization_header_encoding(self):
    """If the base64 encoded bytes can't be decoded as UTF-8."""
    content = base64.b64encode(b"\xffser:pass").decode()
    assert Authorization.from_header(f"Basic {content}") is None
def test_authorization_eq(self):
    """Equality compares type and parameters, not construction path."""
    basic1 = Authorization.from_header("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
    basic2 = Authorization(
        "basic", {"username": "Aladdin", "password": "open sesame"}
    )
    assert basic1 == basic2
    bearer1 = Authorization.from_header("Bearer abc")
    bearer2 = Authorization("bearer", token="abc")
    assert bearer1 == bearer2
    assert basic1 != bearer1
    assert basic1 != object()
def test_www_authenticate_header(self):
    """WWW-Authenticate parses Basic and Digest challenges."""
    wa = WWWAuthenticate.from_header('Basic realm="WallyWorld"')
    assert wa.type == "basic"
    assert wa.realm == "WallyWorld"
    wa.realm = "Foo Bar"
    assert wa.to_header() == 'Basic realm="Foo Bar"'

    wa = WWWAuthenticate.from_header(
        'Digest realm="testrealm@host.com",'
        ' qop="auth,auth-int",'
        ' nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093",'
        ' opaque="5ccc069c403ebaf9f0171e9517f40e41"'
    )
    assert wa.type == "digest"
    assert wa.realm == "testrealm@host.com"
    assert wa.parameters["qop"] == "auth,auth-int"
    assert wa.nonce == "dcd98b7102dd2f0e8b11d0f600bfb0c093"
    assert wa.opaque == "5ccc069c403ebaf9f0171e9517f40e41"

    assert WWWAuthenticate.from_header("broken").type == "broken"
    assert WWWAuthenticate.from_header("") is None
def test_www_authenticate_token_padding(self):
    """Challenge tokens ending in base64 padding round-trip intact."""
    # one trailing = of padding
    token = base64.b64encode(b"This has base64 padding").decode()
    a = WWWAuthenticate.from_header(f"Token {token}")
    assert a.type == "token"
    assert a.token == token

    # two trailing = of padding
    token = base64.b64encode(b"This has base64 padding..").decode()
    a = WWWAuthenticate.from_header(f"Token {token}")
    assert a.type == "token"
    assert a.token == token
def test_www_authenticate_eq(self):
    """Equality compares type and parameters, not construction path."""
    basic1 = WWWAuthenticate.from_header("Basic realm=abc")
    basic2 = WWWAuthenticate("basic", {"realm": "abc"})
    assert basic1 == basic2
    token1 = WWWAuthenticate.from_header("Token abc")
    token2 = WWWAuthenticate("token", token="abc")
    assert token1 == token2
    assert basic1 != token1
    assert basic1 != object()
def test_etags(self):
    """ETag quoting, unquoting, and set membership semantics."""
    assert http.quote_etag("foo") == '"foo"'
    assert http.quote_etag("foo", True) == 'W/"foo"'
    assert http.unquote_etag('"foo"') == ("foo", False)
    assert http.unquote_etag('W/"foo"') == ("foo", True)
    es = http.parse_etags('"foo", "bar", W/"baz", blar')
    assert sorted(es) == ["bar", "blar", "foo"]
    # weak etags are not strong members but match contains_weak
    assert "baz" not in es
    assert es.contains_weak("baz")
    assert es.contains_raw('W/"baz"')
    assert es.contains_raw('"foo"')
    assert sorted(es.to_header().split(", ")) == [
        '"bar"',
        '"blar"',
        '"foo"',
        'W/"baz"',
    ]
def test_etags_nonzero(self):
    """A set holding only a weak etag is still truthy."""
    etags = http.parse_etags('W/"foo"')
    assert bool(etags)
    assert etags.contains_raw('W/"foo"')
def test_remove_entity_headers(self):
    """Entity headers are removed from both lists and Headers objects."""
    now = http.http_date()
    headers1 = [
        ("Date", now),
        ("Content-Type", "text/html"),
        ("Content-Length", "0"),
    ]
    headers2 = datastructures.Headers(headers1)

    http.remove_entity_headers(headers1)
    assert headers1 == [("Date", now)]

    http.remove_entity_headers(headers2)
    assert headers2 == datastructures.Headers([("Date", now)])
def test_remove_hop_by_hop_headers(self):
    """Hop-by-hop headers are removed from lists and Headers objects."""
    headers1 = [("Connection", "closed"), ("Foo", "bar"), ("Keep-Alive", "wtf")]
    headers2 = datastructures.Headers(headers1)

    http.remove_hop_by_hop_headers(headers1)
    assert headers1 == [("Foo", "bar")]

    http.remove_hop_by_hop_headers(headers2)
    assert headers2 == datastructures.Headers([("Foo", "bar")])
# NOTE(review): the parametrize list for this test was lost from this
# copy; the entries below are reconstructed — confirm against upstream.
@pytest.mark.parametrize(
    ("value", "expect"),
    [
        ("", ""),
        (";a=b", ""),
        ("v", "v"),
        ("v;", "v"),
    ],
)
def test_parse_options_header_empty(self, value, expect):
    """Empty or valueless option headers parse to no options."""
    assert http.parse_options_header(value) == (expect, {})
@pytest.mark.parametrize(
    ("value", "expect"),
    [
        ("v;a=b;c=d;", {"a": "b", "c": "d"}),
        ("v; ; a=b ; ", {"a": "b"}),
        # NOTE(review): a few entries here were lost from this copy and
        # reconstructed — confirm against the upstream suite.
        ("v;a", {}),
        ("v;a=", {}),
        ('v;a="b"', {"a": "b"}),
        ('v;a="\';\'";b="µ";', {"a": "';'", "b": "µ"}),
        ('v;a="b c"', {"a": "b c"}),
        # HTTP headers use \" for internal "
        ('v;a="b\\"c";d=e', {"a": 'b"c', "d": "e"}),
        # HTTP headers use \\ for internal \
        ('v;a="c:\\\\"', {"a": "c:\\"}),
        # Part with invalid trailing slash is discarded.
        ('v;a="b\\"', {}),
        ('v;a="b\\\\\\"c"', {"a": 'b\\"c'}),
        # multipart form data uses %22 for internal "
        ('v;a="b%22c"', {"a": 'b"c'}),
        ("v;a*=b", {"a": "b"}),
        ("v;a*=ASCII'en'b", {"a": "b"}),
        ("v;a*=US-ASCII''%62", {"a": "b"}),
        ("v;a*=UTF-8''%C2%B5", {"a": "µ"}),
        ("v;a*=US-ASCII''%C2%B5", {"a": "��"}),
        ("v;a*=BAD''%62", {"a": "%62"}),
        ("v;a*=UTF-8'''%F0%9F%90%8D'.txt", {"a": "'🐍'.txt"}),
        ('v;a="🐍.txt"', {"a": "🐍.txt"}),
        ("v;a*0=b;a*1=c;d=e", {"a": "bc", "d": "e"}),
        ("v;a*0*=b", {"a": "b"}),
        ("v;a*0*=UTF-8''b;a*1=c;a*2*=%C2%B5", {"a": "bcµ"}),
        # Long invalid quoted string with trailing slashes does not freeze.
        ('v;a="' + "\\" * 400, {}),
    ],
)
def test_parse_options_header(self, value, expect) -> None:
    """Option headers: quoting, escaping, and RFC 2231 encoded params."""
    assert http.parse_options_header(value) == ("v", expect)
def test_parse_options_header_broken_values(self):
    """Malformed inputs degrade gracefully instead of raising."""
    assert http.parse_options_header(" ") == ("", {})
    assert http.parse_options_header(" , ") == (",", {})
    assert http.parse_options_header(" ; ") == ("", {})
    assert http.parse_options_header(" ,; ") == (",", {})
    assert http.parse_options_header(" , a ") == (", a", {})
    assert http.parse_options_header(" ; a ") == ("", {})
def test_parse_options_header_case_insensitive(self):
    """Option names are lowercased; values keep their case."""
    _, options = http.parse_options_header(r'something; fileName="File.ext"')
    assert options["filename"] == "File.ext"
def test_dump_options_header(self):
    """None-valued options are omitted when dumping."""
    assert http.dump_options_header("foo", {"bar": 42}) == "foo; bar=42"
    assert "fizz" not in http.dump_options_header("foo", {"bar": 42, "fizz": None})
def test_dump_header(self):
    """Lists join with commas; dicts become key=value pairs."""
    assert http.dump_header([1, 2, 3]) == "1, 2, 3"
    assert http.dump_header({"foo": "bar"}) == "foo=bar"
    assert http.dump_header({"foo*": "UTF-8''bar"}) == "foo*=UTF-8''bar"
def test_is_resource_modified(self):
    """Conditional request evaluation against etag and date headers."""
    env = create_environ()

    # any method is allowed
    env["REQUEST_METHOD"] = "POST"
    assert http.is_resource_modified(env, etag="testing")
    env["REQUEST_METHOD"] = "GET"

    # providing both data and etag is ambiguous and rejected
    pytest.raises(TypeError, http.is_resource_modified, env, data="42", etag="23")
    env["HTTP_IF_NONE_MATCH"] = http.generate_etag(b"awesome")
    assert not http.is_resource_modified(env, data=b"awesome")

    env["HTTP_IF_MODIFIED_SINCE"] = http.http_date(datetime(2008, 1, 1, 12, 30))
    assert not http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 12, 00)
    )
    assert http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 13, 00)
    )
def test_is_resource_modified_for_range_requests(self):
    """If-Range only applies when a Range header is present."""
    env = create_environ()

    env["HTTP_IF_MODIFIED_SINCE"] = http.http_date(datetime(2008, 1, 1, 12, 30))
    env["HTTP_IF_RANGE"] = http.generate_etag(b"awesome_if_range")
    # Range header not present, so If-Range should be ignored
    assert not http.is_resource_modified(
        env,
        data=b"not_the_same",
        ignore_if_range=False,
        last_modified=datetime(2008, 1, 1, 12, 30),
    )

    env["HTTP_RANGE"] = ""
    assert not http.is_resource_modified(
        env, data=b"awesome_if_range", ignore_if_range=False
    )
    assert http.is_resource_modified(
        env, data=b"not_the_same", ignore_if_range=False
    )

    env["HTTP_IF_RANGE"] = http.http_date(datetime(2008, 1, 1, 13, 30))
    assert http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 14, 00), ignore_if_range=False
    )
    assert not http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=False
    )
    assert http.is_resource_modified(
        env, last_modified=datetime(2008, 1, 1, 13, 30), ignore_if_range=True
    )
def test_parse_cookie(self):
    """Cookie parsing tolerates broken pairs and keeps the last dupe."""
    cookies = http.parse_cookie(
        "dismiss-top=6; CP=null*; PHPSESSID=0a539d42abc001cdc762809248d4beed;"
        'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d;'
        "==__Host-eq=bad;__Host-eq=good;"
    )
    # NOTE(review): most of this expected dict was lost from this copy;
    # restored from the upstream suite — confirm against the original.
    assert cookies.to_dict() == {
        "CP": "null*",
        "PHPSESSID": "0a539d42abc001cdc762809248d4beed",
        "a": "42",
        "dismiss-top": "6",
        "b": '";',
        "fo234{": "bar",
        "blub": "Blah",
        "__Secure-c": "d",
        "__Host-eq": "good",
    }
def test_dump_cookie(self):
    """Cookie serialization quotes values and emits attributes."""
    rv = http.dump_cookie(
        "foo", "bar baz blub", 360, httponly=True, sync_expires=False
    )
    assert set(rv.split("; ")) == {
        "HttpOnly",
        "Max-Age=360",
        "Path=/",
        'foo="bar baz blub"',
    }
    assert http.dump_cookie("key", "xxx/") == "key=xxx/; Path=/"
    assert http.dump_cookie("key", "xxx=", path=None) == "key=xxx="
def test_bad_cookies(self):
    """Duplicate keys collect into lists; flat access gives the first."""
    cookies = http.parse_cookie(
        "first=IamTheFirst ; a=1; oops ; a=2 ;second = andMeTwo;"
    )
    expect = {
        "first": ["IamTheFirst"],
        "a": ["1", "2"],
        "oops": [""],
        "second": ["andMeTwo"],
    }
    assert cookies.to_dict(flat=False) == expect
    assert cookies["a"] == "1"
    assert cookies.getlist("a") == ["1", "2"]
def test_empty_keys_are_ignored(self):
    """Stray semicolons produce no empty-key entries."""
    cookies = http.parse_cookie("spam=ham; duck=mallard; ; ")
    expect = {"spam": "ham", "duck": "mallard"}
    assert cookies.to_dict() == expect
def test_cookie_quoting(self):
    """Values round-trip through dump/parse, including octal escapes."""
    val = http.dump_cookie("foo", "?foo")
    assert val == "foo=?foo; Path=/"
    assert http.parse_cookie(val)["foo"] == "?foo"
    assert http.parse_cookie(r'foo="foo\054bar"')["foo"] == "foo,bar"
def test_parse_set_cookie_directive(self):
    """Directive-style pairs in a cookie string parse as plain keys."""
    val = 'foo="?foo"; version="0.1";'
    assert http.parse_cookie(val).to_dict() == {"foo": "?foo", "version": "0.1"}
def test_cookie_domain_resolving(self):
    """Non-ASCII domains are IDNA-encoded when dumping."""
    val = http.dump_cookie("foo", "bar", domain="\N{SNOWMAN}.com")
    assert val == "foo=bar; Domain=xn--n3h.com; Path=/"
def test_cookie_unicode_dumping(self):
    """Unicode values dump as octal escapes and parse back intact."""
    val = http.dump_cookie("foo", "\N{SNOWMAN}")
    h = datastructures.Headers()
    h.add("Set-Cookie", val)
    assert h["Set-Cookie"] == 'foo="\\342\\230\\203"; Path=/'

    cookies = http.parse_cookie(h["Set-Cookie"])
    assert cookies["foo"] == "\N{SNOWMAN}"
def test_cookie_unicode_keys(self):
    """Unicode cookie names round-trip (against spec, but tolerated)."""
    # Yes, this is technically against the spec but happens
    val = http.dump_cookie("fö", "fö")
    assert val == _wsgi_encoding_dance('fö="f\\303\\266"; Path=/')
    cookies = http.parse_cookie(val)
    assert cookies["fö"] == "fö"
def test_cookie_unicode_parsing(self):
    """Raw UTF-8 cookie bytes parse to the original text."""
    # This is submitted by Firefox if you set a Unicode cookie.
    cookies = http.parse_cookie("fö=fö")
    assert cookies["fö"] == "fö"
def test_cookie_domain_encoding(self):
    """IDNA encoding applies only to non-ASCII domains."""
    val = http.dump_cookie("foo", "bar", domain="\N{SNOWMAN}.com")
    assert val == "foo=bar; Domain=xn--n3h.com; Path=/"

    val = http.dump_cookie("foo", "bar", domain="foo.com")
    assert val == "foo=bar; Domain=foo.com; Path=/"
def test_cookie_maxsize(self):
    """Oversized cookies warn; the default limit is 4093 bytes."""
    val = http.dump_cookie("foo", "bar" * 1360 + "b")
    assert len(val) == 4093

    with pytest.warns(UserWarning, match="cookie is too large"):
        http.dump_cookie("foo", "bar" * 1360 + "ba")

    with pytest.warns(UserWarning, match="the limit is 512 bytes"):
        http.dump_cookie("foo", "w" * 501, max_size=512)
@pytest.mark.parametrize(
    ("samesite", "expected"),
    [
        ("strict", "foo=bar; SameSite=Strict"),
        ("lax", "foo=bar; SameSite=Lax"),
        ("none", "foo=bar; SameSite=None"),
        (None, "foo=bar"),
    ],
)
def test_cookie_samesite_attribute(self, samesite, expected):
    """SameSite values are title-cased; None omits the attribute."""
    value = http.dump_cookie("foo", "bar", samesite=samesite, path=None)
    assert value == expected
def test_cookie_samesite_invalid(self):
    """An unrecognized SameSite value is rejected."""
    with pytest.raises(ValueError):
        http.dump_cookie("foo", "bar", samesite="invalid")
def test_cookie_partitioned(self):
    """Partitioned cookies include the Partitioned attribute."""
    value = http.dump_cookie("foo", "bar", partitioned=True, secure=True)
    assert value == "foo=bar; Secure; Path=/; Partitioned"
def test_cookie_partitioned_sets_secure(self):
    """Partitioned implies Secure even when secure=False is passed."""
    value = http.dump_cookie("foo", "bar", partitioned=True, secure=False)
    assert value == "foo=bar; Secure; Path=/; Partitioned"
def test_if_range_parsing(self):
    """If-Range holds either an etag or a date, never both."""
    rv = http.parse_if_range_header('"Test"')
    assert rv.etag == "Test"
    assert rv.date is None
    assert rv.to_header() == '"Test"'

    # weak information is dropped
    rv = http.parse_if_range_header('W/"Test"')
    assert rv.etag == "Test"
    assert rv.date is None
    assert rv.to_header() == '"Test"'

    # broken etags are supported too
    rv = http.parse_if_range_header("bullshit")
    assert rv.etag == "bullshit"
    assert rv.date is None
    assert rv.to_header() == '"bullshit"'

    rv = http.parse_if_range_header("Thu, 01 Jan 1970 00:00:00 GMT")
    assert rv.etag is None
    assert rv.date == datetime(1970, 1, 1, tzinfo=timezone.utc)
    assert rv.to_header() == "Thu, 01 Jan 1970 00:00:00 GMT"

    for x in "", None:
        rv = http.parse_if_range_header(x)
        assert rv.etag is None
        assert rv.date is None
        assert rv.to_header() == ""
def test_range_parsing(self):
    """Range headers parse to half-open ranges; invalid forms are None."""
    rv = http.parse_range_header("bytes=52")
    assert rv is None

    rv = http.parse_range_header("bytes=52-")
    assert rv.units == "bytes"
    assert rv.ranges == [(52, None)]
    assert rv.to_header() == "bytes=52-"

    rv = http.parse_range_header("bytes=52-99")
    assert rv.units == "bytes"
    assert rv.ranges == [(52, 100)]
    assert rv.to_header() == "bytes=52-99"

    rv = http.parse_range_header("bytes=52-99,-1000")
    assert rv.units == "bytes"
    assert rv.ranges == [(52, 100), (-1000, None)]
    assert rv.to_header() == "bytes=52-99,-1000"

    # whitespace is tolerated
    rv = http.parse_range_header("bytes = 1 - 100")
    assert rv.units == "bytes"
    assert rv.ranges == [(1, 101)]
    assert rv.to_header() == "bytes=1-100"

    # units are lowercased
    rv = http.parse_range_header("AWesomes=0-999")
    assert rv.units == "awesomes"
    assert rv.ranges == [(0, 1000)]
    assert rv.to_header() == "awesomes=0-999"

    rv = http.parse_range_header("bytes=-")
    assert rv is None

    rv = http.parse_range_header("bytes=bad")
    assert rv is None

    rv = http.parse_range_header("bytes=bad-1")
    assert rv is None

    rv = http.parse_range_header("bytes=-bad")
    assert rv is None

    rv = http.parse_range_header("bytes=52-99, bad")
    assert rv is None
def test_content_range_parsing(self):
    """Content-Range parses start/stop/length; invalid forms are None."""
    rv = http.parse_content_range_header("bytes 0-98/*")
    assert rv.units == "bytes"
    assert rv.start == 0
    assert rv.stop == 99
    assert rv.length is None
    assert rv.to_header() == "bytes 0-98/*"

    rv = http.parse_content_range_header("bytes 0-98/*asdfsa")
    assert rv is None

    rv = http.parse_content_range_header("bytes */-1")
    assert rv is None

    rv = http.parse_content_range_header("bytes 0-99/100")
    assert rv.to_header() == "bytes 0-99/100"
    # clearing the range serializes as an unsatisfied */length form
    rv.start = None
    rv.stop = None
    assert rv.units == "bytes"
    assert rv.to_header() == "bytes */100"

    rv = http.parse_content_range_header("bytes */100")
    assert rv.start is None
    assert rv.stop is None
    assert rv.length == 100
    assert rv.units == "bytes"
def test_best_match_works(self):
    """Regression test: a malformed entry must not break best_match."""
    rv = http.parse_accept_header(
        "foo=,application/xml,application/xhtml+xml,"
        "text/html;q=0.9,text/plain;q=0.8,"
        "image/png,*/*;q=0.5",
        datastructures.MIMEAccept,
    ).best_match(["foo/bar"])
    assert rv == "foo/bar"
@pytest.mark.parametrize(
    "value",
    [
        "Basic V2Vya3pldWc6V2VrcnpldWc=",
        'Digest username=Mufasa, realm="testrealm@host.invalid",'
        ' nonce=dcd98b7102dd2f0e8b11d0f600bfb0c093, uri="/dir/index.html", qop=auth,'
        " nc=00000001, cnonce=0a4f113b, response=6629fae49393a05397450978507c4ef1,"
        " opaque=5ccc069c403ebaf9f0171e9517f40e41",
    ],
)
def test_authorization_to_header(value: str) -> None:
    """Parsed Authorization headers serialize back byte-for-byte."""
    parsed = Authorization.from_header(value)
    assert parsed is not None
    assert parsed.to_header() == value
@pytest.mark.parametrize(
    ("value", "expect"),
    [
        (
            "Sun, 06 Nov 1994 08:49:37 GMT ",
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        ),
        (
            "Sunday, 06-Nov-94 08:49:37 GMT",
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        ),
        (
            " Sun Nov 6 08:49:37 1994",
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        ),
        # four-digit year well before the epoch
        (
            " Sun 02 Feb 1343 08:49:37 GMT",
            datetime(1343, 2, 2, 8, 49, 37, tzinfo=timezone.utc),
        ),
        (
            "Thu, 01 Jan 1970 00:00:00 GMT",
            datetime(1970, 1, 1, tzinfo=timezone.utc),
        ),
        ("Thu, 33 Jan 1970 00:00:00 GMT", None),
    ],
)
def test_parse_date(value, expect):
    """HTTP date parsing accepts RFC 1123/850/asctime; bad dates → None."""
    assert http.parse_date(value) == expect
@pytest.mark.parametrize(
    ("value", "expect"),
    [
        (
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
            "Sun, 06 Nov 1994 08:49:37 GMT",
        ),
        # aware datetimes are converted to GMT
        (
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone(timedelta(hours=-8))),
            "Sun, 06 Nov 1994 16:49:37 GMT",
        ),
        (datetime(1994, 11, 6, 8, 49, 37), "Sun, 06 Nov 1994 08:49:37 GMT"),
        (0, "Thu, 01 Jan 1970 00:00:00 GMT"),
        (datetime(1970, 1, 1), "Thu, 01 Jan 1970 00:00:00 GMT"),
        (datetime(1, 1, 1), "Mon, 01 Jan 0001 00:00:00 GMT"),
        (datetime(999, 1, 1), "Tue, 01 Jan 0999 00:00:00 GMT"),
        (datetime(1000, 1, 1), "Wed, 01 Jan 1000 00:00:00 GMT"),
        (datetime(2020, 1, 1), "Wed, 01 Jan 2020 00:00:00 GMT"),
        (date(2020, 1, 1), "Wed, 01 Jan 2020 00:00:00 GMT"),
    ],
)
def test_http_date(value, expect):
    """http_date formats datetimes, dates, and timestamps as RFC 1123."""
    assert http.http_date(value) == expect
@pytest.mark.parametrize("value", [".5", "+0.5", "0.5_1", "🯰.🯵"])
def test_accept_invalid_float(value):
    """Entries with invalid q values are dropped, not raised on."""
    quoted = urllib.parse.quote(value)

    if quoted == value:
        q = f"q={value}"
    else:
        # non-ASCII q values go through RFC 2231 extended syntax
        q = f"q*=UTF-8''{value}"

    a = http.parse_accept_header(f"en,jp;{q}")
    assert list(a.values()) == ["en"]
def test_accept_valid_int_one_zero():
    """Integer q values compare equal to their float spellings."""
    assert http.parse_accept_header("en;q=1") == http.parse_accept_header("en;q=1.0")
    assert http.parse_accept_header("en;q=0") == http.parse_accept_header("en;q=0.0")
    assert http.parse_accept_header("en;q=5") == http.parse_accept_header("en;q=5.0")
@pytest.mark.parametrize("value", ["🯱🯲🯳", "+1-", "1-1_23"])
def test_range_invalid_int(value):
    """Non-ASCII digits and malformed ints invalidate the whole header."""
    assert http.parse_range_header(value) is None
@pytest.mark.parametrize("value", ["*/🯱🯲🯳", "1-+2/3", "1_23-125/*"])
def test_content_range_invalid_int(value):
    """Non-ASCII digits and malformed ints invalidate Content-Range."""
    assert http.parse_content_range_header(f"bytes {value}") is None