diff --git "a/swe-bench-verified/psf#requests#39d0fdd9096f7dceccbc8f82e1eda7dd64717a8e.graph.json" "b/swe-bench-verified/psf#requests#39d0fdd9096f7dceccbc8f82e1eda7dd64717a8e.graph.json" new file mode 100644--- /dev/null +++ "b/swe-bench-verified/psf#requests#39d0fdd9096f7dceccbc8f82e1eda7dd64717a8e.graph.json" @@ -0,0 +1 @@ +{"edges":[{"edgeType":"calls","source":557,"target":602},{"edgeType":"contains","source":1216,"target":1217},{"edgeType":"contains","source":385,"target":816},{"edgeType":"contains","source":116,"target":170},{"edgeType":"contains","source":115,"target":201},{"edgeType":"calls","source":654,"target":502},{"edgeType":"calls","source":691,"target":412},{"edgeType":"calls","source":1630,"target":372},{"edgeType":"contains","source":225,"target":227},{"edgeType":"calls","source":1233,"target":1315},{"edgeType":"imports","source":32,"target":209},{"edgeType":"contains","source":942,"target":990},{"edgeType":"contains","source":1106,"target":1191},{"edgeType":"contains","source":249,"target":275},{"edgeType":"calls","source":1345,"target":1014},{"edgeType":"calls","source":1630,"target":371},{"edgeType":"contains","source":476,"target":1166},{"edgeType":"imports","source":916,"target":287},{"edgeType":"contains","source":76,"target":89},{"edgeType":"calls","source":996,"target":999},{"edgeType":"calls","source":750,"target":433},{"edgeType":"calls","source":756,"target":247},{"edgeType":"calls","source":654,"target":501},{"edgeType":"calls","source":815,"target":795},{"edgeType":"contains","source":116,"target":169},{"edgeType":"contains","source":115,"target":200},{"edgeType":"calls","source":691,"target":411},{"edgeType":"calls","source":1242,"target":507},{"edgeType":"calls","source":1064,"target":740},{"edgeType":"contains","source":477,"target":606},{"edgeType":"contains","source":916,"target":738},{"edgeType":"imports","source":312,"target":779},{"edgeType":"contains","source":943,"target":958},{"edgeType":"contains","source":942,"target":989},{"edgeType":"calls","source":1164,"target":547},{"edgeType":"contains","source":225,"target":226},{"edgeType":"calls","source":1678,"target":204},{"edgeType":"calls","source":736,"target":337},{"edgeType":"imports","source":32,"target":210},{"edgeType":"calls","source":1367,"target":1388},{"edgeType":"extends","source":53,"target":54},{"edgeType":"contains","source":589,"target":1097},{"edgeType":"calls","source":1233,"target":1314},{"edgeType":"calls","source":1775,"target":632},{"edgeType":"contains","source":249,"target":274},{"edgeType":"calls","source":1630,"target":370},{"edgeType":"contains","source":476,"target":1165},{"edgeType":"calls","source":888,"target":118},{"edgeType":"contains","source":76,"target":88},{"edgeType":"calls","source":1680,"target":670},{"edgeType":"calls","source":1630,"target":374},{"edgeType":"calls","source":1543,"target":428},{"edgeType":"calls","source":1553,"target":118},{"edgeType":"contains","source":116,"target":172},{"edgeType":"contains","source":225,"target":229},{"edgeType":"imports","source":32,"target":206},{"edgeType":"contains","source":249,"target":277},{"edgeType":"imports","source":32,"target":207},{"edgeType":"contains","source":589,"target":1100},{"edgeType":"contains","source":942,"target":992},{"edgeType":"calls","source":547,"target":122},{"edgeType":"contains","source":2,"target":7},{"edgeType":"contains","source":76,"target":91},{"edgeType":"contains","source":476,"target":1168},{"edgeType":"calls","source":1553,"target":382},{"edgeType":"calls","source":1732,"target":382}
,{"edgeType":"calls","source":1186,"target":1188},{"edgeType":"calls","source":1688,"target":689},{"edgeType":"calls","source":654,"target":503},{"edgeType":"calls","source":1630,"target":373},{"edgeType":"contains","source":116,"target":171},{"edgeType":"contains","source":115,"target":202},{"edgeType":"contains","source":225,"target":228},{"edgeType":"calls","source":1667,"target":547},{"edgeType":"calls","source":1588,"target":882},{"edgeType":"imports","source":32,"target":208},{"edgeType":"contains","source":942,"target":991},{"edgeType":"contains","source":249,"target":276},{"edgeType":"contains","source":476,"target":1167},{"edgeType":"calls","source":1472,"target":778},{"edgeType":"calls","source":1002,"target":814},{"edgeType":"contains","source":76,"target":90},{"edgeType":"contains","source":76,"target":93},{"edgeType":"calls","source":601,"target":291},{"edgeType":"contains","source":225,"target":231},{"edgeType":"contains","source":116,"target":174},{"edgeType":"imports","source":32,"target":204},{"edgeType":"contains","source":943,"target":963},{"edgeType":"contains","source":249,"target":279},{"edgeType":"calls","source":1651,"target":509},{"edgeType":"calls","source":1654,"target":416},{"edgeType":"contains","source":2,"target":802},{"edgeType":"contains","source":2,"target":9},{"edgeType":"imports","source":1498,"target":212},{"edgeType":"calls","source":658,"target":374},{"edgeType":"calls","source":1630,"target":367},{"edgeType":"calls","source":1790,"target":428},{"edgeType":"calls","source":1797,"target":211},{"edgeType":"calls","source":1800,"target":118},{"edgeType":"contains","source":476,"target":1170},{"edgeType":"calls","source":1048,"target":704},{"edgeType":"contains","source":76,"target":92},{"edgeType":"calls","source":1639,"target":1409},{"edgeType":"calls","source":1724,"target":888},{"edgeType":"calls","source":1740,"target":392},{"edgeType":"contains","source":116,"target":173},{"edgeType":"contains","source":225,"target":230},{"edgeType":"extends","source":926,"target":742},{"edgeType":"imports","source":32,"target":205},{"edgeType":"contains","source":929,"target":1132},{"edgeType":"imports","source":312,"target":774},{"edgeType":"calls","source":1797,"target":210},{"edgeType":"contains","source":249,"target":278},{"edgeType":"contains","source":2,"target":8},{"edgeType":"calls","source":1643,"target":756},{"edgeType":"calls","source":1654,"target":415},{"edgeType":"imports","source":1498,"target":211},{"edgeType":"contains","source":916,"target":742},{"edgeType":"contains","source":942,"target":993},{"edgeType":"contains","source":943,"target":962},{"edgeType":"calls","source":658,"target":373},{"edgeType":"calls","source":653,"target":528},{"edgeType":"calls","source":1638,"target":118},{"edgeType":"contains","source":476,"target":1169},{"edgeType":"calls","source":1118,"target":386},{"edgeType":"calls","source":1678,"target":203},{"edgeType":"calls","source":1722,"target":689},{"edgeType":"calls","source":654,"target":500},{"edgeType":"imports","source":1498,"target":738},{"edgeType":"contains","source":225,"target":233},{"edgeType":"imports","source":674,"target":384},{"edgeType":"calls","source":1466,"target":433},{"edgeType":"calls","source":1482,"target":994},{"edgeType":"contains","source":116,"target":176},{"edgeType":"contains","source":250,"target":251},{"edgeType":"contains","source":249,"target":282},{"edgeType":"contains","source":943,"target":965},{"edgeType":"calls","source":1598,"target":40},{"edgeType":"imports","source":1498,"target":2
10},{"edgeType":"calls","source":1233,"target":1313},{"edgeType":"calls","source":1707,"target":96},{"edgeType":"calls","source":1630,"target":369},{"edgeType":"calls","source":1604,"target":118},{"edgeType":"imports","source":29,"target":824},{"edgeType":"contains","source":331,"target":382},{"edgeType":"calls","source":1720,"target":750},{"edgeType":"contains","source":225,"target":232},{"edgeType":"contains","source":116,"target":175},{"edgeType":"contains","source":249,"target":281},{"edgeType":"imports","source":32,"target":203},{"edgeType":"contains","source":249,"target":280},{"edgeType":"imports","source":719,"target":311},{"edgeType":"contains","source":27,"target":28},{"edgeType":"calls","source":1203,"target":1185},{"edgeType":"calls","source":1233,"target":1312},{"edgeType":"imports","source":1498,"target":1266},{"edgeType":"calls","source":1783,"target":382},{"edgeType":"calls","source":1483,"target":433},{"edgeType":"contains","source":2,"target":10},{"edgeType":"calls","source":1630,"target":368},{"edgeType":"contains","source":476,"target":1171},{"edgeType":"calls","source":1797,"target":212},{"edgeType":"contains","source":1340,"target":1341},{"edgeType":"calls","source":1774,"target":392},{"edgeType":"calls","source":1494,"target":87},{"edgeType":"contains","source":225,"target":235},{"edgeType":"contains","source":719,"target":1041},{"edgeType":"calls","source":591,"target":597},{"edgeType":"calls","source":598,"target":380},{"edgeType":"calls","source":1639,"target":1406},{"edgeType":"contains","source":10,"target":1350},{"edgeType":"contains","source":250,"target":253},{"edgeType":"contains","source":116,"target":178},{"edgeType":"calls","source":673,"target":433},{"edgeType":"imports","source":719,"target":325},{"edgeType":"imports","source":29,"target":46},{"edgeType":"calls","source":1172,"target":291},{"edgeType":"calls","source":1367,"target":588},{"edgeType":"contains","source":2,"target":806},{"edgeType":"imports","source":32,"target":217},{"edgeType":"calls","source":557,"target":594},{"edgeType":"calls","source":1628,"target":689},{"edgeType":"calls","source":1543,"target":1210},{"edgeType":"contains","source":1431,"target":1432},{"edgeType":"calls","source":1672,"target":118},{"edgeType":"calls","source":658,"target":370},{"edgeType":"contains","source":379,"target":745},{"edgeType":"calls","source":1402,"target":824},{"edgeType":"calls","source":1024,"target":386},{"edgeType":"contains","source":225,"target":234},{"edgeType":"calls","source":1639,"target":1405},{"edgeType":"calls","source":1672,"target":382},{"edgeType":"contains","source":719,"target":1040},{"edgeType":"calls","source":1581,"target":560},{"edgeType":"contains","source":250,"target":252},{"edgeType":"contains","source":249,"target":283},{"edgeType":"contains","source":116,"target":177},{"edgeType":"contains","source":1106,"target":670},{"edgeType":"calls","source":1061,"target":824},{"edgeType":"calls","source":557,"target":593},{"edgeType":"calls","source":591,"target":596},{"edgeType":"calls","source":1639,"target":1404},{"edgeType":"imports","source":32,"target":218},{"edgeType":"calls","source":658,"target":369},{"edgeType":"contains","source":1431,"target":1433},{"edgeType":"contains","source":225,"target":237},{"edgeType":"calls","source":1638,"target":382},{"edgeType":"calls","source":1639,"target":1408},{"edgeType":"calls","source":1594,"target":689},{"edgeType":"imports","source":1498,"target":222},{"edgeType":"contains","source":250,"target":255},{"edgeType":"contains","source":10,"
target":1352},{"edgeType":"contains","source":17,"target":1135},{"edgeType":"contains","source":116,"target":181},{"edgeType":"calls","source":658,"target":372},{"edgeType":"calls","source":653,"target":527},{"edgeType":"calls","source":1233,"target":1309},{"edgeType":"imports","source":32,"target":215},{"edgeType":"calls","source":1496,"target":291},{"edgeType":"contains","source":719,"target":778},{"edgeType":"contains","source":941,"target":1031},{"edgeType":"contains","source":1241,"target":1242},{"edgeType":"calls","source":616,"target":617},{"edgeType":"contains","source":225,"target":236},{"edgeType":"calls","source":1639,"target":1407},{"edgeType":"imports","source":119,"target":954},{"edgeType":"calls","source":1494,"target":88},{"edgeType":"contains","source":1183,"target":1192},{"edgeType":"contains","source":250,"target":254},{"edgeType":"contains","source":116,"target":179},{"edgeType":"contains","source":116,"target":180},{"edgeType":"calls","source":1579,"target":888},{"edgeType":"calls","source":997,"target":961},{"edgeType":"contains","source":27,"target":32},{"edgeType":"contains","source":102,"target":878},{"edgeType":"calls","source":1543,"target":1211},{"edgeType":"imports","source":32,"target":216},{"edgeType":"contains","source":943,"target":968},{"edgeType":"calls","source":740,"target":1000},{"edgeType":"calls","source":658,"target":371},{"edgeType":"calls","source":653,"target":526},{"edgeType":"contains","source":225,"target":239},{"edgeType":"contains","source":719,"target":1045},{"edgeType":"contains","source":1109,"target":1111},{"edgeType":"calls","source":1673,"target":1669},{"edgeType":"calls","source":1686,"target":1266},{"edgeType":"contains","source":674,"target":590},{"edgeType":"calls","source":1799,"target":670},{"edgeType":"contains","source":250,"target":257},{"edgeType":"contains","source":1246,"target":1357},{"edgeType":"calls","source":685,"target":586},{"edgeType":"contains","source":116,"target":183},{"edgeType":"calls","source":653,"target":521},{"edgeType":"imports","source":32,"target":213},{"edgeType":"contains","source":476,"target":1178},{"edgeType":"contains","source":941,"target":1033},{"edgeType":"contains","source":1431,"target":1436},{"edgeType":"calls","source":1549,"target":756},{"edgeType":"contains","source":719,"target":780},{"edgeType":"calls","source":1233,"target":1303},{"edgeType":"contains","source":2,"target":17},{"edgeType":"calls","source":1560,"target":679},{"edgeType":"contains","source":1109,"target":1110},{"edgeType":"calls","source":670,"target":521},{"edgeType":"calls","source":1686,"target":1265},{"edgeType":"calls","source":1713,"target":428},{"edgeType":"contains","source":225,"target":238},{"edgeType":"contains","source":719,"target":1044},{"edgeType":"contains","source":674,"target":589},{"edgeType":"calls","source":1544,"target":382},{"edgeType":"contains","source":250,"target":256},{"edgeType":"contains","source":1498,"target":1738},{"edgeType":"calls","source":1447,"target":482},{"edgeType":"calls","source":609,"target":562},{"edgeType":"contains","source":1246,"target":1358},{"edgeType":"calls","source":653,"target":520},{"edgeType":"contains","source":116,"target":182},{"edgeType":"imports","source":674,"target":395},{"edgeType":"calls","source":1632,"target":296},{"edgeType":"imports","source":32,"target":214},{"edgeType":"contains","source":102,"target":880},{"edgeType":"calls","source":964,"target":392},{"edgeType":"contains","source":941,"target":1032},{"edgeType":"contains","source":250,"target":259},
{"edgeType":"contains","source":376,"target":317},{"edgeType":"calls","source":1003,"target":507},{"edgeType":"calls","source":1727,"target":1054},{"edgeType":"contains","source":1183,"target":1197},{"edgeType":"contains","source":1246,"target":1355},{"edgeType":"calls","source":1722,"target":416},{"edgeType":"imports","source":32,"target":211},{"edgeType":"calls","source":1639,"target":1403},{"edgeType":"contains","source":719,"target":782},{"edgeType":"contains","source":942,"target":1004},{"edgeType":"contains","source":941,"target":1035},{"edgeType":"calls","source":658,"target":368},{"edgeType":"contains","source":102,"target":883},{"edgeType":"contains","source":1431,"target":1434},{"edgeType":"contains","source":225,"target":240},{"edgeType":"contains","source":719,"target":1046},{"edgeType":"calls","source":1561,"target":121},{"edgeType":"calls","source":1673,"target":1670},{"edgeType":"contains","source":250,"target":258},{"edgeType":"contains","source":376,"target":316},{"edgeType":"calls","source":1722,"target":415},{"edgeType":"imports","source":719,"target":320},{"edgeType":"extends","source":590,"target":334},{"edgeType":"calls","source":1048,"target":433},{"edgeType":"calls","source":1747,"target":433},{"edgeType":"calls","source":1447,"target":484},{"edgeType":"calls","source":1696,"target":428},{"edgeType":"contains","source":1246,"target":1356},{"edgeType":"calls","source":658,"target":367},{"edgeType":"imports","source":32,"target":212},{"edgeType":"contains","source":1431,"target":1435},{"edgeType":"contains","source":908,"target":1000},{"edgeType":"calls","source":780,"target":813},{"edgeType":"contains","source":941,"target":1034},{"edgeType":"calls","source":1706,"target":382},{"edgeType":"calls","source":1233,"target":247},{"edgeType":"contains","source":102,"target":882},{"edgeType":"calls","source":910,"target":246},{"edgeType":"contains","source":942,"target":974},{"edgeType":"contains","source":589,"target":1082},{"edgeType":"contains","source":719,"target":752},{"edgeType":"calls","source":1552,"target":428},{"edgeType":"contains","source":546,"target":829},{"edgeType":"contains","source":116,"target":154},{"edgeType":"contains","source":115,"target":185},{"edgeType":"contains","source":480,"target":497},{"edgeType":"contains","source":1298,"target":1302},{"edgeType":"calls","source":363,"target":291},{"edgeType":"contains","source":942,"target":973},{"edgeType":"extends","source":1109,"target":334},{"edgeType":"contains","source":23,"target":393},{"edgeType":"calls","source":1056,"target":740},{"edgeType":"calls","source":1345,"target":1030},{"edgeType":"contains","source":589,"target":1081},{"edgeType":"calls","source":1584,"target":756},{"edgeType":"calls","source":583,"target":604},{"edgeType":"calls","source":996,"target":1014},{"edgeType":"calls","source":1715,"target":1716},{"edgeType":"calls","source":1082,"target":462},{"edgeType":"contains","source":486,"target":574},{"edgeType":"contains","source":116,"target":153},{"edgeType":"contains","source":115,"target":184},{"edgeType":"calls","source":671,"target":1048},{"edgeType":"contains","source":1219,"target":1370},{"edgeType":"contains","source":942,"target":972},{"edgeType":"calls","source":1563,"target":878},{"edgeType":"contains","source":27,"target":1062},{"edgeType":"contains","source":480,"target":496},{"edgeType":"calls","source":988,"target":734},{"edgeType":"calls","source":1233,"target":1334},{"edgeType":"calls","source":1800,"target":670},{"edgeType":"contains","source":942,"target":976},{"ed
geType":"calls","source":718,"target":386},{"edgeType":"calls","source":509,"target":523},{"edgeType":"extends","source":772,"target":742},{"edgeType":"calls","source":1761,"target":557},{"edgeType":"calls","source":1545,"target":382},{"edgeType":"contains","source":546,"target":831},{"edgeType":"contains","source":116,"target":156},{"edgeType":"contains","source":480,"target":499},{"edgeType":"contains","source":477,"target":592},{"edgeType":"calls","source":1740,"target":415},{"edgeType":"contains","source":115,"target":187},{"edgeType":"contains","source":1219,"target":1373},{"edgeType":"calls","source":1242,"target":1319},{"edgeType":"contains","source":1298,"target":1300},{"edgeType":"calls","source":509,"target":522},{"edgeType":"contains","source":942,"target":975},{"edgeType":"calls","source":1557,"target":1067},{"edgeType":"calls","source":1410,"target":603},{"edgeType":"calls","source":1430,"target":1040},{"edgeType":"calls","source":1191,"target":1049},{"edgeType":"imports","source":1198,"target":1041},{"edgeType":"calls","source":1633,"target":296},{"edgeType":"contains","source":546,"target":830},{"edgeType":"contains","source":116,"target":155},{"edgeType":"contains","source":115,"target":186},{"edgeType":"calls","source":1567,"target":756},{"edgeType":"contains","source":480,"target":498},{"edgeType":"contains","source":942,"target":978},{"edgeType":"calls","source":680,"target":501},{"edgeType":"contains","source":1106,"target":1179},{"edgeType":"imports","source":1198,"target":774},{"edgeType":"calls","source":995,"target":1042},{"edgeType":"contains","source":719,"target":1020},{"edgeType":"calls","source":711,"target":597},{"edgeType":"calls","source":1439,"target":1020},{"edgeType":"calls","source":961,"target":247},{"edgeType":"contains","source":907,"target":477},{"edgeType":"contains","source":116,"target":158},{"edgeType":"contains","source":115,"target":189},{"edgeType":"imports","source":719,"target":560},{"edgeType":"calls","source":1569,"target":689},{"edgeType":"contains","source":908,"target":710},{"edgeType":"calls","source":680,"target":500},{"edgeType":"calls","source":711,"target":596},{"edgeType":"calls","source":509,"target":516},{"edgeType":"contains","source":942,"target":977},{"edgeType":"calls","source":1631,"target":1409},{"edgeType":"calls","source":583,"target":600},{"edgeType":"calls","source":1030,"target":221},{"edgeType":"contains","source":546,"target":832},{"edgeType":"contains","source":116,"target":157},{"edgeType":"contains","source":589,"target":1085},{"edgeType":"contains","source":115,"target":188},{"edgeType":"contains","source":393,"target":822},{"edgeType":"contains","source":942,"target":980},{"edgeType":"imports","source":119,"target":924},{"edgeType":"calls","source":1603,"target":1223},{"edgeType":"calls","source":680,"target":503},{"edgeType":"calls","source":1300,"target":1103},{"edgeType":"calls","source":1711,"target":1046},{"edgeType":"calls","source":1708,"target":82},{"edgeType":"imports","source":1264,"target":311},{"edgeType":"contains","source":589,"target":1088},{"edgeType":"contains","source":116,"target":160},{"edgeType":"contains","source":115,"target":191},{"edgeType":"calls","source":880,"target":117},{"edgeType":"calls","source":724,"target":725},{"edgeType":"contains","source":942,"target":979},{"edgeType":"contains","source":719,"target":757},{"edgeType":"calls","source":1596,"target":118},{"edgeType":"calls","source":740,"target":756},{"edgeType":"calls","source":1596,"target":382},{"edgeType":"contains","sour
ce":546,"target":835},{"edgeType":"contains","source":102,"target":1121},{"edgeType":"calls","source":583,"target":602},{"edgeType":"calls","source":680,"target":502},{"edgeType":"extends","source":738,"target":742},{"edgeType":"calls","source":435,"target":434},{"edgeType":"calls","source":1191,"target":1045},{"edgeType":"calls","source":1791,"target":679},{"edgeType":"calls","source":1798,"target":462},{"edgeType":"contains","source":116,"target":159},{"edgeType":"contains","source":589,"target":1087},{"edgeType":"contains","source":115,"target":190},{"edgeType":"contains","source":477,"target":595},{"edgeType":"imports","source":1062,"target":246},{"edgeType":"contains","source":395,"target":762},{"edgeType":"calls","source":462,"target":384},{"edgeType":"calls","source":1655,"target":398},{"edgeType":"contains","source":942,"target":982},{"edgeType":"contains","source":546,"target":838},{"edgeType":"calls","source":1631,"target":1406},{"edgeType":"contains","source":10,"target":13},{"edgeType":"contains","source":480,"target":505},{"edgeType":"imports","source":1498,"target":1265},{"edgeType":"imports","source":312,"target":769},{"edgeType":"calls","source":547,"target":392},{"edgeType":"calls","source":1059,"target":111},{"edgeType":"contains","source":116,"target":162},{"edgeType":"contains","source":115,"target":193},{"edgeType":"contains","source":26,"target":44},{"edgeType":"calls","source":1620,"target":689},{"edgeType":"contains","source":942,"target":981},{"edgeType":"calls","source":1679,"target":710},{"edgeType":"calls","source":928,"target":207},{"edgeType":"contains","source":546,"target":837},{"edgeType":"calls","source":1483,"target":708},{"edgeType":"calls","source":1631,"target":1405},{"edgeType":"contains","source":719,"target":1023},{"edgeType":"calls","source":1059,"target":110},{"edgeType":"calls","source":644,"target":291},{"edgeType":"contains","source":116,"target":161},{"edgeType":"contains","source":115,"target":192},{"edgeType":"contains","source":116,"target":164},{"edgeType":"calls","source":1733,"target":96},{"edgeType":"contains","source":115,"target":195},{"edgeType":"extends","source":920,"target":919},{"edgeType":"calls","source":1214,"target":1122},{"edgeType":"contains","source":546,"target":840},{"edgeType":"calls","source":583,"target":599},{"edgeType":"calls","source":1630,"target":382},{"edgeType":"calls","source":1631,"target":1408},{"edgeType":"contains","source":942,"target":984},{"edgeType":"calls","source":995,"target":247},{"edgeType":"calls","source":1586,"target":689},{"edgeType":"calls","source":1765,"target":689},{"edgeType":"calls","source":1749,"target":392},{"edgeType":"calls","source":1718,"target":296},{"edgeType":"calls","source":735,"target":644},{"edgeType":"contains","source":589,"target":1092},{"edgeType":"calls","source":641,"target":650},{"edgeType":"calls","source":1631,"target":1407},{"edgeType":"contains","source":942,"target":983},{"edgeType":"contains","source":546,"target":839},{"edgeType":"calls","source":557,"target":612},{"edgeType":"calls","source":1647,"target":118},{"edgeType":"imports","source":1198,"target":1049},{"edgeType":"contains","source":116,"target":163},{"edgeType":"calls","source":1603,"target":689},{"edgeType":"calls","source":1647,"target":382},{"edgeType":"contains","source":115,"target":194},{"edgeType":"calls","source":573,"target":380},{"edgeType":"contains","source":116,"target":166},{"edgeType":"contains","source":115,"target":197},{"edgeType":"contains","source":1125,"target":1127},{"edgeType":
"calls","source":928,"target":204},{"edgeType":"contains","source":943,"target":955},{"edgeType":"calls","source":1203,"target":1193},{"edgeType":"calls","source":1367,"target":337},{"edgeType":"contains","source":589,"target":1094},{"edgeType":"imports","source":1198,"target":782},{"edgeType":"imports","source":674,"target":906},{"edgeType":"contains","source":719,"target":1028},{"edgeType":"calls","source":691,"target":681},{"edgeType":"calls","source":1583,"target":247},{"edgeType":"contains","source":249,"target":271},{"edgeType":"calls","source":1225,"target":246},{"edgeType":"imports","source":1498,"target":204},{"edgeType":"contains","source":1125,"target":1126},{"edgeType":"contains","source":116,"target":165},{"edgeType":"contains","source":115,"target":196},{"edgeType":"calls","source":1412,"target":1319},{"edgeType":"calls","source":928,"target":203},{"edgeType":"calls","source":584,"target":561},{"edgeType":"calls","source":691,"target":679},{"edgeType":"calls","source":557,"target":605},{"edgeType":"contains","source":546,"target":841},{"edgeType":"contains","source":942,"target":985},{"edgeType":"calls","source":685,"target":337},{"edgeType":"contains","source":249,"target":270},{"edgeType":"calls","source":1650,"target":547},{"edgeType":"imports","source":1498,"target":203},{"edgeType":"calls","source":1415,"target":433},{"edgeType":"calls","source":1410,"target":588},{"edgeType":"contains","source":546,"target":844},{"edgeType":"calls","source":1631,"target":1404},{"edgeType":"extends","source":770,"target":288},{"edgeType":"calls","source":1202,"target":433},{"edgeType":"contains","source":116,"target":168},{"edgeType":"calls","source":1367,"target":603},{"edgeType":"calls","source":1722,"target":433},{"edgeType":"contains","source":115,"target":199},{"edgeType":"calls","source":641,"target":647},{"edgeType":"contains","source":1125,"target":1129},{"edgeType":"calls","source":1312,"target":1251},{"edgeType":"calls","source":1671,"target":428},{"edgeType":"calls","source":1681,"target":118},{"edgeType":"contains","source":943,"target":957},{"edgeType":"calls","source":1000,"target":88},{"edgeType":"contains","source":719,"target":1030},{"edgeType":"contains","source":1106,"target":1189},{"edgeType":"calls","source":509,"target":247},{"edgeType":"contains","source":249,"target":273},{"edgeType":"calls","source":1077,"target":608},{"edgeType":"contains","source":76,"target":87},{"edgeType":"calls","source":1590,"target":296},{"edgeType":"calls","source":1631,"target":1403},{"edgeType":"contains","source":1125,"target":1128},{"edgeType":"contains","source":116,"target":167},{"edgeType":"calls","source":1574,"target":1056},{"edgeType":"contains","source":115,"target":198},{"edgeType":"calls","source":1046,"target":247},{"edgeType":"calls","source":1664,"target":644},{"edgeType":"contains","source":942,"target":987},{"edgeType":"calls","source":1451,"target":641},{"edgeType":"calls","source":1000,"target":87},{"edgeType":"calls","source":1739,"target":433},{"edgeType":"imports","source":674,"target":641},{"edgeType":"imports","source":1198,"target":1045},{"edgeType":"contains","source":249,"target":272},{"edgeType":"calls","source":1077,"target":607},{"edgeType":"contains","source":76,"target":86},{"edgeType":"imports","source":312,"target":764},{"edgeType":"calls","source":1413,"target":1252},{"edgeType":"calls","source":1621,"target":882},{"edgeType":"contains","source":385,"target":783},{"edgeType":"calls","source":1600,"target":740},{"edgeType":"contains","source":290,"target
":293},{"edgeType":"calls","source":1597,"target":40},{"edgeType":"contains","source":1760,"target":1762},{"edgeType":"imports","source":312,"target":546},{"edgeType":"calls","source":707,"target":412},{"edgeType":"contains","source":8,"target":50},{"edgeType":"contains","source":312,"target":1196},{"edgeType":"calls","source":1002,"target":1044},{"edgeType":"contains","source":1471,"target":1472},{"edgeType":"calls","source":744,"target":586},{"edgeType":"calls","source":1587,"target":878},{"edgeType":"contains","source":290,"target":292},{"edgeType":"calls","source":658,"target":608},{"edgeType":"calls","source":583,"target":291},{"edgeType":"calls","source":707,"target":411},{"edgeType":"imports","source":1498,"target":777},{"edgeType":"contains","source":1471,"target":1473},{"edgeType":"calls","source":1185,"target":1187},{"edgeType":"contains","source":17,"target":37},{"edgeType":"contains","source":1124,"target":1131},{"edgeType":"contains","source":1123,"target":1162},{"edgeType":"contains","source":546,"target":815},{"edgeType":"calls","source":928,"target":433},{"edgeType":"contains","source":290,"target":295},{"edgeType":"contains","source":17,"target":301},{"edgeType":"calls","source":782,"target":203},{"edgeType":"contains","source":32,"target":365},{"edgeType":"calls","source":755,"target":247},{"edgeType":"contains","source":719,"target":1001},{"edgeType":"imports","source":1498,"target":774},{"edgeType":"calls","source":1185,"target":1186},{"edgeType":"contains","source":1124,"target":1130},{"edgeType":"contains","source":674,"target":546},{"edgeType":"calls","source":1063,"target":740},{"edgeType":"contains","source":17,"target":300},{"edgeType":"contains","source":290,"target":294},{"edgeType":"calls","source":766,"target":433},{"edgeType":"contains","source":1760,"target":1761},{"edgeType":"contains","source":32,"target":364},{"edgeType":"calls","source":1679,"target":670},{"edgeType":"calls","source":1748,"target":112},{"edgeType":"calls","source":1225,"target":205},{"edgeType":"calls","source":1073,"target":689},{"edgeType":"calls","source":1593,"target":689},{"edgeType":"calls","source":1637,"target":382},{"edgeType":"imports","source":1498,"target":245},{"edgeType":"calls","source":817,"target":433},{"edgeType":"calls","source":671,"target":995},{"edgeType":"calls","source":1068,"target":1108},{"edgeType":"calls","source":1230,"target":49},{"edgeType":"calls","source":1225,"target":204},{"edgeType":"calls","source":1663,"target":632},{"edgeType":"imports","source":1498,"target":244},{"edgeType":"calls","source":682,"target":653},{"edgeType":"contains","source":719,"target":1002},{"edgeType":"calls","source":1578,"target":888},{"edgeType":"contains","source":32,"target":366},{"edgeType":"calls","source":658,"target":607},{"edgeType":"calls","source":1191,"target":204},{"edgeType":"calls","source":1557,"target":750},{"edgeType":"imports","source":1498,"target":771},{"edgeType":"contains","source":290,"target":299},{"edgeType":"contains","source":480,"target":487},{"edgeType":"calls","source":1748,"target":114},{"edgeType":"imports","source":1498,"target":243},{"edgeType":"imports","source":476,"target":477},{"edgeType":"contains","source":1106,"target":1164},{"edgeType":"contains","source":476,"target":1139},{"edgeType":"calls","source":1789,"target":428},{"edgeType":"calls","source":1799,"target":118},{"edgeType":"contains","source":1471,"target":1474},{"edgeType":"calls","source":1739,"target":392},{"edgeType":"calls","source":1748,"target":113},{"edgeType":"calls","so
urce":1225,"target":206},{"edgeType":"calls","source":1191,"target":203},{"edgeType":"imports","source":1498,"target":770},{"edgeType":"contains","source":290,"target":298},{"edgeType":"calls","source":644,"target":247},{"edgeType":"calls","source":1647,"target":337},{"edgeType":"contains","source":909,"target":928},{"edgeType":"calls","source":1164,"target":247},{"edgeType":"calls","source":1637,"target":118},{"edgeType":"imports","source":719,"target":358},{"edgeType":"contains","source":480,"target":489},{"edgeType":"calls","source":1672,"target":1407},{"edgeType":"calls","source":1489,"target":1002},{"edgeType":"contains","source":1070,"target":1225},{"edgeType":"imports","source":1198,"target":573},{"edgeType":"contains","source":480,"target":488},{"edgeType":"calls","source":1068,"target":1103},{"edgeType":"calls","source":1672,"target":1406},{"edgeType":"calls","source":1798,"target":671},{"edgeType":"calls","source":756,"target":997},{"edgeType":"calls","source":1672,"target":1670},{"edgeType":"calls","source":1695,"target":428},{"edgeType":"calls","source":1705,"target":118},{"edgeType":"calls","source":1796,"target":204},{"edgeType":"calls","source":1661,"target":689},{"edgeType":"calls","source":1672,"target":1405},{"edgeType":"calls","source":1195,"target":337},{"edgeType":"calls","source":1233,"target":1273},{"edgeType":"contains","source":480,"target":491},{"edgeType":"calls","source":1225,"target":203},{"edgeType":"calls","source":1672,"target":1409},{"edgeType":"contains","source":248,"target":284},{"edgeType":"calls","source":1718,"target":247},{"edgeType":"imports","source":32,"target":248},{"edgeType":"calls","source":1671,"target":118},{"edgeType":"contains","source":1070,"target":1227},{"edgeType":"contains","source":290,"target":302},{"edgeType":"calls","source":1401,"target":824},{"edgeType":"calls","source":1671,"target":382},{"edgeType":"calls","source":1672,"target":1408},{"edgeType":"calls","source":1580,"target":560},{"edgeType":"calls","source":1366,"target":588},{"edgeType":"calls","source":1410,"target":1338},{"edgeType":"calls","source":766,"target":689},{"edgeType":"calls","source":1481,"target":986},{"edgeType":"calls","source":780,"target":784},{"edgeType":"calls","source":1550,"target":428},{"edgeType":"calls","source":1705,"target":644},{"edgeType":"contains","source":480,"target":493},{"edgeType":"calls","source":1043,"target":818},{"edgeType":"contains","source":248,"target":286},{"edgeType":"calls","source":1761,"target":229},{"edgeType":"calls","source":1073,"target":681},{"edgeType":"calls","source":1614,"target":1615},{"edgeType":"calls","source":507,"target":521},{"edgeType":"calls","source":1345,"target":1233},{"edgeType":"calls","source":1652,"target":701},{"edgeType":"calls","source":817,"target":689},{"edgeType":"contains","source":23,"target":1183},{"edgeType":"contains","source":1070,"target":1229},{"edgeType":"extends","source":46,"target":53},{"edgeType":"contains","source":942,"target":969},{"edgeType":"calls","source":507,"target":520},{"edgeType":"contains","source":480,"target":492},{"edgeType":"contains","source":477,"target":585},{"edgeType":"contains","source":248,"target":285},{"edgeType":"imports","source":719,"target":355},{"edgeType":"calls","source":1496,"target":1044},{"edgeType":"calls","source":435,"target":374},{"edgeType":"contains","source":385,"target":794},{"edgeType":"calls","source":1561,"target":878},{"edgeType":"contains","source":1070,"target":1228},{"edgeType":"contains","source":1298,"target":1299},{"edgeType":"e
xtends","source":774,"target":742},{"edgeType":"imports","source":674,"target":954},{"edgeType":"contains","source":589,"target":1080},{"edgeType":"calls","source":1672,"target":1669},{"edgeType":"contains","source":480,"target":495},{"edgeType":"contains","source":477,"target":588},{"edgeType":"calls","source":1598,"target":1056},{"edgeType":"calls","source":1479,"target":1045},{"edgeType":"contains","source":116,"target":152},{"edgeType":"calls","source":1068,"target":573},{"edgeType":"calls","source":1796,"target":203},{"edgeType":"contains","source":1219,"target":1369},{"edgeType":"calls","source":780,"target":781},{"edgeType":"contains","source":942,"target":971},{"edgeType":"calls","source":557,"target":559},{"edgeType":"calls","source":1672,"target":1404},{"edgeType":"calls","source":1759,"target":557},{"edgeType":"imports","source":29,"target":601},{"edgeType":"contains","source":480,"target":494},{"edgeType":"calls","source":1543,"target":382},{"edgeType":"calls","source":1678,"target":689},{"edgeType":"calls","source":1428,"target":1040},{"edgeType":"calls","source":692,"target":337},{"edgeType":"calls","source":671,"target":988},{"edgeType":"calls","source":1672,"target":1403},{"edgeType":"contains","source":942,"target":970},{"edgeType":"calls","source":1676,"target":222},{"edgeType":"calls","source":750,"target":653},{"edgeType":"calls","source":736,"target":586},{"edgeType":"imports","source":38,"target":39},{"edgeType":"imports","source":32,"target":225},{"edgeType":"extends","source":773,"target":742},{"edgeType":"calls","source":1602,"target":1223},{"edgeType":"calls","source":1077,"target":850},{"edgeType":"contains","source":29,"target":1496},{"edgeType":"calls","source":483,"target":501},{"edgeType":"contains","source":17,"target":19},{"edgeType":"calls","source":1122,"target":247},{"edgeType":"contains","source":1123,"target":1143},{"edgeType":"calls","source":1666,"target":296},{"edgeType":"calls","source":1426,"target":601},{"edgeType":"calls","source":737,"target":291},{"edgeType":"contains","source":379,"target":688},{"edgeType":"calls","source":624,"target":623},{"edgeType":"contains","source":1264,"target":1266},{"edgeType":"calls","source":818,"target":951},{"edgeType":"contains","source":1539,"target":1726},{"edgeType":"calls","source":1595,"target":118},{"edgeType":"contains","source":589,"target":1048},{"edgeType":"calls","source":744,"target":337},{"edgeType":"calls","source":1595,"target":382},{"edgeType":"calls","source":483,"target":500},{"edgeType":"calls","source":641,"target":623},{"edgeType":"extends","source":924,"target":288},{"edgeType":"contains","source":929,"target":1078},{"edgeType":"contains","source":1123,"target":1142},{"edgeType":"contains","source":17,"target":18},{"edgeType":"imports","source":1498,"target":760},{"edgeType":"calls","source":1797,"target":462},{"edgeType":"calls","source":1613,"target":1409},{"edgeType":"calls","source":1730,"target":689},{"edgeType":"contains","source":1454,"target":1455},{"edgeType":"calls","source":624,"target":622},{"edgeType":"calls","source":1750,"target":337},{"edgeType":"calls","source":670,"target":520},{"edgeType":"imports","source":32,"target":223},{"edgeType":"contains","source":1539,"target":1723},{"edgeType":"calls","source":1566,"target":756},{"edgeType":"calls","source":1762,"target":229},{"edgeType":"calls","source":483,"target":503},{"edgeType":"calls","source":1664,"target":96},{"edgeType":"calls","source":1551,"target":428},{"edgeType":"calls","source":653,"target":519},{"edgeType":"call
s","source":1653,"target":701},{"edgeType":"contains","source":1123,"target":1145},{"edgeType":"contains","source":379,"target":690},{"edgeType":"imports","source":393,"target":395},{"edgeType":"contains","source":1539,"target":1724},{"edgeType":"calls","source":1796,"target":1024},{"edgeType":"imports","source":32,"target":224},{"edgeType":"calls","source":1675,"target":547},{"edgeType":"imports","source":1198,"target":546},{"edgeType":"calls","source":670,"target":519},{"edgeType":"calls","source":483,"target":502},{"edgeType":"contains","source":29,"target":1497},{"edgeType":"contains","source":17,"target":20},{"edgeType":"contains","source":1123,"target":1144},{"edgeType":"calls","source":1345,"target":999},{"edgeType":"contains","source":1264,"target":1265},{"edgeType":"calls","source":641,"target":620},{"edgeType":"calls","source":1732,"target":96},{"edgeType":"contains","source":1539,"target":1729},{"edgeType":"calls","source":1180,"target":824},{"edgeType":"imports","source":29,"target":842},{"edgeType":"contains","source":29,"target":1500},{"edgeType":"calls","source":624,"target":619},{"edgeType":"extends","source":917,"target":774},{"edgeType":"calls","source":1764,"target":689},{"edgeType":"calls","source":1483,"target":415},{"edgeType":"contains","source":1123,"target":1147},{"edgeType":"calls","source":1748,"target":392},{"edgeType":"imports","source":32,"target":221},{"edgeType":"calls","source":1613,"target":1406},{"edgeType":"calls","source":842,"target":1260},{"edgeType":"calls","source":641,"target":619},{"edgeType":"contains","source":1539,"target":1730},{"edgeType":"imports","source":719,"target":330},{"edgeType":"contains","source":909,"target":910},{"edgeType":"calls","source":755,"target":520},{"edgeType":"contains","source":719,"target":986},{"edgeType":"calls","source":1061,"target":284},{"edgeType":"calls","source":626,"target":291},{"edgeType":"contains","source":8,"target":36},{"edgeType":"contains","source":29,"target":1499},{"edgeType":"calls","source":624,"target":618},{"edgeType":"calls","source":755,"target":521},{"edgeType":"calls","source":1646,"target":118},{"edgeType":"contains","source":1123,"target":1146},{"edgeType":"calls","source":842,"target":1259},{"edgeType":"contains","source":1539,"target":1731},{"edgeType":"calls","source":1613,"target":1405},{"edgeType":"calls","source":1646,"target":382},{"edgeType":"calls","source":1679,"target":416},{"edgeType":"imports","source":32,"target":222},{"edgeType":"calls","source":1602,"target":689},{"edgeType":"calls","source":782,"target":215},{"edgeType":"contains","source":909,"target":913},{"edgeType":"calls","source":1659,"target":247},{"edgeType":"calls","source":1438,"target":1020},{"edgeType":"calls","source":641,"target":622},{"edgeType":"contains","source":1539,"target":1727},{"edgeType":"calls","source":1618,"target":725},{"edgeType":"extends","source":677,"target":289},{"edgeType":"contains","source":810,"target":1075},{"edgeType":"imports","source":810,"target":941},{"edgeType":"calls","source":624,"target":621},{"edgeType":"contains","source":29,"target":1503},{"edgeType":"calls","source":767,"target":416},{"edgeType":"calls","source":1613,"target":1408},{"edgeType":"contains","source":1123,"target":1149},{"edgeType":"calls","source":1209,"target":1248},{"edgeType":"contains","source":1454,"target":1456},{"edgeType":"imports","source":32,"target":219},{"edgeType":"calls","source":641,"target":621},{"edgeType":"imports","source":1498,"target":225},{"edgeType":"contains","source":1539,"target":1728
},{"edgeType":"calls","source":995,"target":1010},{"edgeType":"calls","source":585,"target":507},{"edgeType":"calls","source":1003,"target":1027},{"edgeType":"calls","source":557,"target":583},{"edgeType":"calls","source":1684,"target":1056},{"edgeType":"calls","source":624,"target":620},{"edgeType":"calls","source":1483,"target":416},{"edgeType":"contains","source":1123,"target":1148},{"edgeType":"calls","source":767,"target":415},{"edgeType":"contains","source":1264,"target":1269},{"edgeType":"calls","source":1613,"target":1407},{"edgeType":"contains","source":25,"target":40},{"edgeType":"imports","source":32,"target":220},{"edgeType":"extends","source":921,"target":919},{"edgeType":"calls","source":658,"target":89},{"edgeType":"calls","source":117,"target":740},{"edgeType":"calls","source":1735,"target":1056},{"edgeType":"calls","source":1179,"target":1115},{"edgeType":"contains","source":1198,"target":1205},{"edgeType":"calls","source":1072,"target":204},{"edgeType":"calls","source":656,"target":415},{"edgeType":"contains","source":810,"target":1077},{"edgeType":"extends","source":385,"target":94},{"edgeType":"calls","source":1068,"target":1120},{"edgeType":"contains","source":32,"target":355},{"edgeType":"imports","source":1498,"target":241},{"edgeType":"contains","source":29,"target":1505},{"edgeType":"calls","source":1680,"target":118},{"edgeType":"contains","source":1539,"target":1734},{"edgeType":"calls","source":1180,"target":291},{"edgeType":"contains","source":1123,"target":1151},{"edgeType":"calls","source":1573,"target":1056},{"edgeType":"calls","source":1705,"target":928},{"edgeType":"contains","source":1198,"target":1204},{"edgeType":"calls","source":1072,"target":203},{"edgeType":"calls","source":1559,"target":433},{"edgeType":"contains","source":810,"target":1076},{"edgeType":"contains","source":385,"target":509},{"edgeType":"contains","source":29,"target":1504},{"edgeType":"calls","source":1068,"target":1119},{"edgeType":"calls","source":606,"target":643},{"edgeType":"contains","source":1123,"target":1150},{"edgeType":"contains","source":1446,"target":1447},{"edgeType":"contains","source":1539,"target":1735},{"edgeType":"calls","source":1654,"target":398},{"edgeType":"calls","source":641,"target":618},{"edgeType":"contains","source":1198,"target":1207},{"edgeType":"calls","source":1653,"target":693},{"edgeType":"calls","source":755,"target":519},{"edgeType":"contains","source":422,"target":423},{"edgeType":"extends","source":698,"target":699},{"edgeType":"contains","source":32,"target":357},{"edgeType":"contains","source":29,"target":1507},{"edgeType":"calls","source":1058,"target":111},{"edgeType":"calls","source":1214,"target":296},{"edgeType":"contains","source":1539,"target":1732},{"edgeType":"calls","source":1139,"target":507},{"edgeType":"calls","source":1679,"target":415},{"edgeType":"calls","source":1600,"target":750},{"edgeType":"calls","source":1613,"target":1404},{"edgeType":"contains","source":1123,"target":1153},{"edgeType":"calls","source":606,"target":645},{"edgeType":"imports","source":719,"target":76},{"edgeType":"calls","source":118,"target":710},{"edgeType":"contains","source":1198,"target":1206},{"edgeType":"contains","source":9,"target":804},{"edgeType":"calls","source":780,"target":800},{"edgeType":"contains","source":1295,"target":576},{"edgeType":"calls","source":656,"target":416},{"edgeType":"calls","source":1619,"target":689},{"edgeType":"contains","source":29,"target":1506},{"edgeType":"contains","source":32,"target":356},{"edgeType":"calls","so
urce":1597,"target":49},{"edgeType":"contains","source":1539,"target":1733},{"edgeType":"calls","source":1058,"target":110},{"edgeType":"contains","source":1123,"target":1152},{"edgeType":"calls","source":1613,"target":1403},{"edgeType":"calls","source":1646,"target":380},{"edgeType":"contains","source":1123,"target":1156},{"edgeType":"calls","source":1654,"target":656},{"edgeType":"calls","source":1073,"target":433},{"edgeType":"calls","source":754,"target":544},{"edgeType":"calls","source":877,"target":959},{"edgeType":"calls","source":1552,"target":382},{"edgeType":"calls","source":1233,"target":1286},{"edgeType":"calls","source":1542,"target":428},{"edgeType":"calls","source":1721,"target":428},{"edgeType":"imports","source":674,"target":675},{"edgeType":"calls","source":1585,"target":416},{"edgeType":"contains","source":1123,"target":1155},{"edgeType":"calls","source":1687,"target":689},{"edgeType":"contains","source":1198,"target":1208},{"edgeType":"calls","source":1666,"target":547},{"edgeType":"extends","source":250,"target":54},{"edgeType":"extends","source":764,"target":768},{"edgeType":"calls","source":1581,"target":1068},{"edgeType":"calls","source":818,"target":938},{"edgeType":"contains","source":719,"target":994},{"edgeType":"calls","source":1610,"target":433},{"edgeType":"contains","source":32,"target":358},{"edgeType":"contains","source":9,"target":14},{"edgeType":"contains","source":1446,"target":1451},{"edgeType":"calls","source":707,"target":679},{"edgeType":"calls","source":782,"target":204},{"edgeType":"contains","source":1123,"target":1154},{"edgeType":"calls","source":1585,"target":415},{"edgeType":"calls","source":1697,"target":382},{"edgeType":"contains","source":1123,"target":1158},{"edgeType":"calls","source":818,"target":412},{"edgeType":"contains","source":802,"target":803},{"edgeType":"contains","source":546,"target":547},{"edgeType":"calls","source":1392,"target":588},{"edgeType":"contains","source":290,"target":291},{"edgeType":"calls","source":363,"target":247},{"edgeType":"contains","source":1454,"target":1464},{"edgeType":"calls","source":725,"target":653},{"edgeType":"contains","source":719,"target":1261},{"edgeType":"calls","source":606,"target":642},{"edgeType":"contains","source":1539,"target":1736},{"edgeType":"calls","source":737,"target":545},{"edgeType":"calls","source":1480,"target":1030},{"edgeType":"calls","source":964,"target":906},{"edgeType":"calls","source":818,"target":411},{"edgeType":"calls","source":1185,"target":1190},{"edgeType":"calls","source":1214,"target":291},{"edgeType":"calls","source":707,"target":681},{"edgeType":"calls","source":1053,"target":1054},{"edgeType":"calls","source":1761,"target":247},{"edgeType":"calls","source":435,"target":392},{"edgeType":"contains","source":719,"target":996},{"edgeType":"contains","source":422,"target":1218},{"edgeType":"contains","source":32,"target":360},{"edgeType":"calls","source":640,"target":644},{"edgeType":"contains","source":1539,"target":1737},{"edgeType":"calls","source":1414,"target":433},{"edgeType":"calls","source":1455,"target":842},{"edgeType":"contains","source":677,"target":683},{"edgeType":"contains","source":1539,"target":1709},{"edgeType":"calls","source":1196,"target":414},{"edgeType":"calls","source":1277,"target":1074},{"edgeType":"calls","source":913,"target":203},{"edgeType":"contains","source":29,"target":1480},{"edgeType":"contains","source":1502,"target":1535},{"edgeType":"contains","source":32,"target":330},{"edgeType":"contains","source":27,"target":485},{"edgeT
ype":"calls","source":610,"target":611},{"edgeType":"contains","source":9,"target":1306},{"edgeType":"calls","source":1707,"target":428},{"edgeType":"calls","source":1717,"target":118},{"edgeType":"calls","source":387,"target":388},{"edgeType":"contains","source":33,"target":34},{"edgeType":"calls","source":1412,"target":588},{"edgeType":"contains","source":1502,"target":1536},{"edgeType":"contains","source":29,"target":1479},{"edgeType":"imports","source":312,"target":316},{"edgeType":"calls","source":1633,"target":1404},{"edgeType":"contains","source":1502,"target":1532},{"edgeType":"calls","source":1345,"target":1347},{"edgeType":"calls","source":1002,"target":88},{"edgeType":"contains","source":1222,"target":1231},{"edgeType":"calls","source":660,"target":648},{"edgeType":"calls","source":1673,"target":428},{"edgeType":"calls","source":1683,"target":118},{"edgeType":"imports","source":1198,"target":330},{"edgeType":"contains","source":1539,"target":1707},{"edgeType":"calls","source":1196,"target":416},{"edgeType":"calls","source":1003,"target":586},{"edgeType":"contains","source":29,"target":1482},{"edgeType":"calls","source":711,"target":389},{"edgeType":"imports","source":312,"target":313},{"edgeType":"contains","source":32,"target":332},{"edgeType":"calls","source":1345,"target":1346},{"edgeType":"calls","source":626,"target":380},{"edgeType":"contains","source":385,"target":751},{"edgeType":"calls","source":1633,"target":1403},{"edgeType":"contains","source":1502,"target":1533},{"edgeType":"calls","source":1773,"target":1027},{"edgeType":"calls","source":1576,"target":1056},{"edgeType":"calls","source":1069,"target":653},{"edgeType":"calls","source":1048,"target":247},{"edgeType":"calls","source":1002,"target":87},{"edgeType":"calls","source":1214,"target":1179},{"edgeType":"calls","source":1741,"target":433},{"edgeType":"calls","source":756,"target":578},{"edgeType":"contains","source":119,"target":1333},{"edgeType":"calls","source":1196,"target":415},{"edgeType":"contains","source":1539,"target":1708},{"edgeType":"calls","source":913,"target":204},{"edgeType":"calls","source":1700,"target":118},{"edgeType":"contains","source":1502,"target":1534},{"edgeType":"contains","source":29,"target":1481},{"edgeType":"contains","source":677,"target":687},{"edgeType":"calls","source":711,"target":911},{"edgeType":"contains","source":1222,"target":1233},{"edgeType":"calls","source":1580,"target":398},{"edgeType":"contains","source":1539,"target":1713},{"edgeType":"calls","source":707,"target":507},{"edgeType":"calls","source":701,"target":693},{"edgeType":"calls","source":744,"target":681},{"edgeType":"calls","source":1392,"target":412},{"edgeType":"contains","source":385,"target":754},{"edgeType":"contains","source":29,"target":1484},{"edgeType":"contains","source":7,"target":52},{"edgeType":"calls","source":808,"target":811},{"edgeType":"contains","source":677,"target":686},{"edgeType":"calls","source":740,"target":1068},{"edgeType":"calls","source":682,"target":752},{"edgeType":"calls","source":1214,"target":380},{"edgeType":"calls","source":1066,"target":740},{"edgeType":"calls","source":1366,"target":689},{"edgeType":"calls","source":1682,"target":670},{"edgeType":"calls","source":1402,"target":1422},{"edgeType":"contains","source":385,"target":753},{"edgeType":"imports","source":719,"target":115},{"edgeType":"calls","source":1392,"target":411},{"edgeType":"contains","source":29,"target":1483},{"edgeType":"contains","source":29,"target":1486},{"edgeType":"calls","source":1545,"target":428
},{"edgeType":"calls","source":626,"target":641},{"edgeType":"contains","source":719,"target":708},{"edgeType":"contains","source":1539,"target":1711},{"edgeType":"calls","source":1196,"target":412},{"edgeType":"contains","source":1502,"target":1537},{"edgeType":"imports","source":1219,"target":203},{"edgeType":"calls","source":1467,"target":996},{"edgeType":"calls","source":1002,"target":877},{"edgeType":"contains","source":1437,"target":1438},{"edgeType":"contains","source":29,"target":1485},{"edgeType":"calls","source":1690,"target":689},{"edgeType":"calls","source":1734,"target":382},{"edgeType":"calls","source":711,"target":912},{"edgeType":"calls","source":386,"target":417},{"edgeType":"extends","source":395,"target":94},{"edgeType":"calls","source":1196,"target":411},{"edgeType":"calls","source":1474,"target":778},{"edgeType":"imports","source":1219,"target":204},{"edgeType":"contains","source":1437,"target":1439},{"edgeType":"contains","source":385,"target":755},{"edgeType":"contains","source":1502,"target":1538},{"edgeType":"contains","source":29,"target":1488},{"edgeType":"imports","source":119,"target":1021},{"edgeType":"calls","source":707,"target":502},{"edgeType":"contains","source":1539,"target":1717},{"edgeType":"calls","source":744,"target":412},{"edgeType":"calls","source":1796,"target":40},{"edgeType":"calls","source":1639,"target":679},{"edgeType":"calls","source":1049,"target":206},{"edgeType":"calls","source":1653,"target":509},{"edgeType":"calls","source":1656,"target":416},{"edgeType":"imports","source":312,"target":1116},{"edgeType":"calls","source":1351,"target":622},{"edgeType":"contains","source":802,"target":1308},{"edgeType":"calls","source":386,"target":412},{"edgeType":"calls","source":1742,"target":392},{"edgeType":"contains","source":29,"target":1487},{"edgeType":"imports","source":29,"target":641},{"edgeType":"calls","source":707,"target":501},{"edgeType":"calls","source":1577,"target":750},{"edgeType":"contains","source":1539,"target":1718},{"edgeType":"imports","source":312,"target":589},{"edgeType":"calls","source":744,"target":411},{"edgeType":"contains","source":1124,"target":1103},{"edgeType":"calls","source":1049,"target":205},{"edgeType":"imports","source":312,"target":324},{"edgeType":"calls","source":988,"target":247},{"edgeType":"calls","source":1656,"target":415},{"edgeType":"calls","source":386,"target":411},{"edgeType":"calls","source":1351,"target":621},{"edgeType":"calls","source":1630,"target":428},{"edgeType":"calls","source":1640,"target":118},{"edgeType":"contains","source":29,"target":1490},{"edgeType":"imports","source":119,"target":1019},{"edgeType":"contains","source":1539,"target":1715},{"edgeType":"contains","source":1062,"target":385},{"edgeType":"calls","source":1231,"target":380},{"edgeType":"contains","source":719,"target":712},{"edgeType":"calls","source":386,"target":414},{"edgeType":"contains","source":29,"target":1489},{"edgeType":"calls","source":1313,"target":1273},{"edgeType":"calls","source":1312,"target":247},{"edgeType":"calls","source":707,"target":503},{"edgeType":"contains","source":10,"target":1285},{"edgeType":"contains","source":589,"target":1042},{"edgeType":"calls","source":1800,"target":710},{"edgeType":"calls","source":995,"target":824},{"edgeType":"contains","source":312,"target":1172},{"edgeType":"imports","source":312,"target":322},{"edgeType":"calls","source":1351,"target":623},{"edgeType":"calls","source":585,"target":586},{"edgeType":"extends","source":1318,"target":289},{"edgeType":"extends","source"
:775,"target":742},{"edgeType":"calls","source":1751,"target":110},{"edgeType":"contains","source":29,"target":1492},{"edgeType":"calls","source":1351,"target":618},{"edgeType":"contains","source":1539,"target":1721},{"edgeType":"calls","source":1774,"target":1775},{"edgeType":"calls","source":1674,"target":118},{"edgeType":"calls","source":1637,"target":1265},{"edgeType":"calls","source":1413,"target":1338},{"edgeType":"calls","source":1674,"target":382},{"edgeType":"calls","source":995,"target":291},{"edgeType":"contains","source":677,"target":695},{"edgeType":"calls","source":1077,"target":1184},{"edgeType":"calls","source":1545,"target":1210},{"edgeType":"extends","source":394,"target":396},{"edgeType":"contains","source":29,"target":1491},{"edgeType":"contains","source":1539,"target":1722},{"edgeType":"calls","source":509,"target":557},{"edgeType":"calls","source":928,"target":1046},{"edgeType":"calls","source":1214,"target":1164},{"edgeType":"imports","source":312,"target":320},{"edgeType":"contains","source":677,"target":694},{"edgeType":"calls","source":1711,"target":291},{"edgeType":"extends","source":46,"target":85},{"edgeType":"calls","source":1351,"target":620},{"edgeType":"imports","source":916,"target":356},{"edgeType":"calls","source":1640,"target":382},{"edgeType":"contains","source":29,"target":1494},{"edgeType":"calls","source":707,"target":500},{"edgeType":"calls","source":1139,"target":586},{"edgeType":"contains","source":1539,"target":1719},{"edgeType":"calls","source":1049,"target":204},{"edgeType":"imports","source":312,"target":317},{"edgeType":"calls","source":1103,"target":380},{"edgeType":"calls","source":1445,"target":877},{"edgeType":"calls","source":1351,"target":619},{"edgeType":"calls","source":1751,"target":111},{"edgeType":"contains","source":29,"target":1493},{"edgeType":"calls","source":1061,"target":96},{"edgeType":"contains","source":1539,"target":1720},{"edgeType":"contains","source":1123,"target":1140},{"edgeType":"imports","source":810,"target":207},{"edgeType":"calls","source":1637,"target":1266},{"edgeType":"calls","source":1049,"target":203},{"edgeType":"imports","source":312,"target":318},{"edgeType":"imports","source":119,"target":752},{"edgeType":"calls","source":1545,"target":1211},{"edgeType":"contains","source":916,"target":921},{"edgeType":"calls","source":1261,"target":1323},{"edgeType":"contains","source":1502,"target":1518},{"edgeType":"calls","source":1079,"target":623},{"edgeType":"contains","source":385,"target":733},{"edgeType":"calls","source":685,"target":681},{"edgeType":"contains","source":1775,"target":1776},{"edgeType":"imports","source":119,"target":204},{"edgeType":"contains","source":379,"target":655},{"edgeType":"calls","source":1646,"target":222},{"edgeType":"calls","source":1048,"target":1055},{"edgeType":"contains","source":916,"target":920},{"edgeType":"calls","source":1676,"target":1670},{"edgeType":"calls","source":710,"target":433},{"edgeType":"contains","source":1502,"target":1519},{"edgeType":"calls","source":1261,"target":1322},{"edgeType":"calls","source":751,"target":483},{"edgeType":"calls","source":1079,"target":622},{"edgeType":"contains","source":674,"target":1022},{"edgeType":"calls","source":1464,"target":842},{"edgeType":"imports","source":485,"target":486},{"edgeType":"calls","source":1709,"target":382},{"edgeType":"calls","source":682,"target":247},{"edgeType":"contains","source":916,"target":923},{"edgeType":"calls","source":1261,"target":1325},{"edgeType":"contains","source":674,"target":1026},{"edgeT
ype":"calls","source":1566,"target":1119},{"edgeType":"contains","source":1539,"target":1690},{"edgeType":"imports","source":1198,"target":842},{"edgeType":"contains","source":29,"target":1465},{"edgeType":"calls","source":1728,"target":1118},{"edgeType":"contains","source":1241,"target":1415},{"edgeType":"extends","source":918,"target":774},{"edgeType":"calls","source":1638,"target":1265},{"edgeType":"contains","source":1502,"target":1516},{"edgeType":"calls","source":1367,"target":681},{"edgeType":"contains","source":674,"target":1025},{"edgeType":"calls","source":1003,"target":337},{"edgeType":"contains","source":916,"target":922},{"edgeType":"calls","source":682,"target":246},{"edgeType":"contains","source":1539,"target":1691},{"edgeType":"calls","source":1261,"target":1324},{"edgeType":"imports","source":719,"target":365},{"edgeType":"extends","source":919,"target":742},{"edgeType":"contains","source":1502,"target":1517},{"edgeType":"imports","source":119,"target":203},{"edgeType":"calls","source":1493,"target":1002},{"edgeType":"calls","source":1667,"target":96},{"edgeType":"contains","source":119,"target":1320},{"edgeType":"calls","source":1494,"target":1495},{"edgeType":"calls","source":509,"target":583},{"edgeType":"contains","source":1539,"target":1696},{"edgeType":"calls","source":736,"target":681},{"edgeType":"calls","source":1783,"target":1785},{"edgeType":"calls","source":1656,"target":701},{"edgeType":"calls","source":1554,"target":428},{"edgeType":"calls","source":685,"target":412},{"edgeType":"contains","source":916,"target":925},{"edgeType":"contains","source":1502,"target":1522},{"edgeType":"contains","source":490,"target":389},{"edgeType":"calls","source":1079,"target":618},{"edgeType":"contains","source":379,"target":659},{"edgeType":"calls","source":1733,"target":428},{"edgeType":"contains","source":1643,"target":1644},{"edgeType":"calls","source":1349,"target":1233},{"edgeType":"calls","source":1699,"target":689},{"edgeType":"calls","source":1441,"target":1023},{"edgeType":"calls","source":1058,"target":740},{"edgeType":"contains","source":1539,"target":1697},{"edgeType":"contains","source":1106,"target":1112},{"edgeType":"calls","source":685,"target":411},{"edgeType":"contains","source":916,"target":924},{"edgeType":"contains","source":1502,"target":1523},{"edgeType":"calls","source":1783,"target":1784},{"edgeType":"calls","source":1664,"target":716},{"edgeType":"imports","source":29,"target":877},{"edgeType":"contains","source":419,"target":475},{"edgeType":"calls","source":1581,"target":118},{"edgeType":"calls","source":1418,"target":414},{"edgeType":"calls","source":1122,"target":1134},{"edgeType":"contains","source":1643,"target":1645},{"edgeType":"calls","source":1565,"target":878},{"edgeType":"calls","source":1581,"target":382},{"edgeType":"imports","source":674,"target":698},{"edgeType":"imports","source":719,"target":360},{"edgeType":"calls","source":1676,"target":1669},{"edgeType":"calls","source":1626,"target":48},{"edgeType":"contains","source":1106,"target":1115},{"edgeType":"contains","source":1502,"target":1520},{"edgeType":"calls","source":1261,"target":1321},{"edgeType":"contains","source":32,"target":319},{"edgeType":"calls","source":578,"target":560},{"edgeType":"calls","source":1602,"target":1056},{"edgeType":"contains","source":916,"target":927},{"edgeType":"contains","source":908,"target":118},{"edgeType":"calls","source":1079,"target":621},{"edgeType":"calls","source":1079,"target":620},{"edgeType":"calls","source":1547,"target":382},{"edgeType"
:"calls","source":750,"target":778},{"edgeType":"contains","source":1539,"target":1695},{"edgeType":"contains","source":1502,"target":1521},{"edgeType":"contains","source":1106,"target":1114},{"edgeType":"calls","source":1783,"target":1786},{"edgeType":"calls","source":1412,"target":603},{"edgeType":"calls","source":1418,"target":417},{"edgeType":"contains","source":916,"target":926},{"edgeType":"calls","source":1079,"target":619},{"edgeType":"contains","source":385,"target":474},{"edgeType":"calls","source":657,"target":489},{"edgeType":"calls","source":1214,"target":1191},{"edgeType":"calls","source":1392,"target":689},{"edgeType":"extends","source":385,"target":391},{"edgeType":"calls","source":1164,"target":622},{"edgeType":"contains","source":24,"target":41},{"edgeType":"contains","source":1502,"target":1526},{"edgeType":"contains","source":395,"target":432},{"edgeType":"calls","source":1626,"target":1627},{"edgeType":"calls","source":1631,"target":679},{"edgeType":"contains","source":1539,"target":1701},{"edgeType":"calls","source":1616,"target":1408},{"edgeType":"calls","source":1571,"target":689},{"edgeType":"imports","source":674,"target":712},{"edgeType":"contains","source":1502,"target":1527},{"edgeType":"calls","source":1164,"target":621},{"edgeType":"calls","source":1024,"target":204},{"edgeType":"calls","source":1041,"target":206},{"edgeType":"calls","source":1633,"target":1409},{"edgeType":"calls","source":1659,"target":1660},{"edgeType":"calls","source":1048,"target":782},{"edgeType":"contains","source":32,"target":321},{"edgeType":"calls","source":736,"target":412},{"edgeType":"contains","source":29,"target":1471},{"edgeType":"calls","source":592,"target":648},{"edgeType":"calls","source":560,"target":583},{"edgeType":"contains","source":1539,"target":1702},{"edgeType":"calls","source":1616,"target":1407},{"edgeType":"calls","source":1637,"target":756},{"edgeType":"contains","source":1222,"target":1223},{"edgeType":"contains","source":24,"target":43},{"edgeType":"contains","source":1539,"target":1698},{"edgeType":"calls","source":1713,"target":1046},{"edgeType":"contains","source":395,"target":434},{"edgeType":"calls","source":1605,"target":1223},{"edgeType":"contains","source":1502,"target":1524},{"edgeType":"calls","source":1717,"target":1715},{"edgeType":"contains","source":1539,"target":1699},{"edgeType":"calls","source":1349,"target":1231},{"edgeType":"calls","source":882,"target":117},{"edgeType":"contains","source":24,"target":42},{"edgeType":"calls","source":1164,"target":623},{"edgeType":"calls","source":1614,"target":679},{"edgeType":"calls","source":1598,"target":118},{"edgeType":"contains","source":1502,"target":1525},{"edgeType":"contains","source":1106,"target":1118},{"edgeType":"contains","source":32,"target":323},{"edgeType":"calls","source":1197,"target":1185},{"edgeType":"calls","source":1491,"target":263},{"edgeType":"contains","source":719,"target":959},{"edgeType":"calls","source":1686,"target":296},{"edgeType":"contains","source":1539,"target":1700},{"edgeType":"calls","source":1616,"target":1409},{"edgeType":"calls","source":578,"target":291},{"edgeType":"calls","source":744,"target":689},{"edgeType":"calls","source":1633,"target":1406},{"edgeType":"calls","source":645,"target":587},{"edgeType":"calls","source":1164,"target":618},{"edgeType":"contains","source":1502,"target":1530},{"edgeType":"contains","source":119,"target":1328},{"edgeType":"calls","source":1041,"target":203},{"edgeType":"calls","source":1616,"target":1404},{"edgeType":"imports","so
urce":119,"target":1001},{"edgeType":"calls","source":1214,"target":653},{"edgeType":"contains","source":29,"target":1476},{"edgeType":"calls","source":1681,"target":710},{"edgeType":"imports","source":674,"target":708},{"edgeType":"contains","source":1539,"target":1705},{"edgeType":"calls","source":1633,"target":1405},{"edgeType":"calls","source":1622,"target":689},{"edgeType":"contains","source":806,"target":907},{"edgeType":"contains","source":395,"target":435},{"edgeType":"contains","source":1502,"target":1531},{"edgeType":"calls","source":1345,"target":1348},{"edgeType":"calls","source":1048,"target":1042},{"edgeType":"calls","source":1791,"target":1792},{"edgeType":"calls","source":1700,"target":913},{"edgeType":"calls","source":1196,"target":417},{"edgeType":"calls","source":1616,"target":1403},{"edgeType":"calls","source":1559,"target":1056},{"edgeType":"contains","source":32,"target":325},{"edgeType":"imports","source":119,"target":1002},{"edgeType":"calls","source":387,"target":392},{"edgeType":"contains","source":29,"target":1475},{"edgeType":"calls","source":1551,"target":247},{"edgeType":"contains","source":1539,"target":1706},{"edgeType":"contains","source":1502,"target":1528},{"edgeType":"calls","source":1313,"target":1286},{"edgeType":"calls","source":1024,"target":203},{"edgeType":"calls","source":1164,"target":620},{"edgeType":"calls","source":1041,"target":205},{"edgeType":"calls","source":1567,"target":547},{"edgeType":"calls","source":1633,"target":1408},{"edgeType":"calls","source":1767,"target":689},{"edgeType":"calls","source":736,"target":411},{"edgeType":"calls","source":1433,"target":1001},{"edgeType":"contains","source":8,"target":15},{"edgeType":"contains","source":810,"target":1050},{"edgeType":"contains","source":29,"target":1478},{"edgeType":"calls","source":737,"target":644},{"edgeType":"calls","source":1412,"target":1388},{"edgeType":"calls","source":818,"target":247},{"edgeType":"calls","source":1616,"target":1406},{"edgeType":"calls","source":1751,"target":392},{"edgeType":"contains","source":1539,"target":1703},{"edgeType":"contains","source":1502,"target":1529},{"edgeType":"extends","source":738,"target":288},{"edgeType":"calls","source":1164,"target":619},{"edgeType":"calls","source":1139,"target":337},{"edgeType":"calls","source":1649,"target":118},{"edgeType":"calls","source":1041,"target":204},{"edgeType":"contains","source":385,"target":483},{"edgeType":"calls","source":1633,"target":1407},{"edgeType":"contains","source":1106,"target":1122},{"edgeType":"calls","source":1605,"target":689},{"edgeType":"calls","source":1616,"target":1405},{"edgeType":"imports","source":119,"target":207},{"edgeType":"contains","source":29,"target":1477},{"edgeType":"calls","source":1657,"target":398},{"edgeType":"calls","source":1573,"target":888},{"edgeType":"contains","source":393,"target":763},{"edgeType":"contains","source":1539,"target":1704},{"edgeType":"imports","source":119,"target":1046},{"edgeType":"calls","source":842,"target":785},{"edgeType":"calls","source":1544,"target":428},{"edgeType":"calls","source":1554,"target":118},{"edgeType":"calls","source":697,"target":523},{"edgeType":"contains","source":419,"target":456},{"edgeType":"calls","source":726,"target":681},{"edgeType":"contains","source":490,"target":369},{"edgeType":"contains","source":1539,"target":1676},{"edgeType":"contains","source":477,"target":507},{"edgeType":"calls","source":574,"target":373},{"edgeType":"calls","source":1614,"target":372},{"edgeType":"contains","source":312,"target":1394
},{"edgeType":"imports","source":119,"target":782},{"edgeType":"imports","source":1237,"target":477},{"edgeType":"calls","source":783,"target":500},{"edgeType":"calls","source":780,"target":593},{"edgeType":"calls","source":1466,"target":996},{"edgeType":"calls","source":1554,"target":382},{"edgeType":"imports","source":1219,"target":244},{"edgeType":"calls","source":1689,"target":689},{"edgeType":"calls","source":1733,"target":382},{"edgeType":"contains","source":419,"target":455},{"edgeType":"calls","source":1614,"target":371},{"edgeType":"calls","source":574,"target":371},{"edgeType":"calls","source":578,"target":247},{"edgeType":"imports","source":312,"target":878},{"edgeType":"contains","source":490,"target":368},{"edgeType":"calls","source":592,"target":606},{"edgeType":"contains","source":1539,"target":1677},{"edgeType":"calls","source":574,"target":372},{"edgeType":"contains","source":29,"target":1446},{"edgeType":"calls","source":1473,"target":778},{"edgeType":"calls","source":1553,"target":412},{"edgeType":"contains","source":1246,"target":1247},{"edgeType":"calls","source":842,"target":787},{"edgeType":"contains","source":490,"target":371},{"edgeType":"calls","source":783,"target":502},{"edgeType":"calls","source":1261,"target":747},{"edgeType":"contains","source":719,"target":1200},{"edgeType":"calls","source":629,"target":519},{"edgeType":"calls","source":1633,"target":1635},{"edgeType":"contains","source":419,"target":458},{"edgeType":"contains","source":1295,"target":1310},{"edgeType":"imports","source":1498,"target":1106},{"edgeType":"calls","source":1614,"target":374},{"edgeType":"imports","source":379,"target":383},{"edgeType":"contains","source":1539,"target":1674},{"edgeType":"calls","source":906,"target":653},{"edgeType":"calls","source":842,"target":786},{"edgeType":"contains","source":490,"target":370},{"edgeType":"calls","source":783,"target":501},{"edgeType":"calls","source":780,"target":594},{"edgeType":"calls","source":1413,"target":1319},{"edgeType":"calls","source":697,"target":524},{"edgeType":"calls","source":1706,"target":428},{"edgeType":"contains","source":419,"target":457},{"edgeType":"calls","source":1242,"target":1335},{"edgeType":"calls","source":1261,"target":746},{"edgeType":"calls","source":1633,"target":1634},{"edgeType":"contains","source":1539,"target":1675},{"edgeType":"calls","source":574,"target":374},{"edgeType":"calls","source":1614,"target":373},{"edgeType":"calls","source":507,"target":337},{"edgeType":"imports","source":674,"target":752},{"edgeType":"calls","source":1068,"target":122},{"edgeType":"calls","source":1230,"target":121},{"edgeType":"contains","source":7,"target":16},{"edgeType":"calls","source":1566,"target":1068},{"edgeType":"contains","source":490,"target":373},{"edgeType":"contains","source":419,"target":460},{"edgeType":"calls","source":1723,"target":689},{"edgeType":"calls","source":1467,"target":433},{"edgeType":"calls","source":574,"target":368},{"edgeType":"calls","source":1172,"target":593},{"edgeType":"calls","source":1614,"target":368},{"edgeType":"contains","source":1060,"target":1201},{"edgeType":"contains","source":119,"target":1304},{"edgeType":"calls","source":1599,"target":40},{"edgeType":"contains","source":1539,"target":1680},{"edgeType":"calls","source":1392,"target":643},{"edgeType":"calls","source":818,"target":204},{"edgeType":"calls","source":1708,"target":96},{"edgeType":"calls","source":617,"target":621},{"edgeType":"contains","source":1246,"target":1250},{"edgeType":"calls","source":1595,"target":428}
,{"edgeType":"contains","source":490,"target":372},{"edgeType":"contains","source":385,"target":720},{"edgeType":"extends","source":764,"target":314},{"edgeType":"calls","source":726,"target":412},{"edgeType":"contains","source":419,"target":459},{"edgeType":"contains","source":27,"target":719},{"edgeType":"calls","source":1493,"target":154},{"edgeType":"contains","source":546,"target":750},{"edgeType":"calls","source":1312,"target":1273},{"edgeType":"calls","source":1186,"target":1215},{"edgeType":"calls","source":1614,"target":367},{"edgeType":"calls","source":574,"target":367},{"edgeType":"calls","source":1799,"target":710},{"edgeType":"calls","source":818,"target":203},{"edgeType":"contains","source":1539,"target":1681},{"edgeType":"calls","source":836,"target":702},{"edgeType":"calls","source":1392,"target":642},{"edgeType":"calls","source":617,"target":620},{"edgeType":"calls","source":1677,"target":1056},{"edgeType":"calls","source":1077,"target":894},{"edgeType":"calls","source":617,"target":623},{"edgeType":"calls","source":574,"target":370},{"edgeType":"calls","source":1614,"target":370},{"edgeType":"calls","source":1579,"target":398},{"edgeType":"contains","source":1539,"target":1678},{"edgeType":"contains","source":32,"target":39},{"edgeType":"calls","source":1655,"target":684},{"edgeType":"calls","source":928,"target":1024},{"edgeType":"calls","source":1553,"target":411},{"edgeType":"contains","source":395,"target":413},{"edgeType":"contains","source":476,"target":1073},{"edgeType":"contains","source":385,"target":723},{"edgeType":"calls","source":1172,"target":594},{"edgeType":"contains","source":419,"target":461},{"edgeType":"contains","source":490,"target":374},{"edgeType":"calls","source":1614,"target":369},{"edgeType":"imports","source":1198,"target":360},{"edgeType":"calls","source":1065,"target":740},{"edgeType":"calls","source":574,"target":369},{"edgeType":"contains","source":1060,"target":1202},{"edgeType":"imports","source":29,"target":395},{"edgeType":"calls","source":1681,"target":670},{"edgeType":"imports","source":1219,"target":1030},{"edgeType":"contains","source":1539,"target":1679},{"edgeType":"calls","source":617,"target":622},{"edgeType":"calls","source":1082,"target":741},{"edgeType":"contains","source":419,"target":464},{"edgeType":"calls","source":1726,"target":1120},{"edgeType":"contains","source":1539,"target":1684},{"edgeType":"imports","source":379,"target":394},{"edgeType":"calls","source":1798,"target":1266},{"edgeType":"contains","source":1502,"target":1510},{"edgeType":"contains","source":1246,"target":1254},{"edgeType":"calls","source":723,"target":502},{"edgeType":"calls","source":1444,"target":877},{"edgeType":"contains","source":29,"target":1454},{"edgeType":"contains","source":419,"target":463},{"edgeType":"imports","source":312,"target":886},{"edgeType":"calls","source":626,"target":601},{"edgeType":"calls","source":1673,"target":384},{"edgeType":"contains","source":1539,"target":1685},{"edgeType":"calls","source":928,"target":752},{"edgeType":"calls","source":1646,"target":428},{"edgeType":"calls","source":1726,"target":1119},{"edgeType":"calls","source":1580,"target":888},{"edgeType":"calls","source":1798,"target":1265},{"edgeType":"contains","source":1502,"target":1511},{"edgeType":"contains","source":1241,"target":1410},{"edgeType":"contains","source":1246,"target":1255},{"edgeType":"calls","source":723,"target":501},{"edgeType":"calls","source":1544,"target":1211},{"edgeType":"calls","source":1636,"target":1266},{"edgeType":"contains","
source":379,"target":649},{"edgeType":"imports","source":312,"target":883},{"edgeType":"contains","source":419,"target":466},{"edgeType":"imports","source":393,"target":486},{"edgeType":"calls","source":1122,"target":291},{"edgeType":"contains","source":1502,"target":1243},{"edgeType":"calls","source":1493,"target":153},{"edgeType":"contains","source":1539,"target":1682},{"edgeType":"calls","source":1652,"target":509},{"edgeType":"calls","source":1655,"target":416},{"edgeType":"calls","source":617,"target":619},{"edgeType":"calls","source":726,"target":411},{"edgeType":"calls","source":1233,"target":1342},{"edgeType":"contains","source":1502,"target":1508},{"edgeType":"contains","source":674,"target":1017},{"edgeType":"calls","source":1741,"target":392},{"edgeType":"calls","source":697,"target":516},{"edgeType":"contains","source":419,"target":465},{"edgeType":"calls","source":823,"target":574},{"edgeType":"contains","source":1539,"target":1683},{"edgeType":"calls","source":1655,"target":415},{"edgeType":"calls","source":1164,"target":573},{"edgeType":"contains","source":742,"target":758},{"edgeType":"contains","source":1246,"target":1253},{"edgeType":"calls","source":723,"target":503},{"edgeType":"contains","source":1502,"target":1509},{"edgeType":"calls","source":617,"target":618},{"edgeType":"contains","source":32,"target":309},{"edgeType":"calls","source":1003,"target":1067},{"edgeType":"contains","source":916,"target":917},{"edgeType":"calls","source":928,"target":1013},{"edgeType":"contains","source":1539,"target":1688},{"edgeType":"contains","source":674,"target":1019},{"edgeType":"contains","source":1241,"target":1413},{"edgeType":"contains","source":1246,"target":1258},{"edgeType":"calls","source":812,"target":646},{"edgeType":"contains","source":1060,"target":1209},{"edgeType":"contains","source":1502,"target":1514},{"edgeType":"calls","source":592,"target":595},{"edgeType":"contains","source":379,"target":651},{"edgeType":"calls","source":1569,"target":433},{"edgeType":"calls","source":708,"target":433},{"edgeType":"imports","source":312,"target":882},{"edgeType":"imports","source":1062,"target":94},{"edgeType":"calls","source":1221,"target":386},{"edgeType":"calls","source":1749,"target":1459},{"edgeType":"contains","source":1539,"target":1689},{"edgeType":"calls","source":509,"target":524},{"edgeType":"contains","source":674,"target":1018},{"edgeType":"imports","source":379,"target":390},{"edgeType":"contains","source":1241,"target":1414},{"edgeType":"calls","source":1697,"target":428},{"edgeType":"contains","source":1502,"target":1515},{"edgeType":"calls","source":1707,"target":382},{"edgeType":"contains","source":395,"target":418},{"edgeType":"contains","source":385,"target":728},{"edgeType":"calls","source":1740,"target":416},{"edgeType":"contains","source":419,"target":467},{"edgeType":"calls","source":1068,"target":111},{"edgeType":"imports","source":1219,"target":245},{"edgeType":"contains","source":32,"target":311},{"edgeType":"contains","source":916,"target":919},{"edgeType":"contains","source":1539,"target":1686},{"edgeType":"calls","source":1610,"target":750},{"edgeType":"extends","source":920,"target":774},{"edgeType":"contains","source":1502,"target":1512},{"edgeType":"calls","source":1636,"target":1265},{"edgeType":"calls","source":1673,"target":118},{"edgeType":"contains","source":674,"target":1021},{"edgeType":"contains","source":1246,"target":1256},{"edgeType":"calls","source":1629,"target":689},{"edgeType":"calls","source":1673,"target":382},{"edgeType":"calls"
,"source":1581,"target":1120},{"edgeType":"calls","source":1544,"target":1210},{"edgeType":"contains","source":476,"target":817},{"edgeType":"calls","source":723,"target":500},{"edgeType":"calls","source":1451,"target":1450},{"edgeType":"extends","source":775,"target":776},{"edgeType":"calls","source":1068,"target":110},{"edgeType":"imports","source":1219,"target":246},{"edgeType":"contains","source":916,"target":918},{"edgeType":"contains","source":32,"target":310},{"edgeType":"imports","source":312,"target":880},{"edgeType":"calls","source":1261,"target":1262},{"edgeType":"calls","source":1412,"target":1338},{"edgeType":"contains","source":1539,"target":1687},{"edgeType":"calls","source":1773,"target":1774},{"edgeType":"contains","source":1502,"target":1513},{"edgeType":"calls","source":1180,"target":337},{"edgeType":"imports","source":32,"target":47},{"edgeType":"contains","source":1241,"target":1412},{"edgeType":"contains","source":1246,"target":1257},{"edgeType":"contains","source":395,"target":420},{"edgeType":"calls","source":653,"target":291},{"edgeType":"calls","source":1581,"target":1119},{"edgeType":"contains","source":379,"target":652},{"edgeType":"calls","source":682,"target":713},{"edgeType":"contains","source":1539,"target":1659},{"edgeType":"calls","source":1043,"target":913},{"edgeType":"imports","source":674,"target":209},{"edgeType":"calls","source":741,"target":761},{"edgeType":"calls","source":1601,"target":1056},{"edgeType":"contains","source":331,"target":525},{"edgeType":"calls","source":1562,"target":679},{"edgeType":"calls","source":1762,"target":557},{"edgeType":"contains","source":546,"target":730},{"edgeType":"contains","source":719,"target":388},{"edgeType":"calls","source":584,"target":608},{"edgeType":"calls","source":1546,"target":382},{"edgeType":"imports","source":674,"target":208},{"edgeType":"contains","source":419,"target":439},{"edgeType":"calls","source":701,"target":416},{"edgeType":"contains","source":1060,"target":1180},{"edgeType":"calls","source":1122,"target":842},{"edgeType":"contains","source":287,"target":303},{"edgeType":"contains","source":1782,"target":1790},{"edgeType":"contains","source":1295,"target":1296},{"edgeType":"calls","source":1044,"target":88},{"edgeType":"imports","source":29,"target":385},{"edgeType":"calls","source":711,"target":633},{"edgeType":"imports","source":674,"target":210},{"edgeType":"calls","source":1112,"target":623},{"edgeType":"contains","source":719,"target":387},{"edgeType":"calls","source":756,"target":560},{"edgeType":"calls","source":584,"target":607},{"edgeType":"calls","source":701,"target":415},{"edgeType":"imports","source":674,"target":738},{"edgeType":"calls","source":1366,"target":412},{"edgeType":"contains","source":1295,"target":1297},{"edgeType":"contains","source":546,"target":729},{"edgeType":"contains","source":1782,"target":1791},{"edgeType":"contains","source":419,"target":438},{"edgeType":"extends","source":675,"target":677},{"edgeType":"calls","source":1492,"target":1002},{"edgeType":"calls","source":684,"target":416},{"edgeType":"calls","source":1691,"target":382},{"edgeType":"contains","source":1539,"target":1657},{"edgeType":"contains","source":546,"target":732},{"edgeType":"contains","source":395,"target":392},{"edgeType":"contains","source":1782,"target":1788},{"edgeType":"imports","source":674,"target":206},{"edgeType":"calls","source":1339,"target":1252},{"edgeType":"contains","source":419,"target":441},{"edgeType":"contains","source":287,"target":305},{"edgeType":"contains","source
":1427,"target":1430},{"edgeType":"calls","source":678,"target":337},{"edgeType":"calls","source":684,"target":415},{"edgeType":"imports","source":312,"target":331},{"edgeType":"contains","source":29,"target":1431},{"edgeType":"calls","source":928,"target":1043},{"edgeType":"contains","source":1539,"target":1658},{"edgeType":"imports","source":119,"target":1028},{"edgeType":"contains","source":385,"target":701},{"edgeType":"calls","source":1743,"target":90},{"edgeType":"contains","source":1782,"target":1789},{"edgeType":"calls","source":684,"target":680},{"edgeType":"contains","source":546,"target":731},{"edgeType":"contains","source":419,"target":440},{"edgeType":"calls","source":1708,"target":382},{"edgeType":"imports","source":1498,"target":560},{"edgeType":"calls","source":1122,"target":843},{"edgeType":"contains","source":287,"target":304},{"edgeType":"imports","source":32,"target":287},{"edgeType":"imports","source":719,"target":395},{"edgeType":"calls","source":1402,"target":1407},{"edgeType":"contains","source":1539,"target":1663},{"edgeType":"calls","source":1604,"target":1223},{"edgeType":"contains","source":822,"target":899},{"edgeType":"imports","source":312,"target":328},{"edgeType":"calls","source":246,"target":247},{"edgeType":"calls","source":928,"target":246},{"edgeType":"calls","source":1112,"target":619},{"edgeType":"contains","source":419,"target":443},{"edgeType":"contains","source":2,"target":422},{"edgeType":"calls","source":1456,"target":790},{"edgeType":"calls","source":1496,"target":607},{"edgeType":"imports","source":29,"target":116},{"edgeType":"contains","source":287,"target":307},{"edgeType":"calls","source":1073,"target":507},{"edgeType":"contains","source":719,"target":1449},{"edgeType":"calls","source":263,"target":247},{"edgeType":"calls","source":1613,"target":679},{"edgeType":"calls","source":1597,"target":118},{"edgeType":"imports","source":674,"target":734},{"edgeType":"contains","source":1539,"target":1664},{"edgeType":"calls","source":1456,"target":789},{"edgeType":"calls","source":1402,"target":1406},{"edgeType":"calls","source":1722,"target":735},{"edgeType":"contains","source":395,"target":658},{"edgeType":"calls","source":1597,"target":382},{"edgeType":"imports","source":312,"target":329},{"edgeType":"calls","source":1112,"target":618},{"edgeType":"imports","source":674,"target":205},{"edgeType":"contains","source":419,"target":442},{"edgeType":"contains","source":287,"target":306},{"edgeType":"contains","source":719,"target":1448},{"edgeType":"calls","source":1597,"target":121},{"edgeType":"contains","source":1539,"target":1661},{"edgeType":"calls","source":1044,"target":87},{"edgeType":"calls","source":1366,"target":411},{"edgeType":"calls","source":1553,"target":428},{"edgeType":"contains","source":395,"target":397},{"edgeType":"calls","source":1112,"target":622},{"edgeType":"calls","source":711,"target":632},{"edgeType":"contains","source":2,"target":1216},{"edgeType":"contains","source":29,"target":1437},{"edgeType":"calls","source":1112,"target":621},{"edgeType":"calls","source":1068,"target":928},{"edgeType":"contains","source":2,"target":424},{"edgeType":"contains","source":419,"target":445},{"edgeType":"calls","source":671,"target":1080},{"edgeType":"calls","source":1732,"target":428},{"edgeType":"calls","source":1402,"target":1409},{"edgeType":"imports","source":1498,"target":557},{"edgeType":"calls","source":1472,"target":296},{"edgeType":"calls","source":1698,"target":689},{"edgeType":"extends","source":1220,"target":698},{"edgeType":"
contains","source":1539,"target":1662},{"edgeType":"contains","source":589,"target":988},{"edgeType":"extends","source":287,"target":288},{"edgeType":"calls","source":1585,"target":756},{"edgeType":"imports","source":312,"target":327},{"edgeType":"calls","source":1112,"target":620},{"edgeType":"imports","source":312,"target":326},{"edgeType":"contains","source":419,"target":444},{"edgeType":"contains","source":546,"target":735},{"edgeType":"calls","source":1402,"target":1408},{"edgeType":"calls","source":1496,"target":608},{"edgeType":"contains","source":287,"target":308},{"edgeType":"contains","source":1782,"target":1793},{"edgeType":"calls","source":715,"target":502},{"edgeType":"calls","source":1402,"target":1403},{"edgeType":"calls","source":1337,"target":247},{"edgeType":"calls","source":1456,"target":786},{"edgeType":"contains","source":1241,"target":1392},{"edgeType":"calls","source":1691,"target":1693},{"edgeType":"contains","source":546,"target":739},{"edgeType":"calls","source":1632,"target":1408},{"edgeType":"imports","source":119,"target":773},{"edgeType":"contains","source":419,"target":447},{"edgeType":"calls","source":1766,"target":689},{"edgeType":"calls","source":1230,"target":1185},{"edgeType":"calls","source":766,"target":507},{"edgeType":"contains","source":379,"target":630},{"edgeType":"calls","source":1750,"target":392},{"edgeType":"contains","source":719,"target":1453},{"edgeType":"contains","source":1539,"target":1668},{"edgeType":"extends","source":582,"target":394},{"edgeType":"calls","source":715,"target":501},{"edgeType":"calls","source":1209,"target":1307},{"edgeType":"imports","source":119,"target":246},{"edgeType":"calls","source":1456,"target":785},{"edgeType":"calls","source":1468,"target":1470},{"edgeType":"contains","source":589,"target":462},{"edgeType":"calls","source":1691,"target":1692},{"edgeType":"calls","source":1632,"target":1407},{"edgeType":"calls","source":726,"target":689},{"edgeType":"calls","source":1493,"target":166},{"edgeType":"contains","source":546,"target":737},{"edgeType":"calls","source":1648,"target":118},{"edgeType":"contains","source":419,"target":446},{"edgeType":"calls","source":1572,"target":888},{"edgeType":"contains","source":719,"target":1452},{"edgeType":"calls","source":1648,"target":382},{"edgeType":"imports","source":1498,"target":40},{"edgeType":"calls","source":1402,"target":1405},{"edgeType":"contains","source":1539,"target":1665},{"edgeType":"contains","source":742,"target":743},{"edgeType":"calls","source":1456,"target":788},{"edgeType":"contains","source":419,"target":449},{"edgeType":"calls","source":1730,"target":750},{"edgeType":"calls","source":682,"target":206},{"edgeType":"calls","source":1783,"target":428},{"edgeType":"calls","source":1570,"target":689},{"edgeType":"extends","source":477,"target":480},{"edgeType":"calls","source":715,"target":503},{"edgeType":"calls","source":1456,"target":787},{"edgeType":"contains","source":1539,"target":1666},{"edgeType":"calls","source":1402,"target":1404},{"edgeType":"calls","source":1691,"target":1694},{"edgeType":"calls","source":1632,"target":1409},{"edgeType":"imports","source":119,"target":772},{"edgeType":"contains","source":379,"target":631},{"edgeType":"calls","source":1179,"target":653},{"edgeType":"contains","source":419,"target":448},{"edgeType":"contains","source":29,"target":1440},{"edgeType":"calls","source":1003,"target":296},{"edgeType":"calls","source":682,"target":205},{"edgeType":"contains","source":1539,"target":1667},{"edgeType":"calls","source":756,
"target":818},{"edgeType":"calls","source":1632,"target":1404},{"edgeType":"calls","source":678,"target":588},{"edgeType":"contains","source":719,"target":1458},{"edgeType":"extends","source":1070,"target":334},{"edgeType":"calls","source":1242,"target":1338},{"edgeType":"calls","source":817,"target":507},{"edgeType":"calls","source":842,"target":789},{"edgeType":"calls","source":1672,"target":428},{"edgeType":"calls","source":1682,"target":118},{"edgeType":"calls","source":629,"target":521},{"edgeType":"contains","source":419,"target":452},{"edgeType":"calls","source":1233,"target":296},{"edgeType":"calls","source":1115,"target":782},{"edgeType":"contains","source":29,"target":1443},{"edgeType":"calls","source":1606,"target":888},{"edgeType":"imports","source":674,"target":741},{"edgeType":"calls","source":483,"target":291},{"edgeType":"contains","source":1539,"target":1672},{"edgeType":"calls","source":1343,"target":585},{"edgeType":"contains","source":589,"target":995},{"edgeType":"calls","source":783,"target":503},{"edgeType":"calls","source":1632,"target":1403},{"edgeType":"calls","source":1575,"target":1056},{"edgeType":"contains","source":719,"target":1457},{"edgeType":"calls","source":842,"target":788},{"edgeType":"calls","source":629,"target":520},{"edgeType":"contains","source":419,"target":451},{"edgeType":"contains","source":419,"target":450},{"edgeType":"contains","source":1539,"target":1673},{"edgeType":"calls","source":1468,"target":1469},{"edgeType":"contains","source":1060,"target":1195},{"edgeType":"contains","source":385,"target":715},{"edgeType":"calls","source":1656,"target":398},{"edgeType":"calls","source":715,"target":500},{"edgeType":"imports","source":674,"target":211},{"edgeType":"contains","source":7,"target":805},{"edgeType":"contains","source":490,"target":367},{"edgeType":"calls","source":1632,"target":1406},{"edgeType":"calls","source":1724,"target":1725},{"edgeType":"contains","source":419,"target":454},{"edgeType":"extends","source":1311,"target":1318},{"edgeType":"calls","source":1659,"target":833},{"edgeType":"calls","source":509,"target":544},{"edgeType":"calls","source":1490,"target":1051},{"edgeType":"calls","source":1632,"target":1405},{"edgeType":"calls","source":842,"target":790},{"edgeType":"calls","source":1680,"target":710},{"edgeType":"calls","source":1589,"target":888},{"edgeType":"imports","source":674,"target":212},{"edgeType":"contains","source":419,"target":453},{"edgeType":"contains","source":7,"target":12},{"edgeType":"calls","source":1030,"target":1040},{"edgeType":"contains","source":1539,"target":1671},{"edgeType":"imports","source":312,"target":117},{"edgeType":"contains","source":1241,"target":1367},{"edgeType":"contains","source":719,"target":1429},{"edgeType":"contains","source":385,"target":684},{"edgeType":"contains","source":262,"target":269},{"edgeType":"calls","source":40,"target":113},{"edgeType":"calls","source":928,"target":596},{"edgeType":"imports","source":32,"target":76},{"edgeType":"contains","source":1539,"target":1643},{"edgeType":"contains","source":1062,"target":573},{"edgeType":"calls","source":1549,"target":372},{"edgeType":"calls","source":591,"target":209},{"edgeType":"contains","source":1567,"target":1568},{"edgeType":"contains","source":331,"target":508},{"edgeType":"calls","source":1624,"target":689},{"edgeType":"calls","source":1398,"target":824},{"edgeType":"contains","source":262,"target":268},{"edgeType":"calls","source":40,"target":112},{"edgeType":"calls","source":1702,"target":913},{"edgeType":"calls"
,"source":1549,"target":371},{"edgeType":"calls","source":1726,"target":433},{"edgeType":"calls","source":591,"target":208},{"edgeType":"calls","source":1602,"target":49},{"edgeType":"imports","source":1198,"target":925},{"edgeType":"calls","source":1667,"target":416},{"edgeType":"contains","source":46,"target":96},{"edgeType":"imports","source":312,"target":908},{"edgeType":"calls","source":1435,"target":1001},{"edgeType":"calls","source":641,"target":247},{"edgeType":"calls","source":1769,"target":689},{"edgeType":"calls","source":1549,"target":374},{"edgeType":"calls","source":1209,"target":608},{"edgeType":"calls","source":1753,"target":392},{"edgeType":"calls","source":877,"target":594},{"edgeType":"contains","source":1539,"target":1641},{"edgeType":"imports","source":1198,"target":924},{"edgeType":"calls","source":1667,"target":415},{"edgeType":"calls","source":1680,"target":1069},{"edgeType":"imports","source":1198,"target":926},{"edgeType":"contains","source":1241,"target":1366},{"edgeType":"calls","source":1214,"target":1245},{"edgeType":"calls","source":40,"target":114},{"edgeType":"calls","source":1172,"target":433},{"edgeType":"calls","source":1229,"target":516},{"edgeType":"calls","source":928,"target":597},{"edgeType":"calls","source":1607,"target":689},{"edgeType":"calls","source":1549,"target":373},{"edgeType":"calls","source":1659,"target":398},{"edgeType":"calls","source":877,"target":593},{"edgeType":"calls","source":1575,"target":888},{"edgeType":"calls","source":1209,"target":607},{"edgeType":"calls","source":1202,"target":824},{"edgeType":"contains","source":46,"target":98},{"edgeType":"calls","source":1549,"target":368},{"edgeType":"calls","source":1209,"target":1395},{"edgeType":"calls","source":40,"target":373},{"edgeType":"extends","source":1106,"target":1109},{"edgeType":"calls","source":1402,"target":433},{"edgeType":"calls","source":1418,"target":994},{"edgeType":"calls","source":1343,"target":412},{"edgeType":"calls","source":1002,"target":412},{"edgeType":"calls","source":386,"target":482},{"edgeType":"calls","source":906,"target":482},{"edgeType":"calls","source":1556,"target":679},{"edgeType":"imports","source":674,"target":782},{"edgeType":"contains","source":1539,"target":1647},{"edgeType":"calls","source":678,"target":679},{"edgeType":"imports","source":29,"target":959},{"edgeType":"contains","source":46,"target":97},{"edgeType":"contains","source":23,"target":810},{"edgeType":"calls","source":1549,"target":367},{"edgeType":"calls","source":1223,"target":1224},{"edgeType":"calls","source":40,"target":372},{"edgeType":"calls","source":1002,"target":411},{"edgeType":"calls","source":1709,"target":428},{"edgeType":"calls","source":1343,"target":411},{"edgeType":"calls","source":1754,"target":90},{"edgeType":"imports","source":1198,"target":395},{"edgeType":"contains","source":1539,"target":1648},{"edgeType":"calls","source":1414,"target":588},{"edgeType":"calls","source":824,"target":380},{"edgeType":"calls","source":1367,"target":1252},{"edgeType":"calls","source":1230,"target":1010},{"edgeType":"calls","source":1549,"target":370},{"edgeType":"contains","source":46,"target":100},{"edgeType":"calls","source":1209,"target":1397},{"edgeType":"calls","source":1118,"target":782},{"edgeType":"calls","source":386,"target":484},{"edgeType":"calls","source":1685,"target":382},{"edgeType":"calls","source":906,"target":484},{"edgeType":"calls","source":678,"target":681},{"edgeType":"imports","source":1198,"target":920},{"edgeType":"contains","source":719,"target":906}
,{"edgeType":"contains","source":17,"target":998},{"edgeType":"contains","source":46,"target":99},{"edgeType":"calls","source":40,"target":374},{"edgeType":"calls","source":1549,"target":369},{"edgeType":"calls","source":1578,"target":1056},{"edgeType":"contains","source":1539,"target":1646},{"edgeType":"contains","source":719,"target":641},{"edgeType":"calls","source":1702,"target":118},{"edgeType":"imports","source":1198,"target":921},{"edgeType":"contains","source":1062,"target":578},{"edgeType":"imports","source":674,"target":795},{"edgeType":"extends","source":908,"target":909},{"edgeType":"extends","source":777,"target":742},{"edgeType":"calls","source":824,"target":905},{"edgeType":"contains","source":1539,"target":1651},{"edgeType":"calls","source":1073,"target":586},{"edgeType":"calls","source":40,"target":369},{"edgeType":"contains","source":10,"target":426},{"edgeType":"calls","source":40,"target":105},{"edgeType":"calls","source":509,"target":629},{"edgeType":"contains","source":395,"target":646},{"edgeType":"extends","source":941,"target":942},{"edgeType":"calls","source":1596,"target":756},{"edgeType":"contains","source":46,"target":101},{"edgeType":"calls","source":827,"target":811},{"edgeType":"calls","source":824,"target":904},{"edgeType":"contains","source":1539,"target":1652},{"edgeType":"calls","source":40,"target":368},{"edgeType":"calls","source":1581,"target":428},{"edgeType":"imports","source":1198,"target":1200},{"edgeType":"calls","source":1591,"target":382},{"edgeType":"contains","source":1782,"target":1783},{"edgeType":"calls","source":1024,"target":782},{"edgeType":"calls","source":1726,"target":689},{"edgeType":"contains","source":27,"target":1219},{"edgeType":"imports","source":119,"target":560},{"edgeType":"calls","source":726,"target":507},{"edgeType":"calls","source":598,"target":247},{"edgeType":"imports","source":312,"target":919},{"edgeType":"contains","source":29,"target":1424},{"edgeType":"calls","source":1547,"target":428},{"edgeType":"calls","source":40,"target":371},{"edgeType":"calls","source":1754,"target":89},{"edgeType":"contains","source":422,"target":1133},{"edgeType":"calls","source":679,"target":117},{"edgeType":"contains","source":23,"target":25},{"edgeType":"calls","source":1484,"target":795},{"edgeType":"extends","source":760,"target":742},{"edgeType":"calls","source":1758,"target":229},{"edgeType":"contains","source":1539,"target":1649},{"edgeType":"contains","source":331,"target":518},{"edgeType":"contains","source":546,"target":724},{"edgeType":"calls","source":1000,"target":1001},{"edgeType":"contains","source":1062,"target":583},{"edgeType":"calls","source":995,"target":626},{"edgeType":"calls","source":678,"target":411},{"edgeType":"calls","source":40,"target":370},{"edgeType":"contains","source":23,"target":24},{"edgeType":"calls","source":1639,"target":1010},{"edgeType":"contains","source":1539,"target":1650},{"edgeType":"calls","source":678,"target":412},{"edgeType":"contains","source":331,"target":517},{"edgeType":"contains","source":385,"target":693},{"edgeType":"contains","source":810,"target":1263},{"edgeType":"calls","source":995,"target":621},{"edgeType":"calls","source":824,"target":901},{"edgeType":"calls","source":1114,"target":632},{"edgeType":"calls","source":601,"target":415},{"edgeType":"calls","source":1749,"target":502},{"edgeType":"contains","source":1539,"target":1655},{"edgeType":"imports","source":476,"target":586},{"edgeType":"contains","source":10,"target":430},{"edgeType":"calls","source":1655,"target":509},
{"edgeType":"calls","source":626,"target":433},{"edgeType":"contains","source":1427,"target":1428},{"edgeType":"calls","source":995,"target":620},{"edgeType":"calls","source":824,"target":900},{"edgeType":"calls","source":1744,"target":392},{"edgeType":"calls","source":1624,"target":412},{"edgeType":"calls","source":1647,"target":756},{"edgeType":"calls","source":1579,"target":750},{"edgeType":"contains","source":1539,"target":1656},{"edgeType":"calls","source":1749,"target":501},{"edgeType":"contains","source":23,"target":26},{"edgeType":"contains","source":10,"target":429},{"edgeType":"contains","source":1782,"target":1787},{"edgeType":"calls","source":1080,"target":363},{"edgeType":"contains","source":589,"target":713},{"edgeType":"calls","source":842,"target":871},{"edgeType":"extends","source":922,"target":742},{"edgeType":"contains","source":546,"target":725},{"edgeType":"contains","source":1237,"target":1241},{"edgeType":"imports","source":674,"target":263},{"edgeType":"contains","source":822,"target":893},{"edgeType":"calls","source":1795,"target":928},{"edgeType":"calls","source":595,"target":603},{"edgeType":"calls","source":995,"target":623},{"edgeType":"contains","source":1539,"target":1653},{"edgeType":"calls","source":824,"target":903},{"edgeType":"calls","source":40,"target":367},{"edgeType":"calls","source":1770,"target":117},{"edgeType":"contains","source":23,"target":29},{"edgeType":"calls","source":1314,"target":247},{"edgeType":"calls","source":1179,"target":204},{"edgeType":"imports","source":1175,"target":320},{"edgeType":"calls","source":752,"target":757},{"edgeType":"contains","source":419,"target":437},{"edgeType":"calls","source":1075,"target":1050},{"edgeType":"calls","source":766,"target":586},{"edgeType":"calls","source":995,"target":622},{"edgeType":"contains","source":29,"target":1427},{"edgeType":"calls","source":601,"target":416},{"edgeType":"contains","source":1539,"target":1654},{"edgeType":"calls","source":1749,"target":503},{"edgeType":"calls","source":1787,"target":382},{"edgeType":"calls","source":824,"target":902},{"edgeType":"calls","source":1315,"target":1273},{"edgeType":"calls","source":1314,"target":1303},{"edgeType":"calls","source":1637,"target":1068},{"edgeType":"calls","source":1179,"target":203},{"edgeType":"calls","source":794,"target":511},{"edgeType":"imports","source":119,"target":818},{"edgeType":"contains","source":419,"target":436},{"edgeType":"contains","source":1159,"target":486},{"edgeType":"calls","source":1651,"target":398},{"edgeType":"calls","source":1652,"target":367},{"edgeType":"calls","source":1754,"target":112},{"edgeType":"contains","source":1062,"target":557},{"edgeType":"calls","source":1196,"target":1290},{"edgeType":"calls","source":1672,"target":1068},{"edgeType":"calls","source":691,"target":296},{"edgeType":"contains","source":1539,"target":1626},{"edgeType":"calls","source":798,"target":415},{"edgeType":"imports","source":674,"target":770},{"edgeType":"calls","source":629,"target":633},{"edgeType":"calls","source":1666,"target":725},{"edgeType":"calls","source":1073,"target":1138},{"edgeType":"contains","source":908,"target":1102},{"edgeType":"calls","source":1122,"target":412},{"edgeType":"imports","source":674,"target":243},{"edgeType":"calls","source":1196,"target":1289},{"edgeType":"contains","source":486,"target":706},{"edgeType":"calls","source":1774,"target":547},{"edgeType":"calls","source":1652,"target":369},{"edgeType":"calls","source":629,"target":635},{"edgeType":"calls","source":1754,"target":114},{"e
dgeType":"calls","source":1196,"target":1292},{"edgeType":"calls","source":1656,"target":509},{"edgeType":"calls","source":1582,"target":689},{"edgeType":"calls","source":1139,"target":681},{"edgeType":"contains","source":1539,"target":1624},{"edgeType":"calls","source":588,"target":585},{"edgeType":"calls","source":1077,"target":488},{"edgeType":"contains","source":1070,"target":575},{"edgeType":"calls","source":1795,"target":428},{"edgeType":"calls","source":756,"target":398},{"edgeType":"calls","source":693,"target":501},{"edgeType":"calls","source":1302,"target":384},{"edgeType":"calls","source":1652,"target":368},{"edgeType":"contains","source":908,"target":1104},{"edgeType":"extends","source":486,"target":490},{"edgeType":"calls","source":1567,"target":96},{"edgeType":"calls","source":1745,"target":392},{"edgeType":"calls","source":1754,"target":113},{"edgeType":"calls","source":1196,"target":1291},{"edgeType":"calls","source":1648,"target":756},{"edgeType":"imports","source":674,"target":241},{"edgeType":"calls","source":1580,"target":750},{"edgeType":"contains","source":1539,"target":1625},{"edgeType":"calls","source":382,"target":363},{"edgeType":"calls","source":1633,"target":428},{"edgeType":"calls","source":1643,"target":118},{"edgeType":"imports","source":674,"target":769},{"edgeType":"calls","source":629,"target":634},{"edgeType":"calls","source":1643,"target":382},{"edgeType":"calls","source":693,"target":500},{"edgeType":"calls","source":798,"target":416},{"edgeType":"contains","source":1539,"target":1630},{"edgeType":"calls","source":1752,"target":434},{"edgeType":"contains","source":908,"target":1107},{"edgeType":"calls","source":673,"target":586},{"edgeType":"extends","source":288,"target":289},{"edgeType":"contains","source":719,"target":1416},{"edgeType":"calls","source":1753,"target":1459},{"edgeType":"contains","source":27,"target":1198},{"edgeType":"contains","source":1539,"target":1631},{"edgeType":"contains","source":1062,"target":560},{"edgeType":"calls","source":40,"target":389},{"edgeType":"calls","source":1711,"target":382},{"edgeType":"contains","source":1539,"target":1628},{"edgeType":"calls","source":1073,"target":1137},{"edgeType":"calls","source":1196,"target":1288},{"edgeType":"calls","source":1122,"target":411},{"edgeType":"calls","source":1614,"target":750},{"edgeType":"calls","source":1657,"target":474},{"edgeType":"calls","source":1068,"target":1028},{"edgeType":"calls","source":1046,"target":653},{"edgeType":"calls","source":1793,"target":750},{"edgeType":"calls","source":1677,"target":118},{"edgeType":"contains","source":719,"target":1418},{"edgeType":"calls","source":1139,"target":412},{"edgeType":"contains","source":1539,"target":1629},{"edgeType":"calls","source":1601,"target":888},{"edgeType":"calls","source":1073,"target":1136},{"edgeType":"calls","source":1196,"target":1287},{"edgeType":"calls","source":1447,"target":641},{"edgeType":"calls","source":1735,"target":433},{"edgeType":"calls","source":117,"target":118},{"edgeType":"imports","source":1498,"target":589},{"edgeType":"calls","source":1139,"target":411},{"edgeType":"imports","source":312,"target":888},{"edgeType":"calls","source":1456,"target":890},{"edgeType":"contains","source":719,"target":1417},{"edgeType":"calls","source":1566,"target":382},{"edgeType":"contains","source":1198,"target":1106},{"edgeType":"calls","source":1229,"target":523},{"edgeType":"calls","source":1800,"target":1056},{"edgeType":"calls","source":1566,"target":118},{"edgeType":"calls","source":1746,"target":87},
{"edgeType":"calls","source":692,"target":521},{"edgeType":"calls","source":842,"target":892},{"edgeType":"imports","source":674,"target":778},{"edgeType":"calls","source":816,"target":113},{"edgeType":"imports","source":1198,"target":918},{"edgeType":"contains","source":719,"target":1420},{"edgeType":"calls","source":1595,"target":1068},{"edgeType":"calls","source":1451,"target":247},{"edgeType":"calls","source":1073,"target":337},{"edgeType":"calls","source":842,"target":891},{"edgeType":"calls","source":692,"target":520},{"edgeType":"calls","source":818,"target":843},{"edgeType":"imports","source":674,"target":779},{"edgeType":"contains","source":27,"target":674},{"edgeType":"calls","source":816,"target":112},{"edgeType":"contains","source":1539,"target":1636},{"edgeType":"contains","source":719,"target":1419},{"edgeType":"contains","source":1109,"target":692},{"edgeType":"calls","source":1604,"target":1056},{"edgeType":"calls","source":1663,"target":284},{"edgeType":"calls","source":959,"target":967},{"edgeType":"contains","source":1539,"target":1632},{"edgeType":"contains","source":908,"target":1113},{"edgeType":"calls","source":995,"target":380},{"edgeType":"calls","source":1549,"target":382},{"edgeType":"contains","source":1641,"target":1642},{"edgeType":"calls","source":1549,"target":118},{"edgeType":"calls","source":959,"target":966},{"edgeType":"extends","source":909,"target":334},{"edgeType":"contains","source":1539,"target":1633},{"edgeType":"calls","source":1229,"target":524},{"edgeType":"calls","source":1663,"target":547},{"edgeType":"calls","source":1160,"target":284},{"edgeType":"imports","source":674,"target":777},{"edgeType":"calls","source":1746,"target":88},{"edgeType":"contains","source":424,"target":1317},{"edgeType":"calls","source":1343,"target":689},{"edgeType":"imports","source":1198,"target":917},{"edgeType":"contains","source":719,"target":1421},{"edgeType":"calls","source":1631,"target":1010},{"edgeType":"calls","source":816,"target":114},{"edgeType":"contains","source":331,"target":504},{"edgeType":"contains","source":1220,"target":1221},{"edgeType":"imports","source":1198,"target":386},{"edgeType":"contains","source":1060,"target":1160},{"edgeType":"calls","source":1731,"target":284},{"edgeType":"calls","source":1664,"target":247},{"edgeType":"contains","source":476,"target":766},{"edgeType":"contains","source":385,"target":680},{"edgeType":"contains","source":262,"target":265},{"edgeType":"imports","source":674,"target":246},{"edgeType":"calls","source":766,"target":337},{"edgeType":"calls","source":1796,"target":118},{"edgeType":"contains","source":1443,"target":1444},{"edgeType":"imports","source":674,"target":774},{"edgeType":"contains","source":1539,"target":1639},{"edgeType":"calls","source":1659,"target":401},{"edgeType":"contains","source":1198,"target":1109},{"edgeType":"contains","source":1060,"target":1423},{"edgeType":"extends","source":313,"target":314},{"edgeType":"contains","source":262,"target":264},{"edgeType":"calls","source":1646,"target":1068},{"edgeType":"calls","source":1731,"target":547},{"edgeType":"calls","source":1477,"target":757},{"edgeType":"calls","source":959,"target":960},{"edgeType":"contains","source":1443,"target":1445},{"edgeType":"contains","source":1539,"target":1640},{"edgeType":"imports","source":674,"target":775},{"edgeType":"contains","source":119,"target":997},{"edgeType":"contains","source":331,"target":506},{"edgeType":"calls","source":1410,"target":1252},{"edgeType":"calls","source":1485,"target":1041},{"edgeType":
"calls","source":842,"target":890},{"edgeType":"calls","source":692,"target":519},{"edgeType":"contains","source":262,"target":267},{"edgeType":"imports","source":674,"target":244},{"edgeType":"calls","source":1607,"target":1223},{"edgeType":"contains","source":1539,"target":1637},{"edgeType":"imports","source":674,"target":772},{"edgeType":"contains","source":589,"target":964},{"edgeType":"calls","source":1339,"target":1338},{"edgeType":"calls","source":1616,"target":679},{"edgeType":"calls","source":1590,"target":428},{"edgeType":"calls","source":1600,"target":118},{"edgeType":"contains","source":262,"target":266},{"edgeType":"imports","source":674,"target":245},{"edgeType":"extends","source":825,"target":828},{"edgeType":"calls","source":1199,"target":1185},{"edgeType":"imports","source":476,"target":40},{"edgeType":"contains","source":1539,"target":1638},{"edgeType":"imports","source":674,"target":773},{"edgeType":"calls","source":1554,"target":222},{"edgeType":"contains","source":54,"target":77},{"edgeType":"contains","source":120,"target":145},{"edgeType":"calls","source":1684,"target":118},{"edgeType":"imports","source":1219,"target":573},{"edgeType":"calls","source":1416,"target":1027},{"edgeType":"imports","source":312,"target":679},{"edgeType":"contains","source":908,"target":1086},{"edgeType":"calls","source":833,"target":866},{"edgeType":"calls","source":755,"target":113},{"edgeType":"contains","source":1539,"target":1610},{"edgeType":"calls","source":1476,"target":752},{"edgeType":"extends","source":315,"target":289},{"edgeType":"calls","source":1794,"target":671},{"edgeType":"contains","source":1219,"target":1222},{"edgeType":"calls","source":1577,"target":1056},{"edgeType":"contains","source":120,"target":144},{"edgeType":"calls","source":1668,"target":1670},{"edgeType":"imports","source":29,"target":996},{"edgeType":"calls","source":1456,"target":843},{"edgeType":"contains","source":1062,"target":1332},{"edgeType":"calls","source":1667,"target":644},{"edgeType":"contains","source":476,"target":736},{"edgeType":"calls","source":817,"target":40},{"edgeType":"calls","source":755,"target":112},{"edgeType":"imports","source":1498,"target":909},{"edgeType":"calls","source":1475,"target":782},{"edgeType":"calls","source":1691,"target":428},{"edgeType":"calls","source":1701,"target":118},{"edgeType":"contains","source":476,"target":1003},{"edgeType":"calls","source":1214,"target":1213},{"edgeType":"contains","source":27,"target":916},{"edgeType":"contains","source":120,"target":147},{"edgeType":"contains","source":1233,"target":1316},{"edgeType":"imports","source":719,"target":215},{"edgeType":"contains","source":30,"target":31},{"edgeType":"calls","source":1682,"target":710},{"edgeType":"contains","source":698,"target":727},{"edgeType":"contains","source":589,"target":671},{"edgeType":"contains","source":1539,"target":1608},{"edgeType":"calls","source":1623,"target":689},{"edgeType":"imports","source":29,"target":994},{"edgeType":"calls","source":995,"target":337},{"edgeType":"contains","source":1062,"target":1070},{"edgeType":"contains","source":120,"target":146},{"edgeType":"calls","source":682,"target":263},{"edgeType":"calls","source":1701,"target":913},{"edgeType":"calls","source":1718,"target":386},{"edgeType":"calls","source":816,"target":337},{"edgeType":"contains","source":1539,"target":1609},{"edgeType":"calls","source":755,"target":114},{"edgeType":"contains","source":391,"target":469},{"edgeType":"contains","source":916,"target":314},{"edgeType":"calls","source":1546,"t
arget":428},{"edgeType":"calls","source":691,"target":507},{"edgeType":"calls","source":1663,"target":501},{"edgeType":"imports","source":674,"target":287},{"edgeType":"calls","source":1242,"target":603},{"edgeType":"contains","source":120,"target":149},{"edgeType":"calls","source":1753,"target":89},{"edgeType":"contains","source":1539,"target":1614},{"edgeType":"contains","source":1060,"target":1399},{"edgeType":"contains","source":908,"target":1090},{"edgeType":"contains","source":1756,"target":1758},{"edgeType":"calls","source":1757,"target":229},{"edgeType":"contains","source":1219,"target":698},{"edgeType":"contains","source":1062,"target":544},{"edgeType":"calls","source":1663,"target":500},{"edgeType":"contains","source":331,"target":479},{"edgeType":"calls","source":1581,"target":928},{"edgeType":"calls","source":1468,"target":996},{"edgeType":"calls","source":1735,"target":118},{"edgeType":"contains","source":1241,"target":1339},{"edgeType":"contains","source":1106,"target":1294},{"edgeType":"imports","source":719,"target":213},{"edgeType":"contains","source":385,"target":654},{"edgeType":"contains","source":916,"target":313},{"edgeType":"contains","source":582,"target":625},{"edgeType":"calls","source":1614,"target":433},{"edgeType":"contains","source":120,"target":148},{"edgeType":"calls","source":1414,"target":291},{"edgeType":"contains","source":1060,"target":1398},{"edgeType":"imports","source":719,"target":214},{"edgeType":"contains","source":908,"target":1089},{"edgeType":"contains","source":1756,"target":1759},{"edgeType":"contains","source":1106,"target":1293},{"edgeType":"contains","source":331,"target":478},{"edgeType":"extends","source":942,"target":943},{"edgeType":"calls","source":1663,"target":503},{"edgeType":"contains","source":582,"target":628},{"edgeType":"calls","source":995,"target":598},{"edgeType":"calls","source":1456,"target":842},{"edgeType":"calls","source":1668,"target":1669},{"edgeType":"contains","source":120,"target":151},{"edgeType":"contains","source":1060,"target":1401},{"edgeType":"contains","source":486,"target":697},{"edgeType":"contains","source":331,"target":481},{"edgeType":"contains","source":391,"target":471},{"edgeType":"imports","source":119,"target":1106},{"edgeType":"contains","source":385,"target":656},{"edgeType":"contains","source":391,"target":470},{"edgeType":"contains","source":8,"target":1244},{"edgeType":"contains","source":699,"target":700},{"edgeType":"calls","source":750,"target":793},{"edgeType":"calls","source":1663,"target":502},{"edgeType":"imports","source":119,"target":578},{"edgeType":"calls","source":1753,"target":90},{"edgeType":"contains","source":120,"target":150},{"edgeType":"calls","source":1708,"target":428},{"edgeType":"calls","source":1590,"target":386},{"edgeType":"contains","source":1539,"target":1613},{"edgeType":"calls","source":1020,"target":879},{"edgeType":"contains","source":1756,"target":1757},{"edgeType":"calls","source":644,"target":380},{"edgeType":"calls","source":725,"target":247},{"edgeType":"contains","source":1219,"target":699},{"edgeType":"contains","source":486,"target":696},{"edgeType":"calls","source":1261,"target":1336},{"edgeType":"calls","source":1413,"target":588},{"edgeType":"calls","source":1608,"target":1409},{"edgeType":"calls","source":824,"target":872},{"edgeType":"calls","source":1675,"target":653},{"edgeType":"contains","source":21,"target":51},{"edgeType":"calls","source":1610,"target":1611},{"edgeType":"imports","source":1498,"target":918},{"edgeType":"contains","source":1539
,"target":1618},{"edgeType":"calls","source":1613,"target":725},{"edgeType":"calls","source":583,"target":416},{"edgeType":"calls","source":435,"target":247},{"edgeType":"calls","source":1069,"target":412},{"edgeType":"contains","source":1241,"target":1343},{"edgeType":"calls","source":842,"target":843},{"edgeType":"calls","source":691,"target":503},{"edgeType":"calls","source":685,"target":689},{"edgeType":"calls","source":711,"target":940},{"edgeType":"calls","source":1261,"target":1331},{"edgeType":"calls","source":586,"target":587},{"edgeType":"contains","source":391,"target":473},{"edgeType":"calls","source":1597,"target":428},{"edgeType":"calls","source":733,"target":521},{"edgeType":"calls","source":1261,"target":1330},{"edgeType":"calls","source":1608,"target":1408},{"edgeType":"contains","source":476,"target":744},{"edgeType":"contains","source":8,"target":1246},{"edgeType":"calls","source":1314,"target":1273},{"edgeType":"calls","source":1313,"target":247},{"edgeType":"contains","source":1539,"target":1619},{"edgeType":"calls","source":1050,"target":207},{"edgeType":"imports","source":1498,"target":917},{"edgeType":"contains","source":1060,"target":1402},{"edgeType":"calls","source":583,"target":415},{"edgeType":"calls","source":1069,"target":411},{"edgeType":"calls","source":691,"target":502},{"edgeType":"calls","source":118,"target":560},{"edgeType":"calls","source":711,"target":939},{"edgeType":"contains","source":391,"target":472},{"edgeType":"contains","source":908,"target":1093},{"edgeType":"contains","source":589,"target":676},{"edgeType":"contains","source":32,"target":241},{"edgeType":"calls","source":1214,"target":412},{"edgeType":"extends","source":331,"target":333},{"edgeType":"contains","source":8,"target":1249},{"edgeType":"imports","source":719,"target":224},{"edgeType":"calls","source":1581,"target":398},{"edgeType":"imports","source":29,"target":1001},{"edgeType":"calls","source":263,"target":296},{"edgeType":"contains","source":1539,"target":1616},{"edgeType":"contains","source":908,"target":1096},{"edgeType":"contains","source":719,"target":877},{"edgeType":"calls","source":1637,"target":247},{"edgeType":"imports","source":1498,"target":386},{"edgeType":"contains","source":379,"target":582},{"edgeType":"calls","source":1214,"target":411},{"edgeType":"calls","source":1610,"target":1612},{"edgeType":"imports","source":29,"target":1002},{"edgeType":"extends","source":923,"target":742},{"edgeType":"calls","source":1590,"target":382},{"edgeType":"contains","source":1539,"target":1617},{"edgeType":"calls","source":1261,"target":1327},{"edgeType":"contains","source":32,"target":243},{"edgeType":"contains","source":1539,"target":1622},{"edgeType":"imports","source":719,"target":222},{"edgeType":"calls","source":1623,"target":411},{"edgeType":"calls","source":1553,"target":203},{"edgeType":"calls","source":1345,"target":573},{"edgeType":"extends","source":1192,"target":334},{"edgeType":"calls","source":736,"target":689},{"edgeType":"imports","source":674,"target":824},{"edgeType":"calls","source":598,"target":211},{"edgeType":"calls","source":1416,"target":221},{"edgeType":"imports","source":719,"target":221},{"edgeType":"calls","source":1608,"target":1404},{"edgeType":"contains","source":908,"target":1098},{"edgeType":"calls","source":755,"target":629},{"edgeType":"calls","source":1209,"target":824},{"edgeType":"calls","source":1261,"target":1326},{"edgeType":"imports","source":719,"target":223},{"edgeType":"calls","source":1566,"target":1120},{"edgeType":"imports","sou
rce":1498,"target":384},{"edgeType":"contains","source":1539,"target":1623},{"edgeType":"calls","source":598,"target":210},{"edgeType":"calls","source":1638,"target":1266},{"edgeType":"contains","source":486,"target":702},{"edgeType":"calls","source":1000,"target":961},{"edgeType":"imports","source":32,"target":115},{"edgeType":"imports","source":674,"target":560},{"edgeType":"imports","source":119,"target":589},{"edgeType":"calls","source":627,"target":632},{"edgeType":"calls","source":1608,"target":1403},{"edgeType":"calls","source":1793,"target":689},{"edgeType":"calls","source":1337,"target":291},{"edgeType":"calls","source":733,"target":520},{"edgeType":"calls","source":1261,"target":1329},{"edgeType":"contains","source":32,"target":245},{"edgeType":"contains","source":908,"target":1101},{"edgeType":"calls","source":1608,"target":1407},{"edgeType":"contains","source":1539,"target":1620},{"edgeType":"calls","source":783,"target":291},{"edgeType":"calls","source":428,"target":462},{"edgeType":"calls","source":1654,"target":509},{"edgeType":"contains","source":27,"target":929},{"edgeType":"calls","source":691,"target":501},{"edgeType":"calls","source":733,"target":519},{"edgeType":"contains","source":32,"target":244},{"edgeType":"calls","source":1608,"target":1406},{"edgeType":"calls","source":1743,"target":392},{"edgeType":"calls","source":1623,"target":412},{"edgeType":"contains","source":1539,"target":1621},{"edgeType":"calls","source":1553,"target":204},{"edgeType":"calls","source":1709,"target":1710},{"edgeType":"calls","source":1641,"target":118},{"edgeType":"contains","source":589,"target":682},{"edgeType":"calls","source":711,"target":937},{"edgeType":"calls","source":1608,"target":1405},{"edgeType":"calls","source":1641,"target":382},{"edgeType":"contains","source":424,"target":1305},{"edgeType":"contains","source":908,"target":1099},{"edgeType":"calls","source":598,"target":212},{"edgeType":"calls","source":691,"target":500},{"edgeType":"calls","source":1729,"target":1118},{"edgeType":"calls","source":1723,"target":247},{"edgeType":"calls","source":705,"target":622},{"edgeType":"contains","source":1317,"target":1340},{"edgeType":"contains","source":763,"target":808},{"edgeType":"calls","source":1567,"target":1119},{"edgeType":"contains","source":1539,"target":1593},{"edgeType":"calls","source":1046,"target":622},{"edgeType":"contains","source":396,"target":558},{"edgeType":"contains","source":1498,"target":1807},{"edgeType":"calls","source":1676,"target":118},{"edgeType":"contains","source":54,"target":61},{"edgeType":"contains","source":120,"target":129},{"edgeType":"calls","source":757,"target":596},{"edgeType":"calls","source":1676,"target":382},{"edgeType":"imports","source":1219,"target":556},{"edgeType":"calls","source":707,"target":296},{"edgeType":"calls","source":705,"target":621},{"edgeType":"contains","source":908,"target":1069},{"edgeType":"contains","source":1539,"target":1594},{"edgeType":"contains","source":1498,"target":1808},{"edgeType":"calls","source":1046,"target":621},{"edgeType":"calls","source":1214,"target":433},{"edgeType":"contains","source":54,"target":60},{"edgeType":"contains","source":120,"target":128},{"edgeType":"calls","source":1713,"target":291},{"edgeType":"imports","source":32,"target":94},{"edgeType":"calls","source":1020,"target":898},{"edgeType":"calls","source":995,"target":619},{"edgeType":"contains","source":1070,"target":1071},{"edgeType":"contains","source":1498,"target":1805},{"edgeType":"calls","source":1671,"target":1068},{"edgeType
":"extends","source":1123,"target":1125},{"edgeType":"calls","source":1624,"target":411},{"edgeType":"calls","source":1551,"target":296},{"edgeType":"calls","source":1749,"target":500},{"edgeType":"contains","source":1539,"target":1591},{"edgeType":"calls","source":842,"target":870},{"edgeType":"contains","source":54,"target":63},{"edgeType":"contains","source":102,"target":689},{"edgeType":"contains","source":120,"target":131},{"edgeType":"calls","source":995,"target":618},{"edgeType":"calls","source":1663,"target":258},{"edgeType":"calls","source":1567,"target":1120},{"edgeType":"calls","source":705,"target":623},{"edgeType":"calls","source":1046,"target":623},{"edgeType":"contains","source":1539,"target":1592},{"edgeType":"contains","source":1498,"target":1806},{"edgeType":"calls","source":842,"target":869},{"edgeType":"contains","source":54,"target":62},{"edgeType":"calls","source":757,"target":597},{"edgeType":"contains","source":120,"target":130},{"edgeType":"contains","source":1539,"target":1597},{"edgeType":"contains","source":909,"target":1043},{"edgeType":"calls","source":705,"target":618},{"edgeType":"calls","source":653,"target":380},{"edgeType":"contains","source":1106,"target":1278},{"edgeType":"contains","source":590,"target":626},{"edgeType":"contains","source":582,"target":609},{"edgeType":"calls","source":1564,"target":679},{"edgeType":"contains","source":38,"target":297},{"edgeType":"calls","source":1548,"target":382},{"edgeType":"contains","source":763,"target":812},{"edgeType":"contains","source":54,"target":65},{"edgeType":"contains","source":120,"target":133},{"edgeType":"imports","source":312,"target":922},{"edgeType":"calls","source":1743,"target":415},{"edgeType":"contains","source":1539,"target":1598},{"edgeType":"contains","source":1070,"target":1072},{"edgeType":"calls","source":1727,"target":118},{"edgeType":"calls","source":1451,"target":482},{"edgeType":"extends","source":289,"target":290},{"edgeType":"calls","source":1662,"target":547},{"edgeType":"calls","source":1413,"target":603},{"edgeType":"calls","source":1160,"target":254},{"edgeType":"imports","source":312,"target":923},{"edgeType":"extends","source":1222,"target":699},{"edgeType":"contains","source":54,"target":64},{"edgeType":"contains","source":120,"target":132},{"edgeType":"calls","source":705,"target":620},{"edgeType":"contains","source":1539,"target":1595},{"edgeType":"contains","source":17,"target":421},{"edgeType":"imports","source":1498,"target":94},{"edgeType":"calls","source":1726,"target":416},{"edgeType":"imports","source":29,"target":708},{"edgeType":"contains","source":1498,"target":1809},{"edgeType":"calls","source":1046,"target":620},{"edgeType":"calls","source":1046,"target":619},{"edgeType":"contains","source":476,"target":726},{"edgeType":"calls","source":757,"target":594},{"edgeType":"calls","source":1423,"target":824},{"edgeType":"contains","source":54,"target":67},{"edgeType":"imports","source":312,"target":920},{"edgeType":"calls","source":928,"target":578},{"edgeType":"calls","source":1012,"target":88},{"edgeType":"contains","source":120,"target":135},{"edgeType":"calls","source":705,"target":619},{"edgeType":"calls","source":1752,"target":1459},{"edgeType":"contains","source":1539,"target":1596},{"edgeType":"contains","source":1070,"target":1074},{"edgeType":"calls","source":1074,"target":1072},{"edgeType":"contains","source":485,"target":711},{"edgeType":"calls","source":1451,"target":484},{"edgeType":"contains","source":822,"target":834},{"edgeType":"contains","source":110
6,"target":1279},{"edgeType":"calls","source":1726,"target":415},{"edgeType":"calls","source":1647,"target":750},{"edgeType":"calls","source":1046,"target":618},{"edgeType":"contains","source":582,"target":610},{"edgeType":"calls","source":757,"target":593},{"edgeType":"calls","source":1416,"target":1040},{"edgeType":"contains","source":54,"target":66},{"edgeType":"calls","source":1012,"target":87},{"edgeType":"calls","source":1077,"target":1243},{"edgeType":"calls","source":1743,"target":416},{"edgeType":"contains","source":120,"target":134},{"edgeType":"imports","source":312,"target":921},{"edgeType":"imports","source":119,"target":311},{"edgeType":"contains","source":120,"target":137},{"edgeType":"calls","source":1115,"target":1117},{"edgeType":"calls","source":752,"target":214},{"edgeType":"contains","source":1539,"target":1601},{"edgeType":"contains","source":582,"target":614},{"edgeType":"calls","source":1606,"target":1223},{"edgeType":"extends","source":769,"target":742},{"edgeType":"imports","source":719,"target":738},{"edgeType":"contains","source":698,"target":717},{"edgeType":"contains","source":582,"target":613},{"edgeType":"contains","source":119,"target":961},{"edgeType":"contains","source":21,"target":35},{"edgeType":"contains","source":54,"target":69},{"edgeType":"calls","source":883,"target":117},{"edgeType":"calls","source":1555,"target":689},{"edgeType":"contains","source":120,"target":136},{"edgeType":"calls","source":363,"target":380},{"edgeType":"contains","source":1539,"target":1602},{"edgeType":"imports","source":810,"target":825},{"edgeType":"calls","source":1599,"target":118},{"edgeType":"contains","source":822,"target":836},{"edgeType":"contains","source":27,"target":376},{"edgeType":"calls","source":988,"target":560},{"edgeType":"calls","source":752,"target":213},{"edgeType":"contains","source":1539,"target":1603},{"edgeType":"imports","source":119,"target":1105},{"edgeType":"calls","source":1687,"target":296},{"edgeType":"calls","source":1553,"target":222},{"edgeType":"contains","source":54,"target":68},{"edgeType":"contains","source":54,"target":71},{"edgeType":"contains","source":120,"target":139},{"edgeType":"calls","source":842,"target":597},{"edgeType":"contains","source":1539,"target":1599},{"edgeType":"calls","source":1799,"target":1056},{"edgeType":"imports","source":119,"target":573},{"edgeType":"contains","source":806,"target":807},{"edgeType":"calls","source":1657,"target":701},{"edgeType":"extends","source":314,"target":315},{"edgeType":"calls","source":1734,"target":428},{"edgeType":"calls","source":1474,"target":296},{"edgeType":"contains","source":54,"target":70},{"edgeType":"contains","source":120,"target":138},{"edgeType":"calls","source":1115,"target":1118},{"edgeType":"contains","source":331,"target":468},{"edgeType":"calls","source":1059,"target":740},{"edgeType":"calls","source":842,"target":596},{"edgeType":"contains","source":1539,"target":1600},{"edgeType":"calls","source":1164,"target":392},{"edgeType":"calls","source":1442,"target":1023},{"edgeType":"contains","source":908,"target":1079},{"edgeType":"contains","source":698,"target":718},{"edgeType":"calls","source":928,"target":573},{"edgeType":"imports","source":119,"target":1103},{"edgeType":"imports","source":29,"target":986},{"edgeType":"calls","source":1633,"target":382},{"edgeType":"calls","source":1589,"target":689},{"edgeType":"contains","source":54,"target":73},{"edgeType":"contains","source":120,"target":141},{"edgeType":"imports","source":719,"target":205},{"edgeType":"extend
s","source":248,"target":249},{"edgeType":"contains","source":23,"target":1298},{"edgeType":"calls","source":1478,"target":959},{"edgeType":"calls","source":1434,"target":1001},{"edgeType":"contains","source":698,"target":721},{"edgeType":"calls","source":1768,"target":689},{"edgeType":"contains","source":1539,"target":1606},{"edgeType":"calls","source":1413,"target":1388},{"edgeType":"contains","source":763,"target":820},{"edgeType":"calls","source":1752,"target":392},{"edgeType":"contains","source":54,"target":72},{"edgeType":"calls","source":1705,"target":1056},{"edgeType":"contains","source":120,"target":140},{"edgeType":"calls","source":1606,"target":689},{"edgeType":"calls","source":961,"target":601},{"edgeType":"calls","source":1658,"target":398},{"edgeType":"imports","source":719,"target":206},{"edgeType":"calls","source":1574,"target":888},{"edgeType":"calls","source":1201,"target":824},{"edgeType":"contains","source":763,"target":819},{"edgeType":"contains","source":1539,"target":1607},{"edgeType":"contains","source":54,"target":75},{"edgeType":"calls","source":1657,"target":433},{"edgeType":"imports","source":719,"target":203},{"edgeType":"calls","source":644,"target":653},{"edgeType":"contains","source":120,"target":143},{"edgeType":"calls","source":964,"target":246},{"edgeType":"contains","source":719,"target":601},{"edgeType":"contains","source":27,"target":119},{"edgeType":"extends","source":396,"target":419},{"edgeType":"contains","source":908,"target":1084},{"edgeType":"calls","source":1540,"target":888},{"edgeType":"calls","source":1632,"target":679},{"edgeType":"contains","source":1539,"target":1604},{"edgeType":"calls","source":1798,"target":1082},{"edgeType":"contains","source":907,"target":586},{"edgeType":"calls","source":1024,"target":1028},{"edgeType":"calls","source":1795,"target":382},{"edgeType":"contains","source":1219,"target":1220},{"edgeType":"contains","source":54,"target":74},{"edgeType":"contains","source":120,"target":142},{"edgeType":"imports","source":719,"target":204},{"edgeType":"contains","source":908,"target":1083},{"edgeType":"contains","source":27,"target":1175},{"edgeType":"contains","source":1539,"target":1605},{"edgeType":"contains","source":763,"target":821},{"edgeType":"calls","source":1557,"target":96},{"edgeType":"contains","source":334,"target":349},{"edgeType":"contains","source":490,"target":534},{"edgeType":"calls","source":1048,"target":114},{"edgeType":"calls","source":1556,"target":750},{"edgeType":"calls","source":1747,"target":114},{"edgeType":"contains","source":1763,"target":1768},{"edgeType":"contains","source":1738,"target":1750},{"edgeType":"calls","source":1185,"target":623},{"edgeType":"contains","source":908,"target":1053},{"edgeType":"contains","source":1175,"target":1232},{"edgeType":"calls","source":626,"target":247},{"edgeType":"extends","source":1295,"target":334},{"edgeType":"contains","source":1539,"target":1577},{"edgeType":"calls","source":644,"target":482},{"edgeType":"contains","source":485,"target":953},{"edgeType":"contains","source":33,"target":431},{"edgeType":"contains","source":490,"target":533},{"edgeType":"calls","source":1637,"target":1409},{"edgeType":"calls","source":1028,"target":204},{"edgeType":"contains","source":334,"target":348},{"edgeType":"calls","source":720,"target":503},{"edgeType":"contains","source":929,"target":930},{"edgeType":"imports","source":1198,"target":994},{"edgeType":"contains","source":477,"target":672},{"edgeType":"calls","source":1048,"target":113},{"edgeType":"calls","source
":1747,"target":113},{"edgeType":"contains","source":1763,"target":1769},{"edgeType":"calls","source":1641,"target":756},{"edgeType":"contains","source":23,"target":476},{"edgeType":"calls","source":1573,"target":750},{"edgeType":"contains","source":908,"target":1052},{"edgeType":"contains","source":1738,"target":1751},{"edgeType":"calls","source":1185,"target":622},{"edgeType":"contains","source":1539,"target":1578},{"edgeType":"imports","source":674,"target":325},{"edgeType":"contains","source":929,"target":933},{"edgeType":"calls","source":1412,"target":1252},{"edgeType":"contains","source":490,"target":536},{"edgeType":"calls","source":1599,"target":740},{"edgeType":"calls","source":392,"target":368},{"edgeType":"calls","source":118,"target":670},{"edgeType":"calls","source":1233,"target":1251},{"edgeType":"calls","source":1608,"target":725},{"edgeType":"calls","source":1596,"target":40},{"edgeType":"contains","source":1738,"target":1748},{"edgeType":"contains","source":1763,"target":1766},{"edgeType":"contains","source":1222,"target":1360},{"edgeType":"contains","source":909,"target":1024},{"edgeType":"calls","source":886,"target":117},{"edgeType":"contains","source":1175,"target":1234},{"edgeType":"contains","source":334,"target":351},{"edgeType":"contains","source":1539,"target":1575},{"edgeType":"calls","source":644,"target":484},{"edgeType":"calls","source":1798,"target":384},{"edgeType":"contains","source":490,"target":535},{"edgeType":"contains","source":929,"target":932},{"edgeType":"calls","source":392,"target":367},{"edgeType":"calls","source":1558,"target":689},{"edgeType":"contains","source":1738,"target":1749},{"edgeType":"contains","source":1763,"target":1767},{"edgeType":"contains","source":1222,"target":1361},{"edgeType":"contains","source":908,"target":1054},{"edgeType":"contains","source":334,"target":350},{"edgeType":"contains","source":333,"target":381},{"edgeType":"contains","source":1539,"target":1576},{"edgeType":"contains","source":485,"target":954},{"edgeType":"calls","source":1316,"target":1315},{"edgeType":"calls","source":1637,"target":1406},{"edgeType":"contains","source":828,"target":895},{"edgeType":"contains","source":763,"target":796},{"edgeType":"contains","source":1738,"target":1754},{"edgeType":"calls","source":1774,"target":594},{"edgeType":"contains","source":53,"target":79},{"edgeType":"calls","source":818,"target":897},{"edgeType":"calls","source":1045,"target":203},{"edgeType":"calls","source":1367,"target":1319},{"edgeType":"extends","source":1771,"target":909},{"edgeType":"imports","source":719,"target":246},{"edgeType":"contains","source":1539,"target":1581},{"edgeType":"contains","source":1498,"target":1795},{"edgeType":"calls","source":1185,"target":619},{"edgeType":"calls","source":720,"target":500},{"edgeType":"contains","source":490,"target":538},{"edgeType":"contains","source":1175,"target":1236},{"edgeType":"contains","source":334,"target":353},{"edgeType":"calls","source":1042,"target":824},{"edgeType":"contains","source":742,"target":653},{"edgeType":"calls","source":1316,"target":1314},{"edgeType":"contains","source":490,"target":537},{"edgeType":"calls","source":1637,"target":1405},{"edgeType":"calls","source":1626,"target":689},{"edgeType":"contains","source":828,"target":894},{"edgeType":"contains","source":53,"target":78},{"edgeType":"calls","source":1579,"target":560},{"edgeType":"calls","source":1585,"target":374},{"edgeType":"contains","source":1738,"target":1755},{"edgeType":"calls","source":1774,"target":593},{"edgeType":"co
ntains","source":476,"target":707},{"edgeType":"contains","source":1539,"target":1582},{"edgeType":"contains","source":908,"target":1056},{"edgeType":"contains","source":1498,"target":1796},{"edgeType":"calls","source":1704,"target":913},{"edgeType":"contains","source":1175,"target":1235},{"edgeType":"contains","source":485,"target":956},{"edgeType":"calls","source":1185,"target":618},{"edgeType":"contains","source":334,"target":352},{"edgeType":"contains","source":1175,"target":1239},{"edgeType":"calls","source":1028,"target":203},{"edgeType":"calls","source":1729,"target":670},{"edgeType":"calls","source":1747,"target":112},{"edgeType":"calls","source":1637,"target":1408},{"edgeType":"calls","source":720,"target":502},{"edgeType":"calls","source":1592,"target":689},{"edgeType":"contains","source":763,"target":798},{"edgeType":"contains","source":719,"target":1105},{"edgeType":"calls","source":1616,"target":1266},{"edgeType":"contains","source":53,"target":81},{"edgeType":"calls","source":1048,"target":112},{"edgeType":"contains","source":1763,"target":1770},{"edgeType":"calls","source":1755,"target":392},{"edgeType":"contains","source":1539,"target":1579},{"edgeType":"contains","source":908,"target":1059},{"edgeType":"calls","source":741,"target":380},{"edgeType":"contains","source":1738,"target":1752},{"edgeType":"contains","source":490,"target":540},{"edgeType":"calls","source":1185,"target":621},{"edgeType":"calls","source":671,"target":964},{"edgeType":"calls","source":1636,"target":117},{"edgeType":"imports","source":719,"target":244},{"edgeType":"contains","source":1175,"target":1238},{"edgeType":"imports","source":29,"target":1023},{"edgeType":"calls","source":1339,"target":603},{"edgeType":"contains","source":828,"target":896},{"edgeType":"calls","source":1637,"target":1407},{"edgeType":"extends","source":943,"target":944},{"edgeType":"calls","source":1682,"target":1069},{"edgeType":"contains","source":763,"target":797},{"edgeType":"contains","source":53,"target":80},{"edgeType":"calls","source":1616,"target":1265},{"edgeType":"calls","source":1045,"target":204},{"edgeType":"contains","source":1539,"target":1580},{"edgeType":"contains","source":908,"target":1058},{"edgeType":"contains","source":1498,"target":1794},{"edgeType":"contains","source":1738,"target":1753},{"edgeType":"calls","source":1185,"target":620},{"edgeType":"contains","source":334,"target":354},{"edgeType":"contains","source":490,"target":539},{"edgeType":"calls","source":720,"target":501},{"edgeType":"calls","source":1788,"target":689},{"edgeType":"calls","source":1577,"target":888},{"edgeType":"imports","source":719,"target":245},{"edgeType":"calls","source":1554,"target":1068},{"edgeType":"calls","source":757,"target":934},{"edgeType":"calls","source":1585,"target":371},{"edgeType":"calls","source":1671,"target":1669},{"edgeType":"contains","source":376,"target":377},{"edgeType":"contains","source":53,"target":83},{"edgeType":"contains","source":102,"target":679},{"edgeType":"contains","source":396,"target":549},{"edgeType":"contains","source":1539,"target":1585},{"edgeType":"contains","source":1498,"target":1799},{"edgeType":"calls","source":1456,"target":1463},{"edgeType":"calls","source":1758,"target":557},{"edgeType":"calls","source":1768,"target":247},{"edgeType":"contains","source":719,"target":842},{"edgeType":"contains","source":490,"target":542},{"edgeType":"calls","source":1482,"target":392},{"edgeType":"contains","source":1175,"target":1240},{"edgeType":"calls","source":1711,"target":428},{"edgeType"
:"imports","source":1198,"target":738},{"edgeType":"calls","source":928,"target":653},{"edgeType":"calls","source":1542,"target":382},{"edgeType":"calls","source":1721,"target":382},{"edgeType":"calls","source":702,"target":524},{"edgeType":"contains","source":53,"target":82},{"edgeType":"calls","source":1567,"target":928},{"edgeType":"calls","source":1585,"target":370},{"edgeType":"calls","source":994,"target":986},{"edgeType":"calls","source":1735,"target":212},{"edgeType":"contains","source":396,"target":548},{"edgeType":"extends","source":924,"target":742},{"edgeType":"contains","source":1539,"target":1586},{"edgeType":"calls","source":1023,"target":879},{"edgeType":"contains","source":1498,"target":1800},{"edgeType":"contains","source":490,"target":541},{"edgeType":"calls","source":1456,"target":1462},{"edgeType":"calls","source":691,"target":337},{"edgeType":"extends","source":249,"target":262},{"edgeType":"calls","source":733,"target":92},{"edgeType":"calls","source":826,"target":380},{"edgeType":"contains","source":1106,"target":1268},{"edgeType":"contains","source":822,"target":823},{"edgeType":"calls","source":1637,"target":1404},{"edgeType":"calls","source":1585,"target":373},{"edgeType":"imports","source":1159,"target":94},{"edgeType":"calls","source":1488,"target":1002},{"edgeType":"calls","source":1316,"target":1313},{"edgeType":"calls","source":1214,"target":247},{"edgeType":"contains","source":54,"target":55},{"edgeType":"contains","source":1498,"target":1797},{"edgeType":"imports","source":674,"target":330},{"edgeType":"contains","source":120,"target":123},{"edgeType":"contains","source":334,"target":359},{"edgeType":"contains","source":396,"target":551},{"edgeType":"contains","source":1539,"target":1583},{"edgeType":"calls","source":1580,"target":1056},{"edgeType":"contains","source":908,"target":1063},{"edgeType":"contains","source":1106,"target":1267},{"edgeType":"calls","source":1797,"target":671},{"edgeType":"calls","source":1114,"target":703},{"edgeType":"calls","source":1637,"target":1403},{"edgeType":"calls","source":1585,"target":372},{"edgeType":"calls","source":1671,"target":1670},{"edgeType":"calls","source":1316,"target":1312},{"edgeType":"contains","source":53,"target":84},{"edgeType":"calls","source":386,"target":545},{"edgeType":"contains","source":120,"target":122},{"edgeType":"contains","source":396,"target":550},{"edgeType":"contains","source":1539,"target":1584},{"edgeType":"contains","source":1498,"target":1798},{"edgeType":"calls","source":1720,"target":679},{"edgeType":"contains","source":490,"target":543},{"edgeType":"calls","source":1704,"target":118},{"edgeType":"calls","source":507,"target":488},{"edgeType":"contains","source":698,"target":705},{"edgeType":"calls","source":756,"target":961},{"edgeType":"contains","source":1106,"target":1270},{"edgeType":"contains","source":1539,"target":1589},{"edgeType":"contains","source":1498,"target":1803},{"edgeType":"calls","source":795,"target":809},{"edgeType":"calls","source":1653,"target":373},{"edgeType":"calls","source":1663,"target":1120},{"edgeType":"imports","source":1198,"target":204},{"edgeType":"calls","source":1585,"target":367},{"edgeType":"calls","source":1755,"target":646},{"edgeType":"contains","source":396,"target":553},{"edgeType":"calls","source":1708,"target":1046},{"edgeType":"contains","source":54,"target":57},{"edgeType":"contains","source":1498,"target":1539},{"edgeType":"contains","source":30,"target":801},{"edgeType":"contains","source":120,"target":125},{"edgeType":"contains","sou
rce":334,"target":361},{"edgeType":"imports","source":1175,"target":1181},{"edgeType":"contains","source":908,"target":1065},{"edgeType":"contains","source":698,"target":704},{"edgeType":"contains","source":1539,"target":1590},{"edgeType":"contains","source":1498,"target":1804},{"edgeType":"calls","source":1663,"target":1119},{"edgeType":"calls","source":1609,"target":679},{"edgeType":"calls","source":1653,"target":372},{"edgeType":"contains","source":396,"target":552},{"edgeType":"contains","source":21,"target":22},{"edgeType":"contains","source":54,"target":56},{"edgeType":"contains","source":120,"target":124},{"edgeType":"contains","source":385,"target":629},{"edgeType":"contains","source":908,"target":1064},{"edgeType":"contains","source":1062,"target":1311},{"edgeType":"calls","source":702,"target":523},{"edgeType":"contains","source":822,"target":827},{"edgeType":"contains","source":908,"target":1068},{"edgeType":"calls","source":1068,"target":1069},{"edgeType":"calls","source":1456,"target":1461},{"edgeType":"contains","source":1106,"target":1272},{"edgeType":"imports","source":29,"target":1030},{"edgeType":"contains","source":1539,"target":1587},{"edgeType":"calls","source":1585,"target":369},{"edgeType":"calls","source":1276,"target":1227},{"edgeType":"calls","source":1281,"target":1072},{"edgeType":"contains","source":396,"target":555},{"edgeType":"calls","source":1735,"target":211},{"edgeType":"contains","source":54,"target":59},{"edgeType":"contains","source":1498,"target":1801},{"edgeType":"contains","source":120,"target":127},{"edgeType":"calls","source":1456,"target":1460},{"edgeType":"contains","source":1539,"target":1588},{"edgeType":"contains","source":1498,"target":1802},{"edgeType":"calls","source":1653,"target":374},{"edgeType":"calls","source":1585,"target":368},{"edgeType":"imports","source":1198,"target":203},{"edgeType":"calls","source":1735,"target":210},{"edgeType":"contains","source":334,"target":362},{"edgeType":"contains","source":396,"target":554},{"edgeType":"contains","source":54,"target":58},{"edgeType":"contains","source":120,"target":126},{"edgeType":"calls","source":1401,"target":258},{"edgeType":"contains","source":27,"target":102},{"edgeType":"contains","source":908,"target":1066},{"edgeType":"contains","source":27,"target":1159},{"edgeType":"calls","source":1652,"target":433},{"edgeType":"calls","source":995,"target":716},{"edgeType":"contains","source":1222,"target":1345},{"edgeType":"contains","source":719,"target":818},{"edgeType":"imports","source":1498,"target":926},{"edgeType":"calls","source":1050,"target":597},{"edgeType":"calls","source":474,"target":483},{"edgeType":"calls","source":678,"target":501},{"edgeType":"contains","source":385,"target":866},{"edgeType":"contains","source":1539,"target":1560},{"edgeType":"calls","source":435,"target":371},{"edgeType":"contains","source":333,"target":363},{"edgeType":"calls","source":756,"target":462},{"edgeType":"calls","source":1073,"target":412},{"edgeType":"contains","source":1424,"target":1426},{"edgeType":"contains","source":94,"target":109},{"edgeType":"imports","source":119,"target":601},{"edgeType":"imports","source":1498,"target":925},{"edgeType":"calls","source":1640,"target":1068},{"edgeType":"calls","source":1068,"target":1095},{"edgeType":"calls","source":1050,"target":596},{"edgeType":"calls","source":678,"target":500},{"edgeType":"contains","source":1539,"target":1561},{"edgeType":"calls","source":1790,"target":382},{"edgeType":"calls","source":435,"target":370},{"edgeType":"contains",
"source":490,"target":516},{"edgeType":"calls","source":1678,"target":1211},{"edgeType":"calls","source":1683,"target":1056},{"edgeType":"contains","source":845,"target":874},{"edgeType":"calls","source":1073,"target":411},{"edgeType":"calls","source":656,"target":654},{"edgeType":"contains","source":94,"target":108},{"edgeType":"contains","source":94,"target":111},{"edgeType":"calls","source":507,"target":519},{"edgeType":"calls","source":878,"target":117},{"edgeType":"contains","source":590,"target":591},{"edgeType":"contains","source":1539,"target":1558},{"edgeType":"contains","source":334,"target":335},{"edgeType":"contains","source":331,"target":428},{"edgeType":"calls","source":678,"target":503},{"edgeType":"imports","source":1498,"target":924},{"edgeType":"calls","source":726,"target":337},{"edgeType":"imports","source":719,"target":760},{"edgeType":"calls","source":435,"target":373},{"edgeType":"calls","source":757,"target":433},{"edgeType":"calls","source":557,"target":291},{"edgeType":"calls","source":1709,"target":1046},{"edgeType":"calls","source":1665,"target":296},{"edgeType":"calls","source":1425,"target":601},{"edgeType":"calls","source":386,"target":571},{"edgeType":"calls","source":814,"target":780},{"edgeType":"calls","source":755,"target":759},{"edgeType":"calls","source":1610,"target":679},{"edgeType":"contains","source":94,"target":110},{"edgeType":"contains","source":1222,"target":1344},{"edgeType":"calls","source":766,"target":681},{"edgeType":"calls","source":1083,"target":632},{"edgeType":"contains","source":331,"target":427},{"edgeType":"contains","source":385,"target":867},{"edgeType":"imports","source":1498,"target":923},{"edgeType":"calls","source":1713,"target":1714},{"edgeType":"calls","source":678,"target":502},{"edgeType":"contains","source":1539,"target":1559},{"edgeType":"calls","source":1796,"target":462},{"edgeType":"calls","source":435,"target":372},{"edgeType":"calls","source":1242,"target":1252},{"edgeType":"contains","source":845,"target":876},{"edgeType":"contains","source":1424,"target":1425},{"edgeType":"calls","source":601,"target":247},{"edgeType":"imports","source":1498,"target":395},{"edgeType":"contains","source":477,"target":657},{"edgeType":"contains","source":1222,"target":1349},{"edgeType":"contains","source":94,"target":113},{"edgeType":"contains","source":1539,"target":1564},{"edgeType":"contains","source":28,"target":45},{"edgeType":"calls","source":1653,"target":398},{"edgeType":"contains","source":1159,"target":395},{"edgeType":"calls","source":1674,"target":1068},{"edgeType":"calls","source":1652,"target":693},{"edgeType":"calls","source":392,"target":114},{"edgeType":"contains","source":334,"target":337},{"edgeType":"calls","source":1493,"target":337},{"edgeType":"calls","source":1002,"target":1023},{"edgeType":"calls","source":1717,"target":1056},{"edgeType":"calls","source":928,"target":411},{"edgeType":"contains","source":1771,"target":1772},{"edgeType":"calls","source":996,"target":417},{"edgeType":"extends","source":776,"target":289},{"edgeType":"calls","source":878,"target":111},{"edgeType":"calls","source":766,"target":412},{"edgeType":"contains","source":476,"target":691},{"edgeType":"contains","source":477,"target":660},{"edgeType":"calls","source":435,"target":367},{"edgeType":"imports","source":674,"target":39},{"edgeType":"calls","source":1399,"target":1400},{"edgeType":"contains","source":1539,"target":1565},{"edgeType":"calls","source":705,"target":716},{"edgeType":"contains","source":94,"target":112},{"edgeType":"ca
lls","source":1233,"target":204},{"edgeType":"calls","source":392,"target":113},{"edgeType":"imports","source":1498,"target":921},{"edgeType":"calls","source":1618,"target":689},{"edgeType":"calls","source":1068,"target":1091},{"edgeType":"contains","source":334,"target":336},{"edgeType":"calls","source":1051,"target":560},{"edgeType":"contains","source":38,"target":263},{"edgeType":"calls","source":766,"target":411},{"edgeType":"contains","source":1771,"target":1773},{"edgeType":"calls","source":842,"target":1226},{"edgeType":"calls","source":878,"target":110},{"edgeType":"contains","source":1738,"target":1739},{"edgeType":"contains","source":1222,"target":1351},{"edgeType":"imports","source":312,"target":689},{"edgeType":"calls","source":1401,"target":284},{"edgeType":"contains","source":334,"target":339},{"edgeType":"contains","source":490,"target":524},{"edgeType":"calls","source":1658,"target":509},{"edgeType":"contains","source":1539,"target":1562},{"edgeType":"extends","source":770,"target":742},{"edgeType":"contains","source":719,"target":824},{"edgeType":"calls","source":435,"target":369},{"edgeType":"calls","source":1678,"target":1210},{"edgeType":"contains","source":546,"target":637},{"edgeType":"contains","source":477,"target":662},{"edgeType":"imports","source":1498,"target":920},{"edgeType":"calls","source":1747,"target":392},{"edgeType":"calls","source":598,"target":601},{"edgeType":"contains","source":1219,"target":386},{"edgeType":"contains","source":845,"target":881},{"edgeType":"contains","source":1539,"target":1563},{"edgeType":"contains","source":94,"target":114},{"edgeType":"contains","source":334,"target":338},{"edgeType":"contains","source":490,"target":523},{"edgeType":"imports","source":1498,"target":919},{"edgeType":"contains","source":477,"target":661},{"edgeType":"imports","source":119,"target":331},{"edgeType":"calls","source":928,"target":412},{"edgeType":"calls","source":1601,"target":689},{"edgeType":"imports","source":312,"target":954},{"edgeType":"calls","source":435,"target":368},{"edgeType":"calls","source":392,"target":374},{"edgeType":"contains","source":845,"target":884},{"edgeType":"imports","source":29,"target":1020},{"edgeType":"calls","source":1068,"target":559},{"edgeType":"calls","source":817,"target":412},{"edgeType":"calls","source":1686,"target":428},{"edgeType":"contains","source":1498,"target":1782},{"edgeType":"contains","source":334,"target":341},{"edgeType":"contains","source":719,"target":826},{"edgeType":"imports","source":719,"target":771},{"edgeType":"calls","source":1696,"target":382},{"edgeType":"contains","source":490,"target":526},{"edgeType":"contains","source":477,"target":664},{"edgeType":"calls","source":877,"target":931},{"edgeType":"imports","source":1183,"target":1185},{"edgeType":"calls","source":1589,"target":1056},{"edgeType":"calls","source":1581,"target":247},{"edgeType":"contains","source":546,"target":639},{"edgeType":"contains","source":1222,"target":1354},{"edgeType":"contains","source":1738,"target":1742},{"edgeType":"contains","source":1539,"target":1569},{"edgeType":"calls","source":1410,"target":1319},{"edgeType":"calls","source":817,"target":411},{"edgeType":"contains","source":334,"target":340},{"edgeType":"contains","source":909,"target":1013},{"edgeType":"calls","source":392,"target":373},{"edgeType":"contains","source":477,"target":663},{"edgeType":"contains","source":1771,"target":1777},{"edgeType":"calls","source":1713,"target":382},{"edgeType":"calls","source":1731,"target":617},{"edgeType":"contains",
"source":546,"target":638},{"edgeType":"imports","source":29,"target":757},{"edgeType":"calls","source":573,"target":576},{"edgeType":"contains","source":17,"target":1182},{"edgeType":"contains","source":1738,"target":1743},{"edgeType":"contains","source":1539,"target":1566},{"edgeType":"contains","source":334,"target":343},{"edgeType":"calls","source":1233,"target":203},{"edgeType":"calls","source":392,"target":112},{"edgeType":"contains","source":490,"target":528},{"edgeType":"contains","source":1771,"target":1774},{"edgeType":"contains","source":477,"target":666},{"edgeType":"calls","source":1679,"target":118},{"edgeType":"contains","source":1738,"target":1740},{"edgeType":"calls","source":1487,"target":1049},{"edgeType":"contains","source":590,"target":598},{"edgeType":"calls","source":1572,"target":1056},{"edgeType":"calls","source":1023,"target":898},{"edgeType":"contains","source":334,"target":342},{"edgeType":"contains","source":1539,"target":1567},{"edgeType":"calls","source":1002,"target":1020},{"edgeType":"contains","source":845,"target":885},{"edgeType":"calls","source":1558,"target":433},{"edgeType":"contains","source":490,"target":527},{"edgeType":"contains","source":1771,"target":1775},{"edgeType":"calls","source":996,"target":414},{"edgeType":"contains","source":546,"target":640},{"edgeType":"contains","source":477,"target":665},{"edgeType":"contains","source":1738,"target":1741},{"edgeType":"contains","source":1222,"target":1353},{"edgeType":"contains","source":9,"target":375},{"edgeType":"calls","source":1684,"target":750},{"edgeType":"contains","source":490,"target":530},{"edgeType":"contains","source":334,"target":345},{"edgeType":"calls","source":1602,"target":121},{"edgeType":"contains","source":546,"target":644},{"edgeType":"calls","source":726,"target":586},{"edgeType":"contains","source":477,"target":669},{"edgeType":"calls","source":392,"target":370},{"edgeType":"calls","source":386,"target":556},{"edgeType":"calls","source":1068,"target":291},{"edgeType":"contains","source":1738,"target":1746},{"edgeType":"calls","source":1186,"target":596},{"edgeType":"contains","source":477,"target":668},{"edgeType":"calls","source":382,"target":416},{"edgeType":"imports","source":29,"target":752},{"edgeType":"contains","source":1763,"target":1764},{"edgeType":"imports","source":674,"target":47},{"edgeType":"imports","source":119,"target":76},{"edgeType":"contains","source":1539,"target":1573},{"edgeType":"calls","source":1597,"target":1068},{"edgeType":"contains","source":1771,"target":1780},{"edgeType":"calls","source":1028,"target":1002},{"edgeType":"calls","source":1028,"target":1001},{"edgeType":"contains","source":334,"target":344},{"edgeType":"contains","source":23,"target":1264},{"edgeType":"calls","source":1585,"target":118},{"edgeType":"extends","source":249,"target":250},{"edgeType":"calls","source":392,"target":369},{"edgeType":"contains","source":582,"target":584},{"edgeType":"contains","source":1062,"target":1295},{"edgeType":"contains","source":490,"target":529},{"edgeType":"contains","source":845,"target":887},{"edgeType":"calls","source":1585,"target":382},{"edgeType":"contains","source":1738,"target":1747},{"edgeType":"calls","source":382,"target":415},{"edgeType":"contains","source":477,"target":667},{"edgeType":"contains","source":1763,"target":1765},{"edgeType":"imports","source":1159,"target":76},{"edgeType":"contains","source":1539,"target":1574},{"edgeType":"imports","source":1219,"target":330},{"edgeType":"contains","source":1771,"target":1781},{"edgeTyp
e":"calls","source":1541,"target":689},{"edgeType":"contains","source":334,"target":347},{"edgeType":"contains","source":490,"target":532},{"edgeType":"calls","source":1606,"target":1056},{"edgeType":"contains","source":1539,"target":1570},{"edgeType":"extends","source":1116,"target":1123},{"edgeType":"calls","source":392,"target":372},{"edgeType":"contains","source":1771,"target":1778},{"edgeType":"calls","source":1551,"target":382},{"edgeType":"imports","source":719,"target":764},{"edgeType":"contains","source":1175,"target":1230},{"edgeType":"calls","source":573,"target":575},{"edgeType":"contains","source":1539,"target":1571},{"edgeType":"contains","source":1738,"target":1744},{"edgeType":"contains","source":334,"target":346},{"edgeType":"contains","source":490,"target":531},{"edgeType":"contains","source":845,"target":889},{"edgeType":"calls","source":1665,"target":547},{"edgeType":"calls","source":392,"target":371},{"edgeType":"calls","source":1002,"target":223},{"edgeType":"calls","source":1002,"target":224},{"edgeType":"calls","source":1186,"target":597},{"edgeType":"contains","source":1539,"target":1572},{"edgeType":"calls","source":1794,"target":247},{"edgeType":"contains","source":1738,"target":1745},{"edgeType":"contains","source":1771,"target":1779},{"edgeType":"calls","source":1636,"target":1408},{"edgeType":"contains","source":391,"target":399},{"edgeType":"calls","source":1705,"target":1119},{"edgeType":"imports","source":119,"target":385},{"edgeType":"contains","source":845,"target":859},{"edgeType":"calls","source":1559,"target":888},{"edgeType":"contains","source":908,"target":756},{"edgeType":"calls","source":1179,"target":1041},{"edgeType":"calls","source":1728,"target":670},{"edgeType":"contains","source":546,"target":615},{"edgeType":"calls","source":1337,"target":371},{"edgeType":"contains","source":916,"target":772},{"edgeType":"calls","source":1722,"target":1120},{"edgeType":"contains","source":1070,"target":1283},{"edgeType":"calls","source":118,"target":371},{"edgeType":"calls","source":1649,"target":740},{"edgeType":"contains","source":1246,"target":1378},{"edgeType":"contains","source":394,"target":570},{"edgeType":"calls","source":1754,"target":392},{"edgeType":"contains","source":1539,"target":1544},{"edgeType":"contains","source":1440,"target":1442},{"edgeType":"calls","source":1436,"target":1001},{"edgeType":"calls","source":1681,"target":1069},{"edgeType":"calls","source":824,"target":417},{"edgeType":"contains","source":1340,"target":1371},{"edgeType":"contains","source":394,"target":569},{"edgeType":"calls","source":1636,"target":1407},{"edgeType":"contains","source":17,"target":1157},{"edgeType":"contains","source":845,"target":858},{"edgeType":"calls","source":1337,"target":370},{"edgeType":"contains","source":1002,"target":1012},{"edgeType":"calls","source":1722,"target":1119},{"edgeType":"contains","source":391,"target":398},{"edgeType":"contains","source":916,"target":771},{"edgeType":"imports","source":119,"target":386},{"edgeType":"calls","source":1614,"target":503},{"edgeType":"contains","source":1070,"target":1282},{"edgeType":"calls","source":1632,"target":1266},{"edgeType":"contains","source":1246,"target":1379},{"edgeType":"calls","source":644,"target":712},{"edgeType":"calls","source":118,"target":370},{"edgeType":"contains","source":1539,"target":1545},{"edgeType":"calls","source":1576,"target":888},{"edgeType":"imports","source":674,"target":358},{"edgeType":"contains","source":1340,"target":1368},{"edgeType":"calls","source":1451,"target"
:1067},{"edgeType":"calls","source":1139,"target":433},{"edgeType":"contains","source":391,"target":401},{"edgeType":"imports","source":1498,"target":1502},{"edgeType":"calls","source":1567,"target":642},{"edgeType":"contains","source":546,"target":617},{"edgeType":"calls","source":1337,"target":373},{"edgeType":"contains","source":476,"target":673},{"edgeType":"calls","source":740,"target":382},{"edgeType":"calls","source":1651,"target":416},{"edgeType":"imports","source":312,"target":742},{"edgeType":"calls","source":1023,"target":593},{"edgeType":"calls","source":118,"target":373},{"edgeType":"contains","source":845,"target":861},{"edgeType":"contains","source":916,"target":774},{"edgeType":"contains","source":1539,"target":1542},{"edgeType":"contains","source":1498,"target":1756},{"edgeType":"calls","source":1486,"target":246},{"edgeType":"imports","source":29,"target":795},{"edgeType":"contains","source":1246,"target":1376},{"edgeType":"contains","source":394,"target":572},{"edgeType":"calls","source":1787,"target":428},{"edgeType":"calls","source":1636,"target":1409},{"edgeType":"calls","source":1647,"target":1068},{"edgeType":"imports","source":119,"target":120},{"edgeType":"calls","source":1705,"target":1120},{"edgeType":"contains","source":391,"target":400},{"edgeType":"imports","source":1498,"target":1501},{"edgeType":"contains","source":845,"target":860},{"edgeType":"imports","source":119,"target":384},{"edgeType":"contains","source":546,"target":616},{"edgeType":"calls","source":1337,"target":372},{"edgeType":"contains","source":23,"target":1237},{"edgeType":"extends","source":925,"target":738},{"edgeType":"calls","source":1640,"target":756},{"edgeType":"calls","source":1651,"target":415},{"edgeType":"contains","source":1070,"target":1284},{"edgeType":"contains","source":916,"target":773},{"edgeType":"calls","source":118,"target":372},{"edgeType":"imports","source":1198,"target":760},{"edgeType":"imports","source":674,"target":356},{"edgeType":"contains","source":1246,"target":1377},{"edgeType":"contains","source":1440,"target":1441},{"edgeType":"contains","source":1539,"target":1543},{"edgeType":"contains","source":391,"target":403},{"edgeType":"calls","source":824,"target":414},{"edgeType":"calls","source":1636,"target":1404},{"edgeType":"imports","source":29,"target":1057},{"edgeType":"calls","source":1418,"target":1027},{"edgeType":"calls","source":1337,"target":367},{"edgeType":"calls","source":1676,"target":428},{"edgeType":"contains","source":1539,"target":1548},{"edgeType":"calls","source":1686,"target":382},{"edgeType":"contains","source":845,"target":863},{"edgeType":"calls","source":1614,"target":500},{"edgeType":"contains","source":1246,"target":1382},{"edgeType":"calls","source":1579,"target":1056},{"edgeType":"extends","source":546,"target":334},{"edgeType":"calls","source":644,"target":709},{"edgeType":"calls","source":118,"target":367},{"edgeType":"calls","source":1796,"target":671},{"edgeType":"contains","source":38,"target":246},{"edgeType":"calls","source":575,"target":204},{"edgeType":"calls","source":629,"target":380},{"edgeType":"calls","source":833,"target":398},{"edgeType":"calls","source":765,"target":392},{"edgeType":"contains","source":391,"target":402},{"edgeType":"calls","source":1636,"target":1403},{"edgeType":"calls","source":1744,"target":433},{"edgeType":"contains","source":1539,"target":1549},{"edgeType":"contains","source":1498,"target":1763},{"edgeType":"contains","source":845,"target":862},{"edgeType":"contains","source":1246,"target":1383},{
"edgeType":"calls","source":1685,"target":412},{"edgeType":"contains","source":916,"target":775},{"edgeType":"calls","source":712,"target":714},{"edgeType":"contains","source":1062,"target":741},{"edgeType":"calls","source":1703,"target":118},{"edgeType":"calls","source":644,"target":708},{"edgeType":"calls","source":842,"target":915},{"edgeType":"contains","source":391,"target":405},{"edgeType":"calls","source":1636,"target":1406},{"edgeType":"extends","source":779,"target":777},{"edgeType":"contains","source":763,"target":765},{"edgeType":"extends","source":1782,"target":334},{"edgeType":"contains","source":119,"target":909},{"edgeType":"calls","source":1337,"target":369},{"edgeType":"calls","source":757,"target":643},{"edgeType":"calls","source":1724,"target":1056},{"edgeType":"calls","source":1068,"target":516},{"edgeType":"contains","source":845,"target":865},{"edgeType":"contains","source":942,"target":1029},{"edgeType":"calls","source":1614,"target":502},{"edgeType":"calls","source":1003,"target":681},{"edgeType":"calls","source":118,"target":369},{"edgeType":"contains","source":1246,"target":1380},{"edgeType":"contains","source":1539,"target":1546},{"edgeType":"imports","source":119,"target":115},{"edgeType":"calls","source":1632,"target":1265},{"edgeType":"contains","source":1498,"target":1760},{"edgeType":"calls","source":842,"target":914},{"edgeType":"contains","source":391,"target":404},{"edgeType":"calls","source":1399,"target":824},{"edgeType":"contains","source":477,"target":645},{"edgeType":"calls","source":757,"target":642},{"edgeType":"calls","source":1447,"target":1450},{"edgeType":"calls","source":1625,"target":689},{"edgeType":"calls","source":1636,"target":1405},{"edgeType":"contains","source":119,"target":908},{"edgeType":"imports","source":29,"target":263},{"edgeType":"calls","source":676,"target":246},{"edgeType":"calls","source":1337,"target":368},{"edgeType":"contains","source":17,"target":1163},{"edgeType":"calls","source":1703,"target":913},{"edgeType":"contains","source":916,"target":777},{"edgeType":"contains","source":1539,"target":1547},{"edgeType":"contains","source":845,"target":864},{"edgeType":"calls","source":1614,"target":501},{"edgeType":"contains","source":1246,"target":1381},{"edgeType":"calls","source":118,"target":368},{"edgeType":"imports","source":1159,"target":115},{"edgeType":"contains","source":477,"target":648},{"edgeType":"contains","source":763,"target":767},{"edgeType":"calls","source":1548,"target":428},{"edgeType":"calls","source":1567,"target":632},{"edgeType":"calls","source":386,"target":246},{"edgeType":"calls","source":707,"target":337},{"edgeType":"calls","source":1315,"target":1309},{"edgeType":"extends","source":828,"target":845},{"edgeType":"contains","source":1246,"target":1386},{"edgeType":"calls","source":670,"target":692},{"edgeType":"calls","source":1759,"target":229},{"edgeType":"contains","source":1539,"target":1552},{"edgeType":"contains","source":391,"target":407},{"edgeType":"contains","source":1465,"target":1468},{"edgeType":"contains","source":102,"target":117},{"edgeType":"calls","source":1494,"target":1044},{"edgeType":"contains","source":476,"target":678},{"edgeType":"contains","source":391,"target":406},{"edgeType":"imports","source":119,"target":923},{"edgeType":"extends","source":333,"target":334},{"edgeType":"calls","source":1652,"target":374},{"edgeType":"contains","source":1106,"target":703},{"edgeType":"contains","source":719,"target":1337},{"edgeType":"extends","source":589,"target":590},{"edgeType":"cont
ains","source":1246,"target":1387},{"edgeType":"contains","source":916,"target":779},{"edgeType":"imports","source":119,"target":395},{"edgeType":"contains","source":1539,"target":1553},{"edgeType":"contains","source":394,"target":577},{"edgeType":"calls","source":575,"target":203},{"edgeType":"calls","source":1553,"target":1068},{"edgeType":"calls","source":1139,"target":689},{"edgeType":"calls","source":1003,"target":412},{"edgeType":"contains","source":1539,"target":1550},{"edgeType":"calls","source":1557,"target":679},{"edgeType":"contains","source":1246,"target":1384},{"edgeType":"contains","source":394,"target":580},{"edgeType":"calls","source":1685,"target":411},{"edgeType":"contains","source":94,"target":103},{"edgeType":"calls","source":1068,"target":247},{"edgeType":"contains","source":1465,"target":1466},{"edgeType":"contains","source":490,"target":511},{"edgeType":"contains","source":391,"target":409},{"edgeType":"calls","source":1757,"target":557},{"edgeType":"contains","source":546,"target":624},{"edgeType":"calls","source":1415,"target":588},{"edgeType":"calls","source":700,"target":291},{"edgeType":"calls","source":386,"target":247},{"edgeType":"calls","source":906,"target":247},{"edgeType":"calls","source":1003,"target":411},{"edgeType":"calls","source":1566,"target":928},{"edgeType":"contains","source":1246,"target":1385},{"edgeType":"contains","source":394,"target":579},{"edgeType":"contains","source":29,"target":1060},{"edgeType":"contains","source":1539,"target":1551},{"edgeType":"calls","source":1001,"target":1002},{"edgeType":"contains","source":391,"target":408},{"edgeType":"contains","source":1465,"target":1467},{"edgeType":"contains","source":490,"target":510},{"edgeType":"calls","source":744,"target":507},{"edgeType":"contains","source":1539,"target":1556},{"edgeType":"calls","source":1652,"target":371},{"edgeType":"imports","source":1062,"target":225},{"edgeType":"contains","source":1246,"target":1390},{"edgeType":"calls","source":1315,"target":247},{"edgeType":"calls","source":1160,"target":824},{"edgeType":"calls","source":693,"target":503},{"edgeType":"contains","source":94,"target":105},{"edgeType":"contains","source":546,"target":627},{"edgeType":"contains","source":490,"target":513},{"edgeType":"calls","source":1076,"target":1050},{"edgeType":"calls","source":560,"target":398},{"edgeType":"calls","source":629,"target":636},{"edgeType":"calls","source":1351,"target":716},{"edgeType":"calls","source":995,"target":653},{"edgeType":"contains","source":1539,"target":1557},{"edgeType":"contains","source":1498,"target":1771},{"edgeType":"calls","source":1652,"target":370},{"edgeType":"calls","source":1711,"target":1712},{"edgeType":"contains","source":1246,"target":1391},{"edgeType":"contains","source":929,"target":1173},{"edgeType":"calls","source":1077,"target":489},{"edgeType":"calls","source":1638,"target":1068},{"edgeType":"contains","source":394,"target":581},{"edgeType":"contains","source":27,"target":1124},{"edgeType":"contains","source":94,"target":104},{"edgeType":"contains","source":490,"target":512},{"edgeType":"calls","source":1667,"target":433},{"edgeType":"contains","source":391,"target":410},{"edgeType":"calls","source":693,"target":502},{"edgeType":"calls","source":1423,"target":337},{"edgeType":"extends","source":589,"target":333},{"edgeType":"imports","source":119,"target":387},{"edgeType":"calls","source":1652,"target":373},{"edgeType":"calls","source":1707,"target":1046},{"edgeType":"contains","source":490,"target":515},{"edgeType":"contains",
"source":1539,"target":1554},{"edgeType":"contains","source":476,"target":685},{"edgeType":"contains","source":94,"target":107},{"edgeType":"calls","source":877,"target":879},{"edgeType":"contains","source":1539,"target":1555},{"edgeType":"calls","source":1652,"target":372},{"edgeType":"calls","source":1695,"target":96},{"edgeType":"calls","source":1077,"target":492},{"edgeType":"calls","source":1608,"target":679},{"edgeType":"contains","source":1246,"target":1389},{"edgeType":"calls","source":1794,"target":462},{"edgeType":"contains","source":490,"target":514},{"edgeType":"contains","source":17,"target":379},{"edgeType":"contains","source":719,"target":814},{"edgeType":"calls","source":1271,"target":291},{"edgeType":"contains","source":94,"target":106},{"edgeType":"contains","source":1300,"target":1301},{"edgeType":"imports","source":674,"target":360},{"edgeType":"calls","source":1653,"target":369},{"edgeType":"calls","source":1663,"target":59},{"edgeType":"contains","source":2,"target":21},{"edgeType":"calls","source":574,"target":521},{"edgeType":"contains","source":944,"target":945},{"edgeType":"contains","source":942,"target":1007},{"edgeType":"contains","source":941,"target":1038},{"edgeType":"contains","source":250,"target":261},{"edgeType":"calls","source":1657,"target":509},{"edgeType":"calls","source":928,"target":910},{"edgeType":"calls","source":118,"target":387},{"edgeType":"calls","source":1583,"target":689},{"edgeType":"contains","source":385,"target":833},{"edgeType":"calls","source":1164,"target":994},{"edgeType":"contains","source":719,"target":1049},{"edgeType":"contains","source":1183,"target":1199},{"edgeType":"imports","source":674,"target":76},{"edgeType":"imports","source":1062,"target":204},{"edgeType":"imports","source":29,"target":1045},{"edgeType":"imports","source":1498,"target":1222},{"edgeType":"calls","source":574,"target":520},{"edgeType":"calls","source":1653,"target":368},{"edgeType":"calls","source":702,"target":516},{"edgeType":"contains","source":942,"target":1006},{"edgeType":"contains","source":941,"target":1037},{"edgeType":"contains","source":250,"target":260},{"edgeType":"calls","source":1746,"target":392},{"edgeType":"calls","source":1561,"target":49},{"edgeType":"contains","source":376,"target":318},{"edgeType":"calls","source":1626,"target":412},{"edgeType":"contains","source":1246,"target":1362},{"edgeType":"imports","source":29,"target":782},{"edgeType":"calls","source":1339,"target":588},{"edgeType":"calls","source":1191,"target":1212},{"edgeType":"contains","source":17,"target":1141},{"edgeType":"contains","source":27,"target":38},{"edgeType":"calls","source":1195,"target":824},{"edgeType":"contains","source":942,"target":1005},{"edgeType":"contains","source":941,"target":1036},{"edgeType":"imports","source":1062,"target":205},{"edgeType":"calls","source":728,"target":503},{"edgeType":"contains","source":1124,"target":384},{"edgeType":"contains","source":225,"target":242},{"edgeType":"contains","source":944,"target":947},{"edgeType":"contains","source":942,"target":1009},{"edgeType":"contains","source":2,"target":23},{"edgeType":"extends","source":760,"target":288},{"edgeType":"contains","source":825,"target":936},{"edgeType":"contains","source":1175,"target":1185},{"edgeType":"contains","source":719,"target":1051},{"edgeType":"imports","source":29,"target":778},{"edgeType":"calls","source":1651,"target":433},{"edgeType":"calls","source":1653,"target":371},{"edgeType":"contains","source":1246,"target":1359},{"edgeType":"calls","source":432,"
target":433},{"edgeType":"calls","source":1002,"target":204},{"edgeType":"contains","source":102,"target":886},{"edgeType":"calls","source":1653,"target":370},{"edgeType":"contains","source":674,"target":331},{"edgeType":"contains","source":376,"target":320},{"edgeType":"contains","source":941,"target":1039},{"edgeType":"contains","source":942,"target":1008},{"edgeType":"contains","source":944,"target":946},{"edgeType":"contains","source":825,"target":935},{"edgeType":"calls","source":1789,"target":382},{"edgeType":"contains","source":119,"target":888},{"edgeType":"imports","source":1062,"target":203},{"edgeType":"imports","source":29,"target":1044},{"edgeType":"calls","source":1002,"target":203},{"edgeType":"contains","source":908,"target":740},{"edgeType":"calls","source":629,"target":403},{"edgeType":"contains","source":916,"target":760},{"edgeType":"imports","source":674,"target":601},{"edgeType":"contains","source":944,"target":949},{"edgeType":"contains","source":942,"target":1011},{"edgeType":"calls","source":818,"target":88},{"edgeType":"contains","source":845,"target":847},{"edgeType":"contains","source":1246,"target":1365},{"edgeType":"contains","source":1070,"target":1271},{"edgeType":"calls","source":1658,"target":474},{"edgeType":"extends","source":771,"target":742},{"edgeType":"calls","source":728,"target":500},{"edgeType":"contains","source":1183,"target":1203},{"edgeType":"imports","source":29,"target":1041},{"edgeType":"contains","source":85,"target":95},{"edgeType":"contains","source":944,"target":948},{"edgeType":"calls","source":547,"target":560},{"edgeType":"contains","source":674,"target":333},{"edgeType":"contains","source":845,"target":846},{"edgeType":"contains","source":376,"target":322},{"edgeType":"calls","source":1602,"target":888},{"edgeType":"contains","source":1175,"target":1186},{"edgeType":"calls","source":818,"target":87},{"edgeType":"calls","source":1003,"target":433},{"edgeType":"contains","source":845,"target":849},{"edgeType":"calls","source":1652,"target":398},{"edgeType":"calls","source":1653,"target":367},{"edgeType":"calls","source":574,"target":519},{"edgeType":"contains","source":1246,"target":1363},{"edgeType":"calls","source":1673,"target":1068},{"edgeType":"contains","source":2,"target":27},{"edgeType":"calls","source":1626,"target":411},{"edgeType":"contains","source":1106,"target":1214},{"edgeType":"calls","source":824,"target":433},{"edgeType":"calls","source":1775,"target":284},{"edgeType":"calls","source":1191,"target":1211},{"edgeType":"calls","source":1056,"target":111},{"edgeType":"imports","source":29,"target":246},{"edgeType":"calls","source":728,"target":502},{"edgeType":"calls","source":671,"target":682},{"edgeType":"contains","source":376,"target":324},{"edgeType":"contains","source":944,"target":950},{"edgeType":"contains","source":845,"target":848},{"edgeType":"contains","source":546,"target":868},{"edgeType":"calls","source":118,"target":384},{"edgeType":"contains","source":1246,"target":1364},{"edgeType":"contains","source":1106,"target":1213},{"edgeType":"calls","source":1617,"target":689},{"edgeType":"calls","source":1490,"target":398},{"edgeType":"calls","source":1191,"target":1210},{"edgeType":"calls","source":728,"target":501},{"edgeType":"imports","source":29,"target":1040},{"edgeType":"calls","source":1056,"target":110},{"edgeType":"contains","source":1192,"target":1193},{"edgeType":"contains","source":845,"target":851},{"edgeType":"contains","source":376,"target":327},{"edgeType":"calls","source":1068,"target":526},{"e
dgeType":"contains","source":394,"target":562},{"edgeType":"calls","source":685,"target":507},{"edgeType":"contains","source":916,"target":764},{"edgeType":"contains","source":942,"target":1015},{"edgeType":"contains","source":424,"target":425},{"edgeType":"contains","source":1070,"target":1275},{"edgeType":"calls","source":1550,"target":382},{"edgeType":"calls","source":1798,"target":622},{"edgeType":"calls","source":1795,"target":1772},{"edgeType":"contains","source":845,"target":850},{"edgeType":"contains","source":376,"target":326},{"edgeType":"contains","source":394,"target":561},{"edgeType":"calls","source":1664,"target":547},{"edgeType":"contains","source":944,"target":952},{"edgeType":"contains","source":1070,"target":1274},{"edgeType":"calls","source":757,"target":653},{"edgeType":"extends","source":699,"target":334},{"edgeType":"calls","source":1798,"target":621},{"edgeType":"calls","source":671,"target":676},{"edgeType":"calls","source":1068,"target":528},{"edgeType":"contains","source":1159,"target":1161},{"edgeType":"contains","source":394,"target":564},{"edgeType":"calls","source":1685,"target":428},{"edgeType":"contains","source":719,"target":795},{"edgeType":"imports","source":38,"target":243},{"edgeType":"contains","source":845,"target":853},{"edgeType":"contains","source":546,"target":873},{"edgeType":"contains","source":1070,"target":1277},{"edgeType":"calls","source":1195,"target":291},{"edgeType":"calls","source":1695,"target":382},{"edgeType":"calls","source":660,"target":492},{"edgeType":"calls","source":795,"target":799},{"edgeType":"imports","source":29,"target":1051},{"edgeType":"contains","source":376,"target":329},{"edgeType":"calls","source":1783,"target":296},{"edgeType":"contains","source":845,"target":852},{"edgeType":"contains","source":1192,"target":1194},{"edgeType":"calls","source":1068,"target":527},{"edgeType":"contains","source":394,"target":563},{"edgeType":"calls","source":1754,"target":1459},{"edgeType":"extends","source":675,"target":288},{"edgeType":"contains","source":1070,"target":1276},{"edgeType":"contains","source":27,"target":312},{"edgeType":"imports","source":38,"target":244},{"edgeType":"calls","source":877,"target":898},{"edgeType":"contains","source":941,"target":1047},{"edgeType":"contains","source":942,"target":1016},{"edgeType":"contains","source":2,"target":30},{"edgeType":"calls","source":1795,"target":1773},{"edgeType":"calls","source":1668,"target":689},{"edgeType":"imports","source":38,"target":245},{"edgeType":"calls","source":1798,"target":623},{"edgeType":"contains","source":376,"target":328},{"edgeType":"calls","source":734,"target":575},{"edgeType":"contains","source":394,"target":566},{"edgeType":"calls","source":1719,"target":689},{"edgeType":"contains","source":1060,"target":1061},{"edgeType":"contains","source":546,"target":875},{"edgeType":"calls","source":753,"target":509},{"edgeType":"extends","source":768,"target":315},{"edgeType":"calls","source":1179,"target":1045},{"edgeType":"calls","source":1798,"target":618},{"edgeType":"contains","source":2,"target":33},{"edgeType":"imports","source":38,"target":241},{"edgeType":"contains","source":845,"target":855},{"edgeType":"calls","source":1595,"target":40},{"edgeType":"contains","source":30,"target":1280},{"edgeType":"calls","source":1591,"target":428},{"edgeType":"contains","source":1539,"target":1540},{"edgeType":"contains","source":1062,"target":734},{"edgeType":"imports","source":29,"target":1049},{"edgeType":"contains","source":1246,"target":1374},{"edgeType":"call
s","source":712,"target":722},{"edgeType":"calls","source":1797,"target":384},{"edgeType":"contains","source":394,"target":565},{"edgeType":"calls","source":1567,"target":643},{"edgeType":"contains","source":845,"target":854},{"edgeType":"calls","source":1337,"target":374},{"edgeType":"calls","source":798,"target":434},{"edgeType":"calls","source":1726,"target":735},{"edgeType":"calls","source":736,"target":507},{"edgeType":"calls","source":1023,"target":594},{"edgeType":"calls","source":118,"target":374},{"edgeType":"contains","source":1539,"target":1541},{"edgeType":"calls","source":670,"target":703},{"edgeType":"contains","source":1246,"target":1375},{"edgeType":"calls","source":1003,"target":689},{"edgeType":"contains","source":8,"target":378},{"edgeType":"calls","source":1567,"target":382},{"edgeType":"contains","source":394,"target":568},{"edgeType":"calls","source":682,"target":598},{"edgeType":"calls","source":1068,"target":524},{"edgeType":"contains","source":845,"target":857},{"edgeType":"calls","source":1567,"target":118},{"edgeType":"contains","source":916,"target":770},{"edgeType":"calls","source":1001,"target":222},{"edgeType":"contains","source":1070,"target":1281},{"edgeType":"calls","source":1798,"target":620},{"edgeType":"imports","source":1219,"target":360},{"edgeType":"imports","source":1062,"target":206},{"edgeType":"calls","source":689,"target":117},{"edgeType":"calls","source":1596,"target":1068},{"edgeType":"contains","source":394,"target":567},{"edgeType":"contains","source":1246,"target":1372},{"edgeType":"calls","source":1068,"target":523},{"edgeType":"contains","source":916,"target":769},{"edgeType":"calls","source":1798,"target":619},{"edgeType":"contains","source":845,"target":856},{"edgeType":"contains","source":23,"target":1498},{"edgeType":"calls","source":1667,"target":716},{"edgeType":"calls","source":382,"target":384},{"edgeType":"calls","source":1584,"target":118},{"edgeType":"calls","source":646,"target":392},{"edgeType":"calls","source":1584,"target":382}],"nodes":[{"groupName":"","id":2,"nodeType":"Repo","repoName":"psf#requests#39d0fdd9096f7dceccbc8f82e1eda7dd64717a8e"},{"id":7,"name":".github","nodeType":"Package"},{"id":8,"name":"docs","nodeType":"Package"},{"id":9,"name":"docs/user","nodeType":"Package"},{"id":10,"name":"docs/community","nodeType":"Package"},{"id":12,"name":"ISSUE_TEMPLATE.md","nodeType":"TextFile","path":".github","text":"Summary.\n\n## Expected Result\n\nWhat you expected.\n\n## Actual Result\n\nWhat happened instead.\n\n## Reproduction Steps\n\n```python\nimport requests\n\n```\n\n## System Information\n\n $ python -m requests.help\n\n```\n\n```\n\nThis command is only available on Requests v2.16.4 and greater. Otherwise,\nplease provide some basic information about your system (Python version,\noperating system, &c)."},{"id":13,"name":"faq.rst","nodeType":"TextFile","path":"docs/community","text":".. 
_faq:\n\nFrequently Asked Questions\n==========================\n\nThis part of the documentation answers common questions about Requests.\n\nEncoded Data?\n-------------\n\nRequests automatically decompresses gzip-encoded responses, and does\nits best to decode response content to unicode when possible.\n\nWhen either the `brotli `_ or `brotlicffi `_\npackage is installed, requests also decodes Brotli-encoded responses.\n\nYou can get direct access to the raw response (and even the socket),\nif needed as well.\n\n\nCustom User-Agents?\n-------------------\n\nRequests allows you to easily override User-Agent strings, along with\nany other HTTP Header. See `documentation about headers `_.\n\n\n\nWhy not Httplib2?\n-----------------\n\nChris Adams gave an excellent summary on\n`Hacker News `_:\n\n httplib2 is part of why you should use requests: it's far more respectable\n as a client but not as well documented and it still takes way too much code\n for basic operations. I appreciate what httplib2 is trying to do, that\n there's a ton of hard low-level annoyances in building a modern HTTP\n client, but really, just use requests instead. Kenneth Reitz is very\n motivated and he gets the degree to which simple things should be simple\n whereas httplib2 feels more like an academic exercise than something\n people should use to build production systems[1].\n\n Disclosure: I'm listed in the requests AUTHORS file but can claim credit\n for, oh, about 0.0001% of the awesomeness.\n\n 1. http://code.google.com/p/httplib2/issues/detail?id=96 is a good example:\n an annoying bug which affect many people, there was a fix available for\n months, which worked great when I applied it in a fork and pounded a couple\n TB of data through it, but it took over a year to make it into trunk and\n even longer to make it onto PyPI where any other project which required \"\n httplib2\" would get the working version.\n\n\nPython 3 Support?\n-----------------\n\nYes! Requests officially supports Python 2.7 & 3.6+ and PyPy.\n\nPython 2 Support?\n-----------------\n\nYes! We do not have immediate plans to `sunset\n`_ our support for Python\n2.7. We understand that we have a large user base with varying needs.\n\nThat said, it is *highly* recommended users migrate to Python 3.6+ since Python\n2.7 is no longer receiving bug fixes or security updates as of January 1, 2020.\n\nWhat are \"hostname doesn't match\" errors?\n-----------------------------------------\n\nThese errors occur when :ref:`SSL certificate verification `\nfails to match the certificate the server responds with to the hostname\nRequests thinks it's contacting. If you're certain the server's SSL setup is\ncorrect (for example, because you can visit the site with your browser) and\nyou're using Python 2.7, a possible explanation is that you need\nServer-Name-Indication.\n\n`Server-Name-Indication`_, or SNI, is an official extension to SSL where the\nclient tells the server what hostname it is contacting. This is important\nwhen servers are using `Virtual Hosting`_. When such servers are hosting\nmore than one SSL site they need to be able to return the appropriate\ncertificate based on the hostname the client is connecting to.\n\nPython3 and Python 2.7.9+ include native support for SNI in their SSL modules.\nFor information on using SNI with Requests on Python < 2.7.9 refer to this\n`Stack Overflow answer`_.\n\n.. _`Server-Name-Indication`: https://en.wikipedia.org/wiki/Server_Name_Indication\n.. 
_`virtual hosting`: https://en.wikipedia.org/wiki/Virtual_hosting\n.. _`Stack Overflow answer`: https://stackoverflow.com/questions/18578439/using-requests-with-tls-doesnt-give-sni-support/18579484#18579484\n"},{"id":14,"name":"quickstart.rst","nodeType":"TextFile","path":"docs/user","text":".. _quickstart:\n\nQuickstart\n==========\n\n.. module:: requests.models\n\nEager to get started? This page gives a good introduction in how to get started\nwith Requests.\n\nFirst, make sure that:\n\n* Requests is :ref:`installed `\n* Requests is :ref:`up-to-date `\n\n\nLet's get started with some simple examples.\n\n\nMake a Request\n--------------\n\nMaking a request with Requests is very simple.\n\nBegin by importing the Requests module::\n\n >>> import requests\n\nNow, let's try to get a webpage. For this example, let's get GitHub's public\ntimeline::\n\n >>> r = requests.get('https://api.github.com/events')\n\nNow, we have a :class:`Response ` object called ``r``. We can\nget all the information we need from this object.\n\nRequests' simple API means that all forms of HTTP request are as obvious. For\nexample, this is how you make an HTTP POST request::\n\n >>> r = requests.post('https://httpbin.org/post', data={'key': 'value'})\n\nNice, right? What about the other HTTP request types: PUT, DELETE, HEAD and\nOPTIONS? These are all just as simple::\n\n >>> r = requests.put('https://httpbin.org/put', data={'key': 'value'})\n >>> r = requests.delete('https://httpbin.org/delete')\n >>> r = requests.head('https://httpbin.org/get')\n >>> r = requests.options('https://httpbin.org/get')\n\nThat's all well and good, but it's also only the start of what Requests can\ndo.\n\n\nPassing Parameters In URLs\n--------------------------\n\nYou often want to send some sort of data in the URL's query string. If\nyou were constructing the URL by hand, this data would be given as key/value\npairs in the URL after a question mark, e.g. ``httpbin.org/get?key=val``.\nRequests allows you to provide these arguments as a dictionary of strings,\nusing the ``params`` keyword argument. As an example, if you wanted to pass\n``key1=value1`` and ``key2=value2`` to ``httpbin.org/get``, you would use the\nfollowing code::\n\n >>> payload = {'key1': 'value1', 'key2': 'value2'}\n >>> r = requests.get('https://httpbin.org/get', params=payload)\n\nYou can see that the URL has been correctly encoded by printing the URL::\n\n >>> print(r.url)\n https://httpbin.org/get?key2=value2&key1=value1\n\nNote that any dictionary key whose value is ``None`` will not be added to the\nURL's query string.\n\nYou can also pass a list of items as a value::\n\n >>> payload = {'key1': 'value1', 'key2': ['value2', 'value3']}\n\n >>> r = requests.get('https://httpbin.org/get', params=payload)\n >>> print(r.url)\n https://httpbin.org/get?key1=value1&key2=value2&key2=value3\n\nResponse Content\n----------------\n\nWe can read the content of the server's response. Consider the GitHub timeline\nagain::\n\n >>> import requests\n\n >>> r = requests.get('https://api.github.com/events')\n >>> r.text\n '[{\"repository\":{\"open_issues\":0,\"url\":\"https://github.com/...\n\nRequests will automatically decode content from the server. Most unicode\ncharsets are seamlessly decoded.\n\nWhen you make a request, Requests makes educated guesses about the encoding of\nthe response based on the HTTP headers. The text encoding guessed by Requests\nis used when you access ``r.text``. 
You can find out what encoding Requests is\nusing, and change it, using the ``r.encoding`` property::\n\n >>> r.encoding\n 'utf-8'\n >>> r.encoding = 'ISO-8859-1'\n\nIf you change the encoding, Requests will use the new value of ``r.encoding``\nwhenever you call ``r.text``. You might want to do this in any situation where\nyou can apply special logic to work out what the encoding of the content will\nbe. For example, HTML and XML have the ability to specify their encoding in\ntheir body. In situations like this, you should use ``r.content`` to find the\nencoding, and then set ``r.encoding``. This will let you use ``r.text`` with\nthe correct encoding.\n\nRequests will also use custom encodings in the event that you need them. If\nyou have created your own encoding and registered it with the ``codecs``\nmodule, you can simply use the codec name as the value of ``r.encoding`` and\nRequests will handle the decoding for you.\n\nBinary Response Content\n-----------------------\n\nYou can also access the response body as bytes, for non-text requests::\n\n >>> r.content\n b'[{\"repository\":{\"open_issues\":0,\"url\":\"https://github.com/...\n\nThe ``gzip`` and ``deflate`` transfer-encodings are automatically decoded for you.\n\nThe ``br`` transfer-encoding is automatically decoded for you if a Brotli library\nlike `brotli `_ or `brotlicffi `_ is installed.\n\nFor example, to create an image from binary data returned by a request, you can\nuse the following code::\n\n >>> from PIL import Image\n >>> from io import BytesIO\n\n >>> i = Image.open(BytesIO(r.content))\n\n\nJSON Response Content\n---------------------\n\nThere's also a builtin JSON decoder, in case you're dealing with JSON data::\n\n >>> import requests\n\n >>> r = requests.get('https://api.github.com/events')\n >>> r.json()\n [{'repository': {'open_issues': 0, 'url': 'https://github.com/...\n\nIn case the JSON decoding fails, ``r.json()`` raises an exception. For example, if\nthe response gets a 204 (No Content), or if the response contains invalid JSON,\nattempting ``r.json()`` raises ``requests.exceptions.JSONDecodeError``. This wrapper exception\nprovides interoperability for multiple exceptions that may be thrown by different\npython versions and json serialization libraries.\n\nIt should be noted that the success of the call to ``r.json()`` does **not**\nindicate the success of the response. Some servers may return a JSON object in a\nfailed response (e.g. error details with HTTP 500). Such JSON will be decoded\nand returned. To check that a request is successful, use\n``r.raise_for_status()`` or check ``r.status_code`` is what you expect.\n\n\nRaw Response Content\n--------------------\n\nIn the rare case that you'd like to get the raw socket response from the\nserver, you can access ``r.raw``. If you want to do this, make sure you set\n``stream=True`` in your initial request. Once you do, you can do this::\n\n >>> r = requests.get('https://api.github.com/events', stream=True)\n\n >>> r.raw\n \n\n >>> r.raw.read(10)\n '\\x1f\\x8b\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x03'\n\nIn general, however, you should use a pattern like this to save what is being\nstreamed to a file::\n\n with open(filename, 'wb') as fd:\n for chunk in r.iter_content(chunk_size=128):\n fd.write(chunk)\n\nUsing ``Response.iter_content`` will handle a lot of what you would otherwise\nhave to handle when using ``Response.raw`` directly. When streaming a\ndownload, the above is the preferred and recommended way to retrieve the\ncontent. 
Note that ``chunk_size`` can be freely adjusted to a number that\nmay better fit your use cases.\n\n.. note::\n\n An important note about using ``Response.iter_content`` versus ``Response.raw``.\n ``Response.iter_content`` will automatically decode the ``gzip`` and ``deflate``\n transfer-encodings. ``Response.raw`` is a raw stream of bytes -- it does not\n transform the response content. If you really need access to the bytes as they\n were returned, use ``Response.raw``.\n\n\nCustom Headers\n--------------\n\nIf you'd like to add HTTP headers to a request, simply pass in a ``dict`` to the\n``headers`` parameter.\n\nFor example, we didn't specify our user-agent in the previous example::\n\n >>> url = 'https://api.github.com/some/endpoint'\n >>> headers = {'user-agent': 'my-app/0.0.1'}\n\n >>> r = requests.get(url, headers=headers)\n\nNote: Custom headers are given less precedence than more specific sources of information. For instance:\n\n* Authorization headers set with `headers=` will be overridden if credentials\n are specified in ``.netrc``, which in turn will be overridden by the ``auth=``\n parameter. Requests will search for the netrc file at `~/.netrc`, `~/_netrc`,\n or at the path specified by the `NETRC` environment variable.\n* Authorization headers will be removed if you get redirected off-host.\n* Proxy-Authorization headers will be overridden by proxy credentials provided in the URL.\n* Content-Length headers will be overridden when we can determine the length of the content.\n\nFurthermore, Requests does not change its behavior at all based on which custom headers are specified. The headers are simply passed on into the final request.\n\nNote: All header values must be a ``string``, bytestring, or unicode. While permitted, it's advised to avoid passing unicode header values.\n\nMore complicated POST requests\n------------------------------\n\nTypically, you want to send some form-encoded data — much like an HTML form.\nTo do this, simply pass a dictionary to the ``data`` argument. Your\ndictionary of data will automatically be form-encoded when the request is made::\n\n >>> payload = {'key1': 'value1', 'key2': 'value2'}\n\n >>> r = requests.post(\"https://httpbin.org/post\", data=payload)\n >>> print(r.text)\n {\n ...\n \"form\": {\n \"key2\": \"value2\",\n \"key1\": \"value1\"\n },\n ...\n }\n\nThe ``data`` argument can also have multiple values for each key. This can be\ndone by making ``data`` either a list of tuples or a dictionary with lists\nas values. This is particularly useful when the form has multiple elements that\nuse the same key::\n\n >>> payload_tuples = [('key1', 'value1'), ('key1', 'value2')]\n >>> r1 = requests.post('https://httpbin.org/post', data=payload_tuples)\n >>> payload_dict = {'key1': ['value1', 'value2']}\n >>> r2 = requests.post('https://httpbin.org/post', data=payload_dict)\n >>> print(r1.text)\n {\n ...\n \"form\": {\n \"key1\": [\n \"value1\",\n \"value2\"\n ]\n },\n ...\n }\n >>> r1.text == r2.text\n True\n\nThere are times that you may want to send data that is not form-encoded. 
If\nyou pass in a ``string`` instead of a ``dict``, that data will be posted directly.\n\nFor example, the GitHub API v3 accepts JSON-Encoded POST/PATCH data::\n\n >>> import json\n\n >>> url = 'https://api.github.com/some/endpoint'\n >>> payload = {'some': 'data'}\n\n >>> r = requests.post(url, data=json.dumps(payload))\n\nInstead of encoding the ``dict`` yourself, you can also pass it directly using\nthe ``json`` parameter (added in version 2.4.2) and it will be encoded automatically::\n\n >>> url = 'https://api.github.com/some/endpoint'\n >>> payload = {'some': 'data'}\n\n >>> r = requests.post(url, json=payload)\n\nNote, the ``json`` parameter is ignored if either ``data`` or ``files`` is passed.\n\nUsing the ``json`` parameter in the request will change the ``Content-Type`` in the header to ``application/json``.\n\nPOST a Multipart-Encoded File\n-----------------------------\n\nRequests makes it simple to upload Multipart-encoded files::\n\n >>> url = 'https://httpbin.org/post'\n >>> files = {'file': open('report.xls', 'rb')}\n\n >>> r = requests.post(url, files=files)\n >>> r.text\n {\n ...\n \"files\": {\n \"file\": \"\"\n },\n ...\n }\n\nYou can set the filename, content_type and headers explicitly::\n\n >>> url = 'https://httpbin.org/post'\n >>> files = {'file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel', {'Expires': '0'})}\n\n >>> r = requests.post(url, files=files)\n >>> r.text\n {\n ...\n \"files\": {\n \"file\": \"\"\n },\n ...\n }\n\nIf you want, you can send strings to be received as files::\n\n >>> url = 'https://httpbin.org/post'\n >>> files = {'file': ('report.csv', 'some,data,to,send\\nanother,row,to,send\\n')}\n\n >>> r = requests.post(url, files=files)\n >>> r.text\n {\n ...\n \"files\": {\n \"file\": \"some,data,to,send\\\\nanother,row,to,send\\\\n\"\n },\n ...\n }\n\nIn the event you are posting a very large file as a ``multipart/form-data``\nrequest, you may want to stream the request. By default, ``requests`` does not\nsupport this, but there is a separate package which does -\n``requests-toolbelt``. You should read `the toolbelt's documentation\n`_ for more details about how to use it.\n\nFor sending multiple files in one request refer to the :ref:`advanced `\nsection.\n\n.. warning:: It is strongly recommended that you open files in :ref:`binary\n mode `. This is because Requests may attempt to provide\n the ``Content-Length`` header for you, and if it does this value\n will be set to the number of *bytes* in the file. 
Errors may occur\n if you open the file in *text mode*.\n\n\nResponse Status Codes\n---------------------\n\nWe can check the response status code::\n\n >>> r = requests.get('https://httpbin.org/get')\n >>> r.status_code\n 200\n\nRequests also comes with a built-in status code lookup object for easy\nreference::\n\n >>> r.status_code == requests.codes.ok\n True\n\nIf we made a bad request (a 4XX client error or 5XX server error response), we\ncan raise it with\n:meth:`Response.raise_for_status() `::\n\n >>> bad_r = requests.get('https://httpbin.org/status/404')\n >>> bad_r.status_code\n 404\n\n >>> bad_r.raise_for_status()\n Traceback (most recent call last):\n File \"requests/models.py\", line 832, in raise_for_status\n raise http_error\n requests.exceptions.HTTPError: 404 Client Error\n\nBut, since our ``status_code`` for ``r`` was ``200``, when we call\n``raise_for_status()`` we get::\n\n >>> r.raise_for_status()\n None\n\nAll is well.\n\n\nResponse Headers\n----------------\n\nWe can view the server's response headers using a Python dictionary::\n\n >>> r.headers\n {\n 'content-encoding': 'gzip',\n 'transfer-encoding': 'chunked',\n 'connection': 'close',\n 'server': 'nginx/1.0.4',\n 'x-runtime': '148ms',\n 'etag': '\"e1ca502697e5c9317743dc078f67693f\"',\n 'content-type': 'application/json'\n }\n\nThe dictionary is special, though: it's made just for HTTP headers. According to\n`RFC 7230 `_, HTTP Header names\nare case-insensitive.\n\nSo, we can access the headers using any capitalization we want::\n\n >>> r.headers['Content-Type']\n 'application/json'\n\n >>> r.headers.get('content-type')\n 'application/json'\n\nIt is also special in that the server could have sent the same header multiple\ntimes with different values, but requests combines them so they can be\nrepresented in the dictionary within a single mapping, as per\n`RFC 7230 `_:\n\n A recipient MAY combine multiple header fields with the same field name\n into one \"field-name: field-value\" pair, without changing the semantics\n of the message, by appending each subsequent field value to the combined\n field value in order, separated by a comma.\n\nCookies\n-------\n\nIf a response contains some Cookies, you can quickly access them::\n\n >>> url = 'http://example.com/some/cookie/setting/url'\n >>> r = requests.get(url)\n\n >>> r.cookies['example_cookie_name']\n 'example_cookie_value'\n\nTo send your own cookies to the server, you can use the ``cookies``\nparameter::\n\n >>> url = 'https://httpbin.org/cookies'\n >>> cookies = dict(cookies_are='working')\n\n >>> r = requests.get(url, cookies=cookies)\n >>> r.text\n '{\"cookies\": {\"cookies_are\": \"working\"}}'\n\nCookies are returned in a :class:`~requests.cookies.RequestsCookieJar`,\nwhich acts like a ``dict`` but also offers a more complete interface,\nsuitable for use over multiple domains or paths. 
Cookie jars can\nalso be passed in to requests::\n\n >>> jar = requests.cookies.RequestsCookieJar()\n >>> jar.set('tasty_cookie', 'yum', domain='httpbin.org', path='/cookies')\n >>> jar.set('gross_cookie', 'blech', domain='httpbin.org', path='/elsewhere')\n >>> url = 'https://httpbin.org/cookies'\n >>> r = requests.get(url, cookies=jar)\n >>> r.text\n '{\"cookies\": {\"tasty_cookie\": \"yum\"}}'\n\n\nRedirection and History\n-----------------------\n\nBy default Requests will perform location redirection for all verbs except\nHEAD.\n\nWe can use the ``history`` property of the Response object to track redirection.\n\nThe :attr:`Response.history ` list contains the\n:class:`Response ` objects that were created in order to\ncomplete the request. The list is sorted from the oldest to the most recent\nresponse.\n\nFor example, GitHub redirects all HTTP requests to HTTPS::\n\n >>> r = requests.get('http://github.com/')\n\n >>> r.url\n 'https://github.com/'\n\n >>> r.status_code\n 200\n\n >>> r.history\n []\n\n\nIf you're using GET, OPTIONS, POST, PUT, PATCH or DELETE, you can disable\nredirection handling with the ``allow_redirects`` parameter::\n\n >>> r = requests.get('http://github.com/', allow_redirects=False)\n\n >>> r.status_code\n 301\n\n >>> r.history\n []\n\nIf you're using HEAD, you can enable redirection as well::\n\n >>> r = requests.head('http://github.com/', allow_redirects=True)\n\n >>> r.url\n 'https://github.com/'\n\n >>> r.history\n []\n\n\nTimeouts\n--------\n\nYou can tell Requests to stop waiting for a response after a given number of\nseconds with the ``timeout`` parameter. Nearly all production code should use\nthis parameter in nearly all requests. Failure to do so can cause your program\nto hang indefinitely::\n\n >>> requests.get('https://github.com/', timeout=0.001)\n Traceback (most recent call last):\n File \"\", line 1, in \n requests.exceptions.Timeout: HTTPConnectionPool(host='github.com', port=80): Request timed out. (timeout=0.001)\n\n\n.. admonition:: Note\n\n ``timeout`` is not a time limit on the entire response download;\n rather, an exception is raised if the server has not issued a\n response for ``timeout`` seconds (more precisely, if no bytes have been\n received on the underlying socket for ``timeout`` seconds). If no timeout is specified explicitly, requests do\n not time out.\n\n\nErrors and Exceptions\n---------------------\n\nIn the event of a network problem (e.g. DNS failure, refused connection, etc),\nRequests will raise a :exc:`~requests.exceptions.ConnectionError` exception.\n\n:meth:`Response.raise_for_status() ` will\nraise an :exc:`~requests.exceptions.HTTPError` if the HTTP request\nreturned an unsuccessful status code.\n\nIf a request times out, a :exc:`~requests.exceptions.Timeout` exception is\nraised.\n\nIf a request exceeds the configured number of maximum redirections, a\n:exc:`~requests.exceptions.TooManyRedirects` exception is raised.\n\nAll exceptions that Requests explicitly raises inherit from\n:exc:`requests.exceptions.RequestException`.\n\n-----------------------\n\nReady for more? Check out the :ref:`advanced ` section.\n\n\nIf you're on the job market, consider taking `this programming quiz `_. 
A substantial donation will be made to this project, if you find a job through this platform.\n"},{"id":15,"name":"Makefile","nodeType":"TextFile","path":"docs","text":"# Makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS =\nSPHINXBUILD = sphinx-build\nPAPER =\nBUILDDIR = _build\n\n# User-friendly check for sphinx-build\nifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)\n$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)\nendif\n\n# Internal variables.\nPAPEROPT_a4 = -D latex_paper_size=a4\nPAPEROPT_letter = -D latex_paper_size=letter\nALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n# the i18n builder cannot share the environment and doctrees with the others\nI18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n\n.PHONY: help\nhelp:\n\t@echo \"Please use \\`make ' where is one of\"\n\t@echo \" html to make standalone HTML files\"\n\t@echo \" dirhtml to make HTML files named index.html in directories\"\n\t@echo \" singlehtml to make a single large HTML file\"\n\t@echo \" pickle to make pickle files\"\n\t@echo \" json to make JSON files\"\n\t@echo \" htmlhelp to make HTML files and a HTML help project\"\n\t@echo \" qthelp to make HTML files and a qthelp project\"\n\t@echo \" applehelp to make an Apple Help Book\"\n\t@echo \" devhelp to make HTML files and a Devhelp project\"\n\t@echo \" epub to make an epub\"\n\t@echo \" latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter\"\n\t@echo \" latexpdf to make LaTeX files and run them through pdflatex\"\n\t@echo \" latexpdfja to make LaTeX files and run them through platex/dvipdfmx\"\n\t@echo \" text to make text files\"\n\t@echo \" man to make manual pages\"\n\t@echo \" texinfo to make Texinfo files\"\n\t@echo \" info to make Texinfo files and run them through makeinfo\"\n\t@echo \" gettext to make PO message catalogs\"\n\t@echo \" changes to make an overview of all changed/added/deprecated items\"\n\t@echo \" xml to make Docutils-native XML files\"\n\t@echo \" pseudoxml to make pseudoxml-XML files for display purposes\"\n\t@echo \" linkcheck to check all external links for integrity\"\n\t@echo \" doctest to run all doctests embedded in the documentation (if enabled)\"\n\t@echo \" coverage to run coverage check of the documentation (if enabled)\"\n\n.PHONY: clean\nclean:\n\trm -rf $(BUILDDIR)/*\n\n.PHONY: html\nhtml:\n\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/html.\"\n\n.PHONY: dirhtml\ndirhtml:\n\t$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/dirhtml.\"\n\n.PHONY: singlehtml\nsinglehtml:\n\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml\n\t@echo\n\t@echo \"Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml.\"\n\n.PHONY: pickle\npickle:\n\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle\n\t@echo\n\t@echo \"Build finished; now you can process the pickle files.\"\n\n.PHONY: json\njson:\n\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json\n\t@echo\n\t@echo \"Build finished; now you can process the JSON files.\"\n\n.PHONY: htmlhelp\nhtmlhelp:\n\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp\n\t@echo\n\t@echo \"Build finished; now you can run HTML Help Workshop with the\" \\\n\t \".hhp project file in $(BUILDDIR)/htmlhelp.\"\n\n.PHONY: qthelp\nqthelp:\n\t$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp\n\t@echo\n\t@echo \"Build finished; now you can run \"qcollectiongenerator\" with the\" \\\n\t \".qhcp project file in $(BUILDDIR)/qthelp, like this:\"\n\t@echo \"# qcollectiongenerator $(BUILDDIR)/qthelp/Requests.qhcp\"\n\t@echo \"To view the help file:\"\n\t@echo \"# assistant -collectionFile $(BUILDDIR)/qthelp/Requests.qhc\"\n\n.PHONY: applehelp\napplehelp:\n\t$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp\n\t@echo\n\t@echo \"Build finished. The help book is in $(BUILDDIR)/applehelp.\"\n\t@echo \"N.B. You won't be able to view it unless you put it in\" \\\n\t \"~/Library/Documentation/Help or install it in your application\" \\\n\t \"bundle.\"\n\n.PHONY: devhelp\ndevhelp:\n\t$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp\n\t@echo\n\t@echo \"Build finished.\"\n\t@echo \"To view the help file:\"\n\t@echo \"# mkdir -p $$HOME/.local/share/devhelp/Requests\"\n\t@echo \"# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Requests\"\n\t@echo \"# devhelp\"\n\n.PHONY: epub\nepub:\n\t$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub\n\t@echo\n\t@echo \"Build finished. The epub file is in $(BUILDDIR)/epub.\"\n\n.PHONY: latex\nlatex:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo\n\t@echo \"Build finished; the LaTeX files are in $(BUILDDIR)/latex.\"\n\t@echo \"Run \\`make' in that directory to run these through (pdf)latex\" \\\n\t \"(use \\`make latexpdf' here to do that automatically).\"\n\n.PHONY: latexpdf\nlatexpdf:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through pdflatex...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\n.PHONY: latexpdfja\nlatexpdfja:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through platex and dvipdfmx...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\n.PHONY: text\ntext:\n\t$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text\n\t@echo\n\t@echo \"Build finished. The text files are in $(BUILDDIR)/text.\"\n\n.PHONY: man\nman:\n\t$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man\n\t@echo\n\t@echo \"Build finished. The manual pages are in $(BUILDDIR)/man.\"\n\n.PHONY: texinfo\ntexinfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo\n\t@echo \"Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo.\"\n\t@echo \"Run \\`make' in that directory to run these through makeinfo\" \\\n\t \"(use \\`make info' here to do that automatically).\"\n\n.PHONY: info\ninfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo \"Running Texinfo files through makeinfo...\"\n\tmake -C $(BUILDDIR)/texinfo info\n\t@echo \"makeinfo finished; the Info files are in $(BUILDDIR)/texinfo.\"\n\n.PHONY: gettext\ngettext:\n\t$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale\n\t@echo\n\t@echo \"Build finished. The message catalogs are in $(BUILDDIR)/locale.\"\n\n.PHONY: changes\nchanges:\n\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes\n\t@echo\n\t@echo \"The overview file is in $(BUILDDIR)/changes.\"\n\n.PHONY: linkcheck\nlinkcheck:\n\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck\n\t@echo\n\t@echo \"Link check complete; look for any errors in the above output \" \\\n\t \"or in $(BUILDDIR)/linkcheck/output.txt.\"\n\n.PHONY: doctest\ndoctest:\n\t$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest\n\t@echo \"Testing of doctests in the sources finished, look at the \" \\\n\t \"results in $(BUILDDIR)/doctest/output.txt.\"\n\n.PHONY: coverage\ncoverage:\n\t$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage\n\t@echo \"Testing of coverage in the sources finished, look at the \" \\\n\t \"results in $(BUILDDIR)/coverage/python.txt.\"\n\n.PHONY: xml\nxml:\n\t$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml\n\t@echo\n\t@echo \"Build finished. The XML files are in $(BUILDDIR)/xml.\"\n\n.PHONY: pseudoxml\npseudoxml:\n\t$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml\n\t@echo\n\t@echo \"Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml.\"\n"},{"id":16,"name":"CODE_OF_CONDUCT.md","nodeType":"TextFile","path":".github","text":"# Treat each other well\n\nEveryone participating in the _requests_ project, and in particular in the issue tracker,\npull requests, and social media activity, is expected to treat other people with respect\nand more generally to follow the guidelines articulated in the\n[Python Community Code of Conduct](https://www.python.org/psf/codeofconduct/).\n"},{"id":17,"name":"","nodeType":"Package"},{"id":18,"name":"README.md","nodeType":"TextFile","path":"","text":"# Requests\n\n**Requests** is a simple, yet elegant, HTTP library.\n\n```python\n>>> import requests\n>>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'application/json; charset=utf8'\n>>> r.encoding\n'utf-8'\n>>> r.text\n'{\"type\":\"User\"...'\n>>> r.json()\n{'disk_usage': 368627, 'private_gists': 484, ...}\n```\n\nRequests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!\n\nRequests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `500,000+` repositories. 
You may certainly put your trust in this code.\n\n[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)\n[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)\n[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)\n\n## Installing Requests and Supported Versions\n\nRequests is available on PyPI:\n\n```console\n$ python -m pip install requests\n```\n\nRequests officially supports Python 2.7 & 3.6+.\n\n## Supported Features & Best–Practices\n\nRequests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style TLS/SSL Verification\n- Basic & Digest Authentication\n- Familiar `dict`–like Cookies\n- Automatic Content Decompression and Decoding\n- Multi-part File Uploads\n- SOCKS Proxy Support\n- Connection Timeouts\n- Streaming Downloads\n- Automatic honoring of `.netrc`\n- Chunked HTTP Requests\n\n## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)\n\n[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)\n\n## Cloning the repository\n\nWhen cloning the Requests repository, you may need to add the `-c\nfetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see\n[this issue](https://github.com/psf/requests/issues/2690) for more background):\n\n```shell\ngit clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git\n```\n\nYou can also apply this setting to your global Git config:\n\n```shell\ngit config --global fetch.fsck.badTimezone ignore\n```\n\n---\n\n[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)\n"},{"id":19,"name":"tox.ini","nodeType":"TextFile","path":"","text":"[tox]\nenvlist = py{27,36,37,38,39}-{default,use_chardet_on_py3}\n\n[testenv]\ndeps = -rrequirements-dev.txt\nextras =\n security\n socks\ncommands =\n pytest tests\n\n[testenv:default]\n\n[testenv:use_chardet_on_py3]\nextras =\n security\n socks\n use_chardet_on_py3\n"},{"id":20,"name":"setup.cfg","nodeType":"TextFile","path":"","text":"[bdist_wheel]\nuniversal = 1\n\n[metadata]\nlicense_file = LICENSE\n"},{"id":21,"name":".github/workflows","nodeType":"Package"},{"id":22,"name":"lock-issues.yml","nodeType":"TextFile","path":".github/workflows","text":"name: 'Lock Threads'\n\non:\n schedule:\n - cron: '0 * * * *'\n\npermissions:\n issues: write\n pull-requests: write\n\njobs:\n action:\n runs-on: ubuntu-latest\n steps:\n - uses: dessant/lock-threads@v2\n with:\n issue-lock-inactive-days: 90\n pr-lock-inactive-days: 90\n"},{"id":23,"name":"tests","nodeType":"Package"},{"fileName":"test_packages.py","filePath":"tests","id":24,"nodeType":"File","text":"import requests\n\n\ndef test_can_access_urllib3_attribute():\n requests.packages.urllib3\n\n\ndef test_can_access_idna_attribute():\n requests.packages.idna\n\n\ndef test_can_access_chardet_attribute():\n requests.packages.chardet\n"},{"fileName":"utils.py","filePath":"tests","id":25,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport contextlib\nimport os\n\n\n@contextlib.contextmanager\ndef 
override_environ(**kwargs):\n save_env = dict(os.environ)\n for key, value in kwargs.items():\n if value is None:\n del os.environ[key]\n else:\n os.environ[key] = value\n try:\n yield\n finally:\n os.environ.clear()\n os.environ.update(save_env)\n"},{"fileName":"__init__.py","filePath":"tests","id":26,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"Requests test package initialisation.\"\"\"\n\nimport warnings\n\nimport urllib3\nfrom urllib3.exceptions import SNIMissingWarning\n\n# urllib3 sets SNIMissingWarning to only go off once,\n# while this test suite requires it to always fire\n# so that it occurs during test_requests.test_https_warnings\nwarnings.simplefilter('always', SNIMissingWarning)\n"},{"id":27,"name":"requests","nodeType":"Package"},{"fileName":"certs.py","filePath":"requests","id":28,"nodeType":"File","text":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.certs\n~~~~~~~~~~~~~~\n\nThis module returns the preferred default CA certificate bundle. There is\nonly one — the one from the certifi package.\n\nIf you are packaging Requests, e.g., for a Linux distribution or a managed\nenvironment, you can change the definition of where() to return a separately\npackaged CA bundle.\n\"\"\"\nfrom certifi import where\n\nif __name__ == '__main__':\n print(where())\n"},{"fileName":"test_utils.py","filePath":"tests","id":29,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport os\nimport copy\nimport filecmp\nfrom io import BytesIO\nimport tarfile\nimport zipfile\nfrom collections import deque\n\nimport pytest\nfrom requests import compat\nfrom requests.cookies import RequestsCookieJar\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.utils import (\n address_in_network, dotted_netmask, extract_zipped_paths,\n get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,\n get_encodings_from_content, get_environ_proxies,\n guess_filename, guess_json_utf, is_ipv4_address,\n is_valid_cidr, iter_slices, parse_dict_header,\n parse_header_links, prepend_scheme_if_needed,\n requote_uri, select_proxy, should_bypass_proxies, super_len,\n to_key_val_list, to_native_string,\n unquote_header_value, unquote_unreserved,\n urldefragauth, add_dict_to_cookiejar, set_environ)\nfrom requests._internal_utils import unicode_is_ascii\n\nfrom .compat import StringIO, cStringIO\n\n\nclass TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class 
NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_tarfile_member(self, tmpdir):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n\n tar_obj = str(tmpdir.join('test.tar'))\n with tarfile.open(tar_obj, 'w') as tar:\n tar.add(str(file_obj), arcname='test.txt')\n\n with tarfile.open(tar_obj) as tar:\n member = tar.extractfile('test.txt')\n assert super_len(member) == 4\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0\n\n\nclass TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')\n\n\nclass TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'\n\n\nclass TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n 
@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}\n\n\nclass TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)\n\n\nclass TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)\n\n\nclass TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')\n\n\nclass TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == value\n assert isinstance(result, expected_type)\n\n\nclass TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path\n\n\nclass TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '',\n # HTML4 pragma directive\n '',\n # XHTML 1.x served with text/html MIME type\n '',\n # XHTML 1.x served as XML\n '',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n \n \n \n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']\n\n\nclass TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert 
guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected\n\n\nUSER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\nENCODED_USER = compat.quote(USER, '')\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\n\n@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n ('user%user', 'pass')\n ),\n (\n 'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/psf/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected\n\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}\n@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n 
('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n '; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n ';',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '; type=\"image/jpeg\",;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert 
prepend_scheme_if_needed(value, 'http') == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ('file:///some/path/on/disk', True),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)\n\n\n@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = {cookie.name: cookie.value for cookie in cj}\n assert cookiedict == cookies\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be 
bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected\n\n\n@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, ''),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected\n\n\n@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy\n\n\ndef test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)\n"},{"id":30,"name":"docs/_templates","nodeType":"Package"},{"id":31,"name":"sidebarintro.html","nodeType":"TextFile","path":"docs/_templates","text":"

\n \n \"Requests\n \n

\n\n

\n \n

\n\n\n\n\n\n

\n Requests is an elegant and simple HTTP library for Python, built for\n human beings.\n

\n

Sponsored by CERT Gouvernemental - GOVCERT.LU.

\n\n\n\n

Useful Links

\n\n\n\n

Translations

\n\n\n\n
\n
\n"},{"fileName":"compat.py","filePath":"requests","id":32,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.compat\n~~~~~~~~~~~~~~~\n\nThis module handles import compatibility issues between Python 2 and\nPython 3.\n\"\"\"\n\ntry:\n import chardet\nexcept ImportError:\n import charset_normalizer as chardet\n\nimport sys\n\n# -------\n# Pythons\n# -------\n\n# Syntax sugar.\n_ver = sys.version_info\n\n#: Python 2.x?\nis_py2 = (_ver[0] == 2)\n\n#: Python 3.x?\nis_py3 = (_ver[0] == 3)\n\nhas_simplejson = False\ntry:\n import simplejson as json\n has_simplejson = True\nexcept ImportError:\n import json\n\n# ---------\n# Specifics\n# ---------\n\nif is_py2:\n from urllib import (\n quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,\n proxy_bypass, proxy_bypass_environment, getproxies_environment)\n from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag\n from urllib2 import parse_http_list\n import cookielib\n from Cookie import Morsel\n from StringIO import StringIO\n # Keep OrderedDict for backwards compatibility.\n from collections import Callable, Mapping, MutableMapping, OrderedDict\n\n builtin_str = str\n bytes = str\n str = unicode\n basestring = basestring\n numeric_types = (int, long, float)\n integer_types = (int, long)\n JSONDecodeError = ValueError\n\nelif is_py3:\n from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag\n from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment\n from http import cookiejar as cookielib\n from http.cookies import Morsel\n from io import StringIO\n # Keep OrderedDict for backwards compatibility.\n from collections import OrderedDict\n from collections.abc import Callable, Mapping, MutableMapping\n if has_simplejson:\n from simplejson import JSONDecodeError\n else:\n from json import JSONDecodeError\n\n builtin_str = str\n str = str\n bytes = bytes\n basestring = (str, bytes)\n numeric_types = (int, float)\n integer_types = (int,)\n"},{"id":33,"name":"ext","nodeType":"Package"},{"id":34,"name":"requests-logo.svg","nodeType":"TextFile","path":"ext","text":"requestsRequestshumanshttp for "},{"id":35,"name":"codeql-analysis.yml","nodeType":"TextFile","path":".github/workflows","text":"# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# You may wish to alter this file to override the set of languages analyzed,\n# or to provide custom queries or build logic.\nname: \"CodeQL\"\n\non:\n push:\n branches: [main]\n pull_request:\n # The branches below must be a subset of the branches above\n branches: [main]\n schedule:\n - cron: '0 23 * * 0'\n\njobs:\n analyze:\n name: Analyze\n runs-on: ubuntu-latest\n\n strategy:\n fail-fast: false\n\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v2\n with:\n # We must fetch at least the immediate parents so that if this is\n # a pull request then we can checkout the head.\n fetch-depth: 2\n\n # If this run was triggered by a pull request event, then checkout\n # the head of the pull request instead of the merge commit.\n - run: git checkout HEAD^2\n if: ${{ github.event_name == 'pull_request' }}\n\n # Initializes the CodeQL tools for scanning.\n - name: Initialize CodeQL\n uses: github/codeql-action/init@v1\n with:\n languages: \"python\"\n # If you wish to specify custom queries, you can do so here or in a config file.\n # By default, queries listed 
here will override any specified in a config file. \n # Prefix the list here with \"+\" to use these queries and those in the config file.\n # queries: ./path/to/local/query, your-org/your-repo/queries@main\n\n # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).\n # If this step fails, then you should remove it and run the build manually (see below)\n - name: Autobuild\n uses: github/codeql-action/autobuild@v1\n\n # ℹ️ Command-line programs to run using the OS shell.\n # 📚 https://git.io/JvXDl\n\n # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines\n # and modify them (or add more) to build your code if your project\n # uses a compiled language\n\n #- run: |\n # make bootstrap\n # make release\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@v1\n"},{"id":36,"name":".nojekyll","nodeType":"TextFile","path":"docs","text":"\n"},{"id":37,"name":"pytest.ini","nodeType":"TextFile","path":"","text":"[pytest]\naddopts = -p no:warnings --doctest-modules\ndoctest_optionflags= NORMALIZE_WHITESPACE ELLIPSIS"},{"fileName":"_internal_utils.py","filePath":"requests","id":38,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests._internal_utils\n~~~~~~~~~~~~~~\n\nProvides utility functions that are consumed internally by Requests\nwhich depend on extremely few external helpers (such as compat)\n\"\"\"\n\nfrom .compat import is_py2, builtin_str, str\n\n\ndef to_native_string(string, encoding='ascii'):\n \"\"\"Given a string object, regardless of type, returns a representation of\n that string in the native string type, encoding and decoding where\n necessary. This assumes ASCII unless told otherwise.\n \"\"\"\n if isinstance(string, builtin_str):\n out = string\n else:\n if is_py2:\n out = string.encode(encoding)\n else:\n out = string.decode(encoding)\n\n return out\n\n\ndef unicode_is_ascii(u_string):\n \"\"\"Determine if unicode string only contains ASCII characters.\n\n :param str u_string: unicode string to check. 
Must be unicode\n and not Python 2 `str`.\n :rtype: bool\n \"\"\"\n assert isinstance(u_string, str)\n try:\n u_string.encode('ascii')\n return True\n except UnicodeEncodeError:\n return False\n"},{"attributeType":"null","col":0,"comment":"null","endLoc":26,"id":39,"name":"is_py2","nodeType":"Attribute","startLoc":26,"text":"is_py2"},{"col":0,"comment":"null","endLoc":19,"header":"@contextlib.contextmanager\ndef override_environ(**kwargs)","id":40,"name":"override_environ","nodeType":"Function","startLoc":7,"text":"@contextlib.contextmanager\ndef override_environ(**kwargs):\n save_env = dict(os.environ)\n for key, value in kwargs.items():\n if value is None:\n del os.environ[key]\n else:\n os.environ[key] = value\n try:\n yield\n finally:\n os.environ.clear()\n os.environ.update(save_env)"},{"col":0,"comment":"null","endLoc":5,"header":"def test_can_access_urllib3_attribute()","id":41,"name":"test_can_access_urllib3_attribute","nodeType":"Function","startLoc":4,"text":"def test_can_access_urllib3_attribute():\n requests.packages.urllib3"},{"col":0,"comment":"null","endLoc":9,"header":"def test_can_access_idna_attribute()","id":42,"name":"test_can_access_idna_attribute","nodeType":"Function","startLoc":8,"text":"def test_can_access_idna_attribute():\n requests.packages.idna"},{"col":0,"comment":"null","endLoc":13,"header":"def test_can_access_chardet_attribute()","id":43,"name":"test_can_access_chardet_attribute","nodeType":"Function","startLoc":12,"text":"def test_can_access_chardet_attribute():\n requests.packages.chardet"},{"col":0,"comment":"","endLoc":3,"header":"__init__.py#","id":44,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"Requests test package initialisation.\"\"\"\n\nwarnings.simplefilter('always', SNIMissingWarning)"},{"col":0,"comment":"","endLoc":14,"header":"certs.py#","id":45,"name":"","nodeType":"Function","startLoc":4,"text":"\"\"\"\nrequests.certs\n~~~~~~~~~~~~~~\n\nThis module returns the preferred default CA certificate bundle. There is\nonly one — the one from the certifi package.\n\nIf you are packaging Requests, e.g., for a Linux distribution or a managed\nenvironment, you can change the definition of where() to return a separately\npackaged CA bundle.\n\"\"\"\n\nif __name__ == '__main__':\n print(where())"},{"className":"BytesIO","col":0,"comment":"null","endLoc":119,"id":46,"nodeType":"Class","startLoc":110,"text":"class BytesIO(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes\n def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ...\n # BytesIO does not contain a \"name\" field. This workaround is necessary\n # to allow BytesIO sub-classes to add this field, as it is defined\n # as a read-only property on IO[].\n name: Any\n def __enter__(self) -> Self: ...\n def getvalue(self) -> bytes: ...\n def getbuffer(self) -> memoryview: ...\n def read1(self, size: int | None = -1, /) -> bytes: ..."},{"attributeType":"Callable","col":0,"comment":"null","endLoc":204,"id":47,"name":"Callable","nodeType":"Attribute","startLoc":204,"text":"Callable"},{"id":50,"name":"api.rst","nodeType":"TextFile","path":"docs","text":".. _api:\n\nDeveloper Interface\n===================\n\n.. module:: requests\n\nThis part of the documentation covers all the interfaces of Requests. 
For\nparts where Requests depends on external libraries, we document the most\nimportant right here and provide links to the canonical documentation.\n\n\nMain Interface\n--------------\n\nAll of Requests' functionality can be accessed by these 7 methods.\nThey all return an instance of the :class:`Response ` object.\n\n.. autofunction:: request\n\n.. autofunction:: head\n.. autofunction:: get\n.. autofunction:: post\n.. autofunction:: put\n.. autofunction:: patch\n.. autofunction:: delete\n\nExceptions\n----------\n\n.. autoexception:: requests.RequestException\n.. autoexception:: requests.ConnectionError\n.. autoexception:: requests.HTTPError\n.. autoexception:: requests.URLRequired\n.. autoexception:: requests.TooManyRedirects\n.. autoexception:: requests.ConnectTimeout\n.. autoexception:: requests.ReadTimeout\n.. autoexception:: requests.Timeout\n\n\nRequest Sessions\n----------------\n\n.. _sessionapi:\n\n.. autoclass:: Session\n :inherited-members:\n\n\nLower-Level Classes\n-------------------\n\n.. autoclass:: requests.Request\n :inherited-members:\n\n.. autoclass:: Response\n :inherited-members:\n\n\nLower-Lower-Level Classes\n-------------------------\n\n.. autoclass:: requests.PreparedRequest\n :inherited-members:\n\n.. autoclass:: requests.adapters.BaseAdapter\n :inherited-members:\n\n.. autoclass:: requests.adapters.HTTPAdapter\n :inherited-members:\n\nAuthentication\n--------------\n\n.. autoclass:: requests.auth.AuthBase\n.. autoclass:: requests.auth.HTTPBasicAuth\n.. autoclass:: requests.auth.HTTPProxyAuth\n.. autoclass:: requests.auth.HTTPDigestAuth\n\n\n\nEncodings\n---------\n\n.. autofunction:: requests.utils.get_encodings_from_content\n.. autofunction:: requests.utils.get_encoding_from_headers\n.. autofunction:: requests.utils.get_unicode_from_response\n\n\n.. _api-cookies:\n\nCookies\n-------\n\n.. autofunction:: requests.utils.dict_from_cookiejar\n.. autofunction:: requests.utils.add_dict_to_cookiejar\n.. autofunction:: requests.cookies.cookiejar_from_dict\n\n.. autoclass:: requests.cookies.RequestsCookieJar\n :inherited-members:\n\n.. autoclass:: requests.cookies.CookieConflictError\n :inherited-members:\n\n\n\nStatus Code Lookup\n------------------\n\n.. autoclass:: requests.codes\n\n.. automodule:: requests.status_codes\n\n\nMigrating to 1.x\n----------------\n\nThis section details the main differences between 0.x and 1.x and is meant\nto ease the pain of upgrading.\n\n\nAPI Changes\n~~~~~~~~~~~\n\n* ``Response.json`` is now a callable and not a property of a response.\n\n ::\n\n import requests\n r = requests.get('https://api.github.com/events')\n r.json() # This *call* raises an exception if JSON decoding fails\n\n* The ``Session`` API has changed. Sessions objects no longer take parameters.\n ``Session`` is also now capitalized, but it can still be\n instantiated with a lowercase ``session`` for backwards compatibility.\n\n ::\n\n s = requests.Session() # formerly, session took parameters\n s.auth = auth\n s.headers.update(headers)\n r = s.get('https://httpbin.org/headers')\n\n* All request hooks have been removed except 'response'.\n\n* Authentication helpers have been broken out into separate modules. See\n requests-oauthlib_ and requests-kerberos_.\n\n.. _requests-oauthlib: https://github.com/requests/requests-oauthlib\n.. _requests-kerberos: https://github.com/requests/requests-kerberos\n\n* The parameter for streaming requests was changed from ``prefetch`` to\n ``stream`` and the logic was inverted. 
In addition, ``stream`` is now\n required for raw response reading.\n\n ::\n\n # in 0.x, passing prefetch=False would accomplish the same thing\n r = requests.get('https://api.github.com/events', stream=True)\n for chunk in r.iter_content(8192):\n ...\n\n* The ``config`` parameter to the requests method has been removed. Some of\n these options are now configured on a ``Session`` such as keep-alive and\n maximum number of redirects. The verbosity option should be handled by\n configuring logging.\n\n ::\n\n import requests\n import logging\n\n # Enabling debugging at http.client level (requests->urllib3->http.client)\n # you will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA.\n # the only thing missing will be the response.body which is not logged.\n try: # for Python 3\n from http.client import HTTPConnection\n except ImportError:\n from httplib import HTTPConnection\n HTTPConnection.debuglevel = 1\n\n logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from requests\n logging.getLogger().setLevel(logging.DEBUG)\n requests_log = logging.getLogger(\"urllib3\")\n requests_log.setLevel(logging.DEBUG)\n requests_log.propagate = True\n\n requests.get('https://httpbin.org/headers')\n\n\n\nLicensing\n~~~~~~~~~\n\nOne key difference that has nothing to do with the API is a change in the\nlicense from the ISC_ license to the `Apache 2.0`_ license. The Apache 2.0\nlicense ensures that contributions to Requests are also covered by the Apache\n2.0 license.\n\n.. _ISC: https://opensource.org/licenses/ISC\n.. _Apache 2.0: https://opensource.org/licenses/Apache-2.0\n\n\nMigrating to 2.x\n----------------\n\n\nCompared with the 1.0 release, there were relatively few backwards\nincompatible changes, but there are still a few issues to be aware of with\nthis major release.\n\nFor more details on the changes in this release including new APIs, links\nto the relevant GitHub issues and some of the bug fixes, read Cory's blog_\non the subject.\n\n.. _blog: https://lukasa.co.uk/2013/09/Requests_20/\n\n\nAPI Changes\n~~~~~~~~~~~\n\n* There were a couple changes to how Requests handles exceptions.\n ``RequestException`` is now a subclass of ``IOError`` rather than\n ``RuntimeError`` as that more accurately categorizes the type of error.\n In addition, an invalid URL escape sequence now raises a subclass of\n ``RequestException`` rather than a ``ValueError``.\n\n ::\n\n requests.get('http://%zz/') # raises requests.exceptions.InvalidURL\n\n Lastly, ``httplib.IncompleteRead`` exceptions caused by incorrect chunked\n encoding will now raise a Requests ``ChunkedEncodingError`` instead.\n\n* The proxy API has changed slightly. The scheme for a proxy URL is now\n required.\n\n ::\n\n proxies = {\n \"http\": \"10.10.1.10:3128\", # use http://10.10.1.10:3128 instead\n }\n\n # In requests 1.x, this was legal, in requests 2.x,\n # this raises requests.exceptions.MissingSchema\n requests.get(\"http://example.org\", proxies=proxies)\n\n\nBehavioural Changes\n~~~~~~~~~~~~~~~~~~~~~~~\n\n* Keys in the ``headers`` dictionary are now native strings on all Python\n versions, i.e. bytestrings on Python 2 and unicode on Python 3. If the\n keys are not native strings (unicode on Python 2 or bytestrings on Python 3)\n they will be converted to the native string type assuming UTF-8 encoding.\n\n* Values in the ``headers`` dictionary should always be strings. 
This has\n been the project's position since before 1.0 but a recent change\n (since version 2.11.0) enforces this more strictly. It's advised to avoid\n passing header values as unicode when possible.\n"},{"id":51,"name":"run-tests.yml","nodeType":"TextFile","path":".github/workflows","text":"name: Tests\n\non: [push, pull_request]\n\njobs:\n build:\n runs-on: ${{ matrix.os }}\n timeout-minutes: 10\n strategy:\n fail-fast: false\n matrix:\n python-version: [2.7, 3.6, 3.7, 3.8, 3.9, 3.10-dev]\n os: [ubuntu-18.04, macOS-latest, windows-latest]\n include:\n # pypy3 on Mac OS currently fails trying to compile\n # brotlipy. Moving pypy3 to only test linux.\n - python-version: pypy3\n os: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v2\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v2\n with:\n python-version: ${{ matrix.python-version }}\n - name: Install dependencies\n run: |\n make\n - name: Run tests\n run: |\n make ci\n"},{"id":52,"name":"SECURITY.md","nodeType":"TextFile","path":".github","text":"# Vulnerability Disclosure\n\nIf you think you have found a potential security vulnerability in\nrequests, please email [Nate](mailto:nate.prewitt@gmail.com)\nand [Seth](mailto:sethmichaellarson@gmail.com) directly.\n**Do not file a public issue.**\n\nOur PGP Key fingerprints are:\n\n- 8722 7E29 AD9C FF5C FAC3 EA6A 44D3 FF97 B80D C864 ([@nateprewitt](https://keybase.io/nateprewitt))\n\n- EDD5 6765 A9D8 4653 CBC8 A134 51B0 6736 1740 F5FC ([@sethmlarson](https://keybase.io/sethmlarson))\n\nYou can also contact us on [Keybase](https://keybase.io) with the\nprofiles above if desired.\n\nIf English is not your first language, please try to describe the\nproblem and its impact to the best of your ability. For greater detail,\nplease use your native language and we will try our best to translate it\nusing online services.\n\nPlease also include the code you used to find the problem and the\nshortest amount of code necessary to reproduce it.\n\nPlease do not disclose this to anyone else. We will retrieve a CVE\nidentifier if necessary and give you full credit under whatever name or\nalias you provide. We will only request an identifier when we have a fix\nand can publish it in a release.\n\nWe will respect your privacy and will only publicize your involvement if\nyou grant us permission.\n\n## Process\n\nThis following information discusses the process the requests project\nfollows in response to vulnerability disclosures. If you are disclosing\na vulnerability, this section of the documentation lets you know how we\nwill respond to your disclosure.\n\n### Timeline\n\nWhen you report an issue, one of the project members will respond to you\nwithin two days *at the outside*. In most cases responses will be\nfaster, usually within 12 hours. This initial response will at the very\nleast confirm receipt of the report.\n\nIf we were able to rapidly reproduce the issue, the initial response\nwill also contain confirmation of the issue. If we are not, we will\noften ask for more information about the reproduction scenario.\n\nOur goal is to have a fix for any vulnerability released within two\nweeks of the initial disclosure. This may potentially involve shipping\nan interim release that simply disables function while a more mature fix\ncan be prepared, but will in the vast majority of cases mean shipping a\ncomplete release as soon as possible.\n\nThroughout the fix process we will keep you up to speed with how the fix\nis progressing. 
Once the fix is prepared, we will notify you that we\nbelieve we have a fix. Often we will ask you to confirm the fix resolves\nthe problem in your environment, especially if we are not confident of\nour reproduction scenario.\n\nAt this point, we will prepare for the release. We will obtain a CVE\nnumber if one is required, providing you with full credit for the\ndiscovery. We will also decide on a planned release date, and let you\nknow when it is. This release date will *always* be on a weekday.\n\nAt this point we will reach out to our major downstream packagers to\nnotify them of an impending security-related patch so they can make\narrangements. In addition, these packagers will be provided with the\nintended patch ahead of time, to ensure that they are able to promptly\nrelease their downstream packages. Currently the list of people we\nactively contact *ahead of a public release* is:\n\n- Jeremy Cline, Red Hat (@jeremycline)\n- Daniele Tricoli, Debian (@eriol)\n\nWe will notify these individuals at least a week ahead of our planned\nrelease date to ensure that they have sufficient time to prepare. If you\nbelieve you should be on this list, please let one of the maintainers\nknow at one of the email addresses at the top of this article.\n\nOn release day, we will push the patch to our public repository, along\nwith an updated changelog that describes the issue and credits you. We\nwill then issue a PyPI release containing the patch.\n\nAt this point, we will publicise the release. This will involve mails to\nmailing lists, Tweets, and all other communication mechanisms available\nto the core team.\n\nWe will also explicitly mention which commits contain the fix to make it\neasier for other distributors and users to easily patch their own\nversions of requests if upgrading is not an option.\n"},{"className":"BufferedIOBase","col":0,"comment":"null","endLoc":93,"id":53,"nodeType":"Class","startLoc":86,"text":"class BufferedIOBase(IOBase):\n raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations.\n def detach(self) -> RawIOBase: ...\n def readinto(self, buffer: WriteableBuffer, /) -> int: ...\n def write(self, buffer: ReadableBuffer, /) -> int: ...\n def readinto1(self, buffer: WriteableBuffer, /) -> int: ...\n def read(self, size: int | None = ..., /) -> bytes: ...\n def read1(self, size: int = ..., /) -> bytes: ..."},{"className":"IOBase","col":0,"comment":"null","endLoc":78,"id":54,"nodeType":"Class","startLoc":53,"text":"class IOBase(metaclass=abc.ABCMeta):\n def __iter__(self) -> Iterator[bytes]: ...\n def __next__(self) -> bytes: ...\n def __enter__(self) -> Self: ...\n def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None\n ) -> None: ...\n def close(self) -> None: ...\n def fileno(self) -> int: ...\n def flush(self) -> None: ...\n def isatty(self) -> bool: ...\n def readable(self) -> bool: ...\n read: Callable[..., Any]\n def readlines(self, hint: int = -1, /) -> list[bytes]: ...\n def seek(self, offset: int, whence: int = ..., /) -> int: ...\n def seekable(self) -> bool: ...\n def tell(self) -> int: ...\n def truncate(self, size: int | None = ..., /) -> int: ...\n def writable(self) -> bool: ...\n write: Callable[..., Any]\n def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ...\n def readline(self, size: int | None = -1, /) -> bytes: ...\n def __del__(self) -> None: ...\n @property\n def closed(self) -> bool: ...\n def _checkClosed(self) -> None: 
... # undocumented"},{"col":4,"comment":"null","endLoc":54,"header":"def __iter__(self) -> Iterator[bytes]","id":55,"name":"__iter__","nodeType":"Function","startLoc":54,"text":"def __iter__(self) -> Iterator[bytes]: ..."},{"col":4,"comment":"null","endLoc":55,"header":"def __next__(self) -> bytes","id":56,"name":"__next__","nodeType":"Function","startLoc":55,"text":"def __next__(self) -> bytes: ..."},{"col":4,"comment":"null","endLoc":56,"header":"def __enter__(self) -> Self","id":57,"name":"__enter__","nodeType":"Function","startLoc":56,"text":"def __enter__(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":59,"header":"def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None\n ) -> None","id":58,"name":"__exit__","nodeType":"Function","startLoc":57,"text":"def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":60,"header":"def close(self) -> None","id":59,"name":"close","nodeType":"Function","startLoc":60,"text":"def close(self) -> None: ..."},{"col":4,"comment":"null","endLoc":61,"header":"def fileno(self) -> int","id":60,"name":"fileno","nodeType":"Function","startLoc":61,"text":"def fileno(self) -> int: ..."},{"col":4,"comment":"null","endLoc":62,"header":"def flush(self) -> None","id":61,"name":"flush","nodeType":"Function","startLoc":62,"text":"def flush(self) -> None: ..."},{"col":4,"comment":"null","endLoc":63,"header":"def isatty(self) -> bool","id":62,"name":"isatty","nodeType":"Function","startLoc":63,"text":"def isatty(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":64,"header":"def readable(self) -> bool","id":63,"name":"readable","nodeType":"Function","startLoc":64,"text":"def readable(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":66,"header":"def readlines(self, hint: int = -1, /) -> list[bytes]","id":64,"name":"readlines","nodeType":"Function","startLoc":66,"text":"def readlines(self, hint: int = -1, /) -> list[bytes]: ..."},{"col":4,"comment":"null","endLoc":67,"header":"def seek(self, offset: int, whence: int = ..., /) -> int","id":65,"name":"seek","nodeType":"Function","startLoc":67,"text":"def seek(self, offset: int, whence: int = ..., /) -> int: ..."},{"col":4,"comment":"null","endLoc":68,"header":"def seekable(self) -> bool","id":66,"name":"seekable","nodeType":"Function","startLoc":68,"text":"def seekable(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":69,"header":"def tell(self) -> int","id":67,"name":"tell","nodeType":"Function","startLoc":69,"text":"def tell(self) -> int: ..."},{"col":4,"comment":"null","endLoc":70,"header":"def truncate(self, size: int | None = ..., /) -> int","id":68,"name":"truncate","nodeType":"Function","startLoc":70,"text":"def truncate(self, size: int | None = ..., /) -> int: ..."},{"col":4,"comment":"null","endLoc":71,"header":"def writable(self) -> bool","id":69,"name":"writable","nodeType":"Function","startLoc":71,"text":"def writable(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":73,"header":"def writelines(self, lines: Iterable[ReadableBuffer], /) -> None","id":70,"name":"writelines","nodeType":"Function","startLoc":73,"text":"def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ..."},{"col":4,"comment":"null","endLoc":74,"header":"def readline(self, size: int | None = -1, /) -> bytes","id":71,"name":"readline","nodeType":"Function","startLoc":74,"text":"def readline(self, size: int | None = -1, /) -> 
bytes: ..."},{"col":4,"comment":"null","endLoc":75,"header":"def __del__(self) -> None","id":72,"name":"__del__","nodeType":"Function","startLoc":75,"text":"def __del__(self) -> None: ..."},{"col":4,"comment":"null","endLoc":77,"header":"@property\n def closed(self) -> bool","id":73,"name":"closed","nodeType":"Function","startLoc":76,"text":"@property\n def closed(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":78,"header":"def _checkClosed(self) -> None","id":74,"name":"_checkClosed","nodeType":"Function","startLoc":78,"text":"def _checkClosed(self) -> None: ... # undocumented"},{"attributeType":"(...) -> Any","col":4,"comment":"null","endLoc":65,"id":75,"name":"read","nodeType":"Attribute","startLoc":65,"text":"read"},{"className":"Mapping","col":0,"comment":"null","endLoc":691,"id":76,"nodeType":"Class","startLoc":677,"text":"class Mapping(Collection[_KT], Generic[_KT, _VT_co]):\n # TODO: We wish the key type could also be covariant, but that doesn't work,\n # see discussion in https://github.com/python/typing/pull/273.\n @abstractmethod\n def __getitem__(self, key: _KT, /) -> _VT_co: ...\n # Mixin methods\n @overload\n def get(self, key: _KT, /) -> _VT_co | None: ...\n @overload\n def get(self, key: _KT, /, default: _VT_co | _T) -> _VT_co | _T: ...\n def items(self) -> ItemsView[_KT, _VT_co]: ...\n def keys(self) -> KeysView[_KT]: ...\n def values(self) -> ValuesView[_VT_co]: ...\n def __contains__(self, key: object, /) -> bool: ...\n def __eq__(self, other: object, /) -> bool: ..."},{"attributeType":"(...) -> Any","col":4,"comment":"null","endLoc":72,"id":77,"name":"write","nodeType":"Attribute","startLoc":72,"text":"write"},{"col":4,"comment":"null","endLoc":88,"header":"def detach(self) -> RawIOBase","id":78,"name":"detach","nodeType":"Function","startLoc":88,"text":"def detach(self) -> RawIOBase: ..."},{"col":4,"comment":"null","endLoc":89,"header":"def readinto(self, buffer: WriteableBuffer, /) -> int","id":79,"name":"readinto","nodeType":"Function","startLoc":89,"text":"def readinto(self, buffer: WriteableBuffer, /) -> int: ..."},{"col":4,"comment":"null","endLoc":90,"header":"def write(self, buffer: ReadableBuffer, /) -> int","id":80,"name":"write","nodeType":"Function","startLoc":90,"text":"def write(self, buffer: ReadableBuffer, /) -> int: ..."},{"col":4,"comment":"null","endLoc":91,"header":"def readinto1(self, buffer: WriteableBuffer, /) -> int","id":81,"name":"readinto1","nodeType":"Function","startLoc":91,"text":"def readinto1(self, buffer: WriteableBuffer, /) -> int: ..."},{"col":4,"comment":"null","endLoc":92,"header":"def read(self, size: int | None = ..., /) -> bytes","id":82,"name":"read","nodeType":"Function","startLoc":92,"text":"def read(self, size: int | None = ..., /) -> bytes: ..."},{"col":4,"comment":"null","endLoc":93,"header":"def read1(self, size: int = ..., /) -> bytes","id":83,"name":"read1","nodeType":"Function","startLoc":93,"text":"def read1(self, size: int = ..., /) -> bytes: ..."},{"attributeType":"RawIOBase","col":4,"comment":"null","endLoc":87,"id":84,"name":"raw","nodeType":"Attribute","startLoc":87,"text":"raw"},{"className":"BinaryIO","col":0,"comment":"null","endLoc":816,"id":85,"nodeType":"Class","startLoc":814,"text":"class BinaryIO(IO[bytes]):\n @abstractmethod\n def __enter__(self) -> BinaryIO: ..."},{"col":4,"comment":"null","endLoc":681,"header":"@abstractmethod\n def __getitem__(self, key: _KT, /) -> _VT_co","id":86,"name":"__getitem__","nodeType":"Function","startLoc":680,"text":"@abstractmethod\n def __getitem__(self, key: _KT, 
/) -> _VT_co: ..."},{"col":4,"comment":"null","endLoc":684,"header":"@overload\n def get(self, key: _KT, /) -> _VT_co | None","id":87,"name":"get","nodeType":"Function","startLoc":683,"text":"@overload\n def get(self, key: _KT, /) -> _VT_co | None: ..."},{"col":4,"comment":"null","endLoc":686,"header":"@overload\n def get(self, key: _KT, /, default: _VT_co | _T) -> _VT_co | _T","id":88,"name":"get","nodeType":"Function","startLoc":685,"text":"@overload\n def get(self, key: _KT, /, default: _VT_co | _T) -> _VT_co | _T: ..."},{"col":4,"comment":"null","endLoc":687,"header":"def items(self) -> ItemsView[_KT, _VT_co]","id":89,"name":"items","nodeType":"Function","startLoc":687,"text":"def items(self) -> ItemsView[_KT, _VT_co]: ..."},{"col":4,"comment":"null","endLoc":688,"header":"def keys(self) -> KeysView[_KT]","id":90,"name":"keys","nodeType":"Function","startLoc":688,"text":"def keys(self) -> KeysView[_KT]: ..."},{"col":4,"comment":"null","endLoc":689,"header":"def values(self) -> ValuesView[_VT_co]","id":91,"name":"values","nodeType":"Function","startLoc":689,"text":"def values(self) -> ValuesView[_VT_co]: ..."},{"col":4,"comment":"null","endLoc":690,"header":"def __contains__(self, key: object, /) -> bool","id":92,"name":"__contains__","nodeType":"Function","startLoc":690,"text":"def __contains__(self, key: object, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":691,"header":"def __eq__(self, other: object, /) -> bool","id":93,"name":"__eq__","nodeType":"Function","startLoc":691,"text":"def __eq__(self, other: object, /) -> bool: ..."},{"className":"MutableMapping","col":0,"comment":"null","endLoc":741,"id":94,"nodeType":"Class","startLoc":693,"text":"class MutableMapping(Mapping[_KT, _VT]):\n @abstractmethod\n def __setitem__(self, key: _KT, value: _VT, /) -> None: ...\n @abstractmethod\n def __delitem__(self, key: _KT, /) -> None: ...\n def clear(self) -> None: ...\n @overload\n def pop(self, key: _KT, /) -> _VT: ...\n @overload\n def pop(self, key: _KT, /, default: _VT) -> _VT: ...\n @overload\n def pop(self, key: _KT, /, default: _T) -> _VT | _T: ...\n def popitem(self) -> tuple[_KT, _VT]: ...\n # This overload should be allowed only if the value type is compatible with None.\n #\n # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences:\n # -- collections.OrderedDict.setdefault\n # -- collections.ChainMap.setdefault\n # -- weakref.WeakKeyDictionary.setdefault\n @overload\n def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: ...\n @overload\n def setdefault(self, key: _KT, default: _VT, /) -> _VT: ...\n # 'update' used to take a Union, but using overloading is better.\n # The second overloaded type here is a bit too general, because\n # Mapping[tuple[_KT, _VT], W] is a subclass of Iterable[tuple[_KT, _VT]],\n # but will always have the behavior of the first overloaded type\n # at runtime, leading to keys of a mix of types _KT and tuple[_KT, _VT].\n # We don't currently have any way of forcing all Mappings to use\n # the first overload, but by using overloading rather than a Union,\n # mypy will commit to using the first overload when the argument is\n # known to be a Mapping with unknown type parameters, which is closer\n # to the behavior we want. 
See mypy issue #1430.\n #\n # Various mapping classes have __ior__ methods that should be kept roughly in line with .update():\n # -- dict.__ior__\n # -- os._Environ.__ior__\n # -- collections.UserDict.__ior__\n # -- collections.ChainMap.__ior__\n # -- peewee.attrdict.__add__\n # -- peewee.attrdict.__iadd__\n # -- weakref.WeakValueDictionary.__ior__\n # -- weakref.WeakKeyDictionary.__ior__\n @overload\n def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /, **kwargs: _VT) -> None: ...\n @overload\n def update(self, m: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ...\n @overload\n def update(self, **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":816,"header":"@abstractmethod\n def __enter__(self) -> BinaryIO","id":95,"name":"__enter__","nodeType":"Function","startLoc":815,"text":"@abstractmethod\n def __enter__(self) -> BinaryIO: ..."},{"col":4,"comment":"null","endLoc":111,"header":"def __init__(self, initial_bytes: ReadableBuffer = ...) -> None","id":96,"name":"__init__","nodeType":"Function","startLoc":111,"text":"def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ..."},{"col":4,"comment":"null","endLoc":116,"header":"def __enter__(self) -> Self","id":97,"name":"__enter__","nodeType":"Function","startLoc":116,"text":"def __enter__(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":117,"header":"def getvalue(self) -> bytes","id":98,"name":"getvalue","nodeType":"Function","startLoc":117,"text":"def getvalue(self) -> bytes: ..."},{"col":4,"comment":"null","endLoc":118,"header":"def getbuffer(self) -> memoryview","id":99,"name":"getbuffer","nodeType":"Function","startLoc":118,"text":"def getbuffer(self) -> memoryview: ..."},{"col":4,"comment":"null","endLoc":119,"header":"def read1(self, size: int | None = -1, /) -> bytes","id":100,"name":"read1","nodeType":"Function","startLoc":119,"text":"def read1(self, size: int | None = -1, /) -> bytes: ..."},{"attributeType":"null","col":4,"comment":"null","endLoc":115,"id":101,"name":"name","nodeType":"Attribute","startLoc":115,"text":"name"},{"fileName":"api.py","filePath":"requests","id":102,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.api\n~~~~~~~~~~~~\n\nThis module implements the Requests API.\n\n:copyright: (c) 2012 by Kenneth Reitz.\n:license: Apache2, see LICENSE for more details.\n\"\"\"\n\nfrom . 
import sessions\n\n\ndef request(method, url, **kwargs):\n \"\"\"Constructs and sends a :class:`Request `.\n\n :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.\n :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.\n ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``\n or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string\n defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers\n to add for the file.\n :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How many seconds to wait for the server to send data\n before giving up, as a float, or a :ref:`(connect timeout, read\n timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``.\n :param stream: (optional) if ``False``, the response content will be immediately downloaded.\n :param cert: (optional) if String, path to ssl client cert file (.pem). 
If Tuple, ('cert', 'key') pair.\n :return: :class:`Response ` object\n :rtype: requests.Response\n\n Usage::\n\n >>> import requests\n >>> req = requests.request('GET', 'https://httpbin.org/get')\n >>> req\n \n \"\"\"\n\n # By using the 'with' statement we are sure the session is closed, thus we\n # avoid leaving sockets open which can trigger a ResourceWarning in some\n # cases, and look like a memory leak in others.\n with sessions.Session() as session:\n return session.request(method=method, url=url, **kwargs)\n\n\ndef get(url, params=None, **kwargs):\n r\"\"\"Sends a GET request.\n\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('get', url, params=params, **kwargs)\n\n\ndef options(url, **kwargs):\n r\"\"\"Sends an OPTIONS request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('options', url, **kwargs)\n\n\ndef head(url, **kwargs):\n r\"\"\"Sends a HEAD request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes. If\n `allow_redirects` is not provided, it will be set to `False` (as\n opposed to the default :meth:`request` behavior).\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return request('head', url, **kwargs)\n\n\ndef post(url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('post', url, data=data, json=json, **kwargs)\n\n\ndef put(url, data=None, **kwargs):\n r\"\"\"Sends a PUT request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('put', url, data=data, **kwargs)\n\n\ndef patch(url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('patch', url, data=data, **kwargs)\n\n\ndef delete(url, **kwargs):\n r\"\"\"Sends a DELETE request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n 
:return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('delete', url, **kwargs)\n"},{"col":4,"comment":"null","endLoc":695,"header":"@abstractmethod\n def __setitem__(self, key: _KT, value: _VT, /) -> None","id":103,"name":"__setitem__","nodeType":"Function","startLoc":694,"text":"@abstractmethod\n def __setitem__(self, key: _KT, value: _VT, /) -> None: ..."},{"col":4,"comment":"null","endLoc":697,"header":"@abstractmethod\n def __delitem__(self, key: _KT, /) -> None","id":104,"name":"__delitem__","nodeType":"Function","startLoc":696,"text":"@abstractmethod\n def __delitem__(self, key: _KT, /) -> None: ..."},{"col":4,"comment":"null","endLoc":698,"header":"def clear(self) -> None","id":105,"name":"clear","nodeType":"Function","startLoc":698,"text":"def clear(self) -> None: ..."},{"col":4,"comment":"null","endLoc":700,"header":"@overload\n def pop(self, key: _KT, /) -> _VT","id":106,"name":"pop","nodeType":"Function","startLoc":699,"text":"@overload\n def pop(self, key: _KT, /) -> _VT: ..."},{"col":4,"comment":"null","endLoc":702,"header":"@overload\n def pop(self, key: _KT, /, default: _VT) -> _VT","id":107,"name":"pop","nodeType":"Function","startLoc":701,"text":"@overload\n def pop(self, key: _KT, /, default: _VT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":704,"header":"@overload\n def pop(self, key: _KT, /, default: _T) -> _VT | _T","id":108,"name":"pop","nodeType":"Function","startLoc":703,"text":"@overload\n def pop(self, key: _KT, /, default: _T) -> _VT | _T: ..."},{"col":4,"comment":"null","endLoc":705,"header":"def popitem(self) -> tuple[_KT, _VT]","id":109,"name":"popitem","nodeType":"Function","startLoc":705,"text":"def popitem(self) -> tuple[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":713,"header":"@overload\n def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None","id":110,"name":"setdefault","nodeType":"Function","startLoc":712,"text":"@overload\n def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: ..."},{"col":4,"comment":"null","endLoc":715,"header":"@overload\n def setdefault(self, key: _KT, default: _VT, /) -> _VT","id":111,"name":"setdefault","nodeType":"Function","startLoc":714,"text":"@overload\n def setdefault(self, key: _KT, default: _VT, /) -> _VT: ..."},{"col":4,"comment":"null","endLoc":737,"header":"@overload\n def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /, **kwargs: _VT) -> None","id":112,"name":"update","nodeType":"Function","startLoc":736,"text":"@overload\n def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /, **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":739,"header":"@overload\n def update(self, m: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None","id":113,"name":"update","nodeType":"Function","startLoc":738,"text":"@overload\n def update(self, m: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":741,"header":"@overload\n def update(self, **kwargs: _VT) -> None","id":114,"name":"update","nodeType":"Function","startLoc":740,"text":"@overload\n def update(self, **kwargs: _VT) -> None: ..."},{"className":"OrderedDict","col":0,"comment":"null","endLoc":398,"id":115,"nodeType":"Class","startLoc":360,"text":"class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):\n def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ...\n def move_to_end(self, key: _KT, last: bool = True) -> None: ...\n def copy(self) -> Self: ...\n def 
__reversed__(self) -> Iterator[_KT]: ...\n def keys(self) -> _odict_keys[_KT, _VT]: ...\n def items(self) -> _odict_items[_KT, _VT]: ...\n def values(self) -> _odict_values[_KT, _VT]: ...\n # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences.\n # Like dict.fromkeys, its true signature is not expressible in the current type system.\n # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.\n @classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ...\n @classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ...\n # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.\n @overload\n def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ...\n @overload\n def setdefault(self, key: _KT, default: _VT) -> _VT: ...\n # Same as dict.pop, but accepts keyword arguments\n @overload\n def pop(self, key: _KT) -> _VT: ...\n @overload\n def pop(self, key: _KT, default: _VT) -> _VT: ...\n @overload\n def pop(self, key: _KT, default: _T) -> _VT | _T: ...\n def __eq__(self, value: object, /) -> bool: ...\n if sys.version_info >= (3, 9):\n @overload\n def __or__(self, value: dict[_KT, _VT], /) -> Self: ...\n @overload\n def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ...\n @overload\n def __ror__(self, value: dict[_KT, _VT], /) -> Self: ...\n @overload\n def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc]"},{"className":"deque","col":0,"comment":"null","endLoc":275,"id":116,"nodeType":"Class","startLoc":238,"text":"class deque(MutableSequence[_T]):\n @property\n def maxlen(self) -> int | None: ...\n @overload\n def __init__(self, *, maxlen: int | None = None) -> None: ...\n @overload\n def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ...\n def append(self, x: _T, /) -> None: ...\n def appendleft(self, x: _T, /) -> None: ...\n def copy(self) -> Self: ...\n def count(self, x: _T, /) -> int: ...\n def extend(self, iterable: Iterable[_T], /) -> None: ...\n def extendleft(self, iterable: Iterable[_T], /) -> None: ...\n def insert(self, i: int, x: _T, /) -> None: ...\n def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ...\n def pop(self) -> _T: ... # type: ignore[override]\n def popleft(self) -> _T: ...\n def remove(self, value: _T, /) -> None: ...\n def rotate(self, n: int = 1, /) -> None: ...\n def __copy__(self) -> Self: ...\n def __len__(self) -> int: ...\n # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores\n def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override]\n def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override]\n def __delitem__(self, key: SupportsIndex, /) -> None: ... 
# type: ignore[override]\n def __contains__(self, key: object, /) -> bool: ...\n def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ...\n def __iadd__(self, value: Iterable[_T], /) -> Self: ...\n def __add__(self, value: Self, /) -> Self: ...\n def __mul__(self, value: int, /) -> Self: ...\n def __imul__(self, value: int, /) -> Self: ...\n def __lt__(self, value: deque[_T], /) -> bool: ...\n def __le__(self, value: deque[_T], /) -> bool: ...\n def __gt__(self, value: deque[_T], /) -> bool: ...\n def __ge__(self, value: deque[_T], /) -> bool: ...\n def __eq__(self, value: object, /) -> bool: ...\n if sys.version_info >= (3, 9):\n def __class_getitem__(cls, item: Any, /) -> GenericAlias: ..."},{"col":0,"comment":"Constructs and sends a :class:`Request `.\n\n :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.\n :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.\n ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``\n or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string\n defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers\n to add for the file.\n :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How many seconds to wait for the server to send data\n before giving up, as a float, or a :ref:`(connect timeout, read\n timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``.\n :param stream: (optional) if ``False``, the response content will be immediately downloaded.\n :param cert: (optional) if String, path to ssl client cert file (.pem). 
If Tuple, ('cert', 'key') pair.\n :return: :class:`Response ` object\n :rtype: requests.Response\n\n Usage::\n\n >>> import requests\n >>> req = requests.request('GET', 'https://httpbin.org/get')\n >>> req\n \n ","endLoc":61,"header":"def request(method, url, **kwargs)","id":117,"name":"request","nodeType":"Function","startLoc":16,"text":"def request(method, url, **kwargs):\n \"\"\"Constructs and sends a :class:`Request `.\n\n :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.\n :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.\n ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``\n or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string\n defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers\n to add for the file.\n :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How many seconds to wait for the server to send data\n before giving up, as a float, or a :ref:`(connect timeout, read\n timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``.\n :param stream: (optional) if ``False``, the response content will be immediately downloaded.\n :param cert: (optional) if String, path to ssl client cert file (.pem). 
If Tuple, ('cert', 'key') pair.\n :return: :class:`Response ` object\n :rtype: requests.Response\n\n Usage::\n\n >>> import requests\n >>> req = requests.request('GET', 'https://httpbin.org/get')\n >>> req\n \n \"\"\"\n\n # By using the 'with' statement we are sure the session is closed, thus we\n # avoid leaving sockets open which can trigger a ResourceWarning in some\n # cases, and look like a memory leak in others.\n with sessions.Session() as session:\n return session.request(method=method, url=url, **kwargs)"},{"col":4,"comment":"null","endLoc":409,"header":"def __init__(self)","id":118,"name":"__init__","nodeType":"Function","startLoc":349,"text":"def __init__(self):\n\n #: A case-insensitive dictionary of headers to be sent on each\n #: :class:`Request ` sent from this\n #: :class:`Session `.\n self.headers = default_headers()\n\n #: Default Authentication tuple or object to attach to\n #: :class:`Request `.\n self.auth = None\n\n #: Dictionary mapping protocol or protocol and host to the URL of the proxy\n #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to\n #: be used on each :class:`Request `.\n self.proxies = {}\n\n #: Event-handling hooks.\n self.hooks = default_hooks()\n\n #: Dictionary of querystring data to attach to each\n #: :class:`Request `. The dictionary values may be lists for\n #: representing multivalued query parameters.\n self.params = {}\n\n #: Stream response content default.\n self.stream = False\n\n #: SSL Verification default.\n #: Defaults to `True`, requiring requests to verify the TLS certificate at the\n #: remote end.\n #: If verify is set to `False`, requests will accept any TLS certificate\n #: presented by the server, and will ignore hostname mismatches and/or\n #: expired certificates, which will make your application vulnerable to\n #: man-in-the-middle (MitM) attacks.\n #: Only set this to `False` for testing.\n self.verify = True\n\n #: SSL client certificate default, if String, path to ssl client\n #: cert file (.pem). If Tuple, ('cert', 'key') pair.\n self.cert = None\n\n #: Maximum number of redirects allowed. If the request exceeds this\n #: limit, a :class:`TooManyRedirects` exception is raised.\n #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is\n #: 30.\n self.max_redirects = DEFAULT_REDIRECT_LIMIT\n\n #: Trust environment settings for proxy configuration, default\n #: authentication and similar.\n self.trust_env = True\n\n #: A CookieJar containing all currently outstanding cookies set on this\n #: session. 
By default it is a\n #: :class:`RequestsCookieJar `, but\n #: may be any other ``cookielib.CookieJar`` compatible object.\n self.cookies = cookiejar_from_dict({})\n\n # Default connection adapters.\n self.adapters = OrderedDict()\n self.mount('https://', HTTPAdapter())\n self.mount('http://', HTTPAdapter())"},{"fileName":"sessions.py","filePath":"requests","id":119,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.sessions\n~~~~~~~~~~~~~~~~~\n\nThis module provides a Session object to manage and persist settings across\nrequests (cookies, auth, proxies).\n\"\"\"\nimport os\nimport sys\nimport time\nfrom datetime import timedelta\nfrom collections import OrderedDict\n\nfrom .auth import _basic_auth_str\nfrom .compat import cookielib, is_py3, urljoin, urlparse, Mapping\nfrom .cookies import (\n cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)\nfrom .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT\nfrom .hooks import default_hooks, dispatch_hook\nfrom ._internal_utils import to_native_string\nfrom .utils import to_key_val_list, default_headers, DEFAULT_PORTS\nfrom .exceptions import (\n TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)\n\nfrom .structures import CaseInsensitiveDict\nfrom .adapters import HTTPAdapter\n\nfrom .utils import (\n requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,\n get_auth_from_url, rewind_body, resolve_proxies\n)\n\nfrom .status_codes import codes\n\n# formerly defined here, reexposed here for backward compatibility\nfrom .models import REDIRECT_STATI\n\n# Preferred clock, based on which one is more accurate on a given system.\nif sys.platform == 'win32':\n try: # Python 3.4+\n preferred_clock = time.perf_counter\n except AttributeError: # Earlier than Python 3.\n preferred_clock = time.clock\nelse:\n preferred_clock = time.time\n\n\ndef merge_setting(request_setting, session_setting, dict_class=OrderedDict):\n \"\"\"Determines appropriate setting for a given request, taking into account\n the explicit setting on that request, and the setting in the session. If a\n setting is a dictionary, they will be merged together using `dict_class`\n \"\"\"\n\n if session_setting is None:\n return request_setting\n\n if request_setting is None:\n return session_setting\n\n # Bypass if not a dictionary (e.g. verify)\n if not (\n isinstance(session_setting, Mapping) and\n isinstance(request_setting, Mapping)\n ):\n return request_setting\n\n merged_setting = dict_class(to_key_val_list(session_setting))\n merged_setting.update(to_key_val_list(request_setting))\n\n # Remove keys that are set to None. Extract keys first to avoid altering\n # the dictionary during iteration.\n none_keys = [k for (k, v) in merged_setting.items() if v is None]\n for key in none_keys:\n del merged_setting[key]\n\n return merged_setting\n\n\ndef merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):\n \"\"\"Properly merges both requests and session hooks.\n\n This is necessary because when request_hooks == {'response': []}, the\n merge breaks Session hooks entirely.\n \"\"\"\n if session_hooks is None or session_hooks.get('response') == []:\n return request_hooks\n\n if request_hooks is None or request_hooks.get('response') == []:\n return session_hooks\n\n return merge_setting(request_hooks, session_hooks, dict_class)\n\n\nclass SessionRedirectMixin(object):\n\n def get_redirect_target(self, resp):\n \"\"\"Receives a Response. 
Returns a redirect URI or ``None``\"\"\"\n # Due to the nature of how requests processes redirects this method will\n # be called at least once upon the original response and at least twice\n # on each subsequent redirect response (if any).\n # If a custom mixin is used to handle this logic, it may be advantageous\n # to cache the redirect location onto the response object as a private\n # attribute.\n if resp.is_redirect:\n location = resp.headers['location']\n # Currently the underlying http module on py3 decode headers\n # in latin1, but empirical evidence suggests that latin1 is very\n # rarely used with non-ASCII characters in HTTP headers.\n # It is more likely to get UTF8 header rather than latin1.\n # This causes incorrect handling of UTF8 encoded location headers.\n # To solve this, we re-encode the location in latin1.\n if is_py3:\n location = location.encode('latin1')\n return to_native_string(location, 'utf8')\n return None\n\n def should_strip_auth(self, old_url, new_url):\n \"\"\"Decide whether Authorization header should be removed when redirecting\"\"\"\n old_parsed = urlparse(old_url)\n new_parsed = urlparse(new_url)\n if old_parsed.hostname != new_parsed.hostname:\n return True\n # Special case: allow http -> https redirect when using the standard\n # ports. This isn't specified by RFC 7235, but is kept to avoid\n # breaking backwards compatibility with older versions of requests\n # that allowed any redirects on the same host.\n if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)\n and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):\n return False\n\n # Handle default port usage corresponding to scheme.\n changed_port = old_parsed.port != new_parsed.port\n changed_scheme = old_parsed.scheme != new_parsed.scheme\n default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)\n if (not changed_scheme and old_parsed.port in default_port\n and new_parsed.port in default_port):\n return False\n\n # Standard case: root URI must match\n return changed_port or changed_scheme\n\n def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):\n \"\"\"Receives a Response. 
Returns a generator of Responses or Requests.\"\"\"\n\n hist = [] # keep track of history\n\n url = self.get_redirect_target(resp)\n previous_fragment = urlparse(req.url).fragment\n while url:\n prepared_request = req.copy()\n\n # Update history and keep track of redirects.\n # resp.history must ignore the original request in this loop\n hist.append(resp)\n resp.history = hist[1:]\n\n try:\n resp.content # Consume socket so it can be released\n except (ChunkedEncodingError, ContentDecodingError, RuntimeError):\n resp.raw.read(decode_content=False)\n\n if len(resp.history) >= self.max_redirects:\n raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)\n\n # Release the connection back into the pool.\n resp.close()\n\n # Handle redirection without scheme (see: RFC 1808 Section 4)\n if url.startswith('//'):\n parsed_rurl = urlparse(resp.url)\n url = ':'.join([to_native_string(parsed_rurl.scheme), url])\n\n # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)\n parsed = urlparse(url)\n if parsed.fragment == '' and previous_fragment:\n parsed = parsed._replace(fragment=previous_fragment)\n elif parsed.fragment:\n previous_fragment = parsed.fragment\n url = parsed.geturl()\n\n # Facilitate relative 'location' headers, as allowed by RFC 7231.\n # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')\n # Compliant with RFC3986, we percent encode the url.\n if not parsed.netloc:\n url = urljoin(resp.url, requote_uri(url))\n else:\n url = requote_uri(url)\n\n prepared_request.url = to_native_string(url)\n\n self.rebuild_method(prepared_request, resp)\n\n # https://github.com/psf/requests/issues/1084\n if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):\n # https://github.com/psf/requests/issues/3490\n purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')\n for header in purged_headers:\n prepared_request.headers.pop(header, None)\n prepared_request.body = None\n\n headers = prepared_request.headers\n headers.pop('Cookie', None)\n\n # Extract any cookies sent on the response to the cookiejar\n # in the new request. Because we've mutated our copied prepared\n # request, use the old one that we haven't yet touched.\n extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)\n merge_cookies(prepared_request._cookies, self.cookies)\n prepared_request.prepare_cookies(prepared_request._cookies)\n\n # Rebuild auth and proxy information.\n proxies = self.rebuild_proxies(prepared_request, proxies)\n self.rebuild_auth(prepared_request, resp)\n\n # A failed tell() sets `_body_position` to `object()`. 
This non-None\n # value ensures `rewindable` will be True, allowing us to raise an\n # UnrewindableBodyError, instead of hanging the connection.\n rewindable = (\n prepared_request._body_position is not None and\n ('Content-Length' in headers or 'Transfer-Encoding' in headers)\n )\n\n # Attempt to rewind consumed file-like object.\n if rewindable:\n rewind_body(prepared_request)\n\n # Override the original request.\n req = prepared_request\n\n if yield_requests:\n yield req\n else:\n\n resp = self.send(\n req,\n stream=stream,\n timeout=timeout,\n verify=verify,\n cert=cert,\n proxies=proxies,\n allow_redirects=False,\n **adapter_kwargs\n )\n\n extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)\n\n # extract redirect url, if any, for the next loop\n url = self.get_redirect_target(resp)\n yield resp\n\n def rebuild_auth(self, prepared_request, response):\n \"\"\"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n \"\"\"\n headers = prepared_request.headers\n url = prepared_request.url\n\n if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):\n # If we get redirected to a new host, we should strip out any\n # authentication headers.\n del headers['Authorization']\n\n # .netrc might have more auth for us on our new host.\n new_auth = get_netrc_auth(url) if self.trust_env else None\n if new_auth is not None:\n prepared_request.prepare_auth(new_auth)\n\n def rebuild_proxies(self, prepared_request, proxies):\n \"\"\"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. 
Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n \"\"\"\n headers = prepared_request.headers\n scheme = urlparse(prepared_request.url).scheme\n new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)\n\n if 'Proxy-Authorization' in headers:\n del headers['Proxy-Authorization']\n\n try:\n username, password = get_auth_from_url(new_proxies[scheme])\n except KeyError:\n username, password = None, None\n\n if username and password:\n headers['Proxy-Authorization'] = _basic_auth_str(username, password)\n\n return new_proxies\n\n def rebuild_method(self, prepared_request, response):\n \"\"\"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n \"\"\"\n method = prepared_request.method\n\n # https://tools.ietf.org/html/rfc7231#section-6.4.4\n if response.status_code == codes.see_other and method != 'HEAD':\n method = 'GET'\n\n # Do what the browsers do, despite standards...\n # First, turn 302s into GETs.\n if response.status_code == codes.found and method != 'HEAD':\n method = 'GET'\n\n # Second, if a POST is responded to with a 301, turn it into a GET.\n # This bizarre behaviour is explained in Issue 1704.\n if response.status_code == codes.moved and method == 'POST':\n method = 'GET'\n\n prepared_request.method = method\n\n\nclass Session(SessionRedirectMixin):\n \"\"\"A Requests session.\n\n Provides cookie persistence, connection-pooling, and configuration.\n\n Basic Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> s.get('https://httpbin.org/get')\n \n\n Or as a context manager::\n\n >>> with requests.Session() as s:\n ... s.get('https://httpbin.org/get')\n \n \"\"\"\n\n __attrs__ = [\n 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',\n 'cert', 'adapters', 'stream', 'trust_env',\n 'max_redirects',\n ]\n\n def __init__(self):\n\n #: A case-insensitive dictionary of headers to be sent on each\n #: :class:`Request ` sent from this\n #: :class:`Session `.\n self.headers = default_headers()\n\n #: Default Authentication tuple or object to attach to\n #: :class:`Request `.\n self.auth = None\n\n #: Dictionary mapping protocol or protocol and host to the URL of the proxy\n #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to\n #: be used on each :class:`Request `.\n self.proxies = {}\n\n #: Event-handling hooks.\n self.hooks = default_hooks()\n\n #: Dictionary of querystring data to attach to each\n #: :class:`Request `. The dictionary values may be lists for\n #: representing multivalued query parameters.\n self.params = {}\n\n #: Stream response content default.\n self.stream = False\n\n #: SSL Verification default.\n #: Defaults to `True`, requiring requests to verify the TLS certificate at the\n #: remote end.\n #: If verify is set to `False`, requests will accept any TLS certificate\n #: presented by the server, and will ignore hostname mismatches and/or\n #: expired certificates, which will make your application vulnerable to\n #: man-in-the-middle (MitM) attacks.\n #: Only set this to `False` for testing.\n self.verify = True\n\n #: SSL client certificate default, if String, path to ssl client\n #: cert file (.pem). If Tuple, ('cert', 'key') pair.\n self.cert = None\n\n #: Maximum number of redirects allowed. 
If the request exceeds this\n #: limit, a :class:`TooManyRedirects` exception is raised.\n #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is\n #: 30.\n self.max_redirects = DEFAULT_REDIRECT_LIMIT\n\n #: Trust environment settings for proxy configuration, default\n #: authentication and similar.\n self.trust_env = True\n\n #: A CookieJar containing all currently outstanding cookies set on this\n #: session. By default it is a\n #: :class:`RequestsCookieJar `, but\n #: may be any other ``cookielib.CookieJar`` compatible object.\n self.cookies = cookiejar_from_dict({})\n\n # Default connection adapters.\n self.adapters = OrderedDict()\n self.mount('https://', HTTPAdapter())\n self.mount('http://', HTTPAdapter())\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def prepare_request(self, request):\n \"\"\"Constructs a :class:`PreparedRequest ` for\n transmission and returns it. The :class:`PreparedRequest` has settings\n merged from the :class:`Request ` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n \"\"\"\n cookies = request.cookies or {}\n\n # Bootstrap CookieJar.\n if not isinstance(cookies, cookielib.CookieJar):\n cookies = cookiejar_from_dict(cookies)\n\n # Merge with session cookies\n merged_cookies = merge_cookies(\n merge_cookies(RequestsCookieJar(), self.cookies), cookies)\n\n # Set environment's basic authentication if not explicitly set.\n auth = request.auth\n if self.trust_env and not auth and not self.auth:\n auth = get_netrc_auth(request.url)\n\n p = PreparedRequest()\n p.prepare(\n method=request.method.upper(),\n url=request.url,\n files=request.files,\n data=request.data,\n json=request.json,\n headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),\n params=merge_setting(request.params, self.params),\n auth=merge_setting(auth, self.auth),\n cookies=merged_cookies,\n hooks=merge_hooks(request.hooks, self.hooks),\n )\n return p\n\n def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None):\n \"\"\"Constructs a :class:`Request `, prepares it and sends it.\n Returns :class:`Response ` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary 
mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n \"\"\"\n # Create the Request.\n req = Request(\n method=method.upper(),\n url=url,\n headers=headers,\n files=files,\n data=data or {},\n json=json,\n params=params or {},\n auth=auth,\n cookies=cookies,\n hooks=hooks,\n )\n prep = self.prepare_request(req)\n\n proxies = proxies or {}\n\n settings = self.merge_environment_settings(\n prep.url, proxies, stream, verify, cert\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': timeout,\n 'allow_redirects': allow_redirects,\n }\n send_kwargs.update(settings)\n resp = self.send(prep, **send_kwargs)\n\n return resp\n\n def get(self, url, **kwargs):\n r\"\"\"Sends a GET request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('GET', url, **kwargs)\n\n def options(self, url, **kwargs):\n r\"\"\"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('OPTIONS', url, **kwargs)\n\n def head(self, url, **kwargs):\n r\"\"\"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return self.request('HEAD', url, **kwargs)\n\n def post(self, url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('POST', url, data=data, json=json, **kwargs)\n\n def put(self, url, data=None, **kwargs):\n r\"\"\"Sends a PUT request. 
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PUT', url, data=data, **kwargs)\n\n def patch(self, url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PATCH', url, data=data, **kwargs)\n\n def delete(self, url, **kwargs):\n r\"\"\"Sends a DELETE request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('DELETE', url, **kwargs)\n\n def send(self, request, **kwargs):\n \"\"\"Send a given PreparedRequest.\n\n :rtype: requests.Response\n \"\"\"\n # Set defaults that the hooks can utilize to ensure they always have\n # the correct parameters to reproduce the previous request.\n kwargs.setdefault('stream', self.stream)\n kwargs.setdefault('verify', self.verify)\n kwargs.setdefault('cert', self.cert)\n if 'proxies' not in kwargs:\n kwargs['proxies'] = resolve_proxies(\n request, self.proxies, self.trust_env\n )\n\n # It's possible that users might accidentally send a Request object.\n # Guard against that specific failure case.\n if isinstance(request, Request):\n raise ValueError('You can only send PreparedRequests.')\n\n # Set up variables needed for resolve_redirects and dispatching of hooks\n allow_redirects = kwargs.pop('allow_redirects', True)\n stream = kwargs.get('stream')\n hooks = request.hooks\n\n # Get the appropriate adapter to use\n adapter = self.get_adapter(url=request.url)\n\n # Start time (approximately) of the request\n start = preferred_clock()\n\n # Send the request\n r = adapter.send(request, **kwargs)\n\n # Total elapsed time of the request (approximately)\n elapsed = preferred_clock() - start\n r.elapsed = timedelta(seconds=elapsed)\n\n # Response manipulation hooks\n r = dispatch_hook('response', hooks, r, **kwargs)\n\n # Persist cookies\n if r.history:\n\n # If the hooks create history then we want those cookies too\n for resp in r.history:\n extract_cookies_to_jar(self.cookies, resp.request, resp.raw)\n\n extract_cookies_to_jar(self.cookies, request, r.raw)\n\n # Resolve redirects if allowed.\n if allow_redirects:\n # Redirect resolving generator.\n gen = self.resolve_redirects(r, request, **kwargs)\n history = [resp for resp in gen]\n else:\n history = []\n\n # Shuffle things around if there's history.\n if history:\n # Insert the first (original) request at the start\n history.insert(0, r)\n # Get the last request made\n r = history.pop()\n r.history = history\n\n # If redirects aren't being followed, store the response on the Request for Response.next().\n if not allow_redirects:\n try:\n r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))\n except StopIteration:\n pass\n\n if not stream:\n r.content\n\n return r\n\n def merge_environment_settings(self, url, proxies, stream, verify, cert):\n \"\"\"\n Check the environment and 
merge it with some settings.\n\n :rtype: dict\n \"\"\"\n # Gather clues from the surrounding environment.\n if self.trust_env:\n # Set environment's proxies.\n no_proxy = proxies.get('no_proxy') if proxies is not None else None\n env_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n for (k, v) in env_proxies.items():\n proxies.setdefault(k, v)\n\n # Look for requests environment configuration and be compatible\n # with cURL.\n if verify is True or verify is None:\n verify = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n\n # Merge all the kwargs.\n proxies = merge_setting(proxies, self.proxies)\n stream = merge_setting(stream, self.stream)\n verify = merge_setting(verify, self.verify)\n cert = merge_setting(cert, self.cert)\n\n return {'verify': verify, 'proxies': proxies, 'stream': stream,\n 'cert': cert}\n\n def get_adapter(self, url):\n \"\"\"\n Returns the appropriate connection adapter for the given URL.\n\n :rtype: requests.adapters.BaseAdapter\n \"\"\"\n for (prefix, adapter) in self.adapters.items():\n\n if url.lower().startswith(prefix.lower()):\n return adapter\n\n # Nothing matches :-/\n raise InvalidSchema(\"No connection adapters were found for {!r}\".format(url))\n\n def close(self):\n \"\"\"Closes all adapters and as such the session\"\"\"\n for v in self.adapters.values():\n v.close()\n\n def mount(self, prefix, adapter):\n \"\"\"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n \"\"\"\n self.adapters[prefix] = adapter\n keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]\n\n for key in keys_to_move:\n self.adapters[key] = self.adapters.pop(key)\n\n def __getstate__(self):\n state = {attr: getattr(self, attr, None) for attr in self.__attrs__}\n return state\n\n def __setstate__(self, state):\n for attr, value in state.items():\n setattr(self, attr, value)\n\n\ndef session():\n \"\"\"\n Returns a :class:`Session` for context-management.\n\n .. deprecated:: 1.0.0\n\n This method has been deprecated since version 1.0.0 and is only kept for\n backwards compatibility. New code should use :class:`~requests.sessions.Session`\n to create a session. 
This may be removed at a future date.\n\n :rtype: Session\n \"\"\"\n return Session()\n"},{"className":"timedelta","col":0,"comment":"null","endLoc":227,"id":120,"nodeType":"Class","startLoc":181,"text":"class timedelta:\n min: ClassVar[timedelta]\n max: ClassVar[timedelta]\n resolution: ClassVar[timedelta]\n def __new__(\n cls,\n days: float = ...,\n seconds: float = ...,\n microseconds: float = ...,\n milliseconds: float = ...,\n minutes: float = ...,\n hours: float = ...,\n weeks: float = ...,\n ) -> Self: ...\n @property\n def days(self) -> int: ...\n @property\n def seconds(self) -> int: ...\n @property\n def microseconds(self) -> int: ...\n def total_seconds(self) -> float: ...\n def __add__(self, value: timedelta, /) -> timedelta: ...\n def __radd__(self, value: timedelta, /) -> timedelta: ...\n def __sub__(self, value: timedelta, /) -> timedelta: ...\n def __rsub__(self, value: timedelta, /) -> timedelta: ...\n def __neg__(self) -> timedelta: ...\n def __pos__(self) -> timedelta: ...\n def __abs__(self) -> timedelta: ...\n def __mul__(self, value: float, /) -> timedelta: ...\n def __rmul__(self, value: float, /) -> timedelta: ...\n @overload\n def __floordiv__(self, value: timedelta, /) -> int: ...\n @overload\n def __floordiv__(self, value: int, /) -> timedelta: ...\n @overload\n def __truediv__(self, value: timedelta, /) -> float: ...\n @overload\n def __truediv__(self, value: float, /) -> timedelta: ...\n def __mod__(self, value: timedelta, /) -> timedelta: ...\n def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: ...\n def __le__(self, value: timedelta, /) -> bool: ...\n def __lt__(self, value: timedelta, /) -> bool: ...\n def __ge__(self, value: timedelta, /) -> bool: ...\n def __gt__(self, value: timedelta, /) -> bool: ...\n def __eq__(self, value: object, /) -> bool: ...\n def __bool__(self) -> bool: ...\n def __hash__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":194,"header":"def __new__(\n cls,\n days: float = ...,\n seconds: float = ...,\n microseconds: float = ...,\n milliseconds: float = ...,\n minutes: float = ...,\n hours: float = ...,\n weeks: float = ...,\n ) -> Self","id":122,"name":"__new__","nodeType":"Function","startLoc":185,"text":"def __new__(\n cls,\n days: float = ...,\n seconds: float = ...,\n microseconds: float = ...,\n milliseconds: float = ...,\n minutes: float = ...,\n hours: float = ...,\n weeks: float = ...,\n ) -> Self: ..."},{"col":4,"comment":"null","endLoc":196,"header":"@property\n def days(self) -> int","id":123,"name":"days","nodeType":"Function","startLoc":195,"text":"@property\n def days(self) -> int: ..."},{"col":4,"comment":"null","endLoc":198,"header":"@property\n def seconds(self) -> int","id":124,"name":"seconds","nodeType":"Function","startLoc":197,"text":"@property\n def seconds(self) -> int: ..."},{"col":4,"comment":"null","endLoc":200,"header":"@property\n def microseconds(self) -> int","id":125,"name":"microseconds","nodeType":"Function","startLoc":199,"text":"@property\n def microseconds(self) -> int: ..."},{"col":4,"comment":"null","endLoc":201,"header":"def total_seconds(self) -> float","id":126,"name":"total_seconds","nodeType":"Function","startLoc":201,"text":"def total_seconds(self) -> float: ..."},{"col":4,"comment":"null","endLoc":202,"header":"def __add__(self, value: timedelta, /) -> timedelta","id":127,"name":"__add__","nodeType":"Function","startLoc":202,"text":"def __add__(self, value: timedelta, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":203,"header":"def __radd__(self, value: 
timedelta, /) -> timedelta","id":128,"name":"__radd__","nodeType":"Function","startLoc":203,"text":"def __radd__(self, value: timedelta, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":204,"header":"def __sub__(self, value: timedelta, /) -> timedelta","id":129,"name":"__sub__","nodeType":"Function","startLoc":204,"text":"def __sub__(self, value: timedelta, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":205,"header":"def __rsub__(self, value: timedelta, /) -> timedelta","id":130,"name":"__rsub__","nodeType":"Function","startLoc":205,"text":"def __rsub__(self, value: timedelta, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":206,"header":"def __neg__(self) -> timedelta","id":131,"name":"__neg__","nodeType":"Function","startLoc":206,"text":"def __neg__(self) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":207,"header":"def __pos__(self) -> timedelta","id":132,"name":"__pos__","nodeType":"Function","startLoc":207,"text":"def __pos__(self) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":208,"header":"def __abs__(self) -> timedelta","id":133,"name":"__abs__","nodeType":"Function","startLoc":208,"text":"def __abs__(self) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":209,"header":"def __mul__(self, value: float, /) -> timedelta","id":134,"name":"__mul__","nodeType":"Function","startLoc":209,"text":"def __mul__(self, value: float, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":210,"header":"def __rmul__(self, value: float, /) -> timedelta","id":135,"name":"__rmul__","nodeType":"Function","startLoc":210,"text":"def __rmul__(self, value: float, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":212,"header":"@overload\n def __floordiv__(self, value: timedelta, /) -> int","id":136,"name":"__floordiv__","nodeType":"Function","startLoc":211,"text":"@overload\n def __floordiv__(self, value: timedelta, /) -> int: ..."},{"col":4,"comment":"null","endLoc":214,"header":"@overload\n def __floordiv__(self, value: int, /) -> timedelta","id":137,"name":"__floordiv__","nodeType":"Function","startLoc":213,"text":"@overload\n def __floordiv__(self, value: int, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":216,"header":"@overload\n def __truediv__(self, value: timedelta, /) -> float","id":138,"name":"__truediv__","nodeType":"Function","startLoc":215,"text":"@overload\n def __truediv__(self, value: timedelta, /) -> float: ..."},{"col":4,"comment":"null","endLoc":218,"header":"@overload\n def __truediv__(self, value: float, /) -> timedelta","id":139,"name":"__truediv__","nodeType":"Function","startLoc":217,"text":"@overload\n def __truediv__(self, value: float, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":219,"header":"def __mod__(self, value: timedelta, /) -> timedelta","id":140,"name":"__mod__","nodeType":"Function","startLoc":219,"text":"def __mod__(self, value: timedelta, /) -> timedelta: ..."},{"col":4,"comment":"null","endLoc":220,"header":"def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]","id":141,"name":"__divmod__","nodeType":"Function","startLoc":220,"text":"def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: ..."},{"col":4,"comment":"null","endLoc":221,"header":"def __le__(self, value: timedelta, /) -> bool","id":142,"name":"__le__","nodeType":"Function","startLoc":221,"text":"def __le__(self, value: timedelta, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":222,"header":"def __lt__(self, value: timedelta, /) -> 
bool","id":143,"name":"__lt__","nodeType":"Function","startLoc":222,"text":"def __lt__(self, value: timedelta, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":223,"header":"def __ge__(self, value: timedelta, /) -> bool","id":144,"name":"__ge__","nodeType":"Function","startLoc":223,"text":"def __ge__(self, value: timedelta, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":224,"header":"def __gt__(self, value: timedelta, /) -> bool","id":145,"name":"__gt__","nodeType":"Function","startLoc":224,"text":"def __gt__(self, value: timedelta, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":225,"header":"def __eq__(self, value: object, /) -> bool","id":146,"name":"__eq__","nodeType":"Function","startLoc":225,"text":"def __eq__(self, value: object, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":226,"header":"def __bool__(self) -> bool","id":147,"name":"__bool__","nodeType":"Function","startLoc":226,"text":"def __bool__(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":227,"header":"def __hash__(self) -> int","id":148,"name":"__hash__","nodeType":"Function","startLoc":227,"text":"def __hash__(self) -> int: ..."},{"attributeType":"timedelta","col":4,"comment":"null","endLoc":182,"id":149,"name":"min","nodeType":"Attribute","startLoc":182,"text":"min"},{"attributeType":"timedelta","col":4,"comment":"null","endLoc":183,"id":150,"name":"max","nodeType":"Attribute","startLoc":183,"text":"max"},{"attributeType":"timedelta","col":4,"comment":"null","endLoc":184,"id":151,"name":"resolution","nodeType":"Attribute","startLoc":184,"text":"resolution"},{"col":4,"comment":"null","endLoc":240,"header":"@property\n def maxlen(self) -> int | None","id":152,"name":"maxlen","nodeType":"Function","startLoc":239,"text":"@property\n def maxlen(self) -> int | None: ..."},{"col":4,"comment":"null","endLoc":242,"header":"@overload\n def __init__(self, *, maxlen: int | None = None) -> None","id":153,"name":"__init__","nodeType":"Function","startLoc":241,"text":"@overload\n def __init__(self, *, maxlen: int | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":244,"header":"@overload\n def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None","id":154,"name":"__init__","nodeType":"Function","startLoc":243,"text":"@overload\n def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":245,"header":"def append(self, x: _T, /) -> None","id":155,"name":"append","nodeType":"Function","startLoc":245,"text":"def append(self, x: _T, /) -> None: ..."},{"col":4,"comment":"null","endLoc":246,"header":"def appendleft(self, x: _T, /) -> None","id":156,"name":"appendleft","nodeType":"Function","startLoc":246,"text":"def appendleft(self, x: _T, /) -> None: ..."},{"col":4,"comment":"null","endLoc":247,"header":"def copy(self) -> Self","id":157,"name":"copy","nodeType":"Function","startLoc":247,"text":"def copy(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":248,"header":"def count(self, x: _T, /) -> int","id":158,"name":"count","nodeType":"Function","startLoc":248,"text":"def count(self, x: _T, /) -> int: ..."},{"col":4,"comment":"null","endLoc":249,"header":"def extend(self, iterable: Iterable[_T], /) -> None","id":159,"name":"extend","nodeType":"Function","startLoc":249,"text":"def extend(self, iterable: Iterable[_T], /) -> None: ..."},{"col":4,"comment":"null","endLoc":250,"header":"def extendleft(self, iterable: Iterable[_T], /) -> None","id":160,"name":"extendleft","nodeType":"Function","startLoc":250,"text":"def 
extendleft(self, iterable: Iterable[_T], /) -> None: ..."},{"col":4,"comment":"null","endLoc":251,"header":"def insert(self, i: int, x: _T, /) -> None","id":161,"name":"insert","nodeType":"Function","startLoc":251,"text":"def insert(self, i: int, x: _T, /) -> None: ..."},{"col":4,"comment":"null","endLoc":252,"header":"def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int","id":162,"name":"index","nodeType":"Function","startLoc":252,"text":"def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ..."},{"col":4,"comment":"null","endLoc":253,"header":"def pop(self) -> _T","id":163,"name":"pop","nodeType":"Function","startLoc":253,"text":"def pop(self) -> _T: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":254,"header":"def popleft(self) -> _T","id":164,"name":"popleft","nodeType":"Function","startLoc":254,"text":"def popleft(self) -> _T: ..."},{"col":4,"comment":"null","endLoc":255,"header":"def remove(self, value: _T, /) -> None","id":165,"name":"remove","nodeType":"Function","startLoc":255,"text":"def remove(self, value: _T, /) -> None: ..."},{"col":4,"comment":"null","endLoc":256,"header":"def rotate(self, n: int = 1, /) -> None","id":166,"name":"rotate","nodeType":"Function","startLoc":256,"text":"def rotate(self, n: int = 1, /) -> None: ..."},{"col":4,"comment":"null","endLoc":257,"header":"def __copy__(self) -> Self","id":167,"name":"__copy__","nodeType":"Function","startLoc":257,"text":"def __copy__(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":258,"header":"def __len__(self) -> int","id":168,"name":"__len__","nodeType":"Function","startLoc":258,"text":"def __len__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":260,"header":"def __getitem__(self, key: SupportsIndex, /) -> _T","id":169,"name":"__getitem__","nodeType":"Function","startLoc":260,"text":"def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":261,"header":"def __setitem__(self, key: SupportsIndex, value: _T, /) -> None","id":170,"name":"__setitem__","nodeType":"Function","startLoc":261,"text":"def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":262,"header":"def __delitem__(self, key: SupportsIndex, /) -> None","id":171,"name":"__delitem__","nodeType":"Function","startLoc":262,"text":"def __delitem__(self, key: SupportsIndex, /) -> None: ... 
# type: ignore[override]"},{"col":4,"comment":"null","endLoc":263,"header":"def __contains__(self, key: object, /) -> bool","id":172,"name":"__contains__","nodeType":"Function","startLoc":263,"text":"def __contains__(self, key: object, /) -> bool: ..."},{"col":4,"comment":"null","endLoc":264,"header":"def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]","id":173,"name":"__reduce__","nodeType":"Function","startLoc":264,"text":"def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ..."},{"col":4,"comment":"null","endLoc":265,"header":"def __iadd__(self, value: Iterable[_T], /) -> Self","id":174,"name":"__iadd__","nodeType":"Function","startLoc":265,"text":"def __iadd__(self, value: Iterable[_T], /) -> Self: ..."},{"col":4,"comment":"null","endLoc":266,"header":"def __add__(self, value: Self, /) -> Self","id":175,"name":"__add__","nodeType":"Function","startLoc":266,"text":"def __add__(self, value: Self, /) -> Self: ..."},{"col":4,"comment":"null","endLoc":267,"header":"def __mul__(self, value: int, /) -> Self","id":176,"name":"__mul__","nodeType":"Function","startLoc":267,"text":"def __mul__(self, value: int, /) -> Self: ..."},{"col":4,"comment":"null","endLoc":268,"header":"def __imul__(self, value: int, /) -> Self","id":177,"name":"__imul__","nodeType":"Function","startLoc":268,"text":"def __imul__(self, value: int, /) -> Self: ..."},{"col":4,"comment":"null","endLoc":269,"header":"def __lt__(self, value: deque[_T], /) -> bool","id":178,"name":"__lt__","nodeType":"Function","startLoc":269,"text":"def __lt__(self, value: deque[_T], /) -> bool: ..."},{"col":4,"comment":"null","endLoc":270,"header":"def __le__(self, value: deque[_T], /) -> bool","id":179,"name":"__le__","nodeType":"Function","startLoc":270,"text":"def __le__(self, value: deque[_T], /) -> bool: ..."},{"col":4,"comment":"null","endLoc":271,"header":"def __gt__(self, value: deque[_T], /) -> bool","id":180,"name":"__gt__","nodeType":"Function","startLoc":271,"text":"def __gt__(self, value: deque[_T], /) -> bool: ..."},{"col":4,"comment":"null","endLoc":272,"header":"def __ge__(self, value: deque[_T], /) -> bool","id":181,"name":"__ge__","nodeType":"Function","startLoc":272,"text":"def __ge__(self, value: deque[_T], /) -> bool: ..."},{"col":4,"comment":"null","endLoc":273,"header":"def __eq__(self, value: object, /) -> bool","id":182,"name":"__eq__","nodeType":"Function","startLoc":273,"text":"def __eq__(self, value: object, /) -> bool: ..."},{"col":8,"comment":"null","endLoc":275,"header":"def __class_getitem__(cls, item: Any, /) -> GenericAlias","id":183,"name":"__class_getitem__","nodeType":"Function","startLoc":275,"text":"def __class_getitem__(cls, item: Any, /) -> GenericAlias: ..."},{"col":4,"comment":"null","endLoc":361,"header":"def popitem(self, last: bool = True) -> tuple[_KT, _VT]","id":184,"name":"popitem","nodeType":"Function","startLoc":361,"text":"def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":362,"header":"def move_to_end(self, key: _KT, last: bool = True) -> None","id":185,"name":"move_to_end","nodeType":"Function","startLoc":362,"text":"def move_to_end(self, key: _KT, last: bool = True) -> None: ..."},{"col":4,"comment":"null","endLoc":363,"header":"def copy(self) -> Self","id":186,"name":"copy","nodeType":"Function","startLoc":363,"text":"def copy(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":364,"header":"def __reversed__(self) -> 
Iterator[_KT]","id":187,"name":"__reversed__","nodeType":"Function","startLoc":364,"text":"def __reversed__(self) -> Iterator[_KT]: ..."},{"col":4,"comment":"null","endLoc":365,"header":"def keys(self) -> _odict_keys[_KT, _VT]","id":188,"name":"keys","nodeType":"Function","startLoc":365,"text":"def keys(self) -> _odict_keys[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":366,"header":"def items(self) -> _odict_items[_KT, _VT]","id":189,"name":"items","nodeType":"Function","startLoc":366,"text":"def items(self) -> _odict_items[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":367,"header":"def values(self) -> _odict_values[_KT, _VT]","id":190,"name":"values","nodeType":"Function","startLoc":367,"text":"def values(self) -> _odict_values[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":373,"header":"@classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]","id":191,"name":"fromkeys","nodeType":"Function","startLoc":371,"text":"@classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ..."},{"col":4,"comment":"null","endLoc":376,"header":"@classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]","id":192,"name":"fromkeys","nodeType":"Function","startLoc":374,"text":"@classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ..."},{"col":4,"comment":"null","endLoc":379,"header":"@overload\n def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None","id":193,"name":"setdefault","nodeType":"Function","startLoc":378,"text":"@overload\n def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ..."},{"col":4,"comment":"null","endLoc":381,"header":"@overload\n def setdefault(self, key: _KT, default: _VT) -> _VT","id":194,"name":"setdefault","nodeType":"Function","startLoc":380,"text":"@overload\n def setdefault(self, key: _KT, default: _VT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":384,"header":"@overload\n def pop(self, key: _KT) -> _VT","id":195,"name":"pop","nodeType":"Function","startLoc":383,"text":"@overload\n def pop(self, key: _KT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":386,"header":"@overload\n def pop(self, key: _KT, default: _VT) -> _VT","id":196,"name":"pop","nodeType":"Function","startLoc":385,"text":"@overload\n def pop(self, key: _KT, default: _VT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":388,"header":"@overload\n def pop(self, key: _KT, default: _T) -> _VT | _T","id":197,"name":"pop","nodeType":"Function","startLoc":387,"text":"@overload\n def pop(self, key: _KT, default: _T) -> _VT | _T: ..."},{"col":4,"comment":"null","endLoc":389,"header":"def __eq__(self, value: object, /) -> bool","id":198,"name":"__eq__","nodeType":"Function","startLoc":389,"text":"def __eq__(self, value: object, /) -> bool: ..."},{"col":8,"comment":"null","endLoc":392,"header":"@overload\n def __or__(self, value: dict[_KT, _VT], /) -> Self","id":199,"name":"__or__","nodeType":"Function","startLoc":391,"text":"@overload\n def __or__(self, value: dict[_KT, _VT], /) -> Self: ..."},{"col":8,"comment":"null","endLoc":394,"header":"@overload\n def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]","id":200,"name":"__or__","nodeType":"Function","startLoc":393,"text":"@overload\n def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: 
..."},{"col":8,"comment":"null","endLoc":396,"header":"@overload\n def __ror__(self, value: dict[_KT, _VT], /) -> Self","id":201,"name":"__ror__","nodeType":"Function","startLoc":395,"text":"@overload\n def __ror__(self, value: dict[_KT, _VT], /) -> Self: ..."},{"col":8,"comment":"null","endLoc":398,"header":"@overload\n def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]","id":202,"name":"__ror__","nodeType":"Function","startLoc":397,"text":"@overload\n def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc]"},{"col":0,"comment":"null","endLoc":179,"header":"@overload\ndef urlparse(url: str, scheme: str = \"\", allow_fragments: bool = True) -> ParseResult","id":203,"name":"urlparse","nodeType":"Function","startLoc":178,"text":"@overload\ndef urlparse(url: str, scheme: str = \"\", allow_fragments: bool = True) -> ParseResult: ..."},{"col":0,"comment":"null","endLoc":183,"header":"@overload\ndef urlparse(\n url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[\"\"] = \"\", allow_fragments: bool = True\n) -> ParseResultBytes","id":204,"name":"urlparse","nodeType":"Function","startLoc":180,"text":"@overload\ndef urlparse(\n url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[\"\"] = \"\", allow_fragments: bool = True\n) -> ParseResultBytes: ..."},{"col":0,"comment":"null","endLoc":201,"header":"@overload\ndef urlunparse(components: Iterable[None]) -> Literal[b\"\"]","id":205,"name":"urlunparse","nodeType":"Function","startLoc":200,"text":"@overload\ndef urlunparse(components: Iterable[None]) -> Literal[b\"\"]: ..."},{"col":0,"comment":"null","endLoc":203,"header":"@overload\ndef urlunparse(components: Iterable[AnyStr | None]) -> AnyStr","id":206,"name":"urlunparse","nodeType":"Function","startLoc":202,"text":"@overload\ndef urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ..."},{"col":0,"comment":"null","endLoc":177,"header":"def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr","id":207,"name":"urljoin","nodeType":"Function","startLoc":177,"text":"def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ..."},{"col":0,"comment":"null","endLoc":185,"header":"@overload\ndef urlsplit(url: str, scheme: str = \"\", allow_fragments: bool = True) -> SplitResult","id":208,"name":"urlsplit","nodeType":"Function","startLoc":184,"text":"@overload\ndef urlsplit(url: str, scheme: str = \"\", allow_fragments: bool = True) -> SplitResult: ..."},{"col":4,"comment":"null","endLoc":197,"header":"@overload\n def urlsplit(\n url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[\"\"] = \"\", allow_fragments: bool = True\n ) -> SplitResultBytes","id":209,"name":"urlsplit","nodeType":"Function","startLoc":194,"text":"@overload\n def urlsplit(\n url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[\"\"] = \"\", allow_fragments: bool = True\n ) -> SplitResultBytes: ..."},{"col":0,"comment":"null","endLoc":157,"header":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool = False,\n safe: str = \"\",\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: Callable[[AnyStr, str, str, str], str] = ...,\n) -> str","id":210,"name":"urlencode","nodeType":"Function","startLoc":149,"text":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool = False,\n safe: str = \"\",\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: 
Callable[[AnyStr, str, str, str], str] = ...,\n) -> str: ..."},{"col":0,"comment":"null","endLoc":166,"header":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool,\n safe: _Q,\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,\n) -> str","id":211,"name":"urlencode","nodeType":"Function","startLoc":158,"text":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool,\n safe: _Q,\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,\n) -> str: ..."},{"col":0,"comment":"null","endLoc":176,"header":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool = False,\n *,\n safe: _Q,\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,\n) -> str","id":212,"name":"urlencode","nodeType":"Function","startLoc":167,"text":"@overload\ndef urlencode(\n query: _QueryType,\n doseq: bool = False,\n *,\n safe: _Q,\n encoding: str | None = None,\n errors: str | None = None,\n quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,\n) -> str: ..."},{"col":0,"comment":"null","endLoc":122,"header":"@overload\ndef quote(string: str, safe: str | Iterable[int] = \"/\", encoding: str | None = None, errors: str | None = None) -> str","id":213,"name":"quote","nodeType":"Function","startLoc":121,"text":"@overload\ndef quote(string: str, safe: str | Iterable[int] = \"/\", encoding: str | None = None, errors: str | None = None) -> str: ..."},{"col":0,"comment":"null","endLoc":124,"header":"@overload\ndef quote(string: bytes | bytearray, safe: str | Iterable[int] = \"/\") -> str","id":214,"name":"quote","nodeType":"Function","startLoc":123,"text":"@overload\ndef quote(string: bytes | bytearray, safe: str | Iterable[int] = \"/\") -> str: ..."},{"col":4,"comment":"null","endLoc":132,"header":"def unquote(string: str | bytes, encoding: str = \"utf-8\", errors: str = \"replace\") -> str","id":215,"name":"unquote","nodeType":"Function","startLoc":132,"text":"def unquote(string: str | bytes, encoding: str = \"utf-8\", errors: str = \"replace\") -> str: ..."},{"col":0,"comment":"null","endLoc":127,"header":"@overload\ndef quote_plus(string: str, safe: str | Iterable[int] = \"\", encoding: str | None = None, errors: str | None = None) -> str","id":216,"name":"quote_plus","nodeType":"Function","startLoc":126,"text":"@overload\ndef quote_plus(string: str, safe: str | Iterable[int] = \"\", encoding: str | None = None, errors: str | None = None) -> str: ..."},{"col":0,"comment":"null","endLoc":129,"header":"@overload\ndef quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = \"\") -> str","id":217,"name":"quote_plus","nodeType":"Function","startLoc":128,"text":"@overload\ndef quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = \"\") -> str: ..."},{"col":0,"comment":"null","endLoc":138,"header":"def unquote_plus(string: str, encoding: str = \"utf-8\", errors: str = \"replace\") -> str","id":218,"name":"unquote_plus","nodeType":"Function","startLoc":138,"text":"def unquote_plus(string: str, encoding: str = \"utf-8\", errors: str = \"replace\") -> str: ..."},{"col":0,"comment":"null","endLoc":140,"header":"@overload\ndef urldefrag(url: str) -> DefragResult","id":219,"name":"urldefrag","nodeType":"Function","startLoc":139,"text":"@overload\ndef urldefrag(url: str) -> DefragResult: ..."},{"col":0,"comment":"null","endLoc":142,"header":"@overload\ndef urldefrag(url: bytes | bytearray | None) -> 
DefragResultBytes","id":220,"name":"urldefrag","nodeType":"Function","startLoc":141,"text":"@overload\ndef urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ..."},{"col":0,"comment":"null","endLoc":82,"header":"def parse_http_list(s: str) -> list[str]","id":221,"name":"parse_http_list","nodeType":"Function","startLoc":82,"text":"def parse_http_list(s: str) -> list[str]: ..."},{"col":0,"comment":"null","endLoc":81,"header":"def getproxies() -> dict[str, str]","id":222,"name":"getproxies","nodeType":"Function","startLoc":81,"text":"def getproxies() -> dict[str, str]: ..."},{"col":4,"comment":"null","endLoc":86,"header":"def proxy_bypass(host: str) -> Any","id":223,"name":"proxy_bypass","nodeType":"Function","startLoc":86,"text":"def proxy_bypass(host: str) -> Any: ... # undocumented"},{"col":4,"comment":"null","endLoc":89,"header":"def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any","id":224,"name":"proxy_bypass","nodeType":"Function","startLoc":89,"text":"def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented"},{"className":"Morsel","col":0,"comment":"null","endLoc":48,"id":225,"nodeType":"Class","startLoc":25,"text":"class Morsel(dict[str, Any], Generic[_T]):\n @property\n def value(self) -> str: ...\n @property\n def coded_value(self) -> _T: ...\n @property\n def key(self) -> str: ...\n def __init__(self) -> None: ...\n def set(self, key: str, val: str, coded_val: _T) -> None: ...\n def setdefault(self, key: str, val: str | None = None) -> str: ...\n # The dict update can also get a keywords argument so this is incompatible\n @overload # type: ignore[override]\n def update(self, values: Mapping[str, str]) -> None: ...\n @overload\n def update(self, values: Iterable[tuple[str, str]]) -> None: ...\n def isReservedKey(self, K: str) -> bool: ...\n def output(self, attrs: list[str] | None = None, header: str = \"Set-Cookie:\") -> str: ...\n __str__ = output\n def js_output(self, attrs: list[str] | None = None) -> str: ...\n def OutputString(self, attrs: list[str] | None = None) -> str: ...\n def __eq__(self, morsel: object) -> bool: ...\n def __setitem__(self, K: str, V: Any) -> None: ...\n if sys.version_info >= (3, 9):\n def __class_getitem__(cls, item: Any, /) -> GenericAlias: ..."},{"col":4,"comment":"null","endLoc":27,"header":"@property\n def value(self) -> str","id":226,"name":"value","nodeType":"Function","startLoc":26,"text":"@property\n def value(self) -> str: ..."},{"col":4,"comment":"null","endLoc":29,"header":"@property\n def coded_value(self) -> _T","id":227,"name":"coded_value","nodeType":"Function","startLoc":28,"text":"@property\n def coded_value(self) -> _T: ..."},{"col":4,"comment":"null","endLoc":31,"header":"@property\n def key(self) -> str","id":228,"name":"key","nodeType":"Function","startLoc":30,"text":"@property\n def key(self) -> str: ..."},{"col":4,"comment":"null","endLoc":32,"header":"def __init__(self) -> None","id":229,"name":"__init__","nodeType":"Function","startLoc":32,"text":"def __init__(self) -> None: ..."},{"col":4,"comment":"null","endLoc":33,"header":"def set(self, key: str, val: str, coded_val: _T) -> None","id":230,"name":"set","nodeType":"Function","startLoc":33,"text":"def set(self, key: str, val: str, coded_val: _T) -> None: ..."},{"col":4,"comment":"null","endLoc":34,"header":"def setdefault(self, key: str, val: str | None = None) -> str","id":231,"name":"setdefault","nodeType":"Function","startLoc":34,"text":"def setdefault(self, key: str, val: str | None = None) -> 
str: ..."},{"col":4,"comment":"null","endLoc":37,"header":"@overload # type: ignore[override]\n def update(self, values: Mapping[str, str]) -> None","id":232,"name":"update","nodeType":"Function","startLoc":36,"text":"@overload # type: ignore[override]\n def update(self, values: Mapping[str, str]) -> None: ..."},{"col":4,"comment":"null","endLoc":39,"header":"@overload\n def update(self, values: Iterable[tuple[str, str]]) -> None","id":233,"name":"update","nodeType":"Function","startLoc":38,"text":"@overload\n def update(self, values: Iterable[tuple[str, str]]) -> None: ..."},{"col":4,"comment":"null","endLoc":40,"header":"def isReservedKey(self, K: str) -> bool","id":234,"name":"isReservedKey","nodeType":"Function","startLoc":40,"text":"def isReservedKey(self, K: str) -> bool: ..."},{"col":4,"comment":"null","endLoc":41,"header":"def output(self, attrs: list[str] | None = None, header: str = \"Set-Cookie:\") -> str","id":235,"name":"output","nodeType":"Function","startLoc":41,"text":"def output(self, attrs: list[str] | None = None, header: str = \"Set-Cookie:\") -> str: ..."},{"col":4,"comment":"null","endLoc":43,"header":"def js_output(self, attrs: list[str] | None = None) -> str","id":236,"name":"js_output","nodeType":"Function","startLoc":43,"text":"def js_output(self, attrs: list[str] | None = None) -> str: ..."},{"col":4,"comment":"null","endLoc":44,"header":"def OutputString(self, attrs: list[str] | None = None) -> str","id":237,"name":"OutputString","nodeType":"Function","startLoc":44,"text":"def OutputString(self, attrs: list[str] | None = None) -> str: ..."},{"col":4,"comment":"null","endLoc":45,"header":"def __eq__(self, morsel: object) -> bool","id":238,"name":"__eq__","nodeType":"Function","startLoc":45,"text":"def __eq__(self, morsel: object) -> bool: ..."},{"col":4,"comment":"null","endLoc":46,"header":"def __setitem__(self, K: str, V: Any) -> None","id":239,"name":"__setitem__","nodeType":"Function","startLoc":46,"text":"def __setitem__(self, K: str, V: Any) -> None: ..."},{"col":8,"comment":"null","endLoc":48,"header":"def __class_getitem__(cls, item: Any, /) -> GenericAlias","id":240,"name":"__class_getitem__","nodeType":"Function","startLoc":48,"text":"def __class_getitem__(cls, item: Any, /) -> GenericAlias: ..."},{"attributeType":"null","col":4,"comment":"null","endLoc":76,"id":241,"name":"builtin_str","nodeType":"Attribute","startLoc":76,"text":"builtin_str"},{"attributeType":"function","col":4,"comment":"null","endLoc":42,"id":242,"name":"__str__","nodeType":"Attribute","startLoc":42,"text":"__str__"},{"attributeType":"null","col":4,"comment":"null","endLoc":54,"id":243,"name":"builtin_str","nodeType":"Attribute","startLoc":54,"text":"builtin_str"},{"attributeType":"null","col":4,"comment":"null","endLoc":77,"id":244,"name":"str","nodeType":"Attribute","startLoc":77,"text":"str"},{"attributeType":"null","col":4,"comment":"null","endLoc":56,"id":245,"name":"str","nodeType":"Attribute","startLoc":56,"text":"str"},{"col":0,"comment":"Given a string object, regardless of type, returns a representation of\n that string in the native string type, encoding and decoding where\n necessary. 
This assumes ASCII unless told otherwise.\n ","endLoc":27,"header":"def to_native_string(string, encoding='ascii')","id":246,"name":"to_native_string","nodeType":"Function","startLoc":14,"text":"def to_native_string(string, encoding='ascii'):\n \"\"\"Given a string object, regardless of type, returns a representation of\n that string in the native string type, encoding and decoding where\n necessary. This assumes ASCII unless told otherwise.\n \"\"\"\n if isinstance(string, builtin_str):\n out = string\n else:\n if is_py2:\n out = string.encode(encoding)\n else:\n out = string.decode(encoding)\n\n return out"},{"className":"StringIO","col":0,"comment":"null","endLoc":225,"id":248,"nodeType":"Class","startLoc":219,"text":"class StringIO(TextIOWrapper):\n def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ...\n # StringIO does not contain a \"name\" field. This workaround is necessary\n # to allow StringIO sub-classes to add this field, as it is defined\n # as a read-only property on IO[].\n name: Any\n def getvalue(self) -> str: ..."},{"className":"TextIOWrapper","col":0,"comment":"null","endLoc":217,"id":249,"nodeType":"Class","startLoc":178,"text":"class TextIOWrapper(TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes\n def __init__(\n self,\n buffer: _BufferT_co,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool = False,\n write_through: bool = False,\n ) -> None: ...\n # Equals the \"buffer\" argument passed in to the constructor.\n @property\n def buffer(self) -> _BufferT_co: ... # type: ignore[override]\n @property\n def closed(self) -> bool: ...\n @property\n def line_buffering(self) -> bool: ...\n @property\n def write_through(self) -> bool: ...\n def reconfigure(\n self,\n *,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool | None = None,\n write_through: bool | None = None,\n ) -> None: ...\n # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy.\n def __enter__(self) -> Self: ...\n def __iter__(self) -> Iterator[str]: ... # type: ignore[override]\n def __next__(self) -> str: ... # type: ignore[override]\n def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override]\n def readline(self, size: int = -1, /) -> str: ... # type: ignore[override]\n def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override]\n # Equals the \"buffer\" argument passed in to the constructor.\n def detach(self) -> _BufferT_co: ... # type: ignore[override]\n # TextIOWrapper's version of seek only supports a limited subset of\n # operations.\n def seek(self, cookie: int, whence: int = 0, /) -> int: ..."},{"className":"TextIOBase","col":0,"comment":"null","endLoc":150,"id":250,"nodeType":"Class","startLoc":139,"text":"class TextIOBase(IOBase):\n encoding: str\n errors: str | None\n newlines: str | tuple[str, ...] | None\n def __iter__(self) -> Iterator[str]: ... # type: ignore[override]\n def __next__(self) -> str: ... # type: ignore[override]\n def detach(self) -> BinaryIO: ...\n def write(self, s: str, /) -> int: ...\n def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override]\n def readline(self, size: int = ..., /) -> str: ... # type: ignore[override]\n def readlines(self, hint: int = -1, /) -> list[str]: ... 
# type: ignore[override]\n def read(self, size: int | None = ..., /) -> str: ..."},{"col":4,"comment":"null","endLoc":143,"header":"def __iter__(self) -> Iterator[str]","id":251,"name":"__iter__","nodeType":"Function","startLoc":143,"text":"def __iter__(self) -> Iterator[str]: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":144,"header":"def __next__(self) -> str","id":252,"name":"__next__","nodeType":"Function","startLoc":144,"text":"def __next__(self) -> str: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":145,"header":"def detach(self) -> BinaryIO","id":253,"name":"detach","nodeType":"Function","startLoc":145,"text":"def detach(self) -> BinaryIO: ..."},{"col":4,"comment":"null","endLoc":146,"header":"def write(self, s: str, /) -> int","id":254,"name":"write","nodeType":"Function","startLoc":146,"text":"def write(self, s: str, /) -> int: ..."},{"col":4,"comment":"null","endLoc":147,"header":"def writelines(self, lines: Iterable[str], /) -> None","id":255,"name":"writelines","nodeType":"Function","startLoc":147,"text":"def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":148,"header":"def readline(self, size: int = ..., /) -> str","id":256,"name":"readline","nodeType":"Function","startLoc":148,"text":"def readline(self, size: int = ..., /) -> str: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":149,"header":"def readlines(self, hint: int = -1, /) -> list[str]","id":257,"name":"readlines","nodeType":"Function","startLoc":149,"text":"def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":150,"header":"def read(self, size: int | None = ..., /) -> str","id":258,"name":"read","nodeType":"Function","startLoc":150,"text":"def read(self, size: int | None = ..., /) -> str: ..."},{"attributeType":"str","col":4,"comment":"null","endLoc":140,"id":259,"name":"encoding","nodeType":"Attribute","startLoc":140,"text":"encoding"},{"attributeType":"str | None","col":4,"comment":"null","endLoc":141,"id":260,"name":"errors","nodeType":"Attribute","startLoc":141,"text":"errors"},{"attributeType":"str | (str, ...) | None","col":4,"comment":"null","endLoc":142,"id":261,"name":"newlines","nodeType":"Attribute","startLoc":142,"text":"newlines"},{"className":"TextIO","col":0,"comment":"null","endLoc":831,"id":262,"nodeType":"Class","startLoc":818,"text":"class TextIO(IO[str]):\n # See comment regarding the @properties in the `IO` class\n @property\n def buffer(self) -> BinaryIO: ...\n @property\n def encoding(self) -> str: ...\n @property\n def errors(self) -> str | None: ...\n @property\n def line_buffering(self) -> int: ... # int on PyPy, bool on CPython\n @property\n def newlines(self) -> Any: ... # None, str or tuple\n @abstractmethod\n def __enter__(self) -> TextIO: ..."},{"col":0,"comment":"Determine if unicode string only contains ASCII characters.\n\n :param str u_string: unicode string to check. Must be unicode\n and not Python 2 `str`.\n :rtype: bool\n ","endLoc":42,"header":"def unicode_is_ascii(u_string)","id":263,"name":"unicode_is_ascii","nodeType":"Function","startLoc":30,"text":"def unicode_is_ascii(u_string):\n \"\"\"Determine if unicode string only contains ASCII characters.\n\n :param str u_string: unicode string to check. 
Must be unicode\n and not Python 2 `str`.\n :rtype: bool\n \"\"\"\n assert isinstance(u_string, str)\n try:\n u_string.encode('ascii')\n return True\n except UnicodeEncodeError:\n return False"},{"col":4,"comment":"null","endLoc":821,"header":"@property\n def buffer(self) -> BinaryIO","id":264,"name":"buffer","nodeType":"Function","startLoc":820,"text":"@property\n def buffer(self) -> BinaryIO: ..."},{"col":4,"comment":"null","endLoc":823,"header":"@property\n def encoding(self) -> str","id":265,"name":"encoding","nodeType":"Function","startLoc":822,"text":"@property\n def encoding(self) -> str: ..."},{"col":4,"comment":"null","endLoc":825,"header":"@property\n def errors(self) -> str | None","id":266,"name":"errors","nodeType":"Function","startLoc":824,"text":"@property\n def errors(self) -> str | None: ..."},{"col":4,"comment":"null","endLoc":827,"header":"@property\n def line_buffering(self) -> int","id":267,"name":"line_buffering","nodeType":"Function","startLoc":826,"text":"@property\n def line_buffering(self) -> int: ... # int on PyPy, bool on CPython"},{"col":4,"comment":"null","endLoc":829,"header":"@property\n def newlines(self) -> Any","id":268,"name":"newlines","nodeType":"Function","startLoc":828,"text":"@property\n def newlines(self) -> Any: ... # None, str or tuple"},{"col":4,"comment":"null","endLoc":831,"header":"@abstractmethod\n def __enter__(self) -> TextIO","id":269,"name":"__enter__","nodeType":"Function","startLoc":830,"text":"@abstractmethod\n def __enter__(self) -> TextIO: ..."},{"col":4,"comment":"null","endLoc":187,"header":"def __init__(\n self,\n buffer: _BufferT_co,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool = False,\n write_through: bool = False,\n ) -> None","id":270,"name":"__init__","nodeType":"Function","startLoc":179,"text":"def __init__(\n self,\n buffer: _BufferT_co,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool = False,\n write_through: bool = False,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":190,"header":"@property\n def buffer(self) -> _BufferT_co","id":271,"name":"buffer","nodeType":"Function","startLoc":189,"text":"@property\n def buffer(self) -> _BufferT_co: ... 
# type: ignore[override]"},{"col":4,"comment":"null","endLoc":192,"header":"@property\n def closed(self) -> bool","id":272,"name":"closed","nodeType":"Function","startLoc":191,"text":"@property\n def closed(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":194,"header":"@property\n def line_buffering(self) -> bool","id":273,"name":"line_buffering","nodeType":"Function","startLoc":193,"text":"@property\n def line_buffering(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":196,"header":"@property\n def write_through(self) -> bool","id":274,"name":"write_through","nodeType":"Function","startLoc":195,"text":"@property\n def write_through(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":205,"header":"def reconfigure(\n self,\n *,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool | None = None,\n write_through: bool | None = None,\n ) -> None","id":275,"name":"reconfigure","nodeType":"Function","startLoc":197,"text":"def reconfigure(\n self,\n *,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n line_buffering: bool | None = None,\n write_through: bool | None = None,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":207,"header":"def __enter__(self) -> Self","id":276,"name":"__enter__","nodeType":"Function","startLoc":207,"text":"def __enter__(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":208,"header":"def __iter__(self) -> Iterator[str]","id":277,"name":"__iter__","nodeType":"Function","startLoc":208,"text":"def __iter__(self) -> Iterator[str]: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":209,"header":"def __next__(self) -> str","id":278,"name":"__next__","nodeType":"Function","startLoc":209,"text":"def __next__(self) -> str: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":210,"header":"def writelines(self, lines: Iterable[str], /) -> None","id":279,"name":"writelines","nodeType":"Function","startLoc":210,"text":"def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":211,"header":"def readline(self, size: int = -1, /) -> str","id":280,"name":"readline","nodeType":"Function","startLoc":211,"text":"def readline(self, size: int = -1, /) -> str: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":212,"header":"def readlines(self, hint: int = -1, /) -> list[str]","id":281,"name":"readlines","nodeType":"Function","startLoc":212,"text":"def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":214,"header":"def detach(self) -> _BufferT_co","id":282,"name":"detach","nodeType":"Function","startLoc":214,"text":"def detach(self) -> _BufferT_co: ... # type: ignore[override]"},{"col":4,"comment":"null","endLoc":217,"header":"def seek(self, cookie: int, whence: int = 0, /) -> int","id":283,"name":"seek","nodeType":"Function","startLoc":217,"text":"def seek(self, cookie: int, whence: int = 0, /) -> int: ..."},{"col":4,"comment":"null","endLoc":220,"header":"def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None","id":284,"name":"__init__","nodeType":"Function","startLoc":220,"text":"def __init__(self, initial_value: str | None = ..., newline: str | None = ...) 
-> None: ..."},{"col":4,"comment":"null","endLoc":225,"header":"def getvalue(self) -> str","id":285,"name":"getvalue","nodeType":"Function","startLoc":225,"text":"def getvalue(self) -> str: ..."},{"attributeType":"null","col":4,"comment":"null","endLoc":224,"id":286,"name":"name","nodeType":"Attribute","startLoc":224,"text":"name"},{"className":"JSONDecodeError","col":0,"comment":"null","endLoc":12,"id":287,"nodeType":"Class","startLoc":6,"text":"class JSONDecodeError(ValueError):\n msg: str\n doc: str\n pos: int\n lineno: int\n colno: int\n def __init__(self, msg: str, doc: str, pos: int) -> None: ..."},{"className":"ValueError","col":0,"comment":"null","endLoc":2011,"id":288,"nodeType":"Class","startLoc":2011,"text":"class ValueError(Exception): ..."},{"className":"Exception","col":0,"comment":"null","endLoc":1947,"id":289,"nodeType":"Class","startLoc":1947,"text":"class Exception(BaseException): ..."},{"className":"BaseException","col":0,"comment":"null","endLoc":1939,"id":290,"nodeType":"Class","startLoc":1927,"text":"class BaseException:\n args: tuple[Any, ...]\n __cause__: BaseException | None\n __context__: BaseException | None\n __suppress_context__: bool\n __traceback__: TracebackType | None\n def __init__(self, *args: object) -> None: ...\n def __setstate__(self, __state: dict[str, Any] | None) -> None: ...\n def with_traceback(self, __tb: TracebackType | None) -> Self: ...\n if sys.version_info >= (3, 11):\n # only present after add_note() is called\n __notes__: list[str]\n def add_note(self, __note: str) -> None: ..."},{"col":4,"comment":"null","endLoc":1933,"header":"def __init__(self, *args: object) -> None","id":291,"name":"__init__","nodeType":"Function","startLoc":1933,"text":"def __init__(self, *args: object) -> None: ..."},{"col":4,"comment":"null","endLoc":1934,"header":"def __setstate__(self, __state: dict[str, Any] | None) -> None","id":292,"name":"__setstate__","nodeType":"Function","startLoc":1934,"text":"def __setstate__(self, __state: dict[str, Any] | None) -> None: ..."},{"col":4,"comment":"null","endLoc":1935,"header":"def with_traceback(self, __tb: TracebackType | None) -> Self","id":293,"name":"with_traceback","nodeType":"Function","startLoc":1935,"text":"def with_traceback(self, __tb: TracebackType | None) -> Self: ..."},{"attributeType":"(Any, ...)","col":4,"comment":"null","endLoc":1928,"id":294,"name":"args","nodeType":"Attribute","startLoc":1928,"text":"args"},{"attributeType":"BaseException | None","col":4,"comment":"null","endLoc":1929,"id":295,"name":"__cause__","nodeType":"Attribute","startLoc":1929,"text":"__cause__"},{"col":0,"comment":"","endLoc":9,"header":"_internal_utils.py#","id":297,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests._internal_utils\n~~~~~~~~~~~~~~\n\nProvides utility functions that are consumed internally by Requests\nwhich depend on extremely few external helpers (such as compat)\n\"\"\""},{"attributeType":"BaseException | None","col":4,"comment":"null","endLoc":1930,"id":298,"name":"__context__","nodeType":"Attribute","startLoc":1930,"text":"__context__"},{"attributeType":"bool","col":4,"comment":"null","endLoc":1931,"id":299,"name":"__suppress_context__","nodeType":"Attribute","startLoc":1931,"text":"__suppress_context__"},{"id":300,"name":".coveragerc","nodeType":"TextFile","path":"","text":"[run]\nomit = requests/packages/*"},{"id":301,"name":"Makefile","nodeType":"TextFile","path":"","text":".PHONY: docs\ninit:\n\tpip install -e .[socks]\n\tpip install -r requirements-dev.txt\ntest:\n\t# This runs all 
of the tests, on both Python 2 and Python 3.\n\ttox -p\nci:\n\tpytest tests --junitxml=report.xml\n\ntest-readme:\n\tpython setup.py check --restructuredtext --strict && ([ $$? -eq 0 ] && echo \"README.rst and HISTORY.rst ok\") || echo \"Invalid markup in README.rst or HISTORY.rst!\"\n\nflake8:\n\tflake8 --ignore=E501,F401,E128,E402,E731,F821 requests\n\ncoverage:\n\tpytest --cov-config .coveragerc --verbose --cov-report term --cov-report xml --cov=requests tests\n\npublish:\n\tpip install 'twine>=1.5.0'\n\tpython setup.py sdist bdist_wheel\n\ttwine upload dist/*\n\trm -fr build dist .egg requests.egg-info\n\ndocs:\n\tcd docs && make html\n\t@echo \"\\033[95m\\n\\nBuild successful! View the docs homepage at docs/_build/html/index.html.\\n\\033[0m\"\n"},{"attributeType":"TracebackType | None","col":4,"comment":"null","endLoc":1932,"id":302,"name":"__traceback__","nodeType":"Attribute","startLoc":1932,"text":"__traceback__"},{"col":4,"comment":"null","endLoc":12,"header":"def __init__(self, msg: str, doc: str, pos: int) -> None","id":303,"name":"__init__","nodeType":"Function","startLoc":12,"text":"def __init__(self, msg: str, doc: str, pos: int) -> None: ..."},{"attributeType":"str","col":4,"comment":"null","endLoc":7,"id":304,"name":"msg","nodeType":"Attribute","startLoc":7,"text":"msg"},{"attributeType":"str","col":4,"comment":"null","endLoc":8,"id":305,"name":"doc","nodeType":"Attribute","startLoc":8,"text":"doc"},{"attributeType":"int","col":4,"comment":"null","endLoc":9,"id":306,"name":"pos","nodeType":"Attribute","startLoc":9,"text":"pos"},{"attributeType":"int","col":4,"comment":"null","endLoc":10,"id":307,"name":"lineno","nodeType":"Attribute","startLoc":10,"text":"lineno"},{"attributeType":"int","col":4,"comment":"null","endLoc":11,"id":308,"name":"colno","nodeType":"Attribute","startLoc":11,"text":"colno"},{"attributeType":"null","col":33,"comment":"null","endLoc":14,"id":309,"name":"chardet","nodeType":"Attribute","startLoc":14,"text":"chardet"},{"attributeType":"_version_info","col":0,"comment":"null","endLoc":23,"id":310,"name":"_ver","nodeType":"Attribute","startLoc":23,"text":"_ver"},{"attributeType":"bool","col":0,"comment":"null","endLoc":29,"id":311,"name":"is_py3","nodeType":"Attribute","startLoc":29,"text":"is_py3"},{"fileName":"__init__.py","filePath":"requests","id":312,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n# __\n# /__) _ _ _ _ _/ _\n# / ( (- (/ (/ (- _) / _)\n# /\n\n\"\"\"\nRequests HTTP Library\n~~~~~~~~~~~~~~~~~~~~~\n\nRequests is an HTTP library, written in Python, for human beings.\nBasic GET usage:\n\n >>> import requests\n >>> r = requests.get('https://www.python.org')\n >>> r.status_code\n 200\n >>> b'Python is a programming language' in r.content\n True\n\n... or POST:\n\n >>> payload = dict(key1='value1', key2='value2')\n >>> r = requests.post('https://httpbin.org/post', data=payload)\n >>> print(r.text)\n {\n ...\n \"form\": {\n \"key1\": \"value1\",\n \"key2\": \"value2\"\n },\n ...\n }\n\nThe other HTTP methods are supported - see `requests.api`. 
Full documentation\nis at .\n\n:copyright: (c) 2017 by Kenneth Reitz.\n:license: Apache 2.0, see LICENSE for more details.\n\"\"\"\n\nimport urllib3\nimport warnings\nfrom .exceptions import RequestsDependencyWarning\n\ntry:\n from charset_normalizer import __version__ as charset_normalizer_version\nexcept ImportError:\n charset_normalizer_version = None\n\ntry:\n from chardet import __version__ as chardet_version\nexcept ImportError:\n chardet_version = None\n\ndef check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):\n urllib3_version = urllib3_version.split('.')\n assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.\n\n # Sometimes, urllib3 only reports its version as 16.1.\n if len(urllib3_version) == 2:\n urllib3_version.append('0')\n\n # Check urllib3 for compatibility.\n major, minor, patch = urllib3_version # noqa: F811\n major, minor, patch = int(major), int(minor), int(patch)\n # urllib3 >= 1.21.1, <= 1.26\n assert major == 1\n assert minor >= 21\n assert minor <= 26\n\n # Check charset_normalizer for compatibility.\n if chardet_version:\n major, minor, patch = chardet_version.split('.')[:3]\n major, minor, patch = int(major), int(minor), int(patch)\n # chardet_version >= 3.0.2, < 5.0.0\n assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)\n elif charset_normalizer_version:\n major, minor, patch = charset_normalizer_version.split('.')[:3]\n major, minor, patch = int(major), int(minor), int(patch)\n # charset_normalizer >= 2.0.0 < 3.0.0\n assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)\n else:\n raise Exception(\"You need either charset_normalizer or chardet installed\")\n\ndef _check_cryptography(cryptography_version):\n # cryptography < 1.3.4\n try:\n cryptography_version = list(map(int, cryptography_version.split('.')))\n except ValueError:\n return\n\n if cryptography_version < [1, 3, 4]:\n warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)\n warnings.warn(warning, RequestsDependencyWarning)\n\n# Check imported dependencies for compatibility.\ntry:\n check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)\nexcept (AssertionError, ValueError):\n warnings.warn(\"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported \"\n \"version!\".format(urllib3.__version__, chardet_version, charset_normalizer_version),\n RequestsDependencyWarning)\n\n# Attempt to enable urllib3's fallback for SNI support\n# if the standard library doesn't support SNI or the\n# 'ssl' library isn't available.\ntry:\n try:\n import ssl\n except ImportError:\n ssl = None\n\n if not getattr(ssl, \"HAS_SNI\", False):\n from urllib3.contrib import pyopenssl\n pyopenssl.inject_into_urllib3()\n\n # Check cryptography version\n from cryptography import __version__ as cryptography_version\n _check_cryptography(cryptography_version)\nexcept ImportError:\n pass\n\n# urllib3's DependencyWarnings should be silenced.\nfrom urllib3.exceptions import DependencyWarning\nwarnings.simplefilter('ignore', DependencyWarning)\n\nfrom .__version__ import __title__, __description__, __url__, __version__\nfrom .__version__ import __build__, __author__, __author_email__, __license__\nfrom .__version__ import __copyright__, __cake__\n\nfrom . import utils\nfrom . 
import packages\nfrom .models import Request, Response, PreparedRequest\nfrom .api import request, get, head, post, patch, put, delete, options\nfrom .sessions import session, Session\nfrom .status_codes import codes\nfrom .exceptions import (\n RequestException, Timeout, URLRequired,\n TooManyRedirects, HTTPError, ConnectionError,\n FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError\n)\n\n# Set default logging handler to avoid \"No handler found\" warnings.\nimport logging\nfrom logging import NullHandler\n\nlogging.getLogger(__name__).addHandler(NullHandler())\n\n# FileModeWarnings go off per the default.\nwarnings.simplefilter('default', FileModeWarning, append=True)\n"},{"className":"RequestsDependencyWarning","col":0,"comment":"An imported dependency doesn't match the expected version range.","endLoc":133,"id":313,"nodeType":"Class","startLoc":132,"text":"class RequestsDependencyWarning(RequestsWarning):\n \"\"\"An imported dependency doesn't match the expected version range.\"\"\""},{"className":"RequestsWarning","col":0,"comment":"Base warning for Requests.","endLoc":125,"id":314,"nodeType":"Class","startLoc":124,"text":"class RequestsWarning(Warning):\n \"\"\"Base warning for Requests.\"\"\""},{"className":"Warning","col":0,"comment":"null","endLoc":2067,"id":315,"nodeType":"Class","startLoc":2067,"text":"class Warning(Exception): ..."},{"attributeType":"null","col":0,"comment":"null","endLoc":5,"id":316,"name":"__title__","nodeType":"Attribute","startLoc":5,"text":"__title__"},{"attributeType":"null","col":0,"comment":"null","endLoc":6,"id":317,"name":"__description__","nodeType":"Attribute","startLoc":6,"text":"__description__"},{"attributeType":"null","col":0,"comment":"null","endLoc":7,"id":318,"name":"__url__","nodeType":"Attribute","startLoc":7,"text":"__url__"},{"attributeType":"bool","col":0,"comment":"null","endLoc":31,"id":319,"name":"has_simplejson","nodeType":"Attribute","startLoc":31,"text":"has_simplejson"},{"attributeType":"null","col":0,"comment":"null","endLoc":8,"id":320,"name":"__version__","nodeType":"Attribute","startLoc":8,"text":"__version__"},{"attributeType":"null","col":25,"comment":"null","endLoc":33,"id":321,"name":"json","nodeType":"Attribute","startLoc":33,"text":"json"},{"attributeType":"null","col":0,"comment":"null","endLoc":9,"id":322,"name":"__build__","nodeType":"Attribute","startLoc":9,"text":"__build__"},{"attributeType":"bool","col":4,"comment":"null","endLoc":34,"id":323,"name":"has_simplejson","nodeType":"Attribute","startLoc":34,"text":"has_simplejson"},{"attributeType":"null","col":0,"comment":"null","endLoc":10,"id":324,"name":"__author__","nodeType":"Attribute","startLoc":10,"text":"__author__"},{"attributeType":"str","col":4,"comment":"null","endLoc":55,"id":325,"name":"bytes","nodeType":"Attribute","startLoc":55,"text":"bytes"},{"attributeType":"null","col":0,"comment":"null","endLoc":11,"id":326,"name":"__author_email__","nodeType":"Attribute","startLoc":11,"text":"__author_email__"},{"attributeType":"null","col":0,"comment":"null","endLoc":12,"id":327,"name":"__license__","nodeType":"Attribute","startLoc":12,"text":"__license__"},{"attributeType":"null","col":0,"comment":"null","endLoc":13,"id":328,"name":"__copyright__","nodeType":"Attribute","startLoc":13,"text":"__copyright__"},{"attributeType":"null","col":0,"comment":"null","endLoc":14,"id":329,"name":"__cake__","nodeType":"Attribute","startLoc":14,"text":"__cake__"},{"attributeType":"null","col":4,"comment":"null","endLoc":57,"id":330,"name":"basestring","nodeType":"A
ttribute","startLoc":57,"text":"basestring"},{"className":"Request","col":0,"comment":"A user-created :class:`Request ` object.\n\n Used to prepare a :class:`PreparedRequest `, which is sent to the server.\n\n :param method: HTTP method to use.\n :param url: URL to send.\n :param headers: dictionary of headers to send.\n :param files: dictionary of {filename: fileobject} files to multipart upload.\n :param data: the body to attach to the request. If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param json: json for the body to attach to the request (if files or data is not specified).\n :param params: URL parameters to append to the URL. If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param auth: Auth handler or (user, pass) tuple.\n :param cookies: dictionary or CookieJar of cookies to attach to this request.\n :param hooks: dictionary of callback hooks, for internal usage.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> req.prepare()\n \n ","endLoc":271,"id":331,"nodeType":"Class","startLoc":200,"text":"class Request(RequestHooksMixin):\n \"\"\"A user-created :class:`Request ` object.\n\n Used to prepare a :class:`PreparedRequest `, which is sent to the server.\n\n :param method: HTTP method to use.\n :param url: URL to send.\n :param headers: dictionary of headers to send.\n :param files: dictionary of {filename: fileobject} files to multipart upload.\n :param data: the body to attach to the request. If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param json: json for the body to attach to the request (if files or data is not specified).\n :param params: URL parameters to append to the URL. 
If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param auth: Auth handler or (user, pass) tuple.\n :param cookies: dictionary or CookieJar of cookies to attach to this request.\n :param hooks: dictionary of callback hooks, for internal usage.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> req.prepare()\n \n \"\"\"\n\n def __init__(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n\n # Default empty dicts for dict params.\n data = [] if data is None else data\n files = [] if files is None else files\n headers = {} if headers is None else headers\n params = {} if params is None else params\n hooks = {} if hooks is None else hooks\n\n self.hooks = default_hooks()\n for (k, v) in list(hooks.items()):\n self.register_hook(event=k, hook=v)\n\n self.method = method\n self.url = url\n self.headers = headers\n self.files = files\n self.data = data\n self.json = json\n self.params = params\n self.auth = auth\n self.cookies = cookies\n\n def __repr__(self):\n return '' % (self.method)\n\n def prepare(self):\n \"\"\"Constructs a :class:`PreparedRequest ` for transmission and returns it.\"\"\"\n p = PreparedRequest()\n p.prepare(\n method=self.method,\n url=self.url,\n headers=self.headers,\n files=self.files,\n data=self.data,\n json=self.json,\n params=self.params,\n auth=self.auth,\n cookies=self.cookies,\n hooks=self.hooks,\n )\n return p"},{"attributeType":"(int, Any, float)","col":4,"comment":"null","endLoc":58,"id":332,"name":"numeric_types","nodeType":"Attribute","startLoc":58,"text":"numeric_types"},{"className":"RequestHooksMixin","col":0,"comment":"null","endLoc":197,"id":333,"nodeType":"Class","startLoc":176,"text":"class RequestHooksMixin(object):\n def register_hook(self, event, hook):\n \"\"\"Properly register a hook.\"\"\"\n\n if event not in self.hooks:\n raise ValueError('Unsupported event specified, with event name \"%s\"' % (event))\n\n if isinstance(hook, Callable):\n self.hooks[event].append(hook)\n elif hasattr(hook, '__iter__'):\n self.hooks[event].extend(h for h in hook if isinstance(h, Callable))\n\n def deregister_hook(self, event, hook):\n \"\"\"Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n \"\"\"\n\n try:\n self.hooks[event].remove(hook)\n return True\n except ValueError:\n return False"},{"className":"object","col":0,"comment":"null","endLoc":127,"id":334,"nodeType":"Class","startLoc":90,"text":"class object:\n __doc__: str | None\n __dict__: dict[str, Any]\n __module__: str\n __annotations__: dict[str, Any]\n @property\n def __class__(self) -> type[Self]: ...\n # Ignore errors about type mismatch between property getter and setter\n @__class__.setter\n def __class__(self, __type: type[object]) -> None: ... # noqa: F811\n def __init__(self) -> None: ...\n def __new__(cls) -> Self: ...\n # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers.\n # Overriding them in subclasses has different semantics, even if the override has an identical signature.\n def __setattr__(self, __name: str, __value: Any) -> None: ...\n def __delattr__(self, __name: str) -> None: ...\n def __eq__(self, __value: object) -> bool: ...\n def __ne__(self, __value: object) -> bool: ...\n def __str__(self) -> str: ... # noqa: Y029\n def __repr__(self) -> str: ... 
# noqa: Y029\n def __hash__(self) -> int: ...\n def __format__(self, __format_spec: str) -> str: ...\n def __getattribute__(self, __name: str) -> Any: ...\n def __sizeof__(self) -> int: ...\n # return type of pickle methods is rather hard to express in the current type system\n # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__\n def __reduce__(self) -> str | tuple[Any, ...]: ...\n if sys.version_info >= (3, 8):\n def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ...\n else:\n def __reduce_ex__(self, __protocol: int) -> str | tuple[Any, ...]: ...\n if sys.version_info >= (3, 11):\n def __getstate__(self) -> object: ...\n\n def __dir__(self) -> Iterable[str]: ...\n def __init_subclass__(cls) -> None: ...\n @classmethod\n def __subclasshook__(cls, __subclass: type) -> bool: ..."},{"col":4,"comment":"null","endLoc":96,"header":"@property\n def __class__(self) -> type[Self]","id":335,"name":"__class__","nodeType":"Function","startLoc":95,"text":"@property\n def __class__(self) -> type[Self]: ..."},{"col":4,"comment":"null","endLoc":99,"header":"@__class__.setter\n def __class__(self, __type: type[object]) -> None","id":336,"name":"__class__","nodeType":"Function","startLoc":98,"text":"@__class__.setter\n def __class__(self, __type: type[object]) -> None: ... # noqa: F811"},{"col":4,"comment":"null","endLoc":100,"header":"def __init__(self) -> None","id":337,"name":"__init__","nodeType":"Function","startLoc":100,"text":"def __init__(self) -> None: ..."},{"col":4,"comment":"null","endLoc":101,"header":"def __new__(cls) -> Self","id":338,"name":"__new__","nodeType":"Function","startLoc":101,"text":"def __new__(cls) -> Self: ..."},{"col":4,"comment":"null","endLoc":104,"header":"def __setattr__(self, __name: str, __value: Any) -> None","id":339,"name":"__setattr__","nodeType":"Function","startLoc":104,"text":"def __setattr__(self, __name: str, __value: Any) -> None: ..."},{"col":4,"comment":"null","endLoc":105,"header":"def __delattr__(self, __name: str) -> None","id":340,"name":"__delattr__","nodeType":"Function","startLoc":105,"text":"def __delattr__(self, __name: str) -> None: ..."},{"col":4,"comment":"null","endLoc":106,"header":"def __eq__(self, __value: object) -> bool","id":341,"name":"__eq__","nodeType":"Function","startLoc":106,"text":"def __eq__(self, __value: object) -> bool: ..."},{"col":4,"comment":"null","endLoc":107,"header":"def __ne__(self, __value: object) -> bool","id":342,"name":"__ne__","nodeType":"Function","startLoc":107,"text":"def __ne__(self, __value: object) -> bool: ..."},{"col":4,"comment":"null","endLoc":108,"header":"def __str__(self) -> str","id":343,"name":"__str__","nodeType":"Function","startLoc":108,"text":"def __str__(self) -> str: ... # noqa: Y029"},{"col":4,"comment":"null","endLoc":109,"header":"def __repr__(self) -> str","id":344,"name":"__repr__","nodeType":"Function","startLoc":109,"text":"def __repr__(self) -> str: ... 
# noqa: Y029"},{"col":4,"comment":"null","endLoc":110,"header":"def __hash__(self) -> int","id":345,"name":"__hash__","nodeType":"Function","startLoc":110,"text":"def __hash__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":111,"header":"def __format__(self, __format_spec: str) -> str","id":346,"name":"__format__","nodeType":"Function","startLoc":111,"text":"def __format__(self, __format_spec: str) -> str: ..."},{"col":4,"comment":"null","endLoc":112,"header":"def __getattribute__(self, __name: str) -> Any","id":347,"name":"__getattribute__","nodeType":"Function","startLoc":112,"text":"def __getattribute__(self, __name: str) -> Any: ..."},{"col":4,"comment":"null","endLoc":113,"header":"def __sizeof__(self) -> int","id":348,"name":"__sizeof__","nodeType":"Function","startLoc":113,"text":"def __sizeof__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":116,"header":"def __reduce__(self) -> str | tuple[Any, ...]","id":349,"name":"__reduce__","nodeType":"Function","startLoc":116,"text":"def __reduce__(self) -> str | tuple[Any, ...]: ..."},{"col":8,"comment":"null","endLoc":118,"header":"def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]","id":350,"name":"__reduce_ex__","nodeType":"Function","startLoc":118,"text":"def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ..."},{"col":4,"comment":"null","endLoc":124,"header":"def __dir__(self) -> Iterable[str]","id":351,"name":"__dir__","nodeType":"Function","startLoc":124,"text":"def __dir__(self) -> Iterable[str]: ..."},{"col":4,"comment":"null","endLoc":125,"header":"def __init_subclass__(cls) -> None","id":352,"name":"__init_subclass__","nodeType":"Function","startLoc":125,"text":"def __init_subclass__(cls) -> None: ..."},{"col":4,"comment":"null","endLoc":127,"header":"@classmethod\n def __subclasshook__(cls, __subclass: type) -> bool","id":353,"name":"__subclasshook__","nodeType":"Function","startLoc":126,"text":"@classmethod\n def __subclasshook__(cls, __subclass: type) -> bool: ..."},{"attributeType":"str | None","col":4,"comment":"null","endLoc":91,"id":354,"name":"__doc__","nodeType":"Attribute","startLoc":91,"text":"__doc__"},{"attributeType":"(int, Any)","col":4,"comment":"null","endLoc":59,"id":355,"name":"integer_types","nodeType":"Attribute","startLoc":59,"text":"integer_types"},{"attributeType":"ValueError","col":4,"comment":"null","endLoc":60,"id":356,"name":"JSONDecodeError","nodeType":"Attribute","startLoc":60,"text":"JSONDecodeError"},{"attributeType":"null","col":34,"comment":"null","endLoc":65,"id":357,"name":"cookielib","nodeType":"Attribute","startLoc":65,"text":"cookielib"},{"attributeType":"bytes","col":4,"comment":"null","endLoc":78,"id":358,"name":"bytes","nodeType":"Attribute","startLoc":78,"text":"bytes"},{"attributeType":"dict","col":4,"comment":"null","endLoc":92,"id":359,"name":"__dict__","nodeType":"Attribute","startLoc":92,"text":"__dict__"},{"attributeType":"(str, bytes)","col":4,"comment":"null","endLoc":79,"id":360,"name":"basestring","nodeType":"Attribute","startLoc":79,"text":"basestring"},{"attributeType":"str","col":4,"comment":"null","endLoc":93,"id":361,"name":"__module__","nodeType":"Attribute","startLoc":93,"text":"__module__"},{"attributeType":"dict","col":4,"comment":"null","endLoc":94,"id":362,"name":"__annotations__","nodeType":"Attribute","startLoc":94,"text":"__annotations__"},{"col":4,"comment":"Properly register a hook.","endLoc":186,"header":"def register_hook(self, event, 
hook)","id":363,"name":"register_hook","nodeType":"Function","startLoc":177,"text":"def register_hook(self, event, hook):\n \"\"\"Properly register a hook.\"\"\"\n\n if event not in self.hooks:\n raise ValueError('Unsupported event specified, with event name \"%s\"' % (event))\n\n if isinstance(hook, Callable):\n self.hooks[event].append(hook)\n elif hasattr(hook, '__iter__'):\n self.hooks[event].extend(h for h in hook if isinstance(h, Callable))"},{"attributeType":"(int, float)","col":4,"comment":"null","endLoc":80,"id":364,"name":"numeric_types","nodeType":"Attribute","startLoc":80,"text":"numeric_types"},{"attributeType":"(int)","col":4,"comment":"null","endLoc":81,"id":365,"name":"integer_types","nodeType":"Attribute","startLoc":81,"text":"integer_types"},{"col":0,"comment":"","endLoc":9,"header":"compat.py#","id":366,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.compat\n~~~~~~~~~~~~~~~\n\nThis module handles import compatibility issues between Python 2 and\nPython 3.\n\"\"\"\n\ntry:\n import chardet\nexcept ImportError:\n import charset_normalizer as chardet\n\n_ver = sys.version_info\n\nis_py2 = (_ver[0] == 2)\n\nis_py3 = (_ver[0] == 3)\n\nhas_simplejson = False\n\ntry:\n import simplejson as json\n has_simplejson = True\nexcept ImportError:\n import json\n\nif is_py2:\n from urllib import (\n quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,\n proxy_bypass, proxy_bypass_environment, getproxies_environment)\n from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag\n from urllib2 import parse_http_list\n import cookielib\n from Cookie import Morsel\n from StringIO import StringIO\n # Keep OrderedDict for backwards compatibility.\n from collections import Callable, Mapping, MutableMapping, OrderedDict\n\n builtin_str = str\n bytes = str\n str = unicode\n basestring = basestring\n numeric_types = (int, long, float)\n integer_types = (int, long)\n JSONDecodeError = ValueError\n\nelif is_py3:\n from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag\n from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment\n from http import cookiejar as cookielib\n from http.cookies import Morsel\n from io import StringIO\n # Keep OrderedDict for backwards compatibility.\n from collections import OrderedDict\n from collections.abc import Callable, Mapping, MutableMapping\n if has_simplejson:\n from simplejson import JSONDecodeError\n else:\n from json import JSONDecodeError\n\n builtin_str = str\n str = str\n bytes = bytes\n basestring = (str, bytes)\n numeric_types = (int, float)\n integer_types = (int,)"},{"col":4,"comment":"null","endLoc":1067,"header":"@overload\n def __init__(self) -> None","id":367,"name":"__init__","nodeType":"Function","startLoc":1066,"text":"@overload\n def __init__(self) -> None: ..."},{"col":4,"comment":"null","endLoc":1069,"header":"@overload\n def __init__(self: dict[str, _VT], **kwargs: _VT) -> None","id":368,"name":"__init__","nodeType":"Function","startLoc":1068,"text":"@overload\n def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":1071,"header":"@overload\n def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None","id":369,"name":"__init__","nodeType":"Function","startLoc":1070,"text":"@overload\n def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: 
..."},{"col":4,"comment":"null","endLoc":1073,"header":"@overload\n def __init__(self: dict[str, _VT], __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None","id":370,"name":"__init__","nodeType":"Function","startLoc":1072,"text":"@overload\n def __init__(self: dict[str, _VT], __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":1075,"header":"@overload\n def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None","id":371,"name":"__init__","nodeType":"Function","startLoc":1074,"text":"@overload\n def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ..."},{"col":4,"comment":"null","endLoc":1077,"header":"@overload\n def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None","id":372,"name":"__init__","nodeType":"Function","startLoc":1076,"text":"@overload\n def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":1081,"header":"@overload\n def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None","id":373,"name":"__init__","nodeType":"Function","startLoc":1080,"text":"@overload\n def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ..."},{"col":4,"comment":"null","endLoc":1083,"header":"@overload\n def __init__(self: dict[bytes, bytes], __iterable: Iterable[list[bytes]]) -> None","id":374,"name":"__init__","nodeType":"Function","startLoc":1082,"text":"@overload\n def __init__(self: dict[bytes, bytes], __iterable: Iterable[list[bytes]]) -> None: ..."},{"id":375,"name":"install.rst","nodeType":"TextFile","path":"docs/user","text":".. _install:\n\nInstallation of Requests\n========================\n\nThis part of the documentation covers the installation of Requests.\nThe first step to using any software package is getting it properly installed.\n\n\n$ python -m pip install requests\n--------------------------------\n\nTo install Requests, simply run this simple command in your terminal of choice::\n\n $ python -m pip install requests\n\nGet the Source Code\n-------------------\n\nRequests is actively developed on GitHub, where the code is\n`always available `_.\n\nYou can either clone the public repository::\n\n $ git clone git://github.com/psf/requests.git\n\nOr, download the `tarball `_::\n\n $ curl -OL https://github.com/psf/requests/tarball/main\n # optionally, zipball is also available (for Windows users).\n\nOnce you have a copy of the source, you can embed it in your own Python\npackage, or install it into your site-packages easily::\n\n $ cd requests\n $ python -m pip install .\n"},{"fileName":"__version__.py","filePath":"requests","id":376,"nodeType":"File","text":"# .-. .-. .-. . . .-. .-. .-. .-.\n# |( |- |.| | | |- `-. 
| `-.\n# ' ' `-' `-`.`-' `-' `-' ' `-'\n\n__title__ = 'requests'\n__description__ = 'Python HTTP for Humans.'\n__url__ = 'https://requests.readthedocs.io'\n__version__ = '2.26.0'\n__build__ = 0x022600\n__author__ = 'Kenneth Reitz'\n__author_email__ = 'me@kennethreitz.org'\n__license__ = 'Apache 2.0'\n__copyright__ = 'Copyright 2020 Kenneth Reitz'\n__cake__ = u'\\u2728 \\U0001f370 \\u2728'\n"},{"col":0,"comment":"","endLoc":5,"header":"__version__.py#","id":377,"name":"","nodeType":"Function","startLoc":5,"text":"__title__ = 'requests'\n\n__description__ = 'Python HTTP for Humans.'\n\n__url__ = 'https://requests.readthedocs.io'\n\n__version__ = '2.26.0'\n\n__build__ = 0x022600\n\n__author__ = 'Kenneth Reitz'\n\n__author_email__ = 'me@kennethreitz.org'\n\n__license__ = 'Apache 2.0'\n\n__copyright__ = 'Copyright 2020 Kenneth Reitz'\n\n__cake__ = u'\\u2728 \\U0001f370 \\u2728'"},{"id":378,"name":"make.bat","nodeType":"TextFile","path":"docs","text":"@ECHO OFF\n\nREM Command file for Sphinx documentation\n\nif \"%SPHINXBUILD%\" == \"\" (\n\tset SPHINXBUILD=sphinx-build\n)\nset BUILDDIR=_build\nset ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .\nset I18NSPHINXOPTS=%SPHINXOPTS% .\nif NOT \"%PAPER%\" == \"\" (\n\tset ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%\n\tset I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%\n)\n\nif \"%1\" == \"\" goto help\n\nif \"%1\" == \"help\" (\n\t:help\n\techo.Please use `make ^` where ^ is one of\n\techo. html to make standalone HTML files\n\techo. dirhtml to make HTML files named index.html in directories\n\techo. singlehtml to make a single large HTML file\n\techo. pickle to make pickle files\n\techo. json to make JSON files\n\techo. htmlhelp to make HTML files and a HTML help project\n\techo. qthelp to make HTML files and a qthelp project\n\techo. devhelp to make HTML files and a Devhelp project\n\techo. epub to make an epub\n\techo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter\n\techo. text to make text files\n\techo. man to make manual pages\n\techo. texinfo to make Texinfo files\n\techo. gettext to make PO message catalogs\n\techo. changes to make an overview over all changed/added/deprecated items\n\techo. xml to make Docutils-native XML files\n\techo. pseudoxml to make pseudoxml-XML files for display purposes\n\techo. linkcheck to check all external links for integrity\n\techo. doctest to run all doctests embedded in the documentation if enabled\n\techo. coverage to run coverage check of the documentation if enabled\n\tgoto end\n)\n\nif \"%1\" == \"clean\" (\n\tfor /d %%i in (%BUILDDIR%\\*) do rmdir /q /s %%i\n\tdel /q /s %BUILDDIR%\\*\n\tgoto end\n)\n\n\nREM Check if sphinx-build is available and fallback to Python version if any\n%SPHINXBUILD% 1>NUL 2>NUL\nif errorlevel 9009 goto sphinx_python\ngoto sphinx_ok\n\n:sphinx_python\n\nset SPHINXBUILD=python -m sphinx.__init__\n%SPHINXBUILD% 2> nul\nif errorlevel 9009 (\n\techo.\n\techo.The 'sphinx-build' command was not found. Make sure you have Sphinx\n\techo.installed, then set the SPHINXBUILD environment variable to point\n\techo.to the full path of the 'sphinx-build' executable. Alternatively you\n\techo.may add the Sphinx directory to PATH.\n\techo.\n\techo.If you don't have Sphinx installed, grab it from\n\techo.http://sphinx-doc.org/\n\texit /b 1\n)\n\n:sphinx_ok\n\n\nif \"%1\" == \"html\" (\n\t%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. 
The HTML pages are in %BUILDDIR%/html.\n\tgoto end\n)\n\nif \"%1\" == \"dirhtml\" (\n\t%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.\n\tgoto end\n)\n\nif \"%1\" == \"singlehtml\" (\n\t%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.\n\tgoto end\n)\n\nif \"%1\" == \"pickle\" (\n\t%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished; now you can process the pickle files.\n\tgoto end\n)\n\nif \"%1\" == \"json\" (\n\t%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished; now you can process the JSON files.\n\tgoto end\n)\n\nif \"%1\" == \"htmlhelp\" (\n\t%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished; now you can run HTML Help Workshop with the ^\n.hhp project file in %BUILDDIR%/htmlhelp.\n\tgoto end\n)\n\nif \"%1\" == \"qthelp\" (\n\t%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished; now you can run \"qcollectiongenerator\" with the ^\n.qhcp project file in %BUILDDIR%/qthelp, like this:\n\techo.^> qcollectiongenerator %BUILDDIR%\\qthelp\\Requests.qhcp\n\techo.To view the help file:\n\techo.^> assistant -collectionFile %BUILDDIR%\\qthelp\\Requests.ghc\n\tgoto end\n)\n\nif \"%1\" == \"devhelp\" (\n\t%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished.\n\tgoto end\n)\n\nif \"%1\" == \"epub\" (\n\t%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The epub file is in %BUILDDIR%/epub.\n\tgoto end\n)\n\nif \"%1\" == \"latex\" (\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished; the LaTeX files are in %BUILDDIR%/latex.\n\tgoto end\n)\n\nif \"%1\" == \"latexpdf\" (\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\n\tcd %BUILDDIR%/latex\n\tmake all-pdf\n\tcd %~dp0\n\techo.\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\n\tgoto end\n)\n\nif \"%1\" == \"latexpdfja\" (\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\n\tcd %BUILDDIR%/latex\n\tmake all-pdf-ja\n\tcd %~dp0\n\techo.\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\n\tgoto end\n)\n\nif \"%1\" == \"text\" (\n\t%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The text files are in %BUILDDIR%/text.\n\tgoto end\n)\n\nif \"%1\" == \"man\" (\n\t%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The manual pages are in %BUILDDIR%/man.\n\tgoto end\n)\n\nif \"%1\" == \"texinfo\" (\n\t%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.\n\tgoto end\n)\n\nif \"%1\" == \"gettext\" (\n\t%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. 
The message catalogs are in %BUILDDIR%/locale.\n\tgoto end\n)\n\nif \"%1\" == \"changes\" (\n\t%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.The overview file is in %BUILDDIR%/changes.\n\tgoto end\n)\n\nif \"%1\" == \"linkcheck\" (\n\t%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Link check complete; look for any errors in the above output ^\nor in %BUILDDIR%/linkcheck/output.txt.\n\tgoto end\n)\n\nif \"%1\" == \"doctest\" (\n\t%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Testing of doctests in the sources finished, look at the ^\nresults in %BUILDDIR%/doctest/output.txt.\n\tgoto end\n)\n\nif \"%1\" == \"coverage\" (\n\t%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Testing of coverage in the sources finished, look at the ^\nresults in %BUILDDIR%/coverage/python.txt.\n\tgoto end\n)\n\nif \"%1\" == \"xml\" (\n\t%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The XML files are in %BUILDDIR%/xml.\n\tgoto end\n)\n\nif \"%1\" == \"pseudoxml\" (\n\t%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml\n\tif errorlevel 1 exit /b 1\n\techo.\n\techo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.\n\tgoto end\n)\n\n:end\n"},{"fileName":"setup.py","filePath":"","id":379,"nodeType":"File","text":"#!/usr/bin/env python\n# Learn more: https://github.com/kennethreitz/setup.py\nimport os\nimport sys\n\nfrom codecs import open\n\nfrom setuptools import setup\nfrom setuptools.command.test import test as TestCommand\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nclass PyTest(TestCommand):\n user_options = [('pytest-args=', 'a', \"Arguments to pass into py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n try:\n from multiprocessing import cpu_count\n self.pytest_args = ['-n', str(cpu_count()), '--boxed']\n except (ImportError, NotImplementedError):\n self.pytest_args = ['-n', '1', '--boxed']\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n self.test_args = []\n self.test_suite = True\n\n def run_tests(self):\n import pytest\n\n errno = pytest.main(self.pytest_args)\n sys.exit(errno)\n\n# 'setup.py publish' shortcut.\nif sys.argv[-1] == 'publish':\n os.system('python setup.py sdist bdist_wheel')\n os.system('twine upload dist/*')\n sys.exit()\n\npackages = ['requests']\n\nrequires = [\n 'charset_normalizer~=2.0.0; python_version >= \"3\"',\n 'chardet>=3.0.2,<5; python_version < \"3\"',\n 'idna>=2.5,<3; python_version < \"3\"',\n 'idna>=2.5,<4; python_version >= \"3\"',\n 'urllib3>=1.21.1,<1.27',\n 'certifi>=2017.4.17'\n\n]\ntest_requirements = [\n 'pytest-httpbin==0.0.7',\n 'pytest-cov',\n 'pytest-mock',\n 'pytest-xdist',\n 'PySocks>=1.5.6, !=1.5.7',\n 'pytest>=3'\n]\n\nabout = {}\nwith open(os.path.join(here, 'requests', '__version__.py'), 'r', 'utf-8') as f:\n exec(f.read(), about)\n\nwith open('README.md', 'r', 'utf-8') as f:\n readme = f.read()\n\nsetup(\n name=about['__title__'],\n version=about['__version__'],\n description=about['__description__'],\n long_description=readme,\n long_description_content_type='text/markdown',\n author=about['__author__'],\n author_email=about['__author_email__'],\n url=about['__url__'],\n packages=packages,\n package_data={'': ['LICENSE', 'NOTICE']},\n 
package_dir={'requests': 'requests'},\n include_package_data=True,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*\",\n install_requires=requires,\n license=about['__license__'],\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy'\n ],\n cmdclass={'test': PyTest},\n tests_require=test_requirements,\n extras_require={\n 'security': [],\n 'socks': ['PySocks>=1.5.6, !=1.5.7'],\n 'socks:sys_platform == \"win32\" and python_version == \"2.7\"': ['win_inet_pton'],\n 'use_chardet_on_py3': ['chardet>=3.0.2,<5']\n },\n project_urls={\n 'Documentation': 'https://requests.readthedocs.io',\n 'Source': 'https://github.com/psf/requests',\n },\n)\n"},{"col":4,"comment":"Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n ","endLoc":197,"header":"def deregister_hook(self, event, hook)","id":381,"name":"deregister_hook","nodeType":"Function","startLoc":188,"text":"def deregister_hook(self, event, hook):\n \"\"\"Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n \"\"\"\n\n try:\n self.hooks[event].remove(hook)\n return True\n except ValueError:\n return False"},{"col":4,"comment":"null","endLoc":251,"header":"def __init__(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None)","id":382,"name":"__init__","nodeType":"Function","startLoc":228,"text":"def __init__(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n\n # Default empty dicts for dict params.\n data = [] if data is None else data\n files = [] if files is None else files\n headers = {} if headers is None else headers\n params = {} if params is None else params\n hooks = {} if hooks is None else hooks\n\n self.hooks = default_hooks()\n for (k, v) in list(hooks.items()):\n self.register_hook(event=k, hook=v)\n\n self.method = method\n self.url = url\n self.headers = headers\n self.files = files\n self.data = data\n self.json = json\n self.params = params\n self.auth = auth\n self.cookies = cookies"},{"col":0,"comment":"null","endLoc":133,"header":"def open(\n filename: str, mode: str = \"r\", encoding: str | None = None, errors: str = \"strict\", buffering: int = -1\n) -> StreamReaderWriter","id":383,"name":"open","nodeType":"Function","startLoc":131,"text":"def open(\n filename: str, mode: str = \"r\", encoding: str | None = None, errors: str = \"strict\", buffering: int = -1\n) -> StreamReaderWriter: ..."},{"col":0,"comment":"null","endLoc":18,"header":"def default_hooks()","id":384,"name":"default_hooks","nodeType":"Function","startLoc":17,"text":"def default_hooks():\n return {event: [] for event in HOOKS}"},{"className":"RequestsCookieJar","col":0,"comment":"Compatibility class; is a cookielib.CookieJar, but exposes a dict\n 
interface.\n\n This is the CookieJar we create by default for requests and sessions that\n don't specify one, since some clients may expect response.cookies and\n session.cookies to support dict operations.\n\n Requests does not use the dict interface internally; it's just for\n compatibility with external client code. All requests code should work\n out of the box with externally provided instances of ``CookieJar``, e.g.\n ``LWPCookieJar`` and ``FileCookieJar``.\n\n Unlike a regular CookieJar, this class is pickleable.\n\n .. warning:: dictionary operations that are normally O(1) may be O(n).\n ","endLoc":423,"id":385,"nodeType":"Class","startLoc":171,"text":"class RequestsCookieJar(cookielib.CookieJar, MutableMapping):\n \"\"\"Compatibility class; is a cookielib.CookieJar, but exposes a dict\n interface.\n\n This is the CookieJar we create by default for requests and sessions that\n don't specify one, since some clients may expect response.cookies and\n session.cookies to support dict operations.\n\n Requests does not use the dict interface internally; it's just for\n compatibility with external client code. All requests code should work\n out of the box with externally provided instances of ``CookieJar``, e.g.\n ``LWPCookieJar`` and ``FileCookieJar``.\n\n Unlike a regular CookieJar, this class is pickleable.\n\n .. warning:: dictionary operations that are normally O(1) may be O(n).\n \"\"\"\n\n def get(self, name, default=None, domain=None, path=None):\n \"\"\"Dict-like get() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n\n .. warning:: operation is O(n), not O(1).\n \"\"\"\n try:\n return self._find_no_duplicates(name, domain, path)\n except KeyError:\n return default\n\n def set(self, name, value, **kwargs):\n \"\"\"Dict-like set() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n \"\"\"\n # support client code that unsets cookies by assignment of a None value:\n if value is None:\n remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))\n return\n\n if isinstance(value, Morsel):\n c = morsel_to_cookie(value)\n else:\n c = create_cookie(name, value, **kwargs)\n self.set_cookie(c)\n return c\n\n def iterkeys(self):\n \"\"\"Dict-like iterkeys() that returns an iterator of names of cookies\n from the jar.\n\n .. seealso:: itervalues() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name\n\n def keys(self):\n \"\"\"Dict-like keys() that returns a list of names of cookies from the\n jar.\n\n .. seealso:: values() and items().\n \"\"\"\n return list(self.iterkeys())\n\n def itervalues(self):\n \"\"\"Dict-like itervalues() that returns an iterator of values of cookies\n from the jar.\n\n .. seealso:: iterkeys() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.value\n\n def values(self):\n \"\"\"Dict-like values() that returns a list of values of cookies from the\n jar.\n\n .. seealso:: keys() and items().\n \"\"\"\n return list(self.itervalues())\n\n def iteritems(self):\n \"\"\"Dict-like iteritems() that returns an iterator of name-value tuples\n from the jar.\n\n .. seealso:: iterkeys() and itervalues().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name, cookie.value\n\n def items(self):\n \"\"\"Dict-like items() that returns a list of name-value tuples from the\n jar. 
Allows client-code to call ``dict(RequestsCookieJar)`` and get a\n vanilla python dict of key value pairs.\n\n .. seealso:: keys() and values().\n \"\"\"\n return list(self.iteritems())\n\n def list_domains(self):\n \"\"\"Utility method to list all the domains in the jar.\"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain not in domains:\n domains.append(cookie.domain)\n return domains\n\n def list_paths(self):\n \"\"\"Utility method to list all the paths in the jar.\"\"\"\n paths = []\n for cookie in iter(self):\n if cookie.path not in paths:\n paths.append(cookie.path)\n return paths\n\n def multiple_domains(self):\n \"\"\"Returns True if there are multiple domains in the jar.\n Returns False otherwise.\n\n :rtype: bool\n \"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain is not None and cookie.domain in domains:\n return True\n domains.append(cookie.domain)\n return False # there is only one domain in jar\n\n def get_dict(self, domain=None, path=None):\n \"\"\"Takes as an argument an optional domain and path and returns a plain\n old Python dict of name-value pairs of cookies that meet the\n requirements.\n\n :rtype: dict\n \"\"\"\n dictionary = {}\n for cookie in iter(self):\n if (\n (domain is None or cookie.domain == domain) and\n (path is None or cookie.path == path)\n ):\n dictionary[cookie.name] = cookie.value\n return dictionary\n\n def __contains__(self, name):\n try:\n return super(RequestsCookieJar, self).__contains__(name)\n except CookieConflictError:\n return True\n\n def __getitem__(self, name):\n \"\"\"Dict-like __getitem__() for compatibility with client code. Throws\n exception if there are more than one cookie with name. In that case,\n use the more explicit get() method instead.\n\n .. warning:: operation is O(n), not O(1).\n \"\"\"\n return self._find_no_duplicates(name)\n\n def __setitem__(self, name, value):\n \"\"\"Dict-like __setitem__ for compatibility with client code. Throws\n exception if there is already a cookie of that name in the jar. In that\n case, use the more explicit set() method instead.\n \"\"\"\n self.set(name, value)\n\n def __delitem__(self, name):\n \"\"\"Deletes a cookie given a name. 
Wraps ``cookielib.CookieJar``'s\n ``remove_cookie_by_name()``.\n \"\"\"\n remove_cookie_by_name(self, name)\n\n def set_cookie(self, cookie, *args, **kwargs):\n if hasattr(cookie.value, 'startswith') and cookie.value.startswith('\"') and cookie.value.endswith('\"'):\n cookie.value = cookie.value.replace('\\\\\"', '')\n return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)\n\n def update(self, other):\n \"\"\"Updates this jar with cookies from another CookieJar or dict-like\"\"\"\n if isinstance(other, cookielib.CookieJar):\n for cookie in other:\n self.set_cookie(copy.copy(cookie))\n else:\n super(RequestsCookieJar, self).update(other)\n\n def _find(self, name, domain=None, path=None):\n \"\"\"Requests uses this method internally to get cookie values.\n\n If there are conflicting cookies, _find arbitrarily chooses one.\n See _find_no_duplicates if you want an exception thrown if there are\n conflicting cookies.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :return: cookie.value\n \"\"\"\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n return cookie.value\n\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))\n\n def _find_no_duplicates(self, name, domain=None, path=None):\n \"\"\"Both ``__get_item__`` and ``get`` call this function: it's never\n used elsewhere in Requests.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :raises KeyError: if cookie is not found\n :raises CookieConflictError: if there are multiple cookies\n that match name and optionally domain and path\n :return: cookie.value\n \"\"\"\n toReturn = None\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n if toReturn is not None: # if there are multiple cookies that meet passed in criteria\n raise CookieConflictError('There are multiple cookies with name, %r' % (name))\n toReturn = cookie.value # we will eventually return this as long as no cookie conflict\n\n if toReturn:\n return toReturn\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))\n\n def __getstate__(self):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n state = self.__dict__.copy()\n # remove the unpickleable RLock object\n state.pop('_cookies_lock')\n return state\n\n def __setstate__(self, state):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n self.__dict__.update(state)\n if '_cookies_lock' not in self.__dict__:\n self._cookies_lock = threading.RLock()\n\n def copy(self):\n \"\"\"Return a copy of this RequestsCookieJar.\"\"\"\n new_cj = RequestsCookieJar()\n new_cj.set_policy(self.get_policy())\n new_cj.update(self)\n return new_cj\n\n def get_policy(self):\n \"\"\"Return the CookiePolicy instance used.\"\"\"\n return self._policy"},{"col":0,"comment":"Returns a Basic Auth string.","endLoc":69,"header":"def _basic_auth_str(username, password)","id":386,"name":"_basic_auth_str","nodeType":"Function","startLoc":28,"text":"def _basic_auth_str(username, password):\n \"\"\"Returns a Basic Auth string.\"\"\"\n\n # \"I want us to put a big-ol' comment on top of it that\n # says that this behaviour is dumb but we need to 
preserve\n # it because people are relying on it.\"\n # - Lukasa\n #\n # These are here solely to maintain backwards compatibility\n # for things like ints. This will be removed in 3.0.0.\n if not isinstance(username, basestring):\n warnings.warn(\n \"Non-string usernames will no longer be supported in Requests \"\n \"3.0.0. Please convert the object you've passed in ({!r}) to \"\n \"a string or bytes object in the near future to avoid \"\n \"problems.\".format(username),\n category=DeprecationWarning,\n )\n username = str(username)\n\n if not isinstance(password, basestring):\n warnings.warn(\n \"Non-string passwords will no longer be supported in Requests \"\n \"3.0.0. Please convert the object you've passed in ({!r}) to \"\n \"a string or bytes object in the near future to avoid \"\n \"problems.\".format(type(password)),\n category=DeprecationWarning,\n )\n password = str(password)\n # -- End Removal --\n\n if isinstance(username, str):\n username = username.encode('latin1')\n\n if isinstance(password, str):\n password = password.encode('latin1')\n\n authstr = 'Basic ' + to_native_string(\n b64encode(b':'.join((username, password))).strip()\n )\n\n return authstr"},{"col":0,"comment":"\n :rtype: requests.structures.CaseInsensitiveDict\n ","endLoc":882,"header":"def default_headers()","id":387,"name":"default_headers","nodeType":"Function","startLoc":873,"text":"def default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })"},{"col":0,"comment":"\n Return a string representing the default user agent.\n\n :rtype: str\n ","endLoc":870,"header":"def default_user_agent(name=\"python-requests\")","id":388,"name":"default_user_agent","nodeType":"Function","startLoc":864,"text":"def default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)"},{"col":4,"comment":"null","endLoc":1088,"header":"def items(self) -> dict_items[_KT, _VT]","id":389,"name":"items","nodeType":"Function","startLoc":1088,"text":"def items(self) -> dict_items[_KT, _VT]: ..."},{"col":0,"comment":"null","endLoc":74,"header":"def setup(\n *,\n name: str = ...,\n version: str = ...,\n description: str = ...,\n long_description: str = ...,\n long_description_content_type: str = ...,\n author: str = ...,\n author_email: str = ...,\n maintainer: str = ...,\n maintainer_email: str = ...,\n url: str = ...,\n download_url: str = ...,\n packages: list[str] = ...,\n py_modules: list[str] = ...,\n scripts: list[str] = ...,\n ext_modules: Sequence[Extension] = ...,\n classifiers: list[str] = ...,\n distclass: type[Distribution] = ...,\n script_name: str = ...,\n script_args: list[str] = ...,\n options: Mapping[str, Incomplete] = ...,\n license: str = ...,\n keywords: list[str] | str = ...,\n platforms: list[str] | str = ...,\n cmdclass: Mapping[str, type[_Command]] = ...,\n data_files: list[tuple[str, list[str]]] = ...,\n package_dir: Mapping[str, str] = ...,\n obsoletes: list[str] = ...,\n provides: list[str] = ...,\n requires: list[str] = ...,\n command_packages: list[str] = ...,\n command_options: Mapping[str, Mapping[str, tuple[Incomplete, Incomplete]]] = ...,\n package_data: Mapping[str, list[str]] = ...,\n include_package_data: bool = ...,\n libraries: list[str] = ...,\n headers: list[str] = ...,\n ext_package: str = ...,\n 
include_dirs: list[str] = ...,\n password: str = ...,\n fullname: str = ...,\n **attrs,\n) -> Distribution","id":390,"name":"setup","nodeType":"Function","startLoc":32,"text":"def setup(\n *,\n name: str = ...,\n version: str = ...,\n description: str = ...,\n long_description: str = ...,\n long_description_content_type: str = ...,\n author: str = ...,\n author_email: str = ...,\n maintainer: str = ...,\n maintainer_email: str = ...,\n url: str = ...,\n download_url: str = ...,\n packages: list[str] = ...,\n py_modules: list[str] = ...,\n scripts: list[str] = ...,\n ext_modules: Sequence[Extension] = ...,\n classifiers: list[str] = ...,\n distclass: type[Distribution] = ...,\n script_name: str = ...,\n script_args: list[str] = ...,\n options: Mapping[str, Incomplete] = ...,\n license: str = ...,\n keywords: list[str] | str = ...,\n platforms: list[str] | str = ...,\n cmdclass: Mapping[str, type[_Command]] = ...,\n data_files: list[tuple[str, list[str]]] = ...,\n package_dir: Mapping[str, str] = ...,\n obsoletes: list[str] = ...,\n provides: list[str] = ...,\n requires: list[str] = ...,\n command_packages: list[str] = ...,\n command_options: Mapping[str, Mapping[str, tuple[Incomplete, Incomplete]]] = ...,\n package_data: Mapping[str, list[str]] = ...,\n include_package_data: bool = ...,\n libraries: list[str] = ...,\n headers: list[str] = ...,\n ext_package: str = ...,\n include_dirs: list[str] = ...,\n password: str = ...,\n fullname: str = ...,\n **attrs,\n) -> Distribution: ..."},{"className":"CookieJar","col":0,"comment":"null","endLoc":42,"id":391,"nodeType":"Class","startLoc":24,"text":"class CookieJar(Iterable[Cookie]):\n non_word_re: ClassVar[Pattern[str]] # undocumented\n quote_re: ClassVar[Pattern[str]] # undocumented\n strict_domain_re: ClassVar[Pattern[str]] # undocumented\n domain_re: ClassVar[Pattern[str]] # undocumented\n dots_re: ClassVar[Pattern[str]] # undocumented\n magic_re: ClassVar[Pattern[str]] # undocumented\n def __init__(self, policy: CookiePolicy | None = None) -> None: ...\n def add_cookie_header(self, request: Request) -> None: ...\n def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ...\n def set_policy(self, policy: CookiePolicy) -> None: ...\n def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ...\n def set_cookie(self, cookie: Cookie) -> None: ...\n def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ...\n def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ...\n def clear_session_cookies(self) -> None: ...\n def clear_expired_cookies(self) -> None: ... 
# undocumented\n def __iter__(self) -> Iterator[Cookie]: ...\n def __len__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":46,"header":"def __init__(self, data=None, **kwargs)","id":392,"name":"__init__","nodeType":"Function","startLoc":42,"text":"def __init__(self, data=None, **kwargs):\n self._store = OrderedDict()\n if data is None:\n data = {}\n self.update(data, **kwargs)"},{"fileName":"test_structures.py","filePath":"tests","id":393,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport pytest\n\nfrom requests.structures import CaseInsensitiveDict, LookupDict\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"CaseInsensitiveDict instance with \"Accept\" header.\"\"\"\n self.case_insensitive_dict = CaseInsensitiveDict()\n self.case_insensitive_dict['Accept'] = 'application/json'\n\n def test_list(self):\n assert list(self.case_insensitive_dict) == ['Accept']\n\n possible_keys = pytest.mark.parametrize('key', ('accept', 'ACCEPT', 'aCcEpT', 'Accept'))\n\n @possible_keys\n def test_getitem(self, key):\n assert self.case_insensitive_dict[key] == 'application/json'\n\n @possible_keys\n def test_delitem(self, key):\n del self.case_insensitive_dict[key]\n assert key not in self.case_insensitive_dict\n\n def test_lower_items(self):\n assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')]\n\n def test_repr(self):\n assert repr(self.case_insensitive_dict) == \"{'Accept': 'application/json'}\"\n\n def test_copy(self):\n copy = self.case_insensitive_dict.copy()\n assert copy is not self.case_insensitive_dict\n assert copy == self.case_insensitive_dict\n\n @pytest.mark.parametrize(\n 'other, result', (\n ({'AccePT': 'application/json'}, True),\n ({}, False),\n (None, False)\n )\n )\n def test_instance_equality(self, other, result):\n assert (self.case_insensitive_dict == other) is result\n\n\nclass TestLookupDict:\n\n @pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"LookupDict instance with \"bad_gateway\" attribute.\"\"\"\n self.lookup_dict = LookupDict('test')\n self.lookup_dict.bad_gateway = 502\n\n def test_repr(self):\n assert repr(self.lookup_dict) == \"\"\n\n get_item_parameters = pytest.mark.parametrize(\n 'key, value', (\n ('bad_gateway', 502),\n ('not_a_key', None)\n )\n )\n\n @get_item_parameters\n def test_getitem(self, key, value):\n assert self.lookup_dict[key] == value\n\n @get_item_parameters\n def test_get(self, key, value):\n assert self.lookup_dict.get(key) == value\n"},{"className":"test","col":0,"comment":"null","endLoc":43,"id":394,"nodeType":"Class","startLoc":25,"text":"class test(Command):\n description: str\n user_options: ClassVar[list[tuple[str, str, str]]]\n test_suite: Incomplete\n test_module: Incomplete\n test_loader: Incomplete\n test_runner: Incomplete\n def initialize_options(self) -> None: ...\n def finalize_options(self) -> None: ...\n @NonDataProperty\n def test_args(self) -> list[str]: ...\n def with_project_on_sys_path(self, func) -> None: ...\n def project_on_sys_path(self, include_dists=()): ...\n @staticmethod\n def paths_on_pythonpath(paths) -> None: ...\n @staticmethod\n def install_dists(dist): ...\n def run(self) -> None: ...\n def run_tests(self) -> None: ..."},{"className":"CaseInsensitiveDict","col":0,"comment":"A case-insensitive ``dict``-like object.\n\n Implements all methods and operations of\n ``MutableMapping`` as well as dict's ``copy``. Also\n provides ``lower_items``.\n\n All keys are expected to be strings. 
The structure remembers the\n case of the last key to be set, and ``iter(instance)``,\n ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``\n will contain case-sensitive keys. However, querying and contains\n testing is case insensitive::\n\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n cid['aCCEPT'] == 'application/json' # True\n list(cid) == ['Accept'] # True\n\n For example, ``headers['content-encoding']`` will return the\n value of a ``'Content-Encoding'`` response header, regardless\n of how the header name was originally stored.\n\n If the constructor, ``.update``, or equality comparison\n operations are given keys that have equal ``.lower()``s, the\n behavior is undefined.\n ","endLoc":86,"id":395,"nodeType":"Class","startLoc":15,"text":"class CaseInsensitiveDict(MutableMapping):\n \"\"\"A case-insensitive ``dict``-like object.\n\n Implements all methods and operations of\n ``MutableMapping`` as well as dict's ``copy``. Also\n provides ``lower_items``.\n\n All keys are expected to be strings. The structure remembers the\n case of the last key to be set, and ``iter(instance)``,\n ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``\n will contain case-sensitive keys. However, querying and contains\n testing is case insensitive::\n\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n cid['aCCEPT'] == 'application/json' # True\n list(cid) == ['Accept'] # True\n\n For example, ``headers['content-encoding']`` will return the\n value of a ``'Content-Encoding'`` response header, regardless\n of how the header name was originally stored.\n\n If the constructor, ``.update``, or equality comparison\n operations are given keys that have equal ``.lower()``s, the\n behavior is undefined.\n \"\"\"\n\n def __init__(self, data=None, **kwargs):\n self._store = OrderedDict()\n if data is None:\n data = {}\n self.update(data, **kwargs)\n\n def __setitem__(self, key, value):\n # Use the lowercased key for lookups, but store the actual\n # key alongside the value.\n self._store[key.lower()] = (key, value)\n\n def __getitem__(self, key):\n return self._store[key.lower()][1]\n\n def __delitem__(self, key):\n del self._store[key.lower()]\n\n def __iter__(self):\n return (casedkey for casedkey, mappedvalue in self._store.values())\n\n def __len__(self):\n return len(self._store)\n\n def lower_items(self):\n \"\"\"Like iteritems(), but with all lowercase keys.\"\"\"\n return (\n (lowerkey, keyval[1])\n for (lowerkey, keyval)\n in self._store.items()\n )\n\n def __eq__(self, other):\n if isinstance(other, Mapping):\n other = CaseInsensitiveDict(other)\n else:\n return NotImplemented\n # Compare insensitively\n return dict(self.lower_items()) == dict(other.lower_items())\n\n # Copy is required\n def copy(self):\n return CaseInsensitiveDict(self._store.values())\n\n def __repr__(self):\n return str(dict(self.items()))"},{"className":"Command","col":0,"comment":"null","endLoc":91,"id":396,"nodeType":"Class","startLoc":76,"text":"class Command(_Command):\n command_consumes_arguments: bool\n distribution: Distribution\n # Any: Dynamic command subclass attributes\n def __init__(self, dist: Distribution, **kw: Any) -> None: ...\n def ensure_string_list(self, option: str) -> None: ...\n @overload # type: ignore[override] # Extra **kw param\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False, **kw) -> _Command: ...\n @overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False, **kw) -> _CommandT: 
...\n @abstractmethod\n def initialize_options(self) -> None: ...\n @abstractmethod\n def finalize_options(self) -> None: ...\n @abstractmethod\n def run(self) -> None: ..."},{"col":4,"comment":"null","endLoc":51,"header":"def __setitem__(self, key, value)","id":397,"name":"__setitem__","nodeType":"Function","startLoc":48,"text":"def __setitem__(self, key, value):\n # Use the lowercased key for lookups, but store the actual\n # key alongside the value.\n self._store[key.lower()] = (key, value)"},{"col":4,"comment":"null","endLoc":31,"header":"def __init__(self, policy: CookiePolicy | None = None) -> None","id":398,"name":"__init__","nodeType":"Function","startLoc":31,"text":"def __init__(self, policy: CookiePolicy | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":32,"header":"def add_cookie_header(self, request: Request) -> None","id":399,"name":"add_cookie_header","nodeType":"Function","startLoc":32,"text":"def add_cookie_header(self, request: Request) -> None: ..."},{"col":4,"comment":"null","endLoc":33,"header":"def extract_cookies(self, response: HTTPResponse, request: Request) -> None","id":400,"name":"extract_cookies","nodeType":"Function","startLoc":33,"text":"def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ..."},{"col":4,"comment":"null","endLoc":34,"header":"def set_policy(self, policy: CookiePolicy) -> None","id":401,"name":"set_policy","nodeType":"Function","startLoc":34,"text":"def set_policy(self, policy: CookiePolicy) -> None: ..."},{"col":4,"comment":"null","endLoc":35,"header":"def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]","id":402,"name":"make_cookies","nodeType":"Function","startLoc":35,"text":"def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ..."},{"col":4,"comment":"null","endLoc":36,"header":"def set_cookie(self, cookie: Cookie) -> None","id":403,"name":"set_cookie","nodeType":"Function","startLoc":36,"text":"def set_cookie(self, cookie: Cookie) -> None: ..."},{"col":4,"comment":"null","endLoc":37,"header":"def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None","id":404,"name":"set_cookie_if_ok","nodeType":"Function","startLoc":37,"text":"def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ..."},{"col":4,"comment":"null","endLoc":38,"header":"def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None","id":405,"name":"clear","nodeType":"Function","startLoc":38,"text":"def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":39,"header":"def clear_session_cookies(self) -> None","id":406,"name":"clear_session_cookies","nodeType":"Function","startLoc":39,"text":"def clear_session_cookies(self) -> None: ..."},{"col":4,"comment":"null","endLoc":40,"header":"def clear_expired_cookies(self) -> None","id":407,"name":"clear_expired_cookies","nodeType":"Function","startLoc":40,"text":"def clear_expired_cookies(self) -> None: ... 
# undocumented"},{"col":4,"comment":"null","endLoc":41,"header":"def __iter__(self) -> Iterator[Cookie]","id":408,"name":"__iter__","nodeType":"Function","startLoc":41,"text":"def __iter__(self) -> Iterator[Cookie]: ..."},{"col":4,"comment":"null","endLoc":42,"header":"def __len__(self) -> int","id":409,"name":"__len__","nodeType":"Function","startLoc":42,"text":"def __len__(self) -> int: ..."},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":25,"id":410,"name":"non_word_re","nodeType":"Attribute","startLoc":25,"text":"non_word_re"},{"col":4,"comment":"null","endLoc":54,"header":"def __getitem__(self, key)","id":413,"name":"__getitem__","nodeType":"Function","startLoc":53,"text":"def __getitem__(self, key):\n return self._store[key.lower()][1]"},{"col":4,"comment":"null","endLoc":57,"header":"def __delitem__(self, key)","id":418,"name":"__delitem__","nodeType":"Function","startLoc":56,"text":"def __delitem__(self, key):\n del self._store[key.lower()]"},{"className":"Command","col":0,"comment":"null","endLoc":109,"id":419,"nodeType":"Class","startLoc":14,"text":"class Command:\n distribution: Distribution\n # Any to work around variance issues\n sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]\n def __init__(self, dist: Distribution) -> None: ...\n def ensure_finalized(self) -> None: ...\n @abstractmethod\n def initialize_options(self) -> None: ...\n @abstractmethod\n def finalize_options(self) -> None: ...\n @abstractmethod\n def run(self) -> None: ...\n def announce(self, msg: str, level: int = ...) -> None: ...\n def debug_print(self, msg: str) -> None: ...\n def ensure_string(self, option: str, default: str | None = ...) -> None: ...\n def ensure_string_list(self, option: str) -> None: ...\n def ensure_filename(self, option: str) -> None: ...\n def ensure_dirname(self, option: str) -> None: ...\n def get_command_name(self) -> str: ...\n def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...\n def get_finalized_command(self, command: str, create: bool = True) -> Command: ...\n @overload\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...\n @overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...\n def run_command(self, command: str) -> None: ...\n def get_sub_commands(self) -> list[str]: ...\n def warn(self, msg: str) -> None: ...\n def execute(\n self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = ..., level: int = ...\n ) -> None: ...\n def mkpath(self, name: str, mode: int = ...) 
-> None: ...\n @overload\n def copy_file(\n self,\n infile: StrPath,\n outfile: _StrPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_StrPathT | str, bool]: ...\n @overload\n def copy_file(\n self,\n infile: BytesPath,\n outfile: _BytesPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_BytesPathT | bytes, bool]: ...\n def copy_tree(\n self,\n infile: StrPath,\n outfile: str,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n preserve_symlinks: bool = False,\n level: Unused = 1,\n ) -> list[str]: ...\n @overload\n def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ...\n @overload\n def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ...\n def spawn(self, cmd: Iterable[str], search_path: bool = True, level: Unused = 1) -> None: ...\n @overload\n def make_archive(\n self,\n base_name: str,\n format: str,\n root_dir: StrOrBytesPath | None = None,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str: ...\n @overload\n def make_archive(\n self,\n base_name: StrPath,\n format: str,\n root_dir: StrOrBytesPath,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str: ...\n def make_file(\n self,\n infiles: str | list[str] | tuple[str, ...],\n outfile: StrOrBytesPath,\n func: Callable[[Unpack[_Ts]], Unused],\n args: tuple[Unpack[_Ts]],\n exec_msg: str | None = None,\n skip_msg: str | None = None,\n level: Unused = 1,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":60,"header":"def __iter__(self)","id":420,"name":"__iter__","nodeType":"Function","startLoc":59,"text":"def __iter__(self):\n return (casedkey for casedkey, mappedvalue in self._store.values())"},{"id":421,"name":"MANIFEST.in","nodeType":"TextFile","path":"","text":"include README.md LICENSE NOTICE HISTORY.md pytest.ini requirements-dev.txt\nrecursive-include tests *.py\n"},{"id":422,"name":".github/ISSUE_TEMPLATE","nodeType":"Package"},{"id":423,"name":"Bug_report.md","nodeType":"TextFile","path":".github/ISSUE_TEMPLATE","text":"---\nname: Bug report\nabout: Create a report to help us improve\n\n---\n\n\n\n## Expected Result\n\n\n\n## Actual Result\n\n\n\n## Reproduction Steps\n\n```python\nimport requests\n\n```\n\n## System Information\n\n $ python -m requests.help\n\n```json\n{\n \"paste\": \"here\"\n}\n```\n\n\n"},{"id":424,"name":"docs/_themes","nodeType":"Package"},{"id":425,"name":".gitignore","nodeType":"TextFile","path":"docs/_themes","text":"*.pyc\n*.pyo\n.DS_Store\n"},{"id":426,"name":"release-process.rst","nodeType":"TextFile","path":"docs/community","text":"Release Process and Rules\n=========================\n\n.. versionadded:: v2.6.2\n\nStarting with the version to be released after ``v2.6.2``, the following rules\nwill govern and describe how the Requests core team produces a new release.\n\nMajor Releases\n--------------\n\nA major release will include breaking changes. When it is versioned, it will\nbe versioned as ``vX.0.0``. For example, if the previous release was\n``v10.2.7`` the next version will be ``v11.0.0``.\n\nBreaking changes are changes that break backwards compatibility with prior\nversions. 
If the project were to change the ``text`` attribute on a\n``Response`` object to a method, that would only happen in a Major release.\n\nMajor releases may also include miscellaneous bug fixes. The core developers of\nRequests are committed to providing a good user experience. This means we're\nalso committed to preserving backwards compatibility as much as possible. Major\nreleases will be infrequent and will need strong justifications before they are\nconsidered.\n\nMinor Releases\n--------------\n\nA minor release will not include breaking changes but may include miscellaneous\nbug fixes. If the previous version of Requests released was ``v10.2.7`` a minor\nrelease would be versioned as ``v10.3.0``.\n\nMinor releases will be backwards compatible with releases that have the same\nmajor version number. In other words, all versions that would start with\n``v10.`` should be compatible with each other.\n\nHotfix Releases\n---------------\n\nA hotfix release will only include bug fixes that were missed when the project\nreleased the previous version. If the previous version of Requests released\n``v10.2.7`` the hotfix release would be versioned as ``v10.2.8``.\n\nHotfixes will **not** include upgrades to vendored dependencies after\n``v2.6.2``\n\nReasoning\n---------\n\nIn the 2.5 and 2.6 release series, the Requests core team upgraded vendored\ndependencies and caused a great deal of headaches for both users and the core\nteam. To reduce this pain, we're forming a concrete set of procedures so\nexpectations will be properly set.\n"},{"col":4,"comment":"null","endLoc":254,"header":"def __repr__(self)","id":427,"name":"__repr__","nodeType":"Function","startLoc":253,"text":"def __repr__(self):\n return '' % (self.method)"},{"col":4,"comment":"Constructs a :class:`PreparedRequest ` for transmission and returns it.","endLoc":271,"header":"def prepare(self)","id":428,"name":"prepare","nodeType":"Function","startLoc":256,"text":"def prepare(self):\n \"\"\"Constructs a :class:`PreparedRequest ` for transmission and returns it.\"\"\"\n p = PreparedRequest()\n p.prepare(\n method=self.method,\n url=self.url,\n headers=self.headers,\n files=self.files,\n data=self.data,\n json=self.json,\n params=self.params,\n auth=self.auth,\n cookies=self.cookies,\n hooks=self.hooks,\n )\n return p"},{"id":429,"name":"recommended.rst","nodeType":"TextFile","path":"docs/community","text":".. _recommended:\n\nRecommended Packages and Extensions\n===================================\n\nRequests has a great variety of powerful and useful third-party extensions.\nThis page provides an overview of some of the best of them.\n\nCertifi CA Bundle\n-----------------\n\n`Certifi`_ is a carefully curated collection of Root Certificates for\nvalidating the trustworthiness of SSL certificates while verifying the\nidentity of TLS hosts. It has been extracted from the Requests project.\n\n.. _Certifi: https://github.com/certifi/python-certifi\n\nCacheControl\n------------\n\n`CacheControl`_ is an extension that adds a full HTTP cache to Requests. This\nmakes your web requests substantially more efficient, and should be used\nwhenever you're making a lot of web requests.\n\n.. _CacheControl: https://cachecontrol.readthedocs.io/en/latest/\n\nRequests-Toolbelt\n-----------------\n\n`Requests-Toolbelt`_ is a collection of utilities that some users of Requests may desire,\nbut do not belong in Requests proper. 
This library is actively maintained\nby members of the Requests core team, and reflects the functionality most\nrequested by users within the community.\n\n.. _Requests-Toolbelt: https://toolbelt.readthedocs.io/en/latest/index.html\n\n\nRequests-Threads\n----------------\n\n`Requests-Threads`_ is a Requests session that returns the amazing Twisted's awaitable Deferreds instead of Response objects. This allows the use of ``async``/``await`` keyword usage on Python 3, or Twisted's style of programming, if desired.\n\n.. _Requests-Threads: https://github.com/requests/requests-threads\n\nRequests-OAuthlib\n-----------------\n\n`requests-oauthlib`_ makes it possible to do the OAuth dance from Requests\nautomatically. This is useful for the large number of websites that use OAuth\nto provide authentication. It also provides a lot of tweaks that handle ways\nthat specific OAuth providers differ from the standard specifications.\n\n.. _requests-oauthlib: https://requests-oauthlib.readthedocs.io/en/latest/\n\n\nBetamax\n-------\n\n`Betamax`_ records your HTTP interactions so the NSA does not have to.\nA VCR imitation designed only for Python-Requests.\n\n.. _betamax: https://github.com/betamaxpy/betamax\n"},{"id":430,"name":"updates.rst","nodeType":"TextFile","path":"docs/community","text":".. _updates:\n\n\nCommunity Updates\n=================\n\nIf you'd like to stay up to date on the community and development of Requests,\nthere are several options:\n\n\nGitHub\n------\n\nThe best way to track the development of Requests is through\n`the GitHub repo `_.\n\n\n.. include:: ../../HISTORY.md\n"},{"id":431,"name":"LICENSE","nodeType":"TextFile","path":"ext","text":"Copyright 2019 Kenneth Reitz. All rights reserved.\n"},{"col":4,"comment":"null","endLoc":63,"header":"def __len__(self)","id":432,"name":"__len__","nodeType":"Function","startLoc":62,"text":"def __len__(self):\n return len(self._store)"},{"col":4,"comment":"Like iteritems(), but with all lowercase keys.","endLoc":71,"header":"def lower_items(self)","id":434,"name":"lower_items","nodeType":"Function","startLoc":65,"text":"def lower_items(self):\n \"\"\"Like iteritems(), but with all lowercase keys.\"\"\"\n return (\n (lowerkey, keyval[1])\n for (lowerkey, keyval)\n in self._store.items()\n )"},{"col":4,"comment":"null","endLoc":79,"header":"def __eq__(self, other)","id":435,"name":"__eq__","nodeType":"Function","startLoc":73,"text":"def __eq__(self, other):\n if isinstance(other, Mapping):\n other = CaseInsensitiveDict(other)\n else:\n return NotImplemented\n # Compare insensitively\n return dict(self.lower_items()) == dict(other.lower_items())"},{"col":4,"comment":"null","endLoc":18,"header":"def __init__(self, dist: Distribution) -> None","id":436,"name":"__init__","nodeType":"Function","startLoc":18,"text":"def __init__(self, dist: Distribution) -> None: ..."},{"col":4,"comment":"null","endLoc":19,"header":"def ensure_finalized(self) -> None","id":437,"name":"ensure_finalized","nodeType":"Function","startLoc":19,"text":"def ensure_finalized(self) -> None: ..."},{"col":4,"comment":"null","endLoc":21,"header":"@abstractmethod\n def initialize_options(self) -> None","id":438,"name":"initialize_options","nodeType":"Function","startLoc":20,"text":"@abstractmethod\n def initialize_options(self) -> None: ..."},{"col":4,"comment":"null","endLoc":23,"header":"@abstractmethod\n def finalize_options(self) -> None","id":439,"name":"finalize_options","nodeType":"Function","startLoc":22,"text":"@abstractmethod\n def finalize_options(self) -> None: 
..."},{"col":4,"comment":"null","endLoc":25,"header":"@abstractmethod\n def run(self) -> None","id":440,"name":"run","nodeType":"Function","startLoc":24,"text":"@abstractmethod\n def run(self) -> None: ..."},{"col":4,"comment":"null","endLoc":26,"header":"def announce(self, msg: str, level: int = ...) -> None","id":441,"name":"announce","nodeType":"Function","startLoc":26,"text":"def announce(self, msg: str, level: int = ...) -> None: ..."},{"col":4,"comment":"null","endLoc":27,"header":"def debug_print(self, msg: str) -> None","id":442,"name":"debug_print","nodeType":"Function","startLoc":27,"text":"def debug_print(self, msg: str) -> None: ..."},{"col":4,"comment":"null","endLoc":28,"header":"def ensure_string(self, option: str, default: str | None = ...) -> None","id":443,"name":"ensure_string","nodeType":"Function","startLoc":28,"text":"def ensure_string(self, option: str, default: str | None = ...) -> None: ..."},{"col":4,"comment":"null","endLoc":29,"header":"def ensure_string_list(self, option: str) -> None","id":444,"name":"ensure_string_list","nodeType":"Function","startLoc":29,"text":"def ensure_string_list(self, option: str) -> None: ..."},{"col":4,"comment":"null","endLoc":30,"header":"def ensure_filename(self, option: str) -> None","id":445,"name":"ensure_filename","nodeType":"Function","startLoc":30,"text":"def ensure_filename(self, option: str) -> None: ..."},{"col":4,"comment":"null","endLoc":31,"header":"def ensure_dirname(self, option: str) -> None","id":446,"name":"ensure_dirname","nodeType":"Function","startLoc":31,"text":"def ensure_dirname(self, option: str) -> None: ..."},{"col":4,"comment":"null","endLoc":32,"header":"def get_command_name(self) -> str","id":447,"name":"get_command_name","nodeType":"Function","startLoc":32,"text":"def get_command_name(self) -> str: ..."},{"col":4,"comment":"null","endLoc":33,"header":"def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None","id":448,"name":"set_undefined_options","nodeType":"Function","startLoc":33,"text":"def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ..."},{"col":4,"comment":"null","endLoc":34,"header":"def get_finalized_command(self, command: str, create: bool = True) -> Command","id":449,"name":"get_finalized_command","nodeType":"Function","startLoc":34,"text":"def get_finalized_command(self, command: str, create: bool = True) -> Command: ..."},{"col":4,"comment":"null","endLoc":36,"header":"@overload\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command","id":450,"name":"reinitialize_command","nodeType":"Function","startLoc":35,"text":"@overload\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ..."},{"col":4,"comment":"null","endLoc":38,"header":"@overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT","id":451,"name":"reinitialize_command","nodeType":"Function","startLoc":37,"text":"@overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ..."},{"col":4,"comment":"null","endLoc":39,"header":"def run_command(self, command: str) -> None","id":452,"name":"run_command","nodeType":"Function","startLoc":39,"text":"def run_command(self, command: str) -> None: ..."},{"col":4,"comment":"null","endLoc":40,"header":"def get_sub_commands(self) -> list[str]","id":453,"name":"get_sub_commands","nodeType":"Function","startLoc":40,"text":"def get_sub_commands(self) -> 
list[str]: ..."},{"col":4,"comment":"null","endLoc":41,"header":"def warn(self, msg: str) -> None","id":454,"name":"warn","nodeType":"Function","startLoc":41,"text":"def warn(self, msg: str) -> None: ..."},{"col":4,"comment":"null","endLoc":44,"header":"def execute(\n self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = ..., level: int = ...\n ) -> None","id":455,"name":"execute","nodeType":"Function","startLoc":42,"text":"def execute(\n self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = ..., level: int = ...\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":45,"header":"def mkpath(self, name: str, mode: int = ...) -> None","id":456,"name":"mkpath","nodeType":"Function","startLoc":45,"text":"def mkpath(self, name: str, mode: int = ...) -> None: ..."},{"col":4,"comment":"null","endLoc":55,"header":"@overload\n def copy_file(\n self,\n infile: StrPath,\n outfile: _StrPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_StrPathT | str, bool]","id":457,"name":"copy_file","nodeType":"Function","startLoc":46,"text":"@overload\n def copy_file(\n self,\n infile: StrPath,\n outfile: _StrPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_StrPathT | str, bool]: ..."},{"col":4,"comment":"null","endLoc":65,"header":"@overload\n def copy_file(\n self,\n infile: BytesPath,\n outfile: _BytesPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_BytesPathT | bytes, bool]","id":458,"name":"copy_file","nodeType":"Function","startLoc":56,"text":"@overload\n def copy_file(\n self,\n infile: BytesPath,\n outfile: _BytesPathT,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n link: str | None = None,\n level: Unused = 1,\n ) -> tuple[_BytesPathT | bytes, bool]: ..."},{"col":4,"comment":"null","endLoc":74,"header":"def copy_tree(\n self,\n infile: StrPath,\n outfile: str,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n preserve_symlinks: bool = False,\n level: Unused = 1,\n ) -> list[str]","id":459,"name":"copy_tree","nodeType":"Function","startLoc":66,"text":"def copy_tree(\n self,\n infile: StrPath,\n outfile: str,\n preserve_mode: bool = True,\n preserve_times: bool = True,\n preserve_symlinks: bool = False,\n level: Unused = 1,\n ) -> list[str]: ..."},{"col":4,"comment":"null","endLoc":76,"header":"@overload\n def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str","id":460,"name":"move_file","nodeType":"Function","startLoc":75,"text":"@overload\n def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ..."},{"col":4,"comment":"null","endLoc":78,"header":"@overload\n def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes","id":461,"name":"move_file","nodeType":"Function","startLoc":77,"text":"@overload\n def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ..."},{"col":4,"comment":"null","endLoc":310,"header":"def __init__(self)","id":462,"name":"__init__","nodeType":"Function","startLoc":295,"text":"def __init__(self):\n #: HTTP verb to send to the server.\n self.method = None\n #: HTTP URL to send the request to.\n self.url = None\n #: dictionary of HTTP headers.\n self.headers = None\n # The `CookieJar` used to create the 
Cookie header will be stored here\n # after prepare_cookies is called\n self._cookies = None\n #: request body to send to the server.\n self.body = None\n #: dictionary of callback hooks, for internal usage.\n self.hooks = default_hooks()\n #: integer denoting starting position of a readable file-like body.\n self._body_position = None"},{"col":4,"comment":"null","endLoc":79,"header":"def spawn(self, cmd: Iterable[str], search_path: bool = True, level: Unused = 1) -> None","id":463,"name":"spawn","nodeType":"Function","startLoc":79,"text":"def spawn(self, cmd: Iterable[str], search_path: bool = True, level: Unused = 1) -> None: ..."},{"col":4,"comment":"null","endLoc":89,"header":"@overload\n def make_archive(\n self,\n base_name: str,\n format: str,\n root_dir: StrOrBytesPath | None = None,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str","id":464,"name":"make_archive","nodeType":"Function","startLoc":80,"text":"@overload\n def make_archive(\n self,\n base_name: str,\n format: str,\n root_dir: StrOrBytesPath | None = None,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str: ..."},{"col":4,"comment":"null","endLoc":99,"header":"@overload\n def make_archive(\n self,\n base_name: StrPath,\n format: str,\n root_dir: StrOrBytesPath,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str","id":465,"name":"make_archive","nodeType":"Function","startLoc":90,"text":"@overload\n def make_archive(\n self,\n base_name: StrPath,\n format: str,\n root_dir: StrOrBytesPath,\n base_dir: str | None = None,\n owner: str | None = None,\n group: str | None = None,\n ) -> str: ..."},{"col":4,"comment":"null","endLoc":109,"header":"def make_file(\n self,\n infiles: str | list[str] | tuple[str, ...],\n outfile: StrOrBytesPath,\n func: Callable[[Unpack[_Ts]], Unused],\n args: tuple[Unpack[_Ts]],\n exec_msg: str | None = None,\n skip_msg: str | None = None,\n level: Unused = 1,\n ) -> None","id":466,"name":"make_file","nodeType":"Function","startLoc":100,"text":"def make_file(\n self,\n infiles: str | list[str] | tuple[str, ...],\n outfile: StrOrBytesPath,\n func: Callable[[Unpack[_Ts]], Unused],\n args: tuple[Unpack[_Ts]],\n exec_msg: str | None = None,\n skip_msg: str | None = None,\n level: Unused = 1,\n ) -> None: ..."},{"attributeType":"Distribution","col":4,"comment":"null","endLoc":15,"id":467,"name":"distribution","nodeType":"Attribute","startLoc":15,"text":"distribution"},{"attributeType":"null","col":8,"comment":"null","endLoc":245,"id":468,"name":"headers","nodeType":"Attribute","startLoc":245,"text":"self.headers"},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":26,"id":469,"name":"quote_re","nodeType":"Attribute","startLoc":26,"text":"quote_re"},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":27,"id":470,"name":"strict_domain_re","nodeType":"Attribute","startLoc":27,"text":"strict_domain_re"},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":28,"id":471,"name":"domain_re","nodeType":"Attribute","startLoc":28,"text":"domain_re"},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":29,"id":472,"name":"dots_re","nodeType":"Attribute","startLoc":29,"text":"dots_re"},{"attributeType":"Pattern","col":4,"comment":"null","endLoc":30,"id":473,"name":"magic_re","nodeType":"Attribute","startLoc":30,"text":"magic_re"},{"col":4,"comment":"Dict-like get() that also supports optional domain and path args in\n order to resolve naming 
collisions from using one cookie jar over\n multiple domains.\n\n .. warning:: operation is O(n), not O(1).\n ","endLoc":199,"header":"def get(self, name, default=None, domain=None, path=None)","id":474,"name":"get","nodeType":"Function","startLoc":189,"text":"def get(self, name, default=None, domain=None, path=None):\n \"\"\"Dict-like get() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n\n .. warning:: operation is O(n), not O(1).\n \"\"\"\n try:\n return self._find_no_duplicates(name, domain, path)\n except KeyError:\n return default"},{"attributeType":"list","col":4,"comment":"null","endLoc":17,"id":475,"name":"sub_commands","nodeType":"Attribute","startLoc":17,"text":"sub_commands"},{"fileName":"test_lowlevel.py","filePath":"tests","id":476,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport pytest\nimport threading\nimport requests\n\nfrom tests.testserver.server import Server, consume_socket_content\n\nfrom .utils import override_environ\n\n\ndef echo_response_handler(sock):\n \"\"\"Simple handler that will take request and echo it back to requester.\"\"\"\n request_content = consume_socket_content(sock, timeout=0.5)\n\n text_200 = (\n b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: %d\\r\\n\\r\\n'\n b'%s'\n ) % (len(request_content), request_content)\n sock.send(text_200)\n\n\ndef test_chunked_upload():\n \"\"\"can safely send generators\"\"\"\n close_server = threading.Event()\n server = Server.basic_response_server(wait_to_close_event=close_server)\n data = iter([b'a', b'b', b'c'])\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, stream=True)\n close_server.set() # release server block\n\n assert r.status_code == 200\n assert r.request.headers['Transfer-Encoding'] == 'chunked'\n\n\ndef test_chunked_encoding_error():\n \"\"\"get a ChunkedEncodingError if the server returns a bad response\"\"\"\n\n def incomplete_chunked_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=0.5)\n\n # The server never ends the request and doesn't provide any valid chunks\n sock.send(b\"HTTP/1.1 200 OK\\r\\n\" +\n b\"Transfer-Encoding: chunked\\r\\n\")\n\n return request_content\n\n close_server = threading.Event()\n server = Server(incomplete_chunked_response_handler)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n with pytest.raises(requests.exceptions.ChunkedEncodingError):\n r = requests.get(url)\n close_server.set() # release server block\n\n\ndef test_chunked_upload_uses_only_specified_host_header():\n \"\"\"Ensure we use only the specified Host header for chunked requests.\"\"\"\n close_server = threading.Event()\n server = Server(echo_response_handler, wait_to_close_event=close_server)\n\n data = iter([b'a', b'b', b'c'])\n custom_host = 'sample-host'\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, headers={'Host': custom_host}, stream=True)\n close_server.set() # release server block\n\n expected_header = b'Host: %s\\r\\n' % custom_host.encode('utf-8')\n assert expected_header in r.content\n assert r.content.count(b'Host: ') == 1\n\n\ndef test_chunked_upload_doesnt_skip_host_header():\n \"\"\"Ensure we don't omit all Host headers with chunked requests.\"\"\"\n close_server = threading.Event()\n server = Server(echo_response_handler, wait_to_close_event=close_server)\n\n data = iter([b'a', b'b', b'c'])\n\n with 
server as (host, port):\n expected_host = '{}:{}'.format(host, port)\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, stream=True)\n close_server.set() # release server block\n\n expected_header = b'Host: %s\\r\\n' % expected_host.encode('utf-8')\n assert expected_header in r.content\n assert r.content.count(b'Host: ') == 1\n\n\ndef test_conflicting_content_lengths():\n \"\"\"Ensure we correctly throw an InvalidHeader error if multiple\n conflicting Content-Length headers are returned.\n \"\"\"\n\n def multiple_content_length_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=0.5)\n\n sock.send(b\"HTTP/1.1 200 OK\\r\\n\" +\n b\"Content-Type: text/plain\\r\\n\" +\n b\"Content-Length: 16\\r\\n\" +\n b\"Content-Length: 32\\r\\n\\r\\n\" +\n b\"-- Bad Actor -- Original Content\\r\\n\")\n\n return request_content\n\n close_server = threading.Event()\n server = Server(multiple_content_length_response_handler)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n with pytest.raises(requests.exceptions.InvalidHeader):\n r = requests.get(url)\n close_server.set()\n\n\ndef test_digestauth_401_count_reset_on_redirect():\n \"\"\"Ensure we correctly reset num_401_calls after a successful digest auth,\n followed by a 302 redirect to another digest auth prompt.\n\n See https://github.com/psf/requests/issues/1979.\n \"\"\"\n text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n text_302 = (b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /\\r\\n\\r\\n')\n\n text_200 = (b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: 0\\r\\n\\r\\n')\n\n expected_digest = (b'Authorization: Digest username=\"user\", '\n b'realm=\"me@kennethreitz.com\", '\n b'nonce=\"6bf5d6e4da1ce66918800195d6b9130d\", uri=\"/\"')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_response_handler(sock):\n # Respond to initial GET with a challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_401)\n\n # Verify we receive an Authorization header in response, then redirect.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n sock.send(text_302)\n\n # Verify Authorization isn't sent to the redirected host,\n # then send another challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert b'Authorization:' not in request_content\n sock.send(text_401)\n\n # Verify Authorization is sent correctly again, and return 200 OK.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n sock.send(text_200)\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server succeeded in authenticating.\n assert r.status_code == 200\n # Verify Authorization was sent in final request.\n assert 'Authorization' in r.request.headers\n assert r.request.headers['Authorization'].startswith('Digest ')\n # Verify redirect happened as we expected.\n assert r.history[0].status_code == 302\n close_server.set()\n\n\ndef 
test_digestauth_401_only_sent_once():\n \"\"\"Ensure we correctly respond to a 401 challenge once, and then\n stop responding if challenged again.\n \"\"\"\n text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n expected_digest = (b'Authorization: Digest username=\"user\", '\n b'realm=\"me@kennethreitz.com\", '\n b'nonce=\"6bf5d6e4da1ce66918800195d6b9130d\", uri=\"/\"')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_failed_response_handler(sock):\n # Respond to initial GET with a challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_401)\n\n # Verify we receive an Authorization header in response, then\n # challenge again.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n sock.send(text_401)\n\n # Verify the client didn't respond to second challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content == b''\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_failed_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server didn't authenticate us.\n assert r.status_code == 401\n assert r.history[0].status_code == 401\n close_server.set()\n\n\ndef test_digestauth_only_on_4xx():\n \"\"\"Ensure we only send digestauth on 4xx challenges.\n\n See https://github.com/psf/requests/issues/3772.\n \"\"\"\n text_200_chal = (b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_response_handler(sock):\n # Respond to GET with a 200 containing www-authenticate header.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_200_chal)\n\n # Verify the client didn't respond with auth.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content == b''\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server didn't receive auth from us.\n assert r.status_code == 200\n assert len(r.history) == 0\n close_server.set()\n\n\n_schemes_by_var_prefix = [\n ('http', ['http']),\n ('https', ['https']),\n ('all', ['http', 'https']),\n]\n\n_proxy_combos = []\nfor prefix, schemes in _schemes_by_var_prefix:\n for scheme in schemes:\n _proxy_combos.append((\"{}_proxy\".format(prefix), scheme))\n\n_proxy_combos += [(var.upper(), scheme) for var, scheme in _proxy_combos]\n\n\n@pytest.mark.parametrize(\"var,scheme\", _proxy_combos)\ndef test_use_proxy_from_environment(httpbin, var, scheme):\n url = \"{}://httpbin.org\".format(scheme)\n fake_proxy = Server() # do nothing with the requests; just close the socket\n with fake_proxy as (host, port):\n proxy_url = \"socks5://{}:{}\".format(host, 
port)\n kwargs = {var: proxy_url}\n with override_environ(**kwargs):\n # fake proxy's lack of response will cause a ConnectionError\n with pytest.raises(requests.exceptions.ConnectionError):\n requests.get(url)\n\n # the fake proxy received a request\n assert len(fake_proxy.handler_results) == 1\n\n # it had actual content (not checking for SOCKS protocol for now)\n assert len(fake_proxy.handler_results[0]) > 0\n\n\ndef test_redirect_rfc1808_to_non_ascii_location():\n path = u'š'\n expected_path = b'%C5%A1'\n redirect_request = [] # stores the second request to the server\n\n def redirect_resp_handler(sock):\n consume_socket_content(sock, timeout=0.5)\n location = u'//{}:{}/{}'.format(host, port, path)\n sock.send(\n b'HTTP/1.1 301 Moved Permanently\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: ' + location.encode('utf8') + b'\\r\\n'\n b'\\r\\n'\n )\n redirect_request.append(consume_socket_content(sock, timeout=0.5))\n sock.send(b'HTTP/1.1 200 OK\\r\\n\\r\\n')\n\n close_server = threading.Event()\n server = Server(redirect_resp_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = u'http://{}:{}'.format(host, port)\n r = requests.get(url=url, allow_redirects=True)\n assert r.status_code == 200\n assert len(r.history) == 1\n assert r.history[0].status_code == 301\n assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1')\n assert r.url == u'{}/{}'.format(url, expected_path.decode('ascii'))\n\n close_server.set()\n\ndef test_fragment_not_sent_with_request():\n \"\"\"Verify that the fragment portion of a URI isn't sent to the server.\"\"\"\n def response_handler(sock):\n req = consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: '+bytes(len(req))+b'\\r\\n'\n b'\\r\\n'+req\n )\n\n close_server = threading.Event()\n server = Server(response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port)\n r = requests.get(url)\n raw_request = r.content\n\n assert r.status_code == 200\n headers, body = raw_request.split(b'\\r\\n\\r\\n', 1)\n status_line, headers = headers.split(b'\\r\\n', 1)\n\n assert status_line == b'GET /path/to/thing/ HTTP/1.1'\n for frag in (b'view', b'edit', b'token', b'hunter2'):\n assert frag not in headers\n assert frag not in body\n\n close_server.set()\n\ndef test_fragment_update_on_redirect():\n \"\"\"Verify we only append previous fragment if one doesn't exist on new\n location. 
If a new fragment is encountered in a Location header, it should\n be added to all subsequent requests.\n \"\"\"\n\n def response_handler(sock):\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /get#relevant-section\\r\\n\\r\\n'\n )\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /final-url/\\r\\n\\r\\n'\n )\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 200 OK\\r\\n\\r\\n'\n )\n\n close_server = threading.Event()\n server = Server(response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port)\n r = requests.get(url)\n raw_request = r.content\n\n assert r.status_code == 200\n assert len(r.history) == 2\n assert r.history[0].request.url == url\n\n # Verify we haven't overwritten the location with our previous fragment.\n assert r.history[1].request.url == 'http://{}:{}/get#relevant-section'.format(host, port)\n # Verify previous fragment is used and not the original.\n assert r.url == 'http://{}:{}/final-url/#relevant-section'.format(host, port)\n\n close_server.set()\n"},{"className":"Server","col":0,"comment":"Dummy server using for unit testing","endLoc":130,"id":477,"nodeType":"Class","startLoc":26,"text":"class Server(threading.Thread):\n \"\"\"Dummy server using for unit testing\"\"\"\n WAIT_EVENT_TIMEOUT = 5\n\n def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None):\n super(Server, self).__init__()\n\n self.handler = handler or consume_socket_content\n self.handler_results = []\n\n self.host = host\n self.port = port\n self.requests_to_handle = requests_to_handle\n\n self.wait_to_close_event = wait_to_close_event\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n @classmethod\n def text_response_server(cls, text, request_timeout=0.5, **kwargs):\n def text_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=request_timeout)\n sock.send(text.encode('utf-8'))\n\n return request_content\n\n\n return Server(text_response_handler, **kwargs)\n\n @classmethod\n def basic_response_server(cls, **kwargs):\n return cls.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 0\\r\\n\\r\\n\",\n **kwargs\n )\n\n def run(self):\n try:\n self.server_sock = self._create_socket_and_bind()\n # in case self.port = 0\n self.port = self.server_sock.getsockname()[1]\n self.ready_event.set()\n self._handle_requests()\n\n if self.wait_to_close_event:\n self.wait_to_close_event.wait(self.WAIT_EVENT_TIMEOUT)\n finally:\n self.ready_event.set() # just in case of exception\n self._close_server_sock_ignore_errors()\n self.stop_event.set()\n\n def _create_socket_and_bind(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n # NB: when Python 2.7 is no longer supported, the argument\n # can be removed to use a default backlog size\n sock.listen(5)\n return sock\n\n def _close_server_sock_ignore_errors(self):\n try:\n self.server_sock.close()\n except IOError:\n pass\n\n def _handle_requests(self):\n for _ in range(self.requests_to_handle):\n sock = self._accept_connection()\n if not sock:\n break\n\n handler_result = self.handler(sock)\n\n self.handler_results.append(handler_result)\n sock.close()\n\n def _accept_connection(self):\n try:\n ready, _, _ = select.select([self.server_sock], [], 
[], self.WAIT_EVENT_TIMEOUT)\n if not ready:\n return None\n\n return self.server_sock.accept()[0]\n except (select.error, socket.error):\n return None\n\n def __enter__(self):\n self.start()\n self.ready_event.wait(self.WAIT_EVENT_TIMEOUT)\n return self.host, self.port\n\n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait(self.WAIT_EVENT_TIMEOUT)\n else:\n if self.wait_to_close_event:\n # avoid server from waiting for event timeouts\n # if an exception is found in the main thread\n self.wait_to_close_event.set()\n\n # ensure server thread doesn't get stuck waiting for connections\n self._close_server_sock_ignore_errors()\n self.join()\n return False # allow exceptions to propagate"},{"attributeType":"null","col":8,"comment":"null","endLoc":243,"id":478,"name":"method","nodeType":"Attribute","startLoc":243,"text":"self.method"},{"attributeType":"null","col":8,"comment":"null","endLoc":247,"id":479,"name":"data","nodeType":"Attribute","startLoc":247,"text":"self.data"},{"className":"Thread","col":0,"comment":"null","endLoc":98,"id":480,"nodeType":"Class","startLoc":71,"text":"class Thread:\n name: str\n @property\n def ident(self) -> int | None: ...\n daemon: bool\n def __init__(\n self,\n group: None = None,\n target: Callable[..., object] | None = None,\n name: str | None = None,\n args: Iterable[Any] = (),\n kwargs: Mapping[str, Any] | None = None,\n *,\n daemon: bool | None = None,\n ) -> None: ...\n def start(self) -> None: ...\n def run(self) -> None: ...\n def join(self, timeout: float | None = None) -> None: ...\n @property\n def native_id(self) -> int | None: ... # only available on some platforms\n def is_alive(self) -> bool: ...\n if sys.version_info < (3, 9):\n def isAlive(self) -> bool: ...\n # the following methods are all deprecated\n def getName(self) -> str: ...\n def setName(self, name: str) -> None: ...\n def isDaemon(self) -> bool: ...\n def setDaemon(self, daemonic: bool) -> None: ..."},{"attributeType":"null","col":8,"comment":"null","endLoc":250,"id":481,"name":"auth","nodeType":"Attribute","startLoc":250,"text":"self.auth"},{"col":4,"comment":"Both ``__get_item__`` and ``get`` call this function: it's never\n used elsewhere in Requests.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :raises KeyError: if cookie is not found\n :raises CookieConflictError: if there are multiple cookies\n that match name and optionally domain and path\n :return: cookie.value\n ","endLoc":399,"header":"def _find_no_duplicates(self, name, domain=None, path=None)","id":483,"name":"_find_no_duplicates","nodeType":"Function","startLoc":376,"text":"def _find_no_duplicates(self, name, domain=None, path=None):\n \"\"\"Both ``__get_item__`` and ``get`` call this function: it's never\n used elsewhere in Requests.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :raises KeyError: if cookie is not found\n :raises CookieConflictError: if there are multiple cookies\n that match name and optionally domain and path\n :return: cookie.value\n \"\"\"\n toReturn = None\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n if toReturn is not None: # if there are multiple cookies that meet passed in criteria\n raise 
CookieConflictError('There are multiple cookies with name, %r' % (name))\n toReturn = cookie.value # we will eventually return this as long as no cookie conflict\n\n if toReturn:\n return toReturn\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))"},{"fileName":"status_codes.py","filePath":"requests","id":485,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nr\"\"\"\nThe ``codes`` object defines a mapping from common names for HTTP statuses\nto their numerical codes, accessible either as attributes or as dictionary\nitems.\n\nExample::\n\n >>> import requests\n >>> requests.codes['temporary_redirect']\n 307\n >>> requests.codes.teapot\n 418\n >>> requests.codes['\\o/']\n 200\n\nSome codes have multiple names, and both upper- and lower-case versions of\nthe names are allowed. For example, ``codes.ok``, ``codes.OK``, and\n``codes.okay`` all correspond to the HTTP status code 200.\n\"\"\"\n\nfrom .structures import LookupDict\n\n_codes = {\n\n # Informational.\n 100: ('continue',),\n 101: ('switching_protocols',),\n 102: ('processing',),\n 103: ('checkpoint',),\n 122: ('uri_too_long', 'request_uri_too_long'),\n 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\\\o/', '✓'),\n 201: ('created',),\n 202: ('accepted',),\n 203: ('non_authoritative_info', 'non_authoritative_information'),\n 204: ('no_content',),\n 205: ('reset_content', 'reset'),\n 206: ('partial_content', 'partial'),\n 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),\n 208: ('already_reported',),\n 226: ('im_used',),\n\n # Redirection.\n 300: ('multiple_choices',),\n 301: ('moved_permanently', 'moved', '\\\\o-'),\n 302: ('found',),\n 303: ('see_other', 'other'),\n 304: ('not_modified',),\n 305: ('use_proxy',),\n 306: ('switch_proxy',),\n 307: ('temporary_redirect', 'temporary_moved', 'temporary'),\n 308: ('permanent_redirect',\n 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0\n\n # Client Error.\n 400: ('bad_request', 'bad'),\n 401: ('unauthorized',),\n 402: ('payment_required', 'payment'),\n 403: ('forbidden',),\n 404: ('not_found', '-o-'),\n 405: ('method_not_allowed', 'not_allowed'),\n 406: ('not_acceptable',),\n 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),\n 408: ('request_timeout', 'timeout'),\n 409: ('conflict',),\n 410: ('gone',),\n 411: ('length_required',),\n 412: ('precondition_failed', 'precondition'),\n 413: ('request_entity_too_large',),\n 414: ('request_uri_too_large',),\n 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),\n 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),\n 417: ('expectation_failed',),\n 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),\n 421: ('misdirected_request',),\n 422: ('unprocessable_entity', 'unprocessable'),\n 423: ('locked',),\n 424: ('failed_dependency', 'dependency'),\n 425: ('unordered_collection', 'unordered'),\n 426: ('upgrade_required', 'upgrade'),\n 428: ('precondition_required', 'precondition'),\n 429: ('too_many_requests', 'too_many'),\n 431: ('header_fields_too_large', 'fields_too_large'),\n 444: ('no_response', 'none'),\n 449: ('retry_with', 'retry'),\n 450: ('blocked_by_windows_parental_controls', 'parental_controls'),\n 451: ('unavailable_for_legal_reasons', 'legal_reasons'),\n 499: ('client_closed_request',),\n\n # Server Error.\n 500: ('internal_server_error', 'server_error', '/o\\\\', '✗'),\n 501: ('not_implemented',),\n 502: ('bad_gateway',),\n 503: ('service_unavailable', 'unavailable'),\n 504: 
('gateway_timeout',),\n 505: ('http_version_not_supported', 'http_version'),\n 506: ('variant_also_negotiates',),\n 507: ('insufficient_storage',),\n 509: ('bandwidth_limit_exceeded', 'bandwidth'),\n 510: ('not_extended',),\n 511: ('network_authentication_required', 'network_auth', 'network_authentication'),\n}\n\ncodes = LookupDict(name='status_codes')\n\ndef _init():\n for code, titles in _codes.items():\n for title in titles:\n setattr(codes, title, code)\n if not title.startswith(('\\\\', '/')):\n setattr(codes, title.upper(), code)\n\n def doc(code):\n names = ', '.join('``%s``' % n for n in _codes[code])\n return '* %d: %s' % (code, names)\n\n global __doc__\n __doc__ = (__doc__ + '\\n' +\n '\\n'.join(doc(code) for code in sorted(_codes))\n if __doc__ is not None else None)\n\n_init()\n"},{"className":"LookupDict","col":0,"comment":"Dictionary lookup object.","endLoc":105,"id":486,"nodeType":"Class","startLoc":89,"text":"class LookupDict(dict):\n \"\"\"Dictionary lookup object.\"\"\"\n\n def __init__(self, name=None):\n self.name = name\n super(LookupDict, self).__init__()\n\n def __repr__(self):\n return '' % (self.name)\n\n def __getitem__(self, key):\n # We allow fall-through here, so values default to None\n\n return self.__dict__.get(key, None)\n\n def get(self, key, default=None):\n return self.__dict__.get(key, default)"},{"col":4,"comment":"null","endLoc":74,"header":"@property\n def ident(self) -> int | None","id":487,"name":"ident","nodeType":"Function","startLoc":73,"text":"@property\n def ident(self) -> int | None: ..."},{"col":4,"comment":"null","endLoc":85,"header":"def __init__(\n self,\n group: None = None,\n target: Callable[..., object] | None = None,\n name: str | None = None,\n args: Iterable[Any] = (),\n kwargs","id":488,"name":"__init__","nodeType":"Function","startLoc":76,"text":"def __init__(\n self,\n group: None = None,\n target: Callable[..., object] | None = None,\n name: str | None = None,\n args: Iterable[Any] = (),\n kwargs: Mapping[str, Any] | None = None,\n *,\n daemon: bool | None = None,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":86,"header":"def start(self) -> None","id":489,"name":"start","nodeType":"Function","startLoc":86,"text":"def start(self) -> None: ..."},{"className":"dict","col":0,"comment":"null","endLoc":1134,"id":490,"nodeType":"Class","startLoc":1063,"text":"class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):\n # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics\n # Also multiprocessing.managers.SyncManager.dict()\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ...\n @overload\n def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ...\n @overload\n def __init__(self: dict[str, _VT], __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ...\n @overload\n def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ...\n @overload\n def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ...\n # Next two overloads are for dict(string.split(sep) for string in iterable)\n # Cannot be Iterable[Sequence[_T]] or otherwise dict([\"foo\", \"bar\", \"baz\"]) is not an error\n @overload\n def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ...\n @overload\n def __init__(self: dict[bytes, bytes], __iterable: Iterable[list[bytes]]) -> None: ...\n def __new__(cls, *args: Any, **kwargs: Any) -> Self: ...\n def 
copy(self) -> dict[_KT, _VT]: ...\n def keys(self) -> dict_keys[_KT, _VT]: ...\n def values(self) -> dict_values[_KT, _VT]: ...\n def items(self) -> dict_items[_KT, _VT]: ...\n # Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`\n # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system.\n # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.\n @classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]: ...\n @classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ...\n # Positional-only in dict, but not in MutableMapping\n @overload # type: ignore[override]\n def get(self, __key: _KT) -> _VT | None: ...\n @overload\n def get(self, __key: _KT, __default: _VT) -> _VT: ...\n @overload\n def get(self, __key: _KT, __default: _T) -> _VT | _T: ...\n @overload\n def pop(self, __key: _KT) -> _VT: ...\n @overload\n def pop(self, __key: _KT, __default: _VT) -> _VT: ...\n @overload\n def pop(self, __key: _KT, __default: _T) -> _VT | _T: ...\n def __len__(self) -> int: ...\n def __getitem__(self, __key: _KT) -> _VT: ...\n def __setitem__(self, __key: _KT, __value: _VT) -> None: ...\n def __delitem__(self, __key: _KT) -> None: ...\n def __iter__(self) -> Iterator[_KT]: ...\n def __eq__(self, __value: object) -> bool: ...\n if sys.version_info >= (3, 8):\n def __reversed__(self) -> Iterator[_KT]: ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n if sys.version_info >= (3, 9):\n def __class_getitem__(cls, __item: Any) -> GenericAlias: ...\n @overload\n def __or__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ...\n @overload\n def __or__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...\n @overload\n def __ror__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ...\n @overload\n def __ror__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...\n # dict.__ior__ should be kept roughly in line with MutableMapping.update()\n @overload # type: ignore[misc]\n def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...\n @overload\n def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ..."},{"col":4,"comment":"null","endLoc":87,"header":"def run(self) -> None","id":491,"name":"run","nodeType":"Function","startLoc":87,"text":"def run(self) -> None: ..."},{"col":4,"comment":"null","endLoc":88,"header":"def join(self, timeout: float | None = None) -> None","id":492,"name":"join","nodeType":"Function","startLoc":88,"text":"def join(self, timeout: float | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":90,"header":"@property\n def native_id(self) -> int | None","id":493,"name":"native_id","nodeType":"Function","startLoc":89,"text":"@property\n def native_id(self) -> int | None: ... 
# only available on some platforms"},{"col":4,"comment":"null","endLoc":91,"header":"def is_alive(self) -> bool","id":494,"name":"is_alive","nodeType":"Function","startLoc":91,"text":"def is_alive(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":95,"header":"def getName(self) -> str","id":495,"name":"getName","nodeType":"Function","startLoc":95,"text":"def getName(self) -> str: ..."},{"col":4,"comment":"null","endLoc":96,"header":"def setName(self, name: str) -> None","id":496,"name":"setName","nodeType":"Function","startLoc":96,"text":"def setName(self, name: str) -> None: ..."},{"col":4,"comment":"null","endLoc":97,"header":"def isDaemon(self) -> bool","id":497,"name":"isDaemon","nodeType":"Function","startLoc":97,"text":"def isDaemon(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":98,"header":"def setDaemon(self, daemonic: bool) -> None","id":498,"name":"setDaemon","nodeType":"Function","startLoc":98,"text":"def setDaemon(self, daemonic: bool) -> None: ..."},{"attributeType":"str","col":4,"comment":"null","endLoc":72,"id":499,"name":"name","nodeType":"Attribute","startLoc":72,"text":"name"},{"attributeType":"null","col":8,"comment":"null","endLoc":246,"id":504,"name":"files","nodeType":"Attribute","startLoc":246,"text":"self.files"},{"attributeType":"bool","col":4,"comment":"null","endLoc":75,"id":505,"name":"daemon","nodeType":"Attribute","startLoc":75,"text":"daemon"},{"attributeType":"null","col":8,"comment":"null","endLoc":248,"id":506,"name":"json","nodeType":"Attribute","startLoc":248,"text":"self.json"},{"col":4,"comment":"null","endLoc":42,"header":"def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None)","id":507,"name":"__init__","nodeType":"Function","startLoc":30,"text":"def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None):\n super(Server, self).__init__()\n\n self.handler = handler or consume_socket_content\n self.handler_results = []\n\n self.host = host\n self.port = port\n self.requests_to_handle = requests_to_handle\n\n self.wait_to_close_event = wait_to_close_event\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()"},{"attributeType":"null","col":8,"comment":"null","endLoc":249,"id":508,"name":"params","nodeType":"Attribute","startLoc":249,"text":"self.params"},{"col":4,"comment":"Dict-like set() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n ","endLoc":216,"header":"def set(self, name, value, **kwargs)","id":509,"name":"set","nodeType":"Function","startLoc":201,"text":"def set(self, name, value, **kwargs):\n \"\"\"Dict-like set() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n \"\"\"\n # support client code that unsets cookies by assignment of a None value:\n if value is None:\n remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))\n return\n\n if isinstance(value, Morsel):\n c = morsel_to_cookie(value)\n else:\n c = create_cookie(name, value, **kwargs)\n self.set_cookie(c)\n return c"},{"col":4,"comment":"null","endLoc":1084,"header":"def __new__(cls, *args: Any, **kwargs: Any) -> Self","id":510,"name":"__new__","nodeType":"Function","startLoc":1084,"text":"def __new__(cls, *args: Any, **kwargs: Any) -> Self: ..."},{"col":4,"comment":"null","endLoc":1085,"header":"def copy(self) -> dict[_KT, 
_VT]","id":511,"name":"copy","nodeType":"Function","startLoc":1085,"text":"def copy(self) -> dict[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":1086,"header":"def keys(self) -> dict_keys[_KT, _VT]","id":512,"name":"keys","nodeType":"Function","startLoc":1086,"text":"def keys(self) -> dict_keys[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":1087,"header":"def values(self) -> dict_values[_KT, _VT]","id":513,"name":"values","nodeType":"Function","startLoc":1087,"text":"def values(self) -> dict_values[_KT, _VT]: ..."},{"col":4,"comment":"null","endLoc":1094,"header":"@classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]","id":514,"name":"fromkeys","nodeType":"Function","startLoc":1092,"text":"@classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]: ..."},{"col":4,"comment":"null","endLoc":1097,"header":"@classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]","id":515,"name":"fromkeys","nodeType":"Function","startLoc":1095,"text":"@classmethod\n @overload\n def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ..."},{"col":4,"comment":"null","endLoc":1100,"header":"@overload # type: ignore[override]\n def get(self, __key: _KT) -> _VT | None","id":516,"name":"get","nodeType":"Function","startLoc":1099,"text":"@overload # type: ignore[override]\n def get(self, __key: _KT) -> _VT | None: ..."},{"attributeType":"null","col":8,"comment":"null","endLoc":239,"id":517,"name":"hooks","nodeType":"Attribute","startLoc":239,"text":"self.hooks"},{"attributeType":"null","col":8,"comment":"null","endLoc":244,"id":518,"name":"url","nodeType":"Attribute","startLoc":244,"text":"self.url"},{"col":4,"comment":"null","endLoc":1102,"header":"@overload\n def get(self, __key: _KT, __default: _VT) -> _VT","id":523,"name":"get","nodeType":"Function","startLoc":1101,"text":"@overload\n def get(self, __key: _KT, __default: _VT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":1104,"header":"@overload\n def get(self, __key: _KT, __default: _T) -> _VT | _T","id":524,"name":"get","nodeType":"Function","startLoc":1103,"text":"@overload\n def get(self, __key: _KT, __default: _T) -> _VT | _T: ..."},{"attributeType":"null","col":8,"comment":"null","endLoc":251,"id":525,"name":"cookies","nodeType":"Attribute","startLoc":251,"text":"self.cookies"},{"col":4,"comment":"null","endLoc":1106,"header":"@overload\n def pop(self, __key: _KT) -> _VT","id":526,"name":"pop","nodeType":"Function","startLoc":1105,"text":"@overload\n def pop(self, __key: _KT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":1108,"header":"@overload\n def pop(self, __key: _KT, __default: _VT) -> _VT","id":527,"name":"pop","nodeType":"Function","startLoc":1107,"text":"@overload\n def pop(self, __key: _KT, __default: _VT) -> _VT: ..."},{"col":4,"comment":"null","endLoc":1110,"header":"@overload\n def pop(self, __key: _KT, __default: _T) -> _VT | _T","id":528,"name":"pop","nodeType":"Function","startLoc":1109,"text":"@overload\n def pop(self, __key: _KT, __default: _T) -> _VT | _T: ..."},{"col":4,"comment":"null","endLoc":1111,"header":"def __len__(self) -> int","id":529,"name":"__len__","nodeType":"Function","startLoc":1111,"text":"def __len__(self) -> int: ..."},{"col":4,"comment":"null","endLoc":1112,"header":"def __getitem__(self, __key: _KT) -> _VT","id":530,"name":"__getitem__","nodeType":"Function","startLoc":1112,"text":"def __getitem__(self, __key: _KT) -> 
_VT: ..."},{"col":4,"comment":"null","endLoc":1113,"header":"def __setitem__(self, __key: _KT, __value: _VT) -> None","id":531,"name":"__setitem__","nodeType":"Function","startLoc":1113,"text":"def __setitem__(self, __key: _KT, __value: _VT) -> None: ..."},{"col":4,"comment":"null","endLoc":1114,"header":"def __delitem__(self, __key: _KT) -> None","id":532,"name":"__delitem__","nodeType":"Function","startLoc":1114,"text":"def __delitem__(self, __key: _KT) -> None: ..."},{"col":4,"comment":"null","endLoc":1115,"header":"def __iter__(self) -> Iterator[_KT]","id":533,"name":"__iter__","nodeType":"Function","startLoc":1115,"text":"def __iter__(self) -> Iterator[_KT]: ..."},{"col":4,"comment":"null","endLoc":1116,"header":"def __eq__(self, __value: object) -> bool","id":534,"name":"__eq__","nodeType":"Function","startLoc":1116,"text":"def __eq__(self, __value: object) -> bool: ..."},{"col":8,"comment":"null","endLoc":1118,"header":"def __reversed__(self) -> Iterator[_KT]","id":535,"name":"__reversed__","nodeType":"Function","startLoc":1118,"text":"def __reversed__(self) -> Iterator[_KT]: ..."},{"col":8,"comment":"null","endLoc":1121,"header":"def __class_getitem__(cls, __item: Any) -> GenericAlias","id":536,"name":"__class_getitem__","nodeType":"Function","startLoc":1121,"text":"def __class_getitem__(cls, __item: Any) -> GenericAlias: ..."},{"col":8,"comment":"null","endLoc":1123,"header":"@overload\n def __or__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]","id":537,"name":"__or__","nodeType":"Function","startLoc":1122,"text":"@overload\n def __or__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ..."},{"col":8,"comment":"null","endLoc":1125,"header":"@overload\n def __or__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]","id":538,"name":"__or__","nodeType":"Function","startLoc":1124,"text":"@overload\n def __or__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ..."},{"col":8,"comment":"null","endLoc":1127,"header":"@overload\n def __ror__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]","id":539,"name":"__ror__","nodeType":"Function","startLoc":1126,"text":"@overload\n def __ror__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ..."},{"col":8,"comment":"null","endLoc":1129,"header":"@overload\n def __ror__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]","id":540,"name":"__ror__","nodeType":"Function","startLoc":1128,"text":"@overload\n def __ror__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ..."},{"col":8,"comment":"null","endLoc":1132,"header":"@overload # type: ignore[misc]\n def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self","id":541,"name":"__ior__","nodeType":"Function","startLoc":1131,"text":"@overload # type: ignore[misc]\n def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ..."},{"col":8,"comment":"null","endLoc":1134,"header":"@overload\n def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self","id":542,"name":"__ior__","nodeType":"Function","startLoc":1133,"text":"@overload\n def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ..."},{"attributeType":"None","col":4,"comment":"null","endLoc":1119,"id":543,"name":"__hash__","nodeType":"Attribute","startLoc":1119,"text":"__hash__"},{"col":0,"comment":"Unsets a cookie by name, by default over all domains and paths.\n\n Wraps CookieJar.clear(), is O(n).\n ","endLoc":162,"header":"def remove_cookie_by_name(cookiejar, name, domain=None, 
path=None)","id":544,"name":"remove_cookie_by_name","nodeType":"Function","startLoc":146,"text":"def remove_cookie_by_name(cookiejar, name, domain=None, path=None):\n \"\"\"Unsets a cookie by name, by default over all domains and paths.\n\n Wraps CookieJar.clear(), is O(n).\n \"\"\"\n clearables = []\n for cookie in cookiejar:\n if cookie.name != name:\n continue\n if domain is not None and domain != cookie.domain:\n continue\n if path is not None and path != cookie.path:\n continue\n clearables.append((cookie.domain, cookie.path, cookie.name))\n\n for domain, path, name in clearables:\n cookiejar.clear(domain, path, name)"},{"className":"Response","col":0,"comment":"The :class:`Response ` object, which contains a\n server's response to an HTTP request.\n ","endLoc":973,"id":546,"nodeType":"Class","startLoc":596,"text":"class Response(object):\n \"\"\"The :class:`Response ` object, which contains a\n server's response to an HTTP request.\n \"\"\"\n\n __attrs__ = [\n '_content', 'status_code', 'headers', 'url', 'history',\n 'encoding', 'reason', 'cookies', 'elapsed', 'request'\n ]\n\n def __init__(self):\n self._content = False\n self._content_consumed = False\n self._next = None\n\n #: Integer Code of responded HTTP Status, e.g. 404 or 200.\n self.status_code = None\n\n #: Case-insensitive Dictionary of Response Headers.\n #: For example, ``headers['content-encoding']`` will return the\n #: value of a ``'Content-Encoding'`` response header.\n self.headers = CaseInsensitiveDict()\n\n #: File-like object representation of response (for advanced usage).\n #: Use of ``raw`` requires that ``stream=True`` be set on the request.\n #: This requirement does not apply for use internally to Requests.\n self.raw = None\n\n #: Final URL location of Response.\n self.url = None\n\n #: Encoding to decode with when accessing r.text.\n self.encoding = None\n\n #: A list of :class:`Response ` objects from\n #: the history of the Request. Any redirect responses will end\n #: up here. The list is sorted from the oldest to the most recent request.\n self.history = []\n\n #: Textual reason of responded HTTP Status, e.g. \"Not Found\" or \"OK\".\n self.reason = None\n\n #: A CookieJar of Cookies the server sent back.\n self.cookies = cookiejar_from_dict({})\n\n #: The amount of time elapsed between sending the request\n #: and the arrival of the response (as a timedelta).\n #: This property specifically measures the time taken between sending\n #: the first byte of the request and finishing parsing the headers. 
It\n #: is therefore unaffected by consuming the response content or the\n #: value of the ``stream`` keyword argument.\n self.elapsed = datetime.timedelta(0)\n\n #: The :class:`PreparedRequest ` object to which this\n #: is a response.\n self.request = None\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def __getstate__(self):\n # Consume everything; accessing the content attribute makes\n # sure the content has been fully read.\n if not self._content_consumed:\n self.content\n\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}\n\n def __setstate__(self, state):\n for name, value in state.items():\n setattr(self, name, value)\n\n # pickled objects do not have .raw\n setattr(self, '_content_consumed', True)\n setattr(self, 'raw', None)\n\n def __repr__(self):\n return '' % (self.status_code)\n\n def __bool__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __nonzero__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __iter__(self):\n \"\"\"Allows you to use a response as an iterator.\"\"\"\n return self.iter_content(128)\n\n @property\n def ok(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400, False if not.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n try:\n self.raise_for_status()\n except HTTPError:\n return False\n return True\n\n @property\n def is_redirect(self):\n \"\"\"True if this Response is a well-formed HTTP redirect that could have\n been processed automatically (by :meth:`Session.resolve_redirects`).\n \"\"\"\n return ('location' in self.headers and self.status_code in REDIRECT_STATI)\n\n @property\n def is_permanent_redirect(self):\n \"\"\"True if this Response one of the permanent versions of redirect.\"\"\"\n return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))\n\n @property\n def next(self):\n \"\"\"Returns a PreparedRequest for the next request in a redirect chain, if there is one.\"\"\"\n return self._next\n\n @property\n def apparent_encoding(self):\n \"\"\"The apparent encoding, provided by the charset_normalizer or chardet libraries.\"\"\"\n return chardet.detect(self.content)['encoding']\n\n def iter_content(self, chunk_size=1, decode_unicode=False):\n \"\"\"Iterates over the response data. When stream=True is set on the\n request, this avoids reading the content at once into memory for\n large responses. The chunk size is the number of bytes it should\n read into memory. This is not necessarily the length of each item\n returned as decoding can take place.\n\n chunk_size must be of type int or None. 
A value of None will\n function differently depending on the value of `stream`.\n stream=True will read data as it arrives in whatever size the\n chunks are received. If stream=False, data is returned as\n a single chunk.\n\n If decode_unicode is True, content will be decoded using the best\n available encoding based on the response.\n \"\"\"\n\n def generate():\n # Special case for urllib3.\n if hasattr(self.raw, 'stream'):\n try:\n for chunk in self.raw.stream(chunk_size, decode_content=True):\n yield chunk\n except ProtocolError as e:\n raise ChunkedEncodingError(e)\n except DecodeError as e:\n raise ContentDecodingError(e)\n except ReadTimeoutError as e:\n raise ConnectionError(e)\n else:\n # Standard file-like object.\n while True:\n chunk = self.raw.read(chunk_size)\n if not chunk:\n break\n yield chunk\n\n self._content_consumed = True\n\n if self._content_consumed and isinstance(self._content, bool):\n raise StreamConsumedError()\n elif chunk_size is not None and not isinstance(chunk_size, int):\n raise TypeError(\"chunk_size must be an int, it is instead a %s.\" % type(chunk_size))\n # simulate reading small chunks of the content\n reused_chunks = iter_slices(self._content, chunk_size)\n\n stream_chunks = generate()\n\n chunks = reused_chunks if self._content_consumed else stream_chunks\n\n if decode_unicode:\n chunks = stream_decode_response_unicode(chunks, self)\n\n return chunks\n\n def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):\n \"\"\"Iterates over the response data, one line at a time. When\n stream=True is set on the request, this avoids reading the\n content at once into memory for large responses.\n\n .. note:: This method is not reentrant safe.\n \"\"\"\n\n pending = None\n\n for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):\n\n if pending is not None:\n chunk = pending + chunk\n\n if delimiter:\n lines = chunk.split(delimiter)\n else:\n lines = chunk.splitlines()\n\n if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:\n pending = lines.pop()\n else:\n pending = None\n\n for line in lines:\n yield line\n\n if pending is not None:\n yield pending\n\n @property\n def content(self):\n \"\"\"Content of the response, in bytes.\"\"\"\n\n if self._content is False:\n # Read the contents.\n if self._content_consumed:\n raise RuntimeError(\n 'The content for this response was already consumed')\n\n if self.status_code == 0 or self.raw is None:\n self._content = None\n else:\n self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''\n\n self._content_consumed = True\n # don't need to release the connection; that's been handled by urllib3\n # since we exhausted the data.\n return self._content\n\n @property\n def text(self):\n \"\"\"Content of the response, in unicode.\n\n If Response.encoding is None, encoding will be guessed using\n ``charset_normalizer`` or ``chardet``.\n\n The encoding of the response content is determined based solely on HTTP\n headers, following RFC 2616 to the letter. 
If you can take advantage of\n non-HTTP knowledge to make a better guess at the encoding, you should\n set ``r.encoding`` appropriately before accessing this property.\n \"\"\"\n\n # Try charset from content-type\n content = None\n encoding = self.encoding\n\n if not self.content:\n return str('')\n\n # Fallback to auto-detected encoding.\n if self.encoding is None:\n encoding = self.apparent_encoding\n\n # Decode unicode from given encoding.\n try:\n content = str(self.content, encoding, errors='replace')\n except (LookupError, TypeError):\n # A LookupError is raised if the encoding was not found which could\n # indicate a misspelling or similar mistake.\n #\n # A TypeError can be raised if encoding is None\n #\n # So we try blindly encoding.\n content = str(self.content, errors='replace')\n\n return content\n\n def json(self, **kwargs):\n r\"\"\"Returns the json-encoded content of a response, if any.\n\n :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes.\n :raises requests.exceptions.JSONDecodeError: If the response body does not\n contain valid json.\n \"\"\"\n\n if not self.encoding and self.content and len(self.content) > 3:\n # No encoding set. JSON RFC 4627 section 3 states we should expect\n # UTF-8, -16 or -32. Detect which one to use; If the detection or\n # decoding fails, fall back to `self.text` (using charset_normalizer to make\n # a best guess).\n encoding = guess_json_utf(self.content)\n if encoding is not None:\n try:\n return complexjson.loads(\n self.content.decode(encoding), **kwargs\n )\n except UnicodeDecodeError:\n # Wrong UTF codec detected; usually because it's not UTF-8\n # but some other 8-bit codec. This is an RFC violation,\n # and the server didn't bother to tell us what codec *was*\n # used.\n pass\n\n try:\n return complexjson.loads(self.text, **kwargs)\n except JSONDecodeError as e:\n # Catch JSON-related errors and raise as requests.JSONDecodeError\n # This aliases json.JSONDecodeError and simplejson.JSONDecodeError\n if is_py2: # e is a ValueError\n raise RequestsJSONDecodeError(e.message)\n else:\n raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)\n\n @property\n def links(self):\n \"\"\"Returns the parsed header links of the response, if any.\"\"\"\n\n header = self.headers.get('link')\n\n # l = MultiDict()\n l = {}\n\n if header:\n links = parse_header_links(header)\n\n for link in links:\n key = link.get('rel') or link.get('url')\n l[key] = link\n\n return l\n\n def raise_for_status(self):\n \"\"\"Raises :class:`HTTPError`, if one occurred.\"\"\"\n\n http_error_msg = ''\n if isinstance(self.reason, bytes):\n # We attempt to decode utf-8 first because some servers\n # choose to localize their reason strings. If the string\n # isn't utf-8, we fall back to iso-8859-1 for all other\n # encodings. (See PR #3538)\n try:\n reason = self.reason.decode('utf-8')\n except UnicodeDecodeError:\n reason = self.reason.decode('iso-8859-1')\n else:\n reason = self.reason\n\n if 400 <= self.status_code < 500:\n http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n elif 500 <= self.status_code < 600:\n http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n if http_error_msg:\n raise HTTPError(http_error_msg, response=self)\n\n def close(self):\n \"\"\"Releases the connection back to the pool. 
Once this method has been\n called the underlying ``raw`` object must not be accessed again.\n\n *Note: Should not normally need to be called explicitly.*\n \"\"\"\n if not self._content_consumed:\n self.raw.close()\n\n release_conn = getattr(self.raw, 'release_conn', None)\n if release_conn is not None:\n release_conn()"},{"col":4,"comment":"null","endLoc":651,"header":"def __init__(self)","id":547,"name":"__init__","nodeType":"Function","startLoc":606,"text":"def __init__(self):\n self._content = False\n self._content_consumed = False\n self._next = None\n\n #: Integer Code of responded HTTP Status, e.g. 404 or 200.\n self.status_code = None\n\n #: Case-insensitive Dictionary of Response Headers.\n #: For example, ``headers['content-encoding']`` will return the\n #: value of a ``'Content-Encoding'`` response header.\n self.headers = CaseInsensitiveDict()\n\n #: File-like object representation of response (for advanced usage).\n #: Use of ``raw`` requires that ``stream=True`` be set on the request.\n #: This requirement does not apply for use internally to Requests.\n self.raw = None\n\n #: Final URL location of Response.\n self.url = None\n\n #: Encoding to decode with when accessing r.text.\n self.encoding = None\n\n #: A list of :class:`Response ` objects from\n #: the history of the Request. Any redirect responses will end\n #: up here. The list is sorted from the oldest to the most recent request.\n self.history = []\n\n #: Textual reason of responded HTTP Status, e.g. \"Not Found\" or \"OK\".\n self.reason = None\n\n #: A CookieJar of Cookies the server sent back.\n self.cookies = cookiejar_from_dict({})\n\n #: The amount of time elapsed between sending the request\n #: and the arrival of the response (as a timedelta).\n #: This property specifically measures the time taken between sending\n #: the first byte of the request and finishing parsing the headers. 
It\n #: is therefore unaffected by consuming the response content or the\n #: value of the ``stream`` keyword argument.\n self.elapsed = datetime.timedelta(0)\n\n #: The :class:`PreparedRequest ` object to which this\n #: is a response.\n self.request = None"},{"col":4,"comment":"null","endLoc":80,"header":"def __init__(self, dist: Distribution, **kw: Any) -> None","id":548,"name":"__init__","nodeType":"Function","startLoc":80,"text":"def __init__(self, dist: Distribution, **kw: Any) -> None: ..."},{"col":4,"comment":"null","endLoc":81,"header":"def ensure_string_list(self, option: str) -> None","id":549,"name":"ensure_string_list","nodeType":"Function","startLoc":81,"text":"def ensure_string_list(self, option: str) -> None: ..."},{"col":4,"comment":"null","endLoc":83,"header":"@overload # type: ignore[override] # Extra **kw param\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False, **kw) -> _Command","id":550,"name":"reinitialize_command","nodeType":"Function","startLoc":82,"text":"@overload # type: ignore[override] # Extra **kw param\n def reinitialize_command(self, command: str, reinit_subcommands: bool = False, **kw) -> _Command: ..."},{"col":4,"comment":"null","endLoc":85,"header":"@overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False, **kw) -> _CommandT","id":551,"name":"reinitialize_command","nodeType":"Function","startLoc":84,"text":"@overload\n def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False, **kw) -> _CommandT: ..."},{"col":4,"comment":"null","endLoc":87,"header":"@abstractmethod\n def initialize_options(self) -> None","id":552,"name":"initialize_options","nodeType":"Function","startLoc":86,"text":"@abstractmethod\n def initialize_options(self) -> None: ..."},{"col":4,"comment":"null","endLoc":89,"header":"@abstractmethod\n def finalize_options(self) -> None","id":553,"name":"finalize_options","nodeType":"Function","startLoc":88,"text":"@abstractmethod\n def finalize_options(self) -> None: ..."},{"col":4,"comment":"null","endLoc":91,"header":"@abstractmethod\n def run(self) -> None","id":554,"name":"run","nodeType":"Function","startLoc":90,"text":"@abstractmethod\n def run(self) -> None: ..."},{"attributeType":"bool","col":4,"comment":"null","endLoc":77,"id":555,"name":"command_consumes_arguments","nodeType":"Attribute","startLoc":77,"text":"command_consumes_arguments"},{"col":0,"comment":"null","endLoc":31,"header":"def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes","id":556,"name":"b64encode","nodeType":"Function","startLoc":31,"text":"def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ..."},{"col":0,"comment":"Convert a Morsel object into a Cookie containing the one k/v pair.","endLoc":505,"header":"def morsel_to_cookie(morsel)","id":557,"name":"morsel_to_cookie","nodeType":"Function","startLoc":477,"text":"def morsel_to_cookie(morsel):\n \"\"\"Convert a Morsel object into a Cookie containing the one k/v pair.\"\"\"\n\n expires = None\n if morsel['max-age']:\n try:\n expires = int(time.time() + int(morsel['max-age']))\n except ValueError:\n raise TypeError('max-age: %s must be integer' % morsel['max-age'])\n elif morsel['expires']:\n time_template = '%a, %d-%b-%Y %H:%M:%S GMT'\n expires = calendar.timegm(\n time.strptime(morsel['expires'], time_template)\n )\n return create_cookie(\n comment=morsel['comment'],\n comment_url=bool(morsel['comment']),\n discard=False,\n domain=morsel['domain'],\n 
expires=expires,\n name=morsel.key,\n path=morsel['path'],\n port=None,\n rest={'HttpOnly': morsel['httponly']},\n rfc2109=False,\n secure=bool(morsel['secure']),\n value=morsel.value,\n version=morsel['version'] or 0,\n )"},{"attributeType":"Distribution","col":4,"comment":"null","endLoc":78,"id":558,"name":"distribution","nodeType":"Attribute","startLoc":78,"text":"distribution"},{"col":0,"comment":"Returns a CookieJar from a key/value dictionary.\n\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :param cookiejar: (optional) A cookiejar to add the cookies to.\n :param overwrite: (optional) If False, will not replace cookies\n already in the jar with new ones.\n :rtype: CookieJar\n ","endLoc":526,"header":"def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True)","id":560,"name":"cookiejar_from_dict","nodeType":"Function","startLoc":508,"text":"def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :param cookiejar: (optional) A cookiejar to add the cookies to.\n :param overwrite: (optional) If False, will not replace cookies\n already in the jar with new ones.\n :rtype: CookieJar\n \"\"\"\n if cookiejar is None:\n cookiejar = RequestsCookieJar()\n\n if cookie_dict is not None:\n names_from_jar = [cookie.name for cookie in cookiejar]\n for name in cookie_dict:\n if overwrite or (name not in names_from_jar):\n cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))\n\n return cookiejar"},{"col":4,"comment":"null","endLoc":32,"header":"def initialize_options(self) -> None","id":561,"name":"initialize_options","nodeType":"Function","startLoc":32,"text":"def initialize_options(self) -> None: ..."},{"col":4,"comment":"null","endLoc":33,"header":"def finalize_options(self) -> None","id":562,"name":"finalize_options","nodeType":"Function","startLoc":33,"text":"def finalize_options(self) -> None: ..."},{"col":4,"comment":"null","endLoc":35,"header":"@NonDataProperty\n def test_args(self) -> list[str]","id":563,"name":"test_args","nodeType":"Function","startLoc":34,"text":"@NonDataProperty\n def test_args(self) -> list[str]: ..."},{"col":4,"comment":"null","endLoc":36,"header":"def with_project_on_sys_path(self, func) -> None","id":564,"name":"with_project_on_sys_path","nodeType":"Function","startLoc":36,"text":"def with_project_on_sys_path(self, func) -> None: ..."},{"col":4,"comment":"null","endLoc":37,"header":"def project_on_sys_path(self, include_dists=())","id":565,"name":"project_on_sys_path","nodeType":"Function","startLoc":37,"text":"def project_on_sys_path(self, include_dists=()): ..."},{"col":4,"comment":"null","endLoc":39,"header":"@staticmethod\n def paths_on_pythonpath(paths) -> None","id":566,"name":"paths_on_pythonpath","nodeType":"Function","startLoc":38,"text":"@staticmethod\n def paths_on_pythonpath(paths) -> None: ..."},{"col":4,"comment":"null","endLoc":41,"header":"@staticmethod\n def install_dists(dist)","id":567,"name":"install_dists","nodeType":"Function","startLoc":40,"text":"@staticmethod\n def install_dists(dist): ..."},{"col":4,"comment":"null","endLoc":42,"header":"def run(self) -> None","id":568,"name":"run","nodeType":"Function","startLoc":42,"text":"def run(self) -> None: ..."},{"col":4,"comment":"null","endLoc":43,"header":"def run_tests(self) -> None","id":569,"name":"run_tests","nodeType":"Function","startLoc":43,"text":"def run_tests(self) -> None: 
..."},{"attributeType":"str","col":4,"comment":"null","endLoc":26,"id":570,"name":"description","nodeType":"Attribute","startLoc":26,"text":"description"},{"attributeType":"list","col":4,"comment":"null","endLoc":27,"id":572,"name":"user_options","nodeType":"Attribute","startLoc":27,"text":"user_options"},{"col":0,"comment":"Extract the cookies from the response into a CookieJar.\n\n :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)\n :param request: our own requests.Request object\n :param response: urllib3.HTTPResponse object\n ","endLoc":132,"header":"def extract_cookies_to_jar(jar, request, response)","id":573,"name":"extract_cookies_to_jar","nodeType":"Function","startLoc":118,"text":"def extract_cookies_to_jar(jar, request, response):\n \"\"\"Extract the cookies from the response into a CookieJar.\n\n :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)\n :param request: our own requests.Request object\n :param response: urllib3.HTTPResponse object\n \"\"\"\n if not (hasattr(response, '_original_response') and\n response._original_response):\n return\n # the _original_response field is the wrapped httplib.HTTPResponse object,\n req = MockRequest(request)\n # pull out the HTTPMessage with the headers and put it in the mock:\n res = MockResponse(response._original_response.msg)\n jar.extract_cookies(res, req)"},{"col":4,"comment":"null","endLoc":94,"header":"def __init__(self, name=None)","id":574,"name":"__init__","nodeType":"Function","startLoc":92,"text":"def __init__(self, name=None):\n self.name = name\n super(LookupDict, self).__init__()"},{"col":4,"comment":"null","endLoc":40,"header":"def __init__(self, request)","id":575,"name":"__init__","nodeType":"Function","startLoc":37,"text":"def __init__(self, request):\n self._r = request\n self._new_headers = {}\n self.type = urlparse(self._r.url).scheme"},{"col":4,"comment":"Make a MockResponse for `cookielib` to read.\n\n :param headers: a httplib.HTTPMessage or analogous carrying the headers\n ","endLoc":109,"header":"def __init__(self, headers)","id":576,"name":"__init__","nodeType":"Function","startLoc":104,"text":"def __init__(self, headers):\n \"\"\"Make a MockResponse for `cookielib` to read.\n\n :param headers: a httplib.HTTPMessage or analogous carrying the headers\n \"\"\"\n self._headers = headers"},{"attributeType":"null","col":4,"comment":"null","endLoc":28,"id":577,"name":"test_suite","nodeType":"Attribute","startLoc":28,"text":"test_suite"},{"col":0,"comment":"Add cookies to cookiejar and returns a merged CookieJar.\n\n :param cookiejar: CookieJar object to add the cookies to.\n :param cookies: Dictionary or CookieJar object to be added.\n :rtype: CookieJar\n ","endLoc":549,"header":"def merge_cookies(cookiejar, cookies)","id":578,"name":"merge_cookies","nodeType":"Function","startLoc":529,"text":"def merge_cookies(cookiejar, cookies):\n \"\"\"Add cookies to cookiejar and returns a merged CookieJar.\n\n :param cookiejar: CookieJar object to add the cookies to.\n :param cookies: Dictionary or CookieJar object to be added.\n :rtype: CookieJar\n \"\"\"\n if not isinstance(cookiejar, cookielib.CookieJar):\n raise ValueError('You can only merge into CookieJar')\n\n if isinstance(cookies, dict):\n cookiejar = cookiejar_from_dict(\n cookies, cookiejar=cookiejar, overwrite=False)\n elif isinstance(cookies, cookielib.CookieJar):\n try:\n cookiejar.update(cookies)\n except AttributeError:\n for cookie_in_jar in cookies:\n cookiejar.set_cookie(cookie_in_jar)\n\n return 
cookiejar"},{"attributeType":"null","col":4,"comment":"null","endLoc":29,"id":579,"name":"test_module","nodeType":"Attribute","startLoc":29,"text":"test_module"},{"attributeType":"null","col":4,"comment":"null","endLoc":30,"id":580,"name":"test_loader","nodeType":"Attribute","startLoc":30,"text":"test_loader"},{"attributeType":"null","col":4,"comment":"null","endLoc":31,"id":581,"name":"test_runner","nodeType":"Attribute","startLoc":31,"text":"test_runner"},{"className":"PyTest","col":0,"comment":"null","endLoc":33,"id":582,"nodeType":"Class","startLoc":13,"text":"class PyTest(TestCommand):\n user_options = [('pytest-args=', 'a', \"Arguments to pass into py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n try:\n from multiprocessing import cpu_count\n self.pytest_args = ['-n', str(cpu_count()), '--boxed']\n except (ImportError, NotImplementedError):\n self.pytest_args = ['-n', '1', '--boxed']\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n self.test_args = []\n self.test_suite = True\n\n def run_tests(self):\n import pytest\n\n errno = pytest.main(self.pytest_args)\n sys.exit(errno)"},{"col":0,"comment":"Make a cookie from underspecified parameters.\n\n By default, the pair of `name` and `value` will be set for the domain ''\n and sent on every request (this is sometimes called a \"supercookie\").\n ","endLoc":474,"header":"def create_cookie(name, value, **kwargs)","id":583,"name":"create_cookie","nodeType":"Function","startLoc":441,"text":"def create_cookie(name, value, **kwargs):\n \"\"\"Make a cookie from underspecified parameters.\n\n By default, the pair of `name` and `value` will be set for the domain ''\n and sent on every request (this is sometimes called a \"supercookie\").\n \"\"\"\n result = {\n 'version': 0,\n 'name': name,\n 'value': value,\n 'port': None,\n 'domain': '',\n 'path': '/',\n 'secure': False,\n 'expires': None,\n 'discard': True,\n 'comment': None,\n 'comment_url': None,\n 'rest': {'HttpOnly': None},\n 'rfc2109': False,\n }\n\n badargs = set(kwargs) - set(result)\n if badargs:\n err = 'create_cookie() got unexpected keyword arguments: %s'\n raise TypeError(err % list(badargs))\n\n result.update(kwargs)\n result['port_specified'] = bool(result['port'])\n result['domain_specified'] = bool(result['domain'])\n result['domain_initial_dot'] = result['domain'].startswith('.')\n result['path_specified'] = bool(result['path'])\n\n return cookielib.Cookie(**result)"},{"col":4,"comment":"null","endLoc":22,"header":"def initialize_options(self)","id":584,"name":"initialize_options","nodeType":"Function","startLoc":16,"text":"def initialize_options(self):\n TestCommand.initialize_options(self)\n try:\n from multiprocessing import cpu_count\n self.pytest_args = ['-n', str(cpu_count()), '--boxed']\n except (ImportError, NotImplementedError):\n self.pytest_args = ['-n', '1', '--boxed']"},{"col":4,"comment":"null","endLoc":53,"header":"@classmethod\n def text_response_server(cls, text, request_timeout=0.5, **kwargs)","id":585,"name":"text_response_server","nodeType":"Function","startLoc":44,"text":"@classmethod\n def text_response_server(cls, text, request_timeout=0.5, **kwargs):\n def text_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=request_timeout)\n sock.send(text.encode('utf-8'))\n\n return request_content\n\n\n return Server(text_response_handler, **kwargs)"},{"col":0,"comment":"null","endLoc":23,"header":"def consume_socket_content(sock, 
timeout=0.5)","id":586,"name":"consume_socket_content","nodeType":"Function","startLoc":8,"text":"def consume_socket_content(sock, timeout=0.5):\n chunks = 65536\n content = b''\n\n while True:\n more_to_read = select.select([sock], [], [], timeout)[0]\n if not more_to_read:\n break\n\n new_content = sock.recv(chunks)\n if not new_content:\n break\n\n content += new_content\n\n return content"},{"col":4,"comment":"null","endLoc":61,"header":"@classmethod\n def basic_response_server(cls, **kwargs)","id":588,"name":"basic_response_server","nodeType":"Function","startLoc":55,"text":"@classmethod\n def basic_response_server(cls, **kwargs):\n return cls.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 0\\r\\n\\r\\n\",\n **kwargs\n )"},{"className":"PreparedRequest","col":0,"comment":"The fully mutable :class:`PreparedRequest ` object,\n containing the exact bytes that will be sent to the server.\n\n Instances are generated from a :class:`Request ` object, and\n should not be instantiated manually; doing so may produce undesirable\n effects.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> r = req.prepare()\n >>> r\n \n\n >>> s = requests.Session()\n >>> s.send(r)\n \n ","endLoc":593,"id":589,"nodeType":"Class","startLoc":274,"text":"class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):\n \"\"\"The fully mutable :class:`PreparedRequest ` object,\n containing the exact bytes that will be sent to the server.\n\n Instances are generated from a :class:`Request ` object, and\n should not be instantiated manually; doing so may produce undesirable\n effects.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> r = req.prepare()\n >>> r\n \n\n >>> s = requests.Session()\n >>> s.send(r)\n \n \"\"\"\n\n def __init__(self):\n #: HTTP verb to send to the server.\n self.method = None\n #: HTTP URL to send the request to.\n self.url = None\n #: dictionary of HTTP headers.\n self.headers = None\n # The `CookieJar` used to create the Cookie header will be stored here\n # after prepare_cookies is called\n self._cookies = None\n #: request body to send to the server.\n self.body = None\n #: dictionary of callback hooks, for internal usage.\n self.hooks = default_hooks()\n #: integer denoting starting position of a readable file-like body.\n self._body_position = None\n\n def prepare(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n \"\"\"Prepares the entire request with the given parameters.\"\"\"\n\n self.prepare_method(method)\n self.prepare_url(url, params)\n self.prepare_headers(headers)\n self.prepare_cookies(cookies)\n self.prepare_body(data, files, json)\n self.prepare_auth(auth, url)\n\n # Note that prepare_auth must be last to enable authentication schemes\n # such as OAuth to work on a fully prepared request.\n\n # This MUST go after prepare_auth. 
Authenticators could add a hook\n self.prepare_hooks(hooks)\n\n def __repr__(self):\n return '' % (self.method)\n\n def copy(self):\n p = PreparedRequest()\n p.method = self.method\n p.url = self.url\n p.headers = self.headers.copy() if self.headers is not None else None\n p._cookies = _copy_cookie_jar(self._cookies)\n p.body = self.body\n p.hooks = self.hooks\n p._body_position = self._body_position\n return p\n\n def prepare_method(self, method):\n \"\"\"Prepares the given HTTP method.\"\"\"\n self.method = method\n if self.method is not None:\n self.method = to_native_string(self.method.upper())\n\n @staticmethod\n def _get_idna_encoded_host(host):\n import idna\n\n try:\n host = idna.encode(host, uts46=True).decode('utf-8')\n except idna.IDNAError:\n raise UnicodeError\n return host\n\n def prepare_url(self, url, params):\n \"\"\"Prepares the given HTTP URL.\"\"\"\n #: Accept objects that have string representations.\n #: We're unable to blindly call unicode/str functions\n #: as this will include the bytestring indicator (b'')\n #: on python 3.x.\n #: https://github.com/psf/requests/pull/2238\n if isinstance(url, bytes):\n url = url.decode('utf8')\n else:\n url = unicode(url) if is_py2 else str(url)\n\n # Remove leading whitespaces from url\n url = url.lstrip()\n\n # Don't do any URL preparation for non-HTTP schemes like `mailto`,\n # `data` etc to work around exceptions from `url_parse`, which\n # handles RFC 3986 only.\n if ':' in url and not url.lower().startswith('http'):\n self.url = url\n return\n\n # Support for unicode domain names and paths.\n try:\n scheme, auth, host, port, path, query, fragment = parse_url(url)\n except LocationParseError as e:\n raise InvalidURL(*e.args)\n\n if not scheme:\n error = (\"Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?\")\n error = error.format(to_native_string(url, 'utf8'))\n\n raise MissingSchema(error)\n\n if not host:\n raise InvalidURL(\"Invalid URL %r: No host supplied\" % url)\n\n # In general, we want to try IDNA encoding the hostname if the string contains\n # non-ASCII characters. This allows users to automatically get the correct IDNA\n # behaviour. 
For strings containing only ASCII characters, we need to also verify\n # it doesn't start with a wildcard (*), before allowing the unencoded hostname.\n if not unicode_is_ascii(host):\n try:\n host = self._get_idna_encoded_host(host)\n except UnicodeError:\n raise InvalidURL('URL has an invalid label.')\n elif host.startswith(u'*'):\n raise InvalidURL('URL has an invalid label.')\n\n # Carefully reconstruct the network location\n netloc = auth or ''\n if netloc:\n netloc += '@'\n netloc += host\n if port:\n netloc += ':' + str(port)\n\n # Bare domains aren't valid URLs.\n if not path:\n path = '/'\n\n if is_py2:\n if isinstance(scheme, str):\n scheme = scheme.encode('utf-8')\n if isinstance(netloc, str):\n netloc = netloc.encode('utf-8')\n if isinstance(path, str):\n path = path.encode('utf-8')\n if isinstance(query, str):\n query = query.encode('utf-8')\n if isinstance(fragment, str):\n fragment = fragment.encode('utf-8')\n\n if isinstance(params, (str, bytes)):\n params = to_native_string(params)\n\n enc_params = self._encode_params(params)\n if enc_params:\n if query:\n query = '%s&%s' % (query, enc_params)\n else:\n query = enc_params\n\n url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))\n self.url = url\n\n def prepare_headers(self, headers):\n \"\"\"Prepares the given HTTP headers.\"\"\"\n\n self.headers = CaseInsensitiveDict()\n if headers:\n for header in headers.items():\n # Raise exception on invalid header value.\n check_header_validity(header)\n name, value = header\n self.headers[to_native_string(name)] = value\n\n def prepare_body(self, data, files, json=None):\n \"\"\"Prepares the given HTTP body data.\"\"\"\n\n # Check if file, fo, generator, iterator.\n # If not, run through normal process.\n\n # Nottin' on you.\n body = None\n content_type = None\n\n if not data and json is not None:\n # urllib3 requires a bytes-like body. 
Python 2's json.dumps\n # provides this natively, but Python 3 gives a Unicode string.\n content_type = 'application/json'\n\n try:\n body = complexjson.dumps(json, allow_nan=False)\n except ValueError as ve:\n raise InvalidJSONError(ve, request=self)\n\n if not isinstance(body, bytes):\n body = body.encode('utf-8')\n\n is_stream = all([\n hasattr(data, '__iter__'),\n not isinstance(data, (basestring, list, tuple, Mapping))\n ])\n\n if is_stream:\n try:\n length = super_len(data)\n except (TypeError, AttributeError, UnsupportedOperation):\n length = None\n\n body = data\n\n if getattr(body, 'tell', None) is not None:\n # Record the current file position before reading.\n # This will allow us to rewind a file in the event\n # of a redirect.\n try:\n self._body_position = body.tell()\n except (IOError, OSError):\n # This differentiates from None, allowing us to catch\n # a failed `tell()` later when trying to rewind the body\n self._body_position = object()\n\n if files:\n raise NotImplementedError('Streamed bodies and files are mutually exclusive.')\n\n if length:\n self.headers['Content-Length'] = builtin_str(length)\n else:\n self.headers['Transfer-Encoding'] = 'chunked'\n else:\n # Multi-part file uploads.\n if files:\n (body, content_type) = self._encode_files(files, data)\n else:\n if data:\n body = self._encode_params(data)\n if isinstance(data, basestring) or hasattr(data, 'read'):\n content_type = None\n else:\n content_type = 'application/x-www-form-urlencoded'\n\n self.prepare_content_length(body)\n\n # Add content-type if it wasn't explicitly provided.\n if content_type and ('content-type' not in self.headers):\n self.headers['Content-Type'] = content_type\n\n self.body = body\n\n def prepare_content_length(self, body):\n \"\"\"Prepare Content-Length header based on request method and body\"\"\"\n if body is not None:\n length = super_len(body)\n if length:\n # If length exists, set it. Otherwise, we fallback\n # to Transfer-Encoding: chunked.\n self.headers['Content-Length'] = builtin_str(length)\n elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:\n # Set Content-Length to 0 for methods that can have a body\n # but don't provide one. (i.e. not GET or HEAD)\n self.headers['Content-Length'] = '0'\n\n def prepare_auth(self, auth, url=''):\n \"\"\"Prepares the given HTTP auth data.\"\"\"\n\n # If no Auth is explicitly provided, extract it from the URL first.\n if auth is None:\n url_auth = get_auth_from_url(self.url)\n auth = url_auth if any(url_auth) else None\n\n if auth:\n if isinstance(auth, tuple) and len(auth) == 2:\n # special-case basic HTTP auth\n auth = HTTPBasicAuth(*auth)\n\n # Allow auth to make its changes.\n r = auth(self)\n\n # Update self to reflect the auth changes.\n self.__dict__.update(r.__dict__)\n\n # Recompute Content-Length\n self.prepare_content_length(self.body)\n\n def prepare_cookies(self, cookies):\n \"\"\"Prepares the given HTTP cookie data.\n\n This function eventually generates a ``Cookie`` header from the\n given cookies using cookielib. Due to cookielib's design, the header\n will not be regenerated if it already exists, meaning this function\n can only be called once for the life of the\n :class:`PreparedRequest ` object. 
Any subsequent calls\n to ``prepare_cookies`` will have no actual effect, unless the \"Cookie\"\n header is removed beforehand.\n \"\"\"\n if isinstance(cookies, cookielib.CookieJar):\n self._cookies = cookies\n else:\n self._cookies = cookiejar_from_dict(cookies)\n\n cookie_header = get_cookie_header(self._cookies, self)\n if cookie_header is not None:\n self.headers['Cookie'] = cookie_header\n\n def prepare_hooks(self, hooks):\n \"\"\"Prepares the given hooks.\"\"\"\n # hooks can be passed as None to the prepare method and to this\n # method. To prevent iterating over None, simply use an empty list\n # if hooks is False-y\n hooks = hooks or []\n for event in hooks:\n self.register_hook(event, hooks[event])"},{"className":"RequestEncodingMixin","col":0,"comment":"null","endLoc":173,"id":590,"nodeType":"Class","startLoc":62,"text":"class RequestEncodingMixin(object):\n @property\n def path_url(self):\n \"\"\"Build the path URL to use.\"\"\"\n\n url = []\n\n p = urlsplit(self.url)\n\n path = p.path\n if not path:\n path = '/'\n\n url.append(path)\n\n query = p.query\n if query:\n url.append('?')\n url.append(query)\n\n return ''.join(url)\n\n @staticmethod\n def _encode_params(data):\n \"\"\"Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n \"\"\"\n\n if isinstance(data, (str, bytes)):\n return data\n elif hasattr(data, 'read'):\n return data\n elif hasattr(data, '__iter__'):\n result = []\n for k, vs in to_key_val_list(data):\n if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):\n vs = [vs]\n for v in vs:\n if v is not None:\n result.append(\n (k.encode('utf-8') if isinstance(k, str) else k,\n v.encode('utf-8') if isinstance(v, str) else v))\n return urlencode(result, doseq=True)\n else:\n return data\n\n @staticmethod\n def _encode_files(files, data):\n \"\"\"Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. 
Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n \"\"\"\n if (not files):\n raise ValueError(\"Files must be provided.\")\n elif isinstance(data, basestring):\n raise ValueError(\"Data must not be a string.\")\n\n new_fields = []\n fields = to_key_val_list(data or {})\n files = to_key_val_list(files or {})\n\n for field, val in fields:\n if isinstance(val, basestring) or not hasattr(val, '__iter__'):\n val = [val]\n for v in val:\n if v is not None:\n # Don't call str() on bytestrings: in Py3 it all goes wrong.\n if not isinstance(v, bytes):\n v = str(v)\n\n new_fields.append(\n (field.decode('utf-8') if isinstance(field, bytes) else field,\n v.encode('utf-8') if isinstance(v, str) else v))\n\n for (k, v) in files:\n # support for explicit filename\n ft = None\n fh = None\n if isinstance(v, (tuple, list)):\n if len(v) == 2:\n fn, fp = v\n elif len(v) == 3:\n fn, fp, ft = v\n else:\n fn, fp, ft, fh = v\n else:\n fn = guess_filename(v) or k\n fp = v\n\n if isinstance(fp, (str, bytes, bytearray)):\n fdata = fp\n elif hasattr(fp, 'read'):\n fdata = fp.read()\n elif fp is None:\n continue\n else:\n fdata = fp\n\n rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)\n rf.make_multipart(content_type=ft)\n new_fields.append(rf)\n\n body, content_type = encode_multipart_formdata(new_fields)\n\n return body, content_type"},{"col":4,"comment":"Build the path URL to use.","endLoc":82,"header":"@property\n def path_url(self)","id":591,"name":"path_url","nodeType":"Function","startLoc":63,"text":"@property\n def path_url(self):\n \"\"\"Build the path URL to use.\"\"\"\n\n url = []\n\n p = urlsplit(self.url)\n\n path = p.path\n if not path:\n path = '/'\n\n url.append(path)\n\n query = p.query\n if query:\n url.append('?')\n url.append(query)\n\n return ''.join(url)"},{"col":4,"comment":"null","endLoc":76,"header":"def run(self)","id":592,"name":"run","nodeType":"Function","startLoc":63,"text":"def run(self):\n try:\n self.server_sock = self._create_socket_and_bind()\n # in case self.port = 0\n self.port = self.server_sock.getsockname()[1]\n self.ready_event.set()\n self._handle_requests()\n\n if self.wait_to_close_event:\n self.wait_to_close_event.wait(self.WAIT_EVENT_TIMEOUT)\n finally:\n self.ready_event.set() # just in case of exception\n self._close_server_sock_ignore_errors()\n self.stop_event.set()"},{"col":4,"comment":"null","endLoc":84,"header":"def _create_socket_and_bind(self)","id":595,"name":"_create_socket_and_bind","nodeType":"Function","startLoc":78,"text":"def _create_socket_and_bind(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n # NB: when Python 2.7 is no longer supported, the argument\n # can be removed to use a default backlog size\n sock.listen(5)\n return sock"},{"col":4,"comment":"Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n ","endLoc":109,"header":"@staticmethod\n def _encode_params(data)","id":598,"name":"_encode_params","nodeType":"Function","startLoc":84,"text":"@staticmethod\n def _encode_params(data):\n \"\"\"Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. 
Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n \"\"\"\n\n if isinstance(data, (str, bytes)):\n return data\n elif hasattr(data, 'read'):\n return data\n elif hasattr(data, '__iter__'):\n result = []\n for k, vs in to_key_val_list(data):\n if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):\n vs = [vs]\n for v in vs:\n if v is not None:\n result.append(\n (k.encode('utf-8') if isinstance(k, str) else k,\n v.encode('utf-8') if isinstance(v, str) else v))\n return urlencode(result, doseq=True)\n else:\n return data"},{"col":0,"comment":"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n ","endLoc":347,"header":"def to_key_val_list(value)","id":601,"name":"to_key_val_list","nodeType":"Function","startLoc":321,"text":"def to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)"},{"col":4,"comment":"null","endLoc":101,"header":"def _handle_requests(self)","id":606,"name":"_handle_requests","nodeType":"Function","startLoc":92,"text":"def _handle_requests(self):\n for _ in range(self.requests_to_handle):\n sock = self._accept_connection()\n if not sock:\n break\n\n handler_result = self.handler(sock)\n\n self.handler_results.append(handler_result)\n sock.close()"},{"col":4,"comment":"null","endLoc":27,"header":"def finalize_options(self)","id":609,"name":"finalize_options","nodeType":"Function","startLoc":24,"text":"def finalize_options(self):\n TestCommand.finalize_options(self)\n self.test_args = []\n self.test_suite = True"},{"col":4,"comment":"null","endLoc":33,"header":"def run_tests(self)","id":610,"name":"run_tests","nodeType":"Function","startLoc":29,"text":"def run_tests(self):\n import pytest\n\n errno = pytest.main(self.pytest_args)\n sys.exit(errno)"},{"attributeType":"list","col":4,"comment":"null","endLoc":14,"id":613,"name":"user_options","nodeType":"Attribute","startLoc":14,"text":"user_options"},{"attributeType":"null","col":8,"comment":"null","endLoc":27,"id":614,"name":"test_suite","nodeType":"Attribute","startLoc":27,"text":"self.test_suite"},{"col":4,"comment":"null","endLoc":654,"header":"def __enter__(self)","id":615,"name":"__enter__","nodeType":"Function","startLoc":653,"text":"def __enter__(self):\n return self"},{"col":4,"comment":"null","endLoc":657,"header":"def __exit__(self, *args)","id":616,"name":"__exit__","nodeType":"Function","startLoc":656,"text":"def __exit__(self, *args):\n self.close()"},{"col":4,"comment":"Releases the connection back to the pool. 
Once this method has been\n called the underlying ``raw`` object must not be accessed again.\n\n *Note: Should not normally need to be called explicitly.*\n ","endLoc":973,"header":"def close(self)","id":617,"name":"close","nodeType":"Function","startLoc":962,"text":"def close(self):\n \"\"\"Releases the connection back to the pool. Once this method has been\n called the underlying ``raw`` object must not be accessed again.\n\n *Note: Should not normally need to be called explicitly.*\n \"\"\"\n if not self._content_consumed:\n self.raw.close()\n\n release_conn = getattr(self.raw, 'release_conn', None)\n if release_conn is not None:\n release_conn()"},{"col":4,"comment":"null","endLoc":665,"header":"def __getstate__(self)","id":624,"name":"__getstate__","nodeType":"Function","startLoc":659,"text":"def __getstate__(self):\n # Consume everything; accessing the content attribute makes\n # sure the content has been fully read.\n if not self._content_consumed:\n self.content\n\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}"},{"attributeType":"list","col":12,"comment":"null","endLoc":20,"id":625,"name":"pytest_args","nodeType":"Attribute","startLoc":20,"text":"self.pytest_args"},{"col":4,"comment":"Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n ","endLoc":173,"header":"@staticmethod\n def _encode_files(files, data)","id":626,"name":"_encode_files","nodeType":"Function","startLoc":111,"text":"@staticmethod\n def _encode_files(files, data):\n \"\"\"Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. 
Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n \"\"\"\n if (not files):\n raise ValueError(\"Files must be provided.\")\n elif isinstance(data, basestring):\n raise ValueError(\"Data must not be a string.\")\n\n new_fields = []\n fields = to_key_val_list(data or {})\n files = to_key_val_list(files or {})\n\n for field, val in fields:\n if isinstance(val, basestring) or not hasattr(val, '__iter__'):\n val = [val]\n for v in val:\n if v is not None:\n # Don't call str() on bytestrings: in Py3 it all goes wrong.\n if not isinstance(v, bytes):\n v = str(v)\n\n new_fields.append(\n (field.decode('utf-8') if isinstance(field, bytes) else field,\n v.encode('utf-8') if isinstance(v, str) else v))\n\n for (k, v) in files:\n # support for explicit filename\n ft = None\n fh = None\n if isinstance(v, (tuple, list)):\n if len(v) == 2:\n fn, fp = v\n elif len(v) == 3:\n fn, fp, ft = v\n else:\n fn, fp, ft, fh = v\n else:\n fn = guess_filename(v) or k\n fp = v\n\n if isinstance(fp, (str, bytes, bytearray)):\n fdata = fp\n elif hasattr(fp, 'read'):\n fdata = fp.read()\n elif fp is None:\n continue\n else:\n fdata = fp\n\n rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)\n rf.make_multipart(content_type=ft)\n new_fields.append(rf)\n\n body, content_type = encode_multipart_formdata(new_fields)\n\n return body, content_type"},{"col":4,"comment":"null","endLoc":673,"header":"def __setstate__(self, state)","id":627,"name":"__setstate__","nodeType":"Function","startLoc":667,"text":"def __setstate__(self, state):\n for name, value in state.items():\n setattr(self, name, value)\n\n # pickled objects do not have .raw\n setattr(self, '_content_consumed', True)\n setattr(self, 'raw', None)"},{"attributeType":"list","col":8,"comment":"null","endLoc":26,"id":628,"name":"test_args","nodeType":"Attribute","startLoc":26,"text":"self.test_args"},{"col":4,"comment":"null","endLoc":346,"header":"def set_cookie(self, cookie, *args, **kwargs)","id":629,"name":"set_cookie","nodeType":"Function","startLoc":343,"text":"def set_cookie(self, cookie, *args, **kwargs):\n if hasattr(cookie.value, 'startswith') and cookie.value.startswith('\"') and cookie.value.endswith('\"'):\n cookie.value = cookie.value.replace('\\\\\"', '')\n return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)"},{"attributeType":"null","col":44,"comment":"null","endLoc":9,"id":630,"name":"TestCommand","nodeType":"Attribute","startLoc":9,"text":"TestCommand"},{"attributeType":"null","col":0,"comment":"null","endLoc":11,"id":631,"name":"here","nodeType":"Attribute","startLoc":11,"text":"here"},{"col":4,"comment":"null","endLoc":676,"header":"def __repr__(self)","id":637,"name":"__repr__","nodeType":"Function","startLoc":675,"text":"def __repr__(self):\n return '' % (self.status_code)"},{"col":4,"comment":"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. 
This\n is **not** a check to see if the response code is ``200 OK``.\n ","endLoc":686,"header":"def __bool__(self)","id":638,"name":"__bool__","nodeType":"Function","startLoc":678,"text":"def __bool__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok"},{"col":4,"comment":"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n ","endLoc":696,"header":"def __nonzero__(self)","id":639,"name":"__nonzero__","nodeType":"Function","startLoc":688,"text":"def __nonzero__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok"},{"col":4,"comment":"Allows you to use a response as an iterator.","endLoc":700,"header":"def __iter__(self)","id":640,"name":"__iter__","nodeType":"Function","startLoc":698,"text":"def __iter__(self):\n \"\"\"Allows you to use a response as an iterator.\"\"\"\n return self.iter_content(128)"},{"col":0,"comment":"Tries to guess the filename of the given object.","endLoc":240,"header":"def guess_filename(obj)","id":641,"name":"guess_filename","nodeType":"Function","startLoc":235,"text":"def guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)"},{"col":4,"comment":"Iterates over the response data. When stream=True is set on the\n request, this avoids reading the content at once into memory for\n large responses. The chunk size is the number of bytes it should\n read into memory. This is not necessarily the length of each item\n returned as decoding can take place.\n\n chunk_size must be of type int or None. A value of None will\n function differently depending on the value of `stream`.\n stream=True will read data as it arrives in whatever size the\n chunks are received. If stream=False, data is returned as\n a single chunk.\n\n If decode_unicode is True, content will be decoded using the best\n available encoding based on the response.\n ","endLoc":792,"header":"def iter_content(self, chunk_size=1, decode_unicode=False)","id":644,"name":"iter_content","nodeType":"Function","startLoc":739,"text":"def iter_content(self, chunk_size=1, decode_unicode=False):\n \"\"\"Iterates over the response data. When stream=True is set on the\n request, this avoids reading the content at once into memory for\n large responses. The chunk size is the number of bytes it should\n read into memory. This is not necessarily the length of each item\n returned as decoding can take place.\n\n chunk_size must be of type int or None. 
A value of None will\n function differently depending on the value of `stream`.\n stream=True will read data as it arrives in whatever size the\n chunks are received. If stream=False, data is returned as\n a single chunk.\n\n If decode_unicode is True, content will be decoded using the best\n available encoding based on the response.\n \"\"\"\n\n def generate():\n # Special case for urllib3.\n if hasattr(self.raw, 'stream'):\n try:\n for chunk in self.raw.stream(chunk_size, decode_content=True):\n yield chunk\n except ProtocolError as e:\n raise ChunkedEncodingError(e)\n except DecodeError as e:\n raise ContentDecodingError(e)\n except ReadTimeoutError as e:\n raise ConnectionError(e)\n else:\n # Standard file-like object.\n while True:\n chunk = self.raw.read(chunk_size)\n if not chunk:\n break\n yield chunk\n\n self._content_consumed = True\n\n if self._content_consumed and isinstance(self._content, bool):\n raise StreamConsumedError()\n elif chunk_size is not None and not isinstance(chunk_size, int):\n raise TypeError(\"chunk_size must be an int, it is instead a %s.\" % type(chunk_size))\n # simulate reading small chunks of the content\n reused_chunks = iter_slices(self._content, chunk_size)\n\n stream_chunks = generate()\n\n chunks = reused_chunks if self._content_consumed else stream_chunks\n\n if decode_unicode:\n chunks = stream_decode_response_unicode(chunks, self)\n\n return chunks"},{"col":4,"comment":"null","endLoc":111,"header":"def _accept_connection(self)","id":645,"name":"_accept_connection","nodeType":"Function","startLoc":103,"text":"def _accept_connection(self):\n try:\n ready, _, _ = select.select([self.server_sock], [], [], self.WAIT_EVENT_TIMEOUT)\n if not ready:\n return None\n\n return self.server_sock.accept()[0]\n except (select.error, socket.error):\n return None"},{"col":4,"comment":"null","endLoc":83,"header":"def copy(self)","id":646,"name":"copy","nodeType":"Function","startLoc":82,"text":"def copy(self):\n return CaseInsensitiveDict(self._store.values())"},{"col":4,"comment":"null","endLoc":90,"header":"def _close_server_sock_ignore_errors(self)","id":648,"name":"_close_server_sock_ignore_errors","nodeType":"Function","startLoc":86,"text":"def _close_server_sock_ignore_errors(self):\n try:\n self.server_sock.close()\n except IOError:\n pass"},{"attributeType":"list","col":0,"comment":"null","endLoc":41,"id":649,"name":"packages","nodeType":"Attribute","startLoc":41,"text":"packages"},{"attributeType":"list","col":0,"comment":"null","endLoc":43,"id":651,"name":"requires","nodeType":"Attribute","startLoc":43,"text":"requires"},{"attributeType":"list","col":0,"comment":"null","endLoc":52,"id":652,"name":"test_requirements","nodeType":"Attribute","startLoc":52,"text":"test_requirements"},{"col":4,"comment":"Initialize RequestException with `request` and `response` objects.","endLoc":27,"header":"def __init__(self, *args, **kwargs)","id":653,"name":"__init__","nodeType":"Function","startLoc":19,"text":"def __init__(self, *args, **kwargs):\n \"\"\"Initialize RequestException with `request` and `response` objects.\"\"\"\n response = kwargs.pop('response', None)\n self.response = response\n self.request = kwargs.pop('request', None)\n if (response is not None and not self.request and\n hasattr(response, 'request')):\n self.request = self.response.request\n super(RequestException, self).__init__(*args, **kwargs)"},{"col":4,"comment":"Dict-like iterkeys() that returns an iterator of names of cookies\n from the jar.\n\n .. 
seealso:: itervalues() and iteritems().\n ","endLoc":225,"header":"def iterkeys(self)","id":654,"name":"iterkeys","nodeType":"Function","startLoc":218,"text":"def iterkeys(self):\n \"\"\"Dict-like iterkeys() that returns an iterator of names of cookies\n from the jar.\n\n .. seealso:: itervalues() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":61,"id":655,"name":"about","nodeType":"Attribute","startLoc":61,"text":"about"},{"col":4,"comment":"Dict-like keys() that returns a list of names of cookies from the\n jar.\n\n .. seealso:: values() and items().\n ","endLoc":233,"header":"def keys(self)","id":656,"name":"keys","nodeType":"Function","startLoc":227,"text":"def keys(self):\n \"\"\"Dict-like keys() that returns a list of names of cookies from the\n jar.\n\n .. seealso:: values() and items().\n \"\"\"\n return list(self.iterkeys())"},{"col":4,"comment":"null","endLoc":116,"header":"def __enter__(self)","id":657,"name":"__enter__","nodeType":"Function","startLoc":113,"text":"def __enter__(self):\n self.start()\n self.ready_event.wait(self.WAIT_EVENT_TIMEOUT)\n return self.host, self.port"},{"col":4,"comment":"null","endLoc":86,"header":"def __repr__(self)","id":658,"name":"__repr__","nodeType":"Function","startLoc":85,"text":"def __repr__(self):\n return str(dict(self.items()))"},{"attributeType":"StreamReaderWriter","col":77,"comment":"null","endLoc":62,"id":659,"name":"f","nodeType":"Attribute","startLoc":62,"text":"f"},{"col":4,"comment":"null","endLoc":130,"header":"def __exit__(self, exc_type, exc_value, traceback)","id":660,"name":"__exit__","nodeType":"Function","startLoc":118,"text":"def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait(self.WAIT_EVENT_TIMEOUT)\n else:\n if self.wait_to_close_event:\n # avoid server from waiting for event timeouts\n # if an exception is found in the main thread\n self.wait_to_close_event.set()\n\n # ensure server thread doesn't get stuck waiting for connections\n self._close_server_sock_ignore_errors()\n self.join()\n return False # allow exceptions to 
propagate"},{"attributeType":"null","col":4,"comment":"null","endLoc":28,"id":661,"name":"WAIT_EVENT_TIMEOUT","nodeType":"Attribute","startLoc":28,"text":"WAIT_EVENT_TIMEOUT"},{"attributeType":"null","col":8,"comment":"null","endLoc":33,"id":662,"name":"handler","nodeType":"Attribute","startLoc":33,"text":"self.handler"},{"attributeType":"null","col":8,"comment":"null","endLoc":42,"id":663,"name":"stop_event","nodeType":"Attribute","startLoc":42,"text":"self.stop_event"},{"attributeType":"null","col":8,"comment":"null","endLoc":38,"id":664,"name":"requests_to_handle","nodeType":"Attribute","startLoc":38,"text":"self.requests_to_handle"},{"attributeType":"null","col":8,"comment":"null","endLoc":37,"id":665,"name":"port","nodeType":"Attribute","startLoc":37,"text":"self.port"},{"attributeType":"null","col":8,"comment":"null","endLoc":34,"id":666,"name":"handler_results","nodeType":"Attribute","startLoc":34,"text":"self.handler_results"},{"attributeType":"null","col":8,"comment":"null","endLoc":40,"id":667,"name":"wait_to_close_event","nodeType":"Attribute","startLoc":40,"text":"self.wait_to_close_event"},{"attributeType":"null","col":8,"comment":"null","endLoc":41,"id":668,"name":"ready_event","nodeType":"Attribute","startLoc":41,"text":"self.ready_event"},{"attributeType":"null","col":8,"comment":"null","endLoc":36,"id":669,"name":"host","nodeType":"Attribute","startLoc":36,"text":"self.host"},{"col":4,"comment":"null","endLoc":130,"header":"def __init__(self, pool_connections=DEFAULT_POOLSIZE,\n pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,\n pool_block=DEFAULT_POOLBLOCK)","id":670,"name":"__init__","nodeType":"Function","startLoc":114,"text":"def __init__(self, pool_connections=DEFAULT_POOLSIZE,\n pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,\n pool_block=DEFAULT_POOLBLOCK):\n if max_retries == DEFAULT_RETRIES:\n self.max_retries = Retry(0, read=False)\n else:\n self.max_retries = Retry.from_int(max_retries)\n self.config = {}\n self.proxy_manager = {}\n\n super(HTTPAdapter, self).__init__()\n\n self._pool_connections = pool_connections\n self._pool_maxsize = pool_maxsize\n self._pool_block = pool_block\n\n self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)"},{"col":4,"comment":"Prepares the entire request with the given parameters.","endLoc":328,"header":"def prepare(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None)","id":671,"name":"prepare","nodeType":"Function","startLoc":312,"text":"def prepare(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n \"\"\"Prepares the entire request with the given parameters.\"\"\"\n\n self.prepare_method(method)\n self.prepare_url(url, params)\n self.prepare_headers(headers)\n self.prepare_cookies(cookies)\n self.prepare_body(data, files, json)\n self.prepare_auth(auth, url)\n\n # Note that prepare_auth must be last to enable authentication schemes\n # such as OAuth to work on a fully prepared request.\n\n # This MUST go after prepare_auth. 
Authenticators could add a hook\n self.prepare_hooks(hooks)"},{"attributeType":"null","col":12,"comment":"null","endLoc":65,"id":672,"name":"server_sock","nodeType":"Attribute","startLoc":65,"text":"self.server_sock"},{"col":0,"comment":"Simple handler that will take request and echo it back to requester.","endLoc":21,"header":"def echo_response_handler(sock)","id":673,"name":"echo_response_handler","nodeType":"Function","startLoc":12,"text":"def echo_response_handler(sock):\n \"\"\"Simple handler that will take request and echo it back to requester.\"\"\"\n request_content = consume_socket_content(sock, timeout=0.5)\n\n text_200 = (\n b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: %d\\r\\n\\r\\n'\n b'%s'\n ) % (len(request_content), request_content)\n sock.send(text_200)"},{"fileName":"models.py","filePath":"requests","id":674,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.models\n~~~~~~~~~~~~~~~\n\nThis module contains the primary objects that power Requests.\n\"\"\"\n\nimport datetime\nimport sys\n\n# Import encoding now, to avoid implicit import later.\n# Implicit import within threads may cause LookupError when standard library is in a ZIP,\n# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.\nimport encodings.idna\n\nfrom urllib3.fields import RequestField\nfrom urllib3.filepost import encode_multipart_formdata\nfrom urllib3.util import parse_url\nfrom urllib3.exceptions import (\n DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)\n\nfrom io import UnsupportedOperation\nfrom .hooks import default_hooks\nfrom .structures import CaseInsensitiveDict\n\nfrom .auth import HTTPBasicAuth\nfrom .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar\nfrom .exceptions import (\n HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,\n ContentDecodingError, ConnectionError, StreamConsumedError,\n InvalidJSONError)\nfrom .exceptions import JSONDecodeError as RequestsJSONDecodeError\nfrom ._internal_utils import to_native_string, unicode_is_ascii\nfrom .utils import (\n guess_filename, get_auth_from_url, requote_uri,\n stream_decode_response_unicode, to_key_val_list, parse_header_links,\n iter_slices, guess_json_utf, super_len, check_header_validity)\nfrom .compat import (\n Callable, Mapping,\n cookielib, urlunparse, urlsplit, urlencode, str, bytes,\n is_py2, chardet, builtin_str, basestring, JSONDecodeError)\nfrom .compat import json as complexjson\nfrom .status_codes import codes\n\n#: The set of HTTP status codes that indicate an automatically\n#: processable redirect.\nREDIRECT_STATI = (\n codes.moved, # 301\n codes.found, # 302\n codes.other, # 303\n codes.temporary_redirect, # 307\n codes.permanent_redirect, # 308\n)\n\nDEFAULT_REDIRECT_LIMIT = 30\nCONTENT_CHUNK_SIZE = 10 * 1024\nITER_CHUNK_SIZE = 512\n\n\nclass RequestEncodingMixin(object):\n @property\n def path_url(self):\n \"\"\"Build the path URL to use.\"\"\"\n\n url = []\n\n p = urlsplit(self.url)\n\n path = p.path\n if not path:\n path = '/'\n\n url.append(path)\n\n query = p.query\n if query:\n url.append('?')\n url.append(query)\n\n return ''.join(url)\n\n @staticmethod\n def _encode_params(data):\n \"\"\"Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. 
Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n \"\"\"\n\n if isinstance(data, (str, bytes)):\n return data\n elif hasattr(data, 'read'):\n return data\n elif hasattr(data, '__iter__'):\n result = []\n for k, vs in to_key_val_list(data):\n if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):\n vs = [vs]\n for v in vs:\n if v is not None:\n result.append(\n (k.encode('utf-8') if isinstance(k, str) else k,\n v.encode('utf-8') if isinstance(v, str) else v))\n return urlencode(result, doseq=True)\n else:\n return data\n\n @staticmethod\n def _encode_files(files, data):\n \"\"\"Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n \"\"\"\n if (not files):\n raise ValueError(\"Files must be provided.\")\n elif isinstance(data, basestring):\n raise ValueError(\"Data must not be a string.\")\n\n new_fields = []\n fields = to_key_val_list(data or {})\n files = to_key_val_list(files or {})\n\n for field, val in fields:\n if isinstance(val, basestring) or not hasattr(val, '__iter__'):\n val = [val]\n for v in val:\n if v is not None:\n # Don't call str() on bytestrings: in Py3 it all goes wrong.\n if not isinstance(v, bytes):\n v = str(v)\n\n new_fields.append(\n (field.decode('utf-8') if isinstance(field, bytes) else field,\n v.encode('utf-8') if isinstance(v, str) else v))\n\n for (k, v) in files:\n # support for explicit filename\n ft = None\n fh = None\n if isinstance(v, (tuple, list)):\n if len(v) == 2:\n fn, fp = v\n elif len(v) == 3:\n fn, fp, ft = v\n else:\n fn, fp, ft, fh = v\n else:\n fn = guess_filename(v) or k\n fp = v\n\n if isinstance(fp, (str, bytes, bytearray)):\n fdata = fp\n elif hasattr(fp, 'read'):\n fdata = fp.read()\n elif fp is None:\n continue\n else:\n fdata = fp\n\n rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)\n rf.make_multipart(content_type=ft)\n new_fields.append(rf)\n\n body, content_type = encode_multipart_formdata(new_fields)\n\n return body, content_type\n\n\nclass RequestHooksMixin(object):\n def register_hook(self, event, hook):\n \"\"\"Properly register a hook.\"\"\"\n\n if event not in self.hooks:\n raise ValueError('Unsupported event specified, with event name \"%s\"' % (event))\n\n if isinstance(hook, Callable):\n self.hooks[event].append(hook)\n elif hasattr(hook, '__iter__'):\n self.hooks[event].extend(h for h in hook if isinstance(h, Callable))\n\n def deregister_hook(self, event, hook):\n \"\"\"Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n \"\"\"\n\n try:\n self.hooks[event].remove(hook)\n return True\n except ValueError:\n return False\n\n\nclass Request(RequestHooksMixin):\n \"\"\"A user-created :class:`Request ` object.\n\n Used to prepare a :class:`PreparedRequest `, which is sent to the server.\n\n :param method: HTTP method to use.\n :param url: URL to send.\n :param headers: dictionary of headers to send.\n :param files: dictionary of {filename: fileobject} files to multipart upload.\n :param data: the body to attach to the request. 
If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param json: json for the body to attach to the request (if files or data is not specified).\n :param params: URL parameters to append to the URL. If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param auth: Auth handler or (user, pass) tuple.\n :param cookies: dictionary or CookieJar of cookies to attach to this request.\n :param hooks: dictionary of callback hooks, for internal usage.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> req.prepare()\n \n \"\"\"\n\n def __init__(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n\n # Default empty dicts for dict params.\n data = [] if data is None else data\n files = [] if files is None else files\n headers = {} if headers is None else headers\n params = {} if params is None else params\n hooks = {} if hooks is None else hooks\n\n self.hooks = default_hooks()\n for (k, v) in list(hooks.items()):\n self.register_hook(event=k, hook=v)\n\n self.method = method\n self.url = url\n self.headers = headers\n self.files = files\n self.data = data\n self.json = json\n self.params = params\n self.auth = auth\n self.cookies = cookies\n\n def __repr__(self):\n return '' % (self.method)\n\n def prepare(self):\n \"\"\"Constructs a :class:`PreparedRequest ` for transmission and returns it.\"\"\"\n p = PreparedRequest()\n p.prepare(\n method=self.method,\n url=self.url,\n headers=self.headers,\n files=self.files,\n data=self.data,\n json=self.json,\n params=self.params,\n auth=self.auth,\n cookies=self.cookies,\n hooks=self.hooks,\n )\n return p\n\n\nclass PreparedRequest(RequestEncodingMixin, RequestHooksMixin):\n \"\"\"The fully mutable :class:`PreparedRequest ` object,\n containing the exact bytes that will be sent to the server.\n\n Instances are generated from a :class:`Request ` object, and\n should not be instantiated manually; doing so may produce undesirable\n effects.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> r = req.prepare()\n >>> r\n \n\n >>> s = requests.Session()\n >>> s.send(r)\n \n \"\"\"\n\n def __init__(self):\n #: HTTP verb to send to the server.\n self.method = None\n #: HTTP URL to send the request to.\n self.url = None\n #: dictionary of HTTP headers.\n self.headers = None\n # The `CookieJar` used to create the Cookie header will be stored here\n # after prepare_cookies is called\n self._cookies = None\n #: request body to send to the server.\n self.body = None\n #: dictionary of callback hooks, for internal usage.\n self.hooks = default_hooks()\n #: integer denoting starting position of a readable file-like body.\n self._body_position = None\n\n def prepare(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n \"\"\"Prepares the entire request with the given parameters.\"\"\"\n\n self.prepare_method(method)\n self.prepare_url(url, params)\n self.prepare_headers(headers)\n self.prepare_cookies(cookies)\n self.prepare_body(data, files, json)\n self.prepare_auth(auth, url)\n\n # Note that prepare_auth must be last to enable authentication schemes\n # such as OAuth to work on a fully prepared request.\n\n # This MUST go after prepare_auth. 
Authenticators could add a hook\n self.prepare_hooks(hooks)\n\n def __repr__(self):\n return '' % (self.method)\n\n def copy(self):\n p = PreparedRequest()\n p.method = self.method\n p.url = self.url\n p.headers = self.headers.copy() if self.headers is not None else None\n p._cookies = _copy_cookie_jar(self._cookies)\n p.body = self.body\n p.hooks = self.hooks\n p._body_position = self._body_position\n return p\n\n def prepare_method(self, method):\n \"\"\"Prepares the given HTTP method.\"\"\"\n self.method = method\n if self.method is not None:\n self.method = to_native_string(self.method.upper())\n\n @staticmethod\n def _get_idna_encoded_host(host):\n import idna\n\n try:\n host = idna.encode(host, uts46=True).decode('utf-8')\n except idna.IDNAError:\n raise UnicodeError\n return host\n\n def prepare_url(self, url, params):\n \"\"\"Prepares the given HTTP URL.\"\"\"\n #: Accept objects that have string representations.\n #: We're unable to blindly call unicode/str functions\n #: as this will include the bytestring indicator (b'')\n #: on python 3.x.\n #: https://github.com/psf/requests/pull/2238\n if isinstance(url, bytes):\n url = url.decode('utf8')\n else:\n url = unicode(url) if is_py2 else str(url)\n\n # Remove leading whitespaces from url\n url = url.lstrip()\n\n # Don't do any URL preparation for non-HTTP schemes like `mailto`,\n # `data` etc to work around exceptions from `url_parse`, which\n # handles RFC 3986 only.\n if ':' in url and not url.lower().startswith('http'):\n self.url = url\n return\n\n # Support for unicode domain names and paths.\n try:\n scheme, auth, host, port, path, query, fragment = parse_url(url)\n except LocationParseError as e:\n raise InvalidURL(*e.args)\n\n if not scheme:\n error = (\"Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?\")\n error = error.format(to_native_string(url, 'utf8'))\n\n raise MissingSchema(error)\n\n if not host:\n raise InvalidURL(\"Invalid URL %r: No host supplied\" % url)\n\n # In general, we want to try IDNA encoding the hostname if the string contains\n # non-ASCII characters. This allows users to automatically get the correct IDNA\n # behaviour. 
For strings containing only ASCII characters, we need to also verify\n # it doesn't start with a wildcard (*), before allowing the unencoded hostname.\n if not unicode_is_ascii(host):\n try:\n host = self._get_idna_encoded_host(host)\n except UnicodeError:\n raise InvalidURL('URL has an invalid label.')\n elif host.startswith(u'*'):\n raise InvalidURL('URL has an invalid label.')\n\n # Carefully reconstruct the network location\n netloc = auth or ''\n if netloc:\n netloc += '@'\n netloc += host\n if port:\n netloc += ':' + str(port)\n\n # Bare domains aren't valid URLs.\n if not path:\n path = '/'\n\n if is_py2:\n if isinstance(scheme, str):\n scheme = scheme.encode('utf-8')\n if isinstance(netloc, str):\n netloc = netloc.encode('utf-8')\n if isinstance(path, str):\n path = path.encode('utf-8')\n if isinstance(query, str):\n query = query.encode('utf-8')\n if isinstance(fragment, str):\n fragment = fragment.encode('utf-8')\n\n if isinstance(params, (str, bytes)):\n params = to_native_string(params)\n\n enc_params = self._encode_params(params)\n if enc_params:\n if query:\n query = '%s&%s' % (query, enc_params)\n else:\n query = enc_params\n\n url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))\n self.url = url\n\n def prepare_headers(self, headers):\n \"\"\"Prepares the given HTTP headers.\"\"\"\n\n self.headers = CaseInsensitiveDict()\n if headers:\n for header in headers.items():\n # Raise exception on invalid header value.\n check_header_validity(header)\n name, value = header\n self.headers[to_native_string(name)] = value\n\n def prepare_body(self, data, files, json=None):\n \"\"\"Prepares the given HTTP body data.\"\"\"\n\n # Check if file, fo, generator, iterator.\n # If not, run through normal process.\n\n # Nottin' on you.\n body = None\n content_type = None\n\n if not data and json is not None:\n # urllib3 requires a bytes-like body. 
Python 2's json.dumps\n # provides this natively, but Python 3 gives a Unicode string.\n content_type = 'application/json'\n\n try:\n body = complexjson.dumps(json, allow_nan=False)\n except ValueError as ve:\n raise InvalidJSONError(ve, request=self)\n\n if not isinstance(body, bytes):\n body = body.encode('utf-8')\n\n is_stream = all([\n hasattr(data, '__iter__'),\n not isinstance(data, (basestring, list, tuple, Mapping))\n ])\n\n if is_stream:\n try:\n length = super_len(data)\n except (TypeError, AttributeError, UnsupportedOperation):\n length = None\n\n body = data\n\n if getattr(body, 'tell', None) is not None:\n # Record the current file position before reading.\n # This will allow us to rewind a file in the event\n # of a redirect.\n try:\n self._body_position = body.tell()\n except (IOError, OSError):\n # This differentiates from None, allowing us to catch\n # a failed `tell()` later when trying to rewind the body\n self._body_position = object()\n\n if files:\n raise NotImplementedError('Streamed bodies and files are mutually exclusive.')\n\n if length:\n self.headers['Content-Length'] = builtin_str(length)\n else:\n self.headers['Transfer-Encoding'] = 'chunked'\n else:\n # Multi-part file uploads.\n if files:\n (body, content_type) = self._encode_files(files, data)\n else:\n if data:\n body = self._encode_params(data)\n if isinstance(data, basestring) or hasattr(data, 'read'):\n content_type = None\n else:\n content_type = 'application/x-www-form-urlencoded'\n\n self.prepare_content_length(body)\n\n # Add content-type if it wasn't explicitly provided.\n if content_type and ('content-type' not in self.headers):\n self.headers['Content-Type'] = content_type\n\n self.body = body\n\n def prepare_content_length(self, body):\n \"\"\"Prepare Content-Length header based on request method and body\"\"\"\n if body is not None:\n length = super_len(body)\n if length:\n # If length exists, set it. Otherwise, we fallback\n # to Transfer-Encoding: chunked.\n self.headers['Content-Length'] = builtin_str(length)\n elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:\n # Set Content-Length to 0 for methods that can have a body\n # but don't provide one. (i.e. not GET or HEAD)\n self.headers['Content-Length'] = '0'\n\n def prepare_auth(self, auth, url=''):\n \"\"\"Prepares the given HTTP auth data.\"\"\"\n\n # If no Auth is explicitly provided, extract it from the URL first.\n if auth is None:\n url_auth = get_auth_from_url(self.url)\n auth = url_auth if any(url_auth) else None\n\n if auth:\n if isinstance(auth, tuple) and len(auth) == 2:\n # special-case basic HTTP auth\n auth = HTTPBasicAuth(*auth)\n\n # Allow auth to make its changes.\n r = auth(self)\n\n # Update self to reflect the auth changes.\n self.__dict__.update(r.__dict__)\n\n # Recompute Content-Length\n self.prepare_content_length(self.body)\n\n def prepare_cookies(self, cookies):\n \"\"\"Prepares the given HTTP cookie data.\n\n This function eventually generates a ``Cookie`` header from the\n given cookies using cookielib. Due to cookielib's design, the header\n will not be regenerated if it already exists, meaning this function\n can only be called once for the life of the\n :class:`PreparedRequest ` object. 
Any subsequent calls\n to ``prepare_cookies`` will have no actual effect, unless the \"Cookie\"\n header is removed beforehand.\n \"\"\"\n if isinstance(cookies, cookielib.CookieJar):\n self._cookies = cookies\n else:\n self._cookies = cookiejar_from_dict(cookies)\n\n cookie_header = get_cookie_header(self._cookies, self)\n if cookie_header is not None:\n self.headers['Cookie'] = cookie_header\n\n def prepare_hooks(self, hooks):\n \"\"\"Prepares the given hooks.\"\"\"\n # hooks can be passed as None to the prepare method and to this\n # method. To prevent iterating over None, simply use an empty list\n # if hooks is False-y\n hooks = hooks or []\n for event in hooks:\n self.register_hook(event, hooks[event])\n\n\nclass Response(object):\n \"\"\"The :class:`Response ` object, which contains a\n server's response to an HTTP request.\n \"\"\"\n\n __attrs__ = [\n '_content', 'status_code', 'headers', 'url', 'history',\n 'encoding', 'reason', 'cookies', 'elapsed', 'request'\n ]\n\n def __init__(self):\n self._content = False\n self._content_consumed = False\n self._next = None\n\n #: Integer Code of responded HTTP Status, e.g. 404 or 200.\n self.status_code = None\n\n #: Case-insensitive Dictionary of Response Headers.\n #: For example, ``headers['content-encoding']`` will return the\n #: value of a ``'Content-Encoding'`` response header.\n self.headers = CaseInsensitiveDict()\n\n #: File-like object representation of response (for advanced usage).\n #: Use of ``raw`` requires that ``stream=True`` be set on the request.\n #: This requirement does not apply for use internally to Requests.\n self.raw = None\n\n #: Final URL location of Response.\n self.url = None\n\n #: Encoding to decode with when accessing r.text.\n self.encoding = None\n\n #: A list of :class:`Response ` objects from\n #: the history of the Request. Any redirect responses will end\n #: up here. The list is sorted from the oldest to the most recent request.\n self.history = []\n\n #: Textual reason of responded HTTP Status, e.g. \"Not Found\" or \"OK\".\n self.reason = None\n\n #: A CookieJar of Cookies the server sent back.\n self.cookies = cookiejar_from_dict({})\n\n #: The amount of time elapsed between sending the request\n #: and the arrival of the response (as a timedelta).\n #: This property specifically measures the time taken between sending\n #: the first byte of the request and finishing parsing the headers. It\n #: is therefore unaffected by consuming the response content or the\n #: value of the ``stream`` keyword argument.\n self.elapsed = datetime.timedelta(0)\n\n #: The :class:`PreparedRequest ` object to which this\n #: is a response.\n self.request = None\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def __getstate__(self):\n # Consume everything; accessing the content attribute makes\n # sure the content has been fully read.\n if not self._content_consumed:\n self.content\n\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}\n\n def __setstate__(self, state):\n for name, value in state.items():\n setattr(self, name, value)\n\n # pickled objects do not have .raw\n setattr(self, '_content_consumed', True)\n setattr(self, 'raw', None)\n\n def __repr__(self):\n return '' % (self.status_code)\n\n def __bool__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. 
If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __nonzero__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __iter__(self):\n \"\"\"Allows you to use a response as an iterator.\"\"\"\n return self.iter_content(128)\n\n @property\n def ok(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400, False if not.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n try:\n self.raise_for_status()\n except HTTPError:\n return False\n return True\n\n @property\n def is_redirect(self):\n \"\"\"True if this Response is a well-formed HTTP redirect that could have\n been processed automatically (by :meth:`Session.resolve_redirects`).\n \"\"\"\n return ('location' in self.headers and self.status_code in REDIRECT_STATI)\n\n @property\n def is_permanent_redirect(self):\n \"\"\"True if this Response one of the permanent versions of redirect.\"\"\"\n return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))\n\n @property\n def next(self):\n \"\"\"Returns a PreparedRequest for the next request in a redirect chain, if there is one.\"\"\"\n return self._next\n\n @property\n def apparent_encoding(self):\n \"\"\"The apparent encoding, provided by the charset_normalizer or chardet libraries.\"\"\"\n return chardet.detect(self.content)['encoding']\n\n def iter_content(self, chunk_size=1, decode_unicode=False):\n \"\"\"Iterates over the response data. When stream=True is set on the\n request, this avoids reading the content at once into memory for\n large responses. The chunk size is the number of bytes it should\n read into memory. This is not necessarily the length of each item\n returned as decoding can take place.\n\n chunk_size must be of type int or None. A value of None will\n function differently depending on the value of `stream`.\n stream=True will read data as it arrives in whatever size the\n chunks are received. 
If stream=False, data is returned as\n a single chunk.\n\n If decode_unicode is True, content will be decoded using the best\n available encoding based on the response.\n \"\"\"\n\n def generate():\n # Special case for urllib3.\n if hasattr(self.raw, 'stream'):\n try:\n for chunk in self.raw.stream(chunk_size, decode_content=True):\n yield chunk\n except ProtocolError as e:\n raise ChunkedEncodingError(e)\n except DecodeError as e:\n raise ContentDecodingError(e)\n except ReadTimeoutError as e:\n raise ConnectionError(e)\n else:\n # Standard file-like object.\n while True:\n chunk = self.raw.read(chunk_size)\n if not chunk:\n break\n yield chunk\n\n self._content_consumed = True\n\n if self._content_consumed and isinstance(self._content, bool):\n raise StreamConsumedError()\n elif chunk_size is not None and not isinstance(chunk_size, int):\n raise TypeError(\"chunk_size must be an int, it is instead a %s.\" % type(chunk_size))\n # simulate reading small chunks of the content\n reused_chunks = iter_slices(self._content, chunk_size)\n\n stream_chunks = generate()\n\n chunks = reused_chunks if self._content_consumed else stream_chunks\n\n if decode_unicode:\n chunks = stream_decode_response_unicode(chunks, self)\n\n return chunks\n\n def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):\n \"\"\"Iterates over the response data, one line at a time. When\n stream=True is set on the request, this avoids reading the\n content at once into memory for large responses.\n\n .. note:: This method is not reentrant safe.\n \"\"\"\n\n pending = None\n\n for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):\n\n if pending is not None:\n chunk = pending + chunk\n\n if delimiter:\n lines = chunk.split(delimiter)\n else:\n lines = chunk.splitlines()\n\n if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:\n pending = lines.pop()\n else:\n pending = None\n\n for line in lines:\n yield line\n\n if pending is not None:\n yield pending\n\n @property\n def content(self):\n \"\"\"Content of the response, in bytes.\"\"\"\n\n if self._content is False:\n # Read the contents.\n if self._content_consumed:\n raise RuntimeError(\n 'The content for this response was already consumed')\n\n if self.status_code == 0 or self.raw is None:\n self._content = None\n else:\n self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''\n\n self._content_consumed = True\n # don't need to release the connection; that's been handled by urllib3\n # since we exhausted the data.\n return self._content\n\n @property\n def text(self):\n \"\"\"Content of the response, in unicode.\n\n If Response.encoding is None, encoding will be guessed using\n ``charset_normalizer`` or ``chardet``.\n\n The encoding of the response content is determined based solely on HTTP\n headers, following RFC 2616 to the letter. 
If you can take advantage of\n non-HTTP knowledge to make a better guess at the encoding, you should\n set ``r.encoding`` appropriately before accessing this property.\n \"\"\"\n\n # Try charset from content-type\n content = None\n encoding = self.encoding\n\n if not self.content:\n return str('')\n\n # Fallback to auto-detected encoding.\n if self.encoding is None:\n encoding = self.apparent_encoding\n\n # Decode unicode from given encoding.\n try:\n content = str(self.content, encoding, errors='replace')\n except (LookupError, TypeError):\n # A LookupError is raised if the encoding was not found which could\n # indicate a misspelling or similar mistake.\n #\n # A TypeError can be raised if encoding is None\n #\n # So we try blindly encoding.\n content = str(self.content, errors='replace')\n\n return content\n\n def json(self, **kwargs):\n r\"\"\"Returns the json-encoded content of a response, if any.\n\n :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes.\n :raises requests.exceptions.JSONDecodeError: If the response body does not\n contain valid json.\n \"\"\"\n\n if not self.encoding and self.content and len(self.content) > 3:\n # No encoding set. JSON RFC 4627 section 3 states we should expect\n # UTF-8, -16 or -32. Detect which one to use; If the detection or\n # decoding fails, fall back to `self.text` (using charset_normalizer to make\n # a best guess).\n encoding = guess_json_utf(self.content)\n if encoding is not None:\n try:\n return complexjson.loads(\n self.content.decode(encoding), **kwargs\n )\n except UnicodeDecodeError:\n # Wrong UTF codec detected; usually because it's not UTF-8\n # but some other 8-bit codec. This is an RFC violation,\n # and the server didn't bother to tell us what codec *was*\n # used.\n pass\n\n try:\n return complexjson.loads(self.text, **kwargs)\n except JSONDecodeError as e:\n # Catch JSON-related errors and raise as requests.JSONDecodeError\n # This aliases json.JSONDecodeError and simplejson.JSONDecodeError\n if is_py2: # e is a ValueError\n raise RequestsJSONDecodeError(e.message)\n else:\n raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)\n\n @property\n def links(self):\n \"\"\"Returns the parsed header links of the response, if any.\"\"\"\n\n header = self.headers.get('link')\n\n # l = MultiDict()\n l = {}\n\n if header:\n links = parse_header_links(header)\n\n for link in links:\n key = link.get('rel') or link.get('url')\n l[key] = link\n\n return l\n\n def raise_for_status(self):\n \"\"\"Raises :class:`HTTPError`, if one occurred.\"\"\"\n\n http_error_msg = ''\n if isinstance(self.reason, bytes):\n # We attempt to decode utf-8 first because some servers\n # choose to localize their reason strings. If the string\n # isn't utf-8, we fall back to iso-8859-1 for all other\n # encodings. (See PR #3538)\n try:\n reason = self.reason.decode('utf-8')\n except UnicodeDecodeError:\n reason = self.reason.decode('iso-8859-1')\n else:\n reason = self.reason\n\n if 400 <= self.status_code < 500:\n http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n elif 500 <= self.status_code < 600:\n http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n if http_error_msg:\n raise HTTPError(http_error_msg, response=self)\n\n def close(self):\n \"\"\"Releases the connection back to the pool. 
Once this method has been\n called the underlying ``raw`` object must not be accessed again.\n\n *Note: Should not normally need to be called explicitly.*\n \"\"\"\n if not self._content_consumed:\n self.raw.close()\n\n release_conn = getattr(self.raw, 'release_conn', None)\n if release_conn is not None:\n release_conn()\n"},{"className":"UnsupportedOperation","col":0,"comment":"null","endLoc":51,"id":675,"nodeType":"Class","startLoc":51,"text":"class UnsupportedOperation(OSError, ValueError): ..."},{"col":4,"comment":"Prepares the given HTTP method.","endLoc":348,"header":"def prepare_method(self, method)","id":676,"name":"prepare_method","nodeType":"Function","startLoc":344,"text":"def prepare_method(self, method):\n \"\"\"Prepares the given HTTP method.\"\"\"\n self.method = method\n if self.method is not None:\n self.method = to_native_string(self.method.upper())"},{"className":"OSError","col":0,"comment":"null","endLoc":1959,"id":677,"nodeType":"Class","startLoc":1952,"text":"class OSError(Exception):\n errno: int\n strerror: str\n # filename, filename2 are actually str | bytes | None\n filename: Any\n filename2: Any\n if sys.platform == \"win32\":\n winerror: int"},{"col":0,"comment":"can safely send generators","endLoc":36,"header":"def test_chunked_upload()","id":678,"name":"test_chunked_upload","nodeType":"Function","startLoc":24,"text":"def test_chunked_upload():\n \"\"\"can safely send generators\"\"\"\n close_server = threading.Event()\n server = Server.basic_response_server(wait_to_close_event=close_server)\n data = iter([b'a', b'b', b'c'])\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, stream=True)\n close_server.set() # release server block\n\n assert r.status_code == 200\n assert r.request.headers['Transfer-Encoding'] == 'chunked'"},{"col":0,"comment":"Sends a POST request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":117,"header":"def post(url, data=None, json=None, **kwargs)","id":679,"name":"post","nodeType":"Function","startLoc":105,"text":"def post(url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('post', url, data=data, json=json, **kwargs)"},{"col":4,"comment":"Dict-like itervalues() that returns an iterator of values of cookies\n from the jar.\n\n .. seealso:: iterkeys() and iteritems().\n ","endLoc":242,"header":"def itervalues(self)","id":680,"name":"itervalues","nodeType":"Function","startLoc":235,"text":"def itervalues(self):\n \"\"\"Dict-like itervalues() that returns an iterator of values of cookies\n from the jar.\n\n .. 
seealso:: iterkeys() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.value"},{"col":4,"comment":"Prepares the given HTTP URL.","endLoc":444,"header":"def prepare_url(self, url, params)","id":682,"name":"prepare_url","nodeType":"Function","startLoc":360,"text":"def prepare_url(self, url, params):\n \"\"\"Prepares the given HTTP URL.\"\"\"\n #: Accept objects that have string representations.\n #: We're unable to blindly call unicode/str functions\n #: as this will include the bytestring indicator (b'')\n #: on python 3.x.\n #: https://github.com/psf/requests/pull/2238\n if isinstance(url, bytes):\n url = url.decode('utf8')\n else:\n url = unicode(url) if is_py2 else str(url)\n\n # Remove leading whitespaces from url\n url = url.lstrip()\n\n # Don't do any URL preparation for non-HTTP schemes like `mailto`,\n # `data` etc to work around exceptions from `url_parse`, which\n # handles RFC 3986 only.\n if ':' in url and not url.lower().startswith('http'):\n self.url = url\n return\n\n # Support for unicode domain names and paths.\n try:\n scheme, auth, host, port, path, query, fragment = parse_url(url)\n except LocationParseError as e:\n raise InvalidURL(*e.args)\n\n if not scheme:\n error = (\"Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?\")\n error = error.format(to_native_string(url, 'utf8'))\n\n raise MissingSchema(error)\n\n if not host:\n raise InvalidURL(\"Invalid URL %r: No host supplied\" % url)\n\n # In general, we want to try IDNA encoding the hostname if the string contains\n # non-ASCII characters. This allows users to automatically get the correct IDNA\n # behaviour. For strings containing only ASCII characters, we need to also verify\n # it doesn't start with a wildcard (*), before allowing the unencoded hostname.\n if not unicode_is_ascii(host):\n try:\n host = self._get_idna_encoded_host(host)\n except UnicodeError:\n raise InvalidURL('URL has an invalid label.')\n elif host.startswith(u'*'):\n raise InvalidURL('URL has an invalid label.')\n\n # Carefully reconstruct the network location\n netloc = auth or ''\n if netloc:\n netloc += '@'\n netloc += host\n if port:\n netloc += ':' + str(port)\n\n # Bare domains aren't valid URLs.\n if not path:\n path = '/'\n\n if is_py2:\n if isinstance(scheme, str):\n scheme = scheme.encode('utf-8')\n if isinstance(netloc, str):\n netloc = netloc.encode('utf-8')\n if isinstance(path, str):\n path = path.encode('utf-8')\n if isinstance(query, str):\n query = query.encode('utf-8')\n if isinstance(fragment, str):\n fragment = fragment.encode('utf-8')\n\n if isinstance(params, (str, bytes)):\n params = to_native_string(params)\n\n enc_params = self._encode_params(params)\n if enc_params:\n if query:\n query = '%s&%s' % (query, enc_params)\n else:\n query = enc_params\n\n url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))\n self.url = url"},{"attributeType":"int","col":4,"comment":"null","endLoc":1953,"id":683,"name":"errno","nodeType":"Attribute","startLoc":1953,"text":"errno"},{"col":4,"comment":"Dict-like values() that returns a list of values of cookies from the\n jar.\n\n .. seealso:: keys() and items().\n ","endLoc":250,"header":"def values(self)","id":684,"name":"values","nodeType":"Function","startLoc":244,"text":"def values(self):\n \"\"\"Dict-like values() that returns a list of values of cookies from the\n jar.\n\n .. 
seealso:: keys() and items().\n \"\"\"\n return list(self.itervalues())"},{"col":0,"comment":"get a ChunkedEncodingError if the server returns a bad response","endLoc":58,"header":"def test_chunked_encoding_error()","id":685,"name":"test_chunked_encoding_error","nodeType":"Function","startLoc":39,"text":"def test_chunked_encoding_error():\n \"\"\"get a ChunkedEncodingError if the server returns a bad response\"\"\"\n\n def incomplete_chunked_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=0.5)\n\n # The server never ends the request and doesn't provide any valid chunks\n sock.send(b\"HTTP/1.1 200 OK\\r\\n\" +\n b\"Transfer-Encoding: chunked\\r\\n\")\n\n return request_content\n\n close_server = threading.Event()\n server = Server(incomplete_chunked_response_handler)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n with pytest.raises(requests.exceptions.ChunkedEncodingError):\n r = requests.get(url)\n close_server.set() # release server block"},{"attributeType":"str","col":4,"comment":"null","endLoc":1954,"id":686,"name":"strerror","nodeType":"Attribute","startLoc":1954,"text":"strerror"},{"attributeType":"null","col":4,"comment":"null","endLoc":1956,"id":687,"name":"filename","nodeType":"Attribute","startLoc":1956,"text":"filename"},{"attributeType":"StreamReaderWriter","col":40,"comment":"null","endLoc":65,"id":688,"name":"f","nodeType":"Attribute","startLoc":65,"text":"f"},{"col":0,"comment":"Sends a GET request.\n\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":75,"header":"def get(url, params=None, **kwargs)","id":689,"name":"get","nodeType":"Function","startLoc":64,"text":"def get(url, params=None, **kwargs):\n r\"\"\"Sends a GET request.\n\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary, list of tuples or bytes to send\n in the query string for the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('get', url, params=params, **kwargs)"},{"attributeType":"str","col":4,"comment":"null","endLoc":66,"id":690,"name":"readme","nodeType":"Attribute","startLoc":66,"text":"readme"},{"col":0,"comment":"Ensure we use only the specified Host header for chunked requests.","endLoc":76,"header":"def test_chunked_upload_uses_only_specified_host_header()","id":691,"name":"test_chunked_upload_uses_only_specified_host_header","nodeType":"Function","startLoc":61,"text":"def test_chunked_upload_uses_only_specified_host_header():\n \"\"\"Ensure we use only the specified Host header for chunked requests.\"\"\"\n close_server = threading.Event()\n server = Server(echo_response_handler, wait_to_close_event=close_server)\n\n data = iter([b'a', b'b', b'c'])\n custom_host = 'sample-host'\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, headers={'Host': custom_host}, stream=True)\n close_server.set() # release server block\n\n expected_header = b'Host: %s\\r\\n' % custom_host.encode('utf-8')\n assert expected_header in r.content\n assert r.content.count(b'Host: ') == 1"},{"col":4,"comment":"null","endLoc":60,"header":"def 
__init__(self)","id":692,"name":"__init__","nodeType":"Function","startLoc":59,"text":"def __init__(self):\n super(BaseAdapter, self).__init__()"},{"col":4,"comment":"Dict-like iteritems() that returns an iterator of name-value tuples\n from the jar.\n\n .. seealso:: iterkeys() and itervalues().\n ","endLoc":259,"header":"def iteritems(self)","id":693,"name":"iteritems","nodeType":"Function","startLoc":252,"text":"def iteritems(self):\n \"\"\"Dict-like iteritems() that returns an iterator of name-value tuples\n from the jar.\n\n .. seealso:: iterkeys() and itervalues().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name, cookie.value"},{"attributeType":"null","col":4,"comment":"null","endLoc":1957,"id":694,"name":"filename2","nodeType":"Attribute","startLoc":1957,"text":"filename2"},{"attributeType":"int","col":8,"comment":"null","endLoc":1959,"id":695,"name":"winerror","nodeType":"Attribute","startLoc":1959,"text":"winerror"},{"col":4,"comment":"null","endLoc":97,"header":"def __repr__(self)","id":696,"name":"__repr__","nodeType":"Function","startLoc":96,"text":"def __repr__(self):\n return '' % (self.name)"},{"col":4,"comment":"null","endLoc":102,"header":"def __getitem__(self, key)","id":697,"name":"__getitem__","nodeType":"Function","startLoc":99,"text":"def __getitem__(self, key):\n # We allow fall-through here, so values default to None\n\n return self.__dict__.get(key, None)"},{"className":"HTTPBasicAuth","col":0,"comment":"Attaches HTTP Basic Authentication to the given Request object.","endLoc":97,"id":698,"nodeType":"Class","startLoc":79,"text":"class HTTPBasicAuth(AuthBase):\n \"\"\"Attaches HTTP Basic Authentication to the given Request object.\"\"\"\n\n def __init__(self, username, password):\n self.username = username\n self.password = password\n\n def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])\n\n def __ne__(self, other):\n return not self == other\n\n def __call__(self, r):\n r.headers['Authorization'] = _basic_auth_str(self.username, self.password)\n return r"},{"className":"AuthBase","col":0,"comment":"Base class that all auth implementations derive from","endLoc":76,"id":699,"nodeType":"Class","startLoc":72,"text":"class AuthBase(object):\n \"\"\"Base class that all auth implementations derive from\"\"\"\n\n def __call__(self, r):\n raise NotImplementedError('Auth hooks must be callable.')"},{"col":4,"comment":"null","endLoc":76,"header":"def __call__(self, r)","id":700,"name":"__call__","nodeType":"Function","startLoc":75,"text":"def __call__(self, r):\n raise NotImplementedError('Auth hooks must be callable.')"},{"col":4,"comment":"Dict-like items() that returns a list of name-value tuples from the\n jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a\n vanilla python dict of key value pairs.\n\n .. seealso:: keys() and values().\n ","endLoc":268,"header":"def items(self)","id":701,"name":"items","nodeType":"Function","startLoc":261,"text":"def items(self):\n \"\"\"Dict-like items() that returns a list of name-value tuples from the\n jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a\n vanilla python dict of key value pairs.\n\n .. 
seealso:: keys() and values().\n \"\"\"\n return list(self.iteritems())"},{"col":4,"comment":"null","endLoc":105,"header":"def get(self, key, default=None)","id":702,"name":"get","nodeType":"Function","startLoc":104,"text":"def get(self, key, default=None):\n return self.__dict__.get(key, default)"},{"col":4,"comment":"Initializes a urllib3 PoolManager.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param connections: The number of urllib3 connection pools to cache.\n :param maxsize: The maximum number of connections to save in the pool.\n :param block: Block when no free connections are available.\n :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.\n ","endLoc":165,"header":"def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs)","id":703,"name":"init_poolmanager","nodeType":"Function","startLoc":147,"text":"def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):\n \"\"\"Initializes a urllib3 PoolManager.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param connections: The number of urllib3 connection pools to cache.\n :param maxsize: The maximum number of connections to save in the pool.\n :param block: Block when no free connections are available.\n :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.\n \"\"\"\n # save these values for pickling\n self._pool_connections = connections\n self._pool_maxsize = maxsize\n self._pool_block = block\n\n self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,\n block=block, strict=True, **pool_kwargs)"},{"col":4,"comment":"null","endLoc":84,"header":"def __init__(self, username, password)","id":704,"name":"__init__","nodeType":"Function","startLoc":82,"text":"def __init__(self, username, password):\n self.username = username\n self.password = password"},{"col":4,"comment":"null","endLoc":90,"header":"def __eq__(self, other)","id":705,"name":"__eq__","nodeType":"Function","startLoc":86,"text":"def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])"},{"attributeType":"null","col":8,"comment":"null","endLoc":93,"id":706,"name":"name","nodeType":"Attribute","startLoc":93,"text":"self.name"},{"col":0,"comment":"Ensure we don't omit all Host headers with chunked requests.","endLoc":94,"header":"def test_chunked_upload_doesnt_skip_host_header()","id":707,"name":"test_chunked_upload_doesnt_skip_host_header","nodeType":"Function","startLoc":79,"text":"def test_chunked_upload_doesnt_skip_host_header():\n \"\"\"Ensure we don't omit all Host headers with chunked requests.\"\"\"\n close_server = threading.Event()\n server = Server(echo_response_handler, wait_to_close_event=close_server)\n\n data = iter([b'a', b'b', b'c'])\n\n with server as (host, port):\n expected_host = '{}:{}'.format(host, port)\n url = 'http://{}:{}/'.format(host, port)\n r = requests.post(url, data=data, stream=True)\n close_server.set() # release server block\n\n expected_header = b'Host: %s\\r\\n' % expected_host.encode('utf-8')\n assert expected_header in r.content\n assert r.content.count(b'Host: ') == 1"},{"col":0,"comment":"Iterate over slices of a string.","endLoc":564,"header":"def iter_slices(string, 
slice_length)","id":708,"name":"iter_slices","nodeType":"Function","startLoc":557,"text":"def iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length"},{"col":4,"comment":"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n ","endLoc":748,"header":"def mount(self, prefix, adapter)","id":710,"name":"mount","nodeType":"Function","startLoc":739,"text":"def mount(self, prefix, adapter):\n \"\"\"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n \"\"\"\n self.adapters[prefix] = adapter\n keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]\n\n for key in keys_to_move:\n self.adapters[key] = self.adapters.pop(key)"},{"col":0,"comment":"null","endLoc":121,"header":"def _init()","id":711,"name":"_init","nodeType":"Function","startLoc":107,"text":"def _init():\n for code, titles in _codes.items():\n for title in titles:\n setattr(codes, title, code)\n if not title.startswith(('\\\\', '/')):\n setattr(codes, title.upper(), code)\n\n def doc(code):\n names = ', '.join('``%s``' % n for n in _codes[code])\n return '* %d: %s' % (code, names)\n\n global __doc__\n __doc__ = (__doc__ + '\\n' +\n '\\n'.join(doc(code) for code in sorted(_codes))\n if __doc__ is not None else None)"},{"col":0,"comment":"Stream decodes a iterator.","endLoc":554,"header":"def stream_decode_response_unicode(iterator, r)","id":712,"name":"stream_decode_response_unicode","nodeType":"Function","startLoc":539,"text":"def stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv"},{"col":4,"comment":"null","endLoc":358,"header":"@staticmethod\n def _get_idna_encoded_host(host)","id":713,"name":"_get_idna_encoded_host","nodeType":"Function","startLoc":350,"text":"@staticmethod\n def _get_idna_encoded_host(host):\n import idna\n\n try:\n host = idna.encode(host, uts46=True).decode('utf-8')\n except idna.IDNAError:\n raise UnicodeError\n return host"},{"col":4,"comment":"Utility method to list all the domains in the jar.","endLoc":276,"header":"def list_domains(self)","id":715,"name":"list_domains","nodeType":"Function","startLoc":270,"text":"def list_domains(self):\n \"\"\"Utility method to list all the domains in the jar.\"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain not in domains:\n domains.append(cookie.domain)\n return domains"},{"col":4,"comment":"null","endLoc":93,"header":"def __ne__(self, other)","id":717,"name":"__ne__","nodeType":"Function","startLoc":92,"text":"def __ne__(self, other):\n return not self == other"},{"col":4,"comment":"null","endLoc":97,"header":"def __call__(self, r)","id":718,"name":"__call__","nodeType":"Function","startLoc":95,"text":"def __call__(self, r):\n r.headers['Authorization'] = _basic_auth_str(self.username, self.password)\n return r"},{"fileName":"utils.py","filePath":"requests","id":719,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within 
Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\nfrom urllib3.util import make_headers\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '':\n if '.' not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # AttributeError is a surprising exception, seeing as how we've just checked\n # that `hasattr(o, 'fileno')`. 
It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r']', flags=re.I)\n pragma_re = re.compile(r']', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get('no_proxy')\n new_proxies = proxies.copy()\n\n bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)\n if trust_env and not bypass_proxy:\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: ; rel=front; type=\"image/jpeg\",; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. 
Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n"},{"col":4,"comment":"Utility method to list all the paths in the jar.","endLoc":284,"header":"def list_paths(self)","id":720,"name":"list_paths","nodeType":"Function","startLoc":278,"text":"def list_paths(self):\n \"\"\"Utility method to list all the paths in the jar.\"\"\"\n paths = []\n for cookie in iter(self):\n if cookie.path not in paths:\n paths.append(cookie.path)\n return paths"},{"attributeType":"null","col":8,"comment":"null","endLoc":84,"id":721,"name":"password","nodeType":"Attribute","startLoc":84,"text":"self.password"},{"col":4,"comment":"Returns True if there are multiple domains in the jar.\n Returns False otherwise.\n\n :rtype: bool\n ","endLoc":297,"header":"def multiple_domains(self)","id":723,"name":"multiple_domains","nodeType":"Function","startLoc":286,"text":"def multiple_domains(self):\n \"\"\"Returns True if there are multiple domains in the jar.\n Returns False otherwise.\n\n :rtype: bool\n \"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain is not None and cookie.domain in domains:\n return True\n domains.append(cookie.domain)\n return False # there is only one domain in jar"},{"col":4,"comment":"Returns True if 
:attr:`status_code` is less than 400, False if not.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n ","endLoc":715,"header":"@property\n def ok(self)","id":724,"name":"ok","nodeType":"Function","startLoc":702,"text":"@property\n def ok(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400, False if not.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n try:\n self.raise_for_status()\n except HTTPError:\n return False\n return True"},{"col":4,"comment":"Raises :class:`HTTPError`, if one occurred.","endLoc":960,"header":"def raise_for_status(self)","id":725,"name":"raise_for_status","nodeType":"Function","startLoc":937,"text":"def raise_for_status(self):\n \"\"\"Raises :class:`HTTPError`, if one occurred.\"\"\"\n\n http_error_msg = ''\n if isinstance(self.reason, bytes):\n # We attempt to decode utf-8 first because some servers\n # choose to localize their reason strings. If the string\n # isn't utf-8, we fall back to iso-8859-1 for all other\n # encodings. (See PR #3538)\n try:\n reason = self.reason.decode('utf-8')\n except UnicodeDecodeError:\n reason = self.reason.decode('iso-8859-1')\n else:\n reason = self.reason\n\n if 400 <= self.status_code < 500:\n http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n elif 500 <= self.status_code < 600:\n http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n if http_error_msg:\n raise HTTPError(http_error_msg, response=self)"},{"col":0,"comment":"Ensure we correctly throw an InvalidHeader error if multiple\n conflicting Content-Length headers are returned.\n ","endLoc":120,"header":"def test_conflicting_content_lengths()","id":726,"name":"test_conflicting_content_lengths","nodeType":"Function","startLoc":97,"text":"def test_conflicting_content_lengths():\n \"\"\"Ensure we correctly throw an InvalidHeader error if multiple\n conflicting Content-Length headers are returned.\n \"\"\"\n\n def multiple_content_length_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=0.5)\n\n sock.send(b\"HTTP/1.1 200 OK\\r\\n\" +\n b\"Content-Type: text/plain\\r\\n\" +\n b\"Content-Length: 16\\r\\n\" +\n b\"Content-Length: 32\\r\\n\\r\\n\" +\n b\"-- Bad Actor -- Original Content\\r\\n\")\n\n return request_content\n\n close_server = threading.Event()\n server = Server(multiple_content_length_response_handler)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n with pytest.raises(requests.exceptions.InvalidHeader):\n r = requests.get(url)\n close_server.set()"},{"attributeType":"null","col":8,"comment":"null","endLoc":83,"id":727,"name":"username","nodeType":"Attribute","startLoc":83,"text":"self.username"},{"col":4,"comment":"Takes as an argument an optional domain and path and returns a plain\n old Python dict of name-value pairs of cookies that meet the\n requirements.\n\n :rtype: dict\n ","endLoc":313,"header":"def get_dict(self, domain=None, path=None)","id":728,"name":"get_dict","nodeType":"Function","startLoc":299,"text":"def get_dict(self, domain=None, 
path=None):\n \"\"\"Takes as an argument an optional domain and path and returns a plain\n old Python dict of name-value pairs of cookies that meet the\n requirements.\n\n :rtype: dict\n \"\"\"\n dictionary = {}\n for cookie in iter(self):\n if (\n (domain is None or cookie.domain == domain) and\n (path is None or cookie.path == path)\n ):\n dictionary[cookie.name] = cookie.value\n return dictionary"},{"col":4,"comment":"True if this Response is a well-formed HTTP redirect that could have\n been processed automatically (by :meth:`Session.resolve_redirects`).\n ","endLoc":722,"header":"@property\n def is_redirect(self)","id":729,"name":"is_redirect","nodeType":"Function","startLoc":717,"text":"@property\n def is_redirect(self):\n \"\"\"True if this Response is a well-formed HTTP redirect that could have\n been processed automatically (by :meth:`Session.resolve_redirects`).\n \"\"\"\n return ('location' in self.headers and self.status_code in REDIRECT_STATI)"},{"col":4,"comment":"True if this Response one of the permanent versions of redirect.","endLoc":727,"header":"@property\n def is_permanent_redirect(self)","id":730,"name":"is_permanent_redirect","nodeType":"Function","startLoc":724,"text":"@property\n def is_permanent_redirect(self):\n \"\"\"True if this Response one of the permanent versions of redirect.\"\"\"\n return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))"},{"col":4,"comment":"Returns a PreparedRequest for the next request in a redirect chain, if there is one.","endLoc":732,"header":"@property\n def next(self)","id":731,"name":"next","nodeType":"Function","startLoc":729,"text":"@property\n def next(self):\n \"\"\"Returns a PreparedRequest for the next request in a redirect chain, if there is one.\"\"\"\n return self._next"},{"col":4,"comment":"The apparent encoding, provided by the charset_normalizer or chardet libraries.","endLoc":737,"header":"@property\n def apparent_encoding(self)","id":732,"name":"apparent_encoding","nodeType":"Function","startLoc":734,"text":"@property\n def apparent_encoding(self):\n \"\"\"The apparent encoding, provided by the charset_normalizer or chardet libraries.\"\"\"\n return chardet.detect(self.content)['encoding']"},{"col":4,"comment":"null","endLoc":319,"header":"def __contains__(self, name)","id":733,"name":"__contains__","nodeType":"Function","startLoc":315,"text":"def __contains__(self, name):\n try:\n return super(RequestsCookieJar, self).__contains__(name)\n except CookieConflictError:\n return True"},{"col":0,"comment":"\n Produce an appropriate Cookie header string to be sent with `request`, or None.\n\n :rtype: str\n ","endLoc":143,"header":"def get_cookie_header(jar, request)","id":734,"name":"get_cookie_header","nodeType":"Function","startLoc":135,"text":"def get_cookie_header(jar, request):\n \"\"\"\n Produce an appropriate Cookie header string to be sent with `request`, or None.\n\n :rtype: str\n \"\"\"\n r = MockRequest(request)\n jar.add_cookie_header(r)\n return r.get_new_headers().get('Cookie')"},{"col":4,"comment":"Iterates over the response data, one line at a time. When\n stream=True is set on the request, this avoids reading the\n content at once into memory for large responses.\n\n .. 
note:: This method is not reentrant safe.\n ","endLoc":823,"header":"def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None)","id":735,"name":"iter_lines","nodeType":"Function","startLoc":794,"text":"def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):\n \"\"\"Iterates over the response data, one line at a time. When\n stream=True is set on the request, this avoids reading the\n content at once into memory for large responses.\n\n .. note:: This method is not reentrant safe.\n \"\"\"\n\n pending = None\n\n for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):\n\n if pending is not None:\n chunk = pending + chunk\n\n if delimiter:\n lines = chunk.split(delimiter)\n else:\n lines = chunk.splitlines()\n\n if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:\n pending = lines.pop()\n else:\n pending = None\n\n for line in lines:\n yield line\n\n if pending is not None:\n yield pending"},{"col":0,"comment":"Ensure we correctly reset num_401_calls after a successful digest auth,\n followed by a 302 redirect to another digest auth prompt.\n\n See https://github.com/psf/requests/issues/1979.\n ","endLoc":185,"header":"def test_digestauth_401_count_reset_on_redirect()","id":736,"name":"test_digestauth_401_count_reset_on_redirect","nodeType":"Function","startLoc":123,"text":"def test_digestauth_401_count_reset_on_redirect():\n \"\"\"Ensure we correctly reset num_401_calls after a successful digest auth,\n followed by a 302 redirect to another digest auth prompt.\n\n See https://github.com/psf/requests/issues/1979.\n \"\"\"\n text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n text_302 = (b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /\\r\\n\\r\\n')\n\n text_200 = (b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: 0\\r\\n\\r\\n')\n\n expected_digest = (b'Authorization: Digest username=\"user\", '\n b'realm=\"me@kennethreitz.com\", '\n b'nonce=\"6bf5d6e4da1ce66918800195d6b9130d\", uri=\"/\"')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_response_handler(sock):\n # Respond to initial GET with a challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_401)\n\n # Verify we receive an Authorization header in response, then redirect.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n sock.send(text_302)\n\n # Verify Authorization isn't sent to the redirected host,\n # then send another challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert b'Authorization:' not in request_content\n sock.send(text_401)\n\n # Verify Authorization is sent correctly again, and return 200 OK.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n sock.send(text_200)\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server succeeded in authenticating.\n assert r.status_code == 200\n # Verify Authorization was sent in final request.\n 
assert 'Authorization' in r.request.headers\n assert r.request.headers['Authorization'].startswith('Digest ')\n # Verify redirect happened as we expected.\n assert r.history[0].status_code == 302\n close_server.set()"},{"col":4,"comment":"Content of the response, in bytes.","endLoc":843,"header":"@property\n def content(self)","id":737,"name":"content","nodeType":"Function","startLoc":825,"text":"@property\n def content(self):\n \"\"\"Content of the response, in bytes.\"\"\"\n\n if self._content is False:\n # Read the contents.\n if self._content_consumed:\n raise RuntimeError(\n 'The content for this response was already consumed')\n\n if self.status_code == 0 or self.raw is None:\n self._content = None\n else:\n self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''\n\n self._content_consumed = True\n # don't need to release the connection; that's been handled by urllib3\n # since we exhausted the data.\n return self._content"},{"className":"InvalidURL","col":0,"comment":"The URL provided was somehow invalid.","endLoc":91,"id":738,"nodeType":"Class","startLoc":90,"text":"class InvalidURL(RequestException, ValueError):\n \"\"\"The URL provided was somehow invalid.\"\"\""},{"col":4,"comment":"Content of the response, in unicode.\n\n If Response.encoding is None, encoding will be guessed using\n ``charset_normalizer`` or ``chardet``.\n\n The encoding of the response content is determined based solely on HTTP\n headers, following RFC 2616 to the letter. If you can take advantage of\n non-HTTP knowledge to make a better guess at the encoding, you should\n set ``r.encoding`` appropriately before accessing this property.\n ","endLoc":881,"header":"@property\n def text(self)","id":739,"name":"text","nodeType":"Function","startLoc":845,"text":"@property\n def text(self):\n \"\"\"Content of the response, in unicode.\n\n If Response.encoding is None, encoding will be guessed using\n ``charset_normalizer`` or ``chardet``.\n\n The encoding of the response content is determined based solely on HTTP\n headers, following RFC 2616 to the letter. 
If you can take advantage of\n non-HTTP knowledge to make a better guess at the encoding, you should\n set ``r.encoding`` appropriately before accessing this property.\n \"\"\"\n\n # Try charset from content-type\n content = None\n encoding = self.encoding\n\n if not self.content:\n return str('')\n\n # Fallback to auto-detected encoding.\n if self.encoding is None:\n encoding = self.apparent_encoding\n\n # Decode unicode from given encoding.\n try:\n content = str(self.content, encoding, errors='replace')\n except (LookupError, TypeError):\n # A LookupError is raised if the encoding was not found which could\n # indicate a misspelling or similar mistake.\n #\n # A TypeError can be raised if encoding is None\n #\n # So we try blindly encoding.\n content = str(self.content, errors='replace')\n\n return content"},{"col":4,"comment":"Constructs a :class:`Request `, prepares it and sends it.\n Returns :class:`Response ` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. 
Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n ","endLoc":531,"header":"def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None)","id":740,"name":"request","nodeType":"Function","startLoc":457,"text":"def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None):\n \"\"\"Constructs a :class:`Request `, prepares it and sends it.\n Returns :class:`Response ` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. 
Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n \"\"\"\n # Create the Request.\n req = Request(\n method=method.upper(),\n url=url,\n headers=headers,\n files=files,\n data=data or {},\n json=json,\n params=params or {},\n auth=auth,\n cookies=cookies,\n hooks=hooks,\n )\n prep = self.prepare_request(req)\n\n proxies = proxies or {}\n\n settings = self.merge_environment_settings(\n prep.url, proxies, stream, verify, cert\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': timeout,\n 'allow_redirects': allow_redirects,\n }\n send_kwargs.update(settings)\n resp = self.send(prep, **send_kwargs)\n\n return resp"},{"col":0,"comment":"null","endLoc":438,"header":"def _copy_cookie_jar(jar)","id":741,"name":"_copy_cookie_jar","nodeType":"Function","startLoc":426,"text":"def _copy_cookie_jar(jar):\n if jar is None:\n return None\n\n if hasattr(jar, 'copy'):\n # We're dealing with an instance of RequestsCookieJar\n return jar.copy()\n # We're dealing with a generic CookieJar instance\n new_jar = copy.copy(jar)\n new_jar.clear()\n for cookie in jar:\n new_jar.set_cookie(copy.copy(cookie))\n return new_jar"},{"className":"RequestException","col":0,"comment":"There was an ambiguous exception that occurred while handling your\n request.\n ","endLoc":27,"id":742,"nodeType":"Class","startLoc":14,"text":"class RequestException(IOError):\n \"\"\"There was an ambiguous exception that occurred while handling your\n request.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize RequestException with `request` and `response` objects.\"\"\"\n response = kwargs.pop('response', None)\n self.response = response\n self.request = kwargs.pop('request', None)\n if (response is not None and not self.request and\n hasattr(response, 'request')):\n self.request = self.response.request\n super(RequestException, self).__init__(*args, **kwargs)"},{"attributeType":"null","col":12,"comment":"null","endLoc":26,"id":743,"name":"request","nodeType":"Attribute","startLoc":26,"text":"self.request"},{"col":0,"comment":"Ensure we correctly respond to a 401 challenge once, and then\n stop responding if challenged again.\n ","endLoc":231,"header":"def test_digestauth_401_only_sent_once()","id":744,"name":"test_digestauth_401_only_sent_once","nodeType":"Function","startLoc":188,"text":"def test_digestauth_401_only_sent_once():\n \"\"\"Ensure we correctly respond to a 401 challenge once, and then\n stop responding if challenged again.\n \"\"\"\n text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n expected_digest = (b'Authorization: Digest username=\"user\", '\n b'realm=\"me@kennethreitz.com\", '\n b'nonce=\"6bf5d6e4da1ce66918800195d6b9130d\", uri=\"/\"')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_failed_response_handler(sock):\n # Respond to initial GET with a challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_401)\n\n # Verify we receive an Authorization header in response, then\n # challenge again.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert expected_digest in request_content\n 
sock.send(text_401)\n\n # Verify the client didn't respond to second challenge.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content == b''\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_failed_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server didn't authenticate us.\n assert r.status_code == 401\n assert r.history[0].status_code == 401\n close_server.set()"},{"col":0,"comment":"","endLoc":3,"header":"setup.py#","id":745,"name":"","nodeType":"Function","startLoc":3,"text":"here = os.path.abspath(os.path.dirname(__file__))\n\nif sys.argv[-1] == 'publish':\n os.system('python setup.py sdist bdist_wheel')\n os.system('twine upload dist/*')\n sys.exit()\n\npackages = ['requests']\n\nrequires = [\n 'charset_normalizer~=2.0.0; python_version >= \"3\"',\n 'chardet>=3.0.2,<5; python_version < \"3\"',\n 'idna>=2.5,<3; python_version < \"3\"',\n 'idna>=2.5,<4; python_version >= \"3\"',\n 'urllib3>=1.21.1,<1.27',\n 'certifi>=2017.4.17'\n\n]\n\ntest_requirements = [\n 'pytest-httpbin==0.0.7',\n 'pytest-cov',\n 'pytest-mock',\n 'pytest-xdist',\n 'PySocks>=1.5.6, !=1.5.7',\n 'pytest>=3'\n]\n\nabout = {}\n\nwith open(os.path.join(here, 'requests', '__version__.py'), 'r', 'utf-8') as f:\n exec(f.read(), about)\n\nwith open('README.md', 'r', 'utf-8') as f:\n readme = f.read()\n\nsetup(\n name=about['__title__'],\n version=about['__version__'],\n description=about['__description__'],\n long_description=readme,\n long_description_content_type='text/markdown',\n author=about['__author__'],\n author_email=about['__author_email__'],\n url=about['__url__'],\n packages=packages,\n package_data={'': ['LICENSE', 'NOTICE']},\n package_dir={'requests': 'requests'},\n include_package_data=True,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*\",\n install_requires=requires,\n license=about['__license__'],\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy'\n ],\n cmdclass={'test': PyTest},\n tests_require=test_requirements,\n extras_require={\n 'security': [],\n 'socks': ['PySocks>=1.5.6, !=1.5.7'],\n 'socks:sys_platform == \"win32\" and python_version == \"2.7\"': ['win_inet_pton'],\n 'use_chardet_on_py3': ['chardet>=3.0.2,<5']\n },\n project_urls={\n 'Documentation': 'https://requests.readthedocs.io',\n 'Source': 'https://github.com/psf/requests',\n },\n)"},{"col":4,"comment":"Returns the json-encoded content of a response, if any.\n\n :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes.\n :raises requests.exceptions.JSONDecodeError: If the response body does not\n contain valid json.\n ","endLoc":917,"header":"def json(self, **kwargs)","id":750,"name":"json","nodeType":"Function","startLoc":883,"text":"def json(self, 
**kwargs):\n r\"\"\"Returns the json-encoded content of a response, if any.\n\n :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes.\n :raises requests.exceptions.JSONDecodeError: If the response body does not\n contain valid json.\n \"\"\"\n\n if not self.encoding and self.content and len(self.content) > 3:\n # No encoding set. JSON RFC 4627 section 3 states we should expect\n # UTF-8, -16 or -32. Detect which one to use; If the detection or\n # decoding fails, fall back to `self.text` (using charset_normalizer to make\n # a best guess).\n encoding = guess_json_utf(self.content)\n if encoding is not None:\n try:\n return complexjson.loads(\n self.content.decode(encoding), **kwargs\n )\n except UnicodeDecodeError:\n # Wrong UTF codec detected; usually because it's not UTF-8\n # but some other 8-bit codec. This is an RFC violation,\n # and the server didn't bother to tell us what codec *was*\n # used.\n pass\n\n try:\n return complexjson.loads(self.text, **kwargs)\n except JSONDecodeError as e:\n # Catch JSON-related errors and raise as requests.JSONDecodeError\n # This aliases json.JSONDecodeError and simplejson.JSONDecodeError\n if is_py2: # e is a ValueError\n raise RequestsJSONDecodeError(e.message)\n else:\n raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)"},{"col":4,"comment":"Dict-like __getitem__() for compatibility with client code. Throws\n exception if there are more than one cookie with name. In that case,\n use the more explicit get() method instead.\n\n .. warning:: operation is O(n), not O(1).\n ","endLoc":328,"header":"def __getitem__(self, name)","id":751,"name":"__getitem__","nodeType":"Function","startLoc":321,"text":"def __getitem__(self, name):\n \"\"\"Dict-like __getitem__() for compatibility with client code. Throws\n exception if there are more than one cookie with name. In that case,\n use the more explicit get() method instead.\n\n .. warning:: operation is O(n), not O(1).\n \"\"\"\n return self._find_no_duplicates(name)"},{"col":0,"comment":"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n ","endLoc":651,"header":"def requote_uri(uri)","id":752,"name":"requote_uri","nodeType":"Function","startLoc":632,"text":"def requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)"},{"col":4,"comment":"Dict-like __setitem__ for compatibility with client code. Throws\n exception if there is already a cookie of that name in the jar. In that\n case, use the more explicit set() method instead.\n ","endLoc":335,"header":"def __setitem__(self, name, value)","id":753,"name":"__setitem__","nodeType":"Function","startLoc":330,"text":"def __setitem__(self, name, value):\n \"\"\"Dict-like __setitem__ for compatibility with client code. 
Throws\n exception if there is already a cookie of that name in the jar. In that\n case, use the more explicit set() method instead.\n \"\"\"\n self.set(name, value)"},{"col":4,"comment":"Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s\n ``remove_cookie_by_name()``.\n ","endLoc":341,"header":"def __delitem__(self, name)","id":754,"name":"__delitem__","nodeType":"Function","startLoc":337,"text":"def __delitem__(self, name):\n \"\"\"Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s\n ``remove_cookie_by_name()``.\n \"\"\"\n remove_cookie_by_name(self, name)"},{"col":4,"comment":"Updates this jar with cookies from another CookieJar or dict-like","endLoc":354,"header":"def update(self, other)","id":755,"name":"update","nodeType":"Function","startLoc":348,"text":"def update(self, other):\n \"\"\"Updates this jar with cookies from another CookieJar or dict-like\"\"\"\n if isinstance(other, cookielib.CookieJar):\n for cookie in other:\n self.set_cookie(copy.copy(cookie))\n else:\n super(RequestsCookieJar, self).update(other)"},{"col":4,"comment":"Constructs a :class:`PreparedRequest ` for\n transmission and returns it. The :class:`PreparedRequest` has settings\n merged from the :class:`Request ` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n ","endLoc":455,"header":"def prepare_request(self, request)","id":756,"name":"prepare_request","nodeType":"Function","startLoc":417,"text":"def prepare_request(self, request):\n \"\"\"Constructs a :class:`PreparedRequest ` for\n transmission and returns it. The :class:`PreparedRequest` has settings\n merged from the :class:`Request ` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n \"\"\"\n cookies = request.cookies or {}\n\n # Bootstrap CookieJar.\n if not isinstance(cookies, cookielib.CookieJar):\n cookies = cookiejar_from_dict(cookies)\n\n # Merge with session cookies\n merged_cookies = merge_cookies(\n merge_cookies(RequestsCookieJar(), self.cookies), cookies)\n\n # Set environment's basic authentication if not explicitly set.\n auth = request.auth\n if self.trust_env and not auth and not self.auth:\n auth = get_netrc_auth(request.url)\n\n p = PreparedRequest()\n p.prepare(\n method=request.method.upper(),\n url=request.url,\n files=request.files,\n data=request.data,\n json=request.json,\n headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),\n params=merge_setting(request.params, self.params),\n auth=merge_setting(auth, self.auth),\n cookies=merged_cookies,\n hooks=merge_hooks(request.hooks, self.hooks),\n )\n return p"},{"col":0,"comment":"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n ","endLoc":629,"header":"def unquote_unreserved(uri)","id":757,"name":"unquote_unreserved","nodeType":"Function","startLoc":608,"text":"def unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)"},{"attributeType":"null","col":8,"comment":"null","endLoc":22,"id":758,"name":"response","nodeType":"Attribute","startLoc":22,"text":"self.response"},{"className":"InvalidHeader","col":0,"comment":"The header value provided was somehow invalid.","endLoc":95,"id":760,"nodeType":"Class","startLoc":94,"text":"class InvalidHeader(RequestException, ValueError):\n \"\"\"The header value provided was somehow invalid.\"\"\""},{"attributeType":"null","col":8,"comment":"null","endLoc":43,"id":762,"name":"_store","nodeType":"Attribute","startLoc":43,"text":"self._store"},{"className":"TestCaseInsensitiveDict","col":0,"comment":"null","endLoc":49,"id":763,"nodeType":"Class","startLoc":8,"text":"class TestCaseInsensitiveDict:\n\n @pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"CaseInsensitiveDict instance with \"Accept\" header.\"\"\"\n self.case_insensitive_dict = CaseInsensitiveDict()\n self.case_insensitive_dict['Accept'] = 'application/json'\n\n def test_list(self):\n assert list(self.case_insensitive_dict) == ['Accept']\n\n possible_keys = pytest.mark.parametrize('key', ('accept', 'ACCEPT', 'aCcEpT', 'Accept'))\n\n @possible_keys\n def test_getitem(self, key):\n assert self.case_insensitive_dict[key] == 'application/json'\n\n @possible_keys\n def test_delitem(self, key):\n del self.case_insensitive_dict[key]\n assert key not in self.case_insensitive_dict\n\n def test_lower_items(self):\n assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')]\n\n def test_repr(self):\n assert repr(self.case_insensitive_dict) == \"{'Accept': 'application/json'}\"\n\n def test_copy(self):\n copy = self.case_insensitive_dict.copy()\n assert copy is not self.case_insensitive_dict\n assert copy == self.case_insensitive_dict\n\n @pytest.mark.parametrize(\n 'other, result', (\n ({'AccePT': 'application/json'}, True),\n ({}, False),\n (None, False)\n )\n )\n def test_instance_equality(self, other, result):\n assert (self.case_insensitive_dict == other) is result"},{"className":"FileModeWarning","col":0,"comment":"A file was opened in text mode, but Requests determined its binary length.","endLoc":129,"id":764,"nodeType":"Class","startLoc":128,"text":"class FileModeWarning(RequestsWarning, DeprecationWarning):\n \"\"\"A file was opened in text mode, but Requests determined its binary length.\"\"\""},{"col":4,"comment":"CaseInsensitiveDict instance with \"Accept\" header.","endLoc":14,"header":"@pytest.fixture(autouse=True)\n def setup(self)","id":765,"name":"setup","nodeType":"Function","startLoc":10,"text":"@pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"CaseInsensitiveDict instance with \"Accept\" header.\"\"\"\n self.case_insensitive_dict = CaseInsensitiveDict()\n self.case_insensitive_dict['Accept'] = 'application/json'"},{"col":0,"comment":"Ensure we only send digestauth on 4xx challenges.\n\n See https://github.com/psf/requests/issues/3772.\n ","endLoc":268,"header":"def test_digestauth_only_on_4xx()","id":766,"name":"test_digestauth_only_on_4xx","nodeType":"Function","startLoc":234,"text":"def 
test_digestauth_only_on_4xx():\n \"\"\"Ensure we only send digestauth on 4xx challenges.\n\n See https://github.com/psf/requests/issues/3772.\n \"\"\"\n text_200_chal = (b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'WWW-Authenticate: Digest nonce=\"6bf5d6e4da1ce66918800195d6b9130d\"'\n b', opaque=\"372825293d1c26955496c80ed6426e9e\", '\n b'realm=\"me@kennethreitz.com\", qop=auth\\r\\n\\r\\n')\n\n auth = requests.auth.HTTPDigestAuth('user', 'pass')\n\n def digest_response_handler(sock):\n # Respond to GET with a 200 containing www-authenticate header.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content.startswith(b\"GET / HTTP/1.1\")\n sock.send(text_200_chal)\n\n # Verify the client didn't respond with auth.\n request_content = consume_socket_content(sock, timeout=0.5)\n assert request_content == b''\n\n return request_content\n\n close_server = threading.Event()\n server = Server(digest_response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/'.format(host, port)\n r = requests.get(url, auth=auth)\n # Verify server didn't receive auth from us.\n assert r.status_code == 200\n assert len(r.history) == 0\n close_server.set()"},{"col":4,"comment":"null","endLoc":17,"header":"def test_list(self)","id":767,"name":"test_list","nodeType":"Function","startLoc":16,"text":"def test_list(self):\n assert list(self.case_insensitive_dict) == ['Accept']"},{"className":"DeprecationWarning","col":0,"comment":"null","endLoc":2069,"id":768,"nodeType":"Class","startLoc":2069,"text":"class DeprecationWarning(Warning): ..."},{"className":"HTTPError","col":0,"comment":"An HTTP error occurred.","endLoc":39,"id":769,"nodeType":"Class","startLoc":38,"text":"class HTTPError(RequestException):\n \"\"\"An HTTP error occurred.\"\"\""},{"className":"MissingSchema","col":0,"comment":"The URL schema (e.g. http or https) is missing.","endLoc":83,"id":770,"nodeType":"Class","startLoc":82,"text":"class MissingSchema(RequestException, ValueError):\n \"\"\"The URL schema (e.g. 
http or https) is missing.\"\"\""},{"className":"UnrewindableBodyError","col":0,"comment":"Requests encountered an error when trying to rewind a body.","endLoc":119,"id":771,"nodeType":"Class","startLoc":118,"text":"class UnrewindableBodyError(RequestException):\n \"\"\"Requests encountered an error when trying to rewind a body.\"\"\""},{"className":"ChunkedEncodingError","col":0,"comment":"The server declared chunked encoding but sent an invalid chunk.","endLoc":103,"id":772,"nodeType":"Class","startLoc":102,"text":"class ChunkedEncodingError(RequestException):\n \"\"\"The server declared chunked encoding but sent an invalid chunk.\"\"\""},{"className":"ContentDecodingError","col":0,"comment":"Failed to decode response content.","endLoc":107,"id":773,"nodeType":"Class","startLoc":106,"text":"class ContentDecodingError(RequestException, BaseHTTPError):\n \"\"\"Failed to decode response content.\"\"\""},{"className":"ConnectionError","col":0,"comment":"A Connection error occurred.","endLoc":43,"id":774,"nodeType":"Class","startLoc":42,"text":"class ConnectionError(RequestException):\n \"\"\"A Connection error occurred.\"\"\""},{"className":"StreamConsumedError","col":0,"comment":"The content for this response was already consumed.","endLoc":111,"id":775,"nodeType":"Class","startLoc":110,"text":"class StreamConsumedError(RequestException, TypeError):\n \"\"\"The content for this response was already consumed.\"\"\""},{"className":"TypeError","col":0,"comment":"null","endLoc":2010,"id":776,"nodeType":"Class","startLoc":2010,"text":"class TypeError(Exception): ..."},{"className":"InvalidJSONError","col":0,"comment":"A JSON error occurred.","endLoc":31,"id":777,"nodeType":"Class","startLoc":30,"text":"class InvalidJSONError(RequestException):\n \"\"\"A JSON error occurred.\"\"\""},{"col":0,"comment":"\n :rtype: str\n ","endLoc":957,"header":"def guess_json_utf(data)","id":778,"name":"guess_json_utf","nodeType":"Function","startLoc":928,"text":"def guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None"},{"className":"JSONDecodeError","col":0,"comment":"Couldn't decode the text into json","endLoc":35,"id":779,"nodeType":"Class","startLoc":34,"text":"class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):\n \"\"\"Couldn't decode the text into json\"\"\""},{"col":4,"comment":"null","endLoc":91,"header":"def proxy_bypass_registry(host)","id":780,"name":"proxy_bypass_registry","nodeType":"Function","startLoc":54,"text":"def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '':\n if '.' 
not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False"},{"col":0,"comment":"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n ","endLoc":990,"header":"def get_auth_from_url(url)","id":782,"name":"get_auth_from_url","nodeType":"Function","startLoc":977,"text":"def get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth"},{"col":4,"comment":"Requests uses this method internally to get cookie values.\n\n If there are conflicting cookies, _find arbitrarily chooses one.\n See _find_no_duplicates if you want an exception thrown if there are\n conflicting cookies.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :return: cookie.value\n ","endLoc":374,"header":"def _find(self, name, domain=None, path=None)","id":783,"name":"_find","nodeType":"Function","startLoc":356,"text":"def _find(self, name, domain=None, path=None):\n \"\"\"Requests uses this method internally to get cookie values.\n\n If there are conflicting cookies, _find arbitrarily chooses one.\n See _find_no_duplicates if you want an exception thrown if there are\n conflicting cookies.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :return: cookie.value\n \"\"\"\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n return cookie.value\n\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))"},{"col":4,"comment":"Unlike a normal CookieJar, this class is pickleable.","endLoc":406,"header":"def __getstate__(self)","id":794,"name":"__getstate__","nodeType":"Function","startLoc":401,"text":"def __getstate__(self):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n state = self.__dict__.copy()\n # remove the unpickleable RLock object\n state.pop('_cookies_lock')\n return state"},{"col":0,"comment":"Return a list of parsed link headers proxies.\n\n i.e. Link: ; rel=front; type=\"image/jpeg\",; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n ","endLoc":919,"header":"def parse_header_links(value)","id":795,"name":"parse_header_links","nodeType":"Function","startLoc":885,"text":"def parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. 
Link: ; rel=front; type=\"image/jpeg\",; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links"},{"col":4,"comment":"null","endLoc":23,"header":"@possible_keys\n def test_getitem(self, key)","id":796,"name":"test_getitem","nodeType":"Function","startLoc":21,"text":"@possible_keys\n def test_getitem(self, key):\n assert self.case_insensitive_dict[key] == 'application/json'"},{"col":4,"comment":"null","endLoc":28,"header":"@possible_keys\n def test_delitem(self, key)","id":797,"name":"test_delitem","nodeType":"Function","startLoc":25,"text":"@possible_keys\n def test_delitem(self, key):\n del self.case_insensitive_dict[key]\n assert key not in self.case_insensitive_dict"},{"col":4,"comment":"null","endLoc":31,"header":"def test_lower_items(self)","id":798,"name":"test_lower_items","nodeType":"Function","startLoc":30,"text":"def test_lower_items(self):\n assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')]"},{"id":801,"name":"hacks.html","nodeType":"TextFile","path":"docs/_templates","text":"\n\n\n\n\n\n\n\n\n
\n
\n\n\n"},{"id":802,"name":"docs/dev","nodeType":"Package"},{"id":803,"name":"authors.rst","nodeType":"TextFile","path":"docs/dev","text":"Authors\n=======\n\n.. include:: ../../AUTHORS.rst\n"},{"id":804,"name":"authentication.rst","nodeType":"TextFile","path":"docs/user","text":".. _authentication:\n\nAuthentication\n==============\n\nThis document discusses using various kinds of authentication with Requests.\n\nMany web services require authentication, and there are many different types.\nBelow, we outline various forms of authentication available in Requests, from\nthe simple to the complex.\n\n\nBasic Authentication\n--------------------\n\nMany web services that require authentication accept HTTP Basic Auth. This is\nthe simplest kind, and Requests supports it straight out of the box.\n\nMaking requests with HTTP Basic Auth is very simple::\n\n >>> from requests.auth import HTTPBasicAuth\n >>> requests.get('https://api.github.com/user', auth=HTTPBasicAuth('user', 'pass'))\n \n\nIn fact, HTTP Basic Auth is so common that Requests provides a handy shorthand\nfor using it::\n\n >>> requests.get('https://api.github.com/user', auth=('user', 'pass'))\n \n\nProviding the credentials in a tuple like this is exactly the same as the\n``HTTPBasicAuth`` example above.\n\n\nnetrc Authentication\n~~~~~~~~~~~~~~~~~~~~\n\nIf no authentication method is given with the ``auth`` argument, Requests will\nattempt to get the authentication credentials for the URL's hostname from the\nuser's netrc file. The netrc file overrides raw HTTP authentication headers\nset with `headers=`.\n\nIf credentials for the hostname are found, the request is sent with HTTP Basic\nAuth.\n\n\nDigest Authentication\n---------------------\n\nAnother very popular form of HTTP Authentication is Digest Authentication,\nand Requests supports this out of the box as well::\n\n >>> from requests.auth import HTTPDigestAuth\n >>> url = 'https://httpbin.org/digest-auth/auth/user/pass'\n >>> requests.get(url, auth=HTTPDigestAuth('user', 'pass'))\n \n\n\nOAuth 1 Authentication\n----------------------\n\nA common form of authentication for several web APIs is OAuth. The ``requests-oauthlib``\nlibrary allows Requests users to easily make OAuth 1 authenticated requests::\n\n >>> import requests\n >>> from requests_oauthlib import OAuth1\n\n >>> url = 'https://api.twitter.com/1.1/account/verify_credentials.json'\n >>> auth = OAuth1('YOUR_APP_KEY', 'YOUR_APP_SECRET',\n ... 'USER_OAUTH_TOKEN', 'USER_OAUTH_TOKEN_SECRET')\n\n >>> requests.get(url, auth=auth)\n \n\nFor more information on how to OAuth flow works, please see the official `OAuth`_ website.\nFor examples and documentation on requests-oauthlib, please see the `requests_oauthlib`_\nrepository on GitHub\n\nOAuth 2 and OpenID Connect Authentication\n-----------------------------------------\n\nThe ``requests-oauthlib`` library also handles OAuth 2, the authentication mechanism\nunderpinning OpenID Connect. See the `requests-oauthlib OAuth2 documentation`_ for\ndetails of the various OAuth 2 credential management flows:\n\n* `Web Application Flow`_\n* `Mobile Application Flow`_\n* `Legacy Application Flow`_\n* `Backend Application Flow`_\n\nOther Authentication\n--------------------\n\nRequests is designed to allow other forms of authentication to be easily and\nquickly plugged in. Members of the open-source community frequently write\nauthentication handlers for more complicated or less commonly-used forms of\nauthentication. 
Some of the best have been brought together under the\n`Requests organization`_, including:\n\n- Kerberos_\n- NTLM_\n\nIf you want to use any of these forms of authentication, go straight to their\nGitHub page and follow the instructions.\n\n\nNew Forms of Authentication\n---------------------------\n\nIf you can't find a good implementation of the form of authentication you\nwant, you can implement it yourself. Requests makes it easy to add your own\nforms of authentication.\n\nTo do so, subclass :class:`AuthBase ` and implement the\n``__call__()`` method::\n\n >>> import requests\n >>> class MyAuth(requests.auth.AuthBase):\n ... def __call__(self, r):\n ... # Implement my authentication\n ... return r\n ...\n >>> url = 'https://httpbin.org/get'\n >>> requests.get(url, auth=MyAuth())\n \n\nWhen an authentication handler is attached to a request,\nit is called during request setup. The ``__call__`` method must therefore do\nwhatever is required to make the authentication work. Some forms of\nauthentication will additionally add hooks to provide further functionality.\n\nFurther examples can be found under the `Requests organization`_ and in the\n``auth.py`` file.\n\n.. _OAuth: https://oauth.net/\n.. _requests_oauthlib: https://github.com/requests/requests-oauthlib\n.. _requests-oauthlib OAuth2 documentation: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html\n.. _Web Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#web-application-flow\n.. _Mobile Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#mobile-application-flow\n.. _Legacy Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#legacy-application-flow\n.. _Backend Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow\n.. _Kerberos: https://github.com/requests/requests-kerberos\n.. _NTLM: https://github.com/requests/requests-ntlm\n.. _Requests organization: https://github.com/requests\n"},{"id":805,"name":"CONTRIBUTING.md","nodeType":"TextFile","path":".github","text":"# Contribution Guidelines\n\nBefore opening any issues or proposing any pull requests, please read\nour [Contributor's Guide](https://requests.readthedocs.io/en/latest/dev/contributing/).\n\nTo get the greatest chance of helpful responses, please also observe the\nfollowing additional notes.\n\n## Questions\n\nThe GitHub issue tracker is for *bug reports* and *feature requests*. Please do\nnot use it to ask questions about how to use Requests. These questions should\ninstead be directed to [Stack Overflow](https://stackoverflow.com/). Make sure\nthat your question is tagged with the `python-requests` tag when asking it on\nStack Overflow, to ensure that it is answered promptly and accurately.\n\n## Good Bug Reports\n\nPlease be aware of the following things when filing bug reports:\n\n1. Avoid raising duplicate issues. *Please* use the GitHub issue search feature\n to check whether your bug report or feature request has been mentioned in\n the past. Duplicate bug reports and feature requests are a huge maintenance\n burden on the limited resources of the project. If it is clear from your\n report that you would have struggled to find the original, that's ok, but\n if searching for a selection of words in your issue title would have found\n the duplicate then the issue will likely be closed extremely abruptly.\n2. 
When filing bug reports about exceptions or tracebacks, please include the\n *complete* traceback. Partial tracebacks, or just the exception text, are\n not helpful. Issues that do not contain complete tracebacks may be closed\n without warning.\n3. Make sure you provide a suitable amount of information to work with. This\n means you should provide:\n\n - Guidance on **how to reproduce the issue**. Ideally, this should be a\n *small* code sample that can be run immediately by the maintainers.\n Failing that, let us know what you're doing, how often it happens, what\n environment you're using, etc. Be thorough: it prevents us needing to ask\n further questions.\n - Tell us **what you expected to happen**. When we run your example code,\n what are we expecting to happen? What does \"success\" look like for your\n code?\n - Tell us **what actually happens**. It's not helpful for you to say \"it\n doesn't work\" or \"it fails\". Tell us *how* it fails: do you get an\n exception? A hang? A non-200 status code? How was the actual result\n different from your expected result?\n - Tell us **what version of Requests you're using**, and\n **how you installed it**. Different versions of Requests behave\n differently and have different bugs, and some distributors of Requests\n ship patches on top of the code we supply.\n\n If you do not provide all of these things, it will take us much longer to\n fix your problem. If we ask you to clarify these and you never respond, we\n will close your issue without fixing it.\n"},{"id":806,"name":"tests/testserver","nodeType":"Package"},{"fileName":"__init__.py","filePath":"tests/testserver","id":807,"nodeType":"File","text":""},{"col":4,"comment":"null","endLoc":34,"header":"def test_repr(self)","id":808,"name":"test_repr","nodeType":"Function","startLoc":33,"text":"def test_repr(self):\n assert repr(self.case_insensitive_dict) == \"{'Accept': 'application/json'}\""},{"fileName":"conftest.py","filePath":"tests","id":810,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\ntry:\n from http.server import HTTPServer\n from http.server import SimpleHTTPRequestHandler\nexcept ImportError:\n from BaseHTTPServer import HTTPServer\n from SimpleHTTPServer import SimpleHTTPRequestHandler \n\nimport ssl\nimport tempfile\nimport threading\n\nimport pytest\nfrom requests.compat import urljoin\n\n\ndef prepare_url(value):\n # Issue #1483: Make sure the URL always has a trailing slash\n httpbin_url = value.url.rstrip('/') + '/'\n\n def inner(*suffix):\n return urljoin(httpbin_url, '/'.join(suffix))\n\n return inner\n\n\n@pytest.fixture\ndef httpbin(httpbin):\n return prepare_url(httpbin)\n\n\n@pytest.fixture\ndef httpbin_secure(httpbin_secure):\n return prepare_url(httpbin_secure)\n\n\n@pytest.fixture\ndef nosan_server(tmp_path_factory):\n # delay importing until the fixture in order to make it possible\n # to deselect the test via command-line when trustme is not available\n import trustme\n\n tmpdir = tmp_path_factory.mktemp(\"certs\")\n ca = trustme.CA()\n # only commonName, no subjectAltName\n server_cert = ca.issue_cert(common_name=u\"localhost\")\n ca_bundle = str(tmpdir / \"ca.pem\")\n ca.cert_pem.write_to_path(ca_bundle)\n\n context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)\n server_cert.configure_cert(context)\n server = HTTPServer((\"localhost\", 0), SimpleHTTPRequestHandler)\n server.socket = context.wrap_socket(server.socket, server_side=True)\n server_thread = threading.Thread(target=server.serve_forever)\n server_thread.start()\n\n yield \"localhost\", 
server.server_address[1], ca_bundle\n\n server.shutdown()\n server_thread.join()\n"},{"col":4,"comment":"null","endLoc":39,"header":"def test_copy(self)","id":812,"name":"test_copy","nodeType":"Function","startLoc":36,"text":"def test_copy(self):\n copy = self.case_insensitive_dict.copy()\n assert copy is not self.case_insensitive_dict\n assert copy == self.case_insensitive_dict"},{"col":4,"comment":"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n ","endLoc":102,"header":"def proxy_bypass(host)","id":814,"name":"proxy_bypass","nodeType":"Function","startLoc":93,"text":"def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)"},{"col":4,"comment":"Returns the parsed header links of the response, if any.","endLoc":935,"header":"@property\n def links(self)","id":815,"name":"links","nodeType":"Function","startLoc":919,"text":"@property\n def links(self):\n \"\"\"Returns the parsed header links of the response, if any.\"\"\"\n\n header = self.headers.get('link')\n\n # l = MultiDict()\n l = {}\n\n if header:\n links = parse_header_links(header)\n\n for link in links:\n key = link.get('rel') or link.get('url')\n l[key] = link\n\n return l"},{"col":4,"comment":"Unlike a normal CookieJar, this class is pickleable.","endLoc":412,"header":"def __setstate__(self, state)","id":816,"name":"__setstate__","nodeType":"Function","startLoc":408,"text":"def __setstate__(self, state):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n self.__dict__.update(state)\n if '_cookies_lock' not in self.__dict__:\n self._cookies_lock = threading.RLock()"},{"col":0,"comment":"null","endLoc":301,"header":"@pytest.mark.parametrize(\"var,scheme\", _proxy_combos)\ndef test_use_proxy_from_environment(httpbin, var, scheme)","id":817,"name":"test_use_proxy_from_environment","nodeType":"Function","startLoc":285,"text":"@pytest.mark.parametrize(\"var,scheme\", _proxy_combos)\ndef test_use_proxy_from_environment(httpbin, var, scheme):\n url = \"{}://httpbin.org\".format(scheme)\n fake_proxy = Server() # do nothing with the requests; just close the socket\n with fake_proxy as (host, port):\n proxy_url = \"socks5://{}:{}\".format(host, port)\n kwargs = {var: proxy_url}\n with override_environ(**kwargs):\n # fake proxy's lack of response will cause a ConnectionError\n with pytest.raises(requests.exceptions.ConnectionError):\n requests.get(url)\n\n # the fake proxy received a request\n assert len(fake_proxy.handler_results) == 1\n\n # it had actual content (not checking for SOCKS protocol for now)\n assert len(fake_proxy.handler_results[0]) > 0"},{"col":0,"comment":"Returns the Requests tuple auth for a given url from netrc.","endLoc":232,"header":"def get_netrc_auth(url, raise_errors=False)","id":818,"name":"get_netrc_auth","nodeType":"Function","startLoc":178,"text":"def get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = 
os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass"},{"col":4,"comment":"null","endLoc":49,"header":"@pytest.mark.parametrize(\n 'other, result', (\n ({'AccePT': 'application/json'}, True),\n ({}, False),\n (None, False)\n )\n )\n def test_instance_equality(self, other, result)","id":819,"name":"test_instance_equality","nodeType":"Function","startLoc":41,"text":"@pytest.mark.parametrize(\n 'other, result', (\n ({'AccePT': 'application/json'}, True),\n ({}, False),\n (None, False)\n )\n )\n def test_instance_equality(self, other, result):\n assert (self.case_insensitive_dict == other) is result"},{"attributeType":"null","col":4,"comment":"null","endLoc":19,"id":820,"name":"possible_keys","nodeType":"Attribute","startLoc":19,"text":"possible_keys"},{"attributeType":"CaseInsensitiveDict","col":8,"comment":"null","endLoc":13,"id":821,"name":"case_insensitive_dict","nodeType":"Attribute","startLoc":13,"text":"self.case_insensitive_dict"},{"className":"TestLookupDict","col":0,"comment":"null","endLoc":76,"id":822,"nodeType":"Class","startLoc":52,"text":"class TestLookupDict:\n\n @pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"LookupDict instance with \"bad_gateway\" attribute.\"\"\"\n self.lookup_dict = LookupDict('test')\n self.lookup_dict.bad_gateway = 502\n\n def test_repr(self):\n assert repr(self.lookup_dict) == \"\"\n\n get_item_parameters = pytest.mark.parametrize(\n 'key, value', (\n ('bad_gateway', 502),\n ('not_a_key', None)\n )\n )\n\n @get_item_parameters\n def test_getitem(self, key, value):\n assert self.lookup_dict[key] == value\n\n @get_item_parameters\n def test_get(self, key, value):\n assert self.lookup_dict.get(key) == value"},{"col":4,"comment":"LookupDict instance with \"bad_gateway\" attribute.","endLoc":58,"header":"@pytest.fixture(autouse=True)\n def setup(self)","id":823,"name":"setup","nodeType":"Function","startLoc":54,"text":"@pytest.fixture(autouse=True)\n def setup(self):\n \"\"\"LookupDict instance with \"bad_gateway\" attribute.\"\"\"\n self.lookup_dict = LookupDict('test')\n self.lookup_dict.bad_gateway = 502"},{"col":0,"comment":"null","endLoc":175,"header":"def super_len(o)","id":824,"name":"super_len","nodeType":"Function","startLoc":114,"text":"def super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # 
AttributeError is a surprising exception, seeing as how we've just checked\n # that `hasattr(o, 'fileno')`. It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)"},{"className":"HTTPServer","col":0,"comment":"null","endLoc":14,"id":825,"nodeType":"Class","startLoc":12,"text":"class HTTPServer(socketserver.TCPServer):\n server_name: str\n server_port: int"},{"col":0,"comment":"Returns an internal sequence dictionary update.","endLoc":111,"header":"def dict_to_sequence(d)","id":826,"name":"dict_to_sequence","nodeType":"Function","startLoc":105,"text":"def dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d"},{"col":4,"comment":"null","endLoc":61,"header":"def test_repr(self)","id":827,"name":"test_repr","nodeType":"Function","startLoc":60,"text":"def test_repr(self):\n assert repr(self.lookup_dict) == \"\""},{"className":"TCPServer","col":0,"comment":"null","endLoc":82,"id":828,"nodeType":"Class","startLoc":72,"text":"class TCPServer(BaseServer):\n if sys.version_info >= (3, 11):\n allow_reuse_port: bool\n server_address: _AfInetAddress\n def __init__(\n self,\n server_address: _AfInetAddress,\n RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler],\n bind_and_activate: bool = True,\n ) -> None: ...\n def get_request(self) -> tuple[_socket, _RetAddress]: ..."},{"attributeType":"null","col":4,"comment":"null","endLoc":601,"id":829,"name":"__attrs__","nodeType":"Attribute","startLoc":601,"text":"__attrs__"},{"attributeType":"null","col":8,"comment":"null","endLoc":617,"id":830,"name":"headers","nodeType":"Attribute","startLoc":617,"text":"self.headers"},{"attributeType":"null","col":8,"comment":"null","endLoc":636,"id":831,"name":"reason","nodeType":"Attribute","startLoc":636,"text":"self.reason"},{"attributeType":"null","col":8,"comment":"null","endLoc":651,"id":832,"name":"request","nodeType":"Attribute","startLoc":651,"text":"self.request"},{"col":4,"comment":"Return a copy of this RequestsCookieJar.","endLoc":419,"header":"def 
copy(self)","id":833,"name":"copy","nodeType":"Function","startLoc":414,"text":"def copy(self):\n \"\"\"Return a copy of this RequestsCookieJar.\"\"\"\n new_cj = RequestsCookieJar()\n new_cj.set_policy(self.get_policy())\n new_cj.update(self)\n return new_cj"},{"col":4,"comment":"null","endLoc":72,"header":"@get_item_parameters\n def test_getitem(self, key, value)","id":834,"name":"test_getitem","nodeType":"Function","startLoc":70,"text":"@get_item_parameters\n def test_getitem(self, key, value):\n assert self.lookup_dict[key] == value"},{"attributeType":"null","col":8,"comment":"null","endLoc":612,"id":835,"name":"status_code","nodeType":"Attribute","startLoc":612,"text":"self.status_code"},{"col":4,"comment":"null","endLoc":76,"header":"@get_item_parameters\n def test_get(self, key, value)","id":836,"name":"test_get","nodeType":"Function","startLoc":74,"text":"@get_item_parameters\n def test_get(self, key, value):\n assert self.lookup_dict.get(key) == value"},{"attributeType":"null","col":8,"comment":"null","endLoc":622,"id":837,"name":"raw","nodeType":"Attribute","startLoc":622,"text":"self.raw"},{"attributeType":"null","col":8,"comment":"null","endLoc":633,"id":838,"name":"history","nodeType":"Attribute","startLoc":633,"text":"self.history"},{"attributeType":"null","col":8,"comment":"null","endLoc":628,"id":839,"name":"encoding","nodeType":"Attribute","startLoc":628,"text":"self.encoding"},{"attributeType":"null","col":8,"comment":"null","endLoc":625,"id":840,"name":"url","nodeType":"Attribute","startLoc":625,"text":"self.url"},{"attributeType":"null","col":8,"comment":"null","endLoc":639,"id":841,"name":"cookies","nodeType":"Attribute","startLoc":639,"text":"self.cookies"},{"col":0,"comment":"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n ","endLoc":277,"header":"def extract_zipped_paths(path)","id":842,"name":"extract_zipped_paths","nodeType":"Function","startLoc":243,"text":"def extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return 
extracted_path"},{"attributeType":"null","col":8,"comment":"null","endLoc":647,"id":844,"name":"elapsed","nodeType":"Attribute","startLoc":647,"text":"self.elapsed"},{"className":"BaseServer","col":0,"comment":"null","endLoc":70,"id":845,"nodeType":"Class","startLoc":39,"text":"class BaseServer:\n address_family: int\n server_address: _Address\n socket: _socket\n allow_reuse_address: bool\n request_queue_size: int\n socket_type: int\n timeout: float | None\n RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler]\n def __init__(\n self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler]\n ) -> None: ...\n def fileno(self) -> int: ...\n def handle_request(self) -> None: ...\n def serve_forever(self, poll_interval: float = 0.5) -> None: ...\n def shutdown(self) -> None: ...\n def server_close(self) -> None: ...\n def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ...\n def get_request(self) -> tuple[Any, Any]: ...\n def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ...\n def handle_timeout(self) -> None: ...\n def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ...\n def server_activate(self) -> None: ...\n def server_bind(self) -> None: ...\n def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ...\n def __enter__(self) -> Self: ...\n def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None\n ) -> None: ...\n def service_actions(self) -> None: ...\n def shutdown_request(self, request: _RequestType) -> None: ... # undocumented\n def close_request(self, request: _RequestType) -> None: ... 
# undocumented"},{"col":4,"comment":"null","endLoc":50,"header":"def __init__(\n self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler]\n ) -> None","id":846,"name":"__init__","nodeType":"Function","startLoc":48,"text":"def __init__(\n self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler]\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":51,"header":"def fileno(self) -> int","id":847,"name":"fileno","nodeType":"Function","startLoc":51,"text":"def fileno(self) -> int: ..."},{"col":4,"comment":"null","endLoc":52,"header":"def handle_request(self) -> None","id":848,"name":"handle_request","nodeType":"Function","startLoc":52,"text":"def handle_request(self) -> None: ..."},{"col":4,"comment":"null","endLoc":53,"header":"def serve_forever(self, poll_interval: float = 0.5) -> None","id":849,"name":"serve_forever","nodeType":"Function","startLoc":53,"text":"def serve_forever(self, poll_interval: float = 0.5) -> None: ..."},{"col":4,"comment":"null","endLoc":54,"header":"def shutdown(self) -> None","id":850,"name":"shutdown","nodeType":"Function","startLoc":54,"text":"def shutdown(self) -> None: ..."},{"col":4,"comment":"null","endLoc":55,"header":"def server_close(self) -> None","id":851,"name":"server_close","nodeType":"Function","startLoc":55,"text":"def server_close(self) -> None: ..."},{"col":4,"comment":"null","endLoc":56,"header":"def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None","id":852,"name":"finish_request","nodeType":"Function","startLoc":56,"text":"def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ..."},{"col":4,"comment":"null","endLoc":57,"header":"def get_request(self) -> tuple[Any, Any]","id":853,"name":"get_request","nodeType":"Function","startLoc":57,"text":"def get_request(self) -> tuple[Any, Any]: ..."},{"col":4,"comment":"null","endLoc":58,"header":"def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None","id":854,"name":"handle_error","nodeType":"Function","startLoc":58,"text":"def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ..."},{"col":4,"comment":"null","endLoc":59,"header":"def handle_timeout(self) -> None","id":855,"name":"handle_timeout","nodeType":"Function","startLoc":59,"text":"def handle_timeout(self) -> None: ..."},{"col":4,"comment":"null","endLoc":60,"header":"def process_request(self, request: _RequestType, client_address: _RetAddress) -> None","id":856,"name":"process_request","nodeType":"Function","startLoc":60,"text":"def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ..."},{"col":4,"comment":"null","endLoc":61,"header":"def server_activate(self) -> None","id":857,"name":"server_activate","nodeType":"Function","startLoc":61,"text":"def server_activate(self) -> None: ..."},{"col":4,"comment":"null","endLoc":62,"header":"def server_bind(self) -> None","id":858,"name":"server_bind","nodeType":"Function","startLoc":62,"text":"def server_bind(self) -> None: ..."},{"col":4,"comment":"null","endLoc":63,"header":"def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool","id":859,"name":"verify_request","nodeType":"Function","startLoc":63,"text":"def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ..."},{"col":4,"comment":"null","endLoc":64,"header":"def __enter__(self) -> 
Self","id":860,"name":"__enter__","nodeType":"Function","startLoc":64,"text":"def __enter__(self) -> Self: ..."},{"col":4,"comment":"null","endLoc":67,"header":"def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None\n ) -> None","id":861,"name":"__exit__","nodeType":"Function","startLoc":65,"text":"def __exit__(\n self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":68,"header":"def service_actions(self) -> None","id":862,"name":"service_actions","nodeType":"Function","startLoc":68,"text":"def service_actions(self) -> None: ..."},{"col":4,"comment":"null","endLoc":69,"header":"def shutdown_request(self, request: _RequestType) -> None","id":863,"name":"shutdown_request","nodeType":"Function","startLoc":69,"text":"def shutdown_request(self, request: _RequestType) -> None: ... # undocumented"},{"col":4,"comment":"null","endLoc":70,"header":"def close_request(self, request: _RequestType) -> None","id":864,"name":"close_request","nodeType":"Function","startLoc":70,"text":"def close_request(self, request: _RequestType) -> None: ... # undocumented"},{"attributeType":"int","col":4,"comment":"null","endLoc":40,"id":865,"name":"address_family","nodeType":"Attribute","startLoc":40,"text":"address_family"},{"col":4,"comment":"Return the CookiePolicy instance used.","endLoc":423,"header":"def get_policy(self)","id":866,"name":"get_policy","nodeType":"Function","startLoc":421,"text":"def get_policy(self):\n \"\"\"Return the CookiePolicy instance used.\"\"\"\n return self._policy"},{"attributeType":"null","col":12,"comment":"null","endLoc":412,"id":867,"name":"_cookies_lock","nodeType":"Attribute","startLoc":412,"text":"self._cookies_lock"},{"attributeType":"null","col":8,"comment":"null","endLoc":607,"id":868,"name":"_content","nodeType":"Attribute","startLoc":607,"text":"self._content"},{"attributeType":"null","col":8,"comment":"null","endLoc":609,"id":873,"name":"_next","nodeType":"Attribute","startLoc":609,"text":"self._next"},{"attributeType":"null","col":4,"comment":"null","endLoc":41,"id":874,"name":"server_address","nodeType":"Attribute","startLoc":41,"text":"server_address"},{"attributeType":"null","col":8,"comment":"null","endLoc":608,"id":875,"name":"_content_consumed","nodeType":"Attribute","startLoc":608,"text":"self._content_consumed"},{"attributeType":"socket","col":4,"comment":"null","endLoc":42,"id":876,"name":"socket","nodeType":"Attribute","startLoc":42,"text":"socket"},{"col":0,"comment":"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n ","endLoc":666,"header":"def address_in_network(ip, net)","id":877,"name":"address_in_network","nodeType":"Function","startLoc":654,"text":"def address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & 
netmask)"},{"col":0,"comment":"Sends a HEAD request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes. If\n `allow_redirects` is not provided, it will be set to `False` (as\n opposed to the default :meth:`request` behavior).\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":102,"header":"def head(url, **kwargs)","id":878,"name":"head","nodeType":"Function","startLoc":90,"text":"def head(url, **kwargs):\n r\"\"\"Sends a HEAD request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes. If\n `allow_redirects` is not provided, it will be set to `False` (as\n opposed to the default :meth:`request` behavior).\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return request('head', url, **kwargs)"},{"col":0,"comment":"Sends a PATCH request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":147,"header":"def patch(url, data=None, **kwargs)","id":880,"name":"patch","nodeType":"Function","startLoc":135,"text":"def patch(url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('patch', url, data=data, **kwargs)"},{"attributeType":"bool","col":4,"comment":"null","endLoc":43,"id":881,"name":"allow_reuse_address","nodeType":"Attribute","startLoc":43,"text":"allow_reuse_address"},{"col":0,"comment":"Sends a PUT request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":132,"header":"def put(url, data=None, **kwargs)","id":882,"name":"put","nodeType":"Function","startLoc":120,"text":"def put(url, data=None, **kwargs):\n r\"\"\"Sends a PUT request.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json data to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('put', url, data=data, **kwargs)"},{"col":0,"comment":"Sends a DELETE request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n 
","endLoc":159,"header":"def delete(url, **kwargs)","id":883,"name":"delete","nodeType":"Function","startLoc":150,"text":"def delete(url, **kwargs):\n r\"\"\"Sends a DELETE request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('delete', url, **kwargs)"},{"attributeType":"int","col":4,"comment":"null","endLoc":44,"id":884,"name":"request_queue_size","nodeType":"Attribute","startLoc":44,"text":"request_queue_size"},{"attributeType":"int","col":4,"comment":"null","endLoc":45,"id":885,"name":"socket_type","nodeType":"Attribute","startLoc":45,"text":"socket_type"},{"col":0,"comment":"Sends an OPTIONS request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n ","endLoc":87,"header":"def options(url, **kwargs)","id":886,"name":"options","nodeType":"Function","startLoc":78,"text":"def options(url, **kwargs):\n r\"\"\"Sends an OPTIONS request.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :return: :class:`Response ` object\n :rtype: requests.Response\n \"\"\"\n\n return request('options', url, **kwargs)"},{"attributeType":"float | None","col":4,"comment":"null","endLoc":46,"id":887,"name":"timeout","nodeType":"Attribute","startLoc":46,"text":"timeout"},{"col":0,"comment":"\n Returns a :class:`Session` for context-management.\n\n .. deprecated:: 1.0.0\n\n This method has been deprecated since version 1.0.0 and is only kept for\n backwards compatibility. New code should use :class:`~requests.sessions.Session`\n to create a session. This may be removed at a future date.\n\n :rtype: Session\n ","endLoc":771,"header":"def session()","id":888,"name":"session","nodeType":"Function","startLoc":759,"text":"def session():\n \"\"\"\n Returns a :class:`Session` for context-management.\n\n .. deprecated:: 1.0.0\n\n This method has been deprecated since version 1.0.0 and is only kept for\n backwards compatibility. New code should use :class:`~requests.sessions.Session`\n to create a session. 
This may be removed at a future date.\n\n :rtype: Session\n \"\"\"\n return Session()"},{"attributeType":"(Any, Any, Self) -> BaseRequestHandler","col":4,"comment":"null","endLoc":47,"id":889,"name":"RequestHandlerClass","nodeType":"Attribute","startLoc":47,"text":"RequestHandlerClass"},{"attributeType":"null","col":4,"comment":"null","endLoc":63,"id":893,"name":"get_item_parameters","nodeType":"Attribute","startLoc":63,"text":"get_item_parameters"},{"col":4,"comment":"null","endLoc":81,"header":"def __init__(\n self,\n server_address: _AfInetAddress,\n RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler],\n bind_and_activate: bool = True,\n ) -> None","id":894,"name":"__init__","nodeType":"Function","startLoc":76,"text":"def __init__(\n self,\n server_address: _AfInetAddress,\n RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler],\n bind_and_activate: bool = True,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":82,"header":"def get_request(self) -> tuple[_socket, _RetAddress]","id":895,"name":"get_request","nodeType":"Function","startLoc":82,"text":"def get_request(self) -> tuple[_socket, _RetAddress]: ..."},{"attributeType":"(str | bytes | bytearray, int)","col":4,"comment":"null","endLoc":75,"id":896,"name":"server_address","nodeType":"Attribute","startLoc":75,"text":"server_address"},{"attributeType":"LookupDict","col":8,"comment":"null","endLoc":57,"id":899,"name":"lookup_dict","nodeType":"Attribute","startLoc":57,"text":"self.lookup_dict"},{"col":0,"comment":"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n ","endLoc":1016,"header":"def check_header_validity(header)","id":906,"name":"check_header_validity","nodeType":"Function","startLoc":998,"text":"def check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))"},{"fileName":"server.py","filePath":"tests/testserver","id":907,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport threading\nimport socket\nimport select\n\n\ndef consume_socket_content(sock, timeout=0.5):\n chunks = 65536\n content = b''\n\n while True:\n more_to_read = select.select([sock], [], [], timeout)[0]\n if not more_to_read:\n break\n\n new_content = sock.recv(chunks)\n if not new_content:\n break\n\n content += new_content\n\n return content\n\n\nclass Server(threading.Thread):\n \"\"\"Dummy server using for unit testing\"\"\"\n WAIT_EVENT_TIMEOUT = 5\n\n def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None):\n super(Server, self).__init__()\n\n self.handler = handler or consume_socket_content\n self.handler_results = []\n\n self.host = host\n self.port = port\n self.requests_to_handle = requests_to_handle\n\n self.wait_to_close_event = wait_to_close_event\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n @classmethod\n def text_response_server(cls, text, request_timeout=0.5, **kwargs):\n def text_response_handler(sock):\n request_content = consume_socket_content(sock, timeout=request_timeout)\n sock.send(text.encode('utf-8'))\n\n return request_content\n\n\n return Server(text_response_handler, **kwargs)\n\n @classmethod\n def basic_response_server(cls, **kwargs):\n return cls.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 0\\r\\n\\r\\n\",\n **kwargs\n )\n\n def run(self):\n try:\n self.server_sock = self._create_socket_and_bind()\n # in case self.port = 0\n self.port = self.server_sock.getsockname()[1]\n self.ready_event.set()\n self._handle_requests()\n\n if self.wait_to_close_event:\n self.wait_to_close_event.wait(self.WAIT_EVENT_TIMEOUT)\n finally:\n self.ready_event.set() # just in case of exception\n self._close_server_sock_ignore_errors()\n self.stop_event.set()\n\n def _create_socket_and_bind(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n # NB: when Python 2.7 is no longer supported, the argument\n # can be removed to use a default backlog size\n sock.listen(5)\n return sock\n\n def _close_server_sock_ignore_errors(self):\n try:\n self.server_sock.close()\n except IOError:\n pass\n\n def _handle_requests(self):\n for _ in range(self.requests_to_handle):\n sock = self._accept_connection()\n if not sock:\n break\n\n handler_result = self.handler(sock)\n\n self.handler_results.append(handler_result)\n sock.close()\n\n def _accept_connection(self):\n try:\n ready, _, _ = select.select([self.server_sock], [], [], self.WAIT_EVENT_TIMEOUT)\n if not ready:\n return None\n\n return self.server_sock.accept()[0]\n except (select.error, socket.error):\n return None\n\n def __enter__(self):\n self.start()\n self.ready_event.wait(self.WAIT_EVENT_TIMEOUT)\n return self.host, self.port\n\n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait(self.WAIT_EVENT_TIMEOUT)\n else:\n if 
self.wait_to_close_event:\n # avoid server from waiting for event timeouts\n # if an exception is found in the main thread\n self.wait_to_close_event.set()\n\n # ensure server thread doesn't get stuck waiting for connections\n self._close_server_sock_ignore_errors()\n self.join()\n return False # allow exceptions to propagate\n"},{"className":"Session","col":0,"comment":"A Requests session.\n\n Provides cookie persistence, connection-pooling, and configuration.\n\n Basic Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> s.get('https://httpbin.org/get')\n \n\n Or as a context manager::\n\n >>> with requests.Session() as s:\n ... s.get('https://httpbin.org/get')\n \n ","endLoc":756,"id":908,"nodeType":"Class","startLoc":324,"text":"class Session(SessionRedirectMixin):\n \"\"\"A Requests session.\n\n Provides cookie persistence, connection-pooling, and configuration.\n\n Basic Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> s.get('https://httpbin.org/get')\n \n\n Or as a context manager::\n\n >>> with requests.Session() as s:\n ... s.get('https://httpbin.org/get')\n \n \"\"\"\n\n __attrs__ = [\n 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',\n 'cert', 'adapters', 'stream', 'trust_env',\n 'max_redirects',\n ]\n\n def __init__(self):\n\n #: A case-insensitive dictionary of headers to be sent on each\n #: :class:`Request ` sent from this\n #: :class:`Session `.\n self.headers = default_headers()\n\n #: Default Authentication tuple or object to attach to\n #: :class:`Request `.\n self.auth = None\n\n #: Dictionary mapping protocol or protocol and host to the URL of the proxy\n #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to\n #: be used on each :class:`Request `.\n self.proxies = {}\n\n #: Event-handling hooks.\n self.hooks = default_hooks()\n\n #: Dictionary of querystring data to attach to each\n #: :class:`Request `. The dictionary values may be lists for\n #: representing multivalued query parameters.\n self.params = {}\n\n #: Stream response content default.\n self.stream = False\n\n #: SSL Verification default.\n #: Defaults to `True`, requiring requests to verify the TLS certificate at the\n #: remote end.\n #: If verify is set to `False`, requests will accept any TLS certificate\n #: presented by the server, and will ignore hostname mismatches and/or\n #: expired certificates, which will make your application vulnerable to\n #: man-in-the-middle (MitM) attacks.\n #: Only set this to `False` for testing.\n self.verify = True\n\n #: SSL client certificate default, if String, path to ssl client\n #: cert file (.pem). If Tuple, ('cert', 'key') pair.\n self.cert = None\n\n #: Maximum number of redirects allowed. If the request exceeds this\n #: limit, a :class:`TooManyRedirects` exception is raised.\n #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is\n #: 30.\n self.max_redirects = DEFAULT_REDIRECT_LIMIT\n\n #: Trust environment settings for proxy configuration, default\n #: authentication and similar.\n self.trust_env = True\n\n #: A CookieJar containing all currently outstanding cookies set on this\n #: session. 
By default it is a\n #: :class:`RequestsCookieJar `, but\n #: may be any other ``cookielib.CookieJar`` compatible object.\n self.cookies = cookiejar_from_dict({})\n\n # Default connection adapters.\n self.adapters = OrderedDict()\n self.mount('https://', HTTPAdapter())\n self.mount('http://', HTTPAdapter())\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def prepare_request(self, request):\n \"\"\"Constructs a :class:`PreparedRequest ` for\n transmission and returns it. The :class:`PreparedRequest` has settings\n merged from the :class:`Request ` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n \"\"\"\n cookies = request.cookies or {}\n\n # Bootstrap CookieJar.\n if not isinstance(cookies, cookielib.CookieJar):\n cookies = cookiejar_from_dict(cookies)\n\n # Merge with session cookies\n merged_cookies = merge_cookies(\n merge_cookies(RequestsCookieJar(), self.cookies), cookies)\n\n # Set environment's basic authentication if not explicitly set.\n auth = request.auth\n if self.trust_env and not auth and not self.auth:\n auth = get_netrc_auth(request.url)\n\n p = PreparedRequest()\n p.prepare(\n method=request.method.upper(),\n url=request.url,\n files=request.files,\n data=request.data,\n json=request.json,\n headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),\n params=merge_setting(request.params, self.params),\n auth=merge_setting(auth, self.auth),\n cookies=merged_cookies,\n hooks=merge_hooks(request.hooks, self.hooks),\n )\n return p\n\n def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None):\n \"\"\"Constructs a :class:`Request `, prepares it and sends it.\n Returns :class:`Response ` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. 
When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n \"\"\"\n # Create the Request.\n req = Request(\n method=method.upper(),\n url=url,\n headers=headers,\n files=files,\n data=data or {},\n json=json,\n params=params or {},\n auth=auth,\n cookies=cookies,\n hooks=hooks,\n )\n prep = self.prepare_request(req)\n\n proxies = proxies or {}\n\n settings = self.merge_environment_settings(\n prep.url, proxies, stream, verify, cert\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': timeout,\n 'allow_redirects': allow_redirects,\n }\n send_kwargs.update(settings)\n resp = self.send(prep, **send_kwargs)\n\n return resp\n\n def get(self, url, **kwargs):\n r\"\"\"Sends a GET request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('GET', url, **kwargs)\n\n def options(self, url, **kwargs):\n r\"\"\"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('OPTIONS', url, **kwargs)\n\n def head(self, url, **kwargs):\n r\"\"\"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return self.request('HEAD', url, **kwargs)\n\n def post(self, url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('POST', url, data=data, json=json, **kwargs)\n\n def put(self, url, data=None, **kwargs):\n r\"\"\"Sends a PUT request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PUT', url, data=data, **kwargs)\n\n def patch(self, url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request. 
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PATCH', url, data=data, **kwargs)\n\n def delete(self, url, **kwargs):\n r\"\"\"Sends a DELETE request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('DELETE', url, **kwargs)\n\n def send(self, request, **kwargs):\n \"\"\"Send a given PreparedRequest.\n\n :rtype: requests.Response\n \"\"\"\n # Set defaults that the hooks can utilize to ensure they always have\n # the correct parameters to reproduce the previous request.\n kwargs.setdefault('stream', self.stream)\n kwargs.setdefault('verify', self.verify)\n kwargs.setdefault('cert', self.cert)\n if 'proxies' not in kwargs:\n kwargs['proxies'] = resolve_proxies(\n request, self.proxies, self.trust_env\n )\n\n # It's possible that users might accidentally send a Request object.\n # Guard against that specific failure case.\n if isinstance(request, Request):\n raise ValueError('You can only send PreparedRequests.')\n\n # Set up variables needed for resolve_redirects and dispatching of hooks\n allow_redirects = kwargs.pop('allow_redirects', True)\n stream = kwargs.get('stream')\n hooks = request.hooks\n\n # Get the appropriate adapter to use\n adapter = self.get_adapter(url=request.url)\n\n # Start time (approximately) of the request\n start = preferred_clock()\n\n # Send the request\n r = adapter.send(request, **kwargs)\n\n # Total elapsed time of the request (approximately)\n elapsed = preferred_clock() - start\n r.elapsed = timedelta(seconds=elapsed)\n\n # Response manipulation hooks\n r = dispatch_hook('response', hooks, r, **kwargs)\n\n # Persist cookies\n if r.history:\n\n # If the hooks create history then we want those cookies too\n for resp in r.history:\n extract_cookies_to_jar(self.cookies, resp.request, resp.raw)\n\n extract_cookies_to_jar(self.cookies, request, r.raw)\n\n # Resolve redirects if allowed.\n if allow_redirects:\n # Redirect resolving generator.\n gen = self.resolve_redirects(r, request, **kwargs)\n history = [resp for resp in gen]\n else:\n history = []\n\n # Shuffle things around if there's history.\n if history:\n # Insert the first (original) request at the start\n history.insert(0, r)\n # Get the last request made\n r = history.pop()\n r.history = history\n\n # If redirects aren't being followed, store the response on the Request for Response.next().\n if not allow_redirects:\n try:\n r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))\n except StopIteration:\n pass\n\n if not stream:\n r.content\n\n return r\n\n def merge_environment_settings(self, url, proxies, stream, verify, cert):\n \"\"\"\n Check the environment and merge it with some settings.\n\n :rtype: dict\n \"\"\"\n # Gather clues from the surrounding environment.\n if self.trust_env:\n # Set environment's proxies.\n no_proxy = proxies.get('no_proxy') if proxies is not None else None\n env_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n for (k, v) in env_proxies.items():\n proxies.setdefault(k, v)\n\n # Look for requests environment configuration and be compatible\n # with cURL.\n if verify is True or 
verify is None:\n verify = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n\n # Merge all the kwargs.\n proxies = merge_setting(proxies, self.proxies)\n stream = merge_setting(stream, self.stream)\n verify = merge_setting(verify, self.verify)\n cert = merge_setting(cert, self.cert)\n\n return {'verify': verify, 'proxies': proxies, 'stream': stream,\n 'cert': cert}\n\n def get_adapter(self, url):\n \"\"\"\n Returns the appropriate connection adapter for the given URL.\n\n :rtype: requests.adapters.BaseAdapter\n \"\"\"\n for (prefix, adapter) in self.adapters.items():\n\n if url.lower().startswith(prefix.lower()):\n return adapter\n\n # Nothing matches :-/\n raise InvalidSchema(\"No connection adapters were found for {!r}\".format(url))\n\n def close(self):\n \"\"\"Closes all adapters and as such the session\"\"\"\n for v in self.adapters.values():\n v.close()\n\n def mount(self, prefix, adapter):\n \"\"\"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n \"\"\"\n self.adapters[prefix] = adapter\n keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]\n\n for key in keys_to_move:\n self.adapters[key] = self.adapters.pop(key)\n\n def __getstate__(self):\n state = {attr: getattr(self, attr, None) for attr in self.__attrs__}\n return state\n\n def __setstate__(self, state):\n for attr, value in state.items():\n setattr(self, attr, value)"},{"className":"SessionRedirectMixin","col":0,"comment":"null","endLoc":321,"id":909,"nodeType":"Class","startLoc":96,"text":"class SessionRedirectMixin(object):\n\n def get_redirect_target(self, resp):\n \"\"\"Receives a Response. Returns a redirect URI or ``None``\"\"\"\n # Due to the nature of how requests processes redirects this method will\n # be called at least once upon the original response and at least twice\n # on each subsequent redirect response (if any).\n # If a custom mixin is used to handle this logic, it may be advantageous\n # to cache the redirect location onto the response object as a private\n # attribute.\n if resp.is_redirect:\n location = resp.headers['location']\n # Currently the underlying http module on py3 decode headers\n # in latin1, but empirical evidence suggests that latin1 is very\n # rarely used with non-ASCII characters in HTTP headers.\n # It is more likely to get UTF8 header rather than latin1.\n # This causes incorrect handling of UTF8 encoded location headers.\n # To solve this, we re-encode the location in latin1.\n if is_py3:\n location = location.encode('latin1')\n return to_native_string(location, 'utf8')\n return None\n\n def should_strip_auth(self, old_url, new_url):\n \"\"\"Decide whether Authorization header should be removed when redirecting\"\"\"\n old_parsed = urlparse(old_url)\n new_parsed = urlparse(new_url)\n if old_parsed.hostname != new_parsed.hostname:\n return True\n # Special case: allow http -> https redirect when using the standard\n # ports. 
This isn't specified by RFC 7235, but is kept to avoid\n # breaking backwards compatibility with older versions of requests\n # that allowed any redirects on the same host.\n if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)\n and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):\n return False\n\n # Handle default port usage corresponding to scheme.\n changed_port = old_parsed.port != new_parsed.port\n changed_scheme = old_parsed.scheme != new_parsed.scheme\n default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)\n if (not changed_scheme and old_parsed.port in default_port\n and new_parsed.port in default_port):\n return False\n\n # Standard case: root URI must match\n return changed_port or changed_scheme\n\n def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):\n \"\"\"Receives a Response. Returns a generator of Responses or Requests.\"\"\"\n\n hist = [] # keep track of history\n\n url = self.get_redirect_target(resp)\n previous_fragment = urlparse(req.url).fragment\n while url:\n prepared_request = req.copy()\n\n # Update history and keep track of redirects.\n # resp.history must ignore the original request in this loop\n hist.append(resp)\n resp.history = hist[1:]\n\n try:\n resp.content # Consume socket so it can be released\n except (ChunkedEncodingError, ContentDecodingError, RuntimeError):\n resp.raw.read(decode_content=False)\n\n if len(resp.history) >= self.max_redirects:\n raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)\n\n # Release the connection back into the pool.\n resp.close()\n\n # Handle redirection without scheme (see: RFC 1808 Section 4)\n if url.startswith('//'):\n parsed_rurl = urlparse(resp.url)\n url = ':'.join([to_native_string(parsed_rurl.scheme), url])\n\n # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)\n parsed = urlparse(url)\n if parsed.fragment == '' and previous_fragment:\n parsed = parsed._replace(fragment=previous_fragment)\n elif parsed.fragment:\n previous_fragment = parsed.fragment\n url = parsed.geturl()\n\n # Facilitate relative 'location' headers, as allowed by RFC 7231.\n # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')\n # Compliant with RFC3986, we percent encode the url.\n if not parsed.netloc:\n url = urljoin(resp.url, requote_uri(url))\n else:\n url = requote_uri(url)\n\n prepared_request.url = to_native_string(url)\n\n self.rebuild_method(prepared_request, resp)\n\n # https://github.com/psf/requests/issues/1084\n if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):\n # https://github.com/psf/requests/issues/3490\n purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')\n for header in purged_headers:\n prepared_request.headers.pop(header, None)\n prepared_request.body = None\n\n headers = prepared_request.headers\n headers.pop('Cookie', None)\n\n # Extract any cookies sent on the response to the cookiejar\n # in the new request. 
Because we've mutated our copied prepared\n # request, use the old one that we haven't yet touched.\n extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)\n merge_cookies(prepared_request._cookies, self.cookies)\n prepared_request.prepare_cookies(prepared_request._cookies)\n\n # Rebuild auth and proxy information.\n proxies = self.rebuild_proxies(prepared_request, proxies)\n self.rebuild_auth(prepared_request, resp)\n\n # A failed tell() sets `_body_position` to `object()`. This non-None\n # value ensures `rewindable` will be True, allowing us to raise an\n # UnrewindableBodyError, instead of hanging the connection.\n rewindable = (\n prepared_request._body_position is not None and\n ('Content-Length' in headers or 'Transfer-Encoding' in headers)\n )\n\n # Attempt to rewind consumed file-like object.\n if rewindable:\n rewind_body(prepared_request)\n\n # Override the original request.\n req = prepared_request\n\n if yield_requests:\n yield req\n else:\n\n resp = self.send(\n req,\n stream=stream,\n timeout=timeout,\n verify=verify,\n cert=cert,\n proxies=proxies,\n allow_redirects=False,\n **adapter_kwargs\n )\n\n extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)\n\n # extract redirect url, if any, for the next loop\n url = self.get_redirect_target(resp)\n yield resp\n\n def rebuild_auth(self, prepared_request, response):\n \"\"\"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n \"\"\"\n headers = prepared_request.headers\n url = prepared_request.url\n\n if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):\n # If we get redirected to a new host, we should strip out any\n # authentication headers.\n del headers['Authorization']\n\n # .netrc might have more auth for us on our new host.\n new_auth = get_netrc_auth(url) if self.trust_env else None\n if new_auth is not None:\n prepared_request.prepare_auth(new_auth)\n\n def rebuild_proxies(self, prepared_request, proxies):\n \"\"\"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. 
Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n \"\"\"\n headers = prepared_request.headers\n scheme = urlparse(prepared_request.url).scheme\n new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)\n\n if 'Proxy-Authorization' in headers:\n del headers['Proxy-Authorization']\n\n try:\n username, password = get_auth_from_url(new_proxies[scheme])\n except KeyError:\n username, password = None, None\n\n if username and password:\n headers['Proxy-Authorization'] = _basic_auth_str(username, password)\n\n return new_proxies\n\n def rebuild_method(self, prepared_request, response):\n \"\"\"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n \"\"\"\n method = prepared_request.method\n\n # https://tools.ietf.org/html/rfc7231#section-6.4.4\n if response.status_code == codes.see_other and method != 'HEAD':\n method = 'GET'\n\n # Do what the browsers do, despite standards...\n # First, turn 302s into GETs.\n if response.status_code == codes.found and method != 'HEAD':\n method = 'GET'\n\n # Second, if a POST is responded to with a 301, turn it into a GET.\n # This bizarre behaviour is explained in Issue 1704.\n if response.status_code == codes.moved and method == 'POST':\n method = 'GET'\n\n prepared_request.method = method"},{"col":4,"comment":"Receives a Response. Returns a redirect URI or ``None``","endLoc":117,"header":"def get_redirect_target(self, resp)","id":910,"name":"get_redirect_target","nodeType":"Function","startLoc":98,"text":"def get_redirect_target(self, resp):\n \"\"\"Receives a Response. Returns a redirect URI or ``None``\"\"\"\n # Due to the nature of how requests processes redirects this method will\n # be called at least once upon the original response and at least twice\n # on each subsequent redirect response (if any).\n # If a custom mixin is used to handle this logic, it may be advantageous\n # to cache the redirect location onto the response object as a private\n # attribute.\n if resp.is_redirect:\n location = resp.headers['location']\n # Currently the underlying http module on py3 decode headers\n # in latin1, but empirical evidence suggests that latin1 is very\n # rarely used with non-ASCII characters in HTTP headers.\n # It is more likely to get UTF8 header rather than latin1.\n # This causes incorrect handling of UTF8 encoded location headers.\n # To solve this, we re-encode the location in latin1.\n if is_py3:\n location = location.encode('latin1')\n return to_native_string(location, 'utf8')\n return None"},{"col":4,"comment":"Decide whether Authorization header should be removed when redirecting","endLoc":142,"header":"def should_strip_auth(self, old_url, new_url)","id":913,"name":"should_strip_auth","nodeType":"Function","startLoc":119,"text":"def should_strip_auth(self, old_url, new_url):\n \"\"\"Decide whether Authorization header should be removed when redirecting\"\"\"\n old_parsed = urlparse(old_url)\n new_parsed = urlparse(new_url)\n if old_parsed.hostname != new_parsed.hostname:\n return True\n # Special case: allow http -> https redirect when using the standard\n # ports. 
This isn't specified by RFC 7235, but is kept to avoid\n # breaking backwards compatibility with older versions of requests\n # that allowed any redirects on the same host.\n if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)\n and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):\n return False\n\n # Handle default port usage corresponding to scheme.\n changed_port = old_parsed.port != new_parsed.port\n changed_scheme = old_parsed.scheme != new_parsed.scheme\n default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)\n if (not changed_scheme and old_parsed.port in default_port\n and new_parsed.port in default_port):\n return False\n\n # Standard case: root URI must match\n return changed_port or changed_scheme"},{"fileName":"exceptions.py","filePath":"requests","id":916,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.exceptions\n~~~~~~~~~~~~~~~~~~~\n\nThis module contains the set of Requests' exceptions.\n\"\"\"\nfrom urllib3.exceptions import HTTPError as BaseHTTPError\n\nfrom .compat import JSONDecodeError as CompatJSONDecodeError\n\n\nclass RequestException(IOError):\n \"\"\"There was an ambiguous exception that occurred while handling your\n request.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize RequestException with `request` and `response` objects.\"\"\"\n response = kwargs.pop('response', None)\n self.response = response\n self.request = kwargs.pop('request', None)\n if (response is not None and not self.request and\n hasattr(response, 'request')):\n self.request = self.response.request\n super(RequestException, self).__init__(*args, **kwargs)\n\n\nclass InvalidJSONError(RequestException):\n \"\"\"A JSON error occurred.\"\"\"\n\n\nclass JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):\n \"\"\"Couldn't decode the text into json\"\"\"\n\n\nclass HTTPError(RequestException):\n \"\"\"An HTTP error occurred.\"\"\"\n\n\nclass ConnectionError(RequestException):\n \"\"\"A Connection error occurred.\"\"\"\n\n\nclass ProxyError(ConnectionError):\n \"\"\"A proxy error occurred.\"\"\"\n\n\nclass SSLError(ConnectionError):\n \"\"\"An SSL error occurred.\"\"\"\n\n\nclass Timeout(RequestException):\n \"\"\"The request timed out.\n\n Catching this error will catch both\n :exc:`~requests.exceptions.ConnectTimeout` and\n :exc:`~requests.exceptions.ReadTimeout` errors.\n \"\"\"\n\n\nclass ConnectTimeout(ConnectionError, Timeout):\n \"\"\"The request timed out while trying to connect to the remote server.\n\n Requests that produced this error are safe to retry.\n \"\"\"\n\n\nclass ReadTimeout(Timeout):\n \"\"\"The server did not send any data in the allotted amount of time.\"\"\"\n\n\nclass URLRequired(RequestException):\n \"\"\"A valid URL is required to make a request.\"\"\"\n\n\nclass TooManyRedirects(RequestException):\n \"\"\"Too many redirects.\"\"\"\n\n\nclass MissingSchema(RequestException, ValueError):\n \"\"\"The URL schema (e.g. 
http or https) is missing.\"\"\"\n\n\nclass InvalidSchema(RequestException, ValueError):\n \"\"\"See defaults.py for valid schemas.\"\"\"\n\n\nclass InvalidURL(RequestException, ValueError):\n \"\"\"The URL provided was somehow invalid.\"\"\"\n\n\nclass InvalidHeader(RequestException, ValueError):\n \"\"\"The header value provided was somehow invalid.\"\"\"\n\n\nclass InvalidProxyURL(InvalidURL):\n \"\"\"The proxy URL provided is invalid.\"\"\"\n\n\nclass ChunkedEncodingError(RequestException):\n \"\"\"The server declared chunked encoding but sent an invalid chunk.\"\"\"\n\n\nclass ContentDecodingError(RequestException, BaseHTTPError):\n \"\"\"Failed to decode response content.\"\"\"\n\n\nclass StreamConsumedError(RequestException, TypeError):\n \"\"\"The content for this response was already consumed.\"\"\"\n\n\nclass RetryError(RequestException):\n \"\"\"Custom retries logic failed\"\"\"\n\n\nclass UnrewindableBodyError(RequestException):\n \"\"\"Requests encountered an error when trying to rewind a body.\"\"\"\n\n# Warnings\n\n\nclass RequestsWarning(Warning):\n \"\"\"Base warning for Requests.\"\"\"\n\n\nclass FileModeWarning(RequestsWarning, DeprecationWarning):\n \"\"\"A file was opened in text mode, but Requests determined its binary length.\"\"\"\n\n\nclass RequestsDependencyWarning(RequestsWarning):\n \"\"\"An imported dependency doesn't match the expected version range.\"\"\"\n"},{"className":"ProxyError","col":0,"comment":"A proxy error occurred.","endLoc":47,"id":917,"nodeType":"Class","startLoc":46,"text":"class ProxyError(ConnectionError):\n \"\"\"A proxy error occurred.\"\"\""},{"className":"SSLError","col":0,"comment":"An SSL error occurred.","endLoc":51,"id":918,"nodeType":"Class","startLoc":50,"text":"class SSLError(ConnectionError):\n \"\"\"An SSL error occurred.\"\"\""},{"className":"Timeout","col":0,"comment":"The request timed out.\n\n Catching this error will catch both\n :exc:`~requests.exceptions.ConnectTimeout` and\n :exc:`~requests.exceptions.ReadTimeout` errors.\n ","endLoc":60,"id":919,"nodeType":"Class","startLoc":54,"text":"class Timeout(RequestException):\n \"\"\"The request timed out.\n\n Catching this error will catch both\n :exc:`~requests.exceptions.ConnectTimeout` and\n :exc:`~requests.exceptions.ReadTimeout` errors.\n \"\"\""},{"className":"ConnectTimeout","col":0,"comment":"The request timed out while trying to connect to the remote server.\n\n Requests that produced this error are safe to retry.\n ","endLoc":67,"id":920,"nodeType":"Class","startLoc":63,"text":"class ConnectTimeout(ConnectionError, Timeout):\n \"\"\"The request timed out while trying to connect to the remote server.\n\n Requests that produced this error are safe to retry.\n \"\"\""},{"className":"ReadTimeout","col":0,"comment":"The server did not send any data in the allotted amount of time.","endLoc":71,"id":921,"nodeType":"Class","startLoc":70,"text":"class ReadTimeout(Timeout):\n \"\"\"The server did not send any data in the allotted amount of time.\"\"\""},{"className":"URLRequired","col":0,"comment":"A valid URL is required to make a request.","endLoc":75,"id":922,"nodeType":"Class","startLoc":74,"text":"class URLRequired(RequestException):\n \"\"\"A valid URL is required to make a request.\"\"\""},{"className":"TooManyRedirects","col":0,"comment":"Too many redirects.","endLoc":79,"id":923,"nodeType":"Class","startLoc":78,"text":"class TooManyRedirects(RequestException):\n \"\"\"Too many redirects.\"\"\""},{"className":"InvalidSchema","col":0,"comment":"See defaults.py for valid 
schemas.","endLoc":87,"id":924,"nodeType":"Class","startLoc":86,"text":"class InvalidSchema(RequestException, ValueError):\n \"\"\"See defaults.py for valid schemas.\"\"\""},{"className":"InvalidProxyURL","col":0,"comment":"The proxy URL provided is invalid.","endLoc":99,"id":925,"nodeType":"Class","startLoc":98,"text":"class InvalidProxyURL(InvalidURL):\n \"\"\"The proxy URL provided is invalid.\"\"\""},{"className":"RetryError","col":0,"comment":"Custom retries logic failed","endLoc":115,"id":926,"nodeType":"Class","startLoc":114,"text":"class RetryError(RequestException):\n \"\"\"Custom retries logic failed\"\"\""},{"col":0,"comment":"","endLoc":8,"header":"exceptions.py#","id":927,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.exceptions\n~~~~~~~~~~~~~~~~~~~\n\nThis module contains the set of Requests' exceptions.\n\"\"\""},{"col":4,"comment":"Receives a Response. Returns a generator of Responses or Requests.","endLoc":252,"header":"def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs)","id":928,"name":"resolve_redirects","nodeType":"Function","startLoc":144,"text":"def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):\n \"\"\"Receives a Response. Returns a generator of Responses or Requests.\"\"\"\n\n hist = [] # keep track of history\n\n url = self.get_redirect_target(resp)\n previous_fragment = urlparse(req.url).fragment\n while url:\n prepared_request = req.copy()\n\n # Update history and keep track of redirects.\n # resp.history must ignore the original request in this loop\n hist.append(resp)\n resp.history = hist[1:]\n\n try:\n resp.content # Consume socket so it can be released\n except (ChunkedEncodingError, ContentDecodingError, RuntimeError):\n resp.raw.read(decode_content=False)\n\n if len(resp.history) >= self.max_redirects:\n raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)\n\n # Release the connection back into the pool.\n resp.close()\n\n # Handle redirection without scheme (see: RFC 1808 Section 4)\n if url.startswith('//'):\n parsed_rurl = urlparse(resp.url)\n url = ':'.join([to_native_string(parsed_rurl.scheme), url])\n\n # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)\n parsed = urlparse(url)\n if parsed.fragment == '' and previous_fragment:\n parsed = parsed._replace(fragment=previous_fragment)\n elif parsed.fragment:\n previous_fragment = parsed.fragment\n url = parsed.geturl()\n\n # Facilitate relative 'location' headers, as allowed by RFC 7231.\n # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')\n # Compliant with RFC3986, we percent encode the url.\n if not parsed.netloc:\n url = urljoin(resp.url, requote_uri(url))\n else:\n url = requote_uri(url)\n\n prepared_request.url = to_native_string(url)\n\n self.rebuild_method(prepared_request, resp)\n\n # https://github.com/psf/requests/issues/1084\n if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):\n # https://github.com/psf/requests/issues/3490\n purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')\n for header in purged_headers:\n prepared_request.headers.pop(header, None)\n prepared_request.body = None\n\n headers = prepared_request.headers\n headers.pop('Cookie', None)\n\n # Extract any cookies sent on the response to the cookiejar\n # in the new request. 
Because we've mutated our copied prepared\n # request, use the old one that we haven't yet touched.\n extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)\n merge_cookies(prepared_request._cookies, self.cookies)\n prepared_request.prepare_cookies(prepared_request._cookies)\n\n # Rebuild auth and proxy information.\n proxies = self.rebuild_proxies(prepared_request, proxies)\n self.rebuild_auth(prepared_request, resp)\n\n # A failed tell() sets `_body_position` to `object()`. This non-None\n # value ensures `rewindable` will be True, allowing us to raise an\n # UnrewindableBodyError, instead of hanging the connection.\n rewindable = (\n prepared_request._body_position is not None and\n ('Content-Length' in headers or 'Transfer-Encoding' in headers)\n )\n\n # Attempt to rewind consumed file-like object.\n if rewindable:\n rewind_body(prepared_request)\n\n # Override the original request.\n req = prepared_request\n\n if yield_requests:\n yield req\n else:\n\n resp = self.send(\n req,\n stream=stream,\n timeout=timeout,\n verify=verify,\n cert=cert,\n proxies=proxies,\n allow_redirects=False,\n **adapter_kwargs\n )\n\n extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)\n\n # extract redirect url, if any, for the next loop\n url = self.get_redirect_target(resp)\n yield resp"},{"fileName":"packages.py","filePath":"requests","id":929,"nodeType":"File","text":"import sys\n\ntry:\n import chardet\nexcept ImportError:\n import charset_normalizer as chardet\n import warnings\n\n warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')\n\n# This code exists for backwards compatibility reasons.\n# I don't like it either. Just look the other way. :)\n\nfor package in ('urllib3', 'idna'):\n locals()[package] = __import__(package)\n # This traversal is apparently necessary such that the identities are\n # preserved (requests.packages.urllib3.* is urllib3.*)\n for mod in list(sys.modules):\n if mod == package or mod.startswith(package + '.'):\n sys.modules['requests.packages.' + mod] = sys.modules[mod]\n\ntarget = chardet.__name__\nfor mod in list(sys.modules):\n if mod == target or mod.startswith(target + '.'):\n sys.modules['requests.packages.' 
+ target.replace(target, 'chardet')] = sys.modules[mod]\n# Kinda cool, though, right?\n"},{"attributeType":"null","col":33,"comment":"null","endLoc":6,"id":930,"name":"chardet","nodeType":"Attribute","startLoc":6,"text":"chardet"},{"attributeType":"str","col":4,"comment":"null","endLoc":14,"id":932,"name":"package","nodeType":"Attribute","startLoc":14,"text":"package"},{"attributeType":"str","col":8,"comment":"null","endLoc":18,"id":933,"name":"mod","nodeType":"Attribute","startLoc":18,"text":"mod"},{"attributeType":"str","col":4,"comment":"null","endLoc":13,"id":935,"name":"server_name","nodeType":"Attribute","startLoc":13,"text":"server_name"},{"attributeType":"int","col":4,"comment":"null","endLoc":14,"id":936,"name":"server_port","nodeType":"Attribute","startLoc":14,"text":"server_port"},{"className":"SimpleHTTPRequestHandler","col":0,"comment":"null","endLoc":72,"id":941,"nodeType":"Class","startLoc":53,"text":"class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):\n extensions_map: dict[str, str]\n if sys.version_info >= (3, 12):\n index_pages: ClassVar[tuple[str, ...]]\n directory: str\n def __init__(\n self,\n request: socketserver._RequestType,\n client_address: _socket._RetAddress,\n server: socketserver.BaseServer,\n *,\n directory: str | None = None,\n ) -> None: ...\n def do_GET(self) -> None: ...\n def do_HEAD(self) -> None: ...\n def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented\n def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented\n def translate_path(self, path: str) -> str: ... # undocumented\n def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented\n def guess_type(self, path: StrPath) -> str: ... # undocumented"},{"className":"BaseHTTPRequestHandler","col":0,"comment":"null","endLoc":51,"id":942,"nodeType":"Class","startLoc":18,"text":"class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):\n client_address: tuple[str, int]\n close_connection: bool\n requestline: str\n command: str\n path: str\n request_version: str\n headers: email.message.Message\n server_version: str\n sys_version: str\n error_message_format: str\n error_content_type: str\n protocol_version: str\n MessageClass: type\n responses: Mapping[int, tuple[str, str]]\n default_request_version: str # undocumented\n weekdayname: ClassVar[Sequence[str]] # undocumented\n monthname: ClassVar[Sequence[str | None]] # undocumented\n def handle_one_request(self) -> None: ...\n def handle_expect_100(self) -> bool: ...\n def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ...\n def send_response(self, code: int, message: str | None = None) -> None: ...\n def send_header(self, keyword: str, value: str) -> None: ...\n def send_response_only(self, code: int, message: str | None = None) -> None: ...\n def end_headers(self) -> None: ...\n def flush_headers(self) -> None: ...\n def log_request(self, code: int | str = \"-\", size: int | str = \"-\") -> None: ...\n def log_error(self, format: str, *args: Any) -> None: ...\n def log_message(self, format: str, *args: Any) -> None: ...\n def version_string(self) -> str: ...\n def date_time_string(self, timestamp: float | None = None) -> str: ...\n def log_date_time_string(self) -> str: ...\n def address_string(self) -> str: ...\n def parse_request(self) -> bool: ... 
# undocumented"},{"className":"StreamRequestHandler","col":0,"comment":"null","endLoc":162,"id":943,"nodeType":"Class","startLoc":155,"text":"class StreamRequestHandler(BaseRequestHandler):\n rbufsize: ClassVar[int] # undocumented\n wbufsize: ClassVar[int] # undocumented\n timeout: ClassVar[float | None] # undocumented\n disable_nagle_algorithm: ClassVar[bool] # undocumented\n connection: Any # undocumented\n rfile: BinaryIO\n wfile: BinaryIO"},{"className":"BaseRequestHandler","col":0,"comment":"null","endLoc":153,"id":944,"nodeType":"Class","startLoc":140,"text":"class BaseRequestHandler:\n # `request` is technically of type _RequestType,\n # but there are some concerns that having a union here would cause\n # too much inconvenience to people using it (see\n # https://github.com/python/typeshed/pull/384#issuecomment-234649696)\n #\n # Note also that _RetAddress is also just an alias for `Any`\n request: Any\n client_address: _RetAddress\n server: BaseServer\n def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ...\n def setup(self) -> None: ...\n def handle(self) -> None: ...\n def finish(self) -> None: ..."},{"col":4,"comment":"null","endLoc":150,"header":"def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None","id":945,"name":"__init__","nodeType":"Function","startLoc":150,"text":"def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ..."},{"col":4,"comment":"null","endLoc":151,"header":"def setup(self) -> None","id":946,"name":"setup","nodeType":"Function","startLoc":151,"text":"def setup(self) -> None: ..."},{"col":4,"comment":"null","endLoc":152,"header":"def handle(self) -> None","id":947,"name":"handle","nodeType":"Function","startLoc":152,"text":"def handle(self) -> None: ..."},{"col":4,"comment":"null","endLoc":153,"header":"def finish(self) -> None","id":948,"name":"finish","nodeType":"Function","startLoc":153,"text":"def finish(self) -> None: ..."},{"attributeType":"null","col":4,"comment":"null","endLoc":147,"id":949,"name":"request","nodeType":"Attribute","startLoc":147,"text":"request"},{"attributeType":"null","col":4,"comment":"null","endLoc":148,"id":950,"name":"client_address","nodeType":"Attribute","startLoc":148,"text":"client_address"},{"attributeType":"BaseServer","col":4,"comment":"null","endLoc":149,"id":952,"name":"server","nodeType":"Attribute","startLoc":149,"text":"server"},{"attributeType":"dict","col":0,"comment":"null","endLoc":25,"id":953,"name":"_codes","nodeType":"Attribute","startLoc":25,"text":"_codes"},{"attributeType":"LookupDict","col":0,"comment":"null","endLoc":105,"id":954,"name":"codes","nodeType":"Attribute","startLoc":105,"text":"codes"},{"attributeType":"int","col":4,"comment":"null","endLoc":156,"id":955,"name":"rbufsize","nodeType":"Attribute","startLoc":156,"text":"rbufsize"},{"col":0,"comment":"","endLoc":21,"header":"status_codes.py#","id":956,"name":"","nodeType":"Function","startLoc":3,"text":"r\"\"\"\nThe ``codes`` object defines a mapping from common names for HTTP statuses\nto their numerical codes, accessible either as attributes or as dictionary\nitems.\n\nExample::\n\n >>> import requests\n >>> requests.codes['temporary_redirect']\n 307\n >>> requests.codes.teapot\n 418\n >>> requests.codes['\\o/']\n 200\n\nSome codes have multiple names, and both upper- and lower-case versions of\nthe names are allowed. 
For example, ``codes.ok``, ``codes.OK``, and\n``codes.okay`` all correspond to the HTTP status code 200.\n\"\"\"\n\n_codes = {\n\n # Informational.\n 100: ('continue',),\n 101: ('switching_protocols',),\n 102: ('processing',),\n 103: ('checkpoint',),\n 122: ('uri_too_long', 'request_uri_too_long'),\n 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\\\o/', '✓'),\n 201: ('created',),\n 202: ('accepted',),\n 203: ('non_authoritative_info', 'non_authoritative_information'),\n 204: ('no_content',),\n 205: ('reset_content', 'reset'),\n 206: ('partial_content', 'partial'),\n 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),\n 208: ('already_reported',),\n 226: ('im_used',),\n\n # Redirection.\n 300: ('multiple_choices',),\n 301: ('moved_permanently', 'moved', '\\\\o-'),\n 302: ('found',),\n 303: ('see_other', 'other'),\n 304: ('not_modified',),\n 305: ('use_proxy',),\n 306: ('switch_proxy',),\n 307: ('temporary_redirect', 'temporary_moved', 'temporary'),\n 308: ('permanent_redirect',\n 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0\n\n # Client Error.\n 400: ('bad_request', 'bad'),\n 401: ('unauthorized',),\n 402: ('payment_required', 'payment'),\n 403: ('forbidden',),\n 404: ('not_found', '-o-'),\n 405: ('method_not_allowed', 'not_allowed'),\n 406: ('not_acceptable',),\n 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),\n 408: ('request_timeout', 'timeout'),\n 409: ('conflict',),\n 410: ('gone',),\n 411: ('length_required',),\n 412: ('precondition_failed', 'precondition'),\n 413: ('request_entity_too_large',),\n 414: ('request_uri_too_large',),\n 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),\n 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),\n 417: ('expectation_failed',),\n 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),\n 421: ('misdirected_request',),\n 422: ('unprocessable_entity', 'unprocessable'),\n 423: ('locked',),\n 424: ('failed_dependency', 'dependency'),\n 425: ('unordered_collection', 'unordered'),\n 426: ('upgrade_required', 'upgrade'),\n 428: ('precondition_required', 'precondition'),\n 429: ('too_many_requests', 'too_many'),\n 431: ('header_fields_too_large', 'fields_too_large'),\n 444: ('no_response', 'none'),\n 449: ('retry_with', 'retry'),\n 450: ('blocked_by_windows_parental_controls', 'parental_controls'),\n 451: ('unavailable_for_legal_reasons', 'legal_reasons'),\n 499: ('client_closed_request',),\n\n # Server Error.\n 500: ('internal_server_error', 'server_error', '/o\\\\', '✗'),\n 501: ('not_implemented',),\n 502: ('bad_gateway',),\n 503: ('service_unavailable', 'unavailable'),\n 504: ('gateway_timeout',),\n 505: ('http_version_not_supported', 'http_version'),\n 506: ('variant_also_negotiates',),\n 507: ('insufficient_storage',),\n 509: ('bandwidth_limit_exceeded', 'bandwidth'),\n 510: ('not_extended',),\n 511: ('network_authentication_required', 'network_auth', 'network_authentication'),\n}\n\ncodes = LookupDict(name='status_codes')\n\n_init()"},{"attributeType":"int","col":4,"comment":"null","endLoc":157,"id":957,"name":"wbufsize","nodeType":"Attribute","startLoc":157,"text":"wbufsize"},{"attributeType":"float | None","col":4,"comment":"null","endLoc":158,"id":958,"name":"timeout","nodeType":"Attribute","startLoc":158,"text":"timeout"},{"col":0,"comment":"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n ","endLoc":677,"header":"def 
dotted_netmask(mask)","id":959,"name":"dotted_netmask","nodeType":"Function","startLoc":669,"text":"def dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))"},{"col":0,"comment":"Determines appropriate setting for a given request, taking into account\n the explicit setting on that request, and the setting in the session. If a\n setting is a dictionary, they will be merged together using `dict_class`\n ","endLoc":78,"header":"def merge_setting(request_setting, session_setting, dict_class=OrderedDict)","id":961,"name":"merge_setting","nodeType":"Function","startLoc":50,"text":"def merge_setting(request_setting, session_setting, dict_class=OrderedDict):\n \"\"\"Determines appropriate setting for a given request, taking into account\n the explicit setting on that request, and the setting in the session. If a\n setting is a dictionary, they will be merged together using `dict_class`\n \"\"\"\n\n if session_setting is None:\n return request_setting\n\n if request_setting is None:\n return session_setting\n\n # Bypass if not a dictionary (e.g. verify)\n if not (\n isinstance(session_setting, Mapping) and\n isinstance(request_setting, Mapping)\n ):\n return request_setting\n\n merged_setting = dict_class(to_key_val_list(session_setting))\n merged_setting.update(to_key_val_list(request_setting))\n\n # Remove keys that are set to None. Extract keys first to avoid altering\n # the dictionary during iteration.\n none_keys = [k for (k, v) in merged_setting.items() if v is None]\n for key in none_keys:\n del merged_setting[key]\n\n return merged_setting"},{"attributeType":"bool","col":4,"comment":"null","endLoc":159,"id":962,"name":"disable_nagle_algorithm","nodeType":"Attribute","startLoc":159,"text":"disable_nagle_algorithm"},{"attributeType":"null","col":4,"comment":"null","endLoc":160,"id":963,"name":"connection","nodeType":"Attribute","startLoc":160,"text":"connection"},{"col":4,"comment":"Prepares the given HTTP headers.","endLoc":455,"header":"def prepare_headers(self, headers)","id":964,"name":"prepare_headers","nodeType":"Function","startLoc":446,"text":"def prepare_headers(self, headers):\n \"\"\"Prepares the given HTTP headers.\"\"\"\n\n self.headers = CaseInsensitiveDict()\n if headers:\n for header in headers.items():\n # Raise exception on invalid header value.\n check_header_validity(header)\n name, value = header\n self.headers[to_native_string(name)] = value"},{"attributeType":"BinaryIO","col":4,"comment":"null","endLoc":161,"id":965,"name":"rfile","nodeType":"Attribute","startLoc":161,"text":"rfile"},{"attributeType":"BinaryIO","col":4,"comment":"null","endLoc":162,"id":968,"name":"wfile","nodeType":"Attribute","startLoc":162,"text":"wfile"},{"col":4,"comment":"null","endLoc":36,"header":"def handle_one_request(self) -> None","id":969,"name":"handle_one_request","nodeType":"Function","startLoc":36,"text":"def handle_one_request(self) -> None: ..."},{"col":4,"comment":"null","endLoc":37,"header":"def handle_expect_100(self) -> bool","id":970,"name":"handle_expect_100","nodeType":"Function","startLoc":37,"text":"def handle_expect_100(self) -> bool: ..."},{"col":4,"comment":"null","endLoc":38,"header":"def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None","id":971,"name":"send_error","nodeType":"Function","startLoc":38,"text":"def send_error(self, 
code: int, message: str | None = None, explain: str | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":39,"header":"def send_response(self, code: int, message: str | None = None) -> None","id":972,"name":"send_response","nodeType":"Function","startLoc":39,"text":"def send_response(self, code: int, message: str | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":40,"header":"def send_header(self, keyword: str, value: str) -> None","id":973,"name":"send_header","nodeType":"Function","startLoc":40,"text":"def send_header(self, keyword: str, value: str) -> None: ..."},{"col":4,"comment":"null","endLoc":41,"header":"def send_response_only(self, code: int, message: str | None = None) -> None","id":974,"name":"send_response_only","nodeType":"Function","startLoc":41,"text":"def send_response_only(self, code: int, message: str | None = None) -> None: ..."},{"col":4,"comment":"null","endLoc":42,"header":"def end_headers(self) -> None","id":975,"name":"end_headers","nodeType":"Function","startLoc":42,"text":"def end_headers(self) -> None: ..."},{"col":4,"comment":"null","endLoc":43,"header":"def flush_headers(self) -> None","id":976,"name":"flush_headers","nodeType":"Function","startLoc":43,"text":"def flush_headers(self) -> None: ..."},{"col":4,"comment":"null","endLoc":44,"header":"def log_request(self, code: int | str = \"-\", size: int | str = \"-\") -> None","id":977,"name":"log_request","nodeType":"Function","startLoc":44,"text":"def log_request(self, code: int | str = \"-\", size: int | str = \"-\") -> None: ..."},{"col":4,"comment":"null","endLoc":45,"header":"def log_error(self, format: str, *args: Any) -> None","id":978,"name":"log_error","nodeType":"Function","startLoc":45,"text":"def log_error(self, format: str, *args: Any) -> None: ..."},{"col":4,"comment":"null","endLoc":46,"header":"def log_message(self, format: str, *args: Any) -> None","id":979,"name":"log_message","nodeType":"Function","startLoc":46,"text":"def log_message(self, format: str, *args: Any) -> None: ..."},{"col":4,"comment":"null","endLoc":47,"header":"def version_string(self) -> str","id":980,"name":"version_string","nodeType":"Function","startLoc":47,"text":"def version_string(self) -> str: ..."},{"col":4,"comment":"null","endLoc":48,"header":"def date_time_string(self, timestamp: float | None = None) -> str","id":981,"name":"date_time_string","nodeType":"Function","startLoc":48,"text":"def date_time_string(self, timestamp: float | None = None) -> str: ..."},{"col":4,"comment":"null","endLoc":49,"header":"def log_date_time_string(self) -> str","id":982,"name":"log_date_time_string","nodeType":"Function","startLoc":49,"text":"def log_date_time_string(self) -> str: ..."},{"col":4,"comment":"null","endLoc":50,"header":"def address_string(self) -> str","id":983,"name":"address_string","nodeType":"Function","startLoc":50,"text":"def address_string(self) -> str: ..."},{"col":4,"comment":"null","endLoc":51,"header":"def parse_request(self) -> bool","id":984,"name":"parse_request","nodeType":"Function","startLoc":51,"text":"def parse_request(self) -> bool: ... 
# undocumented"},{"attributeType":"(str, int)","col":4,"comment":"null","endLoc":19,"id":985,"name":"client_address","nodeType":"Attribute","startLoc":19,"text":"client_address"},{"col":0,"comment":"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n ","endLoc":511,"header":"def _parse_content_type_header(header)","id":986,"name":"_parse_content_type_header","nodeType":"Function","startLoc":489,"text":"def _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict"},{"attributeType":"bool","col":4,"comment":"null","endLoc":20,"id":987,"name":"close_connection","nodeType":"Attribute","startLoc":20,"text":"close_connection"},{"col":4,"comment":"Prepares the given HTTP cookie data.\n\n This function eventually generates a ``Cookie`` header from the\n given cookies using cookielib. Due to cookielib's design, the header\n will not be regenerated if it already exists, meaning this function\n can only be called once for the life of the\n :class:`PreparedRequest ` object. Any subsequent calls\n to ``prepare_cookies`` will have no actual effect, unless the \"Cookie\"\n header is removed beforehand.\n ","endLoc":584,"header":"def prepare_cookies(self, cookies)","id":988,"name":"prepare_cookies","nodeType":"Function","startLoc":566,"text":"def prepare_cookies(self, cookies):\n \"\"\"Prepares the given HTTP cookie data.\n\n This function eventually generates a ``Cookie`` header from the\n given cookies using cookielib. Due to cookielib's design, the header\n will not be regenerated if it already exists, meaning this function\n can only be called once for the life of the\n :class:`PreparedRequest ` object. 
Any subsequent calls\n to ``prepare_cookies`` will have no actual effect, unless the \"Cookie\"\n header is removed beforehand.\n \"\"\"\n if isinstance(cookies, cookielib.CookieJar):\n self._cookies = cookies\n else:\n self._cookies = cookiejar_from_dict(cookies)\n\n cookie_header = get_cookie_header(self._cookies, self)\n if cookie_header is not None:\n self.headers['Cookie'] = cookie_header"},{"attributeType":"str","col":4,"comment":"null","endLoc":21,"id":989,"name":"requestline","nodeType":"Attribute","startLoc":21,"text":"requestline"},{"attributeType":"str","col":4,"comment":"null","endLoc":22,"id":990,"name":"command","nodeType":"Attribute","startLoc":22,"text":"command"},{"attributeType":"str","col":4,"comment":"null","endLoc":23,"id":991,"name":"path","nodeType":"Attribute","startLoc":23,"text":"path"},{"attributeType":"str","col":4,"comment":"null","endLoc":24,"id":992,"name":"request_version","nodeType":"Attribute","startLoc":24,"text":"request_version"},{"attributeType":"Message","col":4,"comment":"null","endLoc":25,"id":993,"name":"headers","nodeType":"Attribute","startLoc":25,"text":"headers"},{"col":0,"comment":"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n ","endLoc":536,"header":"def get_encoding_from_headers(headers)","id":994,"name":"get_encoding_from_headers","nodeType":"Function","startLoc":514,"text":"def get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'"},{"col":4,"comment":"Prepares the given HTTP body data.","endLoc":529,"header":"def prepare_body(self, data, files, json=None)","id":995,"name":"prepare_body","nodeType":"Function","startLoc":457,"text":"def prepare_body(self, data, files, json=None):\n \"\"\"Prepares the given HTTP body data.\"\"\"\n\n # Check if file, fo, generator, iterator.\n # If not, run through normal process.\n\n # Nottin' on you.\n body = None\n content_type = None\n\n if not data and json is not None:\n # urllib3 requires a bytes-like body. 
Python 2's json.dumps\n # provides this natively, but Python 3 gives a Unicode string.\n content_type = 'application/json'\n\n try:\n body = complexjson.dumps(json, allow_nan=False)\n except ValueError as ve:\n raise InvalidJSONError(ve, request=self)\n\n if not isinstance(body, bytes):\n body = body.encode('utf-8')\n\n is_stream = all([\n hasattr(data, '__iter__'),\n not isinstance(data, (basestring, list, tuple, Mapping))\n ])\n\n if is_stream:\n try:\n length = super_len(data)\n except (TypeError, AttributeError, UnsupportedOperation):\n length = None\n\n body = data\n\n if getattr(body, 'tell', None) is not None:\n # Record the current file position before reading.\n # This will allow us to rewind a file in the event\n # of a redirect.\n try:\n self._body_position = body.tell()\n except (IOError, OSError):\n # This differentiates from None, allowing us to catch\n # a failed `tell()` later when trying to rewind the body\n self._body_position = object()\n\n if files:\n raise NotImplementedError('Streamed bodies and files are mutually exclusive.')\n\n if length:\n self.headers['Content-Length'] = builtin_str(length)\n else:\n self.headers['Transfer-Encoding'] = 'chunked'\n else:\n # Multi-part file uploads.\n if files:\n (body, content_type) = self._encode_files(files, data)\n else:\n if data:\n body = self._encode_params(data)\n if isinstance(data, basestring) or hasattr(data, 'read'):\n content_type = None\n else:\n content_type = 'application/x-www-form-urlencoded'\n\n self.prepare_content_length(body)\n\n # Add content-type if it wasn't explicitly provided.\n if content_type and ('content-type' not in self.headers):\n self.headers['Content-Type'] = content_type\n\n self.body = body"},{"col":0,"comment":"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n ","endLoc":486,"header":"def get_encodings_from_content(content)","id":996,"name":"get_encodings_from_content","nodeType":"Function","startLoc":469,"text":"def get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))"},{"col":0,"comment":"Properly merges both requests and session hooks.\n\n This is necessary because when request_hooks == {'response': []}, the\n merge breaks Session hooks entirely.\n ","endLoc":93,"header":"def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict)","id":997,"name":"merge_hooks","nodeType":"Function","startLoc":81,"text":"def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):\n \"\"\"Properly merges both requests and session hooks.\n\n This is necessary because when request_hooks == {'response': []}, the\n merge breaks Session hooks entirely.\n \"\"\"\n if session_hooks is None or session_hooks.get('response') == []:\n return request_hooks\n\n if request_hooks is None or request_hooks.get('response') == []:\n return session_hooks\n\n return merge_setting(request_hooks, session_hooks, dict_class)"},{"id":998,"name":"requirements-dev.txt","nodeType":"TextFile","path":"","text":"pytest>=2.8.0,<=6.2.5\npytest-cov\npytest-httpbin==1.0.0\npytest-mock==2.0.0\nhttpbin==0.7.0\nFlask>=1.0,<2.0\ntrustme\nwheel\n"},{"col":4,"comment":"\n Check the environment and merge it with some settings.\n\n :rtype: dict\n ","endLoc":718,"header":"def merge_environment_settings(self, url, proxies, stream, verify, cert)","id":1000,"name":"merge_environment_settings","nodeType":"Function","startLoc":691,"text":"def merge_environment_settings(self, url, proxies, stream, verify, cert):\n \"\"\"\n Check the environment and merge it with some settings.\n\n :rtype: dict\n \"\"\"\n # Gather clues from the surrounding environment.\n if self.trust_env:\n # Set environment's proxies.\n no_proxy = proxies.get('no_proxy') if proxies is not None else None\n env_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n for (k, v) in env_proxies.items():\n proxies.setdefault(k, v)\n\n # Look for requests environment configuration and be compatible\n # with cURL.\n if verify is True or verify is None:\n verify = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n\n # Merge all the kwargs.\n proxies = merge_setting(proxies, self.proxies)\n stream = merge_setting(stream, self.stream)\n verify = merge_setting(verify, self.verify)\n cert = merge_setting(cert, self.cert)\n\n return {'verify': verify, 'proxies': proxies, 'stream': stream,\n 'cert': cert}"},{"col":0,"comment":"\n Return a dict of environment proxies.\n\n :rtype: dict\n ","endLoc":807,"header":"def get_environ_proxies(url, no_proxy=None)","id":1001,"name":"get_environ_proxies","nodeType":"Function","startLoc":798,"text":"def get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()"},{"col":0,"comment":"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n ","endLoc":795,"header":"def should_bypass_proxies(url, no_proxy)","id":1002,"name":"should_bypass_proxies","nodeType":"Function","startLoc":737,"text":"def should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a 
consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False"},{"col":0,"comment":"null","endLoc":333,"header":"def test_redirect_rfc1808_to_non_ascii_location()","id":1003,"name":"test_redirect_rfc1808_to_non_ascii_location","nodeType":"Function","startLoc":304,"text":"def test_redirect_rfc1808_to_non_ascii_location():\n path = u'š'\n expected_path = b'%C5%A1'\n redirect_request = [] # stores the second request to the server\n\n def redirect_resp_handler(sock):\n consume_socket_content(sock, timeout=0.5)\n location = u'//{}:{}/{}'.format(host, port, path)\n sock.send(\n b'HTTP/1.1 301 Moved Permanently\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: ' + location.encode('utf8') + b'\\r\\n'\n b'\\r\\n'\n )\n redirect_request.append(consume_socket_content(sock, timeout=0.5))\n sock.send(b'HTTP/1.1 200 OK\\r\\n\\r\\n')\n\n close_server = threading.Event()\n server = Server(redirect_resp_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = u'http://{}:{}'.format(host, port)\n r = requests.get(url=url, allow_redirects=True)\n assert r.status_code == 200\n assert len(r.history) == 1\n assert r.history[0].status_code == 301\n assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1')\n assert r.url == u'{}/{}'.format(url, expected_path.decode('ascii'))\n\n 
close_server.set()"},{"attributeType":"str","col":4,"comment":"null","endLoc":26,"id":1004,"name":"server_version","nodeType":"Attribute","startLoc":26,"text":"server_version"},{"attributeType":"str","col":4,"comment":"null","endLoc":27,"id":1005,"name":"sys_version","nodeType":"Attribute","startLoc":27,"text":"sys_version"},{"attributeType":"str","col":4,"comment":"null","endLoc":28,"id":1006,"name":"error_message_format","nodeType":"Attribute","startLoc":28,"text":"error_message_format"},{"attributeType":"str","col":4,"comment":"null","endLoc":29,"id":1007,"name":"error_content_type","nodeType":"Attribute","startLoc":29,"text":"error_content_type"},{"attributeType":"str","col":4,"comment":"null","endLoc":30,"id":1008,"name":"protocol_version","nodeType":"Attribute","startLoc":30,"text":"protocol_version"},{"attributeType":"type","col":4,"comment":"null","endLoc":31,"id":1009,"name":"MessageClass","nodeType":"Attribute","startLoc":31,"text":"MessageClass"},{"attributeType":"Mapping","col":4,"comment":"null","endLoc":32,"id":1011,"name":"responses","nodeType":"Attribute","startLoc":32,"text":"responses"},{"col":16,"endLoc":745,"id":1012,"nodeType":"Lambda","startLoc":745,"text":"lambda k: os.environ.get(k) or os.environ.get(k.upper())"},{"col":4,"comment":"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n ","endLoc":321,"header":"def rebuild_method(self, prepared_request, response)","id":1013,"name":"rebuild_method","nodeType":"Function","startLoc":301,"text":"def rebuild_method(self, prepared_request, response):\n \"\"\"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n \"\"\"\n method = prepared_request.method\n\n # https://tools.ietf.org/html/rfc7231#section-6.4.4\n if response.status_code == codes.see_other and method != 'HEAD':\n method = 'GET'\n\n # Do what the browsers do, despite standards...\n # First, turn 302s into GETs.\n if response.status_code == codes.found and method != 'HEAD':\n method = 'GET'\n\n # Second, if a POST is responded to with a 301, turn it into a GET.\n # This bizarre behaviour is explained in Issue 1704.\n if response.status_code == codes.moved and method == 'POST':\n method = 'GET'\n\n prepared_request.method = method"},{"attributeType":"str","col":4,"comment":"null","endLoc":33,"id":1015,"name":"default_request_version","nodeType":"Attribute","startLoc":33,"text":"default_request_version"},{"attributeType":"Sequence","col":4,"comment":"null","endLoc":34,"id":1016,"name":"weekdayname","nodeType":"Attribute","startLoc":34,"text":"weekdayname"},{"attributeType":"null","col":43,"comment":"null","endLoc":34,"id":1017,"name":"RequestsJSONDecodeError","nodeType":"Attribute","startLoc":34,"text":"RequestsJSONDecodeError"},{"attributeType":"null","col":28,"comment":"null","endLoc":44,"id":1018,"name":"complexjson","nodeType":"Attribute","startLoc":44,"text":"complexjson"},{"attributeType":"(Any, Any, Any, Any, Any)","col":0,"comment":"null","endLoc":49,"id":1019,"name":"REDIRECT_STATI","nodeType":"Attribute","startLoc":49,"text":"REDIRECT_STATI"},{"col":0,"comment":"\n :rtype: bool\n ","endLoc":688,"header":"def is_ipv4_address(string_ip)","id":1020,"name":"is_ipv4_address","nodeType":"Function","startLoc":680,"text":"def is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return 
True"},{"attributeType":"int","col":0,"comment":"null","endLoc":57,"id":1021,"name":"DEFAULT_REDIRECT_LIMIT","nodeType":"Attribute","startLoc":57,"text":"DEFAULT_REDIRECT_LIMIT"},{"attributeType":"int","col":0,"comment":"null","endLoc":58,"id":1022,"name":"CONTENT_CHUNK_SIZE","nodeType":"Attribute","startLoc":58,"text":"CONTENT_CHUNK_SIZE"},{"col":0,"comment":"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n ","endLoc":712,"header":"def is_valid_cidr(string_network)","id":1023,"name":"is_valid_cidr","nodeType":"Function","startLoc":691,"text":"def is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True"},{"col":4,"comment":"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n ","endLoc":299,"header":"def rebuild_proxies(self, prepared_request, proxies)","id":1024,"name":"rebuild_proxies","nodeType":"Function","startLoc":272,"text":"def rebuild_proxies(self, prepared_request, proxies):\n \"\"\"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n \"\"\"\n headers = prepared_request.headers\n scheme = urlparse(prepared_request.url).scheme\n new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)\n\n if 'Proxy-Authorization' in headers:\n del headers['Proxy-Authorization']\n\n try:\n username, password = get_auth_from_url(new_proxies[scheme])\n except KeyError:\n username, password = None, None\n\n if username and password:\n headers['Proxy-Authorization'] = _basic_auth_str(username, password)\n\n return new_proxies"},{"attributeType":"int","col":0,"comment":"null","endLoc":59,"id":1025,"name":"ITER_CHUNK_SIZE","nodeType":"Attribute","startLoc":59,"text":"ITER_CHUNK_SIZE"},{"col":0,"comment":"","endLoc":8,"header":"models.py#","id":1026,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.models\n~~~~~~~~~~~~~~~\n\nThis module contains the primary objects that power Requests.\n\"\"\"\n\nREDIRECT_STATI = (\n codes.moved, # 301\n codes.found, # 302\n codes.other, # 303\n codes.temporary_redirect, # 307\n codes.permanent_redirect, # 308\n)\n\nDEFAULT_REDIRECT_LIMIT = 30\n\nCONTENT_CHUNK_SIZE = 10 * 1024\n\nITER_CHUNK_SIZE = 512"},{"col":0,"comment":"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n ","endLoc":861,"header":"def resolve_proxies(request, proxies, trust_env=True)","id":1028,"name":"resolve_proxies","nodeType":"Function","startLoc":836,"text":"def resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get('no_proxy')\n new_proxies = proxies.copy()\n\n bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)\n if trust_env and not bypass_proxy:\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies"},{"attributeType":"Sequence","col":4,"comment":"null","endLoc":35,"id":1029,"name":"monthname","nodeType":"Attribute","startLoc":35,"text":"monthname"},{"col":0,"comment":"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n ","endLoc":414,"header":"def parse_dict_header(value)","id":1030,"name":"parse_dict_header","nodeType":"Function","startLoc":383,"text":"def parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result"},{"col":4,"comment":"null","endLoc":65,"header":"def __init__(\n self,\n request: socketserver._RequestType,\n client_address: _socket._RetAddress,\n server: socketserver.BaseServer,\n *,\n directory: str | None = None,\n ) -> None","id":1031,"name":"__init__","nodeType":"Function","startLoc":58,"text":"def __init__(\n self,\n request: 
socketserver._RequestType,\n client_address: _socket._RetAddress,\n server: socketserver.BaseServer,\n *,\n directory: str | None = None,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":66,"header":"def do_GET(self) -> None","id":1032,"name":"do_GET","nodeType":"Function","startLoc":66,"text":"def do_GET(self) -> None: ..."},{"col":4,"comment":"null","endLoc":67,"header":"def do_HEAD(self) -> None","id":1033,"name":"do_HEAD","nodeType":"Function","startLoc":67,"text":"def do_HEAD(self) -> None: ..."},{"col":4,"comment":"null","endLoc":68,"header":"def send_head(self) -> io.BytesIO | BinaryIO | None","id":1034,"name":"send_head","nodeType":"Function","startLoc":68,"text":"def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented"},{"col":4,"comment":"null","endLoc":69,"header":"def list_directory(self, path: StrPath) -> io.BytesIO | None","id":1035,"name":"list_directory","nodeType":"Function","startLoc":69,"text":"def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented"},{"col":4,"comment":"null","endLoc":70,"header":"def translate_path(self, path: str) -> str","id":1036,"name":"translate_path","nodeType":"Function","startLoc":70,"text":"def translate_path(self, path: str) -> str: ... # undocumented"},{"col":4,"comment":"null","endLoc":71,"header":"def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None","id":1037,"name":"copyfile","nodeType":"Function","startLoc":71,"text":"def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented"},{"col":4,"comment":"null","endLoc":72,"header":"def guess_type(self, path: StrPath) -> str","id":1038,"name":"guess_type","nodeType":"Function","startLoc":72,"text":"def guess_type(self, path: StrPath) -> str: ... # undocumented"},{"attributeType":"dict","col":4,"comment":"null","endLoc":54,"id":1039,"name":"extensions_map","nodeType":"Attribute","startLoc":54,"text":"extensions_map"},{"col":0,"comment":"Unquotes a header value. (Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n ","endLoc":440,"header":"def unquote_header_value(value, is_filename=False)","id":1040,"name":"unquote_header_value","nodeType":"Function","startLoc":418,"text":"def unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. (Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. 
See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value"},{"col":0,"comment":"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n ","endLoc":974,"header":"def prepend_scheme_if_needed(url, new_scheme)","id":1041,"name":"prepend_scheme_if_needed","nodeType":"Function","startLoc":960,"text":"def prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))"},{"col":4,"comment":"Prepare Content-Length header based on request method and body","endLoc":542,"header":"def prepare_content_length(self, body)","id":1042,"name":"prepare_content_length","nodeType":"Function","startLoc":531,"text":"def prepare_content_length(self, body):\n \"\"\"Prepare Content-Length header based on request method and body\"\"\"\n if body is not None:\n length = super_len(body)\n if length:\n # If length exists, set it. Otherwise, we fallback\n # to Transfer-Encoding: chunked.\n self.headers['Content-Length'] = builtin_str(length)\n elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:\n # Set Content-Length to 0 for methods that can have a body\n # but don't provide one. (i.e. not GET or HEAD)\n self.headers['Content-Length'] = '0'"},{"col":4,"comment":"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n ","endLoc":270,"header":"def rebuild_auth(self, prepared_request, response)","id":1043,"name":"rebuild_auth","nodeType":"Function","startLoc":254,"text":"def rebuild_auth(self, prepared_request, response):\n \"\"\"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. 
This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n \"\"\"\n headers = prepared_request.headers\n url = prepared_request.url\n\n if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):\n # If we get redirected to a new host, we should strip out any\n # authentication headers.\n del headers['Authorization']\n\n # .netrc might have more auth for us on our new host.\n new_auth = get_netrc_auth(url) if self.trust_env else None\n if new_auth is not None:\n prepared_request.prepare_auth(new_auth)"},{"col":0,"comment":"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing","endLoc":734,"header":"@contextlib.contextmanager\ndef set_environ(env_name, value)","id":1044,"name":"set_environ","nodeType":"Function","startLoc":715,"text":"@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value"},{"col":0,"comment":"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n ","endLoc":833,"header":"def select_proxy(url, proxies)","id":1045,"name":"select_proxy","nodeType":"Function","startLoc":810,"text":"def select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy"},{"col":0,"comment":"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n ","endLoc":1048,"header":"def rewind_body(prepared_request)","id":1046,"name":"rewind_body","nodeType":"Function","startLoc":1036,"text":"def rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")"},{"attributeType":"str","col":4,"comment":"null","endLoc":57,"id":1047,"name":"directory","nodeType":"Attribute","startLoc":57,"text":"directory"},{"col":4,"comment":"Prepares the given HTTP auth data.","endLoc":564,"header":"def prepare_auth(self, auth, 
url='')","id":1048,"name":"prepare_auth","nodeType":"Function","startLoc":544,"text":"def prepare_auth(self, auth, url=''):\n \"\"\"Prepares the given HTTP auth data.\"\"\"\n\n # If no Auth is explicitly provided, extract it from the URL first.\n if auth is None:\n url_auth = get_auth_from_url(self.url)\n auth = url_auth if any(url_auth) else None\n\n if auth:\n if isinstance(auth, tuple) and len(auth) == 2:\n # special-case basic HTTP auth\n auth = HTTPBasicAuth(*auth)\n\n # Allow auth to make its changes.\n r = auth(self)\n\n # Update self to reflect the auth changes.\n self.__dict__.update(r.__dict__)\n\n # Recompute Content-Length\n self.prepare_content_length(self.body)"},{"col":0,"comment":"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n ","endLoc":1033,"header":"def urldefragauth(url)","id":1049,"name":"urldefragauth","nodeType":"Function","startLoc":1019,"text":"def urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))"},{"col":0,"comment":"null","endLoc":25,"header":"def prepare_url(value)","id":1050,"name":"prepare_url","nodeType":"Function","startLoc":18,"text":"def prepare_url(value):\n # Issue #1483: Make sure the URL always has a trailing slash\n httpbin_url = value.url.rstrip('/') + '/'\n\n def inner(*suffix):\n return urljoin(httpbin_url, '/'.join(suffix))\n\n return inner"},{"col":0,"comment":"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n ","endLoc":466,"header":"def add_dict_to_cookiejar(cj, cookie_dict)","id":1051,"name":"add_dict_to_cookiejar","nodeType":"Function","startLoc":458,"text":"def add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)"},{"col":4,"comment":"null","endLoc":412,"header":"def __enter__(self)","id":1052,"name":"__enter__","nodeType":"Function","startLoc":411,"text":"def __enter__(self):\n return self"},{"col":4,"comment":"null","endLoc":415,"header":"def __exit__(self, *args)","id":1053,"name":"__exit__","nodeType":"Function","startLoc":414,"text":"def __exit__(self, *args):\n self.close()"},{"col":4,"comment":"Closes all adapters and as such the session","endLoc":737,"header":"def close(self)","id":1054,"name":"close","nodeType":"Function","startLoc":734,"text":"def close(self):\n \"\"\"Closes all adapters and as such the session\"\"\"\n for v in self.adapters.values():\n v.close()"},{"col":4,"comment":"Sends a GET request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":542,"header":"def get(self, url, **kwargs)","id":1056,"name":"get","nodeType":"Function","startLoc":533,"text":"def get(self, url, **kwargs):\n r\"\"\"Sends a GET request. 
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('GET', url, **kwargs)"},{"attributeType":"null","col":4,"comment":"null","endLoc":14,"id":1057,"name":"cStringIO","nodeType":"Attribute","startLoc":14,"text":"cStringIO"},{"col":4,"comment":"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":553,"header":"def options(self, url, **kwargs)","id":1058,"name":"options","nodeType":"Function","startLoc":544,"text":"def options(self, url, **kwargs):\n r\"\"\"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('OPTIONS', url, **kwargs)"},{"col":4,"comment":"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":564,"header":"def head(self, url, **kwargs)","id":1059,"name":"head","nodeType":"Function","startLoc":555,"text":"def head(self, url, **kwargs):\n r\"\"\"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return self.request('HEAD', url, **kwargs)"},{"className":"TestSuperLen","col":0,"comment":"null","endLoc":128,"id":1060,"nodeType":"Class","startLoc":31,"text":"class TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n 
file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_tarfile_member(self, tmpdir):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n\n tar_obj = str(tmpdir.join('test.tar'))\n with tarfile.open(tar_obj, 'w') as tar:\n tar.add(str(file_obj), arcname='test.txt')\n\n with tarfile.open(tar_obj) as tar:\n member = tar.extractfile('test.txt')\n assert super_len(member) == 4\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0"},{"col":4,"comment":"Ensures that we properly deal with different kinds of IO streams.","endLoc":43,"header":"@pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value)","id":1061,"name":"test_io_streams","nodeType":"Function","startLoc":33,"text":"@pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4"},{"fileName":"cookies.py","filePath":"requests","id":1062,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.cookies\n~~~~~~~~~~~~~~~~\n\nCompatibility code to be able to use `cookielib.CookieJar` with requests.\n\nrequests.utils imports from here, so be careful with imports.\n\"\"\"\n\nimport copy\nimport time\nimport calendar\n\nfrom ._internal_utils import to_native_string\nfrom .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping\n\ntry:\n import threading\nexcept ImportError:\n import dummy_threading as threading\n\n\nclass MockRequest(object):\n \"\"\"Wraps a `requests.Request` to mimic a `urllib2.Request`.\n\n The code in `cookielib.CookieJar` expects this interface in order to correctly\n manage cookie policies, i.e., determine whether a cookie can be set, given the\n domains of the request and the cookie.\n\n The original request object is read-only. The client is responsible for collecting\n the new headers via `get_new_headers()` and interpreting them appropriately. 
You\n probably want `get_cookie_header`, defined below.\n \"\"\"\n\n def __init__(self, request):\n self._r = request\n self._new_headers = {}\n self.type = urlparse(self._r.url).scheme\n\n def get_type(self):\n return self.type\n\n def get_host(self):\n return urlparse(self._r.url).netloc\n\n def get_origin_req_host(self):\n return self.get_host()\n\n def get_full_url(self):\n # Only return the response's URL if the user hadn't set the Host\n # header\n if not self._r.headers.get('Host'):\n return self._r.url\n # If they did set it, retrieve it and reconstruct the expected domain\n host = to_native_string(self._r.headers['Host'], encoding='utf-8')\n parsed = urlparse(self._r.url)\n # Reconstruct the URL as we expect it\n return urlunparse([\n parsed.scheme, host, parsed.path, parsed.params, parsed.query,\n parsed.fragment\n ])\n\n def is_unverifiable(self):\n return True\n\n def has_header(self, name):\n return name in self._r.headers or name in self._new_headers\n\n def get_header(self, name, default=None):\n return self._r.headers.get(name, self._new_headers.get(name, default))\n\n def add_header(self, key, val):\n \"\"\"cookielib has no legitimate use for this method; add it back if you find one.\"\"\"\n raise NotImplementedError(\"Cookie headers should be added with add_unredirected_header()\")\n\n def add_unredirected_header(self, name, value):\n self._new_headers[name] = value\n\n def get_new_headers(self):\n return self._new_headers\n\n @property\n def unverifiable(self):\n return self.is_unverifiable()\n\n @property\n def origin_req_host(self):\n return self.get_origin_req_host()\n\n @property\n def host(self):\n return self.get_host()\n\n\nclass MockResponse(object):\n \"\"\"Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.\n\n ...what? 
Basically, expose the parsed HTTP headers from the server response\n the way `cookielib` expects to see them.\n \"\"\"\n\n def __init__(self, headers):\n \"\"\"Make a MockResponse for `cookielib` to read.\n\n :param headers: a httplib.HTTPMessage or analogous carrying the headers\n \"\"\"\n self._headers = headers\n\n def info(self):\n return self._headers\n\n def getheaders(self, name):\n self._headers.getheaders(name)\n\n\ndef extract_cookies_to_jar(jar, request, response):\n \"\"\"Extract the cookies from the response into a CookieJar.\n\n :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)\n :param request: our own requests.Request object\n :param response: urllib3.HTTPResponse object\n \"\"\"\n if not (hasattr(response, '_original_response') and\n response._original_response):\n return\n # the _original_response field is the wrapped httplib.HTTPResponse object,\n req = MockRequest(request)\n # pull out the HTTPMessage with the headers and put it in the mock:\n res = MockResponse(response._original_response.msg)\n jar.extract_cookies(res, req)\n\n\ndef get_cookie_header(jar, request):\n \"\"\"\n Produce an appropriate Cookie header string to be sent with `request`, or None.\n\n :rtype: str\n \"\"\"\n r = MockRequest(request)\n jar.add_cookie_header(r)\n return r.get_new_headers().get('Cookie')\n\n\ndef remove_cookie_by_name(cookiejar, name, domain=None, path=None):\n \"\"\"Unsets a cookie by name, by default over all domains and paths.\n\n Wraps CookieJar.clear(), is O(n).\n \"\"\"\n clearables = []\n for cookie in cookiejar:\n if cookie.name != name:\n continue\n if domain is not None and domain != cookie.domain:\n continue\n if path is not None and path != cookie.path:\n continue\n clearables.append((cookie.domain, cookie.path, cookie.name))\n\n for domain, path, name in clearables:\n cookiejar.clear(domain, path, name)\n\n\nclass CookieConflictError(RuntimeError):\n \"\"\"There are two cookies that meet the criteria specified in the cookie jar.\n Use .get and .set and include domain and path args in order to be more specific.\n \"\"\"\n\n\nclass RequestsCookieJar(cookielib.CookieJar, MutableMapping):\n \"\"\"Compatibility class; is a cookielib.CookieJar, but exposes a dict\n interface.\n\n This is the CookieJar we create by default for requests and sessions that\n don't specify one, since some clients may expect response.cookies and\n session.cookies to support dict operations.\n\n Requests does not use the dict interface internally; it's just for\n compatibility with external client code. All requests code should work\n out of the box with externally provided instances of ``CookieJar``, e.g.\n ``LWPCookieJar`` and ``FileCookieJar``.\n\n Unlike a regular CookieJar, this class is pickleable.\n\n .. warning:: dictionary operations that are normally O(1) may be O(n).\n \"\"\"\n\n def get(self, name, default=None, domain=None, path=None):\n \"\"\"Dict-like get() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n\n .. 
warning:: operation is O(n), not O(1).\n \"\"\"\n try:\n return self._find_no_duplicates(name, domain, path)\n except KeyError:\n return default\n\n def set(self, name, value, **kwargs):\n \"\"\"Dict-like set() that also supports optional domain and path args in\n order to resolve naming collisions from using one cookie jar over\n multiple domains.\n \"\"\"\n # support client code that unsets cookies by assignment of a None value:\n if value is None:\n remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))\n return\n\n if isinstance(value, Morsel):\n c = morsel_to_cookie(value)\n else:\n c = create_cookie(name, value, **kwargs)\n self.set_cookie(c)\n return c\n\n def iterkeys(self):\n \"\"\"Dict-like iterkeys() that returns an iterator of names of cookies\n from the jar.\n\n .. seealso:: itervalues() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name\n\n def keys(self):\n \"\"\"Dict-like keys() that returns a list of names of cookies from the\n jar.\n\n .. seealso:: values() and items().\n \"\"\"\n return list(self.iterkeys())\n\n def itervalues(self):\n \"\"\"Dict-like itervalues() that returns an iterator of values of cookies\n from the jar.\n\n .. seealso:: iterkeys() and iteritems().\n \"\"\"\n for cookie in iter(self):\n yield cookie.value\n\n def values(self):\n \"\"\"Dict-like values() that returns a list of values of cookies from the\n jar.\n\n .. seealso:: keys() and items().\n \"\"\"\n return list(self.itervalues())\n\n def iteritems(self):\n \"\"\"Dict-like iteritems() that returns an iterator of name-value tuples\n from the jar.\n\n .. seealso:: iterkeys() and itervalues().\n \"\"\"\n for cookie in iter(self):\n yield cookie.name, cookie.value\n\n def items(self):\n \"\"\"Dict-like items() that returns a list of name-value tuples from the\n jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a\n vanilla python dict of key value pairs.\n\n .. seealso:: keys() and values().\n \"\"\"\n return list(self.iteritems())\n\n def list_domains(self):\n \"\"\"Utility method to list all the domains in the jar.\"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain not in domains:\n domains.append(cookie.domain)\n return domains\n\n def list_paths(self):\n \"\"\"Utility method to list all the paths in the jar.\"\"\"\n paths = []\n for cookie in iter(self):\n if cookie.path not in paths:\n paths.append(cookie.path)\n return paths\n\n def multiple_domains(self):\n \"\"\"Returns True if there are multiple domains in the jar.\n Returns False otherwise.\n\n :rtype: bool\n \"\"\"\n domains = []\n for cookie in iter(self):\n if cookie.domain is not None and cookie.domain in domains:\n return True\n domains.append(cookie.domain)\n return False # there is only one domain in jar\n\n def get_dict(self, domain=None, path=None):\n \"\"\"Takes as an argument an optional domain and path and returns a plain\n old Python dict of name-value pairs of cookies that meet the\n requirements.\n\n :rtype: dict\n \"\"\"\n dictionary = {}\n for cookie in iter(self):\n if (\n (domain is None or cookie.domain == domain) and\n (path is None or cookie.path == path)\n ):\n dictionary[cookie.name] = cookie.value\n return dictionary\n\n def __contains__(self, name):\n try:\n return super(RequestsCookieJar, self).__contains__(name)\n except CookieConflictError:\n return True\n\n def __getitem__(self, name):\n \"\"\"Dict-like __getitem__() for compatibility with client code. Throws\n exception if there are more than one cookie with name. 
In that case,\n use the more explicit get() method instead.\n\n .. warning:: operation is O(n), not O(1).\n \"\"\"\n return self._find_no_duplicates(name)\n\n def __setitem__(self, name, value):\n \"\"\"Dict-like __setitem__ for compatibility with client code. Throws\n exception if there is already a cookie of that name in the jar. In that\n case, use the more explicit set() method instead.\n \"\"\"\n self.set(name, value)\n\n def __delitem__(self, name):\n \"\"\"Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s\n ``remove_cookie_by_name()``.\n \"\"\"\n remove_cookie_by_name(self, name)\n\n def set_cookie(self, cookie, *args, **kwargs):\n if hasattr(cookie.value, 'startswith') and cookie.value.startswith('\"') and cookie.value.endswith('\"'):\n cookie.value = cookie.value.replace('\\\\\"', '')\n return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)\n\n def update(self, other):\n \"\"\"Updates this jar with cookies from another CookieJar or dict-like\"\"\"\n if isinstance(other, cookielib.CookieJar):\n for cookie in other:\n self.set_cookie(copy.copy(cookie))\n else:\n super(RequestsCookieJar, self).update(other)\n\n def _find(self, name, domain=None, path=None):\n \"\"\"Requests uses this method internally to get cookie values.\n\n If there are conflicting cookies, _find arbitrarily chooses one.\n See _find_no_duplicates if you want an exception thrown if there are\n conflicting cookies.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :return: cookie.value\n \"\"\"\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n return cookie.value\n\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))\n\n def _find_no_duplicates(self, name, domain=None, path=None):\n \"\"\"Both ``__get_item__`` and ``get`` call this function: it's never\n used elsewhere in Requests.\n\n :param name: a string containing name of cookie\n :param domain: (optional) string containing domain of cookie\n :param path: (optional) string containing path of cookie\n :raises KeyError: if cookie is not found\n :raises CookieConflictError: if there are multiple cookies\n that match name and optionally domain and path\n :return: cookie.value\n \"\"\"\n toReturn = None\n for cookie in iter(self):\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n if toReturn is not None: # if there are multiple cookies that meet passed in criteria\n raise CookieConflictError('There are multiple cookies with name, %r' % (name))\n toReturn = cookie.value # we will eventually return this as long as no cookie conflict\n\n if toReturn:\n return toReturn\n raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))\n\n def __getstate__(self):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n state = self.__dict__.copy()\n # remove the unpickleable RLock object\n state.pop('_cookies_lock')\n return state\n\n def __setstate__(self, state):\n \"\"\"Unlike a normal CookieJar, this class is pickleable.\"\"\"\n self.__dict__.update(state)\n if '_cookies_lock' not in self.__dict__:\n self._cookies_lock = threading.RLock()\n\n def copy(self):\n \"\"\"Return a copy of this RequestsCookieJar.\"\"\"\n new_cj = RequestsCookieJar()\n new_cj.set_policy(self.get_policy())\n new_cj.update(self)\n return 
new_cj\n\n def get_policy(self):\n \"\"\"Return the CookiePolicy instance used.\"\"\"\n return self._policy\n\n\ndef _copy_cookie_jar(jar):\n if jar is None:\n return None\n\n if hasattr(jar, 'copy'):\n # We're dealing with an instance of RequestsCookieJar\n return jar.copy()\n # We're dealing with a generic CookieJar instance\n new_jar = copy.copy(jar)\n new_jar.clear()\n for cookie in jar:\n new_jar.set_cookie(copy.copy(cookie))\n return new_jar\n\n\ndef create_cookie(name, value, **kwargs):\n \"\"\"Make a cookie from underspecified parameters.\n\n By default, the pair of `name` and `value` will be set for the domain ''\n and sent on every request (this is sometimes called a \"supercookie\").\n \"\"\"\n result = {\n 'version': 0,\n 'name': name,\n 'value': value,\n 'port': None,\n 'domain': '',\n 'path': '/',\n 'secure': False,\n 'expires': None,\n 'discard': True,\n 'comment': None,\n 'comment_url': None,\n 'rest': {'HttpOnly': None},\n 'rfc2109': False,\n }\n\n badargs = set(kwargs) - set(result)\n if badargs:\n err = 'create_cookie() got unexpected keyword arguments: %s'\n raise TypeError(err % list(badargs))\n\n result.update(kwargs)\n result['port_specified'] = bool(result['port'])\n result['domain_specified'] = bool(result['domain'])\n result['domain_initial_dot'] = result['domain'].startswith('.')\n result['path_specified'] = bool(result['path'])\n\n return cookielib.Cookie(**result)\n\n\ndef morsel_to_cookie(morsel):\n \"\"\"Convert a Morsel object into a Cookie containing the one k/v pair.\"\"\"\n\n expires = None\n if morsel['max-age']:\n try:\n expires = int(time.time() + int(morsel['max-age']))\n except ValueError:\n raise TypeError('max-age: %s must be integer' % morsel['max-age'])\n elif morsel['expires']:\n time_template = '%a, %d-%b-%Y %H:%M:%S GMT'\n expires = calendar.timegm(\n time.strptime(morsel['expires'], time_template)\n )\n return create_cookie(\n comment=morsel['comment'],\n comment_url=bool(morsel['comment']),\n discard=False,\n domain=morsel['domain'],\n expires=expires,\n name=morsel.key,\n path=morsel['path'],\n port=None,\n rest={'HttpOnly': morsel['httponly']},\n rfc2109=False,\n secure=bool(morsel['secure']),\n value=morsel.value,\n version=morsel['version'] or 0,\n )\n\n\ndef cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :param cookiejar: (optional) A cookiejar to add the cookies to.\n :param overwrite: (optional) If False, will not replace cookies\n already in the jar with new ones.\n :rtype: CookieJar\n \"\"\"\n if cookiejar is None:\n cookiejar = RequestsCookieJar()\n\n if cookie_dict is not None:\n names_from_jar = [cookie.name for cookie in cookiejar]\n for name in cookie_dict:\n if overwrite or (name not in names_from_jar):\n cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))\n\n return cookiejar\n\n\ndef merge_cookies(cookiejar, cookies):\n \"\"\"Add cookies to cookiejar and returns a merged CookieJar.\n\n :param cookiejar: CookieJar object to add the cookies to.\n :param cookies: Dictionary or CookieJar object to be added.\n :rtype: CookieJar\n \"\"\"\n if not isinstance(cookiejar, cookielib.CookieJar):\n raise ValueError('You can only merge into CookieJar')\n\n if isinstance(cookies, dict):\n cookiejar = cookiejar_from_dict(\n cookies, cookiejar=cookiejar, overwrite=False)\n elif isinstance(cookies, cookielib.CookieJar):\n try:\n cookiejar.update(cookies)\n except AttributeError:\n for 
cookie_in_jar in cookies:\n cookiejar.set_cookie(cookie_in_jar)\n\n return cookiejar\n"},{"col":4,"comment":"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":577,"header":"def post(self, url, data=None, json=None, **kwargs)","id":1063,"name":"post","nodeType":"Function","startLoc":566,"text":"def post(self, url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('POST', url, data=data, json=json, **kwargs)"},{"col":4,"comment":"Sends a PUT request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":589,"header":"def put(self, url, data=None, **kwargs)","id":1064,"name":"put","nodeType":"Function","startLoc":579,"text":"def put(self, url, data=None, **kwargs):\n r\"\"\"Sends a PUT request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PUT', url, data=data, **kwargs)"},{"col":4,"comment":"Sends a PATCH request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":601,"header":"def patch(self, url, data=None, **kwargs)","id":1065,"name":"patch","nodeType":"Function","startLoc":591,"text":"def patch(self, url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PATCH', url, data=data, **kwargs)"},{"col":4,"comment":"Sends a DELETE request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n ","endLoc":611,"header":"def delete(self, url, **kwargs)","id":1066,"name":"delete","nodeType":"Function","startLoc":603,"text":"def delete(self, url, **kwargs):\n r\"\"\"Sends a DELETE request. 
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('DELETE', url, **kwargs)"},{"col":4,"comment":"Send a given PreparedRequest.\n\n :rtype: requests.Response\n ","endLoc":689,"header":"def send(self, request, **kwargs)","id":1068,"name":"send","nodeType":"Function","startLoc":613,"text":"def send(self, request, **kwargs):\n \"\"\"Send a given PreparedRequest.\n\n :rtype: requests.Response\n \"\"\"\n # Set defaults that the hooks can utilize to ensure they always have\n # the correct parameters to reproduce the previous request.\n kwargs.setdefault('stream', self.stream)\n kwargs.setdefault('verify', self.verify)\n kwargs.setdefault('cert', self.cert)\n if 'proxies' not in kwargs:\n kwargs['proxies'] = resolve_proxies(\n request, self.proxies, self.trust_env\n )\n\n # It's possible that users might accidentally send a Request object.\n # Guard against that specific failure case.\n if isinstance(request, Request):\n raise ValueError('You can only send PreparedRequests.')\n\n # Set up variables needed for resolve_redirects and dispatching of hooks\n allow_redirects = kwargs.pop('allow_redirects', True)\n stream = kwargs.get('stream')\n hooks = request.hooks\n\n # Get the appropriate adapter to use\n adapter = self.get_adapter(url=request.url)\n\n # Start time (approximately) of the request\n start = preferred_clock()\n\n # Send the request\n r = adapter.send(request, **kwargs)\n\n # Total elapsed time of the request (approximately)\n elapsed = preferred_clock() - start\n r.elapsed = timedelta(seconds=elapsed)\n\n # Response manipulation hooks\n r = dispatch_hook('response', hooks, r, **kwargs)\n\n # Persist cookies\n if r.history:\n\n # If the hooks create history then we want those cookies too\n for resp in r.history:\n extract_cookies_to_jar(self.cookies, resp.request, resp.raw)\n\n extract_cookies_to_jar(self.cookies, request, r.raw)\n\n # Resolve redirects if allowed.\n if allow_redirects:\n # Redirect resolving generator.\n gen = self.resolve_redirects(r, request, **kwargs)\n history = [resp for resp in gen]\n else:\n history = []\n\n # Shuffle things around if there's history.\n if history:\n # Insert the first (original) request at the start\n history.insert(0, r)\n # Get the last request made\n r = history.pop()\n r.history = history\n\n # If redirects aren't being followed, store the response on the Request for Response.next().\n if not allow_redirects:\n try:\n r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))\n except StopIteration:\n pass\n\n if not stream:\n r.content\n\n return r"},{"col":4,"comment":"\n Returns the appropriate connection adapter for the given URL.\n\n :rtype: requests.adapters.BaseAdapter\n ","endLoc":732,"header":"def get_adapter(self, url)","id":1069,"name":"get_adapter","nodeType":"Function","startLoc":720,"text":"def get_adapter(self, url):\n \"\"\"\n Returns the appropriate connection adapter for the given URL.\n\n :rtype: requests.adapters.BaseAdapter\n \"\"\"\n for (prefix, adapter) in self.adapters.items():\n\n if url.lower().startswith(prefix.lower()):\n return adapter\n\n # Nothing matches :-/\n raise InvalidSchema(\"No connection adapters were found for {!r}\".format(url))"},{"className":"MockRequest","col":0,"comment":"Wraps a `requests.Request` to mimic a `urllib2.Request`.\n\n The code in `cookielib.CookieJar` expects this interface in order to 
correctly\n manage cookie policies, i.e., determine whether a cookie can be set, given the\n domains of the request and the cookie.\n\n The original request object is read-only. The client is responsible for collecting\n the new headers via `get_new_headers()` and interpreting them appropriately. You\n probably want `get_cookie_header`, defined below.\n ","endLoc":94,"id":1070,"nodeType":"Class","startLoc":25,"text":"class MockRequest(object):\n \"\"\"Wraps a `requests.Request` to mimic a `urllib2.Request`.\n\n The code in `cookielib.CookieJar` expects this interface in order to correctly\n manage cookie policies, i.e., determine whether a cookie can be set, given the\n domains of the request and the cookie.\n\n The original request object is read-only. The client is responsible for collecting\n the new headers via `get_new_headers()` and interpreting them appropriately. You\n probably want `get_cookie_header`, defined below.\n \"\"\"\n\n def __init__(self, request):\n self._r = request\n self._new_headers = {}\n self.type = urlparse(self._r.url).scheme\n\n def get_type(self):\n return self.type\n\n def get_host(self):\n return urlparse(self._r.url).netloc\n\n def get_origin_req_host(self):\n return self.get_host()\n\n def get_full_url(self):\n # Only return the response's URL if the user hadn't set the Host\n # header\n if not self._r.headers.get('Host'):\n return self._r.url\n # If they did set it, retrieve it and reconstruct the expected domain\n host = to_native_string(self._r.headers['Host'], encoding='utf-8')\n parsed = urlparse(self._r.url)\n # Reconstruct the URL as we expect it\n return urlunparse([\n parsed.scheme, host, parsed.path, parsed.params, parsed.query,\n parsed.fragment\n ])\n\n def is_unverifiable(self):\n return True\n\n def has_header(self, name):\n return name in self._r.headers or name in self._new_headers\n\n def get_header(self, name, default=None):\n return self._r.headers.get(name, self._new_headers.get(name, default))\n\n def add_header(self, key, val):\n \"\"\"cookielib has no legitimate use for this method; add it back if you find one.\"\"\"\n raise NotImplementedError(\"Cookie headers should be added with add_unredirected_header()\")\n\n def add_unredirected_header(self, name, value):\n self._new_headers[name] = value\n\n def get_new_headers(self):\n return self._new_headers\n\n @property\n def unverifiable(self):\n return self.is_unverifiable()\n\n @property\n def origin_req_host(self):\n return self.get_origin_req_host()\n\n @property\n def host(self):\n return self.get_host()"},{"col":4,"comment":"null","endLoc":43,"header":"def get_type(self)","id":1071,"name":"get_type","nodeType":"Function","startLoc":42,"text":"def get_type(self):\n return self.type"},{"col":4,"comment":"null","endLoc":46,"header":"def get_host(self)","id":1072,"name":"get_host","nodeType":"Function","startLoc":45,"text":"def get_host(self):\n return urlparse(self._r.url).netloc"},{"col":0,"comment":"Verify that the fragment portion of a URI isn't sent to the server.","endLoc":362,"header":"def test_fragment_not_sent_with_request()","id":1073,"name":"test_fragment_not_sent_with_request","nodeType":"Function","startLoc":335,"text":"def test_fragment_not_sent_with_request():\n \"\"\"Verify that the fragment portion of a URI isn't sent to the server.\"\"\"\n def response_handler(sock):\n req = consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 200 OK\\r\\n'\n b'Content-Length: '+bytes(len(req))+b'\\r\\n'\n b'\\r\\n'+req\n )\n\n close_server = threading.Event()\n server 
= Server(response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port)\n r = requests.get(url)\n raw_request = r.content\n\n assert r.status_code == 200\n headers, body = raw_request.split(b'\\r\\n\\r\\n', 1)\n status_line, headers = headers.split(b'\\r\\n', 1)\n\n assert status_line == b'GET /path/to/thing/ HTTP/1.1'\n for frag in (b'view', b'edit', b'token', b'hunter2'):\n assert frag not in headers\n assert frag not in body\n\n close_server.set()"},{"col":4,"comment":"null","endLoc":49,"header":"def get_origin_req_host(self)","id":1074,"name":"get_origin_req_host","nodeType":"Function","startLoc":48,"text":"def get_origin_req_host(self):\n return self.get_host()"},{"col":0,"comment":"null","endLoc":30,"header":"@pytest.fixture\ndef httpbin(httpbin)","id":1075,"name":"httpbin","nodeType":"Function","startLoc":28,"text":"@pytest.fixture\ndef httpbin(httpbin):\n return prepare_url(httpbin)"},{"col":0,"comment":"null","endLoc":35,"header":"@pytest.fixture\ndef httpbin_secure(httpbin_secure)","id":1076,"name":"httpbin_secure","nodeType":"Function","startLoc":33,"text":"@pytest.fixture\ndef httpbin_secure(httpbin_secure):\n return prepare_url(httpbin_secure)"},{"col":0,"comment":"null","endLoc":61,"header":"@pytest.fixture\ndef nosan_server(tmp_path_factory)","id":1077,"name":"nosan_server","nodeType":"Function","startLoc":38,"text":"@pytest.fixture\ndef nosan_server(tmp_path_factory):\n # delay importing until the fixture in order to make it possible\n # to deselect the test via command-line when trustme is not available\n import trustme\n\n tmpdir = tmp_path_factory.mktemp(\"certs\")\n ca = trustme.CA()\n # only commonName, no subjectAltName\n server_cert = ca.issue_cert(common_name=u\"localhost\")\n ca_bundle = str(tmpdir / \"ca.pem\")\n ca.cert_pem.write_to_path(ca_bundle)\n\n context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)\n server_cert.configure_cert(context)\n server = HTTPServer((\"localhost\", 0), SimpleHTTPRequestHandler)\n server.socket = context.wrap_socket(server.socket, server_side=True)\n server_thread = threading.Thread(target=server.serve_forever)\n server_thread.start()\n\n yield \"localhost\", server.server_address[1], ca_bundle\n\n server.shutdown()\n server_thread.join()"},{"attributeType":"null","col":0,"comment":"null","endLoc":22,"id":1078,"name":"target","nodeType":"Attribute","startLoc":22,"text":"target"},{"col":4,"comment":"null","endLoc":752,"header":"def __getstate__(self)","id":1079,"name":"__getstate__","nodeType":"Function","startLoc":750,"text":"def __getstate__(self):\n state = {attr: getattr(self, attr, None) for attr in self.__attrs__}\n return state"},{"col":4,"comment":"Prepares the given hooks.","endLoc":593,"header":"def prepare_hooks(self, hooks)","id":1080,"name":"prepare_hooks","nodeType":"Function","startLoc":586,"text":"def prepare_hooks(self, hooks):\n \"\"\"Prepares the given hooks.\"\"\"\n # hooks can be passed as None to the prepare method and to this\n # method. 
To prevent iterating over None, simply use an empty list\n # if hooks is False-y\n hooks = hooks or []\n for event in hooks:\n self.register_hook(event, hooks[event])"},{"col":4,"comment":"null","endLoc":331,"header":"def __repr__(self)","id":1081,"name":"__repr__","nodeType":"Function","startLoc":330,"text":"def __repr__(self):\n return '' % (self.method)"},{"col":4,"comment":"null","endLoc":342,"header":"def copy(self)","id":1082,"name":"copy","nodeType":"Function","startLoc":333,"text":"def copy(self):\n p = PreparedRequest()\n p.method = self.method\n p.url = self.url\n p.headers = self.headers.copy() if self.headers is not None else None\n p._cookies = _copy_cookie_jar(self._cookies)\n p.body = self.body\n p.hooks = self.hooks\n p._body_position = self._body_position\n return p"},{"col":4,"comment":"null","endLoc":756,"header":"def __setstate__(self, state)","id":1083,"name":"__setstate__","nodeType":"Function","startLoc":754,"text":"def __setstate__(self, state):\n for attr, value in state.items():\n setattr(self, attr, value)"},{"attributeType":"null","col":4,"comment":"null","endLoc":343,"id":1084,"name":"__attrs__","nodeType":"Attribute","startLoc":343,"text":"__attrs__"},{"attributeType":"null","col":8,"comment":"null","endLoc":304,"id":1085,"name":"_cookies","nodeType":"Attribute","startLoc":304,"text":"self._cookies"},{"attributeType":"null","col":8,"comment":"null","endLoc":354,"id":1086,"name":"headers","nodeType":"Attribute","startLoc":354,"text":"self.headers"},{"attributeType":"null","col":8,"comment":"null","endLoc":301,"id":1087,"name":"headers","nodeType":"Attribute","startLoc":301,"text":"self.headers"},{"attributeType":"null","col":8,"comment":"null","endLoc":297,"id":1088,"name":"method","nodeType":"Attribute","startLoc":297,"text":"self.method"},{"attributeType":"null","col":8,"comment":"null","endLoc":407,"id":1089,"name":"adapters","nodeType":"Attribute","startLoc":407,"text":"self.adapters"},{"attributeType":"null","col":8,"comment":"null","endLoc":358,"id":1090,"name":"auth","nodeType":"Attribute","startLoc":358,"text":"self.auth"},{"attributeType":"null","col":8,"comment":"null","endLoc":310,"id":1092,"name":"_body_position","nodeType":"Attribute","startLoc":310,"text":"self._body_position"},{"attributeType":"null","col":8,"comment":"null","endLoc":374,"id":1093,"name":"stream","nodeType":"Attribute","startLoc":374,"text":"self.stream"},{"attributeType":"null","col":8,"comment":"null","endLoc":306,"id":1094,"name":"body","nodeType":"Attribute","startLoc":306,"text":"self.body"},{"attributeType":"null","col":8,"comment":"null","endLoc":398,"id":1096,"name":"trust_env","nodeType":"Attribute","startLoc":398,"text":"self.trust_env"},{"attributeType":"null","col":8,"comment":"null","endLoc":308,"id":1097,"name":"hooks","nodeType":"Attribute","startLoc":308,"text":"self.hooks"},{"attributeType":"null","col":8,"comment":"null","endLoc":384,"id":1098,"name":"verify","nodeType":"Attribute","startLoc":384,"text":"self.verify"},{"attributeType":"null","col":8,"comment":"null","endLoc":363,"id":1099,"name":"proxies","nodeType":"Attribute","startLoc":363,"text":"self.proxies"},{"attributeType":"null","col":8,"comment":"null","endLoc":299,"id":1100,"name":"url","nodeType":"Attribute","startLoc":299,"text":"self.url"},{"attributeType":"null","col":8,"comment":"null","endLoc":388,"id":1101,"name":"cert","nodeType":"Attribute","startLoc":388,"text":"self.cert"},{"attributeType":"null","col":8,"comment":"null","endLoc":371,"id":1102,"name":"params","nodeType":"Attribute","startLo
c":371,"text":"self.params"},{"col":0,"comment":"Dispatches a hook dictionary on a given piece of data.","endLoc":34,"header":"def dispatch_hook(key, hooks, hook_data, **kwargs)","id":1103,"name":"dispatch_hook","nodeType":"Function","startLoc":23,"text":"def dispatch_hook(key, hooks, hook_data, **kwargs):\n \"\"\"Dispatches a hook dictionary on a given piece of data.\"\"\"\n hooks = hooks or {}\n hooks = hooks.get(key)\n if hooks:\n if hasattr(hooks, '__call__'):\n hooks = [hooks]\n for hook in hooks:\n _hook_data = hook(hook_data, **kwargs)\n if _hook_data is not None:\n hook_data = _hook_data\n return hook_data"},{"attributeType":"null","col":8,"comment":"null","endLoc":394,"id":1104,"name":"max_redirects","nodeType":"Attribute","startLoc":394,"text":"self.max_redirects"},{"attributeType":"null","col":0,"comment":"null","endLoc":43,"id":1105,"name":"DEFAULT_PORTS","nodeType":"Attribute","startLoc":43,"text":"DEFAULT_PORTS"},{"className":"HTTPAdapter","col":0,"comment":"The built-in HTTP Adapter for urllib3.\n\n Provides a general-case interface for Requests sessions to contact HTTP and\n HTTPS urls by implementing the Transport Adapter interface. This class will\n usually be created by the :class:`Session ` class under the\n covers.\n\n :param pool_connections: The number of urllib3 connection pools to cache.\n :param pool_maxsize: The maximum number of connections to save in the pool.\n :param max_retries: The maximum number of retries each connection\n should attempt. Note, this applies only to failed DNS lookups, socket\n connections and connection timeouts, never to requests where data has\n made it to the server. By default, Requests does not retry failed\n connections. If you need granular control over the conditions under\n which we retry a request, import urllib3's ``Retry`` class and pass\n that instead.\n :param pool_block: Whether the connection pool should block for connections.\n\n Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> a = requests.adapters.HTTPAdapter(max_retries=3)\n >>> s.mount('http://', a)\n ","endLoc":538,"id":1106,"nodeType":"Class","startLoc":85,"text":"class HTTPAdapter(BaseAdapter):\n \"\"\"The built-in HTTP Adapter for urllib3.\n\n Provides a general-case interface for Requests sessions to contact HTTP and\n HTTPS urls by implementing the Transport Adapter interface. This class will\n usually be created by the :class:`Session ` class under the\n covers.\n\n :param pool_connections: The number of urllib3 connection pools to cache.\n :param pool_maxsize: The maximum number of connections to save in the pool.\n :param max_retries: The maximum number of retries each connection\n should attempt. Note, this applies only to failed DNS lookups, socket\n connections and connection timeouts, never to requests where data has\n made it to the server. By default, Requests does not retry failed\n connections. 
If you need granular control over the conditions under\n which we retry a request, import urllib3's ``Retry`` class and pass\n that instead.\n :param pool_block: Whether the connection pool should block for connections.\n\n Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> a = requests.adapters.HTTPAdapter(max_retries=3)\n >>> s.mount('http://', a)\n \"\"\"\n __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',\n '_pool_block']\n\n def __init__(self, pool_connections=DEFAULT_POOLSIZE,\n pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,\n pool_block=DEFAULT_POOLBLOCK):\n if max_retries == DEFAULT_RETRIES:\n self.max_retries = Retry(0, read=False)\n else:\n self.max_retries = Retry.from_int(max_retries)\n self.config = {}\n self.proxy_manager = {}\n\n super(HTTPAdapter, self).__init__()\n\n self._pool_connections = pool_connections\n self._pool_maxsize = pool_maxsize\n self._pool_block = pool_block\n\n self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)\n\n def __getstate__(self):\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}\n\n def __setstate__(self, state):\n # Can't handle by adding 'proxy_manager' to self.__attrs__ because\n # self.poolmanager uses a lambda function, which isn't pickleable.\n self.proxy_manager = {}\n self.config = {}\n\n for attr, value in state.items():\n setattr(self, attr, value)\n\n self.init_poolmanager(self._pool_connections, self._pool_maxsize,\n block=self._pool_block)\n\n def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):\n \"\"\"Initializes a urllib3 PoolManager.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param connections: The number of urllib3 connection pools to cache.\n :param maxsize: The maximum number of connections to save in the pool.\n :param block: Block when no free connections are available.\n :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.\n \"\"\"\n # save these values for pickling\n self._pool_connections = connections\n self._pool_maxsize = maxsize\n self._pool_block = block\n\n self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,\n block=block, strict=True, **pool_kwargs)\n\n def proxy_manager_for(self, proxy, **proxy_kwargs):\n \"\"\"Return urllib3 ProxyManager for the given proxy.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The proxy to return a urllib3 ProxyManager for.\n :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.\n :returns: ProxyManager\n :rtype: urllib3.ProxyManager\n \"\"\"\n if proxy in self.proxy_manager:\n manager = self.proxy_manager[proxy]\n elif proxy.lower().startswith('socks'):\n username, password = get_auth_from_url(proxy)\n manager = self.proxy_manager[proxy] = SOCKSProxyManager(\n proxy,\n username=username,\n password=password,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n block=self._pool_block,\n **proxy_kwargs\n )\n else:\n proxy_headers = self.proxy_headers(proxy)\n manager = self.proxy_manager[proxy] = proxy_from_url(\n proxy,\n proxy_headers=proxy_headers,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n block=self._pool_block,\n **proxy_kwargs)\n\n return manager\n\n def cert_verify(self, conn, url, verify, cert):\n \"\"\"Verify a SSL certificate. 
This method should not be called from user\n code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param conn: The urllib3 connection object associated with the cert.\n :param url: The requested URL.\n :param verify: Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: The SSL certificate to verify.\n \"\"\"\n if url.lower().startswith('https') and verify:\n\n cert_loc = None\n\n # Allow self-specified cert location.\n if verify is not True:\n cert_loc = verify\n\n if not cert_loc:\n cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)\n\n if not cert_loc or not os.path.exists(cert_loc):\n raise IOError(\"Could not find a suitable TLS CA certificate bundle, \"\n \"invalid path: {}\".format(cert_loc))\n\n conn.cert_reqs = 'CERT_REQUIRED'\n\n if not os.path.isdir(cert_loc):\n conn.ca_certs = cert_loc\n else:\n conn.ca_cert_dir = cert_loc\n else:\n conn.cert_reqs = 'CERT_NONE'\n conn.ca_certs = None\n conn.ca_cert_dir = None\n\n if cert:\n if not isinstance(cert, basestring):\n conn.cert_file = cert[0]\n conn.key_file = cert[1]\n else:\n conn.cert_file = cert\n conn.key_file = None\n if conn.cert_file and not os.path.exists(conn.cert_file):\n raise IOError(\"Could not find the TLS certificate file, \"\n \"invalid path: {}\".format(conn.cert_file))\n if conn.key_file and not os.path.exists(conn.key_file):\n raise IOError(\"Could not find the TLS key file, \"\n \"invalid path: {}\".format(conn.key_file))\n\n def build_response(self, req, resp):\n \"\"\"Builds a :class:`Response ` object from a urllib3\n response. This should not be called from user code, and is only exposed\n for use when subclassing the\n :class:`HTTPAdapter `\n\n :param req: The :class:`PreparedRequest ` used to generate the response.\n :param resp: The urllib3 response object.\n :rtype: requests.Response\n \"\"\"\n response = Response()\n\n # Fallback to None if there's no status_code, for whatever reason.\n response.status_code = getattr(resp, 'status', None)\n\n # Make headers case-insensitive.\n response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))\n\n # Set encoding.\n response.encoding = get_encoding_from_headers(response.headers)\n response.raw = resp\n response.reason = response.raw.reason\n\n if isinstance(req.url, bytes):\n response.url = req.url.decode('utf-8')\n else:\n response.url = req.url\n\n # Add new cookies from the server.\n extract_cookies_to_jar(response.cookies, req, resp)\n\n # Give the Response some context.\n response.request = req\n response.connection = self\n\n return response\n\n def get_connection(self, url, proxies=None):\n \"\"\"Returns a urllib3 connection for the given URL. This should not be\n called from user code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param url: The URL to connect to.\n :param proxies: (optional) A Requests-style dictionary of proxies used on this request.\n :rtype: urllib3.ConnectionPool\n \"\"\"\n proxy = select_proxy(url, proxies)\n\n if proxy:\n proxy = prepend_scheme_if_needed(proxy, 'http')\n proxy_url = parse_url(proxy)\n if not proxy_url.host:\n raise InvalidProxyURL(\"Please check proxy URL. 
It is malformed\"\n \" and could be missing the host.\")\n proxy_manager = self.proxy_manager_for(proxy)\n conn = proxy_manager.connection_from_url(url)\n else:\n # Only scheme should be lower case\n parsed = urlparse(url)\n url = parsed.geturl()\n conn = self.poolmanager.connection_from_url(url)\n\n return conn\n\n def close(self):\n \"\"\"Disposes of any internal state.\n\n Currently, this closes the PoolManager and any active ProxyManager,\n which closes any pooled connections.\n \"\"\"\n self.poolmanager.clear()\n for proxy in self.proxy_manager.values():\n proxy.clear()\n\n def request_url(self, request, proxies):\n \"\"\"Obtain the url to use when making the final request.\n\n If the message is being sent through a HTTP proxy, the full URL has to\n be used. Otherwise, we should only use the path portion of the URL.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.\n :rtype: str\n \"\"\"\n proxy = select_proxy(request.url, proxies)\n scheme = urlparse(request.url).scheme\n\n is_proxied_http_request = (proxy and scheme != 'https')\n using_socks_proxy = False\n if proxy:\n proxy_scheme = urlparse(proxy).scheme.lower()\n using_socks_proxy = proxy_scheme.startswith('socks')\n\n url = request.path_url\n if is_proxied_http_request and not using_socks_proxy:\n url = urldefragauth(request.url)\n\n return url\n\n def add_headers(self, request, **kwargs):\n \"\"\"Add any headers needed by the connection. As of v2.0 this does\n nothing by default, but is left for overriding by users that subclass\n the :class:`HTTPAdapter `.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` to add headers to.\n :param kwargs: The keyword arguments from the call to send().\n \"\"\"\n pass\n\n def proxy_headers(self, proxy):\n \"\"\"Returns a dictionary of the headers to add to any request sent\n through a proxy. This works with urllib3 magic to ensure that they are\n correctly sent to the proxy, rather than in a tunnelled request if\n CONNECT is being used.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The url of the proxy being used for this request.\n :rtype: dict\n \"\"\"\n headers = {}\n username, password = get_auth_from_url(proxy)\n\n if username:\n headers['Proxy-Authorization'] = _basic_auth_str(username,\n password)\n\n return headers\n\n def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. 
Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple or urllib3 Timeout object\n :param verify: (optional) Either a boolean, in which case it controls whether\n we verify the server's TLS certificate, or a string, in which case it\n must be a path to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n :rtype: requests.Response\n \"\"\"\n\n try:\n conn = self.get_connection(request.url, proxies)\n except LocationValueError as e:\n raise InvalidURL(e, request=request)\n\n self.cert_verify(conn, request.url, verify, cert)\n url = self.request_url(request, proxies)\n self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)\n\n chunked = not (request.body is None or 'Content-Length' in request.headers)\n\n if isinstance(timeout, tuple):\n try:\n connect, read = timeout\n timeout = TimeoutSauce(connect=connect, read=read)\n except ValueError as e:\n # this may raise a string formatting error.\n err = (\"Invalid timeout {}. Pass a (connect, read) \"\n \"timeout tuple, or a single float to set \"\n \"both timeouts to the same value\".format(timeout))\n raise ValueError(err)\n elif isinstance(timeout, TimeoutSauce):\n pass\n else:\n timeout = TimeoutSauce(connect=timeout, read=timeout)\n\n try:\n if not chunked:\n resp = conn.urlopen(\n method=request.method,\n url=url,\n body=request.body,\n headers=request.headers,\n redirect=False,\n assert_same_host=False,\n preload_content=False,\n decode_content=False,\n retries=self.max_retries,\n timeout=timeout\n )\n\n # Send the request.\n else:\n if hasattr(conn, 'proxy_pool'):\n conn = conn.proxy_pool\n\n low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)\n\n try:\n skip_host = 'Host' in request.headers\n low_conn.putrequest(request.method,\n url,\n skip_accept_encoding=True,\n skip_host=skip_host)\n\n for header, value in request.headers.items():\n low_conn.putheader(header, value)\n\n low_conn.endheaders()\n\n for i in request.body:\n low_conn.send(hex(len(i))[2:].encode('utf-8'))\n low_conn.send(b'\\r\\n')\n low_conn.send(i)\n low_conn.send(b'\\r\\n')\n low_conn.send(b'0\\r\\n\\r\\n')\n\n # Receive the response from the server\n try:\n # For Python 2.7, use buffering of HTTP responses\n r = low_conn.getresponse(buffering=True)\n except TypeError:\n # For compatibility with Python 3.3+\n r = low_conn.getresponse()\n\n resp = HTTPResponse.from_httplib(\n r,\n pool=conn,\n connection=low_conn,\n preload_content=False,\n decode_content=False\n )\n except:\n # If we hit any problems here, clean up the connection.\n # Then, reraise so that we can handle the actual exception.\n low_conn.close()\n raise\n\n except (ProtocolError, socket.error) as err:\n raise ConnectionError(err, request=request)\n\n except MaxRetryError as e:\n if isinstance(e.reason, ConnectTimeoutError):\n # TODO: Remove this in 3.0.0: see #2811\n if not isinstance(e.reason, NewConnectionError):\n raise ConnectTimeout(e, request=request)\n\n if isinstance(e.reason, ResponseError):\n raise RetryError(e, request=request)\n\n if isinstance(e.reason, _ProxyError):\n raise ProxyError(e, request=request)\n\n if isinstance(e.reason, 
_SSLError):\n # This branch is for urllib3 v1.22 and later.\n raise SSLError(e, request=request)\n\n raise ConnectionError(e, request=request)\n\n except ClosedPoolError as e:\n raise ConnectionError(e, request=request)\n\n except _ProxyError as e:\n raise ProxyError(e)\n\n except (_SSLError, _HTTPError) as e:\n if isinstance(e, _SSLError):\n # This branch is for urllib3 versions earlier than v1.22\n raise SSLError(e, request=request)\n elif isinstance(e, ReadTimeoutError):\n raise ReadTimeout(e, request=request)\n elif isinstance(e, _InvalidHeader):\n raise InvalidHeader(e, request=request)\n else:\n raise\n\n return self.build_response(request, resp)"},{"attributeType":"null","col":8,"comment":"null","endLoc":366,"id":1107,"name":"hooks","nodeType":"Attribute","startLoc":366,"text":"self.hooks"},{"className":"BaseAdapter","col":0,"comment":"The Base Transport Adapter","endLoc":82,"id":1109,"nodeType":"Class","startLoc":56,"text":"class BaseAdapter(object):\n \"\"\"The Base Transport Adapter\"\"\"\n\n def __init__(self):\n super(BaseAdapter, self).__init__()\n\n def send(self, request, stream=False, timeout=None, verify=True,\n cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n \"\"\"\n raise NotImplementedError\n\n def close(self):\n \"\"\"Cleans up adapter specific items.\"\"\"\n raise NotImplementedError"},{"col":4,"comment":"Sends PreparedRequest object. Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n ","endLoc":78,"header":"def send(self, request, stream=False, timeout=None, verify=True,\n cert=None, proxies=None)","id":1110,"name":"send","nodeType":"Function","startLoc":62,"text":"def send(self, request, stream=False, timeout=None, verify=True,\n cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. 
Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n \"\"\"\n raise NotImplementedError"},{"col":4,"comment":"Cleans up adapter specific items.","endLoc":82,"header":"def close(self)","id":1111,"name":"close","nodeType":"Function","startLoc":80,"text":"def close(self):\n \"\"\"Cleans up adapter specific items.\"\"\"\n raise NotImplementedError"},{"col":4,"comment":"null","endLoc":133,"header":"def __getstate__(self)","id":1112,"name":"__getstate__","nodeType":"Function","startLoc":132,"text":"def __getstate__(self):\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}"},{"attributeType":"null","col":8,"comment":"null","endLoc":404,"id":1113,"name":"cookies","nodeType":"Attribute","startLoc":404,"text":"self.cookies"},{"col":4,"comment":"null","endLoc":145,"header":"def __setstate__(self, state)","id":1114,"name":"__setstate__","nodeType":"Function","startLoc":135,"text":"def __setstate__(self, state):\n # Can't handle by adding 'proxy_manager' to self.__attrs__ because\n # self.poolmanager uses a lambda function, which isn't pickleable.\n self.proxy_manager = {}\n self.config = {}\n\n for attr, value in state.items():\n setattr(self, attr, value)\n\n self.init_poolmanager(self._pool_connections, self._pool_maxsize,\n block=self._pool_block)"},{"col":4,"comment":"Return urllib3 ProxyManager for the given proxy.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The proxy to return a urllib3 ProxyManager for.\n :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.\n :returns: ProxyManager\n :rtype: urllib3.ProxyManager\n ","endLoc":202,"header":"def proxy_manager_for(self, proxy, **proxy_kwargs)","id":1115,"name":"proxy_manager_for","nodeType":"Function","startLoc":167,"text":"def proxy_manager_for(self, proxy, **proxy_kwargs):\n \"\"\"Return urllib3 ProxyManager for the given proxy.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The proxy to return a urllib3 ProxyManager for.\n :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.\n :returns: ProxyManager\n :rtype: urllib3.ProxyManager\n \"\"\"\n if proxy in self.proxy_manager:\n manager = self.proxy_manager[proxy]\n elif proxy.lower().startswith('socks'):\n username, password = get_auth_from_url(proxy)\n manager = self.proxy_manager[proxy] = SOCKSProxyManager(\n proxy,\n username=username,\n password=password,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n block=self._pool_block,\n **proxy_kwargs\n )\n else:\n proxy_headers = self.proxy_headers(proxy)\n manager = self.proxy_manager[proxy] = proxy_from_url(\n proxy,\n proxy_headers=proxy_headers,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n 
block=self._pool_block,\n **proxy_kwargs)\n\n return manager"},{"className":"NullHandler","col":0,"comment":"null","endLoc":651,"id":1116,"nodeType":"Class","startLoc":651,"text":"class NullHandler(Handler): ..."},{"col":4,"comment":"Returns a dictionary of the headers to add to any request sent\n through a proxy. This works with urllib3 magic to ensure that they are\n correctly sent to the proxy, rather than in a tunnelled request if\n CONNECT is being used.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The url of the proxy being used for this request.\n :rtype: dict\n ","endLoc":393,"header":"def proxy_headers(self, proxy)","id":1118,"name":"proxy_headers","nodeType":"Function","startLoc":373,"text":"def proxy_headers(self, proxy):\n \"\"\"Returns a dictionary of the headers to add to any request sent\n through a proxy. This works with urllib3 magic to ensure that they are\n correctly sent to the proxy, rather than in a tunnelled request if\n CONNECT is being used.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The url of the proxy being used for this request.\n :rtype: dict\n \"\"\"\n headers = {}\n username, password = get_auth_from_url(proxy)\n\n if username:\n headers['Proxy-Authorization'] = _basic_auth_str(username,\n password)\n\n return headers"},{"col":0,"comment":"","endLoc":11,"header":"api.py#","id":1121,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.api\n~~~~~~~~~~~~\n\nThis module implements the Requests API.\n\n:copyright: (c) 2012 by Kenneth Reitz.\n:license: Apache2, see LICENSE for more details.\n\"\"\""},{"col":4,"comment":"Verify a SSL certificate. This method should not be called from user\n code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param conn: The urllib3 connection object associated with the cert.\n :param url: The requested URL.\n :param verify: Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: The SSL certificate to verify.\n ","endLoc":254,"header":"def cert_verify(self, conn, url, verify, cert)","id":1122,"name":"cert_verify","nodeType":"Function","startLoc":204,"text":"def cert_verify(self, conn, url, verify, cert):\n \"\"\"Verify a SSL certificate. 
This method should not be called from user\n code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param conn: The urllib3 connection object associated with the cert.\n :param url: The requested URL.\n :param verify: Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: The SSL certificate to verify.\n \"\"\"\n if url.lower().startswith('https') and verify:\n\n cert_loc = None\n\n # Allow self-specified cert location.\n if verify is not True:\n cert_loc = verify\n\n if not cert_loc:\n cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)\n\n if not cert_loc or not os.path.exists(cert_loc):\n raise IOError(\"Could not find a suitable TLS CA certificate bundle, \"\n \"invalid path: {}\".format(cert_loc))\n\n conn.cert_reqs = 'CERT_REQUIRED'\n\n if not os.path.isdir(cert_loc):\n conn.ca_certs = cert_loc\n else:\n conn.ca_cert_dir = cert_loc\n else:\n conn.cert_reqs = 'CERT_NONE'\n conn.ca_certs = None\n conn.ca_cert_dir = None\n\n if cert:\n if not isinstance(cert, basestring):\n conn.cert_file = cert[0]\n conn.key_file = cert[1]\n else:\n conn.cert_file = cert\n conn.key_file = None\n if conn.cert_file and not os.path.exists(conn.cert_file):\n raise IOError(\"Could not find the TLS certificate file, \"\n \"invalid path: {}\".format(conn.cert_file))\n if conn.key_file and not os.path.exists(conn.key_file):\n raise IOError(\"Could not find the TLS key file, \"\n \"invalid path: {}\".format(conn.key_file))"},{"className":"Handler","col":0,"comment":"null","endLoc":266,"id":1123,"nodeType":"Class","startLoc":248,"text":"class Handler(Filterer):\n level: int # undocumented\n formatter: Formatter | None # undocumented\n lock: threading.Lock | None # undocumented\n name: str | None # undocumented\n def __init__(self, level: _Level = 0) -> None: ...\n def get_name(self) -> str: ... # undocumented\n def set_name(self, name: str) -> None: ... 
# undocumented\n def createLock(self) -> None: ...\n def acquire(self) -> None: ...\n def release(self) -> None: ...\n def setLevel(self, level: _Level) -> None: ...\n def setFormatter(self, fmt: Formatter | None) -> None: ...\n def flush(self) -> None: ...\n def close(self) -> None: ...\n def handle(self, record: LogRecord) -> bool: ...\n def handleError(self, record: LogRecord) -> None: ...\n def format(self, record: LogRecord) -> str: ...\n def emit(self, record: LogRecord) -> None: ..."},{"fileName":"hooks.py","filePath":"requests","id":1124,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.hooks\n~~~~~~~~~~~~~~\n\nThis module provides the capabilities for the Requests hooks system.\n\nAvailable hooks:\n\n``response``:\n The response generated from a Request.\n\"\"\"\nHOOKS = ['response']\n\n\ndef default_hooks():\n return {event: [] for event in HOOKS}\n\n# TODO: response is the only one\n\n\ndef dispatch_hook(key, hooks, hook_data, **kwargs):\n \"\"\"Dispatches a hook dictionary on a given piece of data.\"\"\"\n hooks = hooks or {}\n hooks = hooks.get(key)\n if hooks:\n if hasattr(hooks, '__call__'):\n hooks = [hooks]\n for hook in hooks:\n _hook_data = hook(hook_data, **kwargs)\n if _hook_data is not None:\n hook_data = _hook_data\n return hook_data\n"},{"className":"Filterer","col":0,"comment":"null","endLoc":102,"id":1125,"nodeType":"Class","startLoc":95,"text":"class Filterer:\n filters: list[_FilterType]\n def addFilter(self, filter: _FilterType) -> None: ...\n def removeFilter(self, filter: _FilterType) -> None: ...\n if sys.version_info >= (3, 12):\n def filter(self, record: LogRecord) -> bool | LogRecord: ...\n else:\n def filter(self, record: LogRecord) -> bool: ..."},{"col":4,"comment":"null","endLoc":97,"header":"def addFilter(self, filter: _FilterType) -> None","id":1126,"name":"addFilter","nodeType":"Function","startLoc":97,"text":"def addFilter(self, filter: _FilterType) -> None: ..."},{"col":4,"comment":"null","endLoc":98,"header":"def removeFilter(self, filter: _FilterType) -> None","id":1127,"name":"removeFilter","nodeType":"Function","startLoc":98,"text":"def removeFilter(self, filter: _FilterType) -> None: ..."},{"col":8,"comment":"null","endLoc":102,"header":"def filter(self, record: LogRecord) -> bool","id":1128,"name":"filter","nodeType":"Function","startLoc":102,"text":"def filter(self, record: LogRecord) -> bool: ..."},{"attributeType":"list","col":4,"comment":"null","endLoc":96,"id":1129,"name":"filters","nodeType":"Attribute","startLoc":96,"text":"filters"},{"attributeType":"list","col":0,"comment":"null","endLoc":14,"id":1130,"name":"HOOKS","nodeType":"Attribute","startLoc":14,"text":"HOOKS"},{"col":0,"comment":"","endLoc":13,"header":"hooks.py#","id":1131,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.hooks\n~~~~~~~~~~~~~~\n\nThis module provides the capabilities for the Requests hooks system.\n\nAvailable hooks:\n\n``response``:\n The response generated from a Request.\n\"\"\"\n\nHOOKS = ['response']"},{"attributeType":"str","col":4,"comment":"null","endLoc":23,"id":1132,"name":"mod","nodeType":"Attribute","startLoc":23,"text":"mod"},{"id":1133,"name":"Feature_request.md","nodeType":"TextFile","path":".github/ISSUE_TEMPLATE","text":"---\nname: Feature request\nabout: Suggest an idea for this project\n\n---\n\nRequests is not accepting feature requests at this time.\n"},{"id":1135,"name":"LICENSE","nodeType":"TextFile","path":"","text":"\n Apache License\n Version 2.0, January 2004\n 
http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. 
Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n"},{"col":0,"comment":"Verify we only append previous fragment if one doesn't exist on new\n location. If a new fragment is encountered in a Location header, it should\n be added to all subsequent requests.\n ","endLoc":405,"header":"def test_fragment_update_on_redirect()","id":1139,"name":"test_fragment_update_on_redirect","nodeType":"Function","startLoc":364,"text":"def test_fragment_update_on_redirect():\n \"\"\"Verify we only append previous fragment if one doesn't exist on new\n location. 
If a new fragment is encountered in a Location header, it should\n be added to all subsequent requests.\n \"\"\"\n\n def response_handler(sock):\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /get#relevant-section\\r\\n\\r\\n'\n )\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 302 FOUND\\r\\n'\n b'Content-Length: 0\\r\\n'\n b'Location: /final-url/\\r\\n\\r\\n'\n )\n consume_socket_content(sock, timeout=0.5)\n sock.send(\n b'HTTP/1.1 200 OK\\r\\n\\r\\n'\n )\n\n close_server = threading.Event()\n server = Server(response_handler, wait_to_close_event=close_server)\n\n with server as (host, port):\n url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port)\n r = requests.get(url)\n raw_request = r.content\n\n assert r.status_code == 200\n assert len(r.history) == 2\n assert r.history[0].request.url == url\n\n # Verify we haven't overwritten the location with our previous fragment.\n assert r.history[1].request.url == 'http://{}:{}/get#relevant-section'.format(host, port)\n # Verify previous fragment is used and not the original.\n assert r.url == 'http://{}:{}/final-url/#relevant-section'.format(host, port)\n\n close_server.set()"},{"col":4,"comment":"null","endLoc":253,"header":"def __init__(self, level: _Level = 0) -> None","id":1140,"name":"__init__","nodeType":"Function","startLoc":253,"text":"def __init__(self, level: _Level = 0) -> None: ..."},{"id":1141,"name":"HISTORY.md","nodeType":"TextFile","path":"","text":"Release History\n===============\n\ndev\n---\n\n- \\[Short description of non-trivial change.\\]\n\n- Added a `requests.exceptions.JSONDecodeError` to decrease inconsistencies\n in the library. This gets raised in the `response.json()` method, and is\n backwards compatible as it inherits from previously thrown exceptions.\n Can be caught from `requests.exceptions.RequestException` as well.\n\n- Catch `AttributeError` when calculating length of files obtained by\n `Tarfile.extractfile()`\n\n2.26.0 (2021-07-13)\n-------------------\n\n**Improvements**\n\n- Requests now supports Brotli compression, if either the `brotli` or\n `brotlicffi` package is installed. (#5783)\n\n- `Session.send` now correctly resolves proxy configurations from both\n the Session and Request. Behavior now matches `Session.request`. (#5681)\n\n**Bugfixes**\n\n- Fixed a race condition in zip extraction when using Requests in parallel\n from zip archive. (#5707)\n\n**Dependencies**\n\n- Instead of `chardet`, use the MIT-licensed `charset_normalizer` for Python3\n to remove license ambiguity for projects bundling requests. If `chardet`\n is already installed on your machine it will be used instead of `charset_normalizer`\n to keep backwards compatibility. (#5797)\n\n You can also install `chardet` while installing requests by\n specifying `[use_chardet_on_py3]` extra as follows:\n\n ```shell\n pip install \"requests[use_chardet_on_py3]\"\n ```\n\n Python2 still depends upon the `chardet` module.\n\n- Requests now supports `idna` 3.x on Python 3. `idna` 2.x will continue to\n be used on Python 2 installations. (#5711)\n\n**Deprecations**\n\n- The `requests[security]` extra has been converted to a no-op install.\n PyOpenSSL is no longer the recommended secure option for Requests. (#5867)\n\n- Requests has officially dropped support for Python 3.5. (#5867)\n\n2.25.1 (2020-12-16)\n-------------------\n\n**Bugfixes**\n\n- Requests now treats `application/json` as `utf8` by default. 
Resolving\n inconsistencies between `r.text` and `r.json` output. (#5673)\n\n**Dependencies**\n\n- Requests now supports chardet v4.x.\n\n2.25.0 (2020-11-11)\n-------------------\n\n**Improvements**\n\n- Added support for NETRC environment variable. (#5643)\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.26.\n\n**Deprecations**\n\n- Requests v2.25.x will be the last release series with support for Python 3.5.\n- The `requests[security]` extra is officially deprecated and will be removed\n in Requests v2.26.0.\n\n2.24.0 (2020-06-17)\n-------------------\n\n**Improvements**\n\n- pyOpenSSL TLS implementation is now only used if Python\n either doesn't have an `ssl` module or doesn't support\n SNI. Previously pyOpenSSL was unconditionally used if available.\n This applies even if pyOpenSSL is installed via the\n `requests[security]` extra (#5443)\n\n- Redirect resolution should now only occur when\n `allow_redirects` is True. (#5492)\n\n- No longer perform unnecessary Content-Length calculation for\n requests that won't use it. (#5496)\n\n2.23.0 (2020-02-19)\n-------------------\n\n**Improvements**\n\n- Remove defunct reference to `prefetch` in Session `__attrs__` (#5110)\n\n**Bugfixes**\n\n- Requests no longer outputs password in basic auth usage warning. (#5099)\n\n**Dependencies**\n\n- Pinning for `chardet` and `idna` now uses major version instead of minor.\n This hopefully reduces the need for releases every time a dependency is updated.\n\n2.22.0 (2019-05-15)\n-------------------\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.25.2.\n (note: 1.25.0 and 1.25.1 are incompatible)\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 3.4.\n\n2.21.0 (2018-12-10)\n-------------------\n\n**Dependencies**\n\n- Requests now supports idna v2.8.\n\n2.20.1 (2018-11-08)\n-------------------\n\n**Bugfixes**\n\n- Fixed bug with unintended Authorization header stripping for\n redirects using default ports (http/80, https/443).\n\n2.20.0 (2018-10-18)\n-------------------\n\n**Bugfixes**\n\n- Content-Type header parsing is now case-insensitive (e.g.\n charset=utf8 v Charset=utf8).\n- Fixed exception leak where certain redirect urls would raise\n uncaught urllib3 exceptions.\n- Requests removes Authorization header from requests redirected\n from https to http on the same hostname. (CVE-2018-18074)\n- `should_bypass_proxies` now handles URIs without hostnames (e.g.\n files).\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.24.\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 2.6.\n\n2.19.1 (2018-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed issue where status\\_codes.py's `init` function failed trying\n to append to a `__doc__` value of `None`.\n\n2.19.0 (2018-06-12)\n-------------------\n\n**Improvements**\n\n- Warn user about possible slowdown when using cryptography version\n < 1.3.4\n- Check for invalid host in proxy URL, before forwarding request to\n adapter.\n- Fragments are now properly maintained across redirects. 
(RFC7231\n 7.1.2)\n- Removed use of cgi module to expedite library load time.\n- Added support for SHA-256 and SHA-512 digest auth algorithms.\n- Minor performance improvement to `Request.content`.\n- Migrate to using collections.abc for 3.7 compatibility.\n\n**Bugfixes**\n\n- Parsing empty `Link` headers with `parse_header_links()` no longer\n return one bogus entry.\n- Fixed issue where loading the default certificate bundle from a zip\n archive would raise an `IOError`.\n- Fixed issue with unexpected `ImportError` on windows system which do\n not support `winreg` module.\n- DNS resolution in proxy bypass no longer includes the username and\n password in the request. This also fixes the issue of DNS queries\n failing on macOS.\n- Properly normalize adapter prefixes for url comparison.\n- Passing `None` as a file pointer to the `files` param no longer\n raises an exception.\n- Calling `copy` on a `RequestsCookieJar` will now preserve the cookie\n policy correctly.\n\n**Dependencies**\n\n- We now support idna v2.7.\n- We now support urllib3 v1.23.\n\n2.18.4 (2017-08-15)\n-------------------\n\n**Improvements**\n\n- Error messages for invalid headers now include the header name for\n easier debugging\n\n**Dependencies**\n\n- We now support idna v2.6.\n\n2.18.3 (2017-08-02)\n-------------------\n\n**Improvements**\n\n- Running `$ python -m requests.help` now includes the installed\n version of idna.\n\n**Bugfixes**\n\n- Fixed issue where Requests would raise `ConnectionError` instead of\n `SSLError` when encountering SSL problems when using urllib3 v1.22.\n\n2.18.2 (2017-07-25)\n-------------------\n\n**Bugfixes**\n\n- `requests.help` no longer fails on Python 2.6 due to the absence of\n `ssl.OPENSSL_VERSION_NUMBER`.\n\n**Dependencies**\n\n- We now support urllib3 v1.22.\n\n2.18.1 (2017-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fix an error in the packaging whereby the `*.whl` contained\n incorrect data that regressed the fix in v2.17.3.\n\n2.18.0 (2017-06-14)\n-------------------\n\n**Improvements**\n\n- `Response` is now a context manager, so can be used directly in a\n `with` statement without first having to be wrapped by\n `contextlib.closing()`.\n\n**Bugfixes**\n\n- Resolve installation failure if multiprocessing is not available\n- Resolve tests crash if multiprocessing is not able to determine the\n number of CPU cores\n- Resolve error swallowing in utils set\\_environ generator\n\n2.17.3 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.2 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.1 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.0 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Removal of the 301 redirect cache. 
This improves thread-safety.\n\n2.16.5 (2017-05-28)\n-------------------\n\n- Improvements to `$ python -m requests.help`.\n\n2.16.4 (2017-05-27)\n-------------------\n\n- Introduction of the `$ python -m requests.help` command, for\n debugging with maintainers!\n\n2.16.3 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\n2.16.2 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\nNo code modification (noted below) should be necessary any longer.\n\n2.16.1 (2017-05-27)\n-------------------\n\n- Restored the `requests.packages` namespace for compatibility\n reasons.\n- Bugfix for `urllib3` version parsing.\n\n**Note**: code that was written to import against the\n`requests.packages` namespace previously will have to import code that\nrests at this module-level now.\n\nFor example:\n\n from requests.packages.urllib3.poolmanager import PoolManager\n\nWill need to be re-written to be:\n\n from requests.packages import urllib3\n urllib3.poolmanager.PoolManager\n\nOr, even better:\n\n from urllib3.poolmanager import PoolManager\n\n2.16.0 (2017-05-26)\n-------------------\n\n- Unvendor ALL the things!\n\n2.15.1 (2017-05-26)\n-------------------\n\n- Everyone makes mistakes.\n\n2.15.0 (2017-05-26)\n-------------------\n\n**Improvements**\n\n- Introduction of the `Response.next` property, for getting the next\n `PreparedResponse` from a redirect chain (when\n `allow_redirects=False`).\n- Internal refactoring of `__version__` module.\n\n**Bugfixes**\n\n- Restored once-optional parameter for\n `requests.utils.get_environ_proxies()`.\n\n2.14.2 (2017-05-10)\n-------------------\n\n**Bugfixes**\n\n- Changed a less-than to an equal-to and an or in the dependency\n markers to widen compatibility with older setuptools releases.\n\n2.14.1 (2017-05-09)\n-------------------\n\n**Bugfixes**\n\n- Changed the dependency markers to widen compatibility with older pip\n releases.\n\n2.14.0 (2017-05-09)\n-------------------\n\n**Improvements**\n\n- It is now possible to pass `no_proxy` as a key to the `proxies`\n dictionary to provide handling similar to the `NO_PROXY` environment\n variable.\n- When users provide invalid paths to certificate bundle files or\n directories Requests now raises `IOError`, rather than failing at\n the time of the HTTPS request with a fairly inscrutable certificate\n validation error.\n- The behavior of `SessionRedirectMixin` was slightly altered.\n `resolve_redirects` will now detect a redirect by calling\n `get_redirect_target(response)` instead of directly querying\n `Response.is_redirect` and `Response.headers['location']`. Advanced\n users will be able to process malformed redirects more easily.\n- Changed the internal calculation of elapsed request time to have\n higher resolution on Windows.\n- Added `win_inet_pton` as conditional dependency for the `[socks]`\n extra on Windows with Python 2.7.\n- Changed the proxy bypass implementation on Windows: the proxy bypass\n check doesn't use forward and reverse DNS requests anymore\n- URLs with schemes that begin with `http` but are not `http` or\n `https` no longer have their host parts forced to lowercase.\n\n**Bugfixes**\n\n- Much improved handling of non-ASCII `Location` header values in\n redirects. 
Fewer `UnicodeDecodeErrors` are encountered on Python 2,\n and Python 3 now correctly understands that Latin-1 is unlikely to\n be the correct encoding.\n- If an attempt to `seek` file to find out its length fails, we now\n appropriately handle that by aborting our content-length\n calculations.\n- Restricted `HTTPDigestAuth` to only respond to auth challenges made\n on 4XX responses, rather than to all auth challenges.\n- Fixed some code that was firing `DeprecationWarning` on Python 3.6.\n- The dismayed person emoticon (`/o\\\\`) no longer has a big head. I'm\n sure this is what you were all worrying about most.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to v1.21.1.\n- Updated bundled chardet to v3.0.2.\n- Updated bundled idna to v2.5.\n- Updated bundled certifi to 2017.4.17.\n\n2.13.0 (2017-01-24)\n-------------------\n\n**Features**\n\n- Only load the `idna` library when we've determined we need it. This\n will save some memory for users.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.20.\n- Updated bundled idna to 2.2.\n\n2.12.5 (2017-01-18)\n-------------------\n\n**Bugfixes**\n\n- Fixed an issue with JSON encoding detection, specifically detecting\n big-endian UTF-32 with BOM.\n\n2.12.4 (2016-12-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from 2.12.2 where non-string types were rejected in\n the basic auth parameters. While support for this behaviour has been\n re-added, the behaviour is deprecated and will be removed in the\n future.\n\n2.12.3 (2016-12-01)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from v2.12.1 for URLs with schemes that begin with\n \"http\". These URLs have historically been processed as though they\n were HTTP-schemed URLs, and so have had parameters added. This was\n removed in v2.12.2 in an overzealous attempt to resolve problems\n with IDNA-encoding those URLs. This change was reverted: the other\n fixes for IDNA-encoding have been judged to be sufficient to return\n to the behaviour Requests had before v2.12.0.\n\n2.12.2 (2016-11-30)\n-------------------\n\n**Bugfixes**\n\n- Fixed several issues with IDNA-encoding URLs that are technically\n invalid but which are widely accepted. Requests will now attempt to\n IDNA-encode a URL if it can but, if it fails, and the host contains\n only ASCII characters, it will be passed through optimistically.\n This will allow users to opt-in to using IDNA2003 themselves if they\n want to, and will also allow technically invalid but still common\n hostnames.\n- Fixed an issue where URLs with leading whitespace would raise\n `InvalidSchema` errors.\n- Fixed an issue where some URLs without the HTTP or HTTPS schemes\n would still have HTTP URL preparation applied to them.\n- Fixed an issue where Unicode strings could not be used in basic\n auth.\n- Fixed an issue encountered by some Requests plugins where\n constructing a Response object would cause `Response.content` to\n raise an `AttributeError`.\n\n2.12.1 (2016-11-16)\n-------------------\n\n**Bugfixes**\n\n- Updated setuptools 'security' extra for the new PyOpenSSL backend in\n urllib3.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.1.\n\n2.12.0 (2016-11-15)\n-------------------\n\n**Improvements**\n\n- Updated support for internationalized domain names from IDNA2003 to\n IDNA2008. 
This updated support is required for several forms of IDNs\n and is mandatory for .de domains.\n- Much improved heuristics for guessing content lengths: Requests will\n no longer read an entire `StringIO` into memory.\n- Much improved logic for recalculating `Content-Length` headers for\n `PreparedRequest` objects.\n- Improved tolerance for file-like objects that have no `tell` method\n but do have a `seek` method.\n- Anything that is a subclass of `Mapping` is now treated like a\n dictionary by the `data=` keyword argument.\n- Requests now tolerates empty passwords in proxy credentials, rather\n than stripping the credentials.\n- If a request is made with a file-like object as the body and that\n request is redirected with a 307 or 308 status code, Requests will\n now attempt to rewind the body object so it can be replayed.\n\n**Bugfixes**\n\n- When calling `response.close`, the call to `close` will be\n propagated through to non-urllib3 backends.\n- Fixed issue where the `ALL_PROXY` environment variable would be\n preferred over scheme-specific variables like `HTTP_PROXY`.\n- Fixed issue where non-UTF8 reason phrases got severely mangled by\n falling back to decoding using ISO 8859-1 instead.\n- Fixed a bug where Requests would not correctly correlate cookies set\n when using custom Host headers if those Host headers did not use the\n native string type for the platform.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.\n- Updated bundled certifi certs to 2016.09.26.\n\n2.11.1 (2016-08-17)\n-------------------\n\n**Bugfixes**\n\n- Fixed a bug when using `iter_content` with `decode_unicode=True` for\n streamed bodies would raise `AttributeError`. This bug was\n introduced in 2.11.\n- Strip Content-Type and Transfer-Encoding headers from the header\n block when following a redirect that transforms the verb from\n POST/PUT to GET.\n\n2.11.0 (2016-08-08)\n-------------------\n\n**Improvements**\n\n- Added support for the `ALL_PROXY` environment variable.\n- Reject header values that contain leading whitespace or newline\n characters to reduce risk of header smuggling.\n\n**Bugfixes**\n\n- Fixed occasional `TypeError` when attempting to decode a JSON\n response that occurred in an error case. Now correctly returns a\n `ValueError`.\n- Requests would incorrectly ignore a non-CIDR IP address in the\n `NO_PROXY` environment variables: Requests now treats it as a\n specific IP.\n- Fixed a bug when sending JSON data that could cause us to encounter\n obscure OpenSSL errors in certain network conditions (yes, really).\n- Added type checks to ensure that `iter_content` only accepts\n integers and `None` for chunk sizes.\n- Fixed issue where responses whose body had not been fully consumed\n would have the underlying connection closed but not returned to the\n connection pool, which could cause Requests to hang in situations\n where the `HTTPAdapter` had been configured to use a blocking\n connection pool.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.16.\n- Some previous releases accidentally accepted non-strings as\n acceptable header values. This release does not.\n\n2.10.0 (2016-04-29)\n-------------------\n\n**New Features**\n\n- SOCKS Proxy Support! 
(requires PySocks;\n `$ pip install requests[socks]`)\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.15.1.\n\n2.9.2 (2016-04-29)\n------------------\n\n**Improvements**\n\n- Change built-in CaseInsensitiveDict (used for headers) to use\n OrderedDict as its underlying datastore.\n\n**Bugfixes**\n\n- Don't use redirect\\_cache if allow\\_redirects=False\n- When passed objects that throw exceptions from `tell()`, send them\n via chunked transfer encoding instead of failing.\n- Raise a ProxyError for proxy related connection issues.\n\n2.9.1 (2015-12-21)\n------------------\n\n**Bugfixes**\n\n- Resolve regression introduced in 2.9.0 that made it impossible to\n send binary strings as bodies in Python 3.\n- Fixed errors when calculating cookie expiration dates in certain\n locales.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.13.1.\n\n2.9.0 (2015-12-15)\n------------------\n\n**Minor Improvements** (Backwards compatible)\n\n- The `verify` keyword argument now supports being passed a path to a\n directory of CA certificates, not just a single-file bundle.\n- Warnings are now emitted when sending files opened in text mode.\n- Added the 511 Network Authentication Required status code to the\n status code registry.\n\n**Bugfixes**\n\n- For file-like objects that are not sought to the very beginning, we\n now send the content length for the number of bytes we will actually\n read, rather than the total size of the file, allowing partial file\n uploads.\n- When uploading file-like objects, if they are empty or have no\n obvious content length we set `Transfer-Encoding: chunked` rather\n than `Content-Length: 0`.\n- We correctly receive the response in buffered mode when uploading\n chunked bodies.\n- We now handle being passed a query string as a bytestring on Python\n 3, by decoding it as UTF-8.\n- Sessions are now closed in all cases (exceptional and not) when\n using the functional API rather than leaking and waiting for the\n garbage collector to clean them up.\n- Correctly handle digest auth headers with a malformed `qop`\n directive that contains no token, by treating it the same as if no\n `qop` directive was provided at all.\n- Minor performance improvements when removing specific cookies by\n name.\n\n**Miscellaneous**\n\n- Updated urllib3 to 1.13.\n\n2.8.1 (2015-10-13)\n------------------\n\n**Bugfixes**\n\n- Update certificate bundle to match `certifi` 2015.9.6.2's weak\n certificate bundle.\n- Fix a bug in 2.8.0 where requests would raise `ConnectTimeout`\n instead of `ConnectionError`\n- When using the PreparedRequest flow, requests will now correctly\n respect the `json` parameter. Broken in 2.8.0.\n- When using the PreparedRequest flow, requests will now correctly\n handle a Unicode-string method name on Python 2. Broken in 2.8.0.\n\n2.8.0 (2015-10-05)\n------------------\n\n**Minor Improvements** (Backwards Compatible)\n\n- Requests now supports per-host proxies. This allows the `proxies`\n dictionary to have entries of the form\n `{'://': ''}`. Host-specific proxies will\n be used in preference to the previously-supported scheme-specific\n ones, but the previous syntax will continue to work.\n- `Response.raise_for_status` now prints the URL that failed as part\n of the exception message.\n- `requests.utils.get_netrc_auth` now takes an `raise_errors` kwarg,\n defaulting to `False`. 
When `True`, errors parsing `.netrc` files\n cause exceptions to be thrown.\n- Change to bundled projects import logic to make it easier to\n unbundle requests downstream.\n- Changed the default User-Agent string to avoid leaking data on\n Linux: now contains only the requests version.\n\n**Bugfixes**\n\n- The `json` parameter to `post()` and friends will now only be used\n if neither `data` nor `files` are present, consistent with the\n documentation.\n- We now ignore empty fields in the `NO_PROXY` environment variable.\n- Fixed problem where `httplib.BadStatusLine` would get raised if\n combining `stream=True` with `contextlib.closing`.\n- Prevented bugs where we would attempt to return the same connection\n back to the connection pool twice when sending a Chunked body.\n- Miscellaneous minor internal changes.\n- Digest Auth support is now thread safe.\n\n**Updates**\n\n- Updated urllib3 to 1.12.\n\n2.7.0 (2015-05-03)\n------------------\n\nThis is the first release that follows our new release process. For\nmore, see [our\ndocumentation](https://requests.readthedocs.io/en/latest/community/release-process/).\n\n**Bugfixes**\n\n- Updated urllib3 to 1.10.4, resolving several bugs involving chunked\n transfer encoding and response framing.\n\n2.6.2 (2015-04-23)\n------------------\n\n**Bugfixes**\n\n- Fix regression where compressed data that was sent as chunked data\n was not properly decompressed. (\\#2561)\n\n2.6.1 (2015-04-22)\n------------------\n\n**Bugfixes**\n\n- Remove VendorAlias import machinery introduced in v2.5.2.\n- Simplify the PreparedRequest.prepare API: We no longer require the\n user to pass an empty list to the hooks keyword argument. (c.f.\n \\#2552)\n- Resolve redirects now receives and forwards all of the original\n arguments to the adapter. (\\#2503)\n- Handle UnicodeDecodeErrors when trying to deal with a unicode URL\n that cannot be encoded in ASCII. (\\#2540)\n- Populate the parsed path of the URI field when performing Digest\n Authentication. (\\#2426)\n- Copy a PreparedRequest's CookieJar more reliably when it is not an\n instance of RequestsCookieJar. (\\#2527)\n\n2.6.0 (2015-03-14)\n------------------\n\n**Bugfixes**\n\n- CVE-2015-2296: Fix handling of cookies on redirect. Previously a\n cookie without a host value set would use the hostname for the\n redirected URL exposing requests users to session fixation attacks\n and potentially cookie stealing. This was disclosed privately by\n Matthew Daley of [BugFuzz](https://bugfuzz.com). This affects all\n versions of requests from v2.1.0 to v2.5.3 (inclusive on both ends).\n- Fix error when requests is an `install_requires` dependency and\n `python setup.py test` is run. (\\#2462)\n- Fix error when urllib3 is unbundled and requests continues to use\n the vendored import location.\n- Include fixes to `urllib3`'s header handling.\n- Requests' handling of unvendored dependencies is now more\n restrictive.\n\n**Features and Improvements**\n\n- Support bytearrays when passed as parameters in the `files`\n argument. (\\#2468)\n- Avoid data duplication when creating a request with `str`, `bytes`,\n or `bytearray` input to the `files` argument.\n\n2.5.3 (2015-02-24)\n------------------\n\n**Bugfixes**\n\n- Revert changes to our vendored certificate bundle. 
For more context\n see (\\#2455, \\#2456, and )\n\n2.5.2 (2015-02-23)\n------------------\n\n**Features and Improvements**\n\n- Add sha256 fingerprint support.\n ([shazow/urllib3\\#540](https://github.com/shazow/urllib3/pull/540))\n- Improve the performance of headers.\n ([shazow/urllib3\\#544](https://github.com/shazow/urllib3/pull/544))\n\n**Bugfixes**\n\n- Copy pip's import machinery. When downstream redistributors remove\n requests.packages.urllib3 the import machinery will continue to let\n those same symbols work. Example usage in requests' documentation\n and 3rd-party libraries relying on the vendored copies of urllib3\n will work without having to fallback to the system urllib3.\n- Attempt to quote parts of the URL on redirect if unquoting and then\n quoting fails. (\\#2356)\n- Fix filename type check for multipart form-data uploads. (\\#2411)\n- Properly handle the case where a server issuing digest\n authentication challenges provides both auth and auth-int\n qop-values. (\\#2408)\n- Fix a socket leak.\n ([shazow/urllib3\\#549](https://github.com/shazow/urllib3/pull/549))\n- Fix multiple `Set-Cookie` headers properly.\n ([shazow/urllib3\\#534](https://github.com/shazow/urllib3/pull/534))\n- Disable the built-in hostname verification.\n ([shazow/urllib3\\#526](https://github.com/shazow/urllib3/pull/526))\n- Fix the behaviour of decoding an exhausted stream.\n ([shazow/urllib3\\#535](https://github.com/shazow/urllib3/pull/535))\n\n**Security**\n\n- Pulled in an updated `cacert.pem`.\n- Drop RC4 from the default cipher list.\n ([shazow/urllib3\\#551](https://github.com/shazow/urllib3/pull/551))\n\n2.5.1 (2014-12-23)\n------------------\n\n**Behavioural Changes**\n\n- Only catch HTTPErrors in raise\\_for\\_status (\\#2382)\n\n**Bugfixes**\n\n- Handle LocationParseError from urllib3 (\\#2344)\n- Handle file-like object filenames that are not strings (\\#2379)\n- Unbreak HTTPDigestAuth handler. 
Allow new nonces to be negotiated\n (\\#2389)\n\n2.5.0 (2014-12-01)\n------------------\n\n**Improvements**\n\n- Allow usage of urllib3's Retry object with HTTPAdapters (\\#2216)\n- The `iter_lines` method on a response now accepts a delimiter with\n which to split the content (\\#2295)\n\n**Behavioural Changes**\n\n- Add deprecation warnings to functions in requests.utils that will be\n removed in 3.0 (\\#2309)\n- Sessions used by the functional API are always closed (\\#2326)\n- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9)\n (\\#2323)\n\n**Bugfixes**\n\n- Only parse the URL once (\\#2353)\n- Allow Content-Length header to always be overridden (\\#2332)\n- Properly handle files in HTTPDigestAuth (\\#2333)\n- Cap redirect\\_cache size to prevent memory abuse (\\#2299)\n- Fix HTTPDigestAuth handling of redirects after authenticating\n successfully (\\#2253)\n- Fix crash with custom method parameter to Session.request (\\#2317)\n- Fix how Link headers are parsed using the regular expression library\n (\\#2271)\n\n**Documentation**\n\n- Add more references for interlinking (\\#2348)\n- Update CSS for theme (\\#2290)\n- Update width of buttons and sidebar (\\#2289)\n- Replace references of Gittip with Gratipay (\\#2282)\n- Add link to changelog in sidebar (\\#2273)\n\n2.4.3 (2014-10-06)\n------------------\n\n**Bugfixes**\n\n- Unicode URL improvements for Python 2.\n- Re-order JSON param for backwards compat.\n- Automatically defrag authentication schemes from host/pass URIs.\n ([\\#2249](https://github.com/psf/requests/issues/2249))\n\n2.4.2 (2014-10-05)\n------------------\n\n**Improvements**\n\n- FINALLY! Add json parameter for uploads!\n ([\\#2258](https://github.com/psf/requests/pull/2258))\n- Support for bytestring URLs on Python 3.x\n ([\\#2238](https://github.com/psf/requests/pull/2238))\n\n**Bugfixes**\n\n- Avoid getting stuck in a loop\n ([\\#2244](https://github.com/psf/requests/pull/2244))\n- Multiple calls to iter\\* fail with unhelpful error.\n ([\\#2240](https://github.com/psf/requests/issues/2240),\n [\\#2241](https://github.com/psf/requests/issues/2241))\n\n**Documentation**\n\n- Correct redirection introduction\n ([\\#2245](https://github.com/psf/requests/pull/2245/))\n- Added example of how to send multiple files in one request.\n ([\\#2227](https://github.com/psf/requests/pull/2227/))\n- Clarify how to pass a custom set of CAs\n ([\\#2248](https://github.com/psf/requests/pull/2248/))\n\n2.4.1 (2014-09-09)\n------------------\n\n- Now has a \"security\" package extras set,\n `$ pip install requests[security]`\n- Requests will now use Certifi if it is available.\n- Capture and re-raise urllib3 ProtocolError\n- Bugfix for responses that attempt to redirect to themselves forever\n (wtf?).\n\n2.4.0 (2014-08-29)\n------------------\n\n**Behavioral Changes**\n\n- `Connection: keep-alive` header is now sent automatically.\n\n**Improvements**\n\n- Support for connect timeouts! 
Timeout now accepts a tuple (connect,\n read) which is used to set individual connect and read timeouts.\n- Allow copying of PreparedRequests without headers/cookies.\n- Updated bundled urllib3 version.\n- Refactored settings loading from environment -- new\n Session.merge\\_environment\\_settings.\n- Handle socket errors in iter\\_content.\n\n2.3.0 (2014-05-16)\n------------------\n\n**API Changes**\n\n- New `Response` property `is_redirect`, which is true when the\n library could have processed this response as a redirection (whether\n or not it actually did).\n- The `timeout` parameter now affects requests with both `stream=True`\n and `stream=False` equally.\n- The change in v2.0.0 to mandate explicit proxy schemes has been\n reverted. Proxy schemes now default to `http://`.\n- The `CaseInsensitiveDict` used for HTTP headers now behaves like a\n normal dictionary when references as string or viewed in the\n interpreter.\n\n**Bugfixes**\n\n- No longer expose Authorization or Proxy-Authorization headers on\n redirect. Fix CVE-2014-1829 and CVE-2014-1830 respectively.\n- Authorization is re-evaluated each redirect.\n- On redirect, pass url as native strings.\n- Fall-back to autodetected encoding for JSON when Unicode detection\n fails.\n- Headers set to `None` on the `Session` are now correctly not sent.\n- Correctly honor `decode_unicode` even if it wasn't used earlier in\n the same response.\n- Stop advertising `compress` as a supported Content-Encoding.\n- The `Response.history` parameter is now always a list.\n- Many, many `urllib3` bugfixes.\n\n2.2.1 (2014-01-23)\n------------------\n\n**Bugfixes**\n\n- Fixes incorrect parsing of proxy credentials that contain a literal\n or encoded '\\#' character.\n- Assorted urllib3 fixes.\n\n2.2.0 (2014-01-09)\n------------------\n\n**API Changes**\n\n- New exception: `ContentDecodingError`. Raised instead of `urllib3`\n `DecodeError` exceptions.\n\n**Bugfixes**\n\n- Avoid many many exceptions from the buggy implementation of\n `proxy_bypass` on OS X in Python 2.6.\n- Avoid crashing when attempting to get authentication credentials\n from \\~/.netrc when running as a user without a home directory.\n- Use the correct pool size for pools of connections to proxies.\n- Fix iteration of `CookieJar` objects.\n- Ensure that cookies are persisted over redirect.\n- Switch back to using chardet, since it has merged with charade.\n\n2.1.0 (2013-12-05)\n------------------\n\n- Updated CA Bundle, of course.\n- Cookies set on individual Requests through a `Session` (e.g. 
via\n `Session.get()`) are no longer persisted to the `Session`.\n- Clean up connections when we hit problems during chunked upload,\n rather than leaking them.\n- Return connections to the pool when a chunked upload is successful,\n rather than leaking it.\n- Match the HTTPbis recommendation for HTTP 301 redirects.\n- Prevent hanging when using streaming uploads and Digest Auth when a\n 401 is received.\n- Values of headers set by Requests are now always the native string\n type.\n- Fix previously broken SNI support.\n- Fix accessing HTTP proxies using proxy authentication.\n- Unencode HTTP Basic usernames and passwords extracted from URLs.\n- Support for IP address ranges for no\\_proxy environment variable\n- Parse headers correctly when users override the default `Host:`\n header.\n- Avoid munging the URL in case of case-sensitive servers.\n- Looser URL handling for non-HTTP/HTTPS urls.\n- Accept unicode methods in Python 2.6 and 2.7.\n- More resilient cookie handling.\n- Make `Response` objects pickleable.\n- Actually added MD5-sess to Digest Auth instead of pretending to like\n last time.\n- Updated internal urllib3.\n- Fixed @Lukasa's lack of taste.\n\n2.0.1 (2013-10-24)\n------------------\n\n- Updated included CA Bundle with new mistrusts and automated process\n for the future\n- Added MD5-sess to Digest Auth\n- Accept per-file headers in multipart file POST messages.\n- Fixed: Don't send the full URL on CONNECT messages.\n- Fixed: Correctly lowercase a redirect scheme.\n- Fixed: Cookies not persisted when set via functional API.\n- Fixed: Translate urllib3 ProxyError into a requests ProxyError\n derived from ConnectionError.\n- Updated internal urllib3 and chardet.\n\n2.0.0 (2013-09-24)\n------------------\n\n**API Changes:**\n\n- Keys in the Headers dictionary are now native strings on all Python\n versions, i.e. bytestrings on Python 2, unicode on Python 3.\n- Proxy URLs now *must* have an explicit scheme. A `MissingSchema`\n exception will be raised if they don't.\n- Timeouts now apply to read time if `Stream=False`.\n- `RequestException` is now a subclass of `IOError`, not\n `RuntimeError`.\n- Added new method to `PreparedRequest` objects:\n `PreparedRequest.copy()`.\n- Added new method to `Session` objects: `Session.update_request()`.\n This method updates a `Request` object with the data (e.g. cookies)\n stored on the `Session`.\n- Added new method to `Session` objects: `Session.prepare_request()`.\n This method updates and prepares a `Request` object, and returns the\n corresponding `PreparedRequest` object.\n- Added new method to `HTTPAdapter` objects:\n `HTTPAdapter.proxy_headers()`. This should not be called directly,\n but improves the subclass interface.\n- `httplib.IncompleteRead` exceptions caused by incorrect chunked\n encoding will now raise a Requests `ChunkedEncodingError` instead.\n- Invalid percent-escape sequences now cause a Requests `InvalidURL`\n exception to be raised.\n- HTTP 208 no longer uses reason phrase `\"im_used\"`. Correctly uses\n `\"already_reported\"`.\n- HTTP 226 reason added (`\"im_used\"`).\n\n**Bugfixes:**\n\n- Vastly improved proxy support, including the CONNECT verb. 
Special\n thanks to the many contributors who worked towards this improvement.\n- Cookies are now properly managed when 401 authentication responses\n are received.\n- Chunked encoding fixes.\n- Support for mixed case schemes.\n- Better handling of streaming downloads.\n- Retrieve environment proxies from more locations.\n- Minor cookies fixes.\n- Improved redirect behaviour.\n- Improved streaming behaviour, particularly for compressed data.\n- Miscellaneous small Python 3 text encoding bugs.\n- `.netrc` no longer overrides explicit auth.\n- Cookies set by hooks are now correctly persisted on Sessions.\n- Fix problem with cookies that specify port numbers in their host\n field.\n- `BytesIO` can be used to perform streaming uploads.\n- More generous parsing of the `no_proxy` environment variable.\n- Non-string objects can be passed in data values alongside files.\n\n1.2.3 (2013-05-25)\n------------------\n\n- Simple packaging fix\n\n1.2.2 (2013-05-23)\n------------------\n\n- Simple packaging fix\n\n1.2.1 (2013-05-20)\n------------------\n\n- 301 and 302 redirects now change the verb to GET for all verbs, not\n just POST, improving browser compatibility.\n- Python 3.3.2 compatibility\n- Always percent-encode location headers\n- Fix connection adapter matching to be most-specific first\n- new argument to the default connection adapter for passing a block\n argument\n- prevent a KeyError when there's no link headers\n\n1.2.0 (2013-03-31)\n------------------\n\n- Fixed cookies on sessions and on requests\n- Significantly change how hooks are dispatched - hooks now receive\n all the arguments specified by the user when making a request so\n hooks can make a secondary request with the same parameters. This is\n especially necessary for authentication handler authors\n- certifi support was removed\n- Fixed bug where using OAuth 1 with body `signature_type` sent no\n data\n- Major proxy work thanks to @Lukasa including parsing of proxy\n authentication from the proxy url\n- Fix DigestAuth handling too many 401s\n- Update vendored urllib3 to include SSL bug fixes\n- Allow keyword arguments to be passed to `json.loads()` via the\n `Response.json()` method\n- Don't send `Content-Length` header by default on `GET` or `HEAD`\n requests\n- Add `elapsed` attribute to `Response` objects to time how long a\n request took.\n- Fix `RequestsCookieJar`\n- Sessions and Adapters are now picklable, i.e., can be used with the\n multiprocessing library\n- Update charade to version 1.0.3\n\nThe change in how hooks are dispatched will likely cause a great deal of\nissues.\n\n1.1.0 (2013-01-10)\n------------------\n\n- CHUNKED REQUESTS\n- Support for iterable response bodies\n- Assume servers persist redirect params\n- Allow explicit content types to be specified for file data\n- Make merge\\_kwargs case-insensitive when looking up keys\n\n1.0.3 (2012-12-18)\n------------------\n\n- Fix file upload encoding bug\n- Fix cookie behavior\n\n1.0.2 (2012-12-17)\n------------------\n\n- Proxy fix for HTTPAdapter.\n\n1.0.1 (2012-12-17)\n------------------\n\n- Cert verification exception bug.\n- Proxy fix for HTTPAdapter.\n\n1.0.0 (2012-12-17)\n------------------\n\n- Massive Refactor and Simplification\n- Switch to Apache 2.0 license\n- Swappable Connection Adapters\n- Mountable Connection Adapters\n- Mutable ProcessedRequest chain\n- /s/prefetch/stream\n- Removal of all configuration\n- Standard library logging\n- Make Response.json() callable, not property.\n- Usage of new charade project, which provides python 2 and 3\n 
simultaneous chardet.\n- Removal of all hooks except 'response'\n- Removal of all authentication helpers (OAuth, Kerberos)\n\nThis is not a backwards compatible change.\n\n0.14.2 (2012-10-27)\n-------------------\n\n- Improved mime-compatible JSON handling\n- Proxy fixes\n- Path hack fixes\n- Case-Insensitive Content-Encoding headers\n- Support for CJK parameters in form posts\n\n0.14.1 (2012-10-01)\n-------------------\n\n- Python 3.3 Compatibility\n- Simply default accept-encoding\n- Bugfixes\n\n0.14.0 (2012-09-02)\n-------------------\n\n- No more iter\\_content errors if already downloaded.\n\n0.13.9 (2012-08-25)\n-------------------\n\n- Fix for OAuth + POSTs\n- Remove exception eating from dispatch\\_hook\n- General bugfixes\n\n0.13.8 (2012-08-21)\n-------------------\n\n- Incredible Link header support :)\n\n0.13.7 (2012-08-19)\n-------------------\n\n- Support for (key, value) lists everywhere.\n- Digest Authentication improvements.\n- Ensure proxy exclusions work properly.\n- Clearer UnicodeError exceptions.\n- Automatic casting of URLs to strings (fURL and such)\n- Bugfixes.\n\n0.13.6 (2012-08-06)\n-------------------\n\n- Long awaited fix for hanging connections!\n\n0.13.5 (2012-07-27)\n-------------------\n\n- Packaging fix\n\n0.13.4 (2012-07-27)\n-------------------\n\n- GSSAPI/Kerberos authentication!\n- App Engine 2.7 Fixes!\n- Fix leaking connections (from urllib3 update)\n- OAuthlib path hack fix\n- OAuthlib URL parameters fix.\n\n0.13.3 (2012-07-12)\n-------------------\n\n- Use simplejson if available.\n- Do not hide SSLErrors behind Timeouts.\n- Fixed param handling with urls containing fragments.\n- Significantly improved information in User Agent.\n- client certificates are ignored when verify=False\n\n0.13.2 (2012-06-28)\n-------------------\n\n- Zero dependencies (once again)!\n- New: Response.reason\n- Sign querystring parameters in OAuth 1.0\n- Client certificates no longer ignored when verify=False\n- Add openSUSE certificate support\n\n0.13.1 (2012-06-07)\n-------------------\n\n- Allow passing a file or file-like object as data.\n- Allow hooks to return responses that indicate errors.\n- Fix Response.text and Response.json for body-less responses.\n\n0.13.0 (2012-05-29)\n-------------------\n\n- Removal of Requests.async in favor of\n [grequests](https://github.com/kennethreitz/grequests)\n- Allow disabling of cookie persistence.\n- New implementation of safe\\_mode\n- cookies.get now supports default argument\n- Session cookies not saved when Session.request is called with\n return\\_response=False\n- Env: no\\_proxy support.\n- RequestsCookieJar improvements.\n- Various bug fixes.\n\n0.12.1 (2012-05-08)\n-------------------\n\n- New `Response.json` property.\n- Ability to add string file uploads.\n- Fix out-of-range issue with iter\\_lines.\n- Fix iter\\_content default size.\n- Fix POST redirects containing files.\n\n0.12.0 (2012-05-02)\n-------------------\n\n- EXPERIMENTAL OAUTH SUPPORT!\n- Proper CookieJar-backed cookies interface with awesome dict-like\n interface.\n- Speed fix for non-iterated content chunks.\n- Move `pre_request` to a more usable place.\n- New `pre_send` hook.\n- Lazily encode data, params, files.\n- Load system Certificate Bundle if `certify` isn't available.\n- Cleanups, fixes.\n\n0.11.2 (2012-04-22)\n-------------------\n\n- Attempt to use the OS's certificate bundle if `certifi` isn't\n available.\n- Infinite digest auth redirect fix.\n- Multi-part file upload improvements.\n- Fix decoding of invalid %encodings in URLs.\n- If there 
is no content in a response don't throw an error the second\n time that content is attempted to be read.\n- Upload data on redirects.\n\n0.11.1 (2012-03-30)\n-------------------\n\n- POST redirects now break RFC to do what browsers do: Follow up with\n a GET.\n- New `strict_mode` configuration to disable new redirect behavior.\n\n0.11.0 (2012-03-14)\n-------------------\n\n- Private SSL Certificate support\n- Remove select.poll from Gevent monkeypatching\n- Remove redundant generator for chunked transfer encoding\n- Fix: Response.ok raises Timeout Exception in safe\\_mode\n\n0.10.8 (2012-03-09)\n-------------------\n\n- Generate chunked ValueError fix\n- Proxy configuration by environment variables\n- Simplification of iter\\_lines.\n- New trust\\_env configuration for disabling system/environment hints.\n- Suppress cookie errors.\n\n0.10.7 (2012-03-07)\n-------------------\n\n- encode\\_uri = False\n\n0.10.6 (2012-02-25)\n-------------------\n\n- Allow '=' in cookies.\n\n0.10.5 (2012-02-25)\n-------------------\n\n- Response body with 0 content-length fix.\n- New async.imap.\n- Don't fail on netrc.\n\n0.10.4 (2012-02-20)\n-------------------\n\n- Honor netrc.\n\n0.10.3 (2012-02-20)\n-------------------\n\n- HEAD requests don't follow redirects anymore.\n- raise\\_for\\_status() doesn't raise for 3xx anymore.\n- Make Session objects picklable.\n- ValueError for invalid schema URLs.\n\n0.10.2 (2012-01-15)\n-------------------\n\n- Vastly improved URL quoting.\n- Additional allowed cookie key values.\n- Attempted fix for \"Too many open files\" Error\n- Replace unicode errors on first pass, no need for second pass.\n- Append '/' to bare-domain urls before query insertion.\n- Exceptions now inherit from RuntimeError.\n- Binary uploads + auth fix.\n- Bugfixes.\n\n0.10.1 (2012-01-23)\n-------------------\n\n- PYTHON 3 SUPPORT!\n- Dropped 2.5 Support. (*Backwards Incompatible*)\n\n0.10.0 (2012-01-21)\n-------------------\n\n- `Response.content` is now bytes-only. (*Backwards Incompatible*)\n- New `Response.text` is unicode-only.\n- If no `Response.encoding` is specified and `chardet` is available,\n `Response.text` will guess an encoding.\n- Default to ISO-8859-1 (Western) encoding for \"text\" subtypes.\n- Removal of decode\\_unicode. 
(*Backwards Incompatible*)\n- New multiple-hooks system.\n- New `Response.register_hook` for registering hooks within the\n pipeline.\n- `Response.url` is now Unicode.\n\n0.9.3 (2012-01-18)\n------------------\n\n- SSL verify=False bugfix (apparent on windows machines).\n\n0.9.2 (2012-01-18)\n------------------\n\n- Asynchronous async.send method.\n- Support for proper chunk streams with boundaries.\n- session argument for Session classes.\n- Print entire hook tracebacks, not just exception instance.\n- Fix response.iter\\_lines from pending next line.\n- Fix but in HTTP-digest auth w/ URI having query strings.\n- Fix in Event Hooks section.\n- Urllib3 update.\n\n0.9.1 (2012-01-06)\n------------------\n\n- danger\\_mode for automatic Response.raise\\_for\\_status()\n- Response.iter\\_lines refactor\n\n0.9.0 (2011-12-28)\n------------------\n\n- verify ssl is default.\n\n0.8.9 (2011-12-28)\n------------------\n\n- Packaging fix.\n\n0.8.8 (2011-12-28)\n------------------\n\n- SSL CERT VERIFICATION!\n- Release of Cerifi: Mozilla's cert list.\n- New 'verify' argument for SSL requests.\n- Urllib3 update.\n\n0.8.7 (2011-12-24)\n------------------\n\n- iter\\_lines last-line truncation fix\n- Force safe\\_mode for async requests\n- Handle safe\\_mode exceptions more consistently\n- Fix iteration on null responses in safe\\_mode\n\n0.8.6 (2011-12-18)\n------------------\n\n- Socket timeout fixes.\n- Proxy Authorization support.\n\n0.8.5 (2011-12-14)\n------------------\n\n- Response.iter\\_lines!\n\n0.8.4 (2011-12-11)\n------------------\n\n- Prefetch bugfix.\n- Added license to installed version.\n\n0.8.3 (2011-11-27)\n------------------\n\n- Converted auth system to use simpler callable objects.\n- New session parameter to API methods.\n- Display full URL while logging.\n\n0.8.2 (2011-11-19)\n------------------\n\n- New Unicode decoding system, based on over-ridable\n Response.encoding.\n- Proper URL slash-quote handling.\n- Cookies with `[`, `]`, and `_` allowed.\n\n0.8.1 (2011-11-15)\n------------------\n\n- URL Request path fix\n- Proxy fix.\n- Timeouts fix.\n\n0.8.0 (2011-11-13)\n------------------\n\n- Keep-alive support!\n- Complete removal of Urllib2\n- Complete removal of Poster\n- Complete removal of CookieJars\n- New ConnectionError raising\n- Safe\\_mode for error catching\n- prefetch parameter for request methods\n- OPTION method\n- Async pool size throttling\n- File uploads send real names\n- Vendored in urllib3\n\n0.7.6 (2011-11-07)\n------------------\n\n- Digest authentication bugfix (attach query data to path)\n\n0.7.5 (2011-11-04)\n------------------\n\n- Response.content = None if there was an invalid response.\n- Redirection auth handling.\n\n0.7.4 (2011-10-26)\n------------------\n\n- Session Hooks fix.\n\n0.7.3 (2011-10-23)\n------------------\n\n- Digest Auth fix.\n\n0.7.2 (2011-10-23)\n------------------\n\n- PATCH Fix.\n\n0.7.1 (2011-10-23)\n------------------\n\n- Move away from urllib2 authentication handling.\n- Fully Remove AuthManager, AuthObject, &c.\n- New tuple-based auth system with handler callbacks.\n\n0.7.0 (2011-10-22)\n------------------\n\n- Sessions are now the primary interface.\n- Deprecated InvalidMethodException.\n- PATCH fix.\n- New config system (no more global settings).\n\n0.6.6 (2011-10-19)\n------------------\n\n- Session parameter bugfix (params merging).\n\n0.6.5 (2011-10-18)\n------------------\n\n- Offline (fast) test suite.\n- Session dictionary argument merging.\n\n0.6.4 (2011-10-13)\n------------------\n\n- Automatic decoding of unicode, 
based on HTTP Headers.\n- New `decode_unicode` setting.\n- Removal of `r.read/close` methods.\n- New `r.faw` interface for advanced response usage.\\*\n- Automatic expansion of parameterized headers.\n\n0.6.3 (2011-10-13)\n------------------\n\n- Beautiful `requests.async` module, for making async requests w/\n gevent.\n\n0.6.2 (2011-10-09)\n------------------\n\n- GET/HEAD obeys allow\\_redirects=False.\n\n0.6.1 (2011-08-20)\n------------------\n\n- Enhanced status codes experience `\\o/`\n- Set a maximum number of redirects (`settings.max_redirects`)\n- Full Unicode URL support\n- Support for protocol-less redirects.\n- Allow for arbitrary request types.\n- Bugfixes\n\n0.6.0 (2011-08-17)\n------------------\n\n- New callback hook system\n- New persistent sessions object and context manager\n- Transparent Dict-cookie handling\n- Status code reference object\n- Removed Response.cached\n- Added Response.request\n- All args are kwargs\n- Relative redirect support\n- HTTPError handling improvements\n- Improved https testing\n- Bugfixes\n\n0.5.1 (2011-07-23)\n------------------\n\n- International Domain Name Support!\n- Access headers without fetching entire body (`read()`)\n- Use lists as dicts for parameters\n- Add Forced Basic Authentication\n- Forced Basic is default authentication type\n- `python-requests.org` default User-Agent header\n- CaseInsensitiveDict lower-case caching\n- Response.history bugfix\n\n0.5.0 (2011-06-21)\n------------------\n\n- PATCH Support\n- Support for Proxies\n- HTTPBin Test Suite\n- Redirect Fixes\n- settings.verbose stream writing\n- Querystrings for all methods\n- URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as\n explicitly raised\n `r.requests.get('hwe://blah'); r.raise_for_status()`\n\n0.4.1 (2011-05-22)\n------------------\n\n- Improved Redirection Handling\n- New 'allow\\_redirects' param for following non-GET/HEAD Redirects\n- Settings module refactoring\n\n0.4.0 (2011-05-15)\n------------------\n\n- Response.history: list of redirected responses\n- Case-Insensitive Header Dictionaries!\n- Unicode URLs\n\n0.3.4 (2011-05-14)\n------------------\n\n- Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)\n- Internal Refactor\n- Bytes data upload Bugfix\n\n0.3.3 (2011-05-12)\n------------------\n\n- Request timeouts\n- Unicode url-encoded data\n- Settings context manager and module\n\n0.3.2 (2011-04-15)\n------------------\n\n- Automatic Decompression of GZip Encoded Content\n- AutoAuth Support for Tupled HTTP Auth\n\n0.3.1 (2011-04-01)\n------------------\n\n- Cookie Changes\n- Response.read()\n- Poster fix\n\n0.3.0 (2011-02-25)\n------------------\n\n- Automatic Authentication API Change\n- Smarter Query URL Parameterization\n- Allow file uploads and POST data together\n-\n\n New Authentication Manager System\n\n : - Simpler Basic HTTP System\n - Supports all built-in urllib2 Auths\n - Allows for custom Auth Handlers\n\n0.2.4 (2011-02-19)\n------------------\n\n- Python 2.5 Support\n- PyPy-c v1.4 Support\n- Auto-Authentication tests\n- Improved Request object constructor\n\n0.2.3 (2011-02-15)\n------------------\n\n-\n\n New HTTPHandling Methods\n\n : - Response.\\_\\_nonzero\\_\\_ (false if bad HTTP Status)\n - Response.ok (True if expected HTTP Status)\n - Response.error (Logged HTTPError if bad HTTP Status)\n - Response.raise\\_for\\_status() (Raises stored HTTPError)\n\n0.2.2 (2011-02-14)\n------------------\n\n- Still handles request in the event of an HTTPError. 
(Issue \\#2)\n- Eventlet and Gevent Monkeypatch support.\n- Cookie Support (Issue \\#1)\n\n0.2.1 (2011-02-14)\n------------------\n\n- Added file attribute to POST and PUT requests for multipart-encode\n file uploads.\n- Added Request.url attribute for context and redirects\n\n0.2.0 (2011-02-14)\n------------------\n\n- Birth!\n\n0.0.1 (2011-02-13)\n------------------\n\n- Frustration\n- Conception\n"},{"col":4,"comment":"null","endLoc":254,"header":"def get_name(self) -> str","id":1142,"name":"get_name","nodeType":"Function","startLoc":254,"text":"def get_name(self) -> str: ... # undocumented"},{"col":4,"comment":"null","endLoc":255,"header":"def set_name(self, name: str) -> None","id":1143,"name":"set_name","nodeType":"Function","startLoc":255,"text":"def set_name(self, name: str) -> None: ... # undocumented"},{"col":4,"comment":"null","endLoc":256,"header":"def createLock(self) -> None","id":1144,"name":"createLock","nodeType":"Function","startLoc":256,"text":"def createLock(self) -> None: ..."},{"col":4,"comment":"null","endLoc":257,"header":"def acquire(self) -> None","id":1145,"name":"acquire","nodeType":"Function","startLoc":257,"text":"def acquire(self) -> None: ..."},{"col":4,"comment":"null","endLoc":258,"header":"def release(self) -> None","id":1146,"name":"release","nodeType":"Function","startLoc":258,"text":"def release(self) -> None: ..."},{"col":4,"comment":"null","endLoc":259,"header":"def setLevel(self, level: _Level) -> None","id":1147,"name":"setLevel","nodeType":"Function","startLoc":259,"text":"def setLevel(self, level: _Level) -> None: ..."},{"col":4,"comment":"null","endLoc":260,"header":"def setFormatter(self, fmt: Formatter | None) -> None","id":1148,"name":"setFormatter","nodeType":"Function","startLoc":260,"text":"def setFormatter(self, fmt: Formatter | None) -> None: ..."},{"col":4,"comment":"null","endLoc":261,"header":"def flush(self) -> None","id":1149,"name":"flush","nodeType":"Function","startLoc":261,"text":"def flush(self) -> None: ..."},{"col":4,"comment":"null","endLoc":262,"header":"def close(self) -> None","id":1150,"name":"close","nodeType":"Function","startLoc":262,"text":"def close(self) -> None: ..."},{"col":4,"comment":"null","endLoc":263,"header":"def handle(self, record: LogRecord) -> bool","id":1151,"name":"handle","nodeType":"Function","startLoc":263,"text":"def handle(self, record: LogRecord) -> bool: ..."},{"col":4,"comment":"null","endLoc":264,"header":"def handleError(self, record: LogRecord) -> None","id":1152,"name":"handleError","nodeType":"Function","startLoc":264,"text":"def handleError(self, record: LogRecord) -> None: ..."},{"col":4,"comment":"null","endLoc":265,"header":"def format(self, record: LogRecord) -> str","id":1153,"name":"format","nodeType":"Function","startLoc":265,"text":"def format(self, record: LogRecord) -> str: ..."},{"col":4,"comment":"null","endLoc":266,"header":"def emit(self, record: LogRecord) -> None","id":1154,"name":"emit","nodeType":"Function","startLoc":266,"text":"def emit(self, record: LogRecord) -> None: ..."},{"attributeType":"int","col":4,"comment":"null","endLoc":249,"id":1155,"name":"level","nodeType":"Attribute","startLoc":249,"text":"level"},{"attributeType":"Formatter | None","col":4,"comment":"null","endLoc":250,"id":1156,"name":"formatter","nodeType":"Attribute","startLoc":250,"text":"formatter"},{"id":1157,"name":"AUTHORS.rst","nodeType":"TextFile","path":"","text":"Requests was lovingly created by Kenneth Reitz.\n\nKeepers of the Crystals\n```````````````````````\n\n- Nate Prewitt 
`@nateprewitt `_.\n- Seth M. Larson `@sethmlarson `_.\n\nPrevious Keepers of Crystals\n````````````````````````````\n- Kenneth Reitz `@ken-reitz `_, reluctant Keeper of the Master Crystal.\n- Cory Benfield `@lukasa `_\n- Ian Cordasco `@sigmavirus24 `_.\n\n\nPatches and Suggestions\n```````````````````````\n\n- Various Pocoo Members\n- Chris Adams\n- Flavio Percoco Premoli\n- Dj Gilcrease\n- Justin Murphy\n- Rob Madole\n- Aram Dulyan\n- Johannes Gorset\n- 村山めがね (Megane Murayama)\n- James Rowe\n- Daniel Schauenberg\n- Zbigniew Siciarz\n- Daniele Tricoli 'Eriol'\n- Richard Boulton\n- Miguel Olivares \n- Alberto Paro\n- Jérémy Bethmont\n- 潘旭 (Xu Pan)\n- Tamás Gulácsi\n- Rubén Abad\n- Peter Manser\n- Jeremy Selier\n- Jens Diemer\n- Alex (`@alopatin `_)\n- Tom Hogans \n- Armin Ronacher\n- Shrikant Sharat Kandula\n- Mikko Ohtamaa\n- Den Shabalin\n- Daniel Miller \n- Alejandro Giacometti\n- Rick Mak\n- Johan Bergström\n- Josselin Jacquard\n- Travis N. Vaught\n- Fredrik Möllerstrand\n- Daniel Hengeveld\n- Dan Head\n- Bruno Renié\n- David Fischer\n- Joseph McCullough\n- Juergen Brendel\n- Juan Riaza\n- Ryan Kelly\n- Rolando Espinoza La fuente\n- Robert Gieseke\n- Idan Gazit\n- Ed Summers\n- Chris Van Horne\n- Christopher Davis\n- Ori Livneh\n- Jason Emerick\n- Bryan Helmig\n- Jonas Obrist\n- Lucian Ursu\n- Tom Moertel\n- Frank Kumro Jr\n- Chase Sterling\n- Marty Alchin\n- takluyver\n- Ben Toews (`@mastahyeti `_)\n- David Kemp\n- Brendon Crawford\n- Denis (`@Telofy `_)\n- Matt Giuca\n- Adam Tauber\n- Honza Javorek\n- Brendan Maguire \n- Chris Dary\n- Danver Braganza \n- Max Countryman\n- Nick Chadwick\n- Jonathan Drosdeck\n- Jiri Machalek\n- Steve Pulec\n- Michael Kelly\n- Michael Newman \n- Jonty Wareing \n- Shivaram Lingamneni\n- Miguel Turner\n- Rohan Jain (`@crodjer `_)\n- Justin Barber \n- Roman Haritonov (`@reclosedev `_)\n- Josh Imhoff \n- Arup Malakar \n- Danilo Bargen (`@dbrgn `_)\n- Torsten Landschoff\n- Michael Holler (`@apotheos `_)\n- Timnit Gebru\n- Sarah Gonzalez\n- Victoria Mo\n- Leila Muhtasib\n- Matthias Rahlf \n- Jakub Roztocil \n- Rhys Elsmore\n- André Graf (`@dergraf `_)\n- Stephen Zhuang (`@everbird `_)\n- Martijn Pieters\n- Jonatan Heyman\n- David Bonner (`@rascalking `_)\n- Vinod Chandru\n- Johnny Goodnow \n- Denis Ryzhkov \n- Wilfred Hughes \n- Dmitry Medvinsky \n- Bryce Boe (`@bboe `_)\n- Colin Dunklau (`@cdunklau `_)\n- Bob Carroll (`@rcarz `_)\n- Hugo Osvaldo Barrera (`@hobarrera `_)\n- Łukasz Langa \n- Dave Shawley \n- James Clarke (`@jam `_)\n- Kevin Burke \n- Flavio Curella\n- David Pursehouse (`@dpursehouse `_)\n- Jon Parise (`@jparise `_)\n- Alexander Karpinsky (`@homm86 `_)\n- Marc Schlaich (`@schlamar `_)\n- Park Ilsu (`@daftshady `_)\n- Matt Spitz (`@mattspitz `_)\n- Vikram Oberoi (`@voberoi `_)\n- Can Ibanoglu (`@canibanoglu `_)\n- Thomas Weißschuh (`@t-8ch `_)\n- Jayson Vantuyl \n- Pengfei.X \n- Kamil Madac \n- Michael Becker (`@beckerfuffle `_)\n- Erik Wickstrom (`@erikwickstrom `_)\n- Константин Подшумок (`@podshumok `_)\n- Ben Bass (`@codedstructure `_)\n- Jonathan Wong (`@ContinuousFunction `_)\n- Martin Jul (`@mjul `_)\n- Joe Alcorn (`@buttscicles `_)\n- Syed Suhail Ahmed (`@syedsuhail `_)\n- Scott Sadler (`@ssadler `_)\n- Arthur Darcet (`@arthurdarcet `_)\n- Ulrich Petri (`@ulope `_)\n- Muhammad Yasoob Ullah Khalid (`@yasoob `_)\n- Paul van der Linden (`@pvanderlinden `_)\n- Colin Dickson (`@colindickson `_)\n- Smiley Barry (`@smiley `_)\n- Shagun Sodhani (`@shagunsodhani `_)\n- Robin Linderborg (`@vienno `_)\n- Brian Samek (`@bsamek `_)\n- Dmitry Dygalo 
(`@Stranger6667 `_)\n- piotrjurkiewicz\n- Jesse Shapiro (`@haikuginger `_)\n- Nate Prewitt (`@nateprewitt `_)\n- Maik Himstedt\n- Michael Hunsinger\n- Brian Bamsch (`@bbamsch `_)\n- Om Prakash Kumar (`@iamprakashom `_)\n- Philipp Konrad (`@gardiac2002 `_)\n- Hussain Tamboli (`@hussaintamboli `_)\n- Casey Davidson (`@davidsoncasey `_)\n- Andrii Soldatenko (`@a_soldatenko `_)\n- Moinuddin Quadri (`@moin18 `_)\n- Matt Kohl (`@mattkohl `_)\n- Jonathan Vanasco (`@jvanasco `_)\n- David Fontenot (`@davidfontenot `_)\n- Shmuel Amar (`@shmuelamar `_)\n- Gary Wu (`@garywu `_)\n- Ryan Pineo (`@ryanpineo `_)\n- Ed Morley (`@edmorley `_)\n- Matt Liu (`@mlcrazy `_)\n- Taylor Hoff (`@PrimordialHelios `_)\n- Arthur Vigil (`@ahvigil `_)\n- Nehal J Wani (`@nehaljwani `_)\n- Demetrios Bairaktaris (`@DemetriosBairaktaris `_)\n- Darren Dormer (`@ddormer `_)\n- Rajiv Mayani (`@mayani `_)\n- Antti Kaihola (`@akaihola `_)\n- \"Dull Bananas\" (`@dullbananas `_)\n- Alessio Izzo (`@aless10 `_)\n- Sylvain Marié (`@smarie `_)\n- Hod Bin Noon (`@hodbn `_)\n"},{"attributeType":"Lock | None","col":4,"comment":"null","endLoc":251,"id":1158,"name":"lock","nodeType":"Attribute","startLoc":251,"text":"lock"},{"fileName":"structures.py","filePath":"requests","id":1159,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.structures\n~~~~~~~~~~~~~~~~~~~\n\nData structures that power Requests.\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom .compat import Mapping, MutableMapping\n\n\nclass CaseInsensitiveDict(MutableMapping):\n \"\"\"A case-insensitive ``dict``-like object.\n\n Implements all methods and operations of\n ``MutableMapping`` as well as dict's ``copy``. Also\n provides ``lower_items``.\n\n All keys are expected to be strings. The structure remembers the\n case of the last key to be set, and ``iter(instance)``,\n ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``\n will contain case-sensitive keys. 
However, querying and contains\n testing is case insensitive::\n\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n cid['aCCEPT'] == 'application/json' # True\n list(cid) == ['Accept'] # True\n\n For example, ``headers['content-encoding']`` will return the\n value of a ``'Content-Encoding'`` response header, regardless\n of how the header name was originally stored.\n\n If the constructor, ``.update``, or equality comparison\n operations are given keys that have equal ``.lower()``s, the\n behavior is undefined.\n \"\"\"\n\n def __init__(self, data=None, **kwargs):\n self._store = OrderedDict()\n if data is None:\n data = {}\n self.update(data, **kwargs)\n\n def __setitem__(self, key, value):\n # Use the lowercased key for lookups, but store the actual\n # key alongside the value.\n self._store[key.lower()] = (key, value)\n\n def __getitem__(self, key):\n return self._store[key.lower()][1]\n\n def __delitem__(self, key):\n del self._store[key.lower()]\n\n def __iter__(self):\n return (casedkey for casedkey, mappedvalue in self._store.values())\n\n def __len__(self):\n return len(self._store)\n\n def lower_items(self):\n \"\"\"Like iteritems(), but with all lowercase keys.\"\"\"\n return (\n (lowerkey, keyval[1])\n for (lowerkey, keyval)\n in self._store.items()\n )\n\n def __eq__(self, other):\n if isinstance(other, Mapping):\n other = CaseInsensitiveDict(other)\n else:\n return NotImplemented\n # Compare insensitively\n return dict(self.lower_items()) == dict(other.lower_items())\n\n # Copy is required\n def copy(self):\n return CaseInsensitiveDict(self._store.values())\n\n def __repr__(self):\n return str(dict(self.items()))\n\n\nclass LookupDict(dict):\n \"\"\"Dictionary lookup object.\"\"\"\n\n def __init__(self, name=None):\n self.name = name\n super(LookupDict, self).__init__()\n\n def __repr__(self):\n return '' % (self.name)\n\n def __getitem__(self, key):\n # We allow fall-through here, so values default to None\n\n return self.__dict__.get(key, None)\n\n def get(self, key, default=None):\n return self.__dict__.get(key, default)\n"},{"col":4,"comment":"Ensure that we handle partially consumed file like objects.","endLoc":49,"header":"def test_super_len_correctly_calculates_len_of_partially_read_file(self)","id":1160,"name":"test_super_len_correctly_calculates_len_of_partially_read_file","nodeType":"Function","startLoc":45,"text":"def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0"},{"col":0,"comment":"","endLoc":8,"header":"structures.py#","id":1161,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.structures\n~~~~~~~~~~~~~~~~~~~\n\nData structures that power Requests.\n\"\"\""},{"attributeType":"str | None","col":4,"comment":"null","endLoc":252,"id":1162,"name":"name","nodeType":"Attribute","startLoc":252,"text":"name"},{"id":1163,"name":".gitignore","nodeType":"TextFile","path":"","text":".coverage\nMANIFEST\ncoverage.xml\nnosetests.xml\njunit-report.xml\npylint.txt\ntoy.py\n.cache/\ncover/\nbuild/\ndocs/_build\nrequests.egg-info/\n*.pyc\n*.swp\n*.egg\nenv/\n.venv/\n.eggs/\n.tox/\n.pytest_cache/\n.vscode/\n.eggs/\n\n.workon\n\n# in case you work with IntelliJ/PyCharm\n.idea\n*.iml\n.python-version\n\n\nt.py\n\nt2.py\ndist\n\n/.mypy_cache/\n"},{"col":4,"comment":"Builds a :class:`Response ` object from a urllib3\n response. 
This should not be called from user code, and is only exposed\n for use when subclassing the\n :class:`HTTPAdapter `\n\n :param req: The :class:`PreparedRequest ` used to generate the response.\n :param resp: The urllib3 response object.\n :rtype: requests.Response\n ","endLoc":291,"header":"def build_response(self, req, resp)","id":1164,"name":"build_response","nodeType":"Function","startLoc":256,"text":"def build_response(self, req, resp):\n \"\"\"Builds a :class:`Response ` object from a urllib3\n response. This should not be called from user code, and is only exposed\n for use when subclassing the\n :class:`HTTPAdapter `\n\n :param req: The :class:`PreparedRequest ` used to generate the response.\n :param resp: The urllib3 response object.\n :rtype: requests.Response\n \"\"\"\n response = Response()\n\n # Fallback to None if there's no status_code, for whatever reason.\n response.status_code = getattr(resp, 'status', None)\n\n # Make headers case-insensitive.\n response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))\n\n # Set encoding.\n response.encoding = get_encoding_from_headers(response.headers)\n response.raw = resp\n response.reason = response.raw.reason\n\n if isinstance(req.url, bytes):\n response.url = req.url.decode('utf-8')\n else:\n response.url = req.url\n\n # Add new cookies from the server.\n extract_cookies_to_jar(response.cookies, req, resp)\n\n # Give the Response some context.\n response.request = req\n response.connection = self\n\n return response"},{"attributeType":"list","col":0,"comment":"null","endLoc":271,"id":1165,"name":"_schemes_by_var_prefix","nodeType":"Attribute","startLoc":271,"text":"_schemes_by_var_prefix"},{"attributeType":"list","col":0,"comment":"null","endLoc":277,"id":1166,"name":"_proxy_combos","nodeType":"Attribute","startLoc":277,"text":"_proxy_combos"},{"attributeType":"str","col":4,"comment":"null","endLoc":278,"id":1167,"name":"prefix","nodeType":"Attribute","startLoc":278,"text":"prefix"},{"attributeType":"list","col":12,"comment":"null","endLoc":278,"id":1168,"name":"schemes","nodeType":"Attribute","startLoc":278,"text":"schemes"},{"attributeType":"str","col":8,"comment":"null","endLoc":279,"id":1169,"name":"scheme","nodeType":"Attribute","startLoc":279,"text":"scheme"},{"attributeType":"str","col":44,"comment":"null","endLoc":282,"id":1170,"name":"var","nodeType":"Attribute","startLoc":282,"text":"var"},{"attributeType":"str","col":49,"comment":"null","endLoc":282,"id":1171,"name":"scheme","nodeType":"Attribute","startLoc":282,"text":"scheme"},{"col":0,"comment":"null","endLoc":85,"header":"def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version)","id":1172,"name":"check_compatibility","nodeType":"Function","startLoc":57,"text":"def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):\n urllib3_version = urllib3_version.split('.')\n assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.\n\n # Sometimes, urllib3 only reports its version as 16.1.\n if len(urllib3_version) == 2:\n urllib3_version.append('0')\n\n # Check urllib3 for compatibility.\n major, minor, patch = urllib3_version # noqa: F811\n major, minor, patch = int(major), int(minor), int(patch)\n # urllib3 >= 1.21.1, <= 1.26\n assert major == 1\n assert minor >= 21\n assert minor <= 26\n\n # Check charset_normalizer for compatibility.\n if chardet_version:\n major, minor, patch = chardet_version.split('.')[:3]\n major, minor, patch = int(major), int(minor), int(patch)\n # 
chardet_version >= 3.0.2, < 5.0.0\n assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)\n elif charset_normalizer_version:\n major, minor, patch = charset_normalizer_version.split('.')[:3]\n major, minor, patch = int(major), int(minor), int(patch)\n # charset_normalizer >= 2.0.0 < 3.0.0\n assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)\n else:\n raise Exception(\"You need either charset_normalizer or chardet installed\")"},{"col":0,"comment":"","endLoc":1,"header":"packages.py#","id":1173,"name":"","nodeType":"Function","startLoc":1,"text":"try:\n import chardet\nexcept ImportError:\n import charset_normalizer as chardet\n import warnings\n\n warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')\n\nfor package in ('urllib3', 'idna'):\n locals()[package] = __import__(package)\n # This traversal is apparently necessary such that the identities are\n # preserved (requests.packages.urllib3.* is urllib3.*)\n for mod in list(sys.modules):\n if mod == package or mod.startswith(package + '.'):\n sys.modules['requests.packages.' + mod] = sys.modules[mod]\n\ntarget = chardet.__name__\n\nfor mod in list(sys.modules):\n if mod == target or mod.startswith(target + '.'):\n sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]"},{"fileName":"help.py","filePath":"requests","id":1175,"nodeType":"File","text":"\"\"\"Module containing bug report helper(s).\"\"\"\nfrom __future__ import print_function\n\nimport json\nimport platform\nimport sys\nimport ssl\n\nimport idna\nimport urllib3\n\nfrom . import __version__ as requests_version\n\ntry:\n import charset_normalizer\nexcept ImportError:\n charset_normalizer = None\n\ntry:\n import chardet\nexcept ImportError:\n chardet = None\n\ntry:\n from urllib3.contrib import pyopenssl\nexcept ImportError:\n pyopenssl = None\n OpenSSL = None\n cryptography = None\nelse:\n import OpenSSL\n import cryptography\n\n\ndef _implementation():\n \"\"\"Return a dict with the Python implementation and version.\n\n Provide both the name and the version of the Python implementation\n currently running. For example, on CPython 2.7.5 it will return\n {'name': 'CPython', 'version': '2.7.5'}.\n\n This function works best on CPython and PyPy: in particular, it probably\n doesn't work for Jython or IronPython. 
Future investigation should be done\n to work out the correct shape of the code for those platforms.\n \"\"\"\n implementation = platform.python_implementation()\n\n if implementation == 'CPython':\n implementation_version = platform.python_version()\n elif implementation == 'PyPy':\n implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,\n sys.pypy_version_info.minor,\n sys.pypy_version_info.micro)\n if sys.pypy_version_info.releaselevel != 'final':\n implementation_version = ''.join([\n implementation_version, sys.pypy_version_info.releaselevel\n ])\n elif implementation == 'Jython':\n implementation_version = platform.python_version() # Complete Guess\n elif implementation == 'IronPython':\n implementation_version = platform.python_version() # Complete Guess\n else:\n implementation_version = 'Unknown'\n\n return {'name': implementation, 'version': implementation_version}\n\n\ndef info():\n \"\"\"Generate information for a bug report.\"\"\"\n try:\n platform_info = {\n 'system': platform.system(),\n 'release': platform.release(),\n }\n except IOError:\n platform_info = {\n 'system': 'Unknown',\n 'release': 'Unknown',\n }\n\n implementation_info = _implementation()\n urllib3_info = {'version': urllib3.__version__}\n charset_normalizer_info = {'version': None}\n chardet_info = {'version': None}\n if charset_normalizer:\n charset_normalizer_info = {'version': charset_normalizer.__version__}\n if chardet:\n chardet_info = {'version': chardet.__version__}\n\n pyopenssl_info = {\n 'version': None,\n 'openssl_version': '',\n }\n if OpenSSL:\n pyopenssl_info = {\n 'version': OpenSSL.__version__,\n 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,\n }\n cryptography_info = {\n 'version': getattr(cryptography, '__version__', ''),\n }\n idna_info = {\n 'version': getattr(idna, '__version__', ''),\n }\n\n system_ssl = ssl.OPENSSL_VERSION_NUMBER\n system_ssl_info = {\n 'version': '%x' % system_ssl if system_ssl is not None else ''\n }\n\n return {\n 'platform': platform_info,\n 'implementation': implementation_info,\n 'system_ssl': system_ssl_info,\n 'using_pyopenssl': pyopenssl is not None,\n 'using_charset_normalizer': chardet is None,\n 'pyOpenSSL': pyopenssl_info,\n 'urllib3': urllib3_info,\n 'chardet': chardet_info,\n 'charset_normalizer': charset_normalizer_info,\n 'cryptography': cryptography_info,\n 'idna': idna_info,\n 'requests': {\n 'version': requests_version,\n },\n }\n\n\ndef main():\n \"\"\"Pretty-print the bug information as JSON.\"\"\"\n print(json.dumps(info(), sort_keys=True, indent=2))\n\n\nif __name__ == '__main__':\n main()\n"},{"col":0,"comment":"","endLoc":3,"header":"test_lowlevel.py#","id":1178,"name":"","nodeType":"Function","startLoc":3,"text":"_schemes_by_var_prefix = [\n ('http', ['http']),\n ('https', ['https']),\n ('all', ['http', 'https']),\n]\n\n_proxy_combos = []\n\nfor prefix, schemes in _schemes_by_var_prefix:\n for scheme in schemes:\n _proxy_combos.append((\"{}_proxy\".format(prefix), scheme))\n\n_proxy_combos += [(var.upper(), scheme) for var, scheme in _proxy_combos]"},{"col":4,"comment":"Returns a urllib3 connection for the given URL. 
This should not be\n called from user code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param url: The URL to connect to.\n :param proxies: (optional) A Requests-style dictionary of proxies used on this request.\n :rtype: urllib3.ConnectionPool\n ","endLoc":318,"header":"def get_connection(self, url, proxies=None)","id":1179,"name":"get_connection","nodeType":"Function","startLoc":293,"text":"def get_connection(self, url, proxies=None):\n \"\"\"Returns a urllib3 connection for the given URL. This should not be\n called from user code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param url: The URL to connect to.\n :param proxies: (optional) A Requests-style dictionary of proxies used on this request.\n :rtype: urllib3.ConnectionPool\n \"\"\"\n proxy = select_proxy(url, proxies)\n\n if proxy:\n proxy = prepend_scheme_if_needed(proxy, 'http')\n proxy_url = parse_url(proxy)\n if not proxy_url.host:\n raise InvalidProxyURL(\"Please check proxy URL. It is malformed\"\n \" and could be missing the host.\")\n proxy_manager = self.proxy_manager_for(proxy)\n conn = proxy_manager.connection_from_url(url)\n else:\n # Only scheme should be lower case\n parsed = urlparse(url)\n url = parsed.geturl()\n conn = self.poolmanager.connection_from_url(url)\n\n return conn"},{"col":4,"comment":"If tell() raises errors, assume the cursor is at position zero.","endLoc":61,"header":"@pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error)","id":1180,"name":"test_super_len_handles_files_raising_weird_errors_in_tell","nodeType":"Function","startLoc":51,"text":"@pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0"},{"attributeType":"_Feature","col":0,"comment":"null","endLoc":15,"id":1181,"name":"print_function","nodeType":"Attribute","startLoc":15,"text":"print_function"},{"id":1182,"name":"NOTICE","nodeType":"TextFile","path":"","text":"Requests\nCopyright 2019 Kenneth Reitz\n"},{"fileName":"test_help.py","filePath":"tests","id":1183,"nodeType":"File","text":"# -*- encoding: utf-8\n\nimport sys\n\nimport pytest\n\nfrom requests.help import info\n\n\ndef test_system_ssl():\n \"\"\"Verify we're actually setting system_ssl when it should be available.\"\"\"\n assert info()['system_ssl']['version'] != ''\n\n\nclass VersionedPackage(object):\n def __init__(self, version):\n self.__version__ = version\n\n\ndef test_idna_without_version_attribute(mocker):\n \"\"\"Older versions of IDNA don't provide a __version__ attribute, verify\n that if we have such a package, we don't blow up.\n \"\"\"\n mocker.patch('requests.help.idna', new=None)\n assert info()['idna'] == {'version': ''}\n\n\ndef test_idna_with_version_attribute(mocker):\n \"\"\"Verify we're actually setting idna version when it should be available.\"\"\"\n mocker.patch('requests.help.idna', new=VersionedPackage('2.6'))\n assert info()['idna'] == {'version': '2.6'}\n"},{"col":0,"comment":"Generate information for a bug report.","endLoc":126,"header":"def info()","id":1185,"name":"info","nodeType":"Function","startLoc":68,"text":"def info():\n \"\"\"Generate information for a bug report.\"\"\"\n try:\n platform_info = {\n 'system': 
platform.system(),\n 'release': platform.release(),\n }\n except IOError:\n platform_info = {\n 'system': 'Unknown',\n 'release': 'Unknown',\n }\n\n implementation_info = _implementation()\n urllib3_info = {'version': urllib3.__version__}\n charset_normalizer_info = {'version': None}\n chardet_info = {'version': None}\n if charset_normalizer:\n charset_normalizer_info = {'version': charset_normalizer.__version__}\n if chardet:\n chardet_info = {'version': chardet.__version__}\n\n pyopenssl_info = {\n 'version': None,\n 'openssl_version': '',\n }\n if OpenSSL:\n pyopenssl_info = {\n 'version': OpenSSL.__version__,\n 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,\n }\n cryptography_info = {\n 'version': getattr(cryptography, '__version__', ''),\n }\n idna_info = {\n 'version': getattr(idna, '__version__', ''),\n }\n\n system_ssl = ssl.OPENSSL_VERSION_NUMBER\n system_ssl_info = {\n 'version': '%x' % system_ssl if system_ssl is not None else ''\n }\n\n return {\n 'platform': platform_info,\n 'implementation': implementation_info,\n 'system_ssl': system_ssl_info,\n 'using_pyopenssl': pyopenssl is not None,\n 'using_charset_normalizer': chardet is None,\n 'pyOpenSSL': pyopenssl_info,\n 'urllib3': urllib3_info,\n 'chardet': chardet_info,\n 'charset_normalizer': charset_normalizer_info,\n 'cryptography': cryptography_info,\n 'idna': idna_info,\n 'requests': {\n 'version': requests_version,\n },\n }"},{"col":0,"comment":"Return a dict with the Python implementation and version.\n\n Provide both the name and the version of the Python implementation\n currently running. For example, on CPython 2.7.5 it will return\n {'name': 'CPython', 'version': '2.7.5'}.\n\n This function works best on CPython and PyPy: in particular, it probably\n doesn't work for Jython or IronPython. Future investigation should be done\n to work out the correct shape of the code for those platforms.\n ","endLoc":65,"header":"def _implementation()","id":1186,"name":"_implementation","nodeType":"Function","startLoc":35,"text":"def _implementation():\n \"\"\"Return a dict with the Python implementation and version.\n\n Provide both the name and the version of the Python implementation\n currently running. For example, on CPython 2.7.5 it will return\n {'name': 'CPython', 'version': '2.7.5'}.\n\n This function works best on CPython and PyPy: in particular, it probably\n doesn't work for Jython or IronPython. 
Future investigation should be done\n to work out the correct shape of the code for those platforms.\n \"\"\"\n implementation = platform.python_implementation()\n\n if implementation == 'CPython':\n implementation_version = platform.python_version()\n elif implementation == 'PyPy':\n implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,\n sys.pypy_version_info.minor,\n sys.pypy_version_info.micro)\n if sys.pypy_version_info.releaselevel != 'final':\n implementation_version = ''.join([\n implementation_version, sys.pypy_version_info.releaselevel\n ])\n elif implementation == 'Jython':\n implementation_version = platform.python_version() # Complete Guess\n elif implementation == 'IronPython':\n implementation_version = platform.python_version() # Complete Guess\n else:\n implementation_version = 'Unknown'\n\n return {'name': implementation, 'version': implementation_version}"},{"col":4,"comment":"Disposes of any internal state.\n\n Currently, this closes the PoolManager and any active ProxyManager,\n which closes any pooled connections.\n ","endLoc":328,"header":"def close(self)","id":1189,"name":"close","nodeType":"Function","startLoc":320,"text":"def close(self):\n \"\"\"Disposes of any internal state.\n\n Currently, this closes the PoolManager and any active ProxyManager,\n which closes any pooled connections.\n \"\"\"\n self.poolmanager.clear()\n for proxy in self.proxy_manager.values():\n proxy.clear()"},{"col":4,"comment":"Obtain the url to use when making the final request.\n\n If the message is being sent through a HTTP proxy, the full URL has to\n be used. Otherwise, we should only use the path portion of the URL.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.\n :rtype: str\n ","endLoc":357,"header":"def request_url(self, request, proxies)","id":1191,"name":"request_url","nodeType":"Function","startLoc":330,"text":"def request_url(self, request, proxies):\n \"\"\"Obtain the url to use when making the final request.\n\n If the message is being sent through a HTTP proxy, the full URL has to\n be used. 
Otherwise, we should only use the path portion of the URL.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.\n :rtype: str\n \"\"\"\n proxy = select_proxy(request.url, proxies)\n scheme = urlparse(request.url).scheme\n\n is_proxied_http_request = (proxy and scheme != 'https')\n using_socks_proxy = False\n if proxy:\n proxy_scheme = urlparse(proxy).scheme.lower()\n using_socks_proxy = proxy_scheme.startswith('socks')\n\n url = request.path_url\n if is_proxied_http_request and not using_socks_proxy:\n url = urldefragauth(request.url)\n\n return url"},{"className":"VersionedPackage","col":0,"comment":"null","endLoc":17,"id":1192,"nodeType":"Class","startLoc":15,"text":"class VersionedPackage(object):\n def __init__(self, version):\n self.__version__ = version"},{"col":4,"comment":"null","endLoc":17,"header":"def __init__(self, version)","id":1193,"name":"__init__","nodeType":"Function","startLoc":16,"text":"def __init__(self, version):\n self.__version__ = version"},{"attributeType":"null","col":8,"comment":"null","endLoc":17,"id":1194,"name":"__version__","nodeType":"Attribute","startLoc":17,"text":"self.__version__"},{"col":4,"comment":"Ensure that if tell gives an IOError super_len doesn't fail","endLoc":73,"header":"@pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error)","id":1195,"name":"test_super_len_tell_ioerror","nodeType":"Function","startLoc":63,"text":"@pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0"},{"col":0,"comment":"null","endLoc":96,"header":"def _check_cryptography(cryptography_version)","id":1196,"name":"_check_cryptography","nodeType":"Function","startLoc":87,"text":"def _check_cryptography(cryptography_version):\n # cryptography < 1.3.4\n try:\n cryptography_version = list(map(int, cryptography_version.split('.')))\n except ValueError:\n return\n\n if cryptography_version < [1, 3, 4]:\n warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)\n warnings.warn(warning, RequestsDependencyWarning)"},{"col":0,"comment":"Verify we're actually setting system_ssl when it should be available.","endLoc":12,"header":"def test_system_ssl()","id":1197,"name":"test_system_ssl","nodeType":"Function","startLoc":10,"text":"def test_system_ssl():\n \"\"\"Verify we're actually setting system_ssl when it should be available.\"\"\"\n assert info()['system_ssl']['version'] != ''"},{"fileName":"adapters.py","filePath":"requests","id":1198,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.adapters\n~~~~~~~~~~~~~~~~~\n\nThis module contains the transport adapters that Requests uses to define\nand maintain connections.\n\"\"\"\n\nimport os.path\nimport socket\n\nfrom urllib3.poolmanager import PoolManager, proxy_from_url\nfrom urllib3.response import HTTPResponse\nfrom urllib3.util import parse_url\nfrom urllib3.util import Timeout as TimeoutSauce\nfrom urllib3.util.retry import Retry\nfrom urllib3.exceptions import ClosedPoolError\nfrom urllib3.exceptions import ConnectTimeoutError\nfrom urllib3.exceptions import 
HTTPError as _HTTPError\nfrom urllib3.exceptions import InvalidHeader as _InvalidHeader\nfrom urllib3.exceptions import MaxRetryError\nfrom urllib3.exceptions import NewConnectionError\nfrom urllib3.exceptions import ProxyError as _ProxyError\nfrom urllib3.exceptions import ProtocolError\nfrom urllib3.exceptions import ReadTimeoutError\nfrom urllib3.exceptions import SSLError as _SSLError\nfrom urllib3.exceptions import ResponseError\nfrom urllib3.exceptions import LocationValueError\n\nfrom .models import Response\nfrom .compat import urlparse, basestring\nfrom .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,\n get_encoding_from_headers, prepend_scheme_if_needed,\n get_auth_from_url, urldefragauth, select_proxy)\nfrom .structures import CaseInsensitiveDict\nfrom .cookies import extract_cookies_to_jar\nfrom .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,\n ProxyError, RetryError, InvalidSchema, InvalidProxyURL,\n InvalidURL, InvalidHeader)\nfrom .auth import _basic_auth_str\n\ntry:\n from urllib3.contrib.socks import SOCKSProxyManager\nexcept ImportError:\n def SOCKSProxyManager(*args, **kwargs):\n raise InvalidSchema(\"Missing dependencies for SOCKS support.\")\n\nDEFAULT_POOLBLOCK = False\nDEFAULT_POOLSIZE = 10\nDEFAULT_RETRIES = 0\nDEFAULT_POOL_TIMEOUT = None\n\n\nclass BaseAdapter(object):\n \"\"\"The Base Transport Adapter\"\"\"\n\n def __init__(self):\n super(BaseAdapter, self).__init__()\n\n def send(self, request, stream=False, timeout=None, verify=True,\n cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n \"\"\"\n raise NotImplementedError\n\n def close(self):\n \"\"\"Cleans up adapter specific items.\"\"\"\n raise NotImplementedError\n\n\nclass HTTPAdapter(BaseAdapter):\n \"\"\"The built-in HTTP Adapter for urllib3.\n\n Provides a general-case interface for Requests sessions to contact HTTP and\n HTTPS urls by implementing the Transport Adapter interface. This class will\n usually be created by the :class:`Session ` class under the\n covers.\n\n :param pool_connections: The number of urllib3 connection pools to cache.\n :param pool_maxsize: The maximum number of connections to save in the pool.\n :param max_retries: The maximum number of retries each connection\n should attempt. Note, this applies only to failed DNS lookups, socket\n connections and connection timeouts, never to requests where data has\n made it to the server. By default, Requests does not retry failed\n connections. 
If you need granular control over the conditions under\n which we retry a request, import urllib3's ``Retry`` class and pass\n that instead.\n :param pool_block: Whether the connection pool should block for connections.\n\n Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> a = requests.adapters.HTTPAdapter(max_retries=3)\n >>> s.mount('http://', a)\n \"\"\"\n __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',\n '_pool_block']\n\n def __init__(self, pool_connections=DEFAULT_POOLSIZE,\n pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,\n pool_block=DEFAULT_POOLBLOCK):\n if max_retries == DEFAULT_RETRIES:\n self.max_retries = Retry(0, read=False)\n else:\n self.max_retries = Retry.from_int(max_retries)\n self.config = {}\n self.proxy_manager = {}\n\n super(HTTPAdapter, self).__init__()\n\n self._pool_connections = pool_connections\n self._pool_maxsize = pool_maxsize\n self._pool_block = pool_block\n\n self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)\n\n def __getstate__(self):\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}\n\n def __setstate__(self, state):\n # Can't handle by adding 'proxy_manager' to self.__attrs__ because\n # self.poolmanager uses a lambda function, which isn't pickleable.\n self.proxy_manager = {}\n self.config = {}\n\n for attr, value in state.items():\n setattr(self, attr, value)\n\n self.init_poolmanager(self._pool_connections, self._pool_maxsize,\n block=self._pool_block)\n\n def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):\n \"\"\"Initializes a urllib3 PoolManager.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param connections: The number of urllib3 connection pools to cache.\n :param maxsize: The maximum number of connections to save in the pool.\n :param block: Block when no free connections are available.\n :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.\n \"\"\"\n # save these values for pickling\n self._pool_connections = connections\n self._pool_maxsize = maxsize\n self._pool_block = block\n\n self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,\n block=block, strict=True, **pool_kwargs)\n\n def proxy_manager_for(self, proxy, **proxy_kwargs):\n \"\"\"Return urllib3 ProxyManager for the given proxy.\n\n This method should not be called from user code, and is only\n exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The proxy to return a urllib3 ProxyManager for.\n :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.\n :returns: ProxyManager\n :rtype: urllib3.ProxyManager\n \"\"\"\n if proxy in self.proxy_manager:\n manager = self.proxy_manager[proxy]\n elif proxy.lower().startswith('socks'):\n username, password = get_auth_from_url(proxy)\n manager = self.proxy_manager[proxy] = SOCKSProxyManager(\n proxy,\n username=username,\n password=password,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n block=self._pool_block,\n **proxy_kwargs\n )\n else:\n proxy_headers = self.proxy_headers(proxy)\n manager = self.proxy_manager[proxy] = proxy_from_url(\n proxy,\n proxy_headers=proxy_headers,\n num_pools=self._pool_connections,\n maxsize=self._pool_maxsize,\n block=self._pool_block,\n **proxy_kwargs)\n\n return manager\n\n def cert_verify(self, conn, url, verify, cert):\n \"\"\"Verify a SSL certificate. 
This method should not be called from user\n code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param conn: The urllib3 connection object associated with the cert.\n :param url: The requested URL.\n :param verify: Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use\n :param cert: The SSL certificate to verify.\n \"\"\"\n if url.lower().startswith('https') and verify:\n\n cert_loc = None\n\n # Allow self-specified cert location.\n if verify is not True:\n cert_loc = verify\n\n if not cert_loc:\n cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)\n\n if not cert_loc or not os.path.exists(cert_loc):\n raise IOError(\"Could not find a suitable TLS CA certificate bundle, \"\n \"invalid path: {}\".format(cert_loc))\n\n conn.cert_reqs = 'CERT_REQUIRED'\n\n if not os.path.isdir(cert_loc):\n conn.ca_certs = cert_loc\n else:\n conn.ca_cert_dir = cert_loc\n else:\n conn.cert_reqs = 'CERT_NONE'\n conn.ca_certs = None\n conn.ca_cert_dir = None\n\n if cert:\n if not isinstance(cert, basestring):\n conn.cert_file = cert[0]\n conn.key_file = cert[1]\n else:\n conn.cert_file = cert\n conn.key_file = None\n if conn.cert_file and not os.path.exists(conn.cert_file):\n raise IOError(\"Could not find the TLS certificate file, \"\n \"invalid path: {}\".format(conn.cert_file))\n if conn.key_file and not os.path.exists(conn.key_file):\n raise IOError(\"Could not find the TLS key file, \"\n \"invalid path: {}\".format(conn.key_file))\n\n def build_response(self, req, resp):\n \"\"\"Builds a :class:`Response ` object from a urllib3\n response. This should not be called from user code, and is only exposed\n for use when subclassing the\n :class:`HTTPAdapter `\n\n :param req: The :class:`PreparedRequest ` used to generate the response.\n :param resp: The urllib3 response object.\n :rtype: requests.Response\n \"\"\"\n response = Response()\n\n # Fallback to None if there's no status_code, for whatever reason.\n response.status_code = getattr(resp, 'status', None)\n\n # Make headers case-insensitive.\n response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))\n\n # Set encoding.\n response.encoding = get_encoding_from_headers(response.headers)\n response.raw = resp\n response.reason = response.raw.reason\n\n if isinstance(req.url, bytes):\n response.url = req.url.decode('utf-8')\n else:\n response.url = req.url\n\n # Add new cookies from the server.\n extract_cookies_to_jar(response.cookies, req, resp)\n\n # Give the Response some context.\n response.request = req\n response.connection = self\n\n return response\n\n def get_connection(self, url, proxies=None):\n \"\"\"Returns a urllib3 connection for the given URL. This should not be\n called from user code, and is only exposed for use when subclassing the\n :class:`HTTPAdapter `.\n\n :param url: The URL to connect to.\n :param proxies: (optional) A Requests-style dictionary of proxies used on this request.\n :rtype: urllib3.ConnectionPool\n \"\"\"\n proxy = select_proxy(url, proxies)\n\n if proxy:\n proxy = prepend_scheme_if_needed(proxy, 'http')\n proxy_url = parse_url(proxy)\n if not proxy_url.host:\n raise InvalidProxyURL(\"Please check proxy URL. 
It is malformed\"\n \" and could be missing the host.\")\n proxy_manager = self.proxy_manager_for(proxy)\n conn = proxy_manager.connection_from_url(url)\n else:\n # Only scheme should be lower case\n parsed = urlparse(url)\n url = parsed.geturl()\n conn = self.poolmanager.connection_from_url(url)\n\n return conn\n\n def close(self):\n \"\"\"Disposes of any internal state.\n\n Currently, this closes the PoolManager and any active ProxyManager,\n which closes any pooled connections.\n \"\"\"\n self.poolmanager.clear()\n for proxy in self.proxy_manager.values():\n proxy.clear()\n\n def request_url(self, request, proxies):\n \"\"\"Obtain the url to use when making the final request.\n\n If the message is being sent through a HTTP proxy, the full URL has to\n be used. Otherwise, we should only use the path portion of the URL.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.\n :rtype: str\n \"\"\"\n proxy = select_proxy(request.url, proxies)\n scheme = urlparse(request.url).scheme\n\n is_proxied_http_request = (proxy and scheme != 'https')\n using_socks_proxy = False\n if proxy:\n proxy_scheme = urlparse(proxy).scheme.lower()\n using_socks_proxy = proxy_scheme.startswith('socks')\n\n url = request.path_url\n if is_proxied_http_request and not using_socks_proxy:\n url = urldefragauth(request.url)\n\n return url\n\n def add_headers(self, request, **kwargs):\n \"\"\"Add any headers needed by the connection. As of v2.0 this does\n nothing by default, but is left for overriding by users that subclass\n the :class:`HTTPAdapter `.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` to add headers to.\n :param kwargs: The keyword arguments from the call to send().\n \"\"\"\n pass\n\n def proxy_headers(self, proxy):\n \"\"\"Returns a dictionary of the headers to add to any request sent\n through a proxy. This works with urllib3 magic to ensure that they are\n correctly sent to the proxy, rather than in a tunnelled request if\n CONNECT is being used.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param proxy: The url of the proxy being used for this request.\n :rtype: dict\n \"\"\"\n headers = {}\n username, password = get_auth_from_url(proxy)\n\n if username:\n headers['Proxy-Authorization'] = _basic_auth_str(username,\n password)\n\n return headers\n\n def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. 
Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple or urllib3 Timeout object\n :param verify: (optional) Either a boolean, in which case it controls whether\n we verify the server's TLS certificate, or a string, in which case it\n must be a path to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n :rtype: requests.Response\n \"\"\"\n\n try:\n conn = self.get_connection(request.url, proxies)\n except LocationValueError as e:\n raise InvalidURL(e, request=request)\n\n self.cert_verify(conn, request.url, verify, cert)\n url = self.request_url(request, proxies)\n self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)\n\n chunked = not (request.body is None or 'Content-Length' in request.headers)\n\n if isinstance(timeout, tuple):\n try:\n connect, read = timeout\n timeout = TimeoutSauce(connect=connect, read=read)\n except ValueError as e:\n # this may raise a string formatting error.\n err = (\"Invalid timeout {}. Pass a (connect, read) \"\n \"timeout tuple, or a single float to set \"\n \"both timeouts to the same value\".format(timeout))\n raise ValueError(err)\n elif isinstance(timeout, TimeoutSauce):\n pass\n else:\n timeout = TimeoutSauce(connect=timeout, read=timeout)\n\n try:\n if not chunked:\n resp = conn.urlopen(\n method=request.method,\n url=url,\n body=request.body,\n headers=request.headers,\n redirect=False,\n assert_same_host=False,\n preload_content=False,\n decode_content=False,\n retries=self.max_retries,\n timeout=timeout\n )\n\n # Send the request.\n else:\n if hasattr(conn, 'proxy_pool'):\n conn = conn.proxy_pool\n\n low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)\n\n try:\n skip_host = 'Host' in request.headers\n low_conn.putrequest(request.method,\n url,\n skip_accept_encoding=True,\n skip_host=skip_host)\n\n for header, value in request.headers.items():\n low_conn.putheader(header, value)\n\n low_conn.endheaders()\n\n for i in request.body:\n low_conn.send(hex(len(i))[2:].encode('utf-8'))\n low_conn.send(b'\\r\\n')\n low_conn.send(i)\n low_conn.send(b'\\r\\n')\n low_conn.send(b'0\\r\\n\\r\\n')\n\n # Receive the response from the server\n try:\n # For Python 2.7, use buffering of HTTP responses\n r = low_conn.getresponse(buffering=True)\n except TypeError:\n # For compatibility with Python 3.3+\n r = low_conn.getresponse()\n\n resp = HTTPResponse.from_httplib(\n r,\n pool=conn,\n connection=low_conn,\n preload_content=False,\n decode_content=False\n )\n except:\n # If we hit any problems here, clean up the connection.\n # Then, reraise so that we can handle the actual exception.\n low_conn.close()\n raise\n\n except (ProtocolError, socket.error) as err:\n raise ConnectionError(err, request=request)\n\n except MaxRetryError as e:\n if isinstance(e.reason, ConnectTimeoutError):\n # TODO: Remove this in 3.0.0: see #2811\n if not isinstance(e.reason, NewConnectionError):\n raise ConnectTimeout(e, request=request)\n\n if isinstance(e.reason, ResponseError):\n raise RetryError(e, request=request)\n\n if isinstance(e.reason, _ProxyError):\n raise ProxyError(e, request=request)\n\n if isinstance(e.reason, 
_SSLError):\n # This branch is for urllib3 v1.22 and later.\n raise SSLError(e, request=request)\n\n raise ConnectionError(e, request=request)\n\n except ClosedPoolError as e:\n raise ConnectionError(e, request=request)\n\n except _ProxyError as e:\n raise ProxyError(e)\n\n except (_SSLError, _HTTPError) as e:\n if isinstance(e, _SSLError):\n # This branch is for urllib3 versions earlier than v1.22\n raise SSLError(e, request=request)\n elif isinstance(e, ReadTimeoutError):\n raise ReadTimeout(e, request=request)\n elif isinstance(e, _InvalidHeader):\n raise InvalidHeader(e, request=request)\n else:\n raise\n\n return self.build_response(request, resp)\n"},{"col":0,"comment":"Older versions of IDNA don't provide a __version__ attribute, verify\n that if we have such a package, we don't blow up.\n ","endLoc":25,"header":"def test_idna_without_version_attribute(mocker)","id":1199,"name":"test_idna_without_version_attribute","nodeType":"Function","startLoc":20,"text":"def test_idna_without_version_attribute(mocker):\n \"\"\"Older versions of IDNA don't provide a __version__ attribute, verify\n that if we have such a package, we don't blow up.\n \"\"\"\n mocker.patch('requests.help.idna', new=None)\n assert info()['idna'] == {'version': ''}"},{"attributeType":"null","col":0,"comment":"null","endLoc":41,"id":1200,"name":"DEFAULT_CA_BUNDLE_PATH","nodeType":"Attribute","startLoc":41,"text":"DEFAULT_CA_BUNDLE_PATH"},{"col":4,"comment":"null","endLoc":76,"header":"def test_string(self)","id":1201,"name":"test_string","nodeType":"Function","startLoc":75,"text":"def test_string(self):\n assert super_len('Test') == 4"},{"col":4,"comment":"null","endLoc":88,"header":"@pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn)","id":1202,"name":"test_file","nodeType":"Function","startLoc":78,"text":"@pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num"},{"col":0,"comment":"Verify we're actually setting idna version when it should be available.","endLoc":31,"header":"def test_idna_with_version_attribute(mocker)","id":1203,"name":"test_idna_with_version_attribute","nodeType":"Function","startLoc":28,"text":"def test_idna_with_version_attribute(mocker):\n \"\"\"Verify we're actually setting idna version when it should be available.\"\"\"\n mocker.patch('requests.help.idna', new=VersionedPackage('2.6'))\n assert info()['idna'] == {'version': '2.6'}"},{"attributeType":"bool","col":0,"comment":"null","endLoc":50,"id":1204,"name":"DEFAULT_POOLBLOCK","nodeType":"Attribute","startLoc":50,"text":"DEFAULT_POOLBLOCK"},{"attributeType":"int","col":0,"comment":"null","endLoc":51,"id":1205,"name":"DEFAULT_POOLSIZE","nodeType":"Attribute","startLoc":51,"text":"DEFAULT_POOLSIZE"},{"attributeType":"int","col":0,"comment":"null","endLoc":52,"id":1206,"name":"DEFAULT_RETRIES","nodeType":"Attribute","startLoc":52,"text":"DEFAULT_RETRIES"},{"attributeType":"None","col":0,"comment":"null","endLoc":53,"id":1207,"name":"DEFAULT_POOL_TIMEOUT","nodeType":"Attribute","startLoc":53,"text":"DEFAULT_POOL_TIMEOUT"},{"col":0,"comment":"","endLoc":9,"header":"adapters.py#","id":1208,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.adapters\n~~~~~~~~~~~~~~~~~\n\nThis module contains the 
transport adapters that Requests uses to define\nand maintain connections.\n\"\"\"\n\ntry:\n from urllib3.contrib.socks import SOCKSProxyManager\nexcept ImportError:\n def SOCKSProxyManager(*args, **kwargs):\n raise InvalidSchema(\"Missing dependencies for SOCKS support.\")\n\nDEFAULT_POOLBLOCK = False\n\nDEFAULT_POOLSIZE = 10\n\nDEFAULT_RETRIES = 0\n\nDEFAULT_POOL_TIMEOUT = None"},{"col":4,"comment":"null","endLoc":100,"header":"def test_tarfile_member(self, tmpdir)","id":1209,"name":"test_tarfile_member","nodeType":"Function","startLoc":90,"text":"def test_tarfile_member(self, tmpdir):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n\n tar_obj = str(tmpdir.join('test.tar'))\n with tarfile.open(tar_obj, 'w') as tar:\n tar.add(str(file_obj), arcname='test.txt')\n\n with tarfile.open(tar_obj) as tar:\n member = tar.extractfile('test.txt')\n assert super_len(member) == 4"},{"col":4,"comment":"Add any headers needed by the connection. As of v2.0 this does\n nothing by default, but is left for overriding by users that subclass\n the :class:`HTTPAdapter `.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` to add headers to.\n :param kwargs: The keyword arguments from the call to send().\n ","endLoc":371,"header":"def add_headers(self, request, **kwargs)","id":1213,"name":"add_headers","nodeType":"Function","startLoc":359,"text":"def add_headers(self, request, **kwargs):\n \"\"\"Add any headers needed by the connection. As of v2.0 this does\n nothing by default, but is left for overriding by users that subclass\n the :class:`HTTPAdapter `.\n\n This should not be called from user code, and is only exposed for use\n when subclassing the\n :class:`HTTPAdapter `.\n\n :param request: The :class:`PreparedRequest ` to add headers to.\n :param kwargs: The keyword arguments from the call to send().\n \"\"\"\n pass"},{"col":4,"comment":"Sends PreparedRequest object. Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple or urllib3 Timeout object\n :param verify: (optional) Either a boolean, in which case it controls whether\n we verify the server's TLS certificate, or a string, in which case it\n must be a path to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n :rtype: requests.Response\n ","endLoc":538,"header":"def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None)","id":1214,"name":"send","nodeType":"Function","startLoc":395,"text":"def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):\n \"\"\"Sends PreparedRequest object. 
Returns Response object.\n\n :param request: The :class:`PreparedRequest ` being sent.\n :param stream: (optional) Whether to stream the request content.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n :type timeout: float or tuple or urllib3 Timeout object\n :param verify: (optional) Either a boolean, in which case it controls whether\n we verify the server's TLS certificate, or a string, in which case it\n must be a path to a CA bundle to use\n :param cert: (optional) Any user-provided SSL certificate to be trusted.\n :param proxies: (optional) The proxies dictionary to apply to the request.\n :rtype: requests.Response\n \"\"\"\n\n try:\n conn = self.get_connection(request.url, proxies)\n except LocationValueError as e:\n raise InvalidURL(e, request=request)\n\n self.cert_verify(conn, request.url, verify, cert)\n url = self.request_url(request, proxies)\n self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)\n\n chunked = not (request.body is None or 'Content-Length' in request.headers)\n\n if isinstance(timeout, tuple):\n try:\n connect, read = timeout\n timeout = TimeoutSauce(connect=connect, read=read)\n except ValueError as e:\n # this may raise a string formatting error.\n err = (\"Invalid timeout {}. Pass a (connect, read) \"\n \"timeout tuple, or a single float to set \"\n \"both timeouts to the same value\".format(timeout))\n raise ValueError(err)\n elif isinstance(timeout, TimeoutSauce):\n pass\n else:\n timeout = TimeoutSauce(connect=timeout, read=timeout)\n\n try:\n if not chunked:\n resp = conn.urlopen(\n method=request.method,\n url=url,\n body=request.body,\n headers=request.headers,\n redirect=False,\n assert_same_host=False,\n preload_content=False,\n decode_content=False,\n retries=self.max_retries,\n timeout=timeout\n )\n\n # Send the request.\n else:\n if hasattr(conn, 'proxy_pool'):\n conn = conn.proxy_pool\n\n low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)\n\n try:\n skip_host = 'Host' in request.headers\n low_conn.putrequest(request.method,\n url,\n skip_accept_encoding=True,\n skip_host=skip_host)\n\n for header, value in request.headers.items():\n low_conn.putheader(header, value)\n\n low_conn.endheaders()\n\n for i in request.body:\n low_conn.send(hex(len(i))[2:].encode('utf-8'))\n low_conn.send(b'\\r\\n')\n low_conn.send(i)\n low_conn.send(b'\\r\\n')\n low_conn.send(b'0\\r\\n\\r\\n')\n\n # Receive the response from the server\n try:\n # For Python 2.7, use buffering of HTTP responses\n r = low_conn.getresponse(buffering=True)\n except TypeError:\n # For compatibility with Python 3.3+\n r = low_conn.getresponse()\n\n resp = HTTPResponse.from_httplib(\n r,\n pool=conn,\n connection=low_conn,\n preload_content=False,\n decode_content=False\n )\n except:\n # If we hit any problems here, clean up the connection.\n # Then, reraise so that we can handle the actual exception.\n low_conn.close()\n raise\n\n except (ProtocolError, socket.error) as err:\n raise ConnectionError(err, request=request)\n\n except MaxRetryError as e:\n if isinstance(e.reason, ConnectTimeoutError):\n # TODO: Remove this in 3.0.0: see #2811\n if not isinstance(e.reason, NewConnectionError):\n raise ConnectTimeout(e, request=request)\n\n if isinstance(e.reason, ResponseError):\n raise RetryError(e, request=request)\n\n if isinstance(e.reason, _ProxyError):\n raise ProxyError(e, request=request)\n\n if isinstance(e.reason, 
_SSLError):\n # This branch is for urllib3 v1.22 and later.\n raise SSLError(e, request=request)\n\n raise ConnectionError(e, request=request)\n\n except ClosedPoolError as e:\n raise ConnectionError(e, request=request)\n\n except _ProxyError as e:\n raise ProxyError(e)\n\n except (_SSLError, _HTTPError) as e:\n if isinstance(e, _SSLError):\n # This branch is for urllib3 versions earlier than v1.22\n raise SSLError(e, request=request)\n elif isinstance(e, ReadTimeoutError):\n raise ReadTimeout(e, request=request)\n elif isinstance(e, _InvalidHeader):\n raise InvalidHeader(e, request=request)\n else:\n raise\n\n return self.build_response(request, resp)"},{"id":1216,"name":"docs/_static","nodeType":"Package"},{"id":1217,"name":"custom.css","nodeType":"TextFile","path":"docs/_static","text":"body > div.document > div.sphinxsidebar > div > form > table > tbody > tr:nth-child(2) > td > select {\n width: 100%!important;\n}\n\n#python27 > a {\n color: white;\n}\n\n/* Carbon by BuySellAds */\n#carbonads {\n display: block;\n overflow: hidden;\n margin: 1.5em 0 2em;\n padding: 1em;\n border: solid 1px #cccccc;\n border-radius: 2px;\n background-color: #eeeeee;\n text-align: center;\n line-height: 1.5;\n}\n\n#carbonads a {\n border-bottom: 0;\n}\n\n#carbonads span {\n display: block;\n overflow: hidden;\n}\n\n.carbon-img {\n display: block;\n margin: 0 auto 1em;\n text-align: center;\n}\n\n.carbon-text {\n display: block;\n margin-bottom: 1em;\n}\n\n.carbon-poweredby {\n display: block;\n text-transform: uppercase;\n letter-spacing: 1px;\n font-size: 10px;\n line-height: 1;\n}\n\n\n/* Native CPC by BuySellAds */\n\n#native-ribbon #_custom_ {\n position: fixed;\n right: 0;\n bottom: 0;\n left: 0;\n box-shadow: 0 -1px 4px 1px hsla(0, 0%, 0%, .15);\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", Roboto, Oxygen-Sans, Ubuntu,\n Cantarell, \"Helvetica Neue\", Helvetica, Arial, sans-serif;\n transition: all .25s ease-in-out;\n transform: translateY(calc(100% - 35px));\n\n flex-flow: column nowrap;\n}\n\n#native-ribbon #_custom_:hover {\n transform: translateY(0);\n}\n\n.native-img {\n margin-right: 20px;\n max-height: 50px;\n border-radius: 3px;\n}\n\n.native-sponsor {\n margin: 10px 20px;\n text-align: center;\n text-transform: uppercase;\n letter-spacing: .5px;\n font-size: 12px;\n transition: all .3s ease-in-out;\n transform-origin: left;\n}\n\n#native-ribbon #_custom_:hover .native-sponsor {\n margin: 0 20px;\n opacity: 0;\n transform: scaleY(0);\n}\n\n.native-flex {\n display: flex;\n padding: 10px 20px 25px;\n text-decoration: none;\n\n flex-flow: row nowrap;\n justify-content: center;\n align-items: center;\n}\n\n.native-main {\n display: flex;\n\n flex-flow: row nowrap;\n align-items: center;\n}\n\n.native-details {\n display: flex;\n margin-right: 30px;\n\n flex-flow: column nowrap;\n}\n\n.native-company {\n margin-bottom: 4px;\n text-transform: uppercase;\n letter-spacing: 2px;\n font-size: 10px;\n}\n\n.native-desc {\n letter-spacing: 1px;\n font-weight: 300;\n font-size: 14px;\n line-height: 1.4;\n}\n\n.native-cta {\n padding: 10px 14px;\n border-radius: 3px;\n box-shadow: 0 6px 13px 0 hsla(0, 0%, 0%, .15);\n text-transform: uppercase;\n white-space: nowrap;\n letter-spacing: 1px;\n font-weight: 400;\n font-size: 12px;\n transition: all .3s ease-in-out;\n transform: translateY(-1px);\n}\n\n.native-cta:hover {\n box-shadow: none;\n transform: translateY(1px);\n}\n\n@media only screen and (min-width: 320px) and (max-width: 759px) {\n .native-flex {\n padding: 5px 5px 15px;\n 
flex-direction: column;\n\n flex-wrap: wrap;\n }\n\n .native-img {\n margin: 0;\n display: none;\n }\n\n .native-details {\n margin: 0;\n }\n\n .native-main {\n flex-direction: column;\n text-align: left;\n\n flex-wrap: wrap;\n align-content: center;\n }\n\n .native-cta {\n display: none;\n }\n}\n"},{"id":1218,"name":"Custom.md","nodeType":"TextFile","path":".github/ISSUE_TEMPLATE","text":"---\nname: Request for Help\nabout: Guidance on using Requests.\n\n---\n\nPlease refer to our [Stack Overflow tag](https://stackoverflow.com/questions/tagged/python-requests) for guidance.\n"},{"fileName":"auth.py","filePath":"requests","id":1219,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.auth\n~~~~~~~~~~~~~\n\nThis module contains the authentication handlers for Requests.\n\"\"\"\n\nimport os\nimport re\nimport time\nimport hashlib\nimport threading\nimport warnings\n\nfrom base64 import b64encode\n\nfrom .compat import urlparse, str, basestring\nfrom .cookies import extract_cookies_to_jar\nfrom ._internal_utils import to_native_string\nfrom .utils import parse_dict_header\n\nCONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'\nCONTENT_TYPE_MULTI_PART = 'multipart/form-data'\n\n\ndef _basic_auth_str(username, password):\n \"\"\"Returns a Basic Auth string.\"\"\"\n\n # \"I want us to put a big-ol' comment on top of it that\n # says that this behaviour is dumb but we need to preserve\n # it because people are relying on it.\"\n # - Lukasa\n #\n # These are here solely to maintain backwards compatibility\n # for things like ints. This will be removed in 3.0.0.\n if not isinstance(username, basestring):\n warnings.warn(\n \"Non-string usernames will no longer be supported in Requests \"\n \"3.0.0. Please convert the object you've passed in ({!r}) to \"\n \"a string or bytes object in the near future to avoid \"\n \"problems.\".format(username),\n category=DeprecationWarning,\n )\n username = str(username)\n\n if not isinstance(password, basestring):\n warnings.warn(\n \"Non-string passwords will no longer be supported in Requests \"\n \"3.0.0. 
Please convert the object you've passed in ({!r}) to \"\n \"a string or bytes object in the near future to avoid \"\n \"problems.\".format(type(password)),\n category=DeprecationWarning,\n )\n password = str(password)\n # -- End Removal --\n\n if isinstance(username, str):\n username = username.encode('latin1')\n\n if isinstance(password, str):\n password = password.encode('latin1')\n\n authstr = 'Basic ' + to_native_string(\n b64encode(b':'.join((username, password))).strip()\n )\n\n return authstr\n\n\nclass AuthBase(object):\n \"\"\"Base class that all auth implementations derive from\"\"\"\n\n def __call__(self, r):\n raise NotImplementedError('Auth hooks must be callable.')\n\n\nclass HTTPBasicAuth(AuthBase):\n \"\"\"Attaches HTTP Basic Authentication to the given Request object.\"\"\"\n\n def __init__(self, username, password):\n self.username = username\n self.password = password\n\n def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])\n\n def __ne__(self, other):\n return not self == other\n\n def __call__(self, r):\n r.headers['Authorization'] = _basic_auth_str(self.username, self.password)\n return r\n\n\nclass HTTPProxyAuth(HTTPBasicAuth):\n \"\"\"Attaches HTTP Proxy Authentication to a given Request object.\"\"\"\n\n def __call__(self, r):\n r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)\n return r\n\n\nclass HTTPDigestAuth(AuthBase):\n \"\"\"Attaches HTTP Digest Authentication to the given Request object.\"\"\"\n\n def __init__(self, username, password):\n self.username = username\n self.password = password\n # Keep state in per-thread local storage\n self._thread_local = threading.local()\n\n def init_per_thread_state(self):\n # Ensure state is initialized just once per-thread\n if not hasattr(self._thread_local, 'init'):\n self._thread_local.init = True\n self._thread_local.last_nonce = ''\n self._thread_local.nonce_count = 0\n self._thread_local.chal = {}\n self._thread_local.pos = None\n self._thread_local.num_401_calls = None\n\n def build_digest_header(self, method, url):\n \"\"\"\n :rtype: str\n \"\"\"\n\n realm = self._thread_local.chal['realm']\n nonce = self._thread_local.chal['nonce']\n qop = self._thread_local.chal.get('qop')\n algorithm = self._thread_local.chal.get('algorithm')\n opaque = self._thread_local.chal.get('opaque')\n hash_utf8 = None\n\n if algorithm is None:\n _algorithm = 'MD5'\n else:\n _algorithm = algorithm.upper()\n # lambdas assume digest modules are imported at the top level\n if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':\n def md5_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.md5(x).hexdigest()\n hash_utf8 = md5_utf8\n elif _algorithm == 'SHA':\n def sha_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha1(x).hexdigest()\n hash_utf8 = sha_utf8\n elif _algorithm == 'SHA-256':\n def sha256_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha256(x).hexdigest()\n hash_utf8 = sha256_utf8\n elif _algorithm == 'SHA-512':\n def sha512_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha512(x).hexdigest()\n hash_utf8 = sha512_utf8\n\n KD = lambda s, d: hash_utf8(\"%s:%s\" % (s, d))\n\n if hash_utf8 is None:\n return None\n\n # XXX not implemented yet\n entdig = None\n p_parsed = urlparse(url)\n #: path is request-uri defined in RFC 2616 which should not be empty\n path = p_parsed.path or \"/\"\n if 
p_parsed.query:\n path += '?' + p_parsed.query\n\n A1 = '%s:%s:%s' % (self.username, realm, self.password)\n A2 = '%s:%s' % (method, path)\n\n HA1 = hash_utf8(A1)\n HA2 = hash_utf8(A2)\n\n if nonce == self._thread_local.last_nonce:\n self._thread_local.nonce_count += 1\n else:\n self._thread_local.nonce_count = 1\n ncvalue = '%08x' % self._thread_local.nonce_count\n s = str(self._thread_local.nonce_count).encode('utf-8')\n s += nonce.encode('utf-8')\n s += time.ctime().encode('utf-8')\n s += os.urandom(8)\n\n cnonce = (hashlib.sha1(s).hexdigest()[:16])\n if _algorithm == 'MD5-SESS':\n HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))\n\n if not qop:\n respdig = KD(HA1, \"%s:%s\" % (nonce, HA2))\n elif qop == 'auth' or 'auth' in qop.split(','):\n noncebit = \"%s:%s:%s:%s:%s\" % (\n nonce, ncvalue, cnonce, 'auth', HA2\n )\n respdig = KD(HA1, noncebit)\n else:\n # XXX handle auth-int.\n return None\n\n self._thread_local.last_nonce = nonce\n\n # XXX should the partial digests be encoded too?\n base = 'username=\"%s\", realm=\"%s\", nonce=\"%s\", uri=\"%s\", ' \\\n 'response=\"%s\"' % (self.username, realm, nonce, path, respdig)\n if opaque:\n base += ', opaque=\"%s\"' % opaque\n if algorithm:\n base += ', algorithm=\"%s\"' % algorithm\n if entdig:\n base += ', digest=\"%s\"' % entdig\n if qop:\n base += ', qop=\"auth\", nc=%s, cnonce=\"%s\"' % (ncvalue, cnonce)\n\n return 'Digest %s' % (base)\n\n def handle_redirect(self, r, **kwargs):\n \"\"\"Reset num_401_calls counter on redirects.\"\"\"\n if r.is_redirect:\n self._thread_local.num_401_calls = 1\n\n def handle_401(self, r, **kwargs):\n \"\"\"\n Takes the given response and tries digest-auth, if needed.\n\n :rtype: requests.Response\n \"\"\"\n\n # If response is not 4xx, do not auth\n # See https://github.com/psf/requests/issues/3772\n if not 400 <= r.status_code < 500:\n self._thread_local.num_401_calls = 1\n return r\n\n if self._thread_local.pos is not None:\n # Rewind the file position indicator of the body to where\n # it was to resend the request.\n r.request.body.seek(self._thread_local.pos)\n s_auth = r.headers.get('www-authenticate', '')\n\n if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:\n\n self._thread_local.num_401_calls += 1\n pat = re.compile(r'digest ', flags=re.IGNORECASE)\n self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))\n\n # Consume content and release the original connection\n # to allow our new request to reuse the same one.\n r.content\n r.close()\n prep = r.request.copy()\n extract_cookies_to_jar(prep._cookies, r.request, r.raw)\n prep.prepare_cookies(prep._cookies)\n\n prep.headers['Authorization'] = self.build_digest_header(\n prep.method, prep.url)\n _r = r.connection.send(prep, **kwargs)\n _r.history.append(r)\n _r.request = prep\n\n return _r\n\n self._thread_local.num_401_calls = 1\n return r\n\n def __call__(self, r):\n # Initialize per-thread state, if needed\n self.init_per_thread_state()\n # If we have a saved nonce, skip the 401\n if self._thread_local.last_nonce:\n r.headers['Authorization'] = self.build_digest_header(r.method, r.url)\n try:\n self._thread_local.pos = r.body.tell()\n except AttributeError:\n # In the case of HTTPDigestAuth being reused and the body of\n # the previous request was a file-like object, pos has the\n # file position of the previous body. 
Ensure it's set to\n # None.\n self._thread_local.pos = None\n r.register_hook('response', self.handle_401)\n r.register_hook('response', self.handle_redirect)\n self._thread_local.num_401_calls = 1\n\n return r\n\n def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])\n\n def __ne__(self, other):\n return not self == other\n"},{"className":"HTTPProxyAuth","col":0,"comment":"Attaches HTTP Proxy Authentication to a given Request object.","endLoc":105,"id":1220,"nodeType":"Class","startLoc":100,"text":"class HTTPProxyAuth(HTTPBasicAuth):\n \"\"\"Attaches HTTP Proxy Authentication to a given Request object.\"\"\"\n\n def __call__(self, r):\n r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)\n return r"},{"col":4,"comment":"null","endLoc":105,"header":"def __call__(self, r)","id":1221,"name":"__call__","nodeType":"Function","startLoc":103,"text":"def __call__(self, r):\n r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)\n return r"},{"className":"HTTPDigestAuth","col":0,"comment":"Attaches HTTP Digest Authentication to the given Request object.","endLoc":305,"id":1222,"nodeType":"Class","startLoc":108,"text":"class HTTPDigestAuth(AuthBase):\n \"\"\"Attaches HTTP Digest Authentication to the given Request object.\"\"\"\n\n def __init__(self, username, password):\n self.username = username\n self.password = password\n # Keep state in per-thread local storage\n self._thread_local = threading.local()\n\n def init_per_thread_state(self):\n # Ensure state is initialized just once per-thread\n if not hasattr(self._thread_local, 'init'):\n self._thread_local.init = True\n self._thread_local.last_nonce = ''\n self._thread_local.nonce_count = 0\n self._thread_local.chal = {}\n self._thread_local.pos = None\n self._thread_local.num_401_calls = None\n\n def build_digest_header(self, method, url):\n \"\"\"\n :rtype: str\n \"\"\"\n\n realm = self._thread_local.chal['realm']\n nonce = self._thread_local.chal['nonce']\n qop = self._thread_local.chal.get('qop')\n algorithm = self._thread_local.chal.get('algorithm')\n opaque = self._thread_local.chal.get('opaque')\n hash_utf8 = None\n\n if algorithm is None:\n _algorithm = 'MD5'\n else:\n _algorithm = algorithm.upper()\n # lambdas assume digest modules are imported at the top level\n if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':\n def md5_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.md5(x).hexdigest()\n hash_utf8 = md5_utf8\n elif _algorithm == 'SHA':\n def sha_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha1(x).hexdigest()\n hash_utf8 = sha_utf8\n elif _algorithm == 'SHA-256':\n def sha256_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha256(x).hexdigest()\n hash_utf8 = sha256_utf8\n elif _algorithm == 'SHA-512':\n def sha512_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha512(x).hexdigest()\n hash_utf8 = sha512_utf8\n\n KD = lambda s, d: hash_utf8(\"%s:%s\" % (s, d))\n\n if hash_utf8 is None:\n return None\n\n # XXX not implemented yet\n entdig = None\n p_parsed = urlparse(url)\n #: path is request-uri defined in RFC 2616 which should not be empty\n path = p_parsed.path or \"/\"\n if p_parsed.query:\n path += '?' 
+ p_parsed.query\n\n A1 = '%s:%s:%s' % (self.username, realm, self.password)\n A2 = '%s:%s' % (method, path)\n\n HA1 = hash_utf8(A1)\n HA2 = hash_utf8(A2)\n\n if nonce == self._thread_local.last_nonce:\n self._thread_local.nonce_count += 1\n else:\n self._thread_local.nonce_count = 1\n ncvalue = '%08x' % self._thread_local.nonce_count\n s = str(self._thread_local.nonce_count).encode('utf-8')\n s += nonce.encode('utf-8')\n s += time.ctime().encode('utf-8')\n s += os.urandom(8)\n\n cnonce = (hashlib.sha1(s).hexdigest()[:16])\n if _algorithm == 'MD5-SESS':\n HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))\n\n if not qop:\n respdig = KD(HA1, \"%s:%s\" % (nonce, HA2))\n elif qop == 'auth' or 'auth' in qop.split(','):\n noncebit = \"%s:%s:%s:%s:%s\" % (\n nonce, ncvalue, cnonce, 'auth', HA2\n )\n respdig = KD(HA1, noncebit)\n else:\n # XXX handle auth-int.\n return None\n\n self._thread_local.last_nonce = nonce\n\n # XXX should the partial digests be encoded too?\n base = 'username=\"%s\", realm=\"%s\", nonce=\"%s\", uri=\"%s\", ' \\\n 'response=\"%s\"' % (self.username, realm, nonce, path, respdig)\n if opaque:\n base += ', opaque=\"%s\"' % opaque\n if algorithm:\n base += ', algorithm=\"%s\"' % algorithm\n if entdig:\n base += ', digest=\"%s\"' % entdig\n if qop:\n base += ', qop=\"auth\", nc=%s, cnonce=\"%s\"' % (ncvalue, cnonce)\n\n return 'Digest %s' % (base)\n\n def handle_redirect(self, r, **kwargs):\n \"\"\"Reset num_401_calls counter on redirects.\"\"\"\n if r.is_redirect:\n self._thread_local.num_401_calls = 1\n\n def handle_401(self, r, **kwargs):\n \"\"\"\n Takes the given response and tries digest-auth, if needed.\n\n :rtype: requests.Response\n \"\"\"\n\n # If response is not 4xx, do not auth\n # See https://github.com/psf/requests/issues/3772\n if not 400 <= r.status_code < 500:\n self._thread_local.num_401_calls = 1\n return r\n\n if self._thread_local.pos is not None:\n # Rewind the file position indicator of the body to where\n # it was to resend the request.\n r.request.body.seek(self._thread_local.pos)\n s_auth = r.headers.get('www-authenticate', '')\n\n if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:\n\n self._thread_local.num_401_calls += 1\n pat = re.compile(r'digest ', flags=re.IGNORECASE)\n self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))\n\n # Consume content and release the original connection\n # to allow our new request to reuse the same one.\n r.content\n r.close()\n prep = r.request.copy()\n extract_cookies_to_jar(prep._cookies, r.request, r.raw)\n prep.prepare_cookies(prep._cookies)\n\n prep.headers['Authorization'] = self.build_digest_header(\n prep.method, prep.url)\n _r = r.connection.send(prep, **kwargs)\n _r.history.append(r)\n _r.request = prep\n\n return _r\n\n self._thread_local.num_401_calls = 1\n return r\n\n def __call__(self, r):\n # Initialize per-thread state, if needed\n self.init_per_thread_state()\n # If we have a saved nonce, skip the 401\n if self._thread_local.last_nonce:\n r.headers['Authorization'] = self.build_digest_header(r.method, r.url)\n try:\n self._thread_local.pos = r.body.tell()\n except AttributeError:\n # In the case of HTTPDigestAuth being reused and the body of\n # the previous request was a file-like object, pos has the\n # file position of the previous body. 
Ensure it's set to\n # None.\n self._thread_local.pos = None\n r.register_hook('response', self.handle_401)\n r.register_hook('response', self.handle_redirect)\n self._thread_local.num_401_calls = 1\n\n return r\n\n def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])\n\n def __ne__(self, other):\n return not self == other"},{"col":4,"comment":"null","endLoc":115,"header":"def __init__(self, username, password)","id":1223,"name":"__init__","nodeType":"Function","startLoc":111,"text":"def __init__(self, username, password):\n self.username = username\n self.password = password\n # Keep state in per-thread local storage\n self._thread_local = threading.local()"},{"col":4,"comment":"null","endLoc":63,"header":"def get_full_url(self)","id":1225,"name":"get_full_url","nodeType":"Function","startLoc":51,"text":"def get_full_url(self):\n # Only return the response's URL if the user hadn't set the Host\n # header\n if not self._r.headers.get('Host'):\n return self._r.url\n # If they did set it, retrieve it and reconstruct the expected domain\n host = to_native_string(self._r.headers['Host'], encoding='utf-8')\n parsed = urlparse(self._r.url)\n # Reconstruct the URL as we expect it\n return urlunparse([\n parsed.scheme, host, parsed.path, parsed.params, parsed.query,\n parsed.fragment\n ])"},{"col":4,"comment":"null","endLoc":66,"header":"def is_unverifiable(self)","id":1227,"name":"is_unverifiable","nodeType":"Function","startLoc":65,"text":"def is_unverifiable(self):\n return True"},{"col":4,"comment":"null","endLoc":69,"header":"def has_header(self, name)","id":1228,"name":"has_header","nodeType":"Function","startLoc":68,"text":"def has_header(self, name):\n return name in self._r.headers or name in self._new_headers"},{"col":4,"comment":"null","endLoc":72,"header":"def get_header(self, name, default=None)","id":1229,"name":"get_header","nodeType":"Function","startLoc":71,"text":"def get_header(self, name, default=None):\n return self._r.headers.get(name, self._new_headers.get(name, default))"},{"col":0,"comment":"Pretty-print the bug information as JSON.","endLoc":131,"header":"def main()","id":1230,"name":"main","nodeType":"Function","startLoc":129,"text":"def main():\n \"\"\"Pretty-print the bug information as JSON.\"\"\"\n print(json.dumps(info(), sort_keys=True, indent=2))"},{"col":4,"comment":"null","endLoc":125,"header":"def init_per_thread_state(self)","id":1231,"name":"init_per_thread_state","nodeType":"Function","startLoc":117,"text":"def init_per_thread_state(self):\n # Ensure state is initialized just once per-thread\n if not hasattr(self._thread_local, 'init'):\n self._thread_local.init = True\n self._thread_local.last_nonce = ''\n self._thread_local.nonce_count = 0\n self._thread_local.chal = {}\n self._thread_local.pos = None\n self._thread_local.num_401_calls = None"},{"attributeType":"null","col":29,"comment":"null","endLoc":12,"id":1232,"name":"requests_version","nodeType":"Attribute","startLoc":12,"text":"requests_version"},{"col":4,"comment":"\n :rtype: str\n ","endLoc":227,"header":"def build_digest_header(self, method, url)","id":1233,"name":"build_digest_header","nodeType":"Function","startLoc":127,"text":"def build_digest_header(self, method, url):\n \"\"\"\n :rtype: str\n \"\"\"\n\n realm = self._thread_local.chal['realm']\n nonce = self._thread_local.chal['nonce']\n qop = self._thread_local.chal.get('qop')\n algorithm = self._thread_local.chal.get('algorithm')\n opaque = 
self._thread_local.chal.get('opaque')\n hash_utf8 = None\n\n if algorithm is None:\n _algorithm = 'MD5'\n else:\n _algorithm = algorithm.upper()\n # lambdas assume digest modules are imported at the top level\n if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':\n def md5_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.md5(x).hexdigest()\n hash_utf8 = md5_utf8\n elif _algorithm == 'SHA':\n def sha_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha1(x).hexdigest()\n hash_utf8 = sha_utf8\n elif _algorithm == 'SHA-256':\n def sha256_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha256(x).hexdigest()\n hash_utf8 = sha256_utf8\n elif _algorithm == 'SHA-512':\n def sha512_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha512(x).hexdigest()\n hash_utf8 = sha512_utf8\n\n KD = lambda s, d: hash_utf8(\"%s:%s\" % (s, d))\n\n if hash_utf8 is None:\n return None\n\n # XXX not implemented yet\n entdig = None\n p_parsed = urlparse(url)\n #: path is request-uri defined in RFC 2616 which should not be empty\n path = p_parsed.path or \"/\"\n if p_parsed.query:\n path += '?' + p_parsed.query\n\n A1 = '%s:%s:%s' % (self.username, realm, self.password)\n A2 = '%s:%s' % (method, path)\n\n HA1 = hash_utf8(A1)\n HA2 = hash_utf8(A2)\n\n if nonce == self._thread_local.last_nonce:\n self._thread_local.nonce_count += 1\n else:\n self._thread_local.nonce_count = 1\n ncvalue = '%08x' % self._thread_local.nonce_count\n s = str(self._thread_local.nonce_count).encode('utf-8')\n s += nonce.encode('utf-8')\n s += time.ctime().encode('utf-8')\n s += os.urandom(8)\n\n cnonce = (hashlib.sha1(s).hexdigest()[:16])\n if _algorithm == 'MD5-SESS':\n HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))\n\n if not qop:\n respdig = KD(HA1, \"%s:%s\" % (nonce, HA2))\n elif qop == 'auth' or 'auth' in qop.split(','):\n noncebit = \"%s:%s:%s:%s:%s\" % (\n nonce, ncvalue, cnonce, 'auth', HA2\n )\n respdig = KD(HA1, noncebit)\n else:\n # XXX handle auth-int.\n return None\n\n self._thread_local.last_nonce = nonce\n\n # XXX should the partial digests be encoded too?\n base = 'username=\"%s\", realm=\"%s\", nonce=\"%s\", uri=\"%s\", ' \\\n 'response=\"%s\"' % (self.username, realm, nonce, path, respdig)\n if opaque:\n base += ', opaque=\"%s\"' % opaque\n if algorithm:\n base += ', algorithm=\"%s\"' % algorithm\n if entdig:\n base += ', digest=\"%s\"' % entdig\n if qop:\n base += ', qop=\"auth\", nc=%s, cnonce=\"%s\"' % (ncvalue, cnonce)\n\n return 'Digest %s' % (base)"},{"attributeType":"None","col":4,"comment":"null","endLoc":17,"id":1234,"name":"charset_normalizer","nodeType":"Attribute","startLoc":17,"text":"charset_normalizer"},{"attributeType":"None","col":4,"comment":"null","endLoc":22,"id":1235,"name":"chardet","nodeType":"Attribute","startLoc":22,"text":"chardet"},{"attributeType":"None","col":4,"comment":"null","endLoc":27,"id":1236,"name":"pyopenssl","nodeType":"Attribute","startLoc":27,"text":"pyopenssl"},{"fileName":"test_testserver.py","filePath":"tests","id":1237,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport threading\nimport socket\nimport time\n\nimport pytest\nimport requests\nfrom tests.testserver.server import Server\n\n\nclass TestTestServer:\n\n def test_basic(self):\n \"\"\"messages are sent and received properly\"\"\"\n question = b\"success?\"\n answer = b\"yeah, success\"\n\n def handler(sock):\n text = sock.recv(1000)\n assert text == question\n sock.sendall(answer)\n\n with Server(handler) as 
(host, port):\n sock = socket.socket()\n sock.connect((host, port))\n sock.sendall(question)\n text = sock.recv(1000)\n assert text == answer\n sock.close()\n\n def test_server_closes(self):\n \"\"\"the server closes when leaving the context manager\"\"\"\n with Server.basic_response_server() as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n\n sock.close()\n\n with pytest.raises(socket.error):\n new_sock = socket.socket()\n new_sock.connect((host, port))\n\n def test_text_response(self):\n \"\"\"the text_response_server sends the given text\"\"\"\n server = Server.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 6\\r\\n\" +\n \"\\r\\nroflol\"\n )\n\n with server as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n\n assert r.status_code == 200\n assert r.text == u'roflol'\n assert r.headers['Content-Length'] == '6'\n\n def test_basic_response(self):\n \"\"\"the basic response server returns an empty http response\"\"\"\n with Server.basic_response_server() as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n assert r.status_code == 200\n assert r.text == u''\n assert r.headers['Content-Length'] == '0'\n\n def test_basic_waiting_server(self):\n \"\"\"the server waits for the block_server event to be set before closing\"\"\"\n block_server = threading.Event()\n\n with Server.basic_response_server(wait_to_close_event=block_server) as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n sock.sendall(b'send something')\n time.sleep(2.5)\n sock.sendall(b'still alive')\n block_server.set() # release server block\n\n def test_multiple_requests(self):\n \"\"\"multiple requests can be served\"\"\"\n requests_to_handle = 5\n\n server = Server.basic_response_server(requests_to_handle=requests_to_handle)\n\n with server as (host, port):\n server_url = 'http://{}:{}'.format(host, port)\n for _ in range(requests_to_handle):\n r = requests.get(server_url)\n assert r.status_code == 200\n\n # the (n+1)th request fails\n with pytest.raises(requests.exceptions.ConnectionError):\n r = requests.get(server_url)\n\n @pytest.mark.skip(reason=\"this fails non-deterministically under pytest-xdist\")\n def test_request_recovery(self):\n \"\"\"can check the requests content\"\"\"\n # TODO: figure out why this sometimes fails when using pytest-xdist.\n server = Server.basic_response_server(requests_to_handle=2)\n first_request = b'put your hands up in the air'\n second_request = b'put your hand down in the floor'\n\n with server as address:\n sock1 = socket.socket()\n sock2 = socket.socket()\n\n sock1.connect(address)\n sock1.sendall(first_request)\n sock1.close()\n\n sock2.connect(address)\n sock2.sendall(second_request)\n sock2.close()\n\n assert server.handler_results[0] == first_request\n assert server.handler_results[1] == second_request\n\n def test_requests_after_timeout_are_not_received(self):\n \"\"\"the basic response handler times out when receiving requests\"\"\"\n server = Server.basic_response_server(request_timeout=1)\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n sock.sendall(b'hehehe, not received')\n sock.close()\n\n assert server.handler_results[0] == b''\n\n def test_request_recovery_with_bigger_timeout(self):\n \"\"\"a biggest timeout can be specified\"\"\"\n server = Server.basic_response_server(request_timeout=3)\n data = b'bananadine'\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n 
sock.sendall(data)\n sock.close()\n\n assert server.handler_results[0] == data\n\n def test_server_finishes_on_error(self):\n \"\"\"the server thread exits even if an exception exits the context manager\"\"\"\n server = Server.basic_response_server()\n with pytest.raises(Exception):\n with server:\n raise Exception()\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout.\n\n def test_server_finishes_when_no_connections(self):\n \"\"\"the server thread exits even if there are no connections\"\"\"\n server = Server.basic_response_server()\n with server:\n pass\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout.\n"},{"attributeType":"None","col":4,"comment":"null","endLoc":28,"id":1238,"name":"OpenSSL","nodeType":"Attribute","startLoc":28,"text":"OpenSSL"},{"attributeType":"None","col":4,"comment":"null","endLoc":29,"id":1239,"name":"cryptography","nodeType":"Attribute","startLoc":29,"text":"cryptography"},{"col":0,"comment":"","endLoc":1,"header":"help.py#","id":1240,"name":"","nodeType":"Function","startLoc":1,"text":"\"\"\"Module containing bug report helper(s).\"\"\"\n\ntry:\n import charset_normalizer\nexcept ImportError:\n charset_normalizer = None\n\ntry:\n import chardet\nexcept ImportError:\n chardet = None\n\ntry:\n from urllib3.contrib import pyopenssl\nexcept ImportError:\n pyopenssl = None\n OpenSSL = None\n cryptography = None\nelse:\n import OpenSSL\n import cryptography\n\nif __name__ == '__main__':\n main()"},{"className":"TestTestServer","col":0,"comment":"null","endLoc":166,"id":1241,"nodeType":"Class","startLoc":12,"text":"class TestTestServer:\n\n def test_basic(self):\n \"\"\"messages are sent and received properly\"\"\"\n question = b\"success?\"\n answer = b\"yeah, success\"\n\n def handler(sock):\n text = sock.recv(1000)\n assert text == question\n sock.sendall(answer)\n\n with Server(handler) as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n sock.sendall(question)\n text = sock.recv(1000)\n assert text == answer\n sock.close()\n\n def test_server_closes(self):\n \"\"\"the server closes when leaving the context manager\"\"\"\n with Server.basic_response_server() as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n\n sock.close()\n\n with pytest.raises(socket.error):\n new_sock = socket.socket()\n new_sock.connect((host, port))\n\n def test_text_response(self):\n \"\"\"the text_response_server sends the given text\"\"\"\n server = Server.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 6\\r\\n\" +\n \"\\r\\nroflol\"\n )\n\n with server as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n\n assert r.status_code == 200\n assert r.text == u'roflol'\n assert r.headers['Content-Length'] == '6'\n\n def test_basic_response(self):\n \"\"\"the basic response server returns an empty http response\"\"\"\n with Server.basic_response_server() as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n assert r.status_code == 200\n assert r.text == u''\n assert r.headers['Content-Length'] == '0'\n\n def test_basic_waiting_server(self):\n \"\"\"the server waits for the block_server event to be set before closing\"\"\"\n block_server = threading.Event()\n\n with Server.basic_response_server(wait_to_close_event=block_server) as (host, port):\n sock = socket.socket()\n sock.connect((host, 
port))\n sock.sendall(b'send something')\n time.sleep(2.5)\n sock.sendall(b'still alive')\n block_server.set() # release server block\n\n def test_multiple_requests(self):\n \"\"\"multiple requests can be served\"\"\"\n requests_to_handle = 5\n\n server = Server.basic_response_server(requests_to_handle=requests_to_handle)\n\n with server as (host, port):\n server_url = 'http://{}:{}'.format(host, port)\n for _ in range(requests_to_handle):\n r = requests.get(server_url)\n assert r.status_code == 200\n\n # the (n+1)th request fails\n with pytest.raises(requests.exceptions.ConnectionError):\n r = requests.get(server_url)\n\n @pytest.mark.skip(reason=\"this fails non-deterministically under pytest-xdist\")\n def test_request_recovery(self):\n \"\"\"can check the requests content\"\"\"\n # TODO: figure out why this sometimes fails when using pytest-xdist.\n server = Server.basic_response_server(requests_to_handle=2)\n first_request = b'put your hands up in the air'\n second_request = b'put your hand down in the floor'\n\n with server as address:\n sock1 = socket.socket()\n sock2 = socket.socket()\n\n sock1.connect(address)\n sock1.sendall(first_request)\n sock1.close()\n\n sock2.connect(address)\n sock2.sendall(second_request)\n sock2.close()\n\n assert server.handler_results[0] == first_request\n assert server.handler_results[1] == second_request\n\n def test_requests_after_timeout_are_not_received(self):\n \"\"\"the basic response handler times out when receiving requests\"\"\"\n server = Server.basic_response_server(request_timeout=1)\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n sock.sendall(b'hehehe, not received')\n sock.close()\n\n assert server.handler_results[0] == b''\n\n def test_request_recovery_with_bigger_timeout(self):\n \"\"\"a biggest timeout can be specified\"\"\"\n server = Server.basic_response_server(request_timeout=3)\n data = b'bananadine'\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n sock.sendall(data)\n sock.close()\n\n assert server.handler_results[0] == data\n\n def test_server_finishes_on_error(self):\n \"\"\"the server thread exits even if an exception exits the context manager\"\"\"\n server = Server.basic_response_server()\n with pytest.raises(Exception):\n with server:\n raise Exception()\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout.\n\n def test_server_finishes_when_no_connections(self):\n \"\"\"the server thread exits even if there are no connections\"\"\"\n server = Server.basic_response_server()\n with server:\n pass\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout."},{"col":4,"comment":"messages are sent and received properly","endLoc":30,"header":"def test_basic(self)","id":1242,"name":"test_basic","nodeType":"Function","startLoc":14,"text":"def test_basic(self):\n \"\"\"messages are sent and received properly\"\"\"\n question = b\"success?\"\n answer = b\"yeah, success\"\n\n def handler(sock):\n text = sock.recv(1000)\n assert text == question\n sock.sendall(answer)\n\n with Server(handler) as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n sock.sendall(question)\n text = sock.recv(1000)\n assert text == answer\n sock.close()"},{"col":4,"comment":"null","endLoc":442,"header":"def wrap_socket(\n self,\n sock: 
socket.socket,\n server_side: bool = False,\n do_handshake_on_connect: bool = True,\n suppress_ragged_eofs: bool = True,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLSocket","id":1243,"name":"wrap_socket","nodeType":"Function","startLoc":434,"text":"def wrap_socket(\n self,\n sock: socket.socket,\n server_side: bool = False,\n do_handshake_on_connect: bool = True,\n suppress_ragged_eofs: bool = True,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLSocket: ..."},{"id":1244,"name":"index.rst","nodeType":"TextFile","path":"docs","text":".. Requests documentation master file, created by\n sphinx-quickstart on Sun Feb 13 23:54:25 2011.\n You can adapt this file completely to your liking, but it should at least\n contain the root `toctree` directive.\n\nRequests: HTTP for Humans™\n==========================\n\nRelease v\\ |version|. (:ref:`Installation `)\n\n\n.. image:: https://pepy.tech/badge/requests/month\n :target: https://pepy.tech/project/requests\n :alt: Requests Downloads Per Month Badge\n \n.. image:: https://img.shields.io/pypi/l/requests.svg\n :target: https://pypi.org/project/requests/\n :alt: License Badge\n\n.. image:: https://img.shields.io/pypi/wheel/requests.svg\n :target: https://pypi.org/project/requests/\n :alt: Wheel Support Badge\n\n.. image:: https://img.shields.io/pypi/pyversions/requests.svg\n :target: https://pypi.org/project/requests/\n :alt: Python Version Support Badge\n\n**Requests** is an elegant and simple HTTP library for Python, built for human beings.\n\n-------------------\n\n**Behold, the power of Requests**::\n\n >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))\n >>> r.status_code\n 200\n >>> r.headers['content-type']\n 'application/json; charset=utf8'\n >>> r.encoding\n 'utf-8'\n >>> r.text\n '{\"type\":\"User\"...'\n >>> r.json()\n {'private_gists': 419, 'total_private_repos': 77, ...}\n\nSee `similar code, sans Requests `_.\n\n\n**Requests** allows you to send HTTP/1.1 requests extremely easily. \nThere's no need to manually add query strings to your\nURLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling\nare 100% automatic, thanks to `urllib3 `_.\n\nBeloved Features\n----------------\n\nRequests is ready for today's web.\n\n- Keep-Alive & Connection Pooling\n- International Domains and URLs\n- Sessions with Cookie Persistence\n- Browser-style SSL Verification\n- Automatic Content Decoding\n- Basic/Digest Authentication\n- Elegant Key/Value Cookies\n- Automatic Decompression\n- Unicode Response Bodies\n- HTTP(S) Proxy Support\n- Multipart File Uploads\n- Streaming Downloads\n- Connection Timeouts\n- Chunked Requests\n- ``.netrc`` Support\n\nRequests officially supports Python 2.7 & 3.6+, and runs great on PyPy.\n\n\nThe User Guide\n--------------\n\nThis part of the documentation, which is mostly prose, begins with some\nbackground information about Requests, then focuses on step-by-step\ninstructions for getting the most out of Requests.\n\n.. toctree::\n :maxdepth: 2\n\n user/install\n user/quickstart\n user/advanced\n user/authentication\n\n\nThe Community Guide\n-------------------\n\nThis part of the documentation, which is mostly prose, details the\nRequests ecosystem and community.\n\n.. toctree::\n :maxdepth: 2\n\n community/recommended\n community/faq\n community/out-there\n community/support\n community/vulnerabilities\n community/release-process\n\n.. 
toctree::\n :maxdepth: 1\n\n community/updates\n\nThe API Documentation / Guide\n-----------------------------\n\nIf you are looking for information on a specific function, class, or method,\nthis part of the documentation is for you.\n\n.. toctree::\n :maxdepth: 2\n\n api\n\n\nThe Contributor Guide\n---------------------\n\nIf you want to contribute to the project, this part of the documentation is for\nyou.\n\n.. toctree::\n :maxdepth: 3\n\n dev/contributing\n dev/authors\n\nThere are no more guides. You are now guideless.\nGood luck.\n"},{"fileName":"conf.py","filePath":"docs","id":1246,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n#\n# Requests documentation build configuration file, created by\n# sphinx-quickstart on Fri Feb 19 00:05:47 2016.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n# sys.path.insert(0, os.path.abspath('.'))\n\n# Insert Requests' path into the system.\nsys.path.insert(0, os.path.abspath(\"..\"))\nsys.path.insert(0, os.path.abspath(\"_themes\"))\n\nimport requests\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.todo\",\n \"sphinx.ext.viewcode\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = \".rst\"\n\n# The encoding of source files.\n# source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = u\"Requests\"\ncopyright = u'MMXVIX. A Kenneth Reitz Project'\nauthor = u\"Kenneth Reitz\"\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = requests.__version__\n# The full version, including alpha/beta/rc tags.\nrelease = requests.__version__\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n# today = ''\n# Else, today_fmt is used as the format for a strftime call.\n# today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = [\"_build\"]\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\nadd_function_parentheses = False\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\nadd_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = \"flask_theme_support.FlaskyStyle\"\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n# keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = True\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\nhtml_theme = \"alabaster\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n \"show_powered_by\": False,\n \"github_user\": \"requests\",\n \"github_repo\": \"requests\",\n \"github_banner\": True,\n \"show_related\": False,\n \"note_bg\": \"#FFF59C\",\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\n# html_theme_path = []\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \" v documentation\".\n# html_title = None\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n# html_logo = None\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n# html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. 
These files are copied\n# directly to the root of the documentation.\n# html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n# html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\nhtml_use_smartypants = False\n\n# Custom sidebar templates, maps document names to template names.\nhtml_sidebars = {\n \"index\": [\"sidebarintro.html\", \"sourcelink.html\", \"searchbox.html\", \"hacks.html\"],\n \"**\": [\n \"sidebarlogo.html\",\n \"localtoc.html\",\n \"relations.html\",\n \"sourcelink.html\",\n \"searchbox.html\",\n \"hacks.html\",\n ],\n}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n# html_additional_pages = {}\n\n# If false, no module index is generated.\n# html_domain_indices = True\n\n# If false, no index is generated.\n# html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\nhtml_show_sourcelink = False\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\nhtml_show_sphinx = False\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\nhtml_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n# html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n# html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'\n# html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n# html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n# html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = \"Requestsdoc\"\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #'preamble': '',\n # Latex figure (float) alignment\n #'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, \"Requests.tex\", u\"Requests Documentation\", u\"Kenneth Reitz\", \"manual\")\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [(master_doc, \"requests\", u\"Requests Documentation\", [author], 1)]\n\n# If true, show URL addresses after external links.\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n master_doc,\n \"Requests\",\n u\"Requests Documentation\",\n author,\n \"Requests\",\n \"One line description of project.\",\n \"Miscellaneous\",\n )\n]\n\n# Documents to append as an appendix to all manuals.\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n# texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n# texinfo_no_detailmenu = False\n\n\n# -- Options for Epub output ----------------------------------------------\n\n# Bibliographic Dublin Core info.\nepub_title = project\nepub_author = author\nepub_publisher = author\nepub_copyright = copyright\n\n# The basename for the epub file. It defaults to the project name.\n# epub_basename = project\n\n# The HTML theme for the epub output. Since the default themes are not\n# optimized for small screen space, using the same theme for HTML and epub\n# output is usually not wise. This defaults to 'epub', a theme designed to save\n# visual space.\n# epub_theme = 'epub'\n\n# The language of the text. It defaults to the language option\n# or 'en' if the language is not set.\n# epub_language = ''\n\n# The scheme of the identifier. Typical schemes are ISBN or URL.\n# epub_scheme = ''\n\n# The unique identifier of the text. 
This can be a ISBN number\n# or the project homepage.\n# epub_identifier = ''\n\n# A unique identification for the text.\n# epub_uid = ''\n\n# A tuple containing the cover image and cover page html template filenames.\n# epub_cover = ()\n\n# A sequence of (type, uri, title) tuples for the guide element of content.opf.\n# epub_guide = ()\n\n# HTML files that should be inserted before the pages created by sphinx.\n# The format is a list of tuples containing the path and title.\n# epub_pre_files = []\n\n# HTML files that should be inserted after the pages created by sphinx.\n# The format is a list of tuples containing the path and title.\n# epub_post_files = []\n\n# A list of files that should not be packed into the epub file.\nepub_exclude_files = [\"search.html\"]\n\n# The depth of the table of contents in toc.ncx.\n# epub_tocdepth = 3\n\n# Allow duplicate toc entries.\n# epub_tocdup = True\n\n# Choose between 'default' and 'includehidden'.\n# epub_tocscope = 'default'\n\n# Fix unsupported image types using the Pillow.\n# epub_fix_images = False\n\n# Scale large images.\n# epub_max_image_width = 0\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n# epub_show_urls = 'inline'\n\n# If false, no index is generated.\n# epub_use_index = True\n\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"urllib3\": (\"https://urllib3.readthedocs.io/en/latest\", None),\n}\n"},{"attributeType":"list","col":0,"comment":"null","endLoc":38,"id":1247,"name":"extensions","nodeType":"Attribute","startLoc":38,"text":"extensions"},{"id":1249,"name":"requirements.txt","nodeType":"TextFile","path":"docs","text":"# Pinning to avoid unexpected breakages.\n# Used by RTD to generate docs.\nSphinx==4.2.0\n"},{"attributeType":"list","col":0,"comment":"null","endLoc":46,"id":1250,"name":"templates_path","nodeType":"Attribute","startLoc":46,"text":"templates_path"},{"attributeType":"str","col":0,"comment":"null","endLoc":51,"id":1253,"name":"source_suffix","nodeType":"Attribute","startLoc":51,"text":"source_suffix"},{"attributeType":"str","col":0,"comment":"null","endLoc":57,"id":1254,"name":"master_doc","nodeType":"Attribute","startLoc":57,"text":"master_doc"},{"attributeType":"str","col":0,"comment":"null","endLoc":60,"id":1255,"name":"project","nodeType":"Attribute","startLoc":60,"text":"project"},{"attributeType":"str","col":0,"comment":"null","endLoc":61,"id":1256,"name":"copyright","nodeType":"Attribute","startLoc":61,"text":"copyright"},{"attributeType":"str","col":0,"comment":"null","endLoc":62,"id":1257,"name":"author","nodeType":"Attribute","startLoc":62,"text":"author"},{"attributeType":"null","col":0,"comment":"null","endLoc":69,"id":1258,"name":"version","nodeType":"Attribute","startLoc":69,"text":"version"},{"col":0,"comment":"Write a file to the disk in an atomic fashion","endLoc":291,"header":"@contextlib.contextmanager\ndef atomic_open(filename)","id":1261,"name":"atomic_open","nodeType":"Function","startLoc":280,"text":"@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n 
raise"},{"col":0,"comment":"","endLoc":8,"header":"conftest.py#","id":1263,"name":"","nodeType":"Function","startLoc":3,"text":"try:\n from http.server import HTTPServer\n from http.server import SimpleHTTPRequestHandler\nexcept ImportError:\n from BaseHTTPServer import HTTPServer\n from SimpleHTTPServer import SimpleHTTPRequestHandler"},{"fileName":"compat.py","filePath":"tests","id":1264,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nfrom requests.compat import is_py3\n\n\ntry:\n import StringIO\nexcept ImportError:\n import io as StringIO\n\ntry:\n from cStringIO import StringIO as cStringIO\nexcept ImportError:\n cStringIO = None\n\nif is_py3:\n def u(s):\n return s\nelse:\n def u(s):\n return s.decode('unicode-escape')\n"},{"col":4,"comment":"null","endLoc":18,"header":"def u(s)","id":1265,"name":"u","nodeType":"Function","startLoc":17,"text":"def u(s):\n return s"},{"col":4,"comment":"null","endLoc":21,"header":"def u(s)","id":1266,"name":"u","nodeType":"Function","startLoc":20,"text":"def u(s):\n return s.decode('unicode-escape')"},{"attributeType":"null","col":4,"comment":"null","endLoc":111,"id":1267,"name":"__attrs__","nodeType":"Attribute","startLoc":111,"text":"__attrs__"},{"attributeType":"null","col":12,"comment":"null","endLoc":120,"id":1268,"name":"max_retries","nodeType":"Attribute","startLoc":120,"text":"self.max_retries"},{"col":0,"comment":"","endLoc":3,"header":"compat.py#","id":1269,"name":"","nodeType":"Function","startLoc":3,"text":"try:\n import StringIO\nexcept ImportError:\n import io as StringIO\n\ntry:\n from cStringIO import StringIO as cStringIO\nexcept ImportError:\n cStringIO = None\n\nif is_py3:\n def u(s):\n return s\nelse:\n def u(s):\n return s.decode('unicode-escape')"},{"attributeType":"null","col":8,"comment":"null","endLoc":126,"id":1270,"name":"_pool_connections","nodeType":"Attribute","startLoc":126,"text":"self._pool_connections"},{"col":4,"comment":"cookielib has no legitimate use for this method; add it back if you find one.","endLoc":76,"header":"def add_header(self, key, val)","id":1271,"name":"add_header","nodeType":"Function","startLoc":74,"text":"def add_header(self, key, val):\n \"\"\"cookielib has no legitimate use for this method; add it back if you find one.\"\"\"\n raise NotImplementedError(\"Cookie headers should be added with add_unredirected_header()\")"},{"attributeType":"null","col":8,"comment":"null","endLoc":127,"id":1272,"name":"_pool_maxsize","nodeType":"Attribute","startLoc":127,"text":"self._pool_maxsize"},{"col":4,"comment":"null","endLoc":79,"header":"def add_unredirected_header(self, name, value)","id":1274,"name":"add_unredirected_header","nodeType":"Function","startLoc":78,"text":"def add_unredirected_header(self, name, value):\n self._new_headers[name] = value"},{"col":4,"comment":"null","endLoc":82,"header":"def get_new_headers(self)","id":1275,"name":"get_new_headers","nodeType":"Function","startLoc":81,"text":"def get_new_headers(self):\n return self._new_headers"},{"col":4,"comment":"null","endLoc":86,"header":"@property\n def unverifiable(self)","id":1276,"name":"unverifiable","nodeType":"Function","startLoc":84,"text":"@property\n def unverifiable(self):\n return self.is_unverifiable()"},{"col":4,"comment":"null","endLoc":90,"header":"@property\n def origin_req_host(self)","id":1277,"name":"origin_req_host","nodeType":"Function","startLoc":88,"text":"@property\n def origin_req_host(self):\n return 
self.get_origin_req_host()"},{"attributeType":"null","col":8,"comment":"null","endLoc":122,"id":1278,"name":"proxy_manager","nodeType":"Attribute","startLoc":122,"text":"self.proxy_manager"},{"attributeType":"null","col":8,"comment":"null","endLoc":128,"id":1279,"name":"_pool_block","nodeType":"Attribute","startLoc":128,"text":"self._pool_block"},{"id":1280,"name":"sidebarlogo.html","nodeType":"TextFile","path":"docs/_templates","text":"

Requests is an elegant and simple HTTP library for Python, built for
human beings. You are currently looking at the documentation of the
development release.

Sponsored by CERT Gouvernemental - GOVCERT.LU.

Useful Links

Translations
\n\n \n"},{"col":4,"comment":"null","endLoc":94,"header":"@property\n def host(self)","id":1281,"name":"host","nodeType":"Function","startLoc":92,"text":"@property\n def host(self):\n return self.get_host()"},{"attributeType":"null","col":8,"comment":"null","endLoc":38,"id":1282,"name":"_r","nodeType":"Attribute","startLoc":38,"text":"self._r"},{"attributeType":"TypedDict","col":8,"comment":"null","endLoc":39,"id":1283,"name":"_new_headers","nodeType":"Attribute","startLoc":39,"text":"self._new_headers"},{"attributeType":"str","col":8,"comment":"null","endLoc":40,"id":1284,"name":"type","nodeType":"Attribute","startLoc":40,"text":"self.type"},{"id":1285,"name":"out-there.rst","nodeType":"TextFile","path":"docs/community","text":"Integrations\n============\n\nPython for iOS\n--------------\n\nRequests is built into the wonderful `Python for iOS `_ runtime!\n\nTo give it a try, simply::\n\n import requests\n\n\nArticles & Talks\n================\n- `Python for the Web `_ teaches how to use Python to interact with the web, using Requests.\n- `Daniel Greenfeld's Review of Requests `_\n- `My 'Python for Humans' talk `_ ( `audio `_ )\n- `Issac Kelly's 'Consuming Web APIs' talk `_\n- `Blog post about Requests via Yum `_\n- `Russian blog post introducing Requests `_\n- `Sending JSON in Requests `_\n"},{"attributeType":"null","col":8,"comment":"null","endLoc":164,"id":1293,"name":"poolmanager","nodeType":"Attribute","startLoc":164,"text":"self.poolmanager"},{"attributeType":"null","col":8,"comment":"null","endLoc":121,"id":1294,"name":"config","nodeType":"Attribute","startLoc":121,"text":"self.config"},{"className":"MockResponse","col":0,"comment":"Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.\n\n ...what? Basically, expose the parsed HTTP headers from the server response\n the way `cookielib` expects to see them.\n ","endLoc":115,"id":1295,"nodeType":"Class","startLoc":97,"text":"class MockResponse(object):\n \"\"\"Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.\n\n ...what? 
Basically, expose the parsed HTTP headers from the server response\n the way `cookielib` expects to see them.\n \"\"\"\n\n def __init__(self, headers):\n \"\"\"Make a MockResponse for `cookielib` to read.\n\n :param headers: a httplib.HTTPMessage or analogous carrying the headers\n \"\"\"\n self._headers = headers\n\n def info(self):\n return self._headers\n\n def getheaders(self, name):\n self._headers.getheaders(name)"},{"col":4,"comment":"null","endLoc":112,"header":"def info(self)","id":1296,"name":"info","nodeType":"Function","startLoc":111,"text":"def info(self):\n return self._headers"},{"col":4,"comment":"null","endLoc":115,"header":"def getheaders(self, name)","id":1297,"name":"getheaders","nodeType":"Function","startLoc":114,"text":"def getheaders(self, name):\n self._headers.getheaders(name)"},{"fileName":"test_hooks.py","filePath":"tests","id":1298,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\nimport pytest\n\nfrom requests import hooks\n\n\ndef hook(value):\n return value[1:]\n\n\n@pytest.mark.parametrize(\n 'hooks_list, result', (\n (hook, 'ata'),\n ([hook, lambda x: None, hook], 'ta'),\n )\n)\ndef test_hooks(hooks_list, result):\n assert hooks.dispatch_hook('response', {'response': hooks_list}, 'Data') == result\n\n\ndef test_default_hooks():\n assert hooks.default_hooks() == {'response': []}\n"},{"col":0,"comment":"null","endLoc":9,"header":"def hook(value)","id":1299,"name":"hook","nodeType":"Function","startLoc":8,"text":"def hook(value):\n return value[1:]"},{"col":0,"comment":"null","endLoc":19,"header":"@pytest.mark.parametrize(\n 'hooks_list, result', (\n (hook, 'ata'),\n ([hook, lambda x","id":1300,"name":"test_hooks","nodeType":"Function","startLoc":12,"text":"@pytest.mark.parametrize(\n 'hooks_list, result', (\n (hook, 'ata'),\n ([hook, lambda x: None, hook], 'ta'),\n )\n)\ndef test_hooks(hooks_list, result):\n assert hooks.dispatch_hook('response', {'response': hooks_list}, 'Data') == result"},{"col":16,"endLoc":15,"id":1301,"nodeType":"Lambda","startLoc":15,"text":"lambda x: None"},{"col":0,"comment":"null","endLoc":23,"header":"def test_default_hooks()","id":1302,"name":"test_default_hooks","nodeType":"Function","startLoc":22,"text":"def test_default_hooks():\n assert hooks.default_hooks() == {'response': []}"},{"attributeType":"function","col":8,"comment":"null","endLoc":43,"id":1304,"name":"preferred_clock","nodeType":"Attribute","startLoc":43,"text":"preferred_clock"},{"id":1305,"name":"LICENSE","nodeType":"TextFile","path":"docs/_themes","text":"Modifications:\n\nCopyright (c) 2011 Kenneth Reitz.\n\n\nOriginal Project:\n\nCopyright (c) 2010 by Armin Ronacher.\n\n\nSome rights reserved.\n\nRedistribution and use in source and binary forms of the theme, with or\nwithout modification, are permitted provided that the following conditions\nare met:\n\n* Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided\n with the distribution.\n\n* The names of the contributors may not be used to endorse or\n promote products derived from this software without specific\n prior written permission.\n\nWe kindly ask you to only use these themes in an unmodified manner just\nfor Flask and Flask-related products, not for unrelated projects. 
If you\nlike the visual style and want to use it for your own projects, please\nconsider making some larger changes to the themes (such as changing\nfont faces, sizes, colors or margins).\n\nTHIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\nARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\nLIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\nCONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\nSUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\nINTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\nARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGE.\n"},{"id":1306,"name":"advanced.rst","nodeType":"TextFile","path":"docs/user","text":".. _advanced:\n\nAdvanced Usage\n==============\n\nThis document covers some of Requests more advanced features.\n\n.. _session-objects:\n\nSession Objects\n---------------\n\nThe Session object allows you to persist certain parameters across\nrequests. It also persists cookies across all requests made from the\nSession instance, and will use ``urllib3``'s `connection pooling`_. So if\nyou're making several requests to the same host, the underlying TCP\nconnection will be reused, which can result in a significant performance\nincrease (see `HTTP persistent connection`_).\n\nA Session object has all the methods of the main Requests API.\n\nLet's persist some cookies across requests::\n\n s = requests.Session()\n\n s.get('https://httpbin.org/cookies/set/sessioncookie/123456789')\n r = s.get('https://httpbin.org/cookies')\n\n print(r.text)\n # '{\"cookies\": {\"sessioncookie\": \"123456789\"}}'\n\n\nSessions can also be used to provide default data to the request methods. This\nis done by providing data to the properties on a Session object::\n\n s = requests.Session()\n s.auth = ('user', 'pass')\n s.headers.update({'x-test': 'true'})\n\n # both 'x-test' and 'x-test2' are sent\n s.get('https://httpbin.org/headers', headers={'x-test2': 'true'})\n\n\nAny dictionaries that you pass to a request method will be merged with the\nsession-level values that are set. The method-level parameters override session\nparameters.\n\nNote, however, that method-level parameters will *not* be persisted across\nrequests, even if using a session. This example will only send the cookies\nwith the first request, but not the second::\n\n s = requests.Session()\n\n r = s.get('https://httpbin.org/cookies', cookies={'from-my': 'browser'})\n print(r.text)\n # '{\"cookies\": {\"from-my\": \"browser\"}}'\n\n r = s.get('https://httpbin.org/cookies')\n print(r.text)\n # '{\"cookies\": {}}'\n\n\nIf you want to manually add cookies to your session, use the\n:ref:`Cookie utility functions ` to manipulate\n:attr:`Session.cookies `.\n\nSessions can also be used as context managers::\n\n with requests.Session() as s:\n s.get('https://httpbin.org/cookies/set/sessioncookie/123456789')\n\nThis will make sure the session is closed as soon as the ``with`` block is\nexited, even if unhandled exceptions occurred.\n\n\n.. admonition:: Remove a Value From a Dict Parameter\n\n Sometimes you'll want to omit session-level keys from a dict parameter. 
To\n do this, you simply set that key's value to ``None`` in the method-level\n parameter. It will automatically be omitted.\n\nAll values that are contained within a session are directly available to you.\nSee the :ref:`Session API Docs ` to learn more.\n\n.. _request-and-response-objects:\n\nRequest and Response Objects\n----------------------------\n\nWhenever a call is made to ``requests.get()`` and friends, you are doing two\nmajor things. First, you are constructing a ``Request`` object which will be\nsent off to a server to request or query some resource. Second, a ``Response``\nobject is generated once Requests gets a response back from the server.\nThe ``Response`` object contains all of the information returned by the server and\nalso contains the ``Request`` object you created originally. Here is a simple\nrequest to get some very important information from Wikipedia's servers::\n\n >>> r = requests.get('https://en.wikipedia.org/wiki/Monty_Python')\n\nIf we want to access the headers the server sent back to us, we do this::\n\n >>> r.headers\n {'content-length': '56170', 'x-content-type-options': 'nosniff', 'x-cache':\n 'HIT from cp1006.eqiad.wmnet, MISS from cp1010.eqiad.wmnet', 'content-encoding':\n 'gzip', 'age': '3080', 'content-language': 'en', 'vary': 'Accept-Encoding,Cookie',\n 'server': 'Apache', 'last-modified': 'Wed, 13 Jun 2012 01:33:50 GMT',\n 'connection': 'close', 'cache-control': 'private, s-maxage=0, max-age=0,\n must-revalidate', 'date': 'Thu, 14 Jun 2012 12:59:39 GMT', 'content-type':\n 'text/html; charset=UTF-8', 'x-cache-lookup': 'HIT from cp1006.eqiad.wmnet:3128,\n MISS from cp1010.eqiad.wmnet:80'}\n\nHowever, if we want to get the headers we sent the server, we simply access the\nrequest, and then the request's headers::\n\n >>> r.request.headers\n {'Accept-Encoding': 'identity, deflate, compress, gzip',\n 'Accept': '*/*', 'User-Agent': 'python-requests/1.2.0'}\n\n.. _prepared-requests:\n\nPrepared Requests\n-----------------\n\nWhenever you receive a :class:`Response ` object\nfrom an API call or a Session call, the ``request`` attribute is actually the\n``PreparedRequest`` that was used. In some cases you may wish to do some extra\nwork to the body or headers (or anything else really) before sending a\nrequest. The simple recipe for this is the following::\n\n from requests import Request, Session\n\n s = Session()\n\n req = Request('POST', url, data=data, headers=headers)\n prepped = req.prepare()\n\n # do something with prepped.body\n prepped.body = 'No, I want exactly this as the body.'\n\n # do something with prepped.headers\n del prepped.headers['Content-Type']\n\n resp = s.send(prepped,\n stream=stream,\n verify=verify,\n proxies=proxies,\n cert=cert,\n timeout=timeout\n )\n\n print(resp.status_code)\n\nSince you are not doing anything special with the ``Request`` object, you\nprepare it immediately and modify the ``PreparedRequest`` object. You then\nsend that with the other parameters you would have sent to ``requests.*`` or\n``Session.*``.\n\nHowever, the above code will lose some of the advantages of having a Requests\n:class:`Session ` object. In particular,\n:class:`Session `-level state such as cookies will\nnot get applied to your request. 
To get a\n:class:`PreparedRequest ` with that state\napplied, replace the call to :meth:`Request.prepare()\n` with a call to\n:meth:`Session.prepare_request() `, like this::\n\n from requests import Request, Session\n\n s = Session()\n req = Request('GET', url, data=data, headers=headers)\n\n prepped = s.prepare_request(req)\n\n # do something with prepped.body\n prepped.body = 'Seriously, send exactly these bytes.'\n\n # do something with prepped.headers\n prepped.headers['Keep-Dead'] = 'parrot'\n\n resp = s.send(prepped,\n stream=stream,\n verify=verify,\n proxies=proxies,\n cert=cert,\n timeout=timeout\n )\n\n print(resp.status_code)\n\nWhen you are using the prepared request flow, keep in mind that it does not take into account the environment.\nThis can cause problems if you are using environment variables to change the behaviour of requests.\nFor example: Self-signed SSL certificates specified in ``REQUESTS_CA_BUNDLE`` will not be taken into account.\nAs a result an ``SSL: CERTIFICATE_VERIFY_FAILED`` is thrown.\nYou can get around this behaviour by explicitly merging the environment settings into your session::\n\n from requests import Request, Session\n\n s = Session()\n req = Request('GET', url)\n\n prepped = s.prepare_request(req)\n\n # Merge environment settings into session\n settings = s.merge_environment_settings(prepped.url, {}, None, None, None)\n resp = s.send(prepped, **settings)\n\n print(resp.status_code)\n\n.. _verification:\n\nSSL Cert Verification\n---------------------\n\nRequests verifies SSL certificates for HTTPS requests, just like a web browser.\nBy default, SSL verification is enabled, and Requests will throw a SSLError if\nit's unable to verify the certificate::\n\n >>> requests.get('https://requestb.in')\n requests.exceptions.SSLError: hostname 'requestb.in' doesn't match either of '*.herokuapp.com', 'herokuapp.com'\n\nI don't have SSL setup on this domain, so it throws an exception. Excellent. GitHub does though::\n\n >>> requests.get('https://github.com')\n \n\nYou can pass ``verify`` the path to a CA_BUNDLE file or directory with certificates of trusted CAs::\n\n >>> requests.get('https://github.com', verify='/path/to/certfile')\n\nor persistent::\n\n s = requests.Session()\n s.verify = '/path/to/certfile'\n\n.. note:: If ``verify`` is set to a path to a directory, the directory must have been processed using\n the ``c_rehash`` utility supplied with OpenSSL.\n\nThis list of trusted CAs can also be specified through the ``REQUESTS_CA_BUNDLE`` environment variable.\nIf ``REQUESTS_CA_BUNDLE`` is not set, ``CURL_CA_BUNDLE`` will be used as fallback.\n\nRequests can also ignore verifying the SSL certificate if you set ``verify`` to False::\n\n >>> requests.get('https://kennethreitz.org', verify=False)\n \n\nNote that when ``verify`` is set to ``False``, requests will accept any TLS\ncertificate presented by the server, and will ignore hostname mismatches\nand/or expired certificates, which will make your application vulnerable to\nman-in-the-middle (MitM) attacks. Setting verify to ``False`` may be useful\nduring local development or testing.\n\nBy default, ``verify`` is set to True. 
Option ``verify`` only applies to host certs.\n\nClient Side Certificates\n------------------------\n\nYou can also specify a local cert to use as client side certificate, as a single\nfile (containing the private key and the certificate) or as a tuple of both\nfiles' paths::\n\n >>> requests.get('https://kennethreitz.org', cert=('/path/client.cert', '/path/client.key'))\n \n\nor persistent::\n\n s = requests.Session()\n s.cert = '/path/client.cert'\n\nIf you specify a wrong path or an invalid cert, you'll get a SSLError::\n\n >>> requests.get('https://kennethreitz.org', cert='/wrong_path/client.pem')\n SSLError: [Errno 336265225] _ssl.c:347: error:140B0009:SSL routines:SSL_CTX_use_PrivateKey_file:PEM lib\n\n.. warning:: The private key to your local certificate *must* be unencrypted.\n Currently, Requests does not support using encrypted keys.\n\n.. _ca-certificates:\n\nCA Certificates\n---------------\n\nRequests uses certificates from the package `certifi`_. This allows for users\nto update their trusted certificates without changing the version of Requests.\n\nBefore version 2.16, Requests bundled a set of root CAs that it trusted,\nsourced from the `Mozilla trust store`_. The certificates were only updated\nonce for each Requests version. When ``certifi`` was not installed, this led to\nextremely out-of-date certificate bundles when using significantly older\nversions of Requests.\n\nFor the sake of security we recommend upgrading certifi frequently!\n\n.. _HTTP persistent connection: https://en.wikipedia.org/wiki/HTTP_persistent_connection\n.. _connection pooling: https://urllib3.readthedocs.io/en/latest/reference/index.html#module-urllib3.connectionpool\n.. _certifi: https://certifiio.readthedocs.io/\n.. _Mozilla trust store: https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt\n\n.. _body-content-workflow:\n\nBody Content Workflow\n---------------------\n\nBy default, when you make a request, the body of the response is downloaded\nimmediately. You can override this behaviour and defer downloading the response\nbody until you access the :attr:`Response.content `\nattribute with the ``stream`` parameter::\n\n tarball_url = 'https://github.com/psf/requests/tarball/main'\n r = requests.get(tarball_url, stream=True)\n\nAt this point only the response headers have been downloaded and the connection\nremains open, hence allowing us to make content retrieval conditional::\n\n if int(r.headers['content-length']) < TOO_LONG:\n content = r.content\n ...\n\nYou can further control the workflow by use of the :meth:`Response.iter_content() `\nand :meth:`Response.iter_lines() ` methods.\nAlternatively, you can read the undecoded body from the underlying\nurllib3 :class:`urllib3.HTTPResponse ` at\n:attr:`Response.raw `.\n\nIf you set ``stream`` to ``True`` when making a request, Requests cannot\nrelease the connection back to the pool unless you consume all the data or call\n:meth:`Response.close `. This can lead to\ninefficiency with connections. If you find yourself partially reading request\nbodies (or not reading them at all) while using ``stream=True``, you should\nmake the request within a ``with`` statement to ensure it's always closed::\n\n with requests.get('https://httpbin.org/get', stream=True) as r:\n # Do things with the response here.\n\n.. 
_keep-alive:\n\nKeep-Alive\n----------\n\nExcellent news — thanks to urllib3, keep-alive is 100% automatic within a session!\nAny requests that you make within a session will automatically reuse the appropriate\nconnection!\n\nNote that connections are only released back to the pool for reuse once all body\ndata has been read; be sure to either set ``stream`` to ``False`` or read the\n``content`` property of the ``Response`` object.\n\n.. _streaming-uploads:\n\nStreaming Uploads\n-----------------\n\nRequests supports streaming uploads, which allow you to send large streams or\nfiles without reading them into memory. To stream and upload, simply provide a\nfile-like object for your body::\n\n with open('massive-body', 'rb') as f:\n requests.post('http://some.url/streamed', data=f)\n\n.. warning:: It is strongly recommended that you open files in :ref:`binary\n mode `. This is because Requests may attempt to provide\n the ``Content-Length`` header for you, and if it does this value\n will be set to the number of *bytes* in the file. Errors may occur\n if you open the file in *text mode*.\n\n\n.. _chunk-encoding:\n\nChunk-Encoded Requests\n----------------------\n\nRequests also supports Chunked transfer encoding for outgoing and incoming requests.\nTo send a chunk-encoded request, simply provide a generator (or any iterator without\na length) for your body::\n\n def gen():\n yield 'hi'\n yield 'there'\n\n requests.post('http://some.url/chunked', data=gen())\n\nFor chunked encoded responses, it's best to iterate over the data using\n:meth:`Response.iter_content() `. In\nan ideal situation you'll have set ``stream=True`` on the request, in which\ncase you can iterate chunk-by-chunk by calling ``iter_content`` with a ``chunk_size``\nparameter of ``None``. If you want to set a maximum size of the chunk,\nyou can set a ``chunk_size`` parameter to any integer.\n\n\n.. _multipart:\n\nPOST Multiple Multipart-Encoded Files\n-------------------------------------\n\nYou can send multiple files in one request. For example, suppose you want to\nupload image files to an HTML form with a multiple file field 'images'::\n\n \n\nTo do that, just set files to a list of tuples of ``(form_field_name, file_info)``::\n\n >>> url = 'https://httpbin.org/post'\n >>> multiple_files = [\n ... ('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),\n ... ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]\n >>> r = requests.post(url, files=multiple_files)\n >>> r.text\n {\n ...\n 'files': {'images': 'data:image/png;base64,iVBORw ....'}\n 'Content-Type': 'multipart/form-data; boundary=3131623adb2043caaeb5538cc7aa0b3a',\n ...\n }\n\n.. warning:: It is strongly recommended that you open files in :ref:`binary\n mode `. This is because Requests may attempt to provide\n the ``Content-Length`` header for you, and if it does this value\n will be set to the number of *bytes* in the file. Errors may occur\n if you open the file in *text mode*.\n\n\n.. 
_event-hooks:\n\nEvent Hooks\n-----------\n\nRequests has a hook system that you can use to manipulate portions of\nthe request process, or signal event handling.\n\nAvailable hooks:\n\n``response``:\n The response generated from a Request.\n\n\nYou can assign a hook function on a per-request basis by passing a\n``{hook_name: callback_function}`` dictionary to the ``hooks`` request\nparameter::\n\n hooks={'response': print_url}\n\nThat ``callback_function`` will receive a chunk of data as its first\nargument.\n\n::\n\n def print_url(r, *args, **kwargs):\n print(r.url)\n\nYour callback function must handle its own exceptions. Any unhandled exception won't be passed silently and thus should be handled by the code calling Requests.\n\nIf the callback function returns a value, it is assumed that it is to\nreplace the data that was passed in. If the function doesn't return\nanything, nothing else is affected.\n\n::\n\n def record_hook(r, *args, **kwargs):\n r.hook_called = True\n return r\n\nLet's print some request method arguments at runtime::\n\n >>> requests.get('https://httpbin.org/', hooks={'response': print_url})\n https://httpbin.org/\n \n\nYou can add multiple hooks to a single request. Let's call two hooks at once::\n\n >>> r = requests.get('https://httpbin.org/', hooks={'response': [print_url, record_hook]})\n >>> r.hook_called\n True\n\nYou can also add hooks to a ``Session`` instance. Any hooks you add will then\nbe called on every request made to the session. For example::\n\n >>> s = requests.Session()\n >>> s.hooks['response'].append(print_url)\n >>> s.get('https://httpbin.org/')\n https://httpbin.org/\n \n\nA ``Session`` can have multiple hooks, which will be called in the order\nthey are added.\n\n.. _custom-auth:\n\nCustom Authentication\n---------------------\n\nRequests allows you to specify your own authentication mechanism.\n\nAny callable which is passed as the ``auth`` argument to a request method will\nhave the opportunity to modify the request before it is dispatched.\n\nAuthentication implementations are subclasses of :class:`AuthBase `,\nand are easy to define. Requests provides two common authentication scheme\nimplementations in ``requests.auth``: :class:`HTTPBasicAuth ` and\n:class:`HTTPDigestAuth `.\n\nLet's pretend that we have a web service that will only respond if the\n``X-Pizza`` header is set to a password value. Unlikely, but just go with it.\n\n::\n\n from requests.auth import AuthBase\n\n class PizzaAuth(AuthBase):\n \"\"\"Attaches HTTP Pizza Authentication to the given Request object.\"\"\"\n def __init__(self, username):\n # setup any auth-related data here\n self.username = username\n\n def __call__(self, r):\n # modify and return the request\n r.headers['X-Pizza'] = self.username\n return r\n\nThen, we can make a request using our Pizza Auth::\n\n >>> requests.get('http://pizzabin.org/admin', auth=PizzaAuth('kenneth'))\n \n\n.. _streaming-requests:\n\nStreaming Requests\n------------------\n\nWith :meth:`Response.iter_lines() ` you can easily\niterate over streaming APIs such as the `Twitter Streaming\nAPI `_. 
Simply\nset ``stream`` to ``True`` and iterate over the response with\n:meth:`~requests.Response.iter_lines()`::\n\n import json\n import requests\n\n r = requests.get('https://httpbin.org/stream/20', stream=True)\n\n for line in r.iter_lines():\n\n # filter out keep-alive new lines\n if line:\n decoded_line = line.decode('utf-8')\n print(json.loads(decoded_line))\n\nWhen using `decode_unicode=True` with\n:meth:`Response.iter_lines() ` or\n:meth:`Response.iter_content() `, you'll want\nto provide a fallback encoding in the event the server doesn't provide one::\n\n r = requests.get('https://httpbin.org/stream/20', stream=True)\n\n if r.encoding is None:\n r.encoding = 'utf-8'\n\n for line in r.iter_lines(decode_unicode=True):\n if line:\n print(json.loads(line))\n\n.. warning::\n\n :meth:`~requests.Response.iter_lines()` is not reentrant safe.\n Calling this method multiple times causes some of the received data\n being lost. In case you need to call it from multiple places, use\n the resulting iterator object instead::\n\n lines = r.iter_lines()\n # Save the first line for later or just skip it\n\n first_line = next(lines)\n\n for line in lines:\n print(line)\n\n.. _proxies:\n\nProxies\n-------\n\nIf you need to use a proxy, you can configure individual requests with the\n``proxies`` argument to any request method::\n\n import requests\n\n proxies = {\n 'http': 'http://10.10.1.10:3128',\n 'https': 'http://10.10.1.10:1080',\n }\n\n requests.get('http://example.org', proxies=proxies)\n\nAlternatively you can configure it once for an entire\n:class:`Session `::\n\n import requests\n\n proxies = {\n 'http': 'http://10.10.1.10:3128',\n 'https': 'http://10.10.1.10:1080',\n }\n session = requests.Session()\n session.proxies.update(proxies)\n\n session.get('http://example.org')\n\nWhen the proxies configuration is not overridden in python as shown above,\nby default Requests relies on the proxy configuration defined by standard\nenvironment variables ``http_proxy``, ``https_proxy``, ``no_proxy`` and\n``curl_ca_bundle``. Uppercase variants of these variables are also supported.\nYou can therefore set them to configure Requests (only set the ones relevant\nto your needs)::\n\n $ export HTTP_PROXY=\"http://10.10.1.10:3128\"\n $ export HTTPS_PROXY=\"http://10.10.1.10:1080\"\n\n $ python\n >>> import requests\n >>> requests.get('http://example.org')\n\nTo use HTTP Basic Auth with your proxy, use the `http://user:password@host/`\nsyntax in any of the above configuration entries::\n\n $ export HTTPS_PROXY=\"http://user:pass@10.10.1.10:1080\"\n\n $ python\n >>> proxies = {'http': 'http://user:pass@10.10.1.10:3128/'}\n\n.. warning:: Storing sensitive username and password information in an\n environment variable or a version-controlled file is a security risk and is\n highly discouraged.\n\nTo give a proxy for a specific scheme and host, use the\n`scheme://hostname` form for the key. This will match for\nany request to the given scheme and exact hostname.\n\n::\n\n proxies = {'http://10.20.1.128': 'http://10.10.1.10:5323'}\n\nNote that proxy URLs must include the scheme.\n\nFinally, note that using a proxy for https connections typically requires your\nlocal machine to trust the proxy's root certificate. 
By default the list of\ncertificates trusted by Requests can be found with::\n\n from requests.utils import DEFAULT_CA_BUNDLE_PATH\n print(DEFAULT_CA_BUNDLE_PATH)\n\nYou override this default certificate bundle by setting the standard\n``curl_ca_bundle`` environment variable to another file path::\n\n $ export curl_ca_bundle=\"/usr/local/myproxy_info/cacert.pem\"\n $ export https_proxy=\"http://10.10.1.10:1080\"\n\n $ python\n >>> import requests\n >>> requests.get('https://example.org')\n\nSOCKS\n^^^^^\n\n.. versionadded:: 2.10.0\n\nIn addition to basic HTTP proxies, Requests also supports proxies using the\nSOCKS protocol. This is an optional feature that requires that additional\nthird-party libraries be installed before use.\n\nYou can get the dependencies for this feature from ``pip``:\n\n.. code-block:: bash\n\n $ python -m pip install requests[socks]\n\nOnce you've installed those dependencies, using a SOCKS proxy is just as easy\nas using a HTTP one::\n\n proxies = {\n 'http': 'socks5://user:pass@host:port',\n 'https': 'socks5://user:pass@host:port'\n }\n\nUsing the scheme ``socks5`` causes the DNS resolution to happen on the client, rather than on the proxy server. This is in line with curl, which uses the scheme to decide whether to do the DNS resolution on the client or proxy. If you want to resolve the domains on the proxy server, use ``socks5h`` as the scheme.\n\n.. _compliance:\n\nCompliance\n----------\n\nRequests is intended to be compliant with all relevant specifications and\nRFCs where that compliance will not cause difficulties for users. This\nattention to the specification can lead to some behaviour that may seem\nunusual to those not familiar with the relevant specification.\n\nEncodings\n^^^^^^^^^\n\nWhen you receive a response, Requests makes a guess at the encoding to\nuse for decoding the response when you access the :attr:`Response.text\n` attribute. Requests will first check for an\nencoding in the HTTP header, and if none is present, will use\n`charset_normalizer `_\nor `chardet `_ to attempt to\nguess the encoding.\n\nIf ``chardet`` is installed, ``requests`` uses it, however for python3\n``chardet`` is no longer a mandatory dependency. The ``chardet``\nlibrary is an LGPL-licenced dependency and some users of requests\ncannot depend on mandatory LGPL-licensed dependencies.\n\nWhen you install ``request`` without specifying ``[use_chardet_on_py3]]`` extra,\nand ``chardet`` is not already installed, ``requests`` uses ``charset-normalizer``\n(MIT-licensed) to guess the encoding. For Python 2, ``requests`` uses only\n``chardet`` and is a mandatory dependency there.\n\nThe only time Requests will not guess the encoding is if no explicit charset\nis present in the HTTP headers **and** the ``Content-Type``\nheader contains ``text``. In this situation, `RFC 2616\n`_ specifies\nthat the default charset must be ``ISO-8859-1``. Requests follows the\nspecification in this case. If you require a different encoding, you can\nmanually set the :attr:`Response.encoding `\nproperty, or use the raw :attr:`Response.content `.\n\n.. _http-verbs:\n\nHTTP Verbs\n----------\n\nRequests provides access to almost the full range of HTTP verbs: GET, OPTIONS,\nHEAD, POST, PUT, PATCH and DELETE. The following provides detailed examples of\nusing these various verbs in Requests, using the GitHub API.\n\nWe will begin with the verb most commonly used: GET. HTTP GET is an idempotent\nmethod that returns a resource from a given URL. 
As a result, it is the verb\nyou ought to use when attempting to retrieve data from a web location. An\nexample usage would be attempting to get information about a specific commit\nfrom GitHub. Suppose we wanted commit ``a050faf`` on Requests. We would get it\nlike so::\n\n >>> import requests\n >>> r = requests.get('https://api.github.com/repos/psf/requests/git/commits/a050faf084662f3a352dd1a941f2c7c9f886d4ad')\n\nWe should confirm that GitHub responded correctly. If it has, we want to work\nout what type of content it is. Do this like so::\n\n >>> if r.status_code == requests.codes.ok:\n ... print(r.headers['content-type'])\n ...\n application/json; charset=utf-8\n\nSo, GitHub returns JSON. That's great, we can use the :meth:`r.json\n` method to parse it into Python objects.\n\n::\n\n >>> commit_data = r.json()\n\n >>> print(commit_data.keys())\n ['committer', 'author', 'url', 'tree', 'sha', 'parents', 'message']\n\n >>> print(commit_data['committer'])\n {'date': '2012-05-10T11:10:50-07:00', 'email': 'me@kennethreitz.com', 'name': 'Kenneth Reitz'}\n\n >>> print(commit_data['message'])\n makin' history\n\nSo far, so simple. Well, let's investigate the GitHub API a little bit. Now,\nwe could look at the documentation, but we might have a little more fun if we\nuse Requests instead. We can take advantage of the Requests OPTIONS verb to\nsee what kinds of HTTP methods are supported on the url we just used.\n\n::\n\n >>> verbs = requests.options(r.url)\n >>> verbs.status_code\n 500\n\nUh, what? That's unhelpful! Turns out GitHub, like many API providers, don't\nactually implement the OPTIONS method. This is an annoying oversight, but it's\nOK, we can just use the boring documentation. If GitHub had correctly\nimplemented OPTIONS, however, they should return the allowed methods in the\nheaders, e.g.\n\n::\n\n >>> verbs = requests.options('http://a-good-website.com/api/cats')\n >>> print(verbs.headers['allow'])\n GET,HEAD,POST,OPTIONS\n\nTurning to the documentation, we see that the only other method allowed for\ncommits is POST, which creates a new commit. As we're using the Requests repo,\nwe should probably avoid making ham-handed POSTS to it. Instead, let's play\nwith the Issues feature of GitHub.\n\nThis documentation was added in response to\n`Issue #482 `_. Given that\nthis issue already exists, we will use it as an example. Let's start by getting it.\n\n::\n\n >>> r = requests.get('https://api.github.com/repos/psf/requests/issues/482')\n >>> r.status_code\n 200\n\n >>> issue = json.loads(r.text)\n\n >>> print(issue['title'])\n Feature any http verb in docs\n\n >>> print(issue['comments'])\n 3\n\nCool, we have three comments. Let's take a look at the last of them.\n\n::\n\n >>> r = requests.get(r.url + '/comments')\n >>> r.status_code\n 200\n\n >>> comments = r.json()\n\n >>> print(comments[0].keys())\n ['body', 'url', 'created_at', 'updated_at', 'user', 'id']\n\n >>> print(comments[2]['body'])\n Probably in the \"advanced\" section\n\nWell, that seems like a silly place. Let's post a comment telling the poster\nthat he's silly. Who is the poster, anyway?\n\n::\n\n >>> print(comments[2]['user']['login'])\n kennethreitz\n\nOK, so let's tell this Kenneth guy that we think this example should go in the\nquickstart guide instead. According to the GitHub API doc, the way to do this\nis to POST to the thread. Let's do it.\n\n::\n\n >>> body = json.dumps({u\"body\": u\"Sounds great! 
I'll get right on it!\"})\n >>> url = u\"https://api.github.com/repos/psf/requests/issues/482/comments\"\n\n >>> r = requests.post(url=url, data=body)\n >>> r.status_code\n 404\n\nHuh, that's weird. We probably need to authenticate. That'll be a pain, right?\nWrong. Requests makes it easy to use many forms of authentication, including\nthe very common Basic Auth.\n\n::\n\n >>> from requests.auth import HTTPBasicAuth\n >>> auth = HTTPBasicAuth('fake@example.com', 'not_a_real_password')\n\n >>> r = requests.post(url=url, data=body, auth=auth)\n >>> r.status_code\n 201\n\n >>> content = r.json()\n >>> print(content['body'])\n Sounds great! I'll get right on it.\n\nBrilliant. Oh, wait, no! I meant to add that it would take me a while, because\nI had to go feed my cat. If only I could edit this comment! Happily, GitHub\nallows us to use another HTTP verb, PATCH, to edit this comment. Let's do\nthat.\n\n::\n\n >>> print(content[u\"id\"])\n 5804413\n\n >>> body = json.dumps({u\"body\": u\"Sounds great! I'll get right on it once I feed my cat.\"})\n >>> url = u\"https://api.github.com/repos/psf/requests/issues/comments/5804413\"\n\n >>> r = requests.patch(url=url, data=body, auth=auth)\n >>> r.status_code\n 200\n\nExcellent. Now, just to torture this Kenneth guy, I've decided to let him\nsweat and not tell him that I'm working on this. That means I want to delete\nthis comment. GitHub lets us delete comments using the incredibly aptly named\nDELETE method. Let's get rid of it.\n\n::\n\n >>> r = requests.delete(url=url, auth=auth)\n >>> r.status_code\n 204\n >>> r.headers['status']\n '204 No Content'\n\nExcellent. All gone. The last thing I want to know is how much of my ratelimit\nI've used. Let's find out. GitHub sends that information in the headers, so\nrather than download the whole page I'll send a HEAD request to get the\nheaders.\n\n::\n\n >>> r = requests.head(url=url, auth=auth)\n >>> print(r.headers)\n ...\n 'x-ratelimit-remaining': '4995'\n 'x-ratelimit-limit': '5000'\n ...\n\nExcellent. Time to write a Python program that abuses the GitHub API in all\nkinds of exciting ways, 4995 more times.\n\n.. _custom-verbs:\n\nCustom Verbs\n------------\n\nFrom time to time you may be working with a server that, for whatever reason,\nallows use or even requires use of HTTP verbs not covered above. One example of\nthis would be the MKCOL method some WEBDAV servers use. Do not fret, these can\nstill be used with Requests. These make use of the built-in ``.request``\nmethod. For example::\n\n >>> r = requests.request('MKCOL', url, data=data)\n >>> r.status_code\n 200 # Assuming your call was correct\n\nUtilising this, you can make use of any method verb that your server allows.\n\n\n.. _link-headers:\n\nLink Headers\n------------\n\nMany HTTP APIs feature Link headers. They make APIs more self describing and\ndiscoverable.\n\nGitHub uses these for `pagination `_\nin their API, for example::\n\n >>> url = 'https://api.github.com/users/kennethreitz/repos?page=1&per_page=10'\n >>> r = requests.head(url=url)\n >>> r.headers['link']\n '; rel=\"next\", ; rel=\"last\"'\n\nRequests will automatically parse these link headers and make them easily consumable::\n\n >>> r.links[\"next\"]\n {'url': 'https://api.github.com/users/kennethreitz/repos?page=2&per_page=10', 'rel': 'next'}\n\n >>> r.links[\"last\"]\n {'url': 'https://api.github.com/users/kennethreitz/repos?page=7&per_page=10', 'rel': 'last'}\n\n.. 
_transport-adapters:\n\nTransport Adapters\n------------------\n\nAs of v1.0.0, Requests has moved to a modular internal design. Part of the\nreason this was done was to implement Transport Adapters, originally\n`described here`_. Transport Adapters provide a mechanism to define interaction\nmethods for an HTTP service. In particular, they allow you to apply per-service\nconfiguration.\n\nRequests ships with a single Transport Adapter, the :class:`HTTPAdapter\n`. This adapter provides the default Requests\ninteraction with HTTP and HTTPS using the powerful `urllib3`_ library. Whenever\na Requests :class:`Session ` is initialized, one of these is\nattached to the :class:`Session ` object for HTTP, and one\nfor HTTPS.\n\nRequests enables users to create and use their own Transport Adapters that\nprovide specific functionality. Once created, a Transport Adapter can be\nmounted to a Session object, along with an indication of which web services\nit should apply to.\n\n::\n\n >>> s = requests.Session()\n >>> s.mount('https://github.com/', MyAdapter())\n\nThe mount call registers a specific instance of a Transport Adapter to a\nprefix. Once mounted, any HTTP request made using that session whose URL starts\nwith the given prefix will use the given Transport Adapter.\n\nMany of the details of implementing a Transport Adapter are beyond the scope of\nthis documentation, but take a look at the next example for a simple SSL use-\ncase. For more than that, you might look at subclassing the\n:class:`BaseAdapter `.\n\nExample: Specific SSL Version\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nThe Requests team has made a specific choice to use whatever SSL version is\ndefault in the underlying library (`urllib3`_). Normally this is fine, but from\ntime to time, you might find yourself needing to connect to a service-endpoint\nthat uses a version that isn't compatible with the default.\n\nYou can use Transport Adapters for this by taking most of the existing\nimplementation of HTTPAdapter, and adding a parameter *ssl_version* that gets\npassed-through to `urllib3`. We'll make a Transport Adapter that instructs the\nlibrary to use SSLv3::\n\n import ssl\n from urllib3.poolmanager import PoolManager\n\n from requests.adapters import HTTPAdapter\n\n\n class Ssl3HttpAdapter(HTTPAdapter):\n \"\"\"\"Transport adapter\" that allows us to use SSLv3.\"\"\"\n\n def init_poolmanager(self, connections, maxsize, block=False):\n self.poolmanager = PoolManager(\n num_pools=connections, maxsize=maxsize,\n block=block, ssl_version=ssl.PROTOCOL_SSLv3)\n\n.. _`described here`: https://kenreitz.org/essays/2012/06/14/the-future-of-python-http\n.. _`urllib3`: https://github.com/urllib3/urllib3\n\n.. _blocking-or-nonblocking:\n\nBlocking Or Non-Blocking?\n-------------------------\n\nWith the default Transport Adapter in place, Requests does not provide any kind\nof non-blocking IO. The :attr:`Response.content `\nproperty will block until the entire response has been downloaded. If\nyou require more granularity, the streaming features of the library (see\n:ref:`streaming-requests`) allow you to retrieve smaller quantities of the\nresponse at a time. However, these calls will still block.\n\nIf you are concerned about the use of blocking IO, there are lots of projects\nout there that combine Requests with one of Python's asynchronicity frameworks.\nSome excellent examples are `requests-threads`_, `grequests`_, `requests-futures`_, and `httpx`_.\n\n.. _`requests-threads`: https://github.com/requests/requests-threads\n.. 
_`grequests`: https://github.com/spyoungtech/grequests\n.. _`requests-futures`: https://github.com/ross/requests-futures\n.. _`httpx`: https://github.com/encode/httpx\n\nHeader Ordering\n---------------\n\nIn unusual circumstances you may want to provide headers in an ordered manner. If you pass an ``OrderedDict`` to the ``headers`` keyword argument, that will provide the headers with an ordering. *However*, the ordering of the default headers used by Requests will be preferred, which means that if you override default headers in the ``headers`` keyword argument, they may appear out of order compared to other headers in that keyword argument.\n\nIf this is problematic, users should consider setting the default headers on a :class:`Session ` object, by setting :attr:`Session ` to a custom ``OrderedDict``. That ordering will always be preferred.\n\n.. _timeouts:\n\nTimeouts\n--------\n\nMost requests to external servers should have a timeout attached, in case the\nserver is not responding in a timely manner. By default, requests do not time\nout unless a timeout value is set explicitly. Without a timeout, your code may\nhang for minutes or more.\n\nThe **connect** timeout is the number of seconds Requests will wait for your\nclient to establish a connection to a remote machine (corresponding to the\n`connect()`_) call on the socket. It's a good practice to set connect timeouts\nto slightly larger than a multiple of 3, which is the default `TCP packet\nretransmission window `_.\n\nOnce your client has connected to the server and sent the HTTP request, the\n**read** timeout is the number of seconds the client will wait for the server\nto send a response. (Specifically, it's the number of seconds that the client\nwill wait *between* bytes sent from the server. In 99.9% of cases, this is the\ntime before the server sends the first byte).\n\nIf you specify a single value for the timeout, like this::\n\n r = requests.get('https://github.com', timeout=5)\n\nThe timeout value will be applied to both the ``connect`` and the ``read``\ntimeouts. Specify a tuple if you would like to set the values separately::\n\n r = requests.get('https://github.com', timeout=(3.05, 27))\n\nIf the remote server is very slow, you can tell Requests to wait forever for\na response, by passing None as a timeout value and then retrieving a cup of\ncoffee.\n\n::\n\n r = requests.get('https://github.com', timeout=None)\n\n.. _`connect()`: https://linux.die.net/man/2/connect\n"},{"id":1308,"name":"contributing.rst","nodeType":"TextFile","path":"docs/dev","text":".. _contributing:\n\nContributor's Guide\n===================\n\nIf you're reading this, you're probably interested in contributing to Requests.\nThank you very much! Open source projects live-and-die based on the support\nthey receive from others, and the fact that you're even considering\ncontributing to the Requests project is *very* generous of you.\n\nThis document lays out guidelines and advice for contributing to this project.\nIf you're thinking of contributing, please start by reading this document and\ngetting a feel for how contributing to this project works. If you have any\nquestions, feel free to reach out to either `Nate Prewitt`_, `Ian Cordasco`_,\nor `Seth Michael Larson`_, the primary maintainers.\n\n.. _Ian Cordasco: http://www.coglib.com/~icordasc/\n.. _Nate Prewitt: https://www.nateprewitt.com/\n.. 
_Seth Michael Larson: https://sethmlarson.dev/\n\nThe guide is split into sections based on the type of contribution you're\nthinking of making, with a section that covers general guidelines for all\ncontributors.\n\nBe Cordial\n----------\n\n **Be cordial or be on your way**. *—Kenneth Reitz*\n\nRequests has one very important rule governing all forms of contribution,\nincluding reporting bugs or requesting features. This golden rule is\n\"`be cordial or be on your way`_\".\n\n**All contributions are welcome**, as long as\neveryone involved is treated with respect.\n\n.. _be cordial or be on your way: https://kenreitz.org/essays/2013/01/27/be-cordial-or-be-on-your-way\n\n.. _early-feedback:\n\nGet Early Feedback\n------------------\n\nIf you are contributing, do not feel the need to sit on your contribution until\nit is perfectly polished and complete. It helps everyone involved for you to\nseek feedback as early as you possibly can. Submitting an early, unfinished\nversion of your contribution for feedback in no way prejudices your chances of\ngetting that contribution accepted, and can save you from putting a lot of work\ninto a contribution that is not suitable for the project.\n\nContribution Suitability\n------------------------\n\nOur project maintainers have the last word on whether or not a contribution is\nsuitable for Requests. All contributions will be considered carefully, but from\ntime to time, contributions will be rejected because they do not suit the\ncurrent goals or needs of the project.\n\nIf your contribution is rejected, don't despair! As long as you followed these\nguidelines, you will have a much better chance of getting your next\ncontribution accepted.\n\n\nCode Contributions\n------------------\n\nSteps for Submitting Code\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\nWhen contributing code, you'll want to follow this checklist:\n\n1. Fork the repository on GitHub.\n2. Run the tests to confirm they all pass on your system. If they don't, you'll\n need to investigate why they fail. If you're unable to diagnose this\n yourself, raise it as a bug report by following the guidelines in this\n document: :ref:`bug-reports`.\n3. Write tests that demonstrate your bug or feature. Ensure that they fail.\n4. Make your change.\n5. Run the entire test suite again, confirming that all tests pass *including\n the ones you just added*.\n6. Send a GitHub Pull Request to the main repository's ``main`` branch.\n GitHub Pull Requests are the expected method of code collaboration on this\n project.\n\nThe following sub-sections go into more detail on some of the points above.\n\nCode Review\n~~~~~~~~~~~\n\nContributions will not be merged until they've been code reviewed. You should\nimplement any code review feedback unless you strongly object to it. In the\nevent that you object to the code review feedback, you should make your case\nclearly and calmly. If, after doing so, the feedback is judged to still apply,\nyou must either apply the feedback or withdraw your contribution.\n\nNew Contributors\n~~~~~~~~~~~~~~~~\n\nIf you are new or relatively new to Open Source, welcome! Requests aims to\nbe a gentle introduction to the world of Open Source. 
If you're concerned about\nhow best to contribute, please consider mailing a maintainer (listed above) and\nasking for help.\n\nPlease also check the :ref:`early-feedback` section.\n\nKenneth Reitz's Code Style™\n~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nThe Requests codebase uses the `PEP 8`_ code style.\n\nIn addition to the standards outlined in PEP 8, we have a few guidelines:\n\n- Line-length can exceed 79 characters, to 100, when convenient.\n- Line-length can exceed 100 characters, when doing otherwise would be *terribly* inconvenient.\n- Always use single-quoted strings (e.g. ``'#flatearth'``), unless a single-quote occurs within the string.\n\nAdditionally, one of the styles that PEP8 recommends for `line continuations`_\ncompletely lacks all sense of taste, and is not to be permitted within\nthe Requests codebase::\n\n # Aligned with opening delimiter.\n foo = long_function_name(var_one, var_two,\n var_three, var_four)\n\nNo. Just don't. Please. This is much better::\n\n foo = long_function_name(\n var_one,\n var_two,\n var_three,\n var_four,\n )\n\nDocstrings are to follow the following syntaxes::\n\n def the_earth_is_flat():\n \"\"\"NASA divided up the seas into thirty-three degrees.\"\"\"\n pass\n\n::\n\n def fibonacci_spiral_tool():\n \"\"\"With my feet upon the ground I lose myself / between the sounds\n and open wide to suck it in. / I feel it move across my skin. / I'm\n reaching up and reaching out. / I'm reaching for the random or\n whatever will bewilder me. / Whatever will bewilder me. / And\n following our will and wind we may just go where no one's been. /\n We'll ride the spiral to the end and may just go where no one's\n been.\n\n Spiral out. Keep going...\n \"\"\"\n pass\n\nAll functions, methods, and classes are to contain docstrings. Object data\nmodel methods (e.g. ``__repr__``) are typically the exception to this rule.\n\nThanks for helping to make the world a better place!\n\n.. _PEP 8: https://pep8.org/\n.. _line continuations: https://www.python.org/dev/peps/pep-0008/#indentation\n\nDocumentation Contributions\n---------------------------\n\nDocumentation improvements are always welcome! The documentation files live in\nthe ``docs/`` directory of the codebase. They're written in\n`reStructuredText`_, and use `Sphinx`_ to generate the full suite of\ndocumentation.\n\nWhen contributing documentation, please do your best to follow the style of the\ndocumentation files. This means a soft-limit of 79 characters wide in your text\nfiles and a semi-formal, yet friendly and approachable, prose style.\n\nWhen presenting Python code, use single-quoted strings (``'hello'`` instead of\n``\"hello\"``).\n\n.. _reStructuredText: http://docutils.sourceforge.net/rst.html\n.. _Sphinx: http://sphinx-doc.org/index.html\n\n\n.. _bug-reports:\n\nBug Reports\n-----------\n\nBug reports are hugely important! Before you raise one, though, please check\nthrough the `GitHub issues`_, **both open and closed**, to confirm that the bug\nhasn't been reported before. Duplicate bug reports are a huge drain on the time\nof other contributors, and should be avoided as much as possible.\n\n.. _GitHub issues: https://github.com/psf/requests/issues\n\n\nFeature Requests\n----------------\n\nRequests is in a perpetual feature freeze, only the BDFL can add or approve of\nnew features. 
The maintainers believe that Requests is a feature-complete\npiece of software at this time.\n\nOne of the most important skills to have while maintaining a largely-used\nopen source project is learning the ability to say \"no\" to suggested changes,\nwhile keeping an open ear and mind.\n\nIf you believe there is a feature missing, feel free to raise a feature\nrequest, but please do be aware that the overwhelming likelihood is that your\nfeature request will not be accepted.\n"},{"attributeType":"null","col":8,"comment":"null","endLoc":109,"id":1310,"name":"_headers","nodeType":"Attribute","startLoc":109,"text":"self._headers"},{"className":"CookieConflictError","col":0,"comment":"There are two cookies that meet the criteria specified in the cookie jar.\n Use .get and .set and include domain and path args in order to be more specific.\n ","endLoc":168,"id":1311,"nodeType":"Class","startLoc":165,"text":"class CookieConflictError(RuntimeError):\n \"\"\"There are two cookies that meet the criteria specified in the cookie jar.\n Use .get and .set and include domain and path args in order to be more specific.\n \"\"\""},{"col":12,"comment":"null","endLoc":148,"header":"def md5_utf8(x)","id":1312,"name":"md5_utf8","nodeType":"Function","startLoc":145,"text":"def md5_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.md5(x).hexdigest()"},{"col":12,"comment":"null","endLoc":154,"header":"def sha_utf8(x)","id":1313,"name":"sha_utf8","nodeType":"Function","startLoc":151,"text":"def sha_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha1(x).hexdigest()"},{"col":12,"comment":"null","endLoc":160,"header":"def sha256_utf8(x)","id":1314,"name":"sha256_utf8","nodeType":"Function","startLoc":157,"text":"def sha256_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha256(x).hexdigest()"},{"col":12,"comment":"null","endLoc":166,"header":"def sha512_utf8(x)","id":1315,"name":"sha512_utf8","nodeType":"Function","startLoc":163,"text":"def sha512_utf8(x):\n if isinstance(x, str):\n x = x.encode('utf-8')\n return hashlib.sha512(x).hexdigest()"},{"col":13,"endLoc":169,"id":1316,"nodeType":"Lambda","startLoc":169,"text":"lambda s, d: hash_utf8(\"%s:%s\" % (s, d))"},{"fileName":"flask_theme_support.py","filePath":"docs/_themes","id":1317,"nodeType":"File","text":"# flasky extensions. 
flasky pygments style based on tango style\nfrom pygments.style import Style\nfrom pygments.token import Keyword, Name, Comment, String, Error, \\\n Number, Operator, Generic, Whitespace, Punctuation, Other, Literal\n\n\nclass FlaskyStyle(Style):\n background_color = \"#f8f8f8\"\n default_style = \"\"\n\n styles = {\n # No corresponding class for the following:\n #Text: \"\", # class: ''\n Whitespace: \"underline #f8f8f8\", # class: 'w'\n Error: \"#a40000 border:#ef2929\", # class: 'err'\n Other: \"#000000\", # class 'x'\n\n Comment: \"italic #8f5902\", # class: 'c'\n Comment.Preproc: \"noitalic\", # class: 'cp'\n\n Keyword: \"bold #004461\", # class: 'k'\n Keyword.Constant: \"bold #004461\", # class: 'kc'\n Keyword.Declaration: \"bold #004461\", # class: 'kd'\n Keyword.Namespace: \"bold #004461\", # class: 'kn'\n Keyword.Pseudo: \"bold #004461\", # class: 'kp'\n Keyword.Reserved: \"bold #004461\", # class: 'kr'\n Keyword.Type: \"bold #004461\", # class: 'kt'\n\n Operator: \"#582800\", # class: 'o'\n Operator.Word: \"bold #004461\", # class: 'ow' - like keywords\n\n Punctuation: \"bold #000000\", # class: 'p'\n\n # because special names such as Name.Class, Name.Function, etc.\n # are not recognized as such later in the parsing, we choose them\n # to look the same as ordinary variables.\n Name: \"#000000\", # class: 'n'\n Name.Attribute: \"#c4a000\", # class: 'na' - to be revised\n Name.Builtin: \"#004461\", # class: 'nb'\n Name.Builtin.Pseudo: \"#3465a4\", # class: 'bp'\n Name.Class: \"#000000\", # class: 'nc' - to be revised\n Name.Constant: \"#000000\", # class: 'no' - to be revised\n Name.Decorator: \"#888\", # class: 'nd' - to be revised\n Name.Entity: \"#ce5c00\", # class: 'ni'\n Name.Exception: \"bold #cc0000\", # class: 'ne'\n Name.Function: \"#000000\", # class: 'nf'\n Name.Property: \"#000000\", # class: 'py'\n Name.Label: \"#f57900\", # class: 'nl'\n Name.Namespace: \"#000000\", # class: 'nn' - to be revised\n Name.Other: \"#000000\", # class: 'nx'\n Name.Tag: \"bold #004461\", # class: 'nt' - like a keyword\n Name.Variable: \"#000000\", # class: 'nv' - to be revised\n Name.Variable.Class: \"#000000\", # class: 'vc' - to be revised\n Name.Variable.Global: \"#000000\", # class: 'vg' - to be revised\n Name.Variable.Instance: \"#000000\", # class: 'vi' - to be revised\n\n Number: \"#990000\", # class: 'm'\n\n Literal: \"#000000\", # class: 'l'\n Literal.Date: \"#000000\", # class: 'ld'\n\n String: \"#4e9a06\", # class: 's'\n String.Backtick: \"#4e9a06\", # class: 'sb'\n String.Char: \"#4e9a06\", # class: 'sc'\n String.Doc: \"italic #8f5902\", # class: 'sd' - like a comment\n String.Double: \"#4e9a06\", # class: 's2'\n String.Escape: \"#4e9a06\", # class: 'se'\n String.Heredoc: \"#4e9a06\", # class: 'sh'\n String.Interpol: \"#4e9a06\", # class: 'si'\n String.Other: \"#4e9a06\", # class: 'sx'\n String.Regex: \"#4e9a06\", # class: 'sr'\n String.Single: \"#4e9a06\", # class: 's1'\n String.Symbol: \"#4e9a06\", # class: 'ss'\n\n Generic: \"#000000\", # class: 'g'\n Generic.Deleted: \"#a40000\", # class: 'gd'\n Generic.Emph: \"italic #000000\", # class: 'ge'\n Generic.Error: \"#ef2929\", # class: 'gr'\n Generic.Heading: \"bold #000080\", # class: 'gh'\n Generic.Inserted: \"#00A000\", # class: 'gi'\n Generic.Output: \"#888\", # class: 'go'\n Generic.Prompt: \"#745334\", # class: 'gp'\n Generic.Strong: \"bold #000000\", # class: 'gs'\n Generic.Subheading: \"bold #800080\", # class: 'gu'\n Generic.Traceback: \"bold #a40000\", # class: 'gt'\n 
}\n"},{"className":"RuntimeError","col":0,"comment":"null","endLoc":1994,"id":1318,"nodeType":"Class","startLoc":1994,"text":"class RuntimeError(Exception): ..."},{"attributeType":"null","col":8,"comment":"null","endLoc":45,"id":1320,"name":"preferred_clock","nodeType":"Attribute","startLoc":45,"text":"preferred_clock"},{"attributeType":"function","col":4,"comment":"null","endLoc":47,"id":1328,"name":"preferred_clock","nodeType":"Attribute","startLoc":47,"text":"preferred_clock"},{"col":0,"comment":"","endLoc":10,"header":"cookies.py#","id":1332,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.cookies\n~~~~~~~~~~~~~~~~\n\nCompatibility code to be able to use `cookielib.CookieJar` with requests.\n\nrequests.utils imports from here, so be careful with imports.\n\"\"\"\n\ntry:\n import threading\nexcept ImportError:\n import dummy_threading as threading"},{"col":0,"comment":"","endLoc":9,"header":"sessions.py#","id":1333,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.sessions\n~~~~~~~~~~~~~~~~~\n\nThis module provides a Session object to manage and persist settings across\nrequests (cookies, auth, proxies).\n\"\"\"\n\nif sys.platform == 'win32':\n try: # Python 3.4+\n preferred_clock = time.perf_counter\n except AttributeError: # Earlier than Python 3.\n preferred_clock = time.clock\nelse:\n preferred_clock = time.time"},{"col":0,"comment":"Take an object and test to see if it can be represented as a\n dictionary. Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n ","endLoc":318,"header":"def from_key_val_list(value)","id":1337,"name":"from_key_val_list","nodeType":"Function","startLoc":294,"text":"def from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)"},{"col":4,"comment":"the server closes when leaving the context manager","endLoc":42,"header":"def test_server_closes(self)","id":1339,"name":"test_server_closes","nodeType":"Function","startLoc":32,"text":"def test_server_closes(self):\n \"\"\"the server closes when leaving the context manager\"\"\"\n with Server.basic_response_server() as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n\n sock.close()\n\n with pytest.raises(socket.error):\n new_sock = socket.socket()\n new_sock.connect((host, port))"},{"className":"FlaskyStyle","col":0,"comment":"null","endLoc":86,"id":1340,"nodeType":"Class","startLoc":7,"text":"class FlaskyStyle(Style):\n background_color = \"#f8f8f8\"\n default_style = \"\"\n\n styles = {\n # No corresponding class for the following:\n #Text: \"\", # class: ''\n Whitespace: \"underline #f8f8f8\", # class: 'w'\n Error: \"#a40000 border:#ef2929\", # class: 'err'\n Other: \"#000000\", # class 'x'\n\n Comment: \"italic #8f5902\", # class: 'c'\n Comment.Preproc: \"noitalic\", # class: 'cp'\n\n Keyword: \"bold #004461\", # class: 'k'\n Keyword.Constant: \"bold #004461\", # class: 'kc'\n Keyword.Declaration: \"bold #004461\", # class: 'kd'\n Keyword.Namespace: \"bold #004461\", # class: 'kn'\n Keyword.Pseudo: \"bold #004461\", # class: 'kp'\n Keyword.Reserved: \"bold #004461\", # class: 'kr'\n Keyword.Type: \"bold #004461\", # class: 'kt'\n\n Operator: \"#582800\", # class: 'o'\n Operator.Word: \"bold #004461\", # class: 'ow' - like keywords\n\n Punctuation: \"bold #000000\", # class: 'p'\n\n # because special names such as Name.Class, Name.Function, etc.\n # are not recognized as such later in the parsing, we choose them\n # to look the same as ordinary variables.\n Name: \"#000000\", # class: 'n'\n Name.Attribute: \"#c4a000\", # class: 'na' - to be revised\n Name.Builtin: \"#004461\", # class: 'nb'\n Name.Builtin.Pseudo: \"#3465a4\", # class: 'bp'\n Name.Class: \"#000000\", # class: 'nc' - to be revised\n Name.Constant: \"#000000\", # class: 'no' - to be revised\n Name.Decorator: \"#888\", # class: 'nd' - to be revised\n Name.Entity: \"#ce5c00\", # class: 'ni'\n Name.Exception: \"bold #cc0000\", # class: 'ne'\n Name.Function: \"#000000\", # class: 'nf'\n Name.Property: \"#000000\", # class: 'py'\n Name.Label: \"#f57900\", # class: 'nl'\n Name.Namespace: \"#000000\", # class: 'nn' - to be revised\n Name.Other: \"#000000\", # class: 'nx'\n Name.Tag: \"bold #004461\", # class: 'nt' - like a keyword\n Name.Variable: \"#000000\", # class: 'nv' - to be revised\n Name.Variable.Class: \"#000000\", # class: 'vc' - to be revised\n Name.Variable.Global: \"#000000\", # class: 'vg' - to be revised\n Name.Variable.Instance: \"#000000\", # class: 'vi' - to be revised\n\n Number: \"#990000\", # class: 'm'\n\n Literal: \"#000000\", # class: 'l'\n Literal.Date: \"#000000\", # class: 'ld'\n\n String: \"#4e9a06\", # class: 's'\n String.Backtick: \"#4e9a06\", # class: 'sb'\n String.Char: \"#4e9a06\", # 
class: 'sc'\n String.Doc: \"italic #8f5902\", # class: 'sd' - like a comment\n String.Double: \"#4e9a06\", # class: 's2'\n String.Escape: \"#4e9a06\", # class: 'se'\n String.Heredoc: \"#4e9a06\", # class: 'sh'\n String.Interpol: \"#4e9a06\", # class: 'si'\n String.Other: \"#4e9a06\", # class: 'sx'\n String.Regex: \"#4e9a06\", # class: 'sr'\n String.Single: \"#4e9a06\", # class: 's1'\n String.Symbol: \"#4e9a06\", # class: 'ss'\n\n Generic: \"#000000\", # class: 'g'\n Generic.Deleted: \"#a40000\", # class: 'gd'\n Generic.Emph: \"italic #000000\", # class: 'ge'\n Generic.Error: \"#ef2929\", # class: 'gr'\n Generic.Heading: \"bold #000080\", # class: 'gh'\n Generic.Inserted: \"#00A000\", # class: 'gi'\n Generic.Output: \"#888\", # class: 'go'\n Generic.Prompt: \"#745334\", # class: 'gp'\n Generic.Strong: \"bold #000000\", # class: 'gs'\n Generic.Subheading: \"bold #800080\", # class: 'gu'\n Generic.Traceback: \"bold #a40000\", # class: 'gt'\n }"},{"attributeType":"str","col":4,"comment":"null","endLoc":8,"id":1341,"name":"background_color","nodeType":"Attribute","startLoc":8,"text":"background_color"},{"col":4,"comment":"the text_response_server sends the given text","endLoc":57,"header":"def test_text_response(self)","id":1343,"name":"test_text_response","nodeType":"Function","startLoc":44,"text":"def test_text_response(self):\n \"\"\"the text_response_server sends the given text\"\"\"\n server = Server.text_response_server(\n \"HTTP/1.1 200 OK\\r\\n\" +\n \"Content-Length: 6\\r\\n\" +\n \"\\r\\nroflol\"\n )\n\n with server as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n\n assert r.status_code == 200\n assert r.text == u'roflol'\n assert r.headers['Content-Length'] == '6'"},{"col":4,"comment":"Reset num_401_calls counter on redirects.","endLoc":232,"header":"def handle_redirect(self, r, **kwargs)","id":1344,"name":"handle_redirect","nodeType":"Function","startLoc":229,"text":"def handle_redirect(self, r, **kwargs):\n \"\"\"Reset num_401_calls counter on redirects.\"\"\"\n if r.is_redirect:\n self._thread_local.num_401_calls = 1"},{"col":4,"comment":"\n Takes the given response and tries digest-auth, if needed.\n\n :rtype: requests.Response\n ","endLoc":276,"header":"def handle_401(self, r, **kwargs)","id":1345,"name":"handle_401","nodeType":"Function","startLoc":234,"text":"def handle_401(self, r, **kwargs):\n \"\"\"\n Takes the given response and tries digest-auth, if needed.\n\n :rtype: requests.Response\n \"\"\"\n\n # If response is not 4xx, do not auth\n # See https://github.com/psf/requests/issues/3772\n if not 400 <= r.status_code < 500:\n self._thread_local.num_401_calls = 1\n return r\n\n if self._thread_local.pos is not None:\n # Rewind the file position indicator of the body to where\n # it was to resend the request.\n r.request.body.seek(self._thread_local.pos)\n s_auth = r.headers.get('www-authenticate', '')\n\n if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:\n\n self._thread_local.num_401_calls += 1\n pat = re.compile(r'digest ', flags=re.IGNORECASE)\n self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))\n\n # Consume content and release the original connection\n # to allow our new request to reuse the same one.\n r.content\n r.close()\n prep = r.request.copy()\n extract_cookies_to_jar(prep._cookies, r.request, r.raw)\n prep.prepare_cookies(prep._cookies)\n\n prep.headers['Authorization'] = self.build_digest_header(\n prep.method, prep.url)\n _r = r.connection.send(prep, **kwargs)\n _r.history.append(r)\n 
_r.request = prep\n\n return _r\n\n self._thread_local.num_401_calls = 1\n return r"},{"col":4,"comment":"null","endLoc":296,"header":"def __call__(self, r)","id":1349,"name":"__call__","nodeType":"Function","startLoc":278,"text":"def __call__(self, r):\n # Initialize per-thread state, if needed\n self.init_per_thread_state()\n # If we have a saved nonce, skip the 401\n if self._thread_local.last_nonce:\n r.headers['Authorization'] = self.build_digest_header(r.method, r.url)\n try:\n self._thread_local.pos = r.body.tell()\n except AttributeError:\n # In the case of HTTPDigestAuth being reused and the body of\n # the previous request was a file-like object, pos has the\n # file position of the previous body. Ensure it's set to\n # None.\n self._thread_local.pos = None\n r.register_hook('response', self.handle_401)\n r.register_hook('response', self.handle_redirect)\n self._thread_local.num_401_calls = 1\n\n return r"},{"id":1350,"name":"vulnerabilities.rst","nodeType":"TextFile","path":"docs/community","text":"Vulnerability Disclosure\n========================\n\nIf you think you have found a potential security vulnerability in requests,\nplease email `Nate `_ and `Seth `_ directly. **Do not file a public issue.**\n\nOur PGP Key fingerprints are:\n\n- 8722 7E29 AD9C FF5C FAC3 EA6A 44D3 FF97 B80D C864 (`@nateprewitt `_)\n\n- EDD5 6765 A9D8 4653 CBC8 A134 51B0 6736 1740 F5FC (`@sethmlarson `_)\n\nYou can also contact us on `Keybase `_ with the\nprofiles above if desired.\n\nIf English is not your first language, please try to describe the problem and\nits impact to the best of your ability. For greater detail, please use your\nnative language and we will try our best to translate it using online services.\n\nPlease also include the code you used to find the problem and the shortest\namount of code necessary to reproduce it.\n\nPlease do not disclose this to anyone else. We will retrieve a CVE identifier\nif necessary and give you full credit under whatever name or alias you provide.\nWe will only request an identifier when we have a fix and can publish it in a\nrelease.\n\nWe will respect your privacy and will only publicize your involvement if you\ngrant us permission.\n\nProcess\n-------\n\nThis following information discusses the process the requests project follows\nin response to vulnerability disclosures. If you are disclosing a\nvulnerability, this section of the documentation lets you know how we will\nrespond to your disclosure.\n\nTimeline\n~~~~~~~~\n\nWhen you report an issue, one of the project members will respond to you within\ntwo days *at the outside*. In most cases responses will be faster, usually\nwithin 12 hours. This initial response will at the very least confirm receipt\nof the report.\n\nIf we were able to rapidly reproduce the issue, the initial response will also\ncontain confirmation of the issue. If we are not, we will often ask for more\ninformation about the reproduction scenario.\n\nOur goal is to have a fix for any vulnerability released within two weeks of\nthe initial disclosure. This may potentially involve shipping an interim\nrelease that simply disables function while a more mature fix can be prepared,\nbut will in the vast majority of cases mean shipping a complete release as soon\nas possible.\n\nThroughout the fix process we will keep you up to speed with how the fix is\nprogressing. Once the fix is prepared, we will notify you that we believe we\nhave a fix. 
Often we will ask you to confirm the fix resolves the problem in\nyour environment, especially if we are not confident of our reproduction\nscenario.\n\nAt this point, we will prepare for the release. We will obtain a CVE number\nif one is required, providing you with full credit for the discovery. We will\nalso decide on a planned release date, and let you know when it is. This\nrelease date will *always* be on a weekday.\n\nAt this point we will reach out to our major downstream packagers to notify\nthem of an impending security-related patch so they can make arrangements. In\naddition, these packagers will be provided with the intended patch ahead of\ntime, to ensure that they are able to promptly release their downstream\npackages. Currently the list of people we actively contact *ahead of a public\nrelease* is:\n\n- Python Maintenance Team, Red Hat (python-maint@redhat.com)\n- Daniele Tricoli, Debian (@eriol)\n\nWe will notify these individuals at least a week ahead of our planned release\ndate to ensure that they have sufficient time to prepare. If you believe you\nshould be on this list, please let one of the maintainers know at one of the\nemail addresses at the top of this article.\n\nOn release day, we will push the patch to our public repository, along with an\nupdated changelog that describes the issue and credits you. We will then issue\na PyPI release containing the patch.\n\nAt this point, we will publicise the release. This will involve mails to\nmailing lists, Tweets, and all other communication mechanisms available to the\ncore team.\n\nWe will also explicitly mention which commits contain the fix to make it easier\nfor other distributors and users to easily patch their own versions of requests\nif upgrading is not an option.\n\nPrevious CVEs\n-------------\n\n- Fixed in 2.20.0\n - `CVE 2018-18074 `_\n\n- Fixed in 2.6.0\n\n - `CVE 2015-2296 `_,\n reported by Matthew Daley of `BugFuzz `_.\n\n- Fixed in 2.3.0\n\n - `CVE 2014-1829 `_\n\n - `CVE 2014-1830 `_\n"},{"col":4,"comment":"null","endLoc":302,"header":"def __eq__(self, other)","id":1351,"name":"__eq__","nodeType":"Function","startLoc":298,"text":"def __eq__(self, other):\n return all([\n self.username == getattr(other, 'username', None),\n self.password == getattr(other, 'password', None)\n ])"},{"id":1352,"name":"support.rst","nodeType":"TextFile","path":"docs/community","text":".. 
_support:\n\nSupport\n=======\n\nIf you have questions or issues about Requests, there are several options:\n\nStack Overflow\n--------------\n\nIf your question does not contain sensitive (possibly proprietary)\ninformation or can be properly anonymized, please ask a question on\n`Stack Overflow `_\nand use the tag ``python-requests``.\n\n\nFile an Issue\n-------------\n\nIf you notice some unexpected behaviour in Requests, or want to see support\nfor a new feature,\n`file an issue on GitHub `_.\n\n\nSend a Tweet\n------------\n\nIf your question is less than 280 characters, feel free to send a tweet to\n`@nateprewitt `_,\n`@sethmlarson `_, or\n`@sigmavirus24 `_.\n"},{"col":4,"comment":"null","endLoc":305,"header":"def __ne__(self, other)","id":1353,"name":"__ne__","nodeType":"Function","startLoc":304,"text":"def __ne__(self, other):\n return not self == other"},{"attributeType":"null","col":8,"comment":"null","endLoc":113,"id":1354,"name":"password","nodeType":"Attribute","startLoc":113,"text":"self.password"},{"attributeType":"null","col":0,"comment":"null","endLoc":71,"id":1355,"name":"release","nodeType":"Attribute","startLoc":71,"text":"release"},{"attributeType":"None","col":0,"comment":"null","endLoc":78,"id":1356,"name":"language","nodeType":"Attribute","startLoc":78,"text":"language"},{"attributeType":"list","col":0,"comment":"null","endLoc":88,"id":1357,"name":"exclude_patterns","nodeType":"Attribute","startLoc":88,"text":"exclude_patterns"},{"attributeType":"bool","col":0,"comment":"null","endLoc":95,"id":1358,"name":"add_function_parentheses","nodeType":"Attribute","startLoc":95,"text":"add_function_parentheses"},{"attributeType":"bool","col":0,"comment":"null","endLoc":99,"id":1359,"name":"add_module_names","nodeType":"Attribute","startLoc":99,"text":"add_module_names"},{"attributeType":"_local","col":8,"comment":"null","endLoc":115,"id":1360,"name":"_thread_local","nodeType":"Attribute","startLoc":115,"text":"self._thread_local"},{"attributeType":"null","col":8,"comment":"null","endLoc":112,"id":1361,"name":"username","nodeType":"Attribute","startLoc":112,"text":"self.username"},{"attributeType":"str","col":0,"comment":"null","endLoc":106,"id":1362,"name":"pygments_style","nodeType":"Attribute","startLoc":106,"text":"pygments_style"},{"attributeType":"bool","col":0,"comment":"null","endLoc":115,"id":1363,"name":"todo_include_todos","nodeType":"Attribute","startLoc":115,"text":"todo_include_todos"},{"attributeType":"str","col":0,"comment":"null","endLoc":122,"id":1364,"name":"html_theme","nodeType":"Attribute","startLoc":122,"text":"html_theme"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":127,"id":1365,"name":"html_theme_options","nodeType":"Attribute","startLoc":127,"text":"html_theme_options"},{"col":4,"comment":"the basic response server returns an empty http response","endLoc":65,"header":"def test_basic_response(self)","id":1366,"name":"test_basic_response","nodeType":"Function","startLoc":59,"text":"def test_basic_response(self):\n \"\"\"the basic response server returns an empty http response\"\"\"\n with Server.basic_response_server() as (host, port):\n r = requests.get('http://{}:{}'.format(host, port))\n assert r.status_code == 200\n assert r.text == u''\n assert r.headers['Content-Length'] == '0'"},{"col":4,"comment":"the server waits for the block_server event to be set before closing","endLoc":77,"header":"def test_basic_waiting_server(self)","id":1367,"name":"test_basic_waiting_server","nodeType":"Function","startLoc":67,"text":"def 
test_basic_waiting_server(self):\n \"\"\"the server waits for the block_server event to be set before closing\"\"\"\n block_server = threading.Event()\n\n with Server.basic_response_server(wait_to_close_event=block_server) as (host, port):\n sock = socket.socket()\n sock.connect((host, port))\n sock.sendall(b'send something')\n time.sleep(2.5)\n sock.sendall(b'still alive')\n block_server.set() # release server block"},{"attributeType":"str","col":4,"comment":"null","endLoc":9,"id":1368,"name":"default_style","nodeType":"Attribute","startLoc":9,"text":"default_style"},{"attributeType":"str","col":0,"comment":"null","endLoc":24,"id":1369,"name":"CONTENT_TYPE_FORM_URLENCODED","nodeType":"Attribute","startLoc":24,"text":"CONTENT_TYPE_FORM_URLENCODED"},{"attributeType":"str","col":0,"comment":"null","endLoc":25,"id":1370,"name":"CONTENT_TYPE_MULTI_PART","nodeType":"Attribute","startLoc":25,"text":"CONTENT_TYPE_MULTI_PART"},{"attributeType":"dict","col":4,"comment":"null","endLoc":11,"id":1371,"name":"styles","nodeType":"Attribute","startLoc":11,"text":"styles"},{"attributeType":"list","col":0,"comment":"null","endLoc":158,"id":1372,"name":"html_static_path","nodeType":"Attribute","startLoc":158,"text":"html_static_path"},{"col":0,"comment":"","endLoc":8,"header":"auth.py#","id":1373,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.auth\n~~~~~~~~~~~~~\n\nThis module contains the authentication handlers for Requests.\n\"\"\"\n\nCONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'\n\nCONTENT_TYPE_MULTI_PART = 'multipart/form-data'"},{"attributeType":"bool","col":0,"comment":"null","endLoc":171,"id":1374,"name":"html_use_smartypants","nodeType":"Attribute","startLoc":171,"text":"html_use_smartypants"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":174,"id":1375,"name":"html_sidebars","nodeType":"Attribute","startLoc":174,"text":"html_sidebars"},{"attributeType":"bool","col":0,"comment":"null","endLoc":200,"id":1376,"name":"html_show_sourcelink","nodeType":"Attribute","startLoc":200,"text":"html_show_sourcelink"},{"attributeType":"bool","col":0,"comment":"null","endLoc":203,"id":1377,"name":"html_show_sphinx","nodeType":"Attribute","startLoc":203,"text":"html_show_sphinx"},{"attributeType":"bool","col":0,"comment":"null","endLoc":206,"id":1378,"name":"html_show_copyright","nodeType":"Attribute","startLoc":206,"text":"html_show_copyright"},{"attributeType":"str","col":0,"comment":"null","endLoc":231,"id":1379,"name":"htmlhelp_basename","nodeType":"Attribute","startLoc":231,"text":"htmlhelp_basename"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":235,"id":1380,"name":"latex_elements","nodeType":"Attribute","startLoc":235,"text":"latex_elements"},{"attributeType":"list","col":0,"comment":"null","endLoc":249,"id":1381,"name":"latex_documents","nodeType":"Attribute","startLoc":249,"text":"latex_documents"},{"attributeType":"list","col":0,"comment":"null","endLoc":278,"id":1382,"name":"man_pages","nodeType":"Attribute","startLoc":278,"text":"man_pages"},{"attributeType":"list","col":0,"comment":"null","endLoc":289,"id":1383,"name":"texinfo_documents","nodeType":"Attribute","startLoc":289,"text":"texinfo_documents"},{"attributeType":"str","col":0,"comment":"null","endLoc":317,"id":1384,"name":"epub_title","nodeType":"Attribute","startLoc":317,"text":"epub_title"},{"attributeType":"str","col":0,"comment":"null","endLoc":318,"id":1385,"name":"epub_author","nodeType":"Attribute","startLoc":318,"text":"epub_author"},{"attributeType":"str","col
":0,"comment":"null","endLoc":319,"id":1386,"name":"epub_publisher","nodeType":"Attribute","startLoc":319,"text":"epub_publisher"},{"attributeType":"str","col":0,"comment":"null","endLoc":320,"id":1387,"name":"epub_copyright","nodeType":"Attribute","startLoc":320,"text":"epub_copyright"},{"attributeType":"list","col":0,"comment":"null","endLoc":360,"id":1389,"name":"epub_exclude_files","nodeType":"Attribute","startLoc":360,"text":"epub_exclude_files"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":383,"id":1390,"name":"intersphinx_mapping","nodeType":"Attribute","startLoc":383,"text":"intersphinx_mapping"},{"col":0,"comment":"","endLoc":15,"header":"conf.py#","id":1391,"name":"","nodeType":"Function","startLoc":15,"text":"sys.path.insert(0, os.path.abspath(\"..\"))\n\nsys.path.insert(0, os.path.abspath(\"_themes\"))\n\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.todo\",\n \"sphinx.ext.viewcode\",\n]\n\ntemplates_path = [\"_templates\"]\n\nsource_suffix = \".rst\"\n\nmaster_doc = \"index\"\n\nproject = u\"Requests\"\n\ncopyright = u'MMXVIX. A Kenneth Reitz Project'\n\nauthor = u\"Kenneth Reitz\"\n\nversion = requests.__version__\n\nrelease = requests.__version__\n\nlanguage = None\n\nexclude_patterns = [\"_build\"]\n\nadd_function_parentheses = False\n\nadd_module_names = True\n\npygments_style = \"flask_theme_support.FlaskyStyle\"\n\ntodo_include_todos = True\n\nhtml_theme = \"alabaster\"\n\nhtml_theme_options = {\n \"show_powered_by\": False,\n \"github_user\": \"requests\",\n \"github_repo\": \"requests\",\n \"github_banner\": True,\n \"show_related\": False,\n \"note_bg\": \"#FFF59C\",\n}\n\nhtml_static_path = [\"_static\"]\n\nhtml_use_smartypants = False\n\nhtml_sidebars = {\n \"index\": [\"sidebarintro.html\", \"sourcelink.html\", \"searchbox.html\", \"hacks.html\"],\n \"**\": [\n \"sidebarlogo.html\",\n \"localtoc.html\",\n \"relations.html\",\n \"sourcelink.html\",\n \"searchbox.html\",\n \"hacks.html\",\n ],\n}\n\nhtml_show_sourcelink = False\n\nhtml_show_sphinx = False\n\nhtml_show_copyright = True\n\nhtmlhelp_basename = \"Requestsdoc\"\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #'preamble': '',\n # Latex figure (float) alignment\n #'figure_align': 'htbp',\n}\n\nlatex_documents = [\n (master_doc, \"Requests.tex\", u\"Requests Documentation\", u\"Kenneth Reitz\", \"manual\")\n]\n\nman_pages = [(master_doc, \"requests\", u\"Requests Documentation\", [author], 1)]\n\ntexinfo_documents = [\n (\n master_doc,\n \"Requests\",\n u\"Requests Documentation\",\n author,\n \"Requests\",\n \"One line description of project.\",\n \"Miscellaneous\",\n )\n]\n\nepub_title = project\n\nepub_author = author\n\nepub_publisher = author\n\nepub_copyright = copyright\n\nepub_exclude_files = [\"search.html\"]\n\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"urllib3\": (\"https://urllib3.readthedocs.io/en/latest\", None),\n}"},{"col":4,"comment":"multiple requests can be served","endLoc":93,"header":"def test_multiple_requests(self)","id":1392,"name":"test_multiple_requests","nodeType":"Function","startLoc":79,"text":"def test_multiple_requests(self):\n \"\"\"multiple requests can be served\"\"\"\n requests_to_handle = 5\n\n server = Server.basic_response_server(requests_to_handle=requests_to_handle)\n\n with server as (host, port):\n 
server_url = 'http://{}:{}'.format(host, port)\n for _ in range(requests_to_handle):\n r = requests.get(server_url)\n assert r.status_code == 200\n\n # the (n+1)th request fails\n with pytest.raises(requests.exceptions.ConnectionError):\n r = requests.get(server_url)"},{"col":0,"comment":"","endLoc":41,"header":"__init__.py#","id":1394,"name":"","nodeType":"Function","startLoc":8,"text":"\"\"\"\nRequests HTTP Library\n~~~~~~~~~~~~~~~~~~~~~\n\nRequests is an HTTP library, written in Python, for human beings.\nBasic GET usage:\n\n >>> import requests\n >>> r = requests.get('https://www.python.org')\n >>> r.status_code\n 200\n >>> b'Python is a programming language' in r.content\n True\n\n... or POST:\n\n >>> payload = dict(key1='value1', key2='value2')\n >>> r = requests.post('https://httpbin.org/post', data=payload)\n >>> print(r.text)\n {\n ...\n \"form\": {\n \"key1\": \"value1\",\n \"key2\": \"value2\"\n },\n ...\n }\n\nThe other HTTP methods are supported - see `requests.api`. Full documentation\nis at .\n\n:copyright: (c) 2017 by Kenneth Reitz.\n:license: Apache 2.0, see LICENSE for more details.\n\"\"\"\n\ntry:\n from charset_normalizer import __version__ as charset_normalizer_version\nexcept ImportError:\n charset_normalizer_version = None\n\ntry:\n from chardet import __version__ as chardet_version\nexcept ImportError:\n chardet_version = None\n\ntry:\n check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)\nexcept (AssertionError, ValueError):\n warnings.warn(\"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported \"\n \"version!\".format(urllib3.__version__, chardet_version, charset_normalizer_version),\n RequestsDependencyWarning)\n\ntry:\n try:\n import ssl\n except ImportError:\n ssl = None\n\n if not getattr(ssl, \"HAS_SNI\", False):\n from urllib3.contrib import pyopenssl\n pyopenssl.inject_into_urllib3()\n\n # Check cryptography version\n from cryptography import __version__ as cryptography_version\n _check_cryptography(cryptography_version)\nexcept ImportError:\n pass\n\nwarnings.simplefilter('ignore', DependencyWarning)\n\nlogging.getLogger(__name__).addHandler(NullHandler())\n\nwarnings.simplefilter('default', FileModeWarning, append=True)"},{"col":4,"comment":"null","endLoc":105,"header":"def test_super_len_with__len__(self)","id":1398,"name":"test_super_len_with__len__","nodeType":"Function","startLoc":102,"text":"def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4"},{"col":4,"comment":"null","endLoc":112,"header":"def test_super_len_with_no__len__(self)","id":1399,"name":"test_super_len_with_no__len__","nodeType":"Function","startLoc":107,"text":"def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5"},{"col":4,"comment":"null","endLoc":118,"header":"def test_super_len_with_tell(self)","id":1401,"name":"test_super_len_with_tell","nodeType":"Function","startLoc":114,"text":"def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3"},{"col":4,"comment":"null","endLoc":124,"header":"def test_super_len_with_fileno(self)","id":1402,"name":"test_super_len_with_fileno","nodeType":"Function","startLoc":120,"text":"def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == len(file_data)"},{"col":4,"comment":"can check the 
requests content","endLoc":116,"header":"@pytest.mark.skip(reason=\"this fails non-deterministically under pytest-xdist\")\n def test_request_recovery(self)","id":1410,"name":"test_request_recovery","nodeType":"Function","startLoc":95,"text":"@pytest.mark.skip(reason=\"this fails non-deterministically under pytest-xdist\")\n def test_request_recovery(self):\n \"\"\"can check the requests content\"\"\"\n # TODO: figure out why this sometimes fails when using pytest-xdist.\n server = Server.basic_response_server(requests_to_handle=2)\n first_request = b'put your hands up in the air'\n second_request = b'put your hand down in the floor'\n\n with server as address:\n sock1 = socket.socket()\n sock2 = socket.socket()\n\n sock1.connect(address)\n sock1.sendall(first_request)\n sock1.close()\n\n sock2.connect(address)\n sock2.sendall(second_request)\n sock2.close()\n\n assert server.handler_results[0] == first_request\n assert server.handler_results[1] == second_request"},{"col":4,"comment":"the basic response handler times out when receiving requests","endLoc":129,"header":"def test_requests_after_timeout_are_not_received(self)","id":1412,"name":"test_requests_after_timeout_are_not_received","nodeType":"Function","startLoc":118,"text":"def test_requests_after_timeout_are_not_received(self):\n \"\"\"the basic response handler times out when receiving requests\"\"\"\n server = Server.basic_response_server(request_timeout=1)\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n sock.sendall(b'hehehe, not received')\n sock.close()\n\n assert server.handler_results[0] == b''"},{"col":4,"comment":"a biggest timeout can be specified","endLoc":143,"header":"def test_request_recovery_with_bigger_timeout(self)","id":1413,"name":"test_request_recovery_with_bigger_timeout","nodeType":"Function","startLoc":131,"text":"def test_request_recovery_with_bigger_timeout(self):\n \"\"\"a biggest timeout can be specified\"\"\"\n server = Server.basic_response_server(request_timeout=3)\n data = b'bananadine'\n\n with server as address:\n sock = socket.socket()\n sock.connect(address)\n time.sleep(1.5)\n sock.sendall(data)\n sock.close()\n\n assert server.handler_results[0] == data"},{"col":4,"comment":"the server thread exits even if an exception exits the context manager","endLoc":155,"header":"def test_server_finishes_on_error(self)","id":1414,"name":"test_server_finishes_on_error","nodeType":"Function","startLoc":145,"text":"def test_server_finishes_on_error(self):\n \"\"\"the server thread exits even if an exception exits the context manager\"\"\"\n server = Server.basic_response_server()\n with pytest.raises(Exception):\n with server:\n raise Exception()\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout."},{"col":4,"comment":"the server thread exits even if there are no connections","endLoc":166,"header":"def test_server_finishes_when_no_connections(self)","id":1415,"name":"test_server_finishes_when_no_connections","nodeType":"Function","startLoc":157,"text":"def test_server_finishes_when_no_connections(self):\n \"\"\"the server thread exits even if there are no connections\"\"\"\n server = Server.basic_response_server()\n with server:\n pass\n\n assert len(server.handler_results) == 0\n\n # if the server thread fails to finish, the test suite will hang\n # and get killed by the jenkins timeout."},{"col":0,"comment":"Parse lists as described by RFC 2068 Section 2.\n\n 
In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n ","endLoc":379,"header":"def parse_list_header(value)","id":1416,"name":"parse_list_header","nodeType":"Function","startLoc":351,"text":"def parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result"},{"col":0,"comment":"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n ","endLoc":455,"header":"def dict_from_cookiejar(cj)","id":1417,"name":"dict_from_cookiejar","nodeType":"Function","startLoc":443,"text":"def dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict"},{"col":0,"comment":"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n ","endLoc":600,"header":"def get_unicode_from_response(r)","id":1418,"name":"get_unicode_from_response","nodeType":"Function","startLoc":567,"text":"def get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content"},{"attributeType":"null","col":39,"comment":"null","endLoc":29,"id":1419,"name":"_parse_list_header","nodeType":"Attribute","startLoc":29,"text":"_parse_list_header"},{"attributeType":"(str, str)","col":0,"comment":"null","endLoc":39,"id":1420,"name":"NETRC_FILES","nodeType":"Attribute","startLoc":39,"text":"NETRC_FILES"},{"attributeType":"LiteralString","col":0,"comment":"null","endLoc":46,"id":1421,"name":"DEFAULT_ACCEPT_ENCODING","nodeType":"Attribute","startLoc":46,"text":"DEFAULT_ACCEPT_ENCODING"},{"col":4,"comment":"Ensure that objects without any length methods default to 0","endLoc":128,"header":"def test_super_len_with_no_matches(self)","id":1423,"name":"test_super_len_with_no_matches","nodeType":"Function","startLoc":126,"text":"def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0"},{"className":"TestToKeyValList","col":0,"comment":"null","endLoc":145,"id":1424,"nodeType":"Class","startLoc":131,"text":"class TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')"},{"col":4,"comment":"null","endLoc":141,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key'","id":1425,"name":"test_valid","nodeType":"Function","startLoc":133,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected"},{"col":4,"comment":"null","endLoc":145,"header":"def test_invalid(self)","id":1426,"name":"test_invalid","nodeType":"Function","startLoc":143,"text":"def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')"},{"className":"TestUnquoteHeaderValue","col":0,"comment":"null","endLoc":162,"id":1427,"nodeType":"Class","startLoc":148,"text":"class TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'"},{"col":4,"comment":"null","endLoc":159,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, 
expected)","id":1428,"name":"test_valid","nodeType":"Function","startLoc":150,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected"},{"attributeType":"frozenset","col":0,"comment":"null","endLoc":604,"id":1429,"name":"UNRESERVED_SET","nodeType":"Attribute","startLoc":604,"text":"UNRESERVED_SET"},{"col":4,"comment":"null","endLoc":162,"header":"def test_is_filename(self)","id":1430,"name":"test_is_filename","nodeType":"Function","startLoc":161,"text":"def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'"},{"className":"TestGetEnvironProxies","col":0,"comment":"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n ","endLoc":217,"id":1431,"nodeType":"Class","startLoc":165,"text":"class TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}"},{"col":4,"comment":"null","endLoc":172,"header":"@pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch)","id":1432,"name":"no_proxy","nodeType":"Function","startLoc":170,"text":"@pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')"},{"col":4,"comment":"null","endLoc":183,"header":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url)","id":1433,"name":"test_bypass","nodeType":"Function","startLoc":174,"text":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 
'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}"},{"col":4,"comment":"null","endLoc":192,"header":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url)","id":1434,"name":"test_not_bypass","nodeType":"Function","startLoc":185,"text":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}"},{"col":4,"comment":"null","endLoc":202,"header":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url)","id":1435,"name":"test_bypass_no_proxy_keyword","nodeType":"Function","startLoc":194,"text":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}"},{"col":4,"comment":"null","endLoc":217,"header":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch)","id":1436,"name":"test_not_bypass_no_proxy_keyword","nodeType":"Function","startLoc":204,"text":"@pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}"},{"className":"TestIsIPv4Address","col":0,"comment":"null","endLoc":227,"id":1437,"nodeType":"Class","startLoc":220,"text":"class TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)"},{"col":4,"comment":"null","endLoc":223,"header":"def test_valid(self)","id":1438,"name":"test_valid","nodeType":"Function","startLoc":222,"text":"def test_valid(self):\n assert is_ipv4_address('8.8.8.8')"},{"col":4,"comment":"null","endLoc":227,"header":"@pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value)","id":1439,"name":"test_invalid","nodeType":"Function","startLoc":225,"text":"@pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)"},{"className":"TestIsValidCIDR","col":0,"comment":"null","endLoc":244,"id":1440,"nodeType":"Class","startLoc":230,"text":"class TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def 
test_invalid(self, value):\n assert not is_valid_cidr(value)"},{"col":4,"comment":"null","endLoc":233,"header":"def test_valid(self)","id":1441,"name":"test_valid","nodeType":"Function","startLoc":232,"text":"def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')"},{"col":4,"comment":"null","endLoc":244,"header":"@pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value)","id":1442,"name":"test_invalid","nodeType":"Function","startLoc":235,"text":"@pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)"},{"className":"TestAddressInNetwork","col":0,"comment":"null","endLoc":253,"id":1443,"nodeType":"Class","startLoc":247,"text":"class TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')"},{"col":4,"comment":"null","endLoc":250,"header":"def test_valid(self)","id":1444,"name":"test_valid","nodeType":"Function","startLoc":249,"text":"def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')"},{"col":4,"comment":"null","endLoc":253,"header":"def test_invalid(self)","id":1445,"name":"test_invalid","nodeType":"Function","startLoc":252,"text":"def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')"},{"className":"TestGuessFilename","col":0,"comment":"null","endLoc":273,"id":1446,"nodeType":"Class","startLoc":256,"text":"class TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == value\n assert isinstance(result, expected_type)"},{"col":4,"comment":"null","endLoc":262,"header":"@pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name'","id":1447,"name":"test_guess_filename_invalid","nodeType":"Function","startLoc":258,"text":"@pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None"},{"attributeType":"bytes","col":0,"comment":"null","endLoc":923,"id":1448,"name":"_null","nodeType":"Attribute","startLoc":923,"text":"_null"},{"attributeType":"bytes","col":0,"comment":"null","endLoc":924,"id":1449,"name":"_null2","nodeType":"Attribute","startLoc":924,"text":"_null2"},{"col":4,"comment":"null","endLoc":273,"header":"@pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type)","id":1451,"name":"test_guess_filename_valid","nodeType":"Function","startLoc":264,"text":"@pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == 
value\n assert isinstance(result, expected_type)"},{"attributeType":"bytes","col":0,"comment":"null","endLoc":925,"id":1452,"name":"_null3","nodeType":"Attribute","startLoc":925,"text":"_null3"},{"attributeType":"Pattern","col":0,"comment":"null","endLoc":994,"id":1453,"name":"_CLEAN_HEADER_REGEX_BYTE","nodeType":"Attribute","startLoc":994,"text":"_CLEAN_HEADER_REGEX_BYTE"},{"className":"TestExtractZippedPaths","col":0,"comment":"null","endLoc":303,"id":1454,"nodeType":"Class","startLoc":276,"text":"class TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path"},{"col":4,"comment":"null","endLoc":286,"header":"@pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path)","id":1455,"name":"test_unzipped_paths_unchanged","nodeType":"Function","startLoc":278,"text":"@pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)"},{"col":4,"comment":"null","endLoc":299,"header":"def test_zipped_paths_extracted(self, tmpdir)","id":1456,"name":"test_zipped_paths_extracted","nodeType":"Function","startLoc":288,"text":"def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)"},{"attributeType":"Pattern","col":0,"comment":"null","endLoc":995,"id":1457,"name":"_CLEAN_HEADER_REGEX_STR","nodeType":"Attribute","startLoc":995,"text":"_CLEAN_HEADER_REGEX_STR"},{"col":0,"comment":"","endLoc":9,"header":"utils.py#","id":1458,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n 
r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '':\n if '.' not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n\n_null2 = _null * 2\n\n_null3 = _null * 3\n\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')"},{"col":4,"comment":"null","endLoc":303,"header":"def test_invalid_unc_path(self)","id":1464,"name":"test_invalid_unc_path","nodeType":"Function","startLoc":301,"text":"def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path"},{"className":"TestContentEncodingDetection","col":0,"comment":"null","endLoc":334,"id":1465,"nodeType":"Class","startLoc":306,"text":"class TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '',\n # HTML4 pragma directive\n '',\n # XHTML 1.x served with text/html MIME type\n '',\n # XHTML 1.x served as XML\n '',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n \n \n \n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']"},{"col":4,"comment":"null","endLoc":310,"header":"def test_none(self)","id":1466,"name":"test_none","nodeType":"Function","startLoc":308,"text":"def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)"},{"col":4,"comment":"null","endLoc":326,"header":"@pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '',\n # HTML4 pragma directive\n '',\n # XHTML 1.x served with text/html MIME type\n '',\n # XHTML 1.x served as XML\n '',\n ))\n def test_pragmas(self, content)","id":1467,"name":"test_pragmas","nodeType":"Function","startLoc":312,"text":"@pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '',\n # HTML4 pragma directive\n '',\n # XHTML 1.x served with text/html MIME type\n '',\n # XHTML 1.x served as XML\n '',\n ))\n def test_pragmas(self, content):\n 
encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'"},{"col":4,"comment":"null","endLoc":334,"header":"def test_precedence(self)","id":1468,"name":"test_precedence","nodeType":"Function","startLoc":328,"text":"def test_precedence(self):\n content = '''\n \n \n \n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']"},{"className":"TestGuessJSONUTF","col":0,"comment":"null","endLoc":360,"id":1471,"nodeType":"Class","startLoc":337,"text":"class TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected"},{"col":4,"comment":"null","endLoc":346,"header":"@pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding)","id":1472,"name":"test_encoded","nodeType":"Function","startLoc":339,"text":"@pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding"},{"col":4,"comment":"null","endLoc":349,"header":"def test_bad_utf_like_encoding(self)","id":1473,"name":"test_bad_utf_like_encoding","nodeType":"Function","startLoc":348,"text":"def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None"},{"col":4,"comment":"null","endLoc":360,"header":"@pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected)","id":1474,"name":"test_guess_by_bom","nodeType":"Function","startLoc":351,"text":"@pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected"},{"col":0,"comment":"null","endLoc":401,"header":"@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http","id":1475,"name":"test_get_auth_from_url","nodeType":"Function","startLoc":368,"text":"@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n ('user%user', 'pass')\n ),\n (\n 
'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth"},{"col":0,"comment":"See: https://github.com/psf/requests/issues/2356","endLoc":419,"header":"@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http","id":1476,"name":"test_requote_uri_with_unquoted_percents","nodeType":"Function","startLoc":404,"text":"@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/psf/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected"},{"col":0,"comment":"null","endLoc":436,"header":"@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http","id":1477,"name":"test_unquote_unreserved","nodeType":"Function","startLoc":422,"text":"@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected"},{"col":0,"comment":"null","endLoc":446,"header":"@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected)","id":1478,"name":"test_dotted_netmask","nodeType":"Function","startLoc":439,"text":"@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected"},{"col":0,"comment":"Make sure we can select per-host proxies correctly.","endLoc":479,"header":"@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp","id":1479,"name":"test_select_proxies","nodeType":"Function","startLoc":456,"text":"@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://u:p@some.host/path', 
'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected"},{"col":0,"comment":"null","endLoc":488,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value'","id":1480,"name":"test_parse_dict_header","nodeType":"Function","startLoc":482,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected"},{"col":0,"comment":"null","endLoc":531,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset'","id":1481,"name":"test__parse_content_type_header","nodeType":"Function","startLoc":491,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected"},{"col":0,"comment":"null","endLoc":550,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type'","id":1482,"name":"test_get_encoding_from_headers","nodeType":"Function","startLoc":534,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected"},{"col":0,"comment":"null","endLoc":567,"header":"@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, 
length)","id":1483,"name":"test_iter_slices","nodeType":"Function","startLoc":553,"text":"@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length"},{"col":0,"comment":"null","endLoc":597,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n (\n '; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n ';',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '; type=\"image/jpeg\",;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected"},{"col":0,"comment":"null","endLoc":606,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http","id":1485,"name":"test_prepend_scheme_if_needed","nodeType":"Function","startLoc":600,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert prepend_scheme_if_needed(value, 'http') == expected"},{"col":0,"comment":"null","endLoc":616,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected)","id":1486,"name":"test_to_native_string","nodeType":"Function","startLoc":609,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected"},{"col":0,"comment":"null","endLoc":629,"header":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http","id":1487,"name":"test_urldefragauth","nodeType":"Function","startLoc":619,"text":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected"},{"col":0,"comment":"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n ","endLoc":651,"header":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http","id":1488,"name":"test_should_bypass_proxies","nodeType":"Function","startLoc":632,"text":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', 
False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ('file:///some/path/on/disk', True),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected"},{"col":0,"comment":"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n ","endLoc":671,"header":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http","id":1489,"name":"test_should_bypass_proxies_pass_only_hostname","nodeType":"Function","startLoc":654,"text":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)"},{"col":0,"comment":"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n ","endLoc":687,"header":"@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar)","id":1490,"name":"test_add_dict_to_cookiejar","nodeType":"Function","startLoc":674,"text":"@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = {cookie.name: cookie.value for cookie in cj}\n assert cookiedict == cookies"},{"col":0,"comment":"null","endLoc":698,"header":"@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected)","id":1491,"name":"test_unicode_is_ascii","nodeType":"Function","startLoc":690,"text":"@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected"},{"col":0,"comment":"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n ","endLoc":719,"header":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http","id":1492,"name":"test_should_bypass_proxies_no_proxy","nodeType":"Function","startLoc":701,"text":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n 
('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected"},{"col":0,"comment":"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n ","endLoc":774,"header":"@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http","id":1493,"name":"test_should_bypass_proxies_win_registry","nodeType":"Function","startLoc":722,"text":"@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, ''),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected"},{"col":0,"comment":"Tests set_environ will set environ values and will restore the environ.","endLoc":790,"header":"@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value)","id":1494,"name":"test_set_environ","nodeType":"Function","startLoc":777,"text":"@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will 
restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy"},{"col":0,"comment":"Tests set_environ will raise exceptions in context when the\n value parameter is None.","endLoc":800,"header":"def test_set_environ_raises_exception()","id":1496,"name":"test_set_environ_raises_exception","nodeType":"Function","startLoc":793,"text":"def test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)"},{"attributeType":"str","col":0,"comment":"null","endLoc":363,"id":1497,"name":"USER","nodeType":"Attribute","startLoc":363,"text":"USER"},{"fileName":"test_requests.py","filePath":"tests","id":1498,"nodeType":"File","text":"# -*- coding: utf-8 -*-\n\n\"\"\"Tests for Requests.\"\"\"\n\nfrom __future__ import division\nimport json\nimport os\nimport pickle\nimport collections\nimport contextlib\nimport warnings\nimport re\n\nimport io\nimport requests\nimport pytest\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n Morsel, cookielib, getproxies, str, urlparse,\n builtin_str)\nfrom requests.cookies import (\n cookiejar_from_dict, morsel_to_cookie)\nfrom requests.exceptions import (\n ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,\n MissingSchema, ReadTimeout, Timeout, RetryError, TooManyRedirects,\n ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)\nfrom requests.models import PreparedRequest\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.models import urlencode\nfrom requests.hooks import default_hooks\nfrom requests.compat import MutableMapping\n\nfrom .compat import StringIO, u\nfrom .utils import override_environ\nfrom urllib3.util import Timeout as Urllib3Timeout\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = 'http://10.255.255.1'\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert 
pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n 
assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 
'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, 
httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def 
test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\\xc5\\xafsername', b'test\\xc6\\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n 
with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n 
def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', 
'500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 
'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 
'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',\n 'link': ('; rel=\"next\", ; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n 
assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with 
pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', 
HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {'foo': 'bar\\r\\nbaz: qux'}\n headers_lf = {'foo': 'bar\\nbaz: qux'}\n headers_cr = {'foo': 'bar\\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': ' bar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def 
test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n 
requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as 
response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite 
``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def 
test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n 
try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok\n\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), 
{})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n 
u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError):\n r.json()\n"},{"attributeType":"str","col":7,"comment":"null","endLoc":363,"id":1499,"name":"PASSWORD","nodeType":"Attribute","startLoc":363,"text":"PASSWORD"},{"attributeType":"str","col":0,"comment":"null","endLoc":364,"id":1500,"name":"ENCODED_USER","nodeType":"Attribute","startLoc":364,"text":"ENCODED_USER"},{"attributeType":"_Feature","col":0,"comment":"null","endLoc":12,"id":1501,"name":"division","nodeType":"Attribute","startLoc":12,"text":"division"},{"className":"SSLContext","col":0,"comment":"null","endLoc":451,"id":1502,"nodeType":"Class","startLoc":382,"text":"class SSLContext:\n check_hostname: bool\n options: Options\n verify_flags: VerifyFlags\n verify_mode: VerifyMode\n @property\n def protocol(self) -> _SSLMethod: ...\n hostname_checks_common_name: bool\n maximum_version: TLSVersion\n minimum_version: TLSVersion\n sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None\n # The following two attributes have class-level defaults.\n # However, the docs explicitly state that it's OK to override these attributes on instances,\n # so making these ClassVars wouldn't be appropriate\n sslobject_class: type[SSLObject]\n sslsocket_class: type[SSLSocket]\n keylog_filename: str\n post_handshake_auth: bool\n if sys.version_info >= (3, 10):\n security_level: int\n if sys.version_info >= (3, 10):\n # Using the default (None) for the `protocol` parameter is deprecated,\n # but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted\n def __new__(cls, protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ...\n else:\n def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ...\n\n def cert_store_stats(self) -> dict[str, int]: ...\n def load_cert_chain(\n self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None\n ) -> None: ...\n def load_default_certs(self, purpose: Purpose = ...) 
-> None: ...\n def load_verify_locations(\n self,\n cafile: StrOrBytesPath | None = None,\n capath: StrOrBytesPath | None = None,\n cadata: str | ReadableBuffer | None = None,\n ) -> None: ...\n @overload\n def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ...\n @overload\n def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ...\n @overload\n def get_ca_certs(self, binary_form: bool = False) -> Any: ...\n def get_ciphers(self) -> list[_Cipher]: ...\n def set_default_verify_paths(self) -> None: ...\n def set_ciphers(self, cipherlist: str, /) -> None: ...\n def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ...\n def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ...\n def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ...\n def load_dh_params(self, path: str, /) -> None: ...\n def set_ecdh_curve(self, name: str, /) -> None: ...\n def wrap_socket(\n self,\n sock: socket.socket,\n server_side: bool = False,\n do_handshake_on_connect: bool = True,\n suppress_ragged_eofs: bool = True,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLSocket: ...\n def wrap_bio(\n self,\n incoming: MemoryBIO,\n outgoing: MemoryBIO,\n server_side: bool = False,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLObject: ...\n def session_stats(self) -> dict[str, int]: ..."},{"attributeType":"str","col":0,"comment":"null","endLoc":365,"id":1503,"name":"ENCODED_PASSWORD","nodeType":"Attribute","startLoc":365,"text":"ENCODED_PASSWORD"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":449,"id":1504,"name":"http_proxies","nodeType":"Attribute","startLoc":449,"text":"http_proxies"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":451,"id":1505,"name":"all_proxies","nodeType":"Attribute","startLoc":451,"text":"all_proxies"},{"attributeType":"TypedDict","col":0,"comment":"null","endLoc":453,"id":1506,"name":"mixed_proxies","nodeType":"Attribute","startLoc":453,"text":"mixed_proxies"},{"col":0,"comment":"","endLoc":3,"header":"test_utils.py#","id":1507,"name":"","nodeType":"Function","startLoc":3,"text":"USER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\n\nENCODED_USER = compat.quote(USER, '')\n\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\n\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\n\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}"},{"col":4,"comment":"null","endLoc":388,"header":"@property\n def protocol(self) -> _SSLMethod","id":1508,"name":"protocol","nodeType":"Function","startLoc":387,"text":"@property\n def protocol(self) -> _SSLMethod: ..."},{"col":8,"comment":"null","endLoc":407,"header":"def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self","id":1509,"name":"__new__","nodeType":"Function","startLoc":407,"text":"def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ..."},{"col":4,"comment":"null","endLoc":409,"header":"def cert_store_stats(self) -> dict[str, int]","id":1510,"name":"cert_store_stats","nodeType":"Function","startLoc":409,"text":"def cert_store_stats(self) -> dict[str, int]: ..."},{"col":4,"comment":"null","endLoc":412,"header":"def load_cert_chain(\n self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | 
None = None, password: _PasswordType | None = None\n ) -> None","id":1511,"name":"load_cert_chain","nodeType":"Function","startLoc":410,"text":"def load_cert_chain(\n self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":413,"header":"def load_default_certs(self, purpose: Purpose = ...) -> None","id":1512,"name":"load_default_certs","nodeType":"Function","startLoc":413,"text":"def load_default_certs(self, purpose: Purpose = ...) -> None: ..."},{"col":4,"comment":"null","endLoc":419,"header":"def load_verify_locations(\n self,\n cafile: StrOrBytesPath | None = None,\n capath: StrOrBytesPath | None = None,\n cadata: str | ReadableBuffer | None = None,\n ) -> None","id":1513,"name":"load_verify_locations","nodeType":"Function","startLoc":414,"text":"def load_verify_locations(\n self,\n cafile: StrOrBytesPath | None = None,\n capath: StrOrBytesPath | None = None,\n cadata: str | ReadableBuffer | None = None,\n ) -> None: ..."},{"col":4,"comment":"null","endLoc":421,"header":"@overload\n def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]","id":1514,"name":"get_ca_certs","nodeType":"Function","startLoc":420,"text":"@overload\n def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ..."},{"col":4,"comment":"null","endLoc":423,"header":"@overload\n def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]","id":1515,"name":"get_ca_certs","nodeType":"Function","startLoc":422,"text":"@overload\n def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ..."},{"col":4,"comment":"null","endLoc":425,"header":"@overload\n def get_ca_certs(self, binary_form: bool = False) -> Any","id":1516,"name":"get_ca_certs","nodeType":"Function","startLoc":424,"text":"@overload\n def get_ca_certs(self, binary_form: bool = False) -> Any: ..."},{"col":4,"comment":"null","endLoc":426,"header":"def get_ciphers(self) -> list[_Cipher]","id":1517,"name":"get_ciphers","nodeType":"Function","startLoc":426,"text":"def get_ciphers(self) -> list[_Cipher]: ..."},{"col":4,"comment":"null","endLoc":427,"header":"def set_default_verify_paths(self) -> None","id":1518,"name":"set_default_verify_paths","nodeType":"Function","startLoc":427,"text":"def set_default_verify_paths(self) -> None: ..."},{"col":4,"comment":"null","endLoc":428,"header":"def set_ciphers(self, cipherlist: str, /) -> None","id":1519,"name":"set_ciphers","nodeType":"Function","startLoc":428,"text":"def set_ciphers(self, cipherlist: str, /) -> None: ..."},{"col":4,"comment":"null","endLoc":429,"header":"def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None","id":1520,"name":"set_alpn_protocols","nodeType":"Function","startLoc":429,"text":"def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ..."},{"col":4,"comment":"null","endLoc":430,"header":"def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None","id":1521,"name":"set_npn_protocols","nodeType":"Function","startLoc":430,"text":"def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ..."},{"col":4,"comment":"null","endLoc":431,"header":"def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None","id":1522,"name":"set_servername_callback","nodeType":"Function","startLoc":431,"text":"def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ..."},{"col":4,"comment":"null","endLoc":432,"header":"def 
load_dh_params(self, path: str, /) -> None","id":1523,"name":"load_dh_params","nodeType":"Function","startLoc":432,"text":"def load_dh_params(self, path: str, /) -> None: ..."},{"col":4,"comment":"null","endLoc":433,"header":"def set_ecdh_curve(self, name: str, /) -> None","id":1524,"name":"set_ecdh_curve","nodeType":"Function","startLoc":433,"text":"def set_ecdh_curve(self, name: str, /) -> None: ..."},{"col":4,"comment":"null","endLoc":450,"header":"def wrap_bio(\n self,\n incoming: MemoryBIO,\n outgoing: MemoryBIO,\n server_side: bool = False,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLObject","id":1525,"name":"wrap_bio","nodeType":"Function","startLoc":443,"text":"def wrap_bio(\n self,\n incoming: MemoryBIO,\n outgoing: MemoryBIO,\n server_side: bool = False,\n server_hostname: str | bytes | None = None,\n session: SSLSession | None = None,\n ) -> SSLObject: ..."},{"col":4,"comment":"null","endLoc":451,"header":"def session_stats(self) -> dict[str, int]","id":1526,"name":"session_stats","nodeType":"Function","startLoc":451,"text":"def session_stats(self) -> dict[str, int]: ..."},{"attributeType":"bool","col":4,"comment":"null","endLoc":383,"id":1527,"name":"check_hostname","nodeType":"Attribute","startLoc":383,"text":"check_hostname"},{"attributeType":"Options","col":4,"comment":"null","endLoc":384,"id":1528,"name":"options","nodeType":"Attribute","startLoc":384,"text":"options"},{"attributeType":"VerifyFlags","col":4,"comment":"null","endLoc":385,"id":1529,"name":"verify_flags","nodeType":"Attribute","startLoc":385,"text":"verify_flags"},{"attributeType":"VerifyMode","col":4,"comment":"null","endLoc":386,"id":1530,"name":"verify_mode","nodeType":"Attribute","startLoc":386,"text":"verify_mode"},{"attributeType":"bool","col":4,"comment":"null","endLoc":389,"id":1531,"name":"hostname_checks_common_name","nodeType":"Attribute","startLoc":389,"text":"hostname_checks_common_name"},{"attributeType":"TLSVersion","col":4,"comment":"null","endLoc":390,"id":1532,"name":"maximum_version","nodeType":"Attribute","startLoc":390,"text":"maximum_version"},{"attributeType":"TLSVersion","col":4,"comment":"null","endLoc":391,"id":1533,"name":"minimum_version","nodeType":"Attribute","startLoc":391,"text":"minimum_version"},{"attributeType":"(SSLObject, str, SSLContext) -> None | int | None","col":4,"comment":"null","endLoc":392,"id":1534,"name":"sni_callback","nodeType":"Attribute","startLoc":392,"text":"sni_callback"},{"attributeType":"SSLObject","col":4,"comment":"null","endLoc":396,"id":1535,"name":"sslobject_class","nodeType":"Attribute","startLoc":396,"text":"sslobject_class"},{"attributeType":"SSLSocket","col":4,"comment":"null","endLoc":397,"id":1536,"name":"sslsocket_class","nodeType":"Attribute","startLoc":397,"text":"sslsocket_class"},{"attributeType":"str","col":4,"comment":"null","endLoc":398,"id":1537,"name":"keylog_filename","nodeType":"Attribute","startLoc":398,"text":"keylog_filename"},{"attributeType":"bool","col":4,"comment":"null","endLoc":399,"id":1538,"name":"post_handshake_auth","nodeType":"Attribute","startLoc":399,"text":"post_handshake_auth"},{"className":"TestRequests","col":0,"comment":"null","endLoc":1994,"id":1539,"nodeType":"Class","startLoc":60,"text":"class TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an 
entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for 
scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, 
httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 
'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', 
'222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\\xc5\\xafsername', b'test\\xc6\\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = 
session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = 
HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def 
test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == 
httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n 
req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',\n 'link': ('; rel=\"next\", ; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 
'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n 
r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = 
r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 
'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. 
Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {'foo': 'bar\\r\\nbaz: qux'}\n headers_lf = {'foo': 'bar\\nbaz: qux'}\n headers_cr = {'foo': 'bar\\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': ' bar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert 
s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 
'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = 
adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. 
the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]"},{"col":4,"comment":"null","endLoc":75,"header":"def test_entry_points(self)","id":1540,"name":"test_entry_points","nodeType":"Function","startLoc":64,"text":"def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager"},{"col":4,"comment":"null","endLoc":87,"header":"@pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost","id":1541,"name":"test_invalid_url","nodeType":"Function","startLoc":77,"text":"@pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)"},{"col":4,"comment":"null","endLoc":96,"header":"def test_basic_building(self)","id":1542,"name":"test_basic_building","nodeType":"Function","startLoc":89,"text":"def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'"},{"col":4,"comment":"null","endLoc":101,"header":"@pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method)","id":1543,"name":"test_no_content_length","nodeType":"Function","startLoc":98,"text":"@pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers"},{"col":4,"comment":"null","endLoc":106,"header":"@pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method)","id":1544,"name":"test_no_body_content_length","nodeType":"Function","startLoc":103,"text":"@pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == 
'0'"},{"col":4,"comment":"null","endLoc":111,"header":"@pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method)","id":1545,"name":"test_empty_content_length","nodeType":"Function","startLoc":108,"text":"@pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'"},{"col":4,"comment":"null","endLoc":119,"header":"def test_override_content_length(self, httpbin)","id":1546,"name":"test_override_content_length","nodeType":"Function","startLoc":113,"text":"def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'"},{"col":4,"comment":"null","endLoc":124,"header":"def test_path_is_not_double_encoded(self)","id":1547,"name":"test_path_is_not_double_encoded","nodeType":"Function","startLoc":121,"text":"def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'"},{"col":4,"comment":"null","endLoc":133,"header":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http","id":1548,"name":"test_params_are_added_before_fragment","nodeType":"Function","startLoc":126,"text":"@pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected"},{"col":4,"comment":"null","endLoc":140,"header":"def test_params_original_order_is_preserved_by_default(self)","id":1549,"name":"test_params_original_order_is_preserved_by_default","nodeType":"Function","startLoc":135,"text":"def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'"},{"col":4,"comment":"null","endLoc":145,"header":"def test_params_bytes_are_encoded(self)","id":1550,"name":"test_params_bytes_are_encoded","nodeType":"Function","startLoc":142,"text":"def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'"},{"col":4,"comment":"null","endLoc":150,"header":"def test_binary_put(self)","id":1551,"name":"test_binary_put","nodeType":"Function","startLoc":147,"text":"def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)"},{"col":4,"comment":"null","endLoc":155,"header":"def test_whitespaces_are_removed_from_url(self)","id":1552,"name":"test_whitespaces_are_removed_from_url","nodeType":"Function","startLoc":152,"text":"def 
test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'"},{"col":4,"comment":"null","endLoc":165,"header":"@pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme)","id":1553,"name":"test_mixed_case_scheme_acceptable","nodeType":"Function","startLoc":157,"text":"@pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)"},{"col":4,"comment":"null","endLoc":174,"header":"def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin)","id":1554,"name":"test_HTTP_200_OK_GET_ALTERNATIVE","nodeType":"Function","startLoc":167,"text":"def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":180,"header":"def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin)","id":1555,"name":"test_HTTP_302_ALLOW_REDIRECT_GET","nodeType":"Function","startLoc":176,"text":"def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":187,"header":"def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin)","id":1556,"name":"test_HTTP_307_ALLOW_REDIRECT_POST","nodeType":"Function","startLoc":182,"text":"def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'"},{"col":4,"comment":"null","endLoc":195,"header":"def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin)","id":1557,"name":"test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE","nodeType":"Function","startLoc":189,"text":"def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')"},{"col":4,"comment":"null","endLoc":206,"header":"def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin)","id":1558,"name":"test_HTTP_302_TOO_MANY_REDIRECTS","nodeType":"Function","startLoc":197,"text":"def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')"},{"col":4,"comment":"null","endLoc":219,"header":"def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, 
httpbin)","id":1559,"name":"test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS","nodeType":"Function","startLoc":208,"text":"def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')"},{"col":4,"comment":"null","endLoc":226,"header":"def test_http_301_changes_post_to_get(self, httpbin)","id":1560,"name":"test_http_301_changes_post_to_get","nodeType":"Function","startLoc":221,"text":"def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":234,"header":"def test_http_301_doesnt_change_head_to_get(self, httpbin)","id":1561,"name":"test_http_301_doesnt_change_head_to_get","nodeType":"Function","startLoc":228,"text":"def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":241,"header":"def test_http_302_changes_post_to_get(self, httpbin)","id":1562,"name":"test_http_302_changes_post_to_get","nodeType":"Function","startLoc":236,"text":"def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":248,"header":"def test_http_302_doesnt_change_head_to_get(self, httpbin)","id":1563,"name":"test_http_302_doesnt_change_head_to_get","nodeType":"Function","startLoc":243,"text":"def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":255,"header":"def test_http_303_changes_post_to_get(self, httpbin)","id":1564,"name":"test_http_303_changes_post_to_get","nodeType":"Function","startLoc":250,"text":"def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":262,"header":"def test_http_303_doesnt_change_head_to_get(self, httpbin)","id":1565,"name":"test_http_303_doesnt_change_head_to_get","nodeType":"Function","startLoc":257,"text":"def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect"},{"col":4,"comment":"null","endLoc":279,"header":"def test_header_and_body_removal_on_redirect(self, 
httpbin)","id":1566,"name":"test_header_and_body_removal_on_redirect","nodeType":"Function","startLoc":264,"text":"def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers"},{"col":4,"comment":"null","endLoc":302,"header":"def test_transfer_enc_removal_on_redirect(self, httpbin)","id":1567,"name":"test_transfer_enc_removal_on_redirect","nodeType":"Function","startLoc":281,"text":"def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers"},{"col":42,"endLoc":292,"id":1568,"nodeType":"Lambda","startLoc":292,"text":"lambda *args: args"},{"col":4,"comment":"null","endLoc":310,"header":"def test_fragment_maintained_on_redirect(self, httpbin)","id":1569,"name":"test_fragment_maintained_on_redirect","nodeType":"Function","startLoc":304,"text":"def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment"},{"col":4,"comment":"null","endLoc":318,"header":"def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin)","id":1570,"name":"test_HTTP_200_OK_GET_WITH_PARAMS","nodeType":"Function","startLoc":312,"text":"def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":324,"header":"def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin)","id":1571,"name":"test_HTTP_200_OK_GET_WITH_MIXED_PARAMS","nodeType":"Function","startLoc":320,"text":"def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":330,"header":"def test_set_cookie_on_301(self, httpbin)","id":1572,"name":"test_set_cookie_on_301","nodeType":"Function","startLoc":326,"text":"def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert 
s.cookies['foo'] == 'bar'"},{"col":4,"comment":"null","endLoc":336,"header":"def test_cookie_sent_on_redirect(self, httpbin)","id":1573,"name":"test_cookie_sent_on_redirect","nodeType":"Function","startLoc":332,"text":"def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']"},{"col":4,"comment":"null","endLoc":349,"header":"def test_cookie_removed_on_expire(self, httpbin)","id":1574,"name":"test_cookie_removed_on_expire","nodeType":"Function","startLoc":338,"text":"def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies"},{"col":4,"comment":"null","endLoc":354,"header":"def test_cookie_quote_wrapped(self, httpbin)","id":1575,"name":"test_cookie_quote_wrapped","nodeType":"Function","startLoc":351,"text":"def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'"},{"col":4,"comment":"null","endLoc":360,"header":"def test_cookie_persists_via_api(self, httpbin)","id":1576,"name":"test_cookie_persists_via_api","nodeType":"Function","startLoc":356,"text":"def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']"},{"col":4,"comment":"null","endLoc":368,"header":"def test_request_cookie_overrides_session_cookie(self, httpbin)","id":1577,"name":"test_request_cookie_overrides_session_cookie","nodeType":"Function","startLoc":362,"text":"def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'"},{"col":4,"comment":"null","endLoc":374,"header":"def test_request_cookies_not_persisted(self, httpbin)","id":1578,"name":"test_request_cookies_not_persisted","nodeType":"Function","startLoc":370,"text":"def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies"},{"col":4,"comment":"null","endLoc":385,"header":"def test_generic_cookiejar_works(self, httpbin)","id":1579,"name":"test_generic_cookiejar_works","nodeType":"Function","startLoc":376,"text":"def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj"},{"col":4,"comment":"null","endLoc":393,"header":"def test_param_cookiejar_works(self, httpbin)","id":1580,"name":"test_param_cookiejar_works","nodeType":"Function","startLoc":387,"text":"def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n 
r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'"},{"col":4,"comment":"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n ","endLoc":425,"header":"def test_cookielib_cookiejar_on_redirect(self, httpbin)","id":1581,"name":"test_cookielib_cookiejar_on_redirect","nodeType":"Function","startLoc":395,"text":"def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'"},{"col":4,"comment":"null","endLoc":431,"header":"def test_requests_in_history_are_not_overridden(self, httpbin)","id":1582,"name":"test_requests_in_history_are_not_overridden","nodeType":"Function","startLoc":427,"text":"def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls"},{"col":4,"comment":"Show that even with redirects, Response.history is always a list.","endLoc":439,"header":"def test_history_is_always_a_list(self, httpbin)","id":1583,"name":"test_history_is_always_a_list","nodeType":"Function","startLoc":433,"text":"def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)"},{"col":4,"comment":"Do not send headers in Session.headers with None values.","endLoc":447,"header":"def test_headers_on_session_with_None_are_not_sent(self, httpbin)","id":1584,"name":"test_headers_on_session_with_None_are_not_sent","nodeType":"Function","startLoc":441,"text":"def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers"},{"col":4,"comment":"Preserve order when headers provided as OrderedDict.","endLoc":467,"header":"def test_headers_preserve_order(self, httpbin)","id":1585,"name":"test_headers_preserve_order","nodeType":"Function","startLoc":449,"text":"def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as 
OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')"},{"col":4,"comment":"null","endLoc":475,"header":"@pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key)","id":1586,"name":"test_user_agent_transfers","nodeType":"Function","startLoc":469,"text":"@pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text"},{"col":4,"comment":"null","endLoc":479,"header":"def test_HTTP_200_OK_HEAD(self, httpbin)","id":1587,"name":"test_HTTP_200_OK_HEAD","nodeType":"Function","startLoc":477,"text":"def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":483,"header":"def test_HTTP_200_OK_PUT(self, httpbin)","id":1588,"name":"test_HTTP_200_OK_PUT","nodeType":"Function","startLoc":481,"text":"def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":498,"header":"def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin)","id":1589,"name":"test_BASICAUTH_TUPLE_HTTP_200_OK_GET","nodeType":"Function","startLoc":485,"text":"def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":514,"header":"@pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password)","id":1590,"name":"test_set_basicauth","nodeType":"Function","startLoc":500,"text":"@pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)"},{"col":4,"comment":"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n ","endLoc":524,"header":"def test_basicauth_encodes_byte_strings(self)","id":1591,"name":"test_basicauth_encodes_byte_strings","nodeType":"Function","startLoc":516,"text":"def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string 
\"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\\xc5\\xafsername', b'test\\xc6\\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='"},{"col":4,"comment":"null","endLoc":537,"header":"@pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http","id":1592,"name":"test_errors","nodeType":"Function","startLoc":526,"text":"@pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)"},{"col":4,"comment":"null","endLoc":542,"header":"def test_proxy_error(self)","id":1593,"name":"test_proxy_error","nodeType":"Function","startLoc":539,"text":"def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})"},{"col":4,"comment":"null","endLoc":555,"header":"def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure)","id":1594,"name":"test_proxy_error_on_bad_url","nodeType":"Function","startLoc":544,"text":"def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})"},{"col":4,"comment":"null","endLoc":562,"header":"def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin)","id":1595,"name":"test_respect_proxy_env_on_send_self_prepared_request","nodeType":"Function","startLoc":557,"text":"def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())"},{"col":4,"comment":"null","endLoc":570,"header":"def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin)","id":1596,"name":"test_respect_proxy_env_on_send_session_prepared_request","nodeType":"Function","startLoc":564,"text":"def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)"},{"col":4,"comment":"null","endLoc":579,"header":"def test_respect_proxy_env_on_send_with_redirects(self, httpbin)","id":1597,"name":"test_respect_proxy_env_on_send_with_redirects","nodeType":"Function","startLoc":572,"text":"def 
test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())"},{"col":4,"comment":"null","endLoc":585,"header":"def test_respect_proxy_env_on_get(self, httpbin)","id":1598,"name":"test_respect_proxy_env_on_get","nodeType":"Function","startLoc":581,"text":"def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())"},{"col":4,"comment":"null","endLoc":591,"header":"def test_respect_proxy_env_on_request(self, httpbin)","id":1599,"name":"test_respect_proxy_env_on_request","nodeType":"Function","startLoc":587,"text":"def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())"},{"col":4,"comment":"null","endLoc":600,"header":"def test_proxy_authorization_preserved_on_request(self, httpbin)","id":1600,"name":"test_proxy_authorization_preserved_on_request","nodeType":"Function","startLoc":593,"text":"def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value"},{"col":4,"comment":"null","endLoc":633,"header":"def test_basicauth_with_netrc(self, httpbin)","id":1601,"name":"test_basicauth_with_netrc","nodeType":"Function","startLoc":602,"text":"def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth"},{"col":4,"comment":"null","endLoc":651,"header":"def test_DIGEST_HTTP_200_OK_GET(self, httpbin)","id":1602,"name":"test_DIGEST_HTTP_200_OK_GET","nodeType":"Function","startLoc":635,"text":"def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":662,"header":"def test_DIGEST_AUTH_RETURNS_COOKIE(self, 
httpbin)","id":1603,"name":"test_DIGEST_AUTH_RETURNS_COOKIE","nodeType":"Function","startLoc":653,"text":"def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":671,"header":"def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin)","id":1604,"name":"test_DIGEST_AUTH_SETS_SESSION_COOKIES","nodeType":"Function","startLoc":664,"text":"def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'"},{"col":4,"comment":"null","endLoc":683,"header":"def test_DIGEST_STREAM(self, httpbin)","id":1605,"name":"test_DIGEST_STREAM","nodeType":"Function","startLoc":673,"text":"def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''"},{"col":4,"comment":"null","endLoc":700,"header":"def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin)","id":1606,"name":"test_DIGESTAUTH_WRONG_HTTP_401_GET","nodeType":"Function","startLoc":685,"text":"def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401"},{"col":4,"comment":"null","endLoc":709,"header":"def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin)","id":1607,"name":"test_DIGESTAUTH_QUOTES_QOP_VALUE","nodeType":"Function","startLoc":702,"text":"def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']"},{"col":4,"comment":"null","endLoc":727,"header":"def test_POSTBIN_GET_POST_FILES(self, httpbin)","id":1608,"name":"test_POSTBIN_GET_POST_FILES","nodeType":"Function","startLoc":711,"text":"def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])"},{"col":4,"comment":"null","endLoc":735,"header":"def test_invalid_files_input(self, httpbin)","id":1609,"name":"test_invalid_files_input","nodeType":"Function","startLoc":729,"text":"def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = 
requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body"},{"col":4,"comment":"null","endLoc":777,"header":"def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin)","id":1610,"name":"test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER","nodeType":"Function","startLoc":737,"text":"def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'"},{"col":4,"comment":"null","endLoc":795,"header":"def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin)","id":1613,"name":"test_POSTBIN_GET_POST_FILES_WITH_DATA","nodeType":"Function","startLoc":779,"text":"def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])"},{"col":4,"comment":"null","endLoc":820,"header":"def test_post_with_custom_mapping(self, httpbin)","id":1614,"name":"test_post_with_custom_mapping","nodeType":"Function","startLoc":797,"text":"def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}"},{"col":4,"comment":"null","endLoc":828,"header":"def test_conflicting_post_params(self, httpbin)","id":1616,"name":"test_conflicting_post_params","nodeType":"Function","startLoc":822,"text":"def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})"},{"col":4,"comment":"null","endLoc":832,"header":"def test_request_ok_set(self, 
httpbin)","id":1617,"name":"test_request_ok_set","nodeType":"Function","startLoc":830,"text":"def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok"},{"col":4,"comment":"null","endLoc":840,"header":"def test_status_raising(self, httpbin)","id":1618,"name":"test_status_raising","nodeType":"Function","startLoc":834,"text":"def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok"},{"col":4,"comment":"null","endLoc":844,"header":"def test_decompress_gzip(self, httpbin)","id":1619,"name":"test_decompress_gzip","nodeType":"Function","startLoc":842,"text":"def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')"},{"col":4,"comment":"null","endLoc":855,"header":"@pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø'","id":1620,"name":"test_unicode_get","nodeType":"Function","startLoc":846,"text":"@pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)"},{"col":4,"comment":"null","endLoc":861,"header":"def test_unicode_header_name(self, httpbin)","id":1621,"name":"test_unicode_header_name","nodeType":"Function","startLoc":857,"text":"def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode."},{"col":4,"comment":"null","endLoc":864,"header":"def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle)","id":1622,"name":"test_pyopenssl_redirect","nodeType":"Function","startLoc":863,"text":"def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)"},{"col":4,"comment":"null","endLoc":870,"header":"def test_invalid_ca_certificate_path(self, httpbin_secure)","id":1623,"name":"test_invalid_ca_certificate_path","nodeType":"Function","startLoc":866,"text":"def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)"},{"col":4,"comment":"null","endLoc":880,"header":"def test_invalid_ssl_certificate_files(self, httpbin_secure)","id":1624,"name":"test_invalid_ssl_certificate_files","nodeType":"Function","startLoc":872,"text":"def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)"},{"col":4,"comment":"null","endLoc":884,"header":"def test_http_with_certificate(self, httpbin)","id":1625,"name":"test_http_with_certificate","nodeType":"Function","startLoc":882,"text":"def test_http_with_certificate(self, httpbin):\n r = 
requests.get(httpbin(), cert='.')\n assert r.status_code == 200"},{"col":4,"comment":"warnings are emitted with requests.get","endLoc":905,"header":"def test_https_warnings(self, nosan_server)","id":1626,"name":"test_https_warnings","nodeType":"Function","startLoc":886,"text":"def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected"},{"col":4,"comment":"\n When underlying SSL problems occur, an SSLError is raised.\n ","endLoc":914,"header":"def test_certificate_failure(self, httpbin_secure)","id":1628,"name":"test_certificate_failure","nodeType":"Function","startLoc":907,"text":"def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))"},{"col":4,"comment":"null","endLoc":920,"header":"def test_urlencoded_get_query_multivalued_param(self, httpbin)","id":1629,"name":"test_urlencoded_get_query_multivalued_param","nodeType":"Function","startLoc":916,"text":"def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')"},{"col":4,"comment":"null","endLoc":926,"header":"def test_form_encoded_post_query_multivalued_element(self, httpbin)","id":1630,"name":"test_form_encoded_post_query_multivalued_element","nodeType":"Function","startLoc":922,"text":"def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'"},{"col":4,"comment":"null","endLoc":933,"header":"def test_different_encodings_dont_break_post(self, httpbin)","id":1631,"name":"test_different_encodings_dont_break_post","nodeType":"Function","startLoc":928,"text":"def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":946,"header":"@pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff'","id":1632,"name":"test_unicode_multipart_post","nodeType":"Function","startLoc":935,"text":"@pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 
200"},{"col":4,"comment":"null","endLoc":956,"header":"def test_unicode_multipart_post_fieldnames(self, httpbin)","id":1633,"name":"test_unicode_multipart_post_fieldnames","nodeType":"Function","startLoc":948,"text":"def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body"},{"col":4,"comment":"null","endLoc":962,"header":"def test_unicode_method_name(self, httpbin)","id":1636,"name":"test_unicode_method_name","nodeType":"Function","startLoc":958,"text":"def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":973,"header":"def test_unicode_method_name_with_request_object(self, httpbin)","id":1637,"name":"test_unicode_method_name_with_request_object","nodeType":"Function","startLoc":964,"text":"def test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200"},{"col":4,"comment":"null","endLoc":981,"header":"def test_non_prepared_request_error(self)","id":1638,"name":"test_non_prepared_request_error","nodeType":"Function","startLoc":975,"text":"def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'"},{"col":4,"comment":"null","endLoc":992,"header":"def test_custom_content_type(self, httpbin)","id":1639,"name":"test_custom_content_type","nodeType":"Function","startLoc":983,"text":"def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body"},{"col":4,"comment":"null","endLoc":1002,"header":"def test_hook_receives_request_arguments(self, httpbin)","id":1640,"name":"test_hook_receives_request_arguments","nodeType":"Function","startLoc":994,"text":"def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)"},{"col":4,"comment":"null","endLoc":1011,"header":"def test_session_hooks_are_used_with_no_request_hooks(self, httpbin)","id":1641,"name":"test_session_hooks_are_used_with_no_request_hooks","nodeType":"Function","startLoc":1004,"text":"def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert 
prep.hooks['response'] == [hook]"},{"col":15,"endLoc":1005,"id":1642,"nodeType":"Lambda","startLoc":1005,"text":"lambda x, *args, **kwargs: x"},{"col":4,"comment":"null","endLoc":1021,"header":"def test_session_hooks_are_overridden_by_request_hooks(self, httpbin)","id":1643,"name":"test_session_hooks_are_overridden_by_request_hooks","nodeType":"Function","startLoc":1013,"text":"def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]"},{"col":16,"endLoc":1014,"id":1644,"nodeType":"Lambda","startLoc":1014,"text":"lambda x, *args, **kwargs: x"},{"col":16,"endLoc":1015,"id":1645,"nodeType":"Lambda","startLoc":1015,"text":"lambda x, *args, **kwargs: x"},{"col":4,"comment":"null","endLoc":1035,"header":"def test_prepared_request_hook(self, httpbin)","id":1646,"name":"test_prepared_request_hook","nodeType":"Function","startLoc":1023,"text":"def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')"},{"col":4,"comment":"null","endLoc":1053,"header":"def test_prepared_from_session(self, httpbin)","id":1647,"name":"test_prepared_from_session","nodeType":"Function","startLoc":1037,"text":"def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'"},{"col":4,"comment":"null","endLoc":1059,"header":"def test_prepare_request_with_bytestring_url(self)","id":1648,"name":"test_prepare_request_with_bytestring_url","nodeType":"Function","startLoc":1055,"text":"def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\""},{"col":4,"comment":"null","endLoc":1069,"header":"def test_request_with_bytestring_host(self, httpbin)","id":1649,"name":"test_request_with_bytestring_host","nodeType":"Function","startLoc":1061,"text":"def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'"},{"col":4,"comment":"null","endLoc":1093,"header":"def test_links(self)","id":1650,"name":"test_links","nodeType":"Function","startLoc":1071,"text":"def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',\n 'link': 
('<https://api.github.com/users/kennethreitz/repos?'\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n 'com/users/kennethreitz/repos?page=50&per_page=10>; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'"},{"col":4,"comment":"null","endLoc":1111,"header":"def test_cookie_parameters(self)","id":1651,"name":"test_cookie_parameters","nodeType":"Function","startLoc":1095,"text":"def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']"},{"col":4,"comment":"null","endLoc":1131,"header":"def test_cookie_as_dict_keeps_len(self)","id":1652,"name":"test_cookie_as_dict_keeps_len","nodeType":"Function","startLoc":1113,"text":"def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2"},{"col":4,"comment":"null","endLoc":1150,"header":"def test_cookie_as_dict_keeps_items(self)","id":1653,"name":"test_cookie_as_dict_keeps_items","nodeType":"Function","startLoc":1133,"text":"def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'"},{"col":4,"comment":"null","endLoc":1166,"header":"def test_cookie_as_dict_keys(self)","id":1654,"name":"test_cookie_as_dict_keys","nodeType":"Function","startLoc":1152,"text":"def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)"},{"col":4,"comment":"null","endLoc":1182,"header":"def test_cookie_as_dict_values(self)","id":1655,"name":"test_cookie_as_dict_values","nodeType":"Function","startLoc":1168,"text":"def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)"},{"col":4,"comment":"null","endLoc":1198,"header":"def test_cookie_as_dict_items(self)","id":1656,"name":"test_cookie_as_dict_items","nodeType":"Function","startLoc":1184,"text":"def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = 
requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)"},{"col":4,"comment":"null","endLoc":1219,"header":"def test_cookie_duplicate_names_different_domains(self)","id":1657,"name":"test_cookie_duplicate_names_different_domains","nodeType":"Function","startLoc":1200,"text":"def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value"},{"col":4,"comment":"null","endLoc":1230,"header":"def test_cookie_duplicate_names_raises_cookie_conflict_error(self)","id":1658,"name":"test_cookie_duplicate_names_raises_cookie_conflict_error","nodeType":"Function","startLoc":1221,"text":"def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)"},{"col":4,"comment":"null","endLoc":1238,"header":"def test_cookie_policy_copy(self)","id":1659,"name":"test_cookie_policy_copy","nodeType":"Function","startLoc":1232,"text":"def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)"},{"col":4,"comment":"null","endLoc":1244,"header":"def test_time_elapsed_blank(self, httpbin)","id":1661,"name":"test_time_elapsed_blank","nodeType":"Function","startLoc":1240,"text":"def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0"},{"col":4,"comment":"null","endLoc":1248,"header":"def test_empty_response_has_content_none(self)","id":1662,"name":"test_empty_response_has_content_none","nodeType":"Function","startLoc":1246,"text":"def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None"},{"col":4,"comment":"null","endLoc":1260,"header":"def test_response_is_iterable(self)","id":1663,"name":"test_response_is_iterable","nodeType":"Function","startLoc":1250,"text":"def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()"},{"col":4,"comment":"When called with decode_unicode, Response.iter_content should always\n return unicode.\n ","endLoc":1279,"header":"def test_response_decode_unicode(self)","id":1664,"name":"test_response_decode_unicode","nodeType":"Function","startLoc":1262,"text":"def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, 
Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)"},{"col":4,"comment":"null","endLoc":1288,"header":"def test_response_reason_unicode(self)","id":1665,"name":"test_response_reason_unicode","nodeType":"Function","startLoc":1281,"text":"def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here"},{"col":4,"comment":"null","endLoc":1300,"header":"def test_response_reason_unicode_fallback(self)","id":1666,"name":"test_response_reason_unicode_fallback","nodeType":"Function","startLoc":1290,"text":"def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]"},{"col":4,"comment":"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n ","endLoc":1319,"header":"def test_response_chunk_size_type(self)","id":1667,"name":"test_response_chunk_size_type","nodeType":"Function","startLoc":1302,"text":"def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")"},{"col":4,"comment":"null","endLoc":1331,"header":"def test_request_and_response_are_pickleable(self, httpbin)","id":1668,"name":"test_request_and_response_are_pickleable","nodeType":"Function","startLoc":1321,"text":"def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers"},{"col":4,"comment":"null","endLoc":1345,"header":"def test_prepared_request_is_pickleable(self, httpbin)","id":1671,"name":"test_prepared_request_is_pickleable","nodeType":"Function","startLoc":1333,"text":"def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = 
requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200"},{"col":4,"comment":"null","endLoc":1361,"header":"def test_prepared_request_with_file_is_pickleable(self, httpbin)","id":1672,"name":"test_prepared_request_with_file_is_pickleable","nodeType":"Function","startLoc":1347,"text":"def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200"},{"col":4,"comment":"null","endLoc":1377,"header":"def test_prepared_request_with_hook_is_pickleable(self, httpbin)","id":1673,"name":"test_prepared_request_with_hook_is_pickleable","nodeType":"Function","startLoc":1363,"text":"def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200"},{"col":4,"comment":"null","endLoc":1382,"header":"def test_cannot_send_unprepared_requests(self, httpbin)","id":1674,"name":"test_cannot_send_unprepared_requests","nodeType":"Function","startLoc":1379,"text":"def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)"},{"col":4,"comment":"null","endLoc":1392,"header":"def test_http_error(self)","id":1675,"name":"test_http_error","nodeType":"Function","startLoc":1384,"text":"def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response"},{"col":4,"comment":"null","endLoc":1402,"header":"def test_session_pickling(self, httpbin)","id":1676,"name":"test_session_pickling","nodeType":"Function","startLoc":1394,"text":"def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200"},{"col":4,"comment":"Ensure that header updates are done case-insensitively.","endLoc":1413,"header":"def test_fixes_1329(self, httpbin)","id":1677,"name":"test_fixes_1329","nodeType":"Function","startLoc":1404,"text":"def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'"},{"col":4,"comment":"null","endLoc":1420,"header":"def test_uppercase_scheme_redirect(self, 
httpbin)","id":1678,"name":"test_uppercase_scheme_redirect","nodeType":"Function","startLoc":1415,"text":"def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()"},{"col":4,"comment":"null","endLoc":1458,"header":"def test_transport_adapter_ordering(self)","id":1679,"name":"test_transport_adapter_ordering","nodeType":"Function","startLoc":1422,"text":"def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters"},{"col":4,"comment":"null","endLoc":1476,"header":"def test_session_get_adapter_prefix_matching(self)","id":1680,"name":"test_session_get_adapter_prefix_matching","nodeType":"Function","startLoc":1460,"text":"def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)"},{"col":4,"comment":"null","endLoc":1486,"header":"def test_session_get_adapter_prefix_matching_mixed_case(self)","id":1681,"name":"test_session_get_adapter_prefix_matching_mixed_case","nodeType":"Function","startLoc":1478,"text":"def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter"},{"col":4,"comment":"null","endLoc":1496,"header":"def test_session_get_adapter_prefix_matching_is_case_insensitive(self)","id":1682,"name":"test_session_get_adapter_prefix_matching_is_case_insensitive","nodeType":"Function","startLoc":1488,"text":"def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 
'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter"},{"col":4,"comment":"null","endLoc":1503,"header":"def test_header_remove_is_case_insensitive(self, httpbin)","id":1683,"name":"test_header_remove_is_case_insensitive","nodeType":"Function","startLoc":1498,"text":"def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers"},{"col":4,"comment":"null","endLoc":1509,"header":"def test_params_are_merged_case_sensitive(self, httpbin)","id":1684,"name":"test_params_are_merged_case_sensitive","nodeType":"Function","startLoc":1505,"text":"def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}"},{"col":4,"comment":"null","endLoc":1518,"header":"def test_long_authinfo_in_url(self)","id":1685,"name":"test_long_authinfo_in_url","nodeType":"Function","startLoc":1511,"text":"def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url"},{"col":4,"comment":"null","endLoc":1528,"header":"def test_header_keys_are_native(self, httpbin)","id":1686,"name":"test_header_keys_are_native","nodeType":"Function","startLoc":1520,"text":"def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()"},{"col":4,"comment":"Ensure prepare_headers regex isn't flagging valid header contents.","endLoc":1537,"header":"def test_header_validation(self, httpbin)","id":1687,"name":"test_header_validation","nodeType":"Function","startLoc":1530,"text":"def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']"},{"col":4,"comment":"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n ","endLoc":1558,"header":"def test_header_value_not_str(self, httpbin)","id":1688,"name":"test_header_value_not_str","nodeType":"Function","startLoc":1539,"text":"def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)"},{"col":4,"comment":"Ensure that a header containing return character sequences raise an\n exception. Otherwise, multiple headers are created from single string.\n ","endLoc":1576,"header":"def test_header_no_return_chars(self, httpbin)","id":1689,"name":"test_header_no_return_chars","nodeType":"Function","startLoc":1560,"text":"def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. 
Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {'foo': 'bar\\r\\nbaz: qux'}\n headers_lf = {'foo': 'bar\\nbaz: qux'}\n headers_cr = {'foo': 'bar\\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)"},{"col":4,"comment":"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n ","endLoc":1590,"header":"def test_header_no_leading_space(self, httpbin)","id":1690,"name":"test_header_no_leading_space","nodeType":"Function","startLoc":1578,"text":"def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': ' bar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)"},{"col":4,"comment":"null","endLoc":1598,"header":"@pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files)","id":1691,"name":"test_can_send_objects_with_files","nodeType":"Function","startLoc":1592,"text":"@pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']"},{"col":4,"comment":"null","endLoc":1606,"header":"def test_can_send_file_object_with_non_string_filename(self, httpbin)","id":1695,"name":"test_can_send_file_object_with_non_string_filename","nodeType":"Function","startLoc":1600,"text":"def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']"},{"col":4,"comment":"null","endLoc":1614,"header":"def test_autoset_header_values_are_native(self, httpbin)","id":1696,"name":"test_autoset_header_values_are_native","nodeType":"Function","startLoc":1608,"text":"def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length"},{"col":4,"comment":"null","endLoc":1625,"header":"def test_nonhttp_schemes_dont_check_URLs(self)","id":1697,"name":"test_nonhttp_schemes_dont_check_URLs","nodeType":"Function","startLoc":1616,"text":"def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url"},{"col":4,"comment":"null","endLoc":1635,"header":"def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, 
httpbin_ca_bundle)","id":1698,"name":"test_auth_is_stripped_on_http_downgrade","nodeType":"Function","startLoc":1627,"text":"def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers"},{"col":4,"comment":"null","endLoc":1642,"header":"def test_auth_is_retained_for_redirect_on_host(self, httpbin)","id":1699,"name":"test_auth_is_retained_for_redirect_on_host","nodeType":"Function","startLoc":1637,"text":"def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2"},{"col":4,"comment":"null","endLoc":1646,"header":"def test_should_strip_auth_host_change(self)","id":1700,"name":"test_should_strip_auth_host_change","nodeType":"Function","startLoc":1644,"text":"def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')"},{"col":4,"comment":"null","endLoc":1650,"header":"def test_should_strip_auth_http_downgrade(self)","id":1701,"name":"test_should_strip_auth_http_downgrade","nodeType":"Function","startLoc":1648,"text":"def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')"},{"col":4,"comment":"null","endLoc":1659,"header":"def test_should_strip_auth_https_upgrade(self)","id":1702,"name":"test_should_strip_auth_https_upgrade","nodeType":"Function","startLoc":1652,"text":"def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')"},{"col":4,"comment":"null","endLoc":1663,"header":"def test_should_strip_auth_port_change(self)","id":1703,"name":"test_should_strip_auth_port_change","nodeType":"Function","startLoc":1661,"text":"def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')"},{"col":4,"comment":"null","endLoc":1674,"header":"@pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http","id":1704,"name":"test_should_strip_auth_default_port","nodeType":"Function","startLoc":1665,"text":"@pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)"},{"col":4,"comment":"null","endLoc":1693,"header":"def 
test_manual_redirect_with_partial_body_read(self, httpbin)","id":1705,"name":"test_manual_redirect_with_partial_body_read","nodeType":"Function","startLoc":1676,"text":"def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect"},{"col":4,"comment":"null","endLoc":1698,"header":"def test_prepare_body_position_non_stream(self)","id":1706,"name":"test_prepare_body_position_non_stream","nodeType":"Function","startLoc":1695,"text":"def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None"},{"col":4,"comment":"null","endLoc":1711,"header":"def test_rewind_body(self)","id":1707,"name":"test_rewind_body","nodeType":"Function","startLoc":1700,"text":"def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'"},{"col":4,"comment":"null","endLoc":1725,"header":"def test_rewind_partially_read_body(self)","id":1708,"name":"test_rewind_partially_read_body","nodeType":"Function","startLoc":1713,"text":"def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'"},{"col":4,"comment":"null","endLoc":1745,"header":"def test_rewind_body_no_seek(self)","id":1709,"name":"test_rewind_body_no_seek","nodeType":"Function","startLoc":1727,"text":"def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)"},{"col":4,"comment":"null","endLoc":1768,"header":"def test_rewind_body_failed_seek(self)","id":1711,"name":"test_rewind_body_failed_seek","nodeType":"Function","startLoc":1747,"text":"def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when 
rewinding request body' in str(e)"},{"col":4,"comment":"null","endLoc":1788,"header":"def test_rewind_body_failed_tell(self)","id":1713,"name":"test_rewind_body_failed_tell","nodeType":"Function","startLoc":1770,"text":"def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)"},{"col":4,"comment":"null","endLoc":1802,"header":"def _patch_adapter_gzipped_redirect(self, session, url)","id":1715,"name":"_patch_adapter_gzipped_redirect","nodeType":"Function","startLoc":1790,"text":"def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response"},{"col":4,"comment":"null","endLoc":1808,"header":"def test_redirect_with_wrong_gzipped_header(self, httpbin)","id":1717,"name":"test_redirect_with_wrong_gzipped_header","nodeType":"Function","startLoc":1804,"text":"def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)"},{"col":4,"comment":"null","endLoc":1818,"header":"@pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str)","id":1718,"name":"test_basic_auth_str_is_always_native","nodeType":"Function","startLoc":1810,"text":"@pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str"},{"col":4,"comment":"null","endLoc":1826,"header":"def test_requests_history_is_saved(self, httpbin)","id":1719,"name":"test_requests_history_is_saved","nodeType":"Function","startLoc":1820,"text":"def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1"},{"col":4,"comment":"null","endLoc":1835,"header":"def test_json_param_post_content_type_works(self, httpbin)","id":1720,"name":"test_json_param_post_content_type_works","nodeType":"Function","startLoc":1828,"text":"def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']"},{"col":4,"comment":"null","endLoc":1842,"header":"def test_json_param_post_should_not_override_data_param(self, 
httpbin)","id":1721,"name":"test_json_param_post_should_not_override_data_param","nodeType":"Function","startLoc":1837,"text":"def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body"},{"col":4,"comment":"null","endLoc":1850,"header":"def test_response_iter_lines(self, httpbin)","id":1722,"name":"test_response_iter_lines","nodeType":"Function","startLoc":1844,"text":"def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3"},{"col":4,"comment":"null","endLoc":1856,"header":"def test_response_context_manager(self, httpbin)","id":1723,"name":"test_response_context_manager","nodeType":"Function","startLoc":1852,"text":"def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed"},{"col":4,"comment":"null","endLoc":1865,"header":"def test_unconsumed_session_response_closes_connection(self, httpbin)","id":1724,"name":"test_unconsumed_session_response_closes_connection","nodeType":"Function","startLoc":1858,"text":"def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed"},{"col":4,"comment":"Response.iter_lines() is not reentrant safe","endLoc":1874,"header":"@pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin)","id":1726,"name":"test_response_iter_lines_reentrant","nodeType":"Function","startLoc":1867,"text":"@pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3"},{"col":4,"comment":"null","endLoc":1885,"header":"def test_session_close_proxy_clear(self, mocker)","id":1727,"name":"test_session_close_proxy_clear","nodeType":"Function","startLoc":1876,"text":"def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()"},{"col":4,"comment":"null","endLoc":1890,"header":"def test_proxy_auth(self)","id":1728,"name":"test_proxy_auth","nodeType":"Function","startLoc":1887,"text":"def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}"},{"col":4,"comment":"null","endLoc":1895,"header":"def test_proxy_auth_empty_pass(self)","id":1729,"name":"test_proxy_auth_empty_pass","nodeType":"Function","startLoc":1892,"text":"def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}"},{"col":4,"comment":"null","endLoc":1906,"header":"def test_response_json_when_content_is_None(self, 
httpbin)","id":1730,"name":"test_response_json_when_content_is_None","nodeType":"Function","startLoc":1897,"text":"def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()"},{"col":4,"comment":"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n ","endLoc":1916,"header":"def test_response_without_release_conn(self)","id":1731,"name":"test_response_without_release_conn","nodeType":"Function","startLoc":1908,"text":"def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed"},{"col":4,"comment":"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n ","endLoc":1928,"header":"def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin)","id":1732,"name":"test_empty_stream_with_auth_does_not_set_content_length_header","nodeType":"Function","startLoc":1918,"text":"def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers"},{"col":4,"comment":"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n ","endLoc":1940,"header":"def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin)","id":1733,"name":"test_stream_with_auth_does_not_set_transfer_encoding_header","nodeType":"Function","startLoc":1930,"text":"def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers"},{"col":4,"comment":"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n ","endLoc":1951,"header":"def test_chunked_upload_does_not_set_content_length_header(self, httpbin)","id":1734,"name":"test_chunked_upload_does_not_set_content_length_header","nodeType":"Function","startLoc":1942,"text":"def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 
'Content-Length' not in prepared_request.headers"},{"col":4,"comment":"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n ","endLoc":1994,"header":"def test_custom_redirect_mixin(self, httpbin)","id":1735,"name":"test_custom_redirect_mixin","nodeType":"Function","startLoc":1953,"text":"def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]"},{"attributeType":"(str, str, str)","col":4,"comment":"null","endLoc":62,"id":1736,"name":"digest_auth_algo","nodeType":"Attribute","startLoc":62,"text":"digest_auth_algo"},{"attributeType":"bool","col":8,"comment":"null","endLoc":1793,"id":1737,"name":"_patched_response","nodeType":"Attribute","startLoc":1793,"text":"self._patched_response"},{"className":"TestCaseInsensitiveDict","col":0,"comment":"null","endLoc":2136,"id":1738,"nodeType":"Class","startLoc":1997,"text":"class TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 
'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy"},{"col":4,"comment":"null","endLoc":2008,"header":"@pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, 
cid)","id":1739,"name":"test_init","nodeType":"Function","startLoc":1999,"text":"@pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid"},{"col":4,"comment":"null","endLoc":2014,"header":"def test_docstring_example(self)","id":1740,"name":"test_docstring_example","nodeType":"Function","startLoc":2010,"text":"def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']"},{"col":4,"comment":"null","endLoc":2019,"header":"def test_len(self)","id":1741,"name":"test_len","nodeType":"Function","startLoc":2016,"text":"def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2"},{"col":4,"comment":"null","endLoc":2024,"header":"def test_getitem(self)","id":1742,"name":"test_getitem","nodeType":"Function","startLoc":2021,"text":"def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'"},{"col":4,"comment":"__setitem__ should behave case-insensitively.","endLoc":2035,"header":"def test_fixes_649(self)","id":1743,"name":"test_fixes_649","nodeType":"Function","startLoc":2026,"text":"def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']"},{"col":4,"comment":"null","endLoc":2042,"header":"def test_delitem(self)","id":1744,"name":"test_delitem","nodeType":"Function","startLoc":2037,"text":"def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0"},{"col":4,"comment":"null","endLoc":2051,"header":"def test_contains(self)","id":1745,"name":"test_contains","nodeType":"Function","startLoc":2044,"text":"def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid"},{"col":4,"comment":"null","endLoc":2060,"header":"def test_get(self)","id":1746,"name":"test_get","nodeType":"Function","startLoc":2053,"text":"def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'"},{"col":4,"comment":"null","endLoc":2071,"header":"def test_update(self)","id":1747,"name":"test_update","nodeType":"Function","startLoc":2062,"text":"def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'"},{"col":4,"comment":"null","endLoc":2076,"header":"def 
test_update_retains_unchanged(self)","id":1748,"name":"test_update_retains_unchanged","nodeType":"Function","startLoc":2073,"text":"def test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'"},{"col":4,"comment":"null","endLoc":2081,"header":"def test_iter(self)","id":1749,"name":"test_iter","nodeType":"Function","startLoc":2078,"text":"def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys"},{"col":4,"comment":"null","endLoc":2090,"header":"def test_equality(self)","id":1750,"name":"test_equality","nodeType":"Function","startLoc":2083,"text":"def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()"},{"col":4,"comment":"null","endLoc":2095,"header":"def test_setdefault(self)","id":1751,"name":"test_setdefault","nodeType":"Function","startLoc":2092,"text":"def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'"},{"col":4,"comment":"null","endLoc":2104,"header":"def test_lower_items(self)","id":1752,"name":"test_lower_items","nodeType":"Function","startLoc":2097,"text":"def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset"},{"col":4,"comment":"null","endLoc":2114,"header":"def test_preserve_key_case(self)","id":1753,"name":"test_preserve_key_case","nodeType":"Function","startLoc":2106,"text":"def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset"},{"col":4,"comment":"null","endLoc":2126,"header":"def test_preserve_last_key_case(self)","id":1754,"name":"test_preserve_last_key_case","nodeType":"Function","startLoc":2116,"text":"def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset"},{"col":4,"comment":"null","endLoc":2136,"header":"def test_copy(self)","id":1755,"name":"test_copy","nodeType":"Function","startLoc":2128,"text":"def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy"},{"className":"TestMorselToCookieExpires","col":0,"comment":"Tests for morsel_to_cookie when morsel contains expires.","endLoc":2168,"id":1756,"nodeType":"Class","startLoc":2139,"text":"class TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel 
contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None"},{"col":4,"comment":"Test case where we convert expires from string time.","endLoc":2148,"header":"def test_expires_valid_str(self)","id":1757,"name":"test_expires_valid_str","nodeType":"Function","startLoc":2142,"text":"def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1"},{"col":4,"comment":"Test case where an invalid type is passed for expires.","endLoc":2160,"header":"@pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception)","id":1758,"name":"test_expires_invalid_int","nodeType":"Function","startLoc":2150,"text":"@pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)"},{"col":4,"comment":"Test case where expires is None.","endLoc":2168,"header":"def test_expires_none(self)","id":1759,"name":"test_expires_none","nodeType":"Function","startLoc":2162,"text":"def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None"},{"className":"TestMorselToCookieMaxAge","col":0,"comment":"Tests for morsel_to_cookie when morsel contains max-age.","endLoc":2189,"id":1760,"nodeType":"Class","startLoc":2171,"text":"class TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)"},{"col":4,"comment":"Test case where a valid max age in seconds is passed.","endLoc":2181,"header":"def test_max_age_valid_int(self)","id":1761,"name":"test_max_age_valid_int","nodeType":"Function","startLoc":2175,"text":"def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)"},{"col":4,"comment":"Test case where a 
invalid max age is passed.","endLoc":2189,"header":"def test_max_age_invalid_str(self)","id":1762,"name":"test_max_age_invalid_str","nodeType":"Function","startLoc":2183,"text":"def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)"},{"className":"TestTimeout","col":0,"comment":"null","endLoc":2267,"id":1763,"nodeType":"Class","startLoc":2192,"text":"class TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok"},{"col":4,"comment":"null","endLoc":2198,"header":"def test_stream_timeout(self, httpbin)","id":1764,"name":"test_stream_timeout","nodeType":"Function","startLoc":2194,"text":"def test_stream_timeout(self, httpbin):\n try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]"},{"col":4,"comment":"null","endLoc":2208,"header":"@pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text)","id":1765,"name":"test_invalid_timeout","nodeType":"Function","startLoc":2200,"text":"@pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n 
('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)"},{"col":4,"comment":"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n ","endLoc":2225,"header":"@pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout)","id":1766,"name":"test_none_timeout","nodeType":"Function","startLoc":2210,"text":"@pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200"},{"col":4,"comment":"null","endLoc":2237,"header":"@pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout)","id":1767,"name":"test_read_timeout","nodeType":"Function","startLoc":2227,"text":"@pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass"},{"col":4,"comment":"null","endLoc":2250,"header":"@pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout)","id":1768,"name":"test_connect_timeout","nodeType":"Function","startLoc":2239,"text":"@pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)"},{"col":4,"comment":"null","endLoc":2262,"header":"@pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout)","id":1769,"name":"test_total_timeout_connect","nodeType":"Function","startLoc":2252,"text":"@pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass"},{"col":4,"comment":"See: https://github.com/psf/requests/issues/2316","endLoc":2267,"header":"def test_encoded_methods(self, httpbin)","id":1770,"name":"test_encoded_methods","nodeType":"Function","startLoc":2264,"text":"def test_encoded_methods(self, httpbin):\n \"\"\"See: 
https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok"},{"className":"RedirectSession","col":0,"comment":"null","endLoc":2302,"id":1771,"nodeType":"Class","startLoc":2273,"text":"class RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string"},{"col":4,"comment":"null","endLoc":2279,"header":"def __init__(self, order_of_redirects)","id":1772,"name":"__init__","nodeType":"Function","startLoc":2274,"text":"def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False"},{"col":4,"comment":"null","endLoc":2283,"header":"def send(self, *args, **kwargs)","id":1773,"name":"send","nodeType":"Function","startLoc":2281,"text":"def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()"},{"col":4,"comment":"null","endLoc":2297,"header":"def build_response(self)","id":1774,"name":"build_response","nodeType":"Function","startLoc":2285,"text":"def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r"},{"col":4,"comment":"null","endLoc":2302,"header":"def _build_raw(self)","id":1775,"name":"_build_raw","nodeType":"Function","startLoc":2299,"text":"def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string"},{"col":40,"endLoc":2301,"id":1776,"nodeType":"Lambda","startLoc":2301,"text":"lambda *args: args"},{"attributeType":"null","col":8,"comment":"null","endLoc":2275,"id":1777,"name":"redirects","nodeType":"Attribute","startLoc":2275,"text":"self.redirects"},{"attributeType":"list","col":8,"comment":"null","endLoc":2276,"id":1778,"name":"calls","nodeType":"Attribute","startLoc":2276,"text":"self.calls"},{"attributeType":"bool","col":8,"comment":"null","endLoc":2279,"id":1779,"name":"trust_env","nodeType":"Attribute","startLoc":2279,"text":"self.trust_env"},{"attributeType":"int","col":8,"comment":"null","endLoc":2277,"id":1780,"name":"max_redirects","nodeType":"Attribute","startLoc":2277,"text":"self.max_redirects"},{"attributeType":"TypedDict","col":8,"comment":"null","endLoc":2278,"id":1781,"name":"cookies","nodeType":"Attribute","startLoc":2278,"text":"self.cookies"},{"className":"TestPreparingURLs","col":0,"comment":"null","endLoc":2587,"id":1782,"nodeType":"Class","startLoc":2430,"text":"class TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', 
u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError):\n r.json()"},{"col":4,"comment":"null","endLoc":2482,"header":"@pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http","id":1783,"name":"test_preparing_url","nodeType":"Function","startLoc":2431,"text":"@pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected"},{"col":4,"comment":"null","endLoc":2497,"header":"@pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url)","id":1787,"name":"test_preparing_bad_url","nodeType":"Function","startLoc":2484,"text":"@pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n 
u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()"},{"col":4,"comment":"null","endLoc":2507,"header":"@pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception)","id":1788,"name":"test_redirecting_to_bad_url","nodeType":"Function","startLoc":2499,"text":"@pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})"},{"col":4,"comment":"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n ","endLoc":2543,"header":"@pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix","id":1789,"name":"test_url_mutation","nodeType":"Function","startLoc":2509,"text":"@pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected"},{"col":4,"comment":"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n ","endLoc":2577,"header":"@pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix","id":1790,"name":"test_parameters_for_nonstandard_schemes","nodeType":"Function","startLoc":2545,"text":"@pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected"},{"col":4,"comment":"null","endLoc":2582,"header":"def test_post_json_nan(self, httpbin)","id":1791,"name":"test_post_json_nan","nodeType":"Function","startLoc":2579,"text":"def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)"},{"col":4,"comment":"null","endLoc":2587,"header":"def test_json_decode_compatibility(self, httpbin)","id":1793,"name":"test_json_decode_compatibility","nodeType":"Function","startLoc":2584,"text":"def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError):\n r.json()"},{"col":0,"comment":"null","endLoc":2314,"header":"def test_json_encodes_as_bytes()","id":1794,"name":"test_json_encodes_as_bytes","nodeType":"Function","startLoc":2305,"text":"def test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)"},{"col":0,"comment":"null","endLoc":2335,"header":"def test_requests_are_updated_each_time(httpbin)","id":1795,"name":"test_requests_are_updated_each_time","nodeType":"Function","startLoc":2317,"text":"def test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), {})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = 
SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call"},{"col":0,"comment":"null","endLoc":2356,"header":"@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https","id":1796,"name":"test_proxy_env_vars_override_default","nodeType":"Function","startLoc":2338,"text":"@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy"},{"col":0,"comment":"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n ","endLoc":2376,"header":"@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data)","id":1797,"name":"test_data_argument_accepts_tuples","nodeType":"Function","startLoc":2359,"text":"@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)"},{"col":0,"comment":"null","endLoc":2406,"header":"@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method'","id":1798,"name":"test_prepared_copy","nodeType":"Function","startLoc":2379,"text":"@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)"},{"col":0,"comment":"null","endLoc":2417,"header":"def test_urllib3_retries(httpbin)","id":1799,"name":"test_urllib3_retries","nodeType":"Function","startLoc":2409,"text":"def test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))"},{"col":0,"comment":"null","endLoc":2427,"header":"def test_urllib3_pool_connection_closed(httpbin)","id":1800,"name":"test_urllib3_pool_connection_closed","nodeType":"Function","startLoc":2420,"text":"def test_urllib3_pool_connection_closed(httpbin):\n s = 
requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)"},{"attributeType":"null","col":36,"comment":"null","endLoc":37,"id":1801,"name":"Urllib3Timeout","nodeType":"Attribute","startLoc":37,"text":"Urllib3Timeout"},{"attributeType":"str","col":0,"comment":"null","endLoc":41,"id":1802,"name":"TARPIT","nodeType":"Attribute","startLoc":41,"text":"TARPIT"},{"attributeType":"str","col":0,"comment":"null","endLoc":44,"id":1803,"name":"INVALID_PROXY","nodeType":"Attribute","startLoc":44,"text":"INVALID_PROXY"},{"attributeType":"bool","col":4,"comment":"null","endLoc":49,"id":1804,"name":"HAS_MODERN_SSL","nodeType":"Attribute","startLoc":49,"text":"HAS_MODERN_SSL"},{"attributeType":"bool","col":4,"comment":"null","endLoc":51,"id":1805,"name":"HAS_MODERN_SSL","nodeType":"Attribute","startLoc":51,"text":"HAS_MODERN_SSL"},{"attributeType":"bool","col":4,"comment":"null","endLoc":55,"id":1806,"name":"HAS_PYOPENSSL","nodeType":"Attribute","startLoc":55,"text":"HAS_PYOPENSSL"},{"attributeType":"bool","col":4,"comment":"null","endLoc":57,"id":1807,"name":"HAS_PYOPENSSL","nodeType":"Attribute","startLoc":57,"text":"HAS_PYOPENSSL"},{"attributeType":"SendCall","col":0,"comment":"null","endLoc":2270,"id":1808,"name":"SendCall","nodeType":"Attribute","startLoc":2270,"text":"SendCall"},{"col":0,"comment":"","endLoc":3,"header":"test_requests.py#","id":1809,"name":"","nodeType":"Function","startLoc":3,"text":"\"\"\"Tests for Requests.\"\"\"\n\nTARPIT = 'http://10.255.255.1'\n\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))"}]} \ No newline at end of file