#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Project: GetBaiduImage
# File   : main.py
# Author : Long.Xu <fangkailove@yeah.net>
#          http://gnolux.blog.csdn.net
#  
# Time   : 2023/5/16 19:38
# Copyright 2023 Long.Xu All rights Reserved.

import json
import requests


def decode_url(url):
    """Translate an obfuscated Baidu objURL into the real image URL."""
    if url.startswith("http"):
        return url
    table = {'w': "a", 'k': "b", 'v': "c", '1': "d", 'j': "e", 'u': "f", '2': "g", 'i': "h",
             't': "i", '3': "j", 'h': "k", 's': "l", '4': "m", 'g': "n", '5': "o", 'r': "p",
             'q': "q", '6': "r", 'f': "s", 'p': "t", '7': "u", 'e': "v", 'o': "w", '8': "1",
             'd': "2", 'n': "3", '9': "4", 'c': "5", 'm': "6", '0': "7", 'b': "8", 'l': "9", 'a': "0"
             }
    url = url.replace('_z2C$q', ":").replace("_z&e3B", ".").replace('AzdH3F', "/")
    return ''.join([table[c] if c in table else c for c in url])

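# Illustrative only: with the table above, decode_url("wkv1ju2i") returns
# "abcdefgh", and a value that already starts with "http" is returned
# unchanged. Real objURL values also contain the _z2C$q / _z&e3B / AzdH3F
# markers, which are replaced by ":", "." and "/" before the character map
# is applied.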

def DownImage(SearchWord, PageNo=0):
    """Fetch one page (up to 30 results) of a Baidu image search for
    SearchWord and save the images to the current directory."""
    url = 'https://image.baidu.com/search/acjson'
    params = {
          'tn': 'resultjson_com',
          'logid': '5280191307193676757',
          'ipn': 'rj',
          'ct': '201326592',
          'fp': 'result',
          'queryWord': SearchWord,
          'cl': '2',
          'lm': '-1',
          'ie': 'utf-8',
          'oe': 'utf-8',
          'st': '-1',
          'ic': '0',
          'word': SearchWord,
          'face': '0',
          'istype': '2',
          'nc': '1',
          'pn': PageNo*30,  # result offset: increase PageNo to page through results
          'rn': '30',
          'gsm': 'b4'
             }
    headers = {
           # Cookie adapted from one shared by another user online
           'Cookie': 'BDqhfp=%E5%8F%B2%E5%8A%AA%E6%AF%94%26%260-10-1undefined%26%263746%26%264; BIDUPSID=5A04241009FD166564DACF4050551F2D; PSTM=1611376447; __yjs_duid=1_2de46e288096c13a7edea3d05a5204421620544039468; BAIDUID=F163DBC1DF098AF604AE753E72659BAA:FG=1; BDUSS=Tczc3RnamRhaUhicm5rfm83V3pkMTBySUd1Z0V4Q25mbXhYdElRemJHdVktVnRoRVFBQUFBJCQAAAAAAQAAAAEAAACR090iR3JpZmZleTUxMQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJhsNGGYbDRhN; BDUSS_BFESS=Tczc3RnamRhaUhicm5rfm83V3pkMTBySUd1Z0V4Q25mbXhYdElRemJHdVktVnRoRVFBQUFBJCQAAAAAAQAAAAEAAACR090iR3JpZmZleTUxMQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJhsNGGYbDRhN; BAIDUID_BFESS=F163DBC1DF098AF604AE753E72659BAA:FG=1; BDORZ=AE84CDB3A529C0F8A2B9DCDD1D18B695; userFrom=cn.bing.com; IMG_WH=573_756; H_WISE_SIDS=110085_178384_179349_181588_182531_183327_183611_183750_184578_185029_185517_185653_185750_186317_186411_187020_187195_187206_187292_187450_187663_187670_187928_8000097_8000100_8000126_8000140_8000150_8000169_8000177_8000185; BDRCVFR[X_XKQks0S63]=mk3SLVN4HKm; firstShowTip=1; cleanHistoryStatus=0; indexPageSugList=%5B%22%E5%8F%B2%E5%8A%AA%E6%AF%94%22%2C%22%E6%B5%B7%E8%B4%BC%E7%8E%8B%22%2C%22%E8%8A%B1%E6%9C%B5%22%2C%22undertale%22%2C%22python%22%2C%22%E8%8A%B1%E7%AE%80%E7%AC%94%E7%94%BB%22%2C%22%E5%90%8C%E5%9E%8B%E4%B8%96%E4%BB%A3%E4%BA%A4%E6%9B%BF%22%2C%22%E6%A8%9F%E7%A7%91%E8%8A%B1%22%2C%22%E6%A8%9F%E7%A7%91%E8%8A%B1%E7%9A%84%E8%A7%A3%E5%89%96%22%5D; BDRCVFR[dG2JNJb_ajR]=mk3SLVN4HKm; ab_sr=1.0.1_MTY1OTI2YjEyMzViNzQyYWVhZjdhZWQxNzc0YjE1NzA2NGEyZmMwZGEwNzRmMWVjZGM3N2IzMDlkYjViZWVlOGYyNTllZDMzZjgwZGMxZWZhOWFiMmEyYjg0NjgyYzgwYjk0Y2QxYWVmM2E1ZTFiZjkyYTNlOGYzMDg1MWVjNjUyODViYzMyZjc2Mjk2OGFmZmZjZTkwNjg3OWI4NjhjZjdiNzJmNTY3NTIyZjg2ODVjMzUzNTExYjhiMjkxZjEx; BDRCVFR[-pGxjrCMryR]=mk3SLVN4HKm',
           'Referer': 'https://image.baidu.com/search/index?tn=baiduimage&ipn=r&ct=201326592&cl=2&lm=-1&st=-1&fm=index&fr=&hs=0&xthttps=111110&sf=1&fmq=&pv=&ic=0&nc=1&z=&se=1&showtab=0&fb=0&width=&height=&face=0&istype=2&ie=utf-8&word=%E5%8F%B2%E5%8A%AA%E6%AF%94&oq=%E5%8F%B2%E5%8A%AA%E6%AF%94&rsp=-1',
           'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Mobile Safari/537.36 Edg/93.0.961.52'
              }
    html = requests.get(url, params=params, headers=headers)
    txt = html.text
    j = json.loads(txt)
    nindex = PageNo*30
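    # 'data' in the JSON response is a list of result dicts; each dict's
    # 'objURL' field holds the obfuscated original-image URL.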
    for d in j['data']:
        pic_url = d.get('objURL')
        if pic_url:
            # objURL is obfuscated; translate it back into the real image URL
            pic_url = decode_url(pic_url)
            nindex = nindex + 1
            oname = "./%s_%s.jpg" % (SearchWord, nindex)
            print(pic_url, oname)
            try:
                rp = requests.get(pic_url, headers=headers)
                if rp.status_code == 200:
                    print(rp.status_code)
                    # write (not append) the image bytes to SearchWord_index.jpg
                    with open(oname, 'wb') as f:
                        f.write(rp.content)
            except Exception as e:
                print(e)

DownImage("美女",0)