Backend Development 6 min read

How to Scrape All King of Glory Hero Images with Python Requests

This tutorial demonstrates how to use Python's requests library to retrieve every hero image from the game King of Glory, organizing the downloads into separate directories per hero, providing a ready-made collection of high‑resolution wallpapers.

Python Programming Learning Circle
Python Programming Learning Circle
Python Programming Learning Circle
How to Scrape All King of Glory Hero Images with Python Requests

Basic Environment Setup

Version: Python 3

System: Windows

Required module: requests

<code>pip install requests</code>

Full Code

<code># -*- coding: utf-8 -*-
"""
Created on Wed Dec 13 13:49:52 2017
@author: KillerTwo
"""
import requests
import os

hero_list_url = 'http://pvp.qq.com/web201605/js/herolist.json'
hero_skin_root_url = 'http://game.gtimg.cn/images/yxzj/img201606/skin/hero-info/'
skin_base_dir = 'C:\\Users\\lwt27\\Pictures\\image\\heroskin\\'

def get_ename(hero_json):
    """Map each hero's display name (cname) to its numeric hero id (ename).

    Example result: {'小乔': 106, ...}
    """
    return {hero['cname']: hero['ename'] for hero in hero_json}

def get_skin_name(hero_json):
    """Map each hero's display name to its raw '|'-separated skin-name string.

    Example result: {'小乔': '恋之微风|万圣前夜|...', ...}
    """
    return {hero['cname']: hero['skin_name'] for hero in hero_json}

def get_hero_skin_count(cname_skin_name):
    """Count how many skins each hero has, e.g. {'小乔': 5, ...}.

    The skin string is '|'-separated, so the count is one more than the
    number of separators (identical to len(s.split('|')), including for
    an empty string, which yields 1).
    """
    return {name: skins.count('|') + 1 for name, skins in cname_skin_name.items()}

def get_skin_name_url(skin_base_url, cname_skin_count, cname_ename):
    """Build every skin-image URL for each hero.

    Returns {hero_name: [url1, url2, ...], ...}, where the URLs follow the
    pattern <base>/<hero_id>/<hero_id>-bigskin-<n>.jpg for n = 1..skin_count.
    """
    cname_url_list = {}
    for cname, count in cname_skin_count.items():
        hero_id = str(cname_ename[cname])
        prefix = f'{skin_base_url}{hero_id}/{hero_id}-bigskin-'
        cname_url_list[cname] = [f'{prefix}{num}.jpg' for num in range(1, count + 1)]
    return cname_url_list

def get_cname_skin_name(cname_skin_name):
    """Split each hero's '|'-separated skin-name string into a list of names.

    Example: {'小乔': '恋之微风|万圣前夜'} -> {'小乔': ['恋之微风', '万圣前夜']}

    Fix: the original wrapped str.split in an identity list comprehension
    ([name for name in s.split('|')]), a pointless extra copy — split
    already returns a list.
    """
    return {cname: skin_name_str.split('|')
            for cname, skin_name_str in cname_skin_name.items()}

def mkdir(path):
    """Create directory `path` (trailing backslashes and whitespace stripped).

    Returns True when the directory was newly created, False when it
    already existed. Prints a status message either way.
    """
    target = path.strip().rstrip('\\')
    if os.path.exists(target):
        print(target + ' 目录已存在')
        return False
    os.makedirs(target)
    print(target + ' 创建成功')
    return True

def get_hero_skin(cname_url_list, cname_skin_name):
    """Download every skin image for each hero into its own directory.

    Heroes whose directory already exists are skipped (assumed downloaded).

    Fixes vs. the original:
    - writes to absolute paths instead of calling os.chdir(), which mutated
      global process state (the current working directory) as a side effect;
    - pairs skin names with URLs via zip() instead of an index loop, so a
      mismatch between the two lists can no longer raise IndexError.
    """
    for cname, skin_urls in cname_url_list.items():
        hero_dir = skin_base_dir + cname
        if not mkdir(hero_dir):
            # Directory already existed — keep the original behavior of
            # skipping this hero entirely rather than re-downloading.
            continue
        for skin_name, url in zip(cname_skin_name[cname], skin_urls):
            r = requests.get(url)
            file_path = os.path.join(hero_dir, skin_name + '.jpg')
            with open(file_path, 'wb') as f:
                f.write(r.content)

if __name__ == '__main__':
    # Fetch the hero roster JSON and derive the lookup tables needed
    # to build and download every skin image.
    response = requests.get(hero_list_url)
    hero_json = response.json()

    name_to_id = get_ename(hero_json)
    name_to_skin_str = get_skin_name(hero_json)
    name_to_skin_list = get_cname_skin_name(name_to_skin_str)
    name_to_skin_count = get_hero_skin_count(name_to_skin_str)

    skin_urls = get_skin_name_url(hero_skin_root_url, name_to_skin_count, name_to_id)
    get_hero_skin(skin_urls, name_to_skin_list)
</code>

The script fetches the hero list JSON, extracts each hero’s ID and skin names, builds URLs for all skin images, creates a folder per hero, and downloads the images, resulting in a ready‑to‑use wallpaper collection.

automation · Web Scraping · Requests · Image Download
Python Programming Learning Circle
Written by

Python Programming Learning Circle

A global community of Chinese Python developers offering technical articles, columns, original video tutorials, and problem sets. Topics include web full‑stack development, web scraping, data analysis, natural language processing, image processing, machine learning, automated testing, DevOps automation, and big data.

0 followers
Reader feedback

How this landed with the community

login Sign in to like

Rate this article

Was this worth your time?

Sign in to rate
Discussion

0 Comments

Thoughtful readers leave field notes, pushback, and hard-won operational detail here.