How to convert JSON to a DataFrame with pandas in Python
This article demonstrates, with a working example, how to convert JSON data into a pandas DataFrame in Python. It is shared here for your reference; the details are as follows:
# -*- coding:utf-8 -*-
#!python3
import re
import json
import os

import requests
import pandas as pd
from bs4 import BeautifulSoup
from pandas.io.json import json_normalize  # in pandas >= 1.0, use pd.json_normalize instead


class image_structs():
    """Holds one image record: its DOM id and its URL."""
    def __init__(self):
        self.picture_url = {
            "image_id": '',
            "picture_url": ''
        }


class data_structs():
    """Holds one item record; picture_url is a nested list of image dicts."""
    def __init__(self):
        # columns = ['title', 'item_url', 'id', 'picture_url', 'std_desc',
        #            'description', 'information', 'fitment']
        self.info = {
            "title": '',
            "item_url": '',
            "id": 0,
            "picture_url": [],
            "std_desc": '',
            "description": '',
            "information": '',
            "fitment": ''
        }


# List page:
# https://waldoch.com/store/catalogsearch/result/index/?cat=0&limit=200&p=1&q=nerf+bar
# Item page:
# https://waldoch.com/store/new-oem-ford-f-150-f150-5-running-boards-nerf-bar-crew-cab-2015-w-brackets-fl34-16451-ge5fm6.html
def get_item_list(outfile):
    """Crawl the six search-result pages and save title/item_url/id to Excel."""
    result = []
    for i in range(6):
        print(i)
        i = str(i + 1)
        url = ("https://waldoch.com/store/catalogsearch/result/index/"
               "?cat=0&limit=200&p=" + i + "&q=nerf+bar")
        web = requests.get(url)
        soup = BeautifulSoup(web.text, "html.parser")
        alink = soup.find_all("a", class_="product-image")
        for a in alink:
            title = a["title"]
            item_url = a["href"]
            result.append([title, item_url])
    df = pd.DataFrame(result, columns=["title", "item_url"])
    df = df.drop_duplicates()
    df["id"] = df.index
    df.to_excel(outfile, index=False)


def get_item_info(file, outfile):
    """Visit each item page, build a nested dict and flatten it with json_normalize."""
    DEFAULT_FALSE = ""
    df = pd.read_excel(file)
    for i in df.index:
        id = df.loc[i, "id"]
        if os.path.exists(str(int(id)) + ".xlsx"):
            continue
        item_url = df.loc[i, "item_url"]
        url = item_url
        web = requests.get(url)
        soup = BeautifulSoup(web.text, "html.parser")
        # images
        imglink = soup.find_all("img", class_=re.compile("^gallery-image"))
        data = data_structs()
        data.info["title"] = df.loc[i, "title"]
        data.info["id"] = id
        data.info["item_url"] = item_url
        for a in imglink:
            image = image_structs()
            image.picture_url["image_id"] = a["id"]
            image.picture_url["picture_url"] = a["src"]
            print(image.picture_url)
            data.info["picture_url"].append(image.picture_url)
        print(data.info)
        # std_desc
        std_desc = soup.find("div", itemprop="description")
        try:
            strings_desc = []
            for ii in std_desc.stripped_strings:
                strings_desc.append(ii)
            strings_desc = "\n".join(strings_desc)
        except:
            strings_desc = DEFAULT_FALSE
        # description
        try:
            desc = soup.find('h2', text="Description")
            desc = desc.find_next()
        except:
            desc = DEFAULT_FALSE
        description = desc
        # information
        try:
            information = soup.find("h2", text='Information')
            desc = information
            desc = desc.find_next()
        except:
            desc = DEFAULT_FALSE
        information = desc
        # fitment
        try:
            fitment = soup.find('h2', text='Fitment')
            desc = fitment
            desc = desc.find_next()
        except:
            desc = DEFAULT_FALSE
        fitment = desc
        data.info["std_desc"] = strings_desc
        data.info["description"] = str(description)
        data.info["information"] = str(information)
        data.info["fitment"] = str(fitment)
        print(data.info.keys())
        # Flatten the nested dict: one row per entry in "picture_url",
        # with the remaining item-level fields repeated as meta columns.
        singledf = json_normalize(data.info, "picture_url",
                                  ['title', 'item_url', 'id', 'std_desc',
                                   'description', 'information', 'fitment'])
        singledf.to_excel("test.xlsx", index=False)
        exit()  # stop after the first item (test run)
        # print(df.ix[i])
    df.to_excel(outfile, index=False)


# get_item_list("item_urls.xlsx")
get_item_info("item_urls.xlsx", "item_urls_info.xlsx")
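The key step is the json_normalize call at the end of get_item_info: it expands the nested picture_url list into one row per image while repeating the scalar item fields as metadata columns. Below is a minimal, self-contained sketch of that call with made-up sample data (the field values and URLs are placeholders, not taken from the site), so the conversion can be tried without running the crawler. Note that in pandas 1.0 and later, json_normalize is available as pd.json_normalize; the pandas.io.json import used above is deprecated there.

import pandas as pd

# Made-up record shaped like data_structs.info (placeholder values only).
record = {
    "title": "Example nerf bar",
    "item_url": "https://example.com/item",
    "id": 0,
    "picture_url": [
        {"image_id": "image-0", "picture_url": "https://example.com/0.jpg"},
        {"image_id": "image-1", "picture_url": "https://example.com/1.jpg"},
    ],
    "std_desc": "std desc text",
    "description": "description html",
    "information": "information html",
    "fitment": "fitment html",
}

# Expand the nested "picture_url" list into rows and repeat the scalar
# fields as metadata columns. (On pandas < 1.0, import json_normalize
# from pandas.io.json instead of calling pd.json_normalize.)
flat = pd.json_normalize(
    record,
    record_path="picture_url",
    meta=["title", "item_url", "id", "std_desc",
          "description", "information", "fitment"],
)
print(flat)
# Two rows, one per image, each carrying the item-level columns
# alongside image_id and picture_url.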
The Python modules used above can all be installed with the pip install command, for example (a quick read-back check of the Excel output follows these commands):
pip install BeautifulSoup4
pip install xlrd
pip install openpyxl
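As a quick sanity check of the Excel round trip that these packages support, the list file written by get_item_list can be read back with pandas. This is a minimal sketch that assumes item_urls.xlsx has already been produced; depending on the pandas version, read_excel uses xlrd or openpyxl as its engine for .xlsx files.

import pandas as pd

# Read back the file written by get_item_list(); assumes it exists in the
# working directory.
df = pd.read_excel("item_urls.xlsx")
print(df.columns.tolist())   # expected: ['title', 'item_url', 'id']
print(len(df), "items")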
I hope this article is helpful to readers working on Python programming.