Why does this print an empty list and dictionaries?

Posted on 2025-02-07 19:26:01

import requests
from bs4 import BeautifulSoup
import json

data = {
    0:{
        0:"title",
        1:"dates",
        2:"city/state",
        3:"country"
    },
    1:{
        0:"event",
        1:"reps",
        2:"prize"
    },
    2:{
        0:"results"
    }
}


url = "https://mms.kcbs.us/members/evr_search.php?org_id=KCBA"
response = requests.get(url).text
soup = BeautifulSoup(response, features='lxml')
all_data = []
for element in soup.find_all('div', class_="row"):
    event = {}
    for i, col in enumerate(element.find_all('div', class_='col-md-4')):
        for j, item in enumerate(col.strings):
            event[data[i][j]] = item
    all_data.append(event)

print(json.dumps(all_data,indent=4))

Here's a link to the website: https://mms.kcbs.us/members/evr_search.php?org_id=KCBA

I'm unsure why nothing gets added to the list and dictionaries.



Comments (1)

爱要勇敢去追 2025-02-14 19:26:03

The data you see is loaded from an external URL via JavaScript. To simulate the AJAX request, you can use the following example:

import json
import requests
from bs4 import BeautifulSoup


# AJAX endpoint that the search page calls in the background
api_url = "https://mms.kcbs.us/members/evr_search_ol_json.php"

# Query parameters mirroring the search form (date range, radius, event type, ...)
params = {
    "otype": "TEXT",
    "evr_map_type": "2",
    "org_id": "KCBA",
    "evr_begin": "6/16/2022",
    "evr_end": "7/16/2022",
    "evr_address": "",
    "evr_radius": "50",
    "evr_type": "269",
    "evr_openings": "0",
    "evr_region": "",
    "evr_region_type": "1",
    "evr_judge": "0",
    "evr_keyword": "",
    "evr_rep_name": "",
}

# The endpoint responds with an HTML fragment, which BeautifulSoup can parse
soup = BeautifulSoup(
    requests.get(api_url, params=params).content, "html.parser"
)


# Map (column index, string index) -> output field name
data = {
    0: {0: "title", 1: "dates", 2: "city/state", 3: "country"},
    1: {0: "event", 1: "reps", 2: "prize"},
    2: {0: "results"},
}

# Each event is one "row" div containing three "col-md-4" columns
all_data = []
for element in soup.find_all("div", class_="row"):
    event = {}
    for i, col in enumerate(element.find_all("div", class_="col-md-4")):
        for j, item in enumerate(col.strings):
            event[data[i][j]] = item
    all_data.append(event)

print(json.dumps(all_data, indent=4))

Prints:

[
    {
        "title": "Frisco BBQ Challenge",
        "dates": "6/16/2022 - 6/18/2022",
        "city/state": "Frisco, CO 80443",
        "country": "UNITED STATES",
        "event": "STATE CHAMPIONSHIP",
        "reps": "Reps: BUNNY TUTTLE, RICH TUTTLE, MICHAEL WINTER",
        "prize": "Prize Money: $13,050.00",
        "results": "Results Not In"
    },
    {
        "title": "York County BBQ Festival",
        "dates": "6/17/2022 - 6/18/2022",
        "city/state": "Delta, PA 17314",
        "country": "UNITED STATES",
        "event": "STATE CHAMPIONSHIP",
        "reps": "Reps: ANGELA MCKEE, ROBERT MCKEE, LOUISE WEIDNER",
        "prize": "Prize Money: $5,500.00",
        "results": "Results Not In"
    },


...and so on.
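
As a quick cross-check on the explanation above: the event rows the original script looks for are not present in the static HTML that requests downloads; they are injected later by JavaScript, which is why all_data stays empty. A minimal sketch reusing the question's URL and selectors (assuming the page still behaves the same way):

import requests
from bs4 import BeautifulSoup

url = "https://mms.kcbs.us/members/evr_search.php?org_id=KCBA"
soup = BeautifulSoup(requests.get(url).text, "lxml")

# Count the "row" divs the original loop iterates over; since the event listing
# is rendered client-side, this is expected to find no rows with event data.
print(len(soup.find_all("div", class_="row")))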
