baidusitemap SEO Optimization

Preface

Baidu is currently the largest search engine in China, so to bring more traffic to this blog I set up some SEO for it: the script below reads the blog's baidusitemap.xml, pulls out every post URL, and actively pushes each one to Baidu's link-submission interface.

Here is a screenshot of the result:

(screenshot)
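
For reference, baidusitemap.xml follows the standard sitemap layout, and the script only cares about the <loc> entries. Below is a minimal sketch of that format and of how BeautifulSoup extracts the URLs from it; the sample post URLs are made-up placeholders:

# coding:utf-8
from bs4 import BeautifulSoup

# A made-up fragment in the standard sitemap format; the real baidusitemap.xml
# has one <url>/<loc> block per post.
sample = '''
<urlset>
  <url><loc>http://example.github.io/2017/01/01/hello-world/</loc></url>
  <url><loc>http://example.github.io/2017/02/03/another-post/</loc></url>
</urlset>
'''

soup = BeautifulSoup(sample, 'html.parser')
for loc in soup.find_all('loc'):
    print(loc.string)  # each URL found this way is what gets pushed to Baidu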

Python script

# coding:utf-8
import requests
import time
from bs4 import BeautifulSoup as bp

print('自动推送开启....')
time.sleep(0.5)
site_url = 'https://******.github.io/baidusitemap.xml'

# Fetch the sitemap and parse it so the <loc> entries can be extracted.
try:
    print('获取sitemap链接....')
    # print(requests.get(site_url).content)
    data_ = bp(requests.get(site_url).content, "html.parser")
    print('成功')
    print(data_.url)
except requests.RequestException as err:
    print(err)

list_url = []


def get_(data):
    # Push a single URL to Baidu's link-submission interface.
    headers = {'User-Agent': 'curl/7.12.1',
               'Content-Type': 'text/plain'}
    try:
        r = requests.post(
            url='http://data.zz.baidu.com/urls?site=yuwangi.github.io&token=***token***',
            data=data, headers=headers)
        print(r.status_code)
        print(r.content)
    except requests.RequestException as err:
        print(err)


# Collect every <loc> URL from the sitemap.
print(data_.find_all('loc'))
print('---------------------------------')
for x, y in enumerate(data_.find_all('loc')):
    print(x, y.string)
    # Add the 'www.' prefix to each URL before pushing.
    list_url.append(y.string.replace('http://', 'http://www.'))

print('---------------------------------')

print('开始推送....')

# Push the URLs one by one, pausing between requests.
for x in list_url:
    print('当前推送条目为:' + x)
    time.sleep(2)
    get_(x)
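
The script pushes one link per request with a two-second pause, which gets slow once the sitemap grows. As far as I can tell from Baidu's documentation, the push interface also accepts several links at once: a text/plain body with one URL per line. Here is a minimal sketch of that variant, reusing the endpoint and token placeholder from the script above (push_all is a hypothetical helper):

import requests


def push_all(urls):
    # Join the URLs into one newline-separated body and submit them to the
    # same Baidu push endpoint in a single request.
    headers = {'Content-Type': 'text/plain'}
    r = requests.post(
        url='http://data.zz.baidu.com/urls?site=yuwangi.github.io&token=***token***',
        data='\n'.join(urls), headers=headers)
    print(r.status_code)
    print(r.content)  # typically a small JSON body with success / remain counts

Calling push_all(list_url) would then replace the per-URL loop at the end of the script.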

Running it:

(screenshot)