.stanis-tits.latest created via script. PEP8 fix
parent 928a27f104
commit 6d2397301b
.gitignore (vendored)
@@ -1,2 +1,3 @@
 .pip
 *.jpg
+images/
@@ -1 +0,0 @@
-0
@@ -1,34 +1,42 @@
 #!/usr/bin/env python3
 
-import sys
-sys.path.append('./.pip')
-import requests
-from bs4 import BeautifulSoup
 import re
 import shutil
 import os.path
+try:
+    import requests
+    from bs4 import BeautifulSoup
+except ImportError:
+    import sys
+    sys.path.append('./.pip')
+    import requests
+    from bs4 import BeautifulSoup
 
 __author__ = 'Alexander Popov'
-__version__ = '0.0.1'
+__version__ = '0.1.0'
 __license__ = 'Unlicense'
 
 IMAGES_DIR = './images'
 COOKIES = dict(block='951')
 URL = 'http://blog.stanis.ru/?back=%d'
 PAGE = 0
+if not os.path.exists('%s/.stanis-tits.latest' % IMAGES_DIR):
+    if not os.path.exists('%s' % IMAGES_DIR):
+        os.mkdir('%s' % IMAGES_DIR)
+
+    with open('%s/.stanis-tits.latest' % IMAGES_DIR, 'w') as f:
+        f.write('0')
 with open('%s/.stanis-tits.latest' % IMAGES_DIR, 'r') as f:
     LATEST_FILE = f.read()
 STOP = False
 NEXT_LATEST = None
 
-while STOP == False:
+while STOP is False:
     print('Loading page %d' % PAGE)
 
     r = requests.get(URL % PAGE, cookies=COOKIES)
 
 
     soup = BeautifulSoup(r.text.encode('cp1251'),
                          "html.parser", from_encoding="windows-1251")
     images = soup.findAll('img', src=re.compile('img/*'))
 
     for image in images:
@@ -36,17 +44,18 @@ while STOP == False:
             STOP = True
 
         if PAGE == 0:
-            if NEXT_LATEST == None:
+            if NEXT_LATEST is None:
                 NEXT_LATEST = str(image['src'].split('/')[1].split('.')[0])
                 with open('%s/.stanis-tits.latest' % IMAGES_DIR, 'w+') as f:
                     f.write(NEXT_LATEST)
 
-        if not os.path.exists('%s/%s' % (IMAGES_DIR, image['src'].split('/')[1],)):
+        if not os.path.exists('%s/%s' % (IMAGES_DIR,
+                                         image['src'].split('/')[1],)):
             print('\tDownload %s' % image['src'].split('/')[1])
-            response = requests.get('http://blog.stanis.ru/%s' % image['src'], stream=True)
+            response = requests.get('http://blog.stanis.ru/%s'
+                                    % image['src'], stream=True)
-            with open('%s/%s' % (IMAGES_DIR, image['src'].split('/')[1]), 'wb') as out_image:
+            with open('%s/%s' % (IMAGES_DIR, image['src'].split('/')[1]),
+                      'wb') as out_image:
                 shutil.copyfileobj(response.raw, out_image,)
 
     PAGE += 1
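Taken together, the script changes follow two common patterns: import the dependencies from site-packages and fall back to the vendored ./.pip directory only when that import fails, and create the .stanis-tits.latest marker on first run instead of tracking it in the repository. A minimal standalone sketch of those two patterns, separate from the rest of the scraper (the MARKER name is illustrative, not part of the commit):

import os.path

# Prefer installed packages; fall back to the vendored ./.pip directory
# only when the normal import fails.
try:
    import requests
except ImportError:
    import sys
    sys.path.append('./.pip')
    import requests

IMAGES_DIR = './images'
MARKER = '%s/.stanis-tits.latest' % IMAGES_DIR  # illustrative shorthand

# First run: create the images directory and seed the marker with '0',
# so the file no longer has to be shipped in the repository.
if not os.path.exists(MARKER):
    if not os.path.exists(IMAGES_DIR):
        os.mkdir(IMAGES_DIR)
    with open(MARKER, 'w') as f:
        f.write('0')

# Later runs resume from the last downloaded image id stored in the marker.
with open(MARKER, 'r') as f:
    latest = f.read()
print('Resuming after image id %s' % latest)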