Mirror of https://github.com/mrfluffy-dev/hart-cli.git, synced 2026-01-17 18:00:34 +00:00
Merge pull request #1 from DemonKingSwarn/patch-1
Chore: made it faster
hart-cli.py (60 lines changed)
@@ -1,20 +1,34 @@
 #!/usr/bin/env python3
 
-import requests
-from bs4 import BeautifulSoup
+import httpx
+from bs4 import BeautifulSoup as bs
+import pyperclip as clip
 
 import os
 import subprocess
-import pyperclip as clip
-os.system('clear')
-download_path = os.environ['HOME']+"/Pictures/hart-cli"
+from os.path import expanduser
+
+headers = {
+    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:101.0) Gecko/20100101 Firefox/101.0"
+}
+
+client = httpx.Client(headers=headers, follow_redirects=True)
+
+home = expanduser("~")
+download_path = f"{home}/pix/hart-cli"
+os.system(f"mkdir -p {download_path}")
 
 item = 0
 page_num = 1
-URL = "https://yande.re/post?page="+str(page_num)
-page = requests.get(URL)
+url = f"https://yande.re/post?page={page_num}"
+page = client.get(url)
 
 links_arr_full = []
 links_arr_preview = []
 
 def get_new_urls():
-    global URL
+    global url
     global page
     global page_num
     global soup
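Review note: the speedup here comes from replacing per-call requests.get() with a single httpx.Client, which keeps one TCP/TLS connection open and reuses it across page fetches. A minimal sketch of that difference, assuming network access to yande.re; the timing harness and page count are illustrative, not part of the PR:

```python
import time

import httpx
import requests

URL = "https://yande.re/post?page={}"

# Without pooling: every call pays a fresh TCP + TLS handshake.
t0 = time.time()
for n in range(1, 4):
    requests.get(URL.format(n))
print(f"requests, new connection per call: {time.time() - t0:.2f}s")

# With a shared client: the handshake is paid once, then reused.
t0 = time.time()
with httpx.Client(follow_redirects=True) as client:
    for n in range(1, 4):
        client.get(URL.format(n))
print(f"httpx, pooled client: {time.time() - t0:.2f}s")
```

To be fair to requests, a requests.Session() would pool connections the same way; the PR simply switches libraries while adding the shared client.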
@@ -23,16 +37,19 @@ def get_new_urls():
     global links_arr_full
     global links_preview
     global links_arr_preview
 
+    os.system("clear")
+
     links_arr_full.clear
-    links_arr_full.clear
-    URL = "https://yande.re/post?page="+str(page_num)
-    page = requests.get(URL)
-    soup = BeautifulSoup(page.content, "html.parser")
+    links_arr_preview.clear
+    soup = bs(page.content, "html.parser")
     main_content = soup.find(id="post-list-posts")
     main_content = str(main_content)
     main_content = main_content.replace("smallimg", "largeimg")
-    main_content = BeautifulSoup(main_content, features="lxml")
+    main_content = bs(main_content, features="lxml")
     main_content = main_content.find(id="post-list-posts")
 
     links_full = main_content.find_all_next("a", class_="directlink largeimg")
     links_arr_full = []
     links_preview = main_content.find_all_next("img", class_="preview")
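Review note: both sides of this hunk reference list.clear without calling it, so the attribute lookup is a silent no-op and neither list is ever emptied. Also, as reconstructed here, the URL rebuild and refetch were removed from get_new_urls(), so the function re-parses the module-level page response even after page_num changes. The no-op is easy to demonstrate:

```python
links_arr_full = ["a", "b"]

links_arr_full.clear    # no-op: builds a bound-method object and discards it
assert links_arr_full == ["a", "b"]

links_arr_full.clear()  # the call actually empties the list
assert links_arr_full == []
```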
@@ -47,39 +64,40 @@ def get_new_urls():
 def next():
     global item
     global page_num
 
+    os.system("clear")
     if item != len(links_arr_preview)-1:
         item += 1
-        os.system('clear')
     else:
         page_num += 1
         item = 1
         get_new_urls()
-        os.system('clear')
 
-def previus():
+def previous():
     global item
     global page_num
     global links_arr_preview
 
+    os.system("clear")
     if item != 1:
         item -= 1
-        os.system('clear')
     else:
         page_num -= 1
         get_new_urls()
         item = len(links_arr_preview)-1
-        os.system('clear')
 
 def download():
     global item
     global links_arr_full
     global download_path
 
     command = 'echo ' + links_arr_full[item] + ' | cut -d "%" -f 2 |cut -b 3-8'
     name = subprocess.check_output(command, shell=True, text=True, encoding='utf_8')
     name = name.strip('\n')
     name = str(name)+".jpg"
     command = "curl -s -o " + download_path + "/" + name + " " + links_arr_full[item]
     os.system(command)
-    os.system('clear')
+    os.system("clear")
 
     get_new_urls()
 
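Review note: download() still shells out to echo/cut for the filename and to curl for the fetch, even though the pooled client is already available. A hedged sketch of a shell-free equivalent; download_inline is a hypothetical name, and it assumes the module-level client and the %-encoded URL shape the cut pipeline expects:

```python
def download_inline(url: str, download_path: str) -> None:
    # Mirror of: echo $url | cut -d "%" -f 2 | cut -b 3-8
    # i.e. take the second %-delimited field and keep bytes 3-8 of it.
    name = url.split("%")[1][2:8] + ".jpg"
    # Reuse the pooled httpx client instead of spawning curl.
    resp = client.get(url)
    with open(f"{download_path}/{name}", "wb") as f:
        f.write(resp.content)
```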
@@ -99,7 +117,7 @@ while True:
     if choice == "n":
         next()
     elif choice == "p":
-        previus()
+        previous()
     elif choice == "d":
         download()
     elif choice == "c":
@@ -110,3 +128,5 @@ while True:
         exit()
     else:
         print("invaled awnser")
+        exit(0)
+
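Review note: as reconstructed, the added exit(0) under the else branch means any unrecognized key now terminates the program instead of re-prompting; the misspelled "invaled awnser" message itself is left unchanged by this PR.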