Mirror of https://github.com/mrfluffy-dev/hart-cli.git, synced 2026-01-17 01:40:33 +00:00
Chore: made it faster
hart-cli.py | 106 lines changed
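The gain here comes mostly from replacing one-off requests.get calls with a single shared httpx.Client: the client pools connections, so repeated fetches against the same host reuse one TCP/TLS session instead of reconnecting for every page turn. A minimal sketch of the pattern (illustrative only; the placeholder User-Agent and the timing loop are not part of the commit):

import time
import httpx

client = httpx.Client(
    headers={"User-Agent": "hart-cli"},  # placeholder; the commit ships a full Firefox UA string
    follow_redirects=True,
)

start = time.perf_counter()
for page_num in range(1, 4):
    # Same host every iteration, so the pooled connection is reused.
    client.get(f"https://yande.re/post?page={page_num}")
print(f"3 pooled requests: {time.perf_counter() - start:.2f}s")
client.close()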
@@ -1,20 +1,34 @@
 #!/usr/bin/env python3
 
-import requests
-from bs4 import BeautifulSoup
+import httpx
+from bs4 import BeautifulSoup as bs
+import pyperclip as clip
 
 import os
 import subprocess
-import pyperclip as clip
-os.system('clear')
-download_path = os.environ['HOME']+"/Pictures/hart-cli"
+from os.path import expanduser
+
+headers = {
+    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:101.0) Gecko/20100101 Firefox/101.0"
+}
+
+client = httpx.Client(headers=headers, follow_redirects=True)
+
+home = expanduser("~")
+download_path = f"{home}/pix/hart-cli"
+os.system(f"mkdir -p {download_path}")
 
 item = 0
 page_num = 1
-URL = "https://yande.re/post?page="+str(page_num)
-page = requests.get(URL)
+
+url = f"https://yande.re/post?page={page_num}"
+page = client.get(url)
 
 links_arr_full = []
 links_arr_preview = []
 
 def get_new_urls():
-    global URL
+    global url
     global page
     global page_num
     global soup
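An aside on the new setup block: os.system(f"mkdir -p {download_path}") forks a shell on every launch. The standard library covers this case directly; a hedged alternative, not what the commit does:

import os
from os.path import expanduser

download_path = f"{expanduser('~')}/pix/hart-cli"
os.makedirs(download_path, exist_ok=True)  # same behaviour as mkdir -p, no subprocess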
@@ -23,16 +37,17 @@ def get_new_urls():
     global links_arr_full
     global links_preview
     global links_arr_preview
 
     links_arr_full.clear
-    links_arr_full.clear
-    URL = "https://yande.re/post?page="+str(page_num)
-    page = requests.get(URL)
-    soup = BeautifulSoup(page.content, "html.parser")
+    links_arr_preview.clear
+    url = f"https://yande.re/post?page={page_num}"
+    page = client.get(url)
+    soup = bs(page.content, "html.parser")
 
     main_content = soup.find(id="post-list-posts")
     main_content = str(main_content)
-    main_content = main_content.replace("smallimg","largeimg")
-    main_content = BeautifulSoup(main_content, features="lxml")
+    main_content = main_content.replace("smallimg", "largeimg")
+    main_content = bs(main_content, features="lxml")
     main_content = main_content.find(id="post-list-posts")
 
     links_full = main_content.find_all_next("a", class_="directlink largeimg")
     links_arr_full = []
     links_preview = main_content.find_all_next("img", class_="preview")
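One bug survives the rewrite: links_arr_full.clear and links_arr_preview.clear lack parentheses, so each line only evaluates the bound method and discards it; the lists are never actually emptied here. It happens to be harmless because both lists are reassigned further down the function, but the intended calls look like this:

links = ["a", "b"]
links.clear    # no-op: references list.clear without calling it
assert links == ["a", "b"]
links.clear()  # the actual call empties the list
assert links == []

The str()/re-parse round-trip used to swap "smallimg" for "largeimg" could likewise be avoided by reading href attributes straight off the first parse, though that depends on yande.re's markup and is left as the author wrote it.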
@@ -47,19 +62,70 @@ def get_new_urls():
 def next():
     global item
     global page_num
 
     if item != len(links_arr_preview)-1:
-        item+=1
-        os.system('clear')
+        item += 1
     else:
-        page_num+=1
-        item=1
+        page_num += 1
+        item = 1
         get_new_urls()
-        os.system('clear')
 
-def previus():
+def previous():
     global item
     global page_num
     global links_arr_preview
 
+    if item != 1:
+        item -= 1
+    else:
+        page_num -= 1
+        get_new_urls()
+        item = len(links_arr_preview)-1
+
+def download():
+    global item
+    global links_arr_full
+    global download_path
+
+    command = 'echo ' + links_arr_full[item] + ' | cut -d "%" -f 2 |cut -b 3-8'
+    name = subprocess.check_output(command, shell=True, text=True, encoding='utf_8')
+    name = name.strip('\n')
+    name = str(name)+".jpg"
+    command = "curl -s -o " + download_path + "/" + name + " " + links_arr_full[item]
+    os.system(command)
+
+get_new_urls()
+
+while True:
+    command = "curl -s -o /tmp/hart-preview.jpg " + links_arr_preview[item]
+    os.system(command)
+    command = "convert /tmp/hart-preview.jpg -resize 500x500 /tmp/hart-preview.jpg"
+    os.system(command)
+    command = "kitty +icat --place 100x100@0x0 /tmp/hart-preview.jpg"
+    os.system(command)
+    print("next:\t\tn")
+    print("previous:\tp")
+    print("download:\td")
+    print("copy URL:\tc")
+    print("quit:\t\tq")
+    choice = input()
+    if choice == "n":
+        next()
+    elif choice == "p":
+        previous()
+    elif choice == "d":
+        download()
+    elif choice == "c":
+        clip.copy(links_arr_full[item])
+        os.system('clear')
+    elif choice == "q":
+        os.system('clear')
+        exit()
+    else:
+        print("invalid answer")
+        exit(0)
 
 
 if item != 1:
     item-=1
     os.system('clear')
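The new download() shells out twice: echo piped through cut to slice a post id out of the URL-encoded filename, then curl to fetch the file. Both steps can stay in-process with the client the script already holds. A sketch under the same assumption the pipeline makes (the id sits after the first "%", bytes 3 through 8); client, links_arr_full, item, and download_path are the script's own globals:

full_url = links_arr_full[item]
name = full_url.split("%")[1][2:8] + ".jpg"  # mirrors: cut -d "%" -f 2 | cut -b 3-8
resp = client.get(full_url)
with open(f"{download_path}/{name}", "wb") as f:
    f.write(resp.content)                    # replaces: curl -s -o <path> <url>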
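The preview loop likewise spawns curl and ImageMagick's convert on every iteration before handing the file to kitty. Fetching through the shared client and resizing with Pillow (an extra dependency, assumed acceptable here) would drop two subprocesses per image while keeping kitty's icat as the display path:

import os
from PIL import Image

resp = client.get(links_arr_preview[item])   # client, links_arr_preview, item: script globals
with open("/tmp/hart-preview.jpg", "wb") as f:
    f.write(resp.content)
im = Image.open("/tmp/hart-preview.jpg")
im.thumbnail((500, 500))                     # shrink to fit 500x500, like convert -resize
im.save("/tmp/hart-preview.jpg")
os.system("kitty +icat --place 100x100@0x0 /tmp/hart-preview.jpg")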