Produces the correct HTML code and summary. Next objective is to push it to WordPress directly from this Gradio UI.

shahab00x 2024-02-20 00:11:44 +03:30
parent 0728ec31e8
commit c0c4f20762
2 changed files with 7 additions and 5 deletions
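On the WordPress objective mentioned in the commit message: that part is not implemented in this commit. A minimal sketch of how the generated HTML could be pushed from the Gradio callback, assuming the standard WordPress REST API with an application password (WP_SITE, WP_USER, and WP_APP_PASSWORD are placeholder names, not taken from this repository):

import requests

WP_SITE = "https://example.com"          # placeholder: target WordPress site
WP_USER = "editor"                       # placeholder: user with publishing rights
WP_APP_PASSWORD = "xxxx xxxx xxxx xxxx"  # placeholder: application password for that user

def push_to_wordpress(title: str, html_content: str, status: str = "draft") -> int:
    """Create a post via the WordPress REST API (/wp-json/wp/v2/posts) and return its id."""
    resp = requests.post(
        f"{WP_SITE}/wp-json/wp/v2/posts",
        auth=(WP_USER, WP_APP_PASSWORD),
        json={"title": title, "content": html_content, "status": status},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()["id"]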

@@ -21,12 +21,12 @@ class AmazonScraper:
             'https': f'socks5h://{PROXY_HOST}:{PROXY_PORT}'
         }
-        self.HEADERS = {
+        HEADERS = {
             'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
             'Accept-Language': 'en-US, en;q=0.5'
         }
-        HEADERS = {
+        self.HEADERS = {
            'authority': 'www.amazon.com',
            'pragma': 'no-cache',
            'cache-control': 'no-cache',
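The two dicts above sit in AmazonScraper.__init__ next to the SOCKS proxy settings shown in the unchanged context lines. The rest of the class is not part of this diff; a plausible sketch of how such headers and proxies are typically wired into a requests call (the fetch method and the self.proxies attribute name are assumptions, not taken from the repository):

import requests

class AmazonScraper:
    def fetch(self, url: str) -> str:
        # Assumed helper: self.HEADERS and self.proxies are built in __init__
        # as in the diff above; 'socks5h://' proxies require requests[socks].
        resp = requests.get(url, headers=self.HEADERS, proxies=self.proxies, timeout=30)
        resp.raise_for_status()
        return resp.text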

@@ -3,7 +3,9 @@ from scrape_amazon import AmazonScraper, AIInterface
 import re
-llms = ['meta-llama/Llama-2-70b-chat-hf', "mistralai/Mixtral-8x7B-Instruct-v0.1", "togethercomputer/LLaMA-2-7B-32K"]
+llms = ['meta-llama/Llama-2-70b-chat-hf', "mistralai/Mixtral-8x7B-Instruct-v0.1",
+        "mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2",
+        "togethercomputer/LLaMA-2-7B-32K"]
 scraper = AmazonScraper()
 aii = AIInterface()
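The llms entries are Together AI model identifiers that write_article passes to AIInterface.ask_ai. The real implementation of ask_ai is not shown in this diff; a minimal sketch of a compatible call, assuming Together's OpenAI-compatible chat completions endpoint and a TOGETHER_API_KEY environment variable (both assumptions, not confirmed by the repository):

import os
import requests

def ask_ai(prompt: str, model: str = "mistralai/Mixtral-8x7B-Instruct-v0.1") -> str:
    """Send a single-turn chat completion request and return the model's reply text."""
    resp = requests.post(
        "https://api.together.xyz/v1/chat/completions",
        headers={"Authorization": f"Bearer {os.environ['TOGETHER_API_KEY']}"},
        json={"model": model, "messages": [{"role": "user", "content": prompt}]},
        timeout=120,
    )
    resp.raise_for_status()
    return resp.json()["choices"][0]["message"]["content"]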
@@ -50,13 +52,13 @@ def write_article(url):
     # f"paid Amazon affiliate links</p> and Include a link to the product {url} at the very end. Also "
     # f"include this image {image} after the second paragraph. Format it nicely and professionally in "
     # f"HTML. :\n\n") + text
-    html_content = aii.ask_ai(prompt_for_ai, model=llms[1])
+    html_content = aii.ask_ai(prompt_for_ai, model=llms[2])
     prompt_for_ai = (f"Take the following HTML code and slightly modify it. Underneath the title add this tag '<p "
                      f"class=\"has-small-font-size\">This page includes paid Amazon affiliate links</p>'. Include a "
                      f"link {url} to the product at the end. Also include this image {image} after the first or "
                      f"second paragraph. Return a nice and professional HTML code:\n") + html_content
-    html_content = aii.ask_ai(prompt_for_ai, model=llms[1])
+    html_content = aii.ask_ai(prompt_for_ai, model=llms[2])
     html_content = replace_img_tag(url, html_content, scraper.images)
     print(html_content)
     return html_content
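The commit message refers to "this gradio UI"; the interface definition itself lies outside the lines shown in this diff. A minimal sketch of how write_article could be exposed through Gradio (the component choices are assumptions, not the repository's actual layout):

import gradio as gr

# Assumed wiring: a product-URL textbox in, the generated article HTML out.
demo = gr.Interface(
    fn=write_article,
    inputs=gr.Textbox(label="Amazon product URL"),
    outputs=gr.HTML(label="Generated article"),
    title="Amazon product article writer",
)

if __name__ == "__main__":
    demo.launch()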