# Scraper-backed product API views (Amazon India search/detail, Flipkart autosuggest).
from django.shortcuts import render
from rest_framework.response import Response
from rest_framework.decorators import api_view
import http.client
import json
import requests
from bs4 import BeautifulSoup
# Create your views here.
def sessionIdGenrator():
    """Scrape Amazon India's home page for its session identifiers.

    Fetches https://www.amazon.in/ and pulls the ``ue_sid`` / ``ue_mid``
    values out of the inline JavaScript of the page.

    Returns:
        tuple[str, str]: ``(ue_sid, ue_mid)``.

    Raises:
        IndexError: if the markers are not present in the page (the scrape
            is brittle by nature — it depends on Amazon's inline JS layout).
    """
    conn = http.client.HTTPSConnection("www.amazon.in")
    try:
        conn.request("GET", "/", '', {})
        res = conn.getresponse()
        html = res.read().decode("utf-8")
    finally:
        # fix: the original never closed the connection (socket leak per call)
        conn.close()
    # The page embeds assignments like:  ue_sid = 'XXXX',  — cut them out.
    ue_sid = html.split("ue_sid =")[1].split(',')[0].split("'")[1]
    ue_mid = html.split("ue_mid =")[1].split(',')[0].split("'")[1]
    return ue_sid, ue_mid
def searchAPI(query):
    """Query Flipkart's autosuggest endpoint for *query*.

    Args:
        query (str): raw user search string.

    Returns:
        dict: the parsed JSON response from Flipkart's
            ``/api/4/discover/autosuggest`` endpoint.
    """
    payload = json.dumps({
        "query": query,
        "marketPlaceId": "FLIPKART",
        "types": [
            "QUERY",
            "QUERY_STORE",
            "PRODUCT",
            "RICH",
            "PARTITION"
        ],
        "rows": 10
    })
    # Flipkart rejects requests without a browser-like UA and its own
    # FKUA marker in X-User-Agent.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36',
        'X-User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 FKUA/website/42/website/Desktop',
        'Content-Type': 'application/json'
    }
    conn = http.client.HTTPSConnection("2.rome.api.flipkart.com")
    try:
        conn.request("POST", "/api/4/discover/autosuggest", payload, headers)
        res = conn.getresponse()
        body = res.read().decode("utf-8")
    finally:
        # fix: the original never closed the connection (socket leak per call)
        conn.close()
    return json.loads(body)
def _parseSearchItem(rawChunk):
    """Parse one '&&&'-delimited chunk of Amazon's search response.

    Each chunk is a JSON array whose third element carries an ``html``
    fragment (one product card) and usually an ``asin``.

    Args:
        rawChunk (str): one JSON chunk from the search response.

    Returns:
        dict: whichever of stars/imgs/title/link/symbol/price/fullPrice/
            offer/asin could be extracted; ``{}`` if the chunk is not a
            parseable product card.
    """
    product = {}
    try:
        payload = json.loads(rawChunk)[2]  # parse once (original parsed twice)
        cardHtml = payload["html"]
    except (ValueError, KeyError, IndexError, TypeError):
        return product
    soup = BeautifulSoup(cardHtml, features="html5lib")

    def byClass(tag, cls, index=0):
        # Nth element of `tag` carrying CSS class `cls`, or None.
        # fix: the original passed {"class", cls} — a set, not an attrs
        # dict; BS4 expects {"class": cls}.
        nodes = soup.find_all(tag, {"class": cls})
        return nodes[index] if len(nodes) > index else None

    node = byClass("span", "a-icon-alt")
    if node is not None:
        product["stars"] = node.text
    node = byClass("img", "s-image")
    if node is not None:
        if node.get("srcset") is not None:
            product["imgs"] = node["srcset"]
        if node.get("alt") is not None:
            product["title"] = node["alt"].replace("Sponsored Ad - ", "")
    anchors = soup.find_all("a")
    if anchors and anchors[0].get("href") is not None:
        product["link"] = anchors[0]["href"]
    node = byClass("span", "a-price-symbol")
    if node is not None:
        product["symbol"] = node.text
    node = byClass("span", "a-price-whole")
    if node is not None:
        product["price"] = node.text
    # Index 1: the second "a-offscreen" span holds the strike-through MRP.
    node = byClass("span", "a-offscreen", index=1)
    if node is not None:
        product["fullPrice"] = node.text
    node = byClass("span", "a-truncate-full")
    if node is not None:
        product["offer"] = node.text
    if payload.get("asin") is not None:
        product["asin"] = payload["asin"]
    return product


def getAllProduct(query, page):
    """Scrape one page of Amazon India search results.

    Args:
        query (str): search terms, already ``+``-joined for the URL.
        page (int | str): 1-based result page number.

    Returns:
        list[dict]: product dicts that have at least a title and an asin.
    """
    conn = http.client.HTTPSConnection("www.amazon.in")
    try:
        conn.request("POST", "/s/query?k="+query +
                     "&page="+str(page), '', {})
        res = conn.getresponse()
        body = res.read().decode("utf-8")
    finally:
        # fix: the original never closed the connection (socket leak per call)
        conn.close()
    # The response is a stream of '&&&'-separated JSON chunks; the first
    # three and the last are framing/metadata, not product cards.
    products = []
    for chunk in body.split("&&&")[3:-1]:
        product = _parseSearchItem(chunk)
        # Keep only cards complete enough to be useful downstream.
        if "title" in product and "asin" in product:
            products.append(product)
    return products
def getProductsList(request):
    """GET endpoint: ``?query=...&page=N`` → ``{"data": [products]}``.

    NOTE(review): this returns a DRF ``Response`` but carries no
    ``@api_view`` decorator here — confirm it is applied at routing time.
    """
    # fix: a missing 'query' param returned None and crashed on .replace();
    # treat it as an empty search instead.
    query = (request.GET.get('query') or "").replace(" ", "+")
    # fix: .get() never raises, so the original try/except could not supply
    # its default — a missing param produced the literal URL text
    # "&page=None". Default to page 1 explicitly.
    page = request.GET.get('page') or 1
    data = getAllProduct(query, page)
    return Response({"data": data})
def getProductDetail(request):
    """GET endpoint: ``?id=<ASIN>`` → scraped Amazon product detail.

    Fetches ``https://www.amazon.in/dp/<id>/`` and extracts title, price
    symbol, discount, image URLs, and selected fields from the page's
    inline JSON ``<script>`` blobs.
    """
    productId = request.GET.get('id')
    conn = http.client.HTTPSConnection("www.amazon.in")
    try:
        conn.request("GET", "/dp/"+productId+"/", '', {})
        res = conn.getresponse()
        html = res.read().decode("utf-8")
    finally:
        # fix: the original never closed the connection (socket leak per call)
        conn.close()
    data = {}
    soup = BeautifulSoup(html, features="html5lib")
    # fix throughout: attrs must be a dict {"class": ...}; the original
    # passed a set {"class", ...} and matched only via BS4's class shorthand.
    data['title'] = soup.find_all(
        "span", {"class": "a-size-large product-title-word-break"})[0].text.strip()
    data["symbol"] = soup.find_all(
        "span", {"class": "a-price-symbol"})[0].text
    data["savingPercentage"] = soup.find_all(
        "span", {"class": "savingsPercentage"})[0].text
    imgArr = []
    for img in soup.find_all("img", {"class": "a-dynamic-image"}):
        # Rebuild a clean full-size image URL from the media CDN image id.
        imgArr.append("https://m.media-amazon.com/images/I/" +
                      img["src"].split("/I/")[1].split(".")[0]+".jpg")
    data["images"] = imgArr
    # Merge every inline JSON <script> blob into one flat key/value map.
    tempData = {}
    for script in soup.find_all("script"):
        try:
            scriptBody = str(script).split("<script")[1].split(
                ">")[1].split("</script")[0]
            for key, item in json.loads(scriptBody).items():
                # fix: the original chained `!=` with `or`, which is always
                # True, so nothing was ever filtered. Skip null-ish and
                # boolean values as intended.
                if item is not None and item != "null" and not isinstance(item, bool):
                    tempData[key] = item
        except (ValueError, IndexError):
            # Non-JSON scripts are expected; skip them.
            pass
    # fix: use .get() so a missing key yields None instead of a 500.
    data["currencyCode"] = tempData.get("currencyCode")
    data["productPrice"] = tempData.get("productPrice")
    data["brand"] = tempData.get("brand")
    data["category"] = tempData.get("buyBackCategory")
    return Response({"data": data})
def searchQuery(request):
    """GET endpoint: ``?query=...`` → Flipkart autosuggest titles + images.

    Returns ``{"data": [{"title": ..., "imageUrl": ...}, ...]}``; image URL
    templates are expanded to 1080x1080 at quality 100.
    """
    query = request.GET.get('query')
    suggestions = searchAPI(query)
    values = []
    for item in suggestions["RESPONSE"]["suggestions"]:
        # fix: bare `except:` swallowed every error (even KeyboardInterrupt);
        # only missing/mis-shaped suggestion rows should be skipped.
        try:
            component = item["data"]["component"]["value"]
            entry = {"title": component["title"]}
            entry["imageUrl"] = component["imageUrl"].replace(
                "{@width}", "1080").replace("{@height}", "1080").replace("{@quality}", "100")
        except (KeyError, TypeError):
            continue
        values.append(entry)
    return Response({"data": values})