from django.shortcuts import render
from rest_framework.response import Response
from rest_framework.decorators import api_view
import http.client
import json
import requests
from bs4 import BeautifulSoup

# Create your views here.

# Module-level cache for the Amazon session id (ue_sid) and marketplace id
# (ue_mid) scraped from the home page, so searchQuery() can reuse them
# across requests instead of fetching a new session every time.
ue_sid_global = ""
ue_mid_global = ""

def sessionIdGenrator():
    # Fetch the Amazon.in home page and pull ue_sid / ue_mid out of the
    # inline analytics script (patterns like `ue_sid = '...',`).
    conn = http.client.HTTPSConnection("www.amazon.in")
    payload = ''
    headers = {}
    conn.request("GET", "/", payload, headers)
    res = conn.getresponse()
    data = res.read()
    response = data.decode("utf-8")
    ue_sid = response.split("ue_sid =")[1].split(',')[0].split("'")[1]
    ue_mid = response.split("ue_mid =")[1].split(',')[0].split("'")[1]
    return ue_sid, ue_mid
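# Illustrative usage (a sketch; the actual token values are generated by
# Amazon per session):
#   ue_sid, ue_mid = sessionIdGenrator()
#   # ue_sid is a session-id string and ue_mid a marketplace-id string; both
#   # are passed as query parameters to the suggestions API in searchAPI().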

def searchAPI(ue_sid, ue_mid, query):
    # Call Amazon's autocomplete endpoint with the scraped session and
    # marketplace ids and return the parsed JSON suggestions payload.
    conn = http.client.HTTPSConnection("completion.amazon.in")
    payload = ''
    headers = {}
    conn.request("GET", "/api/2017/suggestions?prefix="+query.replace(" ", "+") +
                 "&alias=aps&session-id="+ue_sid+"&mid="+ue_mid, payload, headers)
    res = conn.getresponse()
    data = res.read()
    response = data.decode("utf-8")
    return json.loads(response)
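# The payload is consumed in searchQuery() below as
# response["suggestions"][n]["value"], so an illustrative, trimmed response
# looks roughly like:
#   {"suggestions": [{"value": "laptop"}, {"value": "laptop bag"}, ...]}
# Any other fields Amazon returns are ignored by this code.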

def getAllProduct(query, page):
    # Hit Amazon's AJAX search endpoint; the body comes back as several
    # "&&&"-separated JSON chunks, each carrying an HTML fragment for one
    # search-result card, which is then scraped field by field.
    conn = http.client.HTTPSConnection("www.amazon.in")
    payload = ''
    headers = {}
    conn.request("POST", "/s/query?k="+query +
                 "&page="+str(page), payload, headers)
    res = conn.getresponse()
    data = res.read()
    response = data.decode("utf-8")
    tempData = response.split("&&&")[3:-1]
    data = []
    for i in tempData:
        try:
            tempProduct = {}
            tempHtml = json.loads(i)[2]["html"]
            soup = BeautifulSoup(tempHtml, features="html5lib")
            try:
                stars = soup.find_all("span", {"class": "a-icon-alt"})[0].text
                tempProduct["stars"] = stars
            except Exception:
                pass
            try:
                imgs = soup.find_all("img", {"class": "s-image"})[0]['srcset']
                tempProduct["imgs"] = imgs
            except Exception:
                pass
            try:
                title = soup.find_all(
                    "img", {"class": "s-image"})[0]['alt'].replace("Sponsored Ad - ", "")
                tempProduct["title"] = title
            except Exception:
                pass
            try:
                link = soup.find_all("a")[0]["href"]
                tempProduct["link"] = link
            except Exception:
                pass
            try:
                symbol = soup.find_all(
                    "span", {"class": "a-price-symbol"})[0].text
                tempProduct["symbol"] = symbol
            except Exception:
                pass
            try:
                price = soup.find_all(
                    "span", {"class": "a-price-whole"})[0].text
                tempProduct["price"] = price
            except Exception:
                pass
            try:
                fullPrice = soup.find_all(
                    "span", {"class": "a-offscreen"})[1].text
                tempProduct["fullPrice"] = fullPrice
            except Exception:
                pass
            try:
                offer = soup.find_all(
                    "span", {"class": "a-truncate-full"})[0].text
                tempProduct["offer"] = offer
            except Exception:
                pass
            try:
                dataAsin = json.loads(i)[2]["asin"]
                tempProduct["asin"] = dataAsin
            except Exception:
                pass
            # Only keep cards that at least resolved a title and an ASIN.
            if tempProduct and "title" in tempProduct and "asin" in tempProduct:
                data.append(tempProduct)
        except Exception:
            pass
    return data
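# Each dict in the returned list carries whichever of these keys could be
# scraped from the result card (the values below are purely illustrative):
#   {"stars": "4.3 out of 5 stars", "imgs": "<srcset string>",
#    "title": "...", "link": "/dp/B0XXXXXXXX/...", "symbol": "₹",
#    "price": "49,999", "fullPrice": "...", "offer": "...",
#    "asin": "B0XXXXXXXX"}
# "title" and "asin" are always present because of the filter above.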

@api_view(['GET'])  # DRF view decorator so the Response object below gets rendered
def getProductsList(request):
    query = request.GET.get('query', '').replace(" ", "+")
    # request.GET.get never raises, so default the page number directly
    # instead of wrapping it in try/except.
    page = request.GET.get('page') or 1
    data = getAllProduct(query, page)
    return Response({"data": data})
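# Illustrative request (the URL prefix is an assumption; it depends on how
# urls.py maps this view, see the sketch at the end of this file):
#   GET /products/?query=gaming+laptop&page=2
#   -> {"data": [ {...}, {...}, ... ]}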

@api_view(['GET'])  # DRF view decorator so the Response object below gets rendered
def getProductDetail(request):
    productId = request.GET.get('id')
    conn = http.client.HTTPSConnection("www.amazon.in")
    payload = ''
    headers = {}
    conn.request("GET", "/dp/"+productId+"/", payload, headers)
    res = conn.getresponse()
    data = res.read()
    response = data.decode("utf-8")
    data = {}
    soup = BeautifulSoup(response, features="html5lib")
    #title = response.split('id="productTitle"')[1].split(">")[1].split("</span")[0].strip()
    title = soup.find_all(
        "span", {"class": "a-size-large product-title-word-break"})[0].text.strip()
    data['title'] = title
    symbol = soup.find_all("span", {"class": "a-price-symbol"})[0].text
    data["symbol"] = symbol
    savingsPercentage = soup.find_all(
        "span", {"class": "savingsPercentage"})[0].text
    data["savingPercentage"] = savingsPercentage
    # Rebuild full-size image URLs from the thumbnail srcs on the page.
    imgs = soup.find_all("img", {"class": "a-dynamic-image"})
    imgArr = []
    for i in imgs:
        imgArr.append("https://m.media-amazon.com/images/I/" +
                      i["src"].split("/I/")[1].split(".")[0]+".jpg")
    data["images"] = imgArr
    # Merge every JSON blob embedded in <script> tags into one flat dict,
    # skipping null/boolean values.
    scripts = soup.find_all("script")
    tempData = {}
    for i in scripts:
        try:
            temp = str(i).split("<script")[1].split(
                ">")[1].split("</script")[0]
            for key, item in json.loads(temp).items():
                if item not in (None, "null", True, False):
                    tempData[key] = item
        except Exception:
            pass
    data["currencyCode"] = tempData["currencyCode"]
    data["productPrice"] = tempData["productPrice"]
    data["brand"] = tempData["brand"]
    data["category"] = tempData["buyBackCategory"]
    return Response({"data": data})
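# Illustrative request and response shape (field values are placeholders; the
# keys pulled from the embedded <script> JSON are whatever Amazon ships on
# the product page):
#   GET /product/?id=B0XXXXXXXX
#   -> {"data": {"title": "...", "symbol": "₹", "savingPercentage": "-12%",
#                "images": ["https://m.media-amazon.com/images/I/....jpg", ...],
#                "currencyCode": "INR", "productPrice": "...", "brand": "...",
#                "category": "..."}}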

@api_view(['GET'])  # DRF view decorator so the Response object below gets rendered
def searchQuery(request):
    global ue_sid_global, ue_mid_global
    query = request.GET.get('query')
    # Reuse the cached session/marketplace ids when available; otherwise
    # scrape fresh ones from the home page.
    if not (ue_sid_global and ue_mid_global):
        ue_sid, ue_mid = sessionIdGenrator()
        ue_sid_global = ue_sid
        ue_mid_global = ue_mid
    else:
        ue_sid = ue_sid_global
        ue_mid = ue_mid_global
    try:
        tempData = searchAPI(ue_sid, ue_mid, query)
    except Exception:
        # The cached ids can go stale; regenerate once and retry.
        ue_sid, ue_mid = sessionIdGenrator()
        ue_sid_global = ue_sid
        ue_mid_global = ue_mid
        tempData = searchAPI(ue_sid, ue_mid, query)
    values = []
    for i in tempData["suggestions"]:
        values.append(i["value"])
    data = {"data": values}
    return Response(data)
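# Minimal urls.py sketch for exposing these views (a sketch only: the route
# strings below are assumptions, not part of this module):
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('search/', views.searchQuery),
#       path('products/', views.getProductsList),
#       path('product/', views.getProductDetail),
#   ]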