I want to import restaurant data (name, phone number, website, and address) into Excel, but unfortunately I am getting ads and garbage data. I created my code using the http://automatetheweb.net/vba-getelementsbytagname-method/ website, but it is not helping. Please rectify the issue in my code.
Website: https://www.yellowpages.com/atlanta-ga/attorneys
Please do not suggest JSON, as it does not work on other websites.
Sub Yellowcom()
    'Dim ieObj As InternetExplorer
    Dim htmlELe As IHTMLElement
    Dim HTML As HTMLDocument
    Dim i As Integer
    Dim URL As String
    Dim URLParameter As String
    Dim page As Long
    Dim links As Object
    Dim IE As Object

    i = 1
    Set IE = CreateObject("InternetExplorer.Application")
    'Set ieObj = New InternetExplorer
    IE.Visible = True
    URL = "https://www.yellowpages.com/atlanta-ga/attorneys"
    'Application.Wait Now + TimeValue("00:00:05")
    For page = 2 To 4
        If page > 1 Then URLParameter = "?page=" & page
        IE.navigate URL & URLParameter

        ' Wait for the browser to load the page
        Do Until IE.readyState = 4
            DoEvents
        Loop

        Set HTML = IE.document
        Set links = HTML.getElementsByClassName("info")
        For Each htmlELe In links
            With ActiveSheet
                .Range("A" & i).Value = htmlELe.Children(0).textContent
                .Range("B" & i).Value = htmlELe.getElementsByTagName("a")(0).href
                .Range("C" & i).Value = htmlELe.Children(2).textContent
                .Range("D" & i).Value = htmlELe.Children(2).querySelector("a[href]")
                'links2 = htmlELe.getElementsByClassName("links")(1)
                '.Range("D" & i).Value = links2.href
            End With
            i = i + 1
        Next htmlELe
    Next page
    IE.Quit
    Set IE = Nothing
End Sub
Required output should look like this: (screenshot not included)
I would use XHR rather than a browser, store the data for each page in an array, and write that array out to the sheet. You could dimension one array in advance to hold all the results, based on the results per page and the number of pages, but the approach below is still efficient.
Option Explicit
Public Sub GetListings()
    Dim html As HTMLDocument, page As Long, html2 As HTMLDocument
    Dim results As Object, headers(), ws As Worksheet, i As Long
    Const START_PAGE As Long = 1
    Const END_PAGE As Long = 2

    Set ws = ThisWorkbook.Worksheets("Sheet1")
    headers = Array("Name", "Phone", "Website", "Address")
    Application.ScreenUpdating = False
    Set html = New HTMLDocument
    Set html2 = New HTMLDocument
    ws.Cells(1, 1).Resize(1, UBound(headers) + 1) = headers

    With CreateObject("MSXML2.XMLHTTP")
        For page = START_PAGE To END_PAGE
            .Open "GET", "https://www.yellowpages.com/atlanta-ga/attorneys?page=" & page, False
            .send
            html.body.innerHTML = .responseText
            Set results = html.querySelectorAll(".organic .result")

            Dim output(), r As Long
            ReDim output(1 To results.Length, 1 To 4)
            r = 1
            For i = 0 To results.Length - 1
                On Error Resume Next
                html2.body.innerHTML = results.item(i).outerHTML
                output(r, 1) = html2.querySelector(".business-name").innerText
                output(r, 2) = html2.querySelector(".phone").innerText
                output(r, 3) = html2.querySelector(".track-visit-website").href
                output(r, 4) = html2.querySelector(".street-address").innerText & " " & html2.querySelector(".locality").innerText
                On Error GoTo 0
                r = r + 1
            Next
            ws.Cells(GetLastRow(ws, 1) + 1, 1).Resize(UBound(output, 1), UBound(output, 2)) = output
        Next
    End With
    Application.ScreenUpdating = True
End Sub

Public Function GetLastRow(ByVal ws As Worksheet, Optional ByVal columnNumber As Long = 1) As Long
    With ws
        GetLastRow = .Cells(.Rows.Count, columnNumber).End(xlUp).Row
    End With
End Function
Sample of output: (screenshot not included)
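As mentioned above, you could also dimension one array up front to hold all the results. A minimal sketch of that idea, assuming a fixed number of organic results per page (the 30-per-page figure is an assumption; verify it against the site):
Public Sub GetListingsPreDimensioned()
    'Sketch only: size one array up front from pages * results-per-page,
    'fill it across all pages, then write to the sheet once at the end.
    Const RESULTS_PER_PAGE As Long = 30 'assumption - check the site's actual page size
    Const START_PAGE As Long = 1
    Const END_PAGE As Long = 2
    Dim allResults(), r As Long, page As Long
    ReDim allResults(1 To RESULTS_PER_PAGE * (END_PAGE - START_PAGE + 1), 1 To 4)
    For page = START_PAGE To END_PAGE
        'request and parse each page exactly as in GetListings above, but
        'instead of writing per page, increment one running counter r and
        'fill allResults(r, 1 To 4)
    Next
    'single write-out; r holds the number of rows actually filled
    If r > 0 Then ThisWorkbook.Worksheets("Sheet1").Cells(2, 1).Resize(r, 4) = allResults
End Sub
A single Resize write keeps all sheet access out of the loop, which is where most of the time goes.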
The info class is also used for the advertisements. You first need to get the collection whose class name is "search-results organic" and, within it, find all the "info" classes.
This means you need an extra collection variable:
Set HTML = IE.document
Set OrganicLinks = HTML.getElementsByClassName("search-results organic")
Set links = OrganicLinks.item(0).getElementsByClassName("info")
To get the right website, you need to use another reference. It's better to get it by class name, since that one is more specific:
On Error Resume Next
.Range("B" & i).Value = htmlELe.getElementsByClassName("track-visit-website")(0).href
On Error GoTo 0
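Putting both fixes into the original loop, the relevant section would look something like this (a sketch, assuming the page structure described above):
Dim OrganicLinks As Object 'the extra collection variable
Set HTML = IE.document
Set OrganicLinks = HTML.getElementsByClassName("search-results organic")
Set links = OrganicLinks.item(0).getElementsByClassName("info")
For Each htmlELe In links
    With ActiveSheet
        .Range("A" & i).Value = htmlELe.Children(0).textContent
        'website link, picked up by its more specific class name
        On Error Resume Next
        .Range("B" & i).Value = htmlELe.getElementsByClassName("track-visit-website")(0).href
        On Error GoTo 0
        .Range("C" & i).Value = htmlELe.Children(2).textContent
    End With
    i = i + 1
Next htmlELe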
I wrote an Excel macro to fetch data from multiple pages (here, around 25-40 pages). I have managed to change pages and scrape the data from every page.
Sub Fetch_Data()
    Dim IE As Object
    Dim httpReq As Object
    Dim HTMLdoc As Object
    Dim resultsTable As Object
    Dim tRow As Object, tCell As Object
    Dim destCell As Range

    Set IE = CreateObject("InternetExplorer.Application")
    IE.Visible = False
    'Application.ScreenUpdating = False
    Application.StatusBar = "Data Fetching in progress, please wait..."
    IE.Navigate "https://www.bseindia.com/markets/debt/TradenSettlement.aspx" 'load the BSE trade & settlement page
    Do
        DoEvents
    Loop Until IE.ReadyState = 4
    Set HTMLdoc = IE.Document

    'LR = Cells(Rows.Count, 1).End(xlUp).Row
    With ActiveSheet
        '.Cells.ClearContents
        Set destCell = .Range("A1")
    End With

    Set resultsTable = HTMLdoc.getElementById("ContentPlaceHolder1_GridViewrcdsFC")
    For Each tRow In resultsTable.Rows
        For Each tCell In tRow.Cells
            destCell.Offset(tRow.RowIndex, tCell.cellIndex).Value = tCell.innerText
        Next
    Next

    '________________________________________________________________________________________________________________________
    'Go to next page
    'IE.Navigate "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$2')"
    i = 2
    For i = 2 To 50
        If i = 2 Then
            Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$2')"
            On Error GoTo ErrorHandler
        ElseIf i = 3 Then
            Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$3')"
            On Error GoTo ErrorHandler
        ElseIf i = 4 Then
            Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$4')"
            On Error GoTo ErrorHandler
        ElseIf i = 5 Then
            Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$5')"
            On Error GoTo ErrorHandler
ErrorHandler:
            GoTo XYZ
        End If
        IE.Navigate Url
        Do
            DoEvents
        Loop Until IE.ReadyState = 4
        Url = ""
        LR = Cells(Rows.Count, 1).End(xlUp).Row - 1
        With ActiveSheet
            '.Cells.ClearContents
            Set destCell = .Range("A" & LR)
        End With
        Set resultsTable = HTMLdoc.getElementById("ContentPlaceHolder1_GridViewrcdsFC")
        For Each tRow In resultsTable.Rows
            For Each tCell In tRow.Cells
                destCell.Offset(tRow.RowIndex, tCell.cellIndex).Value = tCell.innerText
            Next
        Next
    Next i
    '________________________________________________________________________________________________________________________
XYZ: IE.Quit
    Application.StatusBar = "Data Fetching Completed"
    MsgBox ("Data Successfully Fetched")
    Application.StatusBar = ""

    Dim lrow As Long
    Dim index As Long
    Dim header As String
    header = Range("A1").Value
    lrow = Range("A" & Rows.Count).End(xlUp).Row
    For index = 2 To lrow
        If Range("A" & index).Value = header Then Rows(index).Delete
    Next
End Sub
I want to change pages automatically without writing out every page. I tried something like the code below, but the pages are not changing. How do I loop through the pages?
For i = 2 To 4
    x = "Page$" + CStr(i)
    Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC'," & x & ")"
    On Error GoTo ErrorHandler
ErrorHandler:
    GoTo XYZ
You have to look at whether there are URL links to the other pages on the current page, find their tag, and loop through all the webpages. You can also look for the URL of each page and hardcode it.
Example with URLs beneath the "a" tag:
Set AElements = HTMLDoc.getElementsByTagName("a")
For Each AElement In AElements
    If AElement.id = "xxxxxxxxx" Then
        Cells(Cell, 27) = AElement.href 'I write the URL in the 27th column
    End If
Next AElement
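Another thing worth checking: the hardcoded URLs wrap the page argument in single quotes ('Page$2'), while the looped version builds the string without them. A sketch of the loop with the quoting restored (untested against the live site):
Dim i As Long, Url As String
For i = 2 To 50
    'keep the single quotes around Page$i, matching the working hardcoded form
    Url = "javascript:__doPostBack('ctl00$ContentPlaceHolder1$GridViewrcdsFC','Page$" & i & "')"
    IE.Navigate Url
    Do
        DoEvents
    Loop Until IE.ReadyState = 4
    'scrape the table here, as in the main routine
Next i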
I have multiple keywords in an Excel file, and I am looking for a way to get the image URL of the first Google Images result into another cell of the file.
For example:
if cell A1 contains "tomato"
I want cell A2 to display "https://seeds-gallery.com/4963-large_default/novosadski-jabucar-tomato-450-seeds.jpg", which is the image URL of the first result that shows up on Google Images.
Can someone please help me out?
You could do that in VBA using something like the following:
Public Sub InsertPicturesFromWeb()
    Dim IE As InternetExplorer
    Dim HTMLdoc As HTMLDocument
    Dim imgElements As IHTMLElementCollection
    Dim imgElement As HTMLImg
    Dim aElement As HTMLAnchorElement
    Dim N As Integer, I As Integer
    Dim Url As String, Url2 As String
    Dim LastRow As Long
    Dim M, sImageSearchString

    LastRow = Range("A" & Rows.Count).End(xlUp).Row
    For I = 1 To LastRow
        Url = "https://www.google.co.in/search?q=" & Cells(I, 1) & "&source=lnms&tbm=isch&sa=X&rnd=1"
        Set IE = New InternetExplorer
        With IE
            .Visible = False
            .Navigate Url
            Do Until .readyState = 4: DoEvents: Loop
            Set HTMLdoc = .document
            Set imgElements = HTMLdoc.getElementsByTagName("IMG")
            N = 1
            For Each imgElement In imgElements
                If InStr(imgElement.src, sImageSearchString) Then
                    If imgElement.ParentNode.nodeName = "A" Then
                        Set aElement = imgElement.ParentNode
                        Url2 = imgElement.src
                        N = N + 1
                    End If
                End If
            Next
            Call GetShapeFromWeb(Url2, Cells(I, 2))
            IE.Quit
            Set IE = Nothing
        End With
    Next I
End Sub

Sub GetShapeFromWeb(strShpUrl As String, rngTarget As Range)
    With rngTarget.Parent
        .Pictures.Insert strShpUrl
        .Shapes(.Shapes.Count).Left = rngTarget.Left
        .Shapes(.Shapes.Count).Top = rngTarget.Top
    End With
End Sub
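If you want the URL itself in the cell (as the question asks) rather than an inserted picture, you could replace the GetShapeFromWeb call with a plain cell write:
'write the image URL next to the keyword instead of inserting the picture
Cells(I, 2).Value = Url2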
I have used code from this website to pull data from a site:
Option Explicit
Public Sub GetInfo()
    Dim sResponse As String, i As Long, Html As New HTMLDocument
    Dim prices As Object, info As Object

    Application.ScreenUpdating = False
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "https://bazashifer.ru/proflist-profnastil", False
        .send
        sResponse = .responseText
    End With

    With Html
        .body.innerHTML = sResponse
        Set info = .querySelectorAll("div.views-field.views-field-title")
        Set prices = .querySelectorAll("div.views-field.views-field-field-cena-tovara")
    End With

    With Worksheets(2)
        For i = 0 To info.Length - 1
            .Cells(i + 1, 1) = info(i).innerText
            .Cells(i + 1, 2) = prices(i).innerText
        Next i
    End With
    Application.ScreenUpdating = True
End Sub
The code above works just as intended. I implemented code to take multiple links (link 1, link 2, link 3):
Option Explicit
Public Sub GetInfoAll()
    Dim wsSheet As Worksheet, Rows As Long, http As New XMLHTTP60, Html As New HTMLDocument, links As Variant, link As Variant
    Dim prices As Object, info As Object, i As Long, sResponse As String

    Set wsSheet = Sheets(1)
    Rows = wsSheet.Cells(wsSheet.Rows.Count, "A").End(xlUp).Row
    links = wsSheet.Range("A1:A" & Rows)

    With http
        For Each link In links
            .Open "GET", link, False
            .send
            sResponse = .responseText
            With Html
                .body.innerHTML = sResponse
                Set info = .querySelectorAll("div.views-field.views-field-title")
                Set prices = .querySelectorAll("div.views-field.views-field-field-cena-tovara")
            End With
            With Worksheets(2)
                For i = 0 To info.Length - 1
                    .Cells(i + 1, 1) = info(i).innerText
                    .Cells(i + 1, 2) = prices(i).innerText
                Next i
            End With
        Next link
    End With
End Sub
The above code works and should pull the data into columns, but for each subsequent link the code overwrites the data from the previous one.
Any help would be great. Thanks.
Rows = wsSheet.Cells(wsSheet.Rows.Count, "A").End(xlUp).row
You need to have something like this during your output loop on Sheet 2, because you can't hard-code the number of results.
Edit: here's what I really meant about the output:
Public Sub GetInfoAll()
    Dim wsSheet As Worksheet, Rows As Long, http As New XMLHTTP60, Html As New HTMLDocument, links As Variant, link As Variant
    Dim prices As Object, info As Object, i As Long, sResponse As String, offsetRows As Long
    Dim wb As Workbook

    Set wb = Application.Workbooks("Book1")
    Set wsSheet = wb.Sheets(1)
    Rows = wsSheet.Cells(wsSheet.Rows.Count, "A").End(xlUp).Row
    links = wsSheet.Range("A1:A" & Rows)

    With http
        For Each link In links
            .Open "GET", link, False
            .send
            sResponse = .responseText
            With Html
                .body.innerHTML = sResponse
                Set info = .querySelectorAll("div.views-field.views-field-title")
                Set prices = .querySelectorAll("div.views-field.views-field-field-cena-tovara")
            End With
            With wb.Worksheets(2)
                For i = 0 To info.Length - 1
                    offsetRows = 0
                    offsetRows = wb.Worksheets(2).Cells(wb.Worksheets(2).Rows.Count, "A").End(xlUp).Row + 1
                    .Cells(offsetRows, 1) = info(i).innerText
                    .Cells(offsetRows, 2) = prices(i).innerText
                Next i
            End With
        Next link
    End With
End Sub
I think it is ideal to make use of a container and then loop through it to parse the desired content. Consider the following example. You can always extend it to suit your needs.
Public Sub GetInfo()
    Dim Html As New HTMLDocument, Htmldoc As New HTMLDocument
    Dim Wb As Workbook, ws As Worksheet, R&, I&
    Dim link As Variant, linklist As Variant

    Set Wb = ThisWorkbook
    Set ws = Wb.Worksheets("output")
    linklist = Array( _
        "https://bazashifer.ru/armatura-stekloplastikovaya", _
        "https://bazashifer.ru/truby-0", _
        "https://bazashifer.ru/setka-stekloplastikovaya" _
    )

    For Each link In linklist
        With CreateObject("MSXML2.XMLHTTP")
            .Open "GET", link, False
            .send
            Html.body.innerHTML = .responseText
        End With
        With Html.querySelectorAll(".view-content > .views-row")
            For I = 0 To .Length - 1
                Htmldoc.body.innerHTML = .item(I).outerHTML
                R = R + 1: ws.Cells(R, 1) = Htmldoc.querySelector(".views-field-title a").innerText
                ws.Cells(R, 2) = Htmldoc.querySelector("[class*='cena-tovara'] > .field-content").innerText
            Next I
        End With
    Next link
End Sub
I think the problem is that your columns aren't updated for each link.
For i = 0 To info.Length - 1
    .Cells(i + 1, 1) = info(i).innerText
    .Cells(i + 1, 2) = prices(i).innerText
Next i
In this part you write everything to the first and second columns, so each new link overwrites them. The target columns should be updated every time you move to a new link.
So maybe add a column-counter variable which updates just before you move to the next link?
Something like this:
Infocol = 1
Pricecol = 2
For Each link In links
    ....
    .Cells(i + 1, Infocol) = info(i).innerText
    .Cells(i + 1, Pricecol) = prices(i).innerText
    ....
    Infocol = Infocol + 2
    Pricecol = Pricecol + 2
Next link
You go +2 so you don't overwrite your price column with your new info.
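Filled in against the GetInfoAll routine above, that idea might look like the sketch below (Infocol and Pricecol are the counters from the pseudocode; the rest follows the earlier code):
Public Sub GetInfoSideBySide()
    Dim Html As New HTMLDocument
    Dim links As Variant, link As Variant, i As Long
    Dim info As Object, prices As Object
    Dim Infocol As Long, Pricecol As Long

    links = Sheets(1).Range("A1:A" & Sheets(1).Cells(Sheets(1).Rows.Count, "A").End(xlUp).Row)
    Infocol = 1
    Pricecol = 2
    With CreateObject("MSXML2.XMLHTTP")
        For Each link In links
            .Open "GET", link, False
            .send
            Html.body.innerHTML = .responseText
            Set info = Html.querySelectorAll("div.views-field.views-field-title")
            Set prices = Html.querySelectorAll("div.views-field.views-field-field-cena-tovara")
            With Worksheets(2)
                For i = 0 To info.Length - 1
                    .Cells(i + 1, Infocol) = info(i).innerText
                    .Cells(i + 1, Pricecol) = prices(i).innerText
                Next i
            End With
            'shift two columns right so the next link gets its own pair
            Infocol = Infocol + 2
            Pricecol = Pricecol + 2
        Next link
    End With
End Sub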
I followed a tutorial and get an error while executing the code I copied:
Private Sub CommandButton1_Click()
    Dim internet As Object
    Dim internetdata As Object
    Dim div_result As Object
    Dim header_links As Object
    Dim link As Object
    Dim URL As String

    Set internet = CreateObject("InternetExplorer.Application")
    internet.Visible = True
    URL = "https://www.google.co.in/search?q=how+to+program+in+vba"
    internet.navigate URL
    Do Until internet.readyState >= 4
        DoEvents
    Loop
    Application.Wait Now + TimeSerial(0, 0, 5)

    Set internetdata = internet.document
    Set div_result = internetdata.getElementById("res")
    Set header_links = div_result.getElementsByTagName("h3")
    For Each h In header_links
        Set link = h.ChildNodes.Item(0)
        Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = link.href
    Next
    MsgBox "done"
End Sub
The error occurs at
Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = link.href
What is wrong here?
EDIT:
The code should take the URLs from a Google search and write them to an Excel list.
The code is copied from here:
Getting Links/URL from a webpage-Excel VBA
The issue is that Google has obviously changed the code of the website since the tutorial was written. This method is not very reliable: every time Google changes the website, it can easily break your code.
Try the following
Set div_result = internetdata.getElementById("res")
Set header_links = div_result.getElementsByTagName("a")
Dim h As Variant
For Each h In header_links
    Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = h.href
Next
If you want just the web results, without the featured content and video links, you can use the code below. In this code you can also control the page number you want to get the links from by changing the variable pageNo.
Private Sub CommandButton1_Click()
    Dim internet As Object
    Dim internetdata As Object
    Dim div_result As Object
    Dim header_links As Object
    Dim link As Object
    Dim URL As String
    Const pageNo = 10 '0 is page 1, 10 is page 2 and so on: 0;10;20;30;40

    Set internet = CreateObject("InternetExplorer.Application")
    internet.Visible = True
    URL = "https://www.google.co.in/search?q=how+to+program+in+vba"
    internet.navigate URL & "&start=" & pageNo
    Do Until internet.readyState >= 4
        DoEvents
    Loop
    Application.Wait Now + TimeSerial(0, 0, 5)

    Set internetdata = internet.document
    Set div_result = internetdata.getElementById("res")
    Set header_links = div_result.getElementsByTagName("h2")
    Dim div As Variant, Links As Object
    For Each div In header_links
        If div.innerText = "Web results" Then
            Set Links = div.ParentElement.getElementsByTagName("a")
            For Each link In Links
                Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = link.href
            Next
        End If
    Next
    MsgBox "done"
End Sub
I am looking to follow a series of URLs found in column A (example: https://www.ebay.com/itm/Apple-iPhone-7-GSM-Unlocked-Verizon-AT-T-TMobile-Sprint-32GB-128GB-256GB/352381131997?epid=225303158&hash=item520b8d5cdd:m:mWgYDe4a79NeLuAlV-RmAQA:rk:7:pf:0) and pull the following information from them:
- Title
- Price
- Description
I think there are multiple issues with my code... For one, I can't get the program to follow the specific URLs listed in the Excel file (only if I specify one within the code). Also, pulling multiple fields has given me issues.
Option Explicit
Public Sub ListingInfo()
    Dim ie As New InternetExplorer, ws As Worksheet, t As Date
    Dim i As Integer
    i = 0
    Do While Worksheets("Sheet1").Cells(i, 1).Value <> ""
        Const MAX_WAIT_SEC As Long = 10
        Set ws = ThisWorkbook.Worksheets("Sheet1")
        With ie
            .Visible = True
            .Navigate2 Worksheets("Sheet1").Cells(i, 1).Value
            While .Busy Or .readyState < 4: DoEvents: Wend
            Dim Links As Object, i As Long, count As Long
            t = Timer
            Do
                On Error Resume Next
                Set Title = .document.querySelectorAll("it-ttl")
                Set price = .document.querySelectorAll("notranslate")
                Set Description = .document.querySelectorAll("ds_div")
                count = Links.Length
                On Error GoTo 0
                If Timer - t > MAX_WAIT_SEC Then Exit Do
            Loop While count = 0
            For i = 0 To Title.Length - 1
                ws.Cells(i + 1, 1) = Title.item(i)
                ws.Cells(i + 1, 2) = price.item(i)
                ws.Cells(i + 1, 3) = Description.item(i)
            Next
            .Quit
        i = i + 1
    Loop
    End With
End Sub
I would use late binding for MSXML2.XMLHTTP and set a reference to the Microsoft HTML Object Library for the HTMLDocument.
Note: querySelector() returns the first item that matches its search string.
Here is the short version:
Public Sub ListingInfo()
    Dim cell As Range
    With ThisWorkbook.Worksheets("Sheet1")
        For Each cell In .Range("A1", .Cells(.Rows.Count, 1).End(xlUp))
            Dim Document As MSHTML.HTMLDocument
            With CreateObject("MSXML2.XMLHTTP")
                .Open "GET", cell.Value, False
                .send
                Set Document = New MSHTML.HTMLDocument
                Document.body.innerHTML = .responseText
            End With
            cell.Offset(0, 1).Value = Document.getElementById("itemTitle").innerText
            cell.Offset(0, 2).Value = Document.getElementById("prcIsum").innerText
            If Not Document.querySelector(".viSNotesCnt") Is Nothing Then
                cell.Offset(0, 3).Value = Document.querySelector(".viSNotesCnt").innerText
            Else
                'Try Something Else
            End If
        Next
    End With
End Sub
A more elaborate solution would be to break the code up into smaller routines and load the data into an Array. The main advantage of this is that you can test each subroutine separately.
Option Explicit

Public Type tListingInfo
    Description As String
    Price As Currency
    Title As String
End Type

Public Sub ListingInfo()
    Dim source As Range
    Dim data As Variant
    With ThisWorkbook.Worksheets("Sheet1")
        Set source = .Range("A1:D1", .Cells(.Rows.Count, 1).End(xlUp))
        data = source.Value
    End With

    Dim r As Long
    Dim record As tListingInfo
    Dim url As String
    For r = 1 To UBound(data)
        url = data(r, 1)
        record = getListingInfo(url)
        With record
            data(r, 2) = .Description
            data(r, 3) = .Price
            data(r, 4) = .Title
        End With
    Next
    source.Value = data
End Sub

Public Function getListingInfo(url As String) As tListingInfo
    Dim ListingInfo As tListingInfo
    Dim Document As MSHTML.HTMLDocument
    Set Document = getHTMLDocument(url)
    With ListingInfo
        .Description = Document.getElementById("itemTitle").innerText
        .Price = Split(Document.getElementById("prcIsum").innerText)(1)
        .Title = Document.querySelectorAll(".viSNotesCnt")(0).innerText
        Debug.Print .Description, .Price, .Title
    End With
    getListingInfo = ListingInfo 'return the populated record
End Function

Public Function getHTMLDocument(url As String) As MSHTML.HTMLDocument
    Const READYSTATE_COMPLETE As Long = 4
    Dim Document As MSHTML.HTMLDocument
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", url, False
        .send
        If .readyState = READYSTATE_COMPLETE And .Status = 200 Then
            Set Document = New MSHTML.HTMLDocument
            Document.body.innerHTML = .responseText
            Set getHTMLDocument = Document
        Else
            MsgBox "URL: " & vbCrLf & "Ready state: " & .readyState & vbCrLf & "HTTP request status: " & .Status, vbInformation, "URL Not Responding"
        End If
    End With
End Function
There are a lot of things to fix in your code. It is late here, so I will just give pointers (and update fully later) plus working code below:
Declare all variables and use the appropriate types
Review For loops and how Transpose can be used to create a 1D array of URLs pulled from the sheet to loop over
Review the difference between the querySelector and querySelectorAll methods
Review CSS selectors (you are specifying everything as a type selector, when in fact you are not selecting by tag for the elements of interest, nor by your stated text)
Think about the placement of your IE object creation and of your .Navigate2, to make use of the existing object
Make sure to use distinct loop counters
Be sure not to overwrite values in the sheet
Code:
Option Explicit
Public Sub ListingInfo()
    Dim ie As New InternetExplorer, ws As Worksheet
    Dim i As Long, urls(), rowCounter As Long
    Dim title As Object, price As Object, description As Object

    Set ws = ThisWorkbook.Worksheets("Sheet1")
    urls = Application.Transpose(ws.Range("A1:A2").Value) '<= Adjust

    With ie
        .Visible = True
        For i = LBound(urls) To UBound(urls)
            If InStr(urls(i), "http") > 0 Then
                rowCounter = rowCounter + 1
                .Navigate2 urls(i)
                While .Busy Or .readyState < 4: DoEvents: Wend
                Set title = .document.querySelector(".it-ttl")
                Set price = .document.querySelector("#prcIsum")
                Set description = .document.querySelector("#viTabs_0_is")
                ws.Cells(rowCounter, 3) = title.innerText
                ws.Cells(rowCounter, 4) = price.innerText
                ws.Cells(rowCounter, 5) = description.innerText
                Set title = Nothing: Set price = Nothing: Set description = Nothing
            End If
        Next
        .Quit
    End With
End Sub
Here's an approach using web requests via MSXML. It should be significantly faster than using IE, and I'd encourage you to strongly consider using this approach wherever possible.
You'll need references to Microsoft HTML Object Library and Microsoft XML, v6.0 to get this working.
Option Explicit
Public Sub SubmitRequest()
    Dim URLs As Excel.Range
    Dim URL As Excel.Range
    Dim LastRow As Long
    Dim wb As Excel.Workbook: Set wb = ThisWorkbook
    Dim ws As Excel.Worksheet: Set ws = wb.Worksheets(1)
    Dim ListingDetail As Variant
    Dim i As Long
    Dim j As Long
    Dim html As HTMLDocument
    ReDim ListingDetail(0 To 2, 0 To 10000)

    'Get URLs
    With ws
        LastRow = .Cells(.Rows.Count, 1).End(xlUp).Row
        Set URLs = .Range(.Cells(1, 1), .Cells(LastRow, 1))
    End With

    'Update the ListingDetail
    For Each URL In URLs
        Set html = getHTML(URL.Value2)
        ListingDetail(0, i) = html.getElementById("itemTitle").innerText 'Title
        ListingDetail(1, i) = html.getElementById("prcIsum").innerText 'Price
        ListingDetail(2, i) = html.getElementsByClassName("viSNotesCnt")(0).innerText 'Seller Notes
        i = i + 1
    Next

    'Resize array
    ReDim Preserve ListingDetail(0 To 2, 0 To i - 1)

    'Dump in columns T, U, V of the existing sheet
    ws.Range("T1:V" & i).Value = WorksheetFunction.Transpose(ListingDetail)
End Sub

Private Function getHTML(ByVal URL As String) As HTMLDocument
    'Add a reference to Microsoft HTML Object Library
    Set getHTML = New HTMLDocument
    With New MSXML2.XMLHTTP60
        .Open "GET", URL, False 'synchronous request so responseText is ready after send
        .send
        getHTML.body.innerHTML = .responseText
    End With
End Function