Capturing the full web table with VBA despite the page break - Excel

I would like to capture the full set of data in the table at
https://mis.twse.com.tw/stock/sblInquiryCap.jsp?lang=en_us#
I was using the code from another post, but I could only grab the first 10 rows of data because of the page break.
Is there any way I can amend the code so that it captures the full set of data, please?
Option Explicit
Public Sub MakeSelectionGetData()
Sheets("Sheet1").Cells.Clear
Dim ie As New InternetExplorer
Const url = "https://mis.twse.com.tw/stock/sblInquiryCap.jsp?lang=en_us#"
Application.ScreenUpdating = False
With ie
.Visible = True
.navigate url
While .Busy Or .readyState < 4: DoEvents: Wend
Application.Wait Now + TimeSerial(0, 0, 6)
Dim nTable As HTMLTable
Set nTable = .document.getElementById("sblCapTable")
Dim Headers()
Headers = Array("Number", "Stock Code", "Real Time Available Volume for SBL Short Sales", "Last Modify")
Dim TR As Object, TD As Object, r As Long, c As Long
With ActiveSheet
r = 2
c = 1
Dim TR_col As Object, TD_col As Object
Set TR_col = nTable.getElementsByTagName("TR")
.Range("A1").Resize(1, UBound(Headers) + 1) = Headers
For Each TR In TR_col
Set TD_col = TR.getElementsByTagName("TD")
For Each TD In TD_col
.Cells(r, c) = TD.innerText
c = c + 1
Next
c = 1
r = r + 1
Next
End With
.Quit
End With
Application.ScreenUpdating = True
End Sub

It is advisable first of all to set the hits per page to 100, and then to go through the pagination, which is a bit tricky on this page. I have commented the macro:
Public Sub MakeSelectionGetData()
Const url = "https://mis.twse.com.tw/stock/sblInquiryCap.jsp?lang=en_us#"
Dim ie As Object
Dim nodeDropdown As Object
Dim nTable As Object
Dim TR As Object
Dim TD As Object
Dim TR_col As Object
Dim TD_col As Object
Dim nodePagination As Object
Dim nodesCssCurrentNext As Object
Dim Headers() As Variant
Dim r As Long
Dim c As Long
Dim endOfPagination As Boolean
Sheets("Sheet1").Cells.Clear
Headers = Array("Number", "Stock Code", "Real Time Available Volume for SBL Short Sales", "Last Modify")
ActiveSheet.Range("A1").Resize(1, UBound(Headers) + 1) = Headers
r = 2
c = 1
'Initialize Internet Explorer, set visibility,
'call URL and wait until page is fully loaded
Set ie = CreateObject("internetexplorer.application")
ie.Visible = True
ie.navigate url
Do Until ie.readyState = 4: DoEvents: Loop
Application.Wait Now + TimeSerial(0, 0, 6)
'Change hits per page to 100
'Get dropbox from html
On Error Resume Next
Set nodeDropdown = ie.document.getElementById("prePage")
On Error GoTo 0
'Select the entry with the value 100
nodeDropdown.selectedIndex = 3
'Trigger the change event to update the page
Call TriggerEvent(ie.document, nodeDropdown, "change")
'Short break to run the update
Application.Wait Now + TimeSerial(0, 0, 2)
'Loop through the pagination
Do
'Your code
Set nTable = ie.document.getElementById("sblCapTable").getElementsByTagName("tbody")(0)
Set TR_col = nTable.getElementsByTagName("TR")
For Each TR In TR_col
Set TD_col = TR.getElementsByTagName("TD")
For Each TD In TD_col
ActiveSheet.Cells(r, c) = TD.innerText
c = c + 1
Next TD
c = 1
r = r + 1
Next TR
'Click next button in pagination if it's a link
On Error Resume Next
Set nodePagination = ie.document.getElementById("Pagination")
On Error GoTo 0
'Check for no 'Next' button
Set nodesCssCurrentNext = nodePagination.getElementsByClassName("current next")
'While there is no element in the node collection we click the 'Next' button
If nodesCssCurrentNext.Length = 0 Then
'Click the 'Next' button
nodePagination.getElementsByClassName("next")(0).Click
'Short break to update the next 100 hits
'All data is in memory, so there is nothing to load from the server
Application.Wait Now + TimeSerial(0, 0, 1)
Else
'If the node collection is not empty, we reached the end of pagination
endOfPagination = True
End If
Loop Until endOfPagination
ie.Quit
End Sub
This procedure triggers the HTML change event needed after setting the dropdown to 100 hits per page:
Private Sub TriggerEvent(htmlDocument As Object, htmlElementWithEvent As Object, eventType As String)
Dim theEvent As Object
htmlElementWithEvent.Focus
Set theEvent = htmlDocument.createEvent("HTMLEvents")
theEvent.initEvent eventType, True, False
htmlElementWithEvent.dispatchEvent theEvent
End Sub

Related

Scrape CDC vaccination data using VBA

I am trying to scrape the vaccination data from the below CDC website:
https://covid.cdc.gov/covid-data-tracker/#vaccinations
I have tried querySelectorAll but no luck. Can anyone help take a look? Much appreciated!
Sub useClassnames()
Dim appIE As Object
Set appIE = CreateObject("internetexplorer.application")
With appIE
.navigate "https://covid.cdc.gov/covid-data-tracker/#vaccinations"
.Visible = False
End With
Do While appIE.Busy
DoEvents
Loop
Set allRowOfData = appIE.document.getElementById("maincontent")
Debug.Print allRowOfData.innerHTML
'Set element = appIE.document.querySelectorAll(".container mt-5")
'For Each elements In element
' Debug.Print elements
'Next elements
'For Each element In allRowOfData
'Debug.Print element
'Next element
End Sub
Here you go, just change your worksheet name or number :)
Option Explicit
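'VBE > Tools > References: Microsoft Internet Controls, Microsoft HTML Object Library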
Const updatedCol = 1
Const dosesDistributedColVal = 2
Const peopleInicVaccColVal = 3
Sub useClassnames()
'declare worksheet variable and set headers
Dim targetWsh As Worksheet: Set targetWsh = ThisWorkbook.Sheets(1)
targetWsh.Cells(1, 1).Value = "Last Update"
targetWsh.Cells(1, 2).Value = "Doses Distributed"
targetWsh.Cells(1, 3).Value = "People Initiating Vaccination"
Dim lstRegisterRow As Long: lstRegisterRow = targetWsh.Range("A" & targetWsh.Rows.Count).End(xlUp).Row + 1
'open IE and navigate to site
Dim appIE As InternetExplorer: Set appIE = New InternetExplorer
appIE.navigate "https://covid.cdc.gov/covid-data-tracker/#vaccinations"
appIE.Visible = False
While appIE.Busy = True Or appIE.readyState < 4: DoEvents: Wend
Dim oHtmlDoc As HTMLDocument: Set oHtmlDoc = appIE.document
Dim oHtmlElementColl As IHTMLElementCollection
'Get and write last update date
Application.Wait (Now + TimeValue("0:00:02")) 'wait 2 secs to avoid error; if you receive an error, add seconds as needed
Set oHtmlElementColl = oHtmlDoc.getElementsByTagName("small")
targetWsh.Cells(lstRegisterRow, updatedCol) = oHtmlElementColl(0).innerHTML
'Get and write Doses Distributed and People Initiating Vaccination
Set oHtmlElementColl = oHtmlDoc.GetElementsByClassName("card-number")
targetWsh.Cells(lstRegisterRow, dosesDistributedColVal) = oHtmlElementColl(0).innerText
targetWsh.Cells(lstRegisterRow, peopleInicVaccColVal) = oHtmlElementColl(1).innerText
appIE.Quit
End Sub

How to select value from drop down on web url?

I am trying to select data from a dropdown at the web URL. All of my code is working fine, but I am unable to select the value from the dropdown.
Sub pulldata2()
Dim tod As String, UnderLay As String
Dim IE As Object
Dim doc As HTMLDocument
'Html table
Dim Tbl As HTMLTable, Cel As HTMLTableCell, Rw As HTMLTableRow, Col As HTMLTableCol
Dim TrgRw As Long, TrgCol As Long
'Create new sheet
tod = ThisWorkbook.Sheets("URLList").Range("C2").Value
have = False
For Each sht In ThisWorkbook.Sheets
If sht.Name = tod Then
have = True
Exit For
End If
Next sht
If have = False Then
Sheets.Add After:=Sheets(Sheets.Count)
ActiveSheet.Name = tod
Else
If MsgBox("Sheet " & tod & " already exists Overwrite Data?", vbYesNo) = vbNo Then Exit Sub
End If
'Start Internetexplorer
Set IE = CreateObject("InternetExplorer.Application")
IE.Visible = True
IE.navigate "https://nseindia.com/live_market/dynaContent/live_watch/option_chain/optionKeys.jsp?segmentLink=17&instrument=OPTIDX&symbol=NIFTY&date=25APR2019"
Do While IE.Busy Or IE.readyState <> 4
Application.Wait DateAdd("s", 1, Now)
Loop
Set doc = IE.document
Dim ColOff As Long
'Put data to sheet and loop to next URL
For Nurl = 2 To 191
ColOff = (Nurl - 2) * 23
TrgRw = 1
UnderLay = ThisWorkbook.Sheets("URLList").Range("A" & Nurl).Value
doc.getElementById("underlyStock").Value = UnderLay
doc.parentWindow.execScript "goBtnClick('stock');", "javascript"
'now i want to select data from dropdown id=date, value= 27JUN2019
doc.querySelector("Select[name=date] option[value=27JUN2019]").Selected = True
Do While IE.Busy Or IE.readyState <> 4
Application.Wait DateAdd("s", 1, Now)
Loop
Set Tbl = doc.getElementById("octable")
ThisWorkbook.Sheets(tod).Cells(TrgRw, ColOff + 1).Value = UnderLay
ThisWorkbook.Sheets(tod).Cells(TrgRw, ColOff + 1).Font.Size = 20
ThisWorkbook.Sheets(tod).Cells(TrgRw, ColOff + 1).Font.Bold = True
ThisWorkbook.Sheets(tod).Cells(TrgRw, ColOff + 1).Select
TrgRw = TrgRw + 1
For Each Rw In Tbl.Rows
TrgCol = 1
For Each Cel In Rw.Cells
ThisWorkbook.Sheets(tod).Cells(TrgRw, ColOff + TrgCol).Value = Cel.innerText
TrgCol = TrgCol + Cel.colSpan ' if Column span is > 1 multiple
Next Cel
TrgRw = TrgRw + 1
Next Rw
TrgRw = TrgRw + 1
Next Nurl
'exit the internetexplorer
IE.Quit
Set IE = Nothing
End Sub
Why is my code not working? I am new to VBA, please help me find the error in my code.
Simply alter the URL rather than using the dropdown:
https://nseindia.com/live_market/dynaContent/live_watch/option_chain/optionKeys.jsp?segmentLink=17&instrument=OPTIDX&symbol=NIFTY&date=27JUN2019
You can also use XHR to get the content rather than a slow browser. I use the clipboard to write out the table.
Option Explicit
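'VBE > Tools > References: Microsoft HTML Object Library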
Public Sub GetInfo()
Dim html As Object, hTable As Object, ws As Worksheet, clipboard As Object
Set html = New HTMLDocument
Set ws = ThisWorkbook.Worksheets("Sheet1")
Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", "https://nseindia.com/live_market/dynaContent/live_watch/option_chain/optionKeys.jsp?segmentLink=17&instrument=OPTIDX&symbol=NIFTY&date=27JUN2019", False
.send
html.body.innerHTML = .responseText
Set hTable = html.getElementById("octable")
clipboard.SetText hTable.outerHTML
clipboard.PutInClipboard
ws.Range("A1").PasteSpecial
End With
End Sub
Alternative:
1) You could loop the tr and td elements within hTable above to write out the table (a short sketch follows below).
2) You could also use Power Query From Web (via the Data tab in Excel 2016+, or the free Power Query add-in for 2013). You paste the URL into the top of the pop-up browser, press Go, then select the table to import.
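For option 1, a minimal sketch of the row/cell loop, reusing the hTable and ws objects from the XHR code above (untested against the live page, so treat it as a starting point):
Dim tRow As Object, tCell As Object, r As Long, c As Long
r = 1
For Each tRow In hTable.getElementsByTagName("tr")
    c = 1
    For Each tCell In tRow.getElementsByTagName("td")
        ws.Cells(r, c) = tCell.innerText 'write each cell's text to the sheet
        c = c + 1
    Next tCell
    r = r + 1
Next tRow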
Changing stocks:
Stocks are part of the URL query string, e.g. symbol=NIFTY, so you can concatenate the new symbol into the URL during a loop:
"https://nseindia.com/live_market/dynaContent/live_watch/option_chain/optionKeys.jsp?segmentLink=17&instrument=OPTIDX&symbol=" & yourSymbolGoesHere & "&date=27JUN2019"
If you really want to use IE, be sure to encase the value of the attribute within '', e.g. '27JUN2019':
Option Explicit
'VBE > Tools > References: Microsoft Internet Controls
Public Sub ClickButton()
Dim ie As InternetExplorer
Const URL As String = "https://nseindia.com/live_market/dynaContent/live_watch/option_chain/optionKeys.jsp?segmentLink=17&instrument=OPTIDX&symbol=NIFTY&date=25APR2019"
Set ie = New InternetExplorer
With ie
.Visible = True
.Navigate2 URL
While .Busy Or .readyState < 4: DoEvents: Wend
With .document
.querySelector("[value='27JUN2019']").Selected = True
Stop
End With
End With
End Sub
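Note that setting .Selected = True does not by itself raise the dropdown's change event, so the option chain will likely not refresh until that event is fired; one option is to reuse the TriggerEvent helper from the first answer above. A sketch, assuming the select element is named date as in the question:
'inside the With .document block, after selecting the option:
Dim dd As Object
Set dd = .querySelector("select[name='date']") 'the date dropdown
TriggerEvent ie.document, dd, "change" 'TriggerEvent helper as defined in the first answer above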

VBA HTML Listing Info Pull

I am looking to follow a series of URL's that are found in column A (example: https://www.ebay.com/itm/Apple-iPhone-7-GSM-Unlocked-Verizon-AT-T-TMobile-Sprint-32GB-128GB-256GB/352381131997?epid=225303158&hash=item520b8d5cdd:m:mWgYDe4a79NeLuAlV-RmAQA:rk:7:pf:0) and pull the following information from them:
- Title
- Price
- Description
I think there are multiple issues with my code... For one, I can't get the program to follow the specific URLs listed in the Excel sheet (it only works if I specify one within the code). Also, pulling multiple fields has given me issues.
Option Explicit
Public Sub ListingInfo()
Dim ie As New InternetExplorer, ws As Worksheet, t As Date
Dim i As Integer
i = 0
Do While Worksheets("Sheet1").Cells(i, 1).Value <> ""
Const MAX_WAIT_SEC As Long = 10
Set ws = ThisWorkbook.Worksheets("Sheet1")
With ie
.Visible = True
.Navigate2 Worksheets("Sheet1").Cells(i, 1).Value
While .Busy Or .readyState < 4: DoEvents: Wend
Dim Links As Object, i As Long, count As Long
t = Timer
Do
On Error Resume Next
Set Title = .document.querySelectorAll("it-ttl")
Set price = .document.querySelectorAll("notranslate")
Set Description = .document.querySelectorAll("ds_div")
count = Links.Length
On Error GoTo 0
If Timer - t > MAX_WAIT_SEC Then Exit Do
Loop While count = 0
For i = 0 To Title.Length - 1
ws.Cells(i + 1, 1) = Title.item(i)
ws.Cells(i + 1, 2) = price.item(i)
ws.Cells(i + 1, 3) = Description.item(i)
Next
.Quit
i = i + 1
Loop
End With
End Sub
I would use late binding for MSXML2.XMLHTTP and set a reference to the Microsoft HTML Object Library for the HTMLDocument.
Note: querySelector() returns the first element it finds that matches its search string.
Here is the short version:
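'VBE > Tools > References: Microsoft HTML Object Library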
Public Sub ListingInfo()
Dim cell As Range
With ThisWorkbook.Worksheets("Sheet1")
For Each cell In .Range("A1", .Cells(.Rows.Count, 1).End(xlUp))
Dim Document As MSHTML.HTMLDocument
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", cell.Value, False
.send
Set Document = New MSHTML.HTMLDocument
Document.body.innerHTML = .responseText
End With
cell.Offset(0, 1).Value = Document.getElementByID("itemTitle").innerText
cell.Offset(0, 2).Value = Document.getElementByID("prcIsum").innerText
If Not Document.querySelector(".viSNotesCnt") Is Nothing Then
cell.Offset(0, 3).Value = Document.querySelector(".viSNotesCnt").innerText
Else
'Try Something Else
End If
Next
End With
End Sub
A more elaborate solution would be to break the code up into smaller routines and load the data into an Array. The main advantage of this is that you can test each subroutine separately.
Option Explicit
Public Type tListingInfo
Description As String
Price As Currency
Title As String
End Type
Public Sub ListingInfo()
Dim source As Range
Dim data As Variant
With ThisWorkbook.Worksheets("Sheet1")
Set source = .Range("A1:D1", .Cells(.Rows.count, 1).End(xlUp))
data = source.Value
End With
Dim r As Long
Dim record As tListingInfo
Dim url As String
For r = 1 To UBound(data)
url = data(r, 1)
record = getListingInfo(url)
With record
data(r, 2) = .Description
data(r, 3) = .Price
data(r, 4) = .Title
End With
Next
source.Value = data
End Sub
Public Function getListingInfo(url As String) As tListingInfo
Dim ListingInfo As tListingInfo
Dim Document As MSHTML.HTMLDocument
Set Document = getHTMLDocument(url)
With ListingInfo
.Description = Document.getElementByID("itemTitle").innerText
.Price = Split(Document.getElementByID("prcIsum").innerText)(1)
.Title = Document.querySelectorAll(".viSNotesCnt")(0).innerText
Debug.Print .Description, .Price, .Title
End With
getListingInfo = ListingInfo
End Function
Public Function getHTMLDocument(url As String) As MSHTML.HTMLDocument
Const READYSTATE_COMPLETE As Long = 4
Dim Document As MSHTML.HTMLDocument
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", url, False
.send
If .readyState = READYSTATE_COMPLETE And .Status = 200 Then
Set Document = New MSHTML.HTMLDocument
Document.body.innerHTML = .responseText
Set getHTMLDocument = Document
Else
MsgBox "URL: " & vbCrLf & "Ready state: " & .readyState & vbCrLf & "HTTP request status: " & .Status, vbInformation, "URL Not Responding"
End If
End With
End Function
There are a lot of things to fix in your code. It is late here so I will just give pointers (and update fully later) and working code below:
Declare all variables and use appropriate type
Review For Loops and how transpose can be used to create a 1d array of urls pulled from sheet to loop over
Review the difference between querySelector and querySelectorAll methods
Review CSS selectors (you are specifying everything as type selector when in fact you are not selecting by tag for the elements of interest; nor by your stated text)
Think about placement of your IE object creation and of your .Navigate2 to make use of existing object
Make sure to use distinct loop counters
Be sure not to overwrite values in sheet
Code:
Option Explicit
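'VBE > Tools > References: Microsoft Internet Controls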
Public Sub ListingInfo()
Dim ie As New InternetExplorer, ws As Worksheet
Dim i As Long, urls(), rowCounter As Long
Dim title As Object, price As Object, description As Object
Set ws = ThisWorkbook.Worksheets("Sheet1")
urls = Application.Transpose(ws.Range("A1:A2").Value) '<= Adjust
With ie
.Visible = True
For i = LBound(urls) To UBound(urls)
If InStr(urls(i), "http") > 0 Then
rowCounter = rowCounter + 1
.Navigate2 urls(i)
While .Busy Or .readyState < 4: DoEvents: Wend
Set title = .document.querySelector(".it-ttl")
Set price = .document.querySelector("#prcIsum")
Set description = .document.querySelector("#viTabs_0_is")
ws.Cells(rowCounter, 3) = title.innerText
ws.Cells(rowCounter, 4) = price.innerText
ws.Cells(rowCounter, 5) = description.innerText
Set title = Nothing: Set price = Nothing: Set description = Nothing
End If
Next
.Quit
End With
End Sub
Here's an approach using Web Requests, using MSXML. It should be significantly faster than using IE, and I'd encourage you to strongly consider using this approach wherever possible.
You'll need references to Microsoft HTML Object Library and Microsoft XML v6.0 to get this working.
Option Explicit
Public Sub SubmitRequest()
Dim URLs As Excel.Range
Dim URL As Excel.Range
Dim LastRow As Long
Dim wb As Excel.Workbook: Set wb = ThisWorkbook
Dim ws As Excel.Worksheet: Set ws = wb.Worksheets(1)
Dim ListingDetail As Variant
Dim i As Long
Dim j As Long
Dim html As HTMLDocument
ReDim ListingDetail(0 To 2, 0 To 10000)
'Get URLs
With ws
LastRow = .Cells(.Rows.Count, 1).End(xlUp).Row
Set URLs = .Range(.Cells(1, 1), .Cells(LastRow, 1))
End With
'Update the ListingDetail
For Each URL In URLs
Set html = getHTML(URL.Value2)
ListingDetail(0, i) = html.getElementByID("itemTitle").innertext 'Title
ListingDetail(1, i) = html.getElementByID("prcIsum").innertext 'Price
ListingDetail(2, i) = html.getElementsByClassName("viSNotesCnt")(0).innertext 'Seller Notes
i = i + 1
Next
'Resize array
ReDim Preserve ListingDetail(0 To 2, 0 To i - 1)
'Dump in Column T,U,V of existing sheet
ws.Range("T1:V" & i).Value = WorksheetFunction.Transpose(ListingDetail)
End Sub
Private Function getHTML(ByVal URL As String) As HTMLDocument
'Add a reference to Microsoft HTML Object Library
Set getHTML = New HTMLDocument
With New MSXML2.XMLHTTP60
.Open "GET", URL
.send
getHTML.body.innerHTML = .responseText
End With
End Function

VBA: copy data from website into excel

I have VBA code that selects info from drop-down menus on a government website and then submits the query. The requested data then opens up in another IE page. I am trying to copy this data into Excel; however, I am unable to do so.
My code currently copies the text of the first IE page, the one that contains the drop-down menus. The government website is: http://www.osfi-bsif.gc.ca/Eng/wt-ow/Pages/FINDAT.aspx
I have looked all over the internet for a solution but nothing seems to work...
Here is my code:
Sub GetOsfiFinancialData()
Dim UrlAddress As String
UrlAddress = "http://ws1.osfi-bsif.gc.ca/WebApps/FINDAT/DTIBanks.aspx?T=0&LANG=E"
Dim ie As Object
Set ie = CreateObject("internetexplorer.application")
With ie
.Silent = True
.Visible = False
.navigate UrlAddress
End With
Do Until Not ie.Busy And ie.readyState = 4
DoEvents
Loop
Application.Wait (Now() + TimeValue("00:00:05"))
'Select Bank
ie.document.getElementById("DTIWebPartManager_gwpDTIBankControl1_DTIBankControl1_institutionTypeCriteria_institutionsDropDownList").Value = Z005
'open window with financial data
Dim objButton
Set objButton = ie.document.getElementById("DTIWebPartManager_gwpDTIBankControl1_DTIBankControl1_submitButton")
objButton.Focus
objButton.Click
'select new pop-up window
marker = 0
Set objshell = CreateObject("Shell.Application")
IE_count = objshell.Windows.Count
For x = 0 To (IE_count - 1)
On Error Resume Next ' sometimes more web pages are counted than are open
my_title = objshell.Windows(x).document.Title
If my_title Like "Consolidated Monthly Balance Sheet" & "*" Then 'compare to find if the desired web page is already open
Set ie = objshell.Windows(x)
marker = 1
Exit For
Else
End If
Next
Do Until Not ie.Busy And ie.readyState = 4
DoEvents
Loop
Application.Wait (Now() + TimeValue("00:00:05"))
Dim doc As MSHTML.HTMLDocument
Dim tables As MSHTML.IHTMLElementCollection
Dim table As MSHTML.HTMLTable
Dim clipboard As MSForms.DataObject
Set doc = ie.document
Set tables = doc.getElementsByTagName("body")
Set table = tables(0)
Set clipboard = New MSForms.DataObject
'paste in sheets
Dim test
Set test = ActiveWorkbook.Sheets("Test")
clipboard.SetText table.outerHTML
clipboard.PutInClipboard
test.Range("A1").PasteSpecial xlPasteAll
clipboard.Clear
MsgBox ("Task Completed")
End Sub
Your help is greatly appreciated!
Your test against document.Title was on the right track. I found that a For Each over all the shell windows, comparing against the full title, worked in combination with copy-pasting the pop-up window's outerHTML. No additional wait time was required.
Inside the For Each loop, after you reset the IE instance to the new window, you can obtain the new URL with ie.document.url. As you already have the data loaded, you might as well just copy-paste it straight away, in my opinion.
Code:
Option Explicit
Public Sub GetOsfiFinancialData()
Dim UrlAddress As String, objButton, ie As Object
UrlAddress = "http://ws1.osfi-bsif.gc.ca/WebApps/FINDAT/DTIBanks.aspx?T=0&LANG=E"
Set ie = CreateObject("internetexplorer.application")
With ie
.Silent = True
.Visible = False
.navigate UrlAddress
While .Busy Or .readyState < 4: DoEvents: Wend
.document.getElementById("DTIWebPartManager_gwpDTIBankControl1_DTIBankControl1_institutionTypeCriteria_institutionsDropDownList").Value = "Z005"
Set objButton = .document.getElementById("DTIWebPartManager_gwpDTIBankControl1_DTIBankControl1_submitButton")
objButton.Focus
objButton.Click
Dim objShellWindows As New SHDocVw.ShellWindows, currentWindow As IWebBrowser2
For Each currentWindow In objShellWindows
If currentWindow.document.Title = "Consolidated Monthly Balance Sheet - Banks, Trust and Loan" Then
Set ie = currentWindow
Exit For
End If
Next
Dim clipboard As Object
Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
clipboard.SetText ie.document.body.outerHTML
clipboard.PutInClipboard
ThisWorkbook.Worksheets("Sheet1").Cells(1, 1).PasteSpecial
.Quit
End With
End Sub
References (VBE > Tools > References):
Microsoft Internet Controls
I don't have time to get into all the stuff about controlling one browser from another, but I think you can figure that part out, especially since you made some great progress on this already. Get URL#2 from URL#1, like you are doing, but with some better data controls around it, and then do this...
Option Explicit
Sub Web_Table_Option_One()
Dim xml As Object
Dim html As Object
Dim objTable As Object
Dim result As String
Dim lRow As Long
Dim lngTable As Long
Dim lngRow As Long
Dim lngCol As Long
Dim ActRw As Long
Set xml = CreateObject("MSXML2.XMLHTTP.6.0")
With xml
.Open "GET", "http://ws1.osfi-bsif.gc.ca/WebApps/Temp/2f40b7ef-d024-4eca-a8a3-fb82153efafaFinancialData.aspx", False
.send
End With
result = xml.responseText
Set html = CreateObject("htmlfile")
html.body.innerHTML = result
Set objTable = html.getElementsByTagName("Table")
For lngTable = 0 To objTable.Length - 1
For lngRow = 0 To objTable(lngTable).Rows.Length - 1
For lngCol = 0 To objTable(lngTable).Rows(lngRow).Cells.Length - 1
ThisWorkbook.Sheets("Sheet1").Cells(ActRw + lngRow + 1, lngCol + 1) = objTable(lngTable).Rows(lngRow).Cells(lngCol).innerText
Next lngCol
Next lngRow
ActRw = ActRw + objTable(lngTable).Rows.Length + 1
Next lngTable
End Sub

Failed to use VBA capturing the web table

I would like to capture the table shown here.
But the numbers in the web table show up as "-" in my Excel file.
Here is the VBA script that I wrote to capture the table in the middle:
Sub data()
Dim xmlHttp As Object
Dim TR_col As Object, TR As Object
Dim TD_col As Object, TD As Object
Dim row As Long, col As Long
Set xmlHttp = CreateObject("MSXML2.XMLHTTP.6.0")
xmlHttp.Open "GET", "http://mis.twse.com.tw/stock/fibest.jsp?stock=29151&lang=zh_tw", False
xmlHttp.setRequestHeader "Content-Type", "text/html; charset=utf-8"
xmlHttp.send
Dim html As Object
Set html = CreateObject("htmlfile")
html.body.innerHTML = xmlHttp.responseText
Dim tbl As Object
Set tbl = html.getElementById("hor-minimalist-tb")
row = 1
col = 1
Set TR_col = html.getElementsByTagName("TR")
For Each TR In TR_col
Set TD_col = TR.getElementsByTagName("TD")
For Each TD In TD_col
Cells(row, col) = TD.innerText
col = col + 1
Next
col = 1
row = row + 1
Next
End Sub
The table is slow to load and XHR is too fast (it returns the page before the table has been populated). Swap to Selenium or Internet Explorer, where you can take advantage of waits.
Here is an example of using IE to scrape the table, including a wait long enough to get past the scheduled 5s refresh. I have used the English version of the site:
Image on web:
Output from code:
Code:
Option Explicit
Public Sub MakeSelectionGetData()
Dim ie As New InternetExplorer
Const url = "http://mis.twse.com.tw/stock/fibest.jsp?stock=2402&lang=en_us"
Application.ScreenUpdating = False
With ie
.Visible = True
.navigate url
While .Busy Or .readyState < 4: DoEvents: Wend
Application.Wait Now + TimeSerial(0, 0, 6)
Dim nTable As HTMLTable
Set nTable = .document.getElementById("hor-minimalist-tb")
Dim Headers()
Headers = Array("Best Bid Volume", "Best Bid Price", "Best Ask Price", "Best Ask Volume")
Dim TR As Object, TD As Object, r As Long, c As Long
With ActiveSheet
r = 2
c = 1
Dim TR_col As Object, TD_col As Object
Set TR_col = nTable.getElementsByTagName("TR")
.Range("A1").Resize(1, UBound(Headers) + 1) = Headers
For Each TR In TR_col
Set TD_col = TR.getElementsByTagName("TD")
For Each TD In TD_col
.Cells(r, c) = TD.innerText
c = c + 1
Next
c = 1
r = r + 1
Next
End With
.Quit
End With
Application.ScreenUpdating = True
End Sub
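If you would rather try the Selenium route mentioned above, a minimal sketch using the SeleniumBasic wrapper might look like the following (this assumes SeleniumBasic and a matching browser driver are installed; the element id is the same one used in the IE version, and the fixed wait is just a placeholder):
'VBE > Tools > References: Selenium Type Library (SeleniumBasic)
Public Sub GetDataWithSelenium()
    Dim d As New Selenium.WebDriver
    Dim tbl As Selenium.WebElement
    d.Start "chrome"
    d.Get "http://mis.twse.com.tw/stock/fibest.jsp?stock=2402&lang=en_us"
    d.Wait 6000 'allow the delayed table to populate
    Set tbl = d.FindElementById("hor-minimalist-tb")
    Debug.Print tbl.Text 'or loop tbl.FindElementsByTag("tr") to write rows to cells
    d.Quit
End Sub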
