Parsing HTML with VBA

I am trying to pull data from around 500 URLs of a website. All the pages have the same structure. I am having trouble understanding the HTML of this particular site:
https://www.coworker.com/s-f/6033/united-states_hawaii_honolulu_impact-hub-honolulu
I want to extract the Name, Address, Tel and Website from each page. My current code:
Sub GetData()
    Dim wb As Workbook, wsSheet As Worksheet, Rows As Long
    Dim links As Variant, IE As Object, link As Variant
    Set wb = ThisWorkbook
    Set wsSheet = wb.Sheets("Sheet1")
    Set IE = New InternetExplorer
    Rows = wsSheet.Cells(wsSheet.Rows.Count, "A").End(xlUp).Row
    links = wsSheet.Range("A1:A" & Rows)
    With IE
        .Visible = True
        For Each link In links
            .navigate link
            While .Busy Or .readyState <> 4: DoEvents: Wend
        Next
        .Quit
    End With
End Sub

Here you go. Without more links to test against, this is fragile: it relies heavily on the styling being consistent across pages.
XHR, looping the link list:
Option Explicit
Public Sub GetInfo()
    Dim wsSheet As Worksheet, Rows As Long, links(), link As Long, wb As Workbook, html As HTMLDocument
    Set wb = ThisWorkbook: Set wsSheet = wb.Sheets("Sheet1")
    Application.ScreenUpdating = False
    With wsSheet
        Rows = .Cells(.Rows.Count, "A").End(xlUp).Row
        If Rows = 1 Then                          'force a 2-D array even for a single link
            ReDim links(1 To 1, 1 To 1)
            links(1, 1) = wsSheet.Range("A1")
        Else
            links = wsSheet.Range("A1:A" & Rows).Value
        End If
        Dim r As Long
        For link = LBound(links, 1) To UBound(links, 1)
            r = r + 1
            Set html = GetHTML(links(link, 1))
            On Error Resume Next                  'skip writes where a selector finds nothing
            Dim aNodeList As Object: Set aNodeList = html.querySelectorAll(".col-xs-12.pade_none.muchroom_mail")
            .Cells(r, 2) = "Name: " & html.querySelector(".col-sm-9.col-md-9.col-xs-12.pade_none.kohub_space_headings h2").innerText
            .Cells(r, 3) = "Address: " & aNodeList.item(0).innerText
            .Cells(r, 4) = "Tel: " & aNodeList.item(1).innerText
            .Cells(r, 5) = "Website: " & html.querySelector(".website-link-text a[href]").getAttribute("href")
            On Error GoTo 0
        Next link
    End With
    Application.ScreenUpdating = True
End Sub
Public Function GetHTML(ByVal url As String) As HTMLDocument
    Dim sResponse As String, html As New HTMLDocument
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", url, False
        .send
        sResponse = StrConv(.responseBody, vbUnicode)
    End With
    sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))  'trim everything before the doctype
    html.body.innerHTML = sResponse
    Set GetHTML = html
End Function
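Since the styling is fragile, a missing element returns Nothing, and the selector calls above raise runtime error 91, which On Error Resume Next merely hides. A small guard helper is an alternative; this is a sketch, and SafeText is an illustrative name, not part of the original answer:
Public Function SafeText(ByVal doc As HTMLDocument, ByVal css As String) As String
    'Returns "" when the selector matches nothing, instead of raising error 91.
    Dim node As Object
    Set node = doc.querySelector(css)
    If Not node Is Nothing Then SafeText = node.innerText
End Function
Usage would then be, for example: .Cells(r, 2) = "Name: " & SafeText(html, ".col-sm-9.col-md-9.col-xs-12.pade_none.kohub_space_headings h2").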
References (VBE > Tools > References):
Microsoft HTML Object Library
Internet Explorer:
Option Explicit
Public Sub GetInfo()
    Dim wsSheet As Worksheet, Rows As Long, links(), link As Long, wb As Workbook, html As HTMLDocument, ie As InternetExplorer
    Set wb = ThisWorkbook: Set wsSheet = wb.Sheets("Sheet1")
    Application.ScreenUpdating = False
    With wsSheet
        Rows = .Cells(.Rows.Count, "A").End(xlUp).Row
        If Rows = 1 Then                          'force a 2-D array even for a single link
            ReDim links(1 To 1, 1 To 1)
            links(1, 1) = wsSheet.Range("A1")
        Else
            links = wsSheet.Range("A1:A" & Rows).Value
        End If
        Dim r As Long
        Set ie = New InternetExplorer
        ie.Visible = True
        For link = LBound(links, 1) To UBound(links, 1)
            ie.navigate links(link, 1)
            While ie.Busy Or ie.readyState < 4: DoEvents: Wend
            ' Application.Wait Now + TimeSerial(0, 0, 10)
            On Error Resume Next
            r = r + 1: Set html = ie.document
            .Cells(r, 2) = "Name: " & html.querySelector(".col-sm-9.col-md-9.col-xs-12.pade_none.kohub_space_headings h2").innerText
            .Cells(r, 3) = "Address: " & html.querySelector(".col-xs-12.pade_none.muchroom_mail").innerText
            .Cells(r, 4) = "Tel: " & html.querySelector(".fa.fa-phone.fa-rotate-270 ~ a").innerText
            .Cells(r, 5) = "Website: " & html.querySelector(".website-link-text a[href]").getAttribute("href")
            On Error GoTo 0
        Next link
        ie.Quit
    End With
    Application.ScreenUpdating = True
End Sub
References (VBE > Tools > References):
Microsoft HTML Object Library
Microsoft Internet Controls
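A note on the Tel selector: ".fa.fa-phone.fa-rotate-270 ~ a" uses the CSS general sibling combinator (~), which matches an <a> element that follows the phone-icon element under the same parent. A guarded version of that lookup (a sketch) avoids a runtime error when a page has no phone number:
Dim tel As Object
Set tel = html.querySelector(".fa.fa-phone.fa-rotate-270 ~ a")  'anchor following the phone icon
If Not tel Is Nothing Then Debug.Print tel.innerText            'only read when a match exists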

Related

Error Handling on VBA Excel (Web Scraper)

This is the program I created. The goal is to visit each link in a specific cell range and get the "a href" of each listed link.
I used a list of links, but one particular link ends with .pdf, and from there I get a type mismatch. Is there a way to make my program continue and just skip the error it gets from a specific link?
This is the link that causes the error: https://ir-web-assets-v.s3.amazonaws.com/uploads/nuggets/5d40644eafe17554cf969aab/Islands_Locals_Program_Guest_FAQ.pdf
Sub extensiveScrape()
    Dim extractedLinks As Range         'Links taken from RUN
    Dim urls As String                  'Links taken from Extensive Search
    Dim appIE As Object
    Dim LastRow As Long                 'Number of rows
    Dim rCell As Range
    Dim rRng As Range
    Dim sht As Worksheet
    Dim html2 As HTMLDocument
    Dim itemEle As Object
    Dim linkurl As Object
    Dim y As Integer
    Application.ScreenUpdating = False
    Set appIE = CreateObject("InternetExplorer.Application")
    Set sht = ThisWorkbook.Worksheets("results")
    LastRow = sht.Cells(sht.Rows.Count, "A").End(xlUp).Row
    Set rRng = sht.Range("A1:A" & LastRow)
    For Each rCell In rRng.Cells
        With appIE
            .navigate rCell.Value
            .Visible = True
        End With
        Do While appIE.readyState <> 4: Wait 5: Loop
        Application.StatusBar = "Scraping Extensively..."
        DoEvents
        Set html2 = appIE.document
        Set itemEle = html2.getElementsByTagName("a")
        y = 1
        For Each linkurl In itemEle
            Sheets("results").Range("B" & y).Value = linkurl
            y = y + 1
        Next
        'rCell.Offset(0, 1).Value = itemEle
    Next rCell
    appIE.Quit
    Set appIE = Nothing
    Application.StatusBar = ""
    Application.ScreenUpdating = True
End Sub
Private Sub Wait(ByVal nSec As Long)
    nSec = nSec + Timer
    While nSec > Timer
        DoEvents
    Wend
End Sub
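For reference, one way to handle this (a sketch, not from the original thread): filter out non-HTML targets up front, and scope an error handler to each link so a failure does not stop the loop. The label name is illustrative:
For Each rCell In rRng.Cells
    If LCase$(rCell.Value) Like "*.pdf" Then GoTo NextLink  'not an HTML page - skip it
    On Error Resume Next                                    'swallow errors for this link only
    appIE.navigate rCell.Value
    Do While appIE.readyState <> 4: DoEvents: Loop
    Set html2 = appIE.document
    'scrape as before...
    On Error GoTo 0
NextLink:
Next rCell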

How to avoid duplicate information from HTML with .querySelectorAll in VBA?

I am trying to collect data from a website that stores information about vehicle accidents in the US. For this I need to loop through all the cases on the website and store the tables in an Excel sheet, so that I can reformat the data into a CSV of all listed accidents.
I got most of the code from the user "QHarr" (thank you again), and so far it works fine and collects all the data with .querySelectorAll("table"). Unfortunately, the code takes the same information twice in specific situations. I think the reason is that some tables are nested within other tables, so the nested tables are copied to the sheet twice (sometimes even in a different format). I tried to work around this by adding If conditions that check for certain keywords, but did not succeed.
Here is the code:
Option Explicit
Public Sub GetTables()
    Dim sResponse As String, html As HTMLDocument, clipboard As Object, ws As Worksheet
    Dim initialLinksURL As String, i As Long, j As Long, newURL As String
    Set ws = ThisWorkbook.Worksheets(1)
    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
    Set html = New HTMLDocument
    initialLinksURL = "https://crashviewer.nhtsa.dot.gov/LegacyCDS/Search"
    'Application.ScreenUpdating = False
    Dim ie As Object, caseLinks As Object
    Set ie = CreateObject("InternetExplorer.Application")
    With ie
        .Visible = False
        .Navigate2 initialLinksURL
        While .Busy Or .readyState < 4: DoEvents: Wend
        .document.getElementById("btnSubmit1").Click
        While .Busy Or .readyState < 4: DoEvents: Wend
        Set caseLinks = .document.querySelectorAll("[href*='CaseID=']")
        Dim linksAndIds()
        ReDim linksAndIds(1 To caseLinks.Length, 1 To 2)
        For i = 0 To caseLinks.Length - 1
            linksAndIds(i + 1, 1) = caseLinks.Item(i)
            linksAndIds(i + 1, 2) = Replace$(caseLinks.Item(i), "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?xsl=main.xsl&CaseID=", vbNullString)
        Next
        For i = LBound(linksAndIds, 1) To UBound(linksAndIds, 1)
            newURL = "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewText&CaseID=" & linksAndIds(i, 2) & "&xsl=textonly.xsl&websrc=false" '"https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewPage&xsl=Case.xsl&tab=Crash&form=CaseForm&baseNode=&vehnum=-1&occnum=-1&pos=-1&pos2=-1&websrc=true&title=Crash%20Overview%20-%20Summary&caseid=" & linksAndIds(i, 2) & "&year=&fullimage=false"
            .Navigate2 newURL
            While .Busy Or .readyState < 4: DoEvents: Wend
            Dim tables As Object
            Set tables = .document.querySelectorAll("table")
            For j = 0 To tables.Length - 1
                clipboard.SetText tables.Item(j).outerHTML
                If Not CBool(InStr(tables.Item(j).outerHTML, "Scene Photos")) And Not CBool(InStr(tables.Item(j).outerHTML, "Image ID: ")) _
                   And Not CBool(InStr(tables.Item(j).outerHTML, "Braint")) _
                   And UBound(Split(tables.Item(j).outerHTML, "subtable")) <= 1 Then 'no images and avoid duplicate tables
                    clipboard.PutInClipboard
                    ws.Cells(LastRow(ws) + 2, 1).PasteSpecial
                End If
            Next
        Next
        .Quit
    End With
    'Application.ScreenUpdating = True
End Sub
'https://www.rondebruin.nl/win/s9/win005.htm
Public Function LastRow(ByVal sh As Worksheet) As Long
    On Error Resume Next
    LastRow = sh.Cells.Find(What:="*", _
                            After:=sh.Range("A1"), _
                            Lookat:=xlPart, _
                            LookIn:=xlFormulas, _
                            SearchOrder:=xlByRows, _
                            SearchDirection:=xlPrevious, _
                            MatchCase:=False).Row
    On Error GoTo 0
End Function
I was thinking about getting the deepest HTML element/child, but I could not figure out how to do this, or whether it would even solve my problem.
Thank you ever so much.
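For reference, one way to act on that idea (a sketch, not from the original thread): treat a table as a duplicate when it has a <table> ancestor, since its content already arrives inside the outer table. IsNestedTable is an illustrative name; MSHTML elements expose parentElement and tagName:
Public Function IsNestedTable(ByVal tbl As Object) As Boolean
    'Walk up the parent chain; True when any ancestor is itself a table.
    Dim parent As Object
    Set parent = tbl.parentElement
    Do While Not parent Is Nothing
        If UCase$(parent.tagName) = "TABLE" Then
            IsNestedTable = True
            Exit Function
        End If
        Set parent = parent.parentElement
    Loop
End Function
In the loop over tables, writing out only when Not IsNestedTable(tables.Item(j)) would then keep just the outermost copy of each table.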
We can work to improve this, but I am not seeing where the duplicate comes in. If the data occurs at constant indices across pages, you can restrict the tables you write out, for example by using an array holding the indices of interest:
Option Explicit
Public Sub GetTables()
    Dim sResponse As String, html As HTMLDocument, clipboard As Object, ws As Worksheet
    Dim initialLinksURL As String, i As Long, j As Long, newURL As String
    Set ws = ThisWorkbook.Worksheets(1)
    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
    Set html = New HTMLDocument
    initialLinksURL = "https://crashviewer.nhtsa.dot.gov/LegacyCDS/Search"
    'Application.ScreenUpdating = False
    Dim ie As Object, caseLinks As Object
    Set ie = CreateObject("InternetExplorer.Application")
    With ie
        .Visible = True
        .Navigate2 initialLinksURL
        While .Busy Or .readyState < 4: DoEvents: Wend
        .document.getElementById("btnSubmit1").Click
        While .Busy Or .readyState < 4: DoEvents: Wend
        Set caseLinks = .document.querySelectorAll("[href*='CaseID=']")
        Dim linksAndIds()
        ReDim linksAndIds(1 To caseLinks.Length, 1 To 2)
        For i = 0 To caseLinks.Length - 1
            linksAndIds(i + 1, 1) = caseLinks.item(i)
            linksAndIds(i + 1, 2) = Replace$(caseLinks.item(i), "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?xsl=main.xsl&CaseID=", vbNullString)
        Next
        For i = LBound(linksAndIds, 1) To UBound(linksAndIds, 1)
            newURL = "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewText&CaseID=" & linksAndIds(i, 2) & "&xsl=textonly.xsl&websrc=false" '"https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewPage&xsl=Case.xsl&tab=Crash&form=CaseForm&baseNode=&vehnum=-1&occnum=-1&pos=-1&pos2=-1&websrc=true&title=Crash%20Overview%20-%20Summary&caseid=" & linksAndIds(i, 2) & "&year=&fullimage=false"
            .Navigate2 newURL
            While .Busy Or .readyState < 4: DoEvents: Wend
            Dim tables As Object
            Set tables = .document.querySelectorAll("table")
            Dim arr()
            arr = Array(0, 4, 5, 6, 7, 8, 9, 10, 11)
            For j = LBound(arr) To UBound(arr)
                clipboard.SetText tables.item(arr(j)).outerHTML
                clipboard.PutInClipboard
                ws.Cells(LastRow(ws) + 2, 1).PasteSpecial
            Next
        Next
        .Quit
    End With
    'Application.ScreenUpdating = True
End Sub
'https://www.rondebruin.nl/win/s9/win005.htm
Public Function LastRow(ByVal sh As Worksheet) As Long
    On Error Resume Next
    LastRow = sh.Cells.Find(What:="*", _
                            After:=sh.Range("A1"), _
                            Lookat:=xlPart, _
                            LookIn:=xlFormulas, _
                            SearchOrder:=xlByRows, _
                            SearchDirection:=xlPrevious, _
                            MatchCase:=False).Row
    On Error GoTo 0
End Function
Debugging script:
To aid debugging of the duplicates, and for ease of reading, the following script is amended to write each case out to a new worksheet:
Option Explicit
Public Sub GetTables()
    Dim html As HTMLDocument, clipboard As Object, ws As Worksheet
    Dim initialLinksURL As String, i As Long, j As Long, newURL As String
    Set ws = ThisWorkbook.Worksheets(1)
    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
    Set html = New HTMLDocument
    initialLinksURL = "https://crashviewer.nhtsa.dot.gov/LegacyCDS/Search"
    Application.ScreenUpdating = False
    Dim ie As Object, caseLinks As Object
    Set ie = CreateObject("InternetExplorer.Application")
    With ie
        .Visible = True
        .Navigate2 initialLinksURL
        While .Busy Or .readyState < 4: DoEvents: Wend
        .document.getElementById("btnSubmit1").Click
        While .Busy Or .readyState < 4: DoEvents: Wend
        Set caseLinks = .document.querySelectorAll("[href*='CaseID=']")
        Dim linksAndIds()
        ReDim linksAndIds(1 To caseLinks.Length, 1 To 2)
        For i = 0 To caseLinks.Length - 1
            linksAndIds(i + 1, 1) = caseLinks.item(i)
            linksAndIds(i + 1, 2) = Replace$(caseLinks.item(i), "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?xsl=main.xsl&CaseID=", vbNullString)
        Next
        For i = LBound(linksAndIds, 1) To UBound(linksAndIds, 1)
            newURL = "https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewText&CaseID=" & linksAndIds(i, 2) & "&xsl=textonly.xsl&websrc=false" '"https://crashviewer.nhtsa.dot.gov/nass-cds/CaseForm.aspx?ViewPage&xsl=Case.xsl&tab=Crash&form=CaseForm&baseNode=&vehnum=-1&occnum=-1&pos=-1&pos2=-1&websrc=true&title=Crash%20Overview%20-%20Summary&caseid=" & linksAndIds(i, 2) & "&year=&fullimage=false"
            .Navigate2 newURL
            While .Busy Or .readyState < 4: DoEvents: Wend
            Dim tables As Object, arr()
            Set tables = .document.querySelectorAll("table")
            arr = Array(0, 4, 5, 6, 7, 8, 9, 10, 11)
            Set ws = ThisWorkbook.Worksheets.Add
            ws.Name = linksAndIds(i, 2)
            For j = LBound(arr) To UBound(arr)
                clipboard.SetText tables.item(arr(j)).outerHTML
                clipboard.PutInClipboard
                ws.Cells(LastRow(ws) + 2, 1).PasteSpecial
            Next
        Next
        .Quit
    End With
    Application.ScreenUpdating = True
End Sub
'https://www.rondebruin.nl/win/s9/win005.htm
Public Function LastRow(ByVal sh As Worksheet) As Long
    On Error Resume Next
    LastRow = sh.Cells.Find(What:="*", _
                            After:=sh.Range("A1"), _
                            Lookat:=xlPart, _
                            LookIn:=xlFormulas, _
                            SearchOrder:=xlByRows, _
                            SearchDirection:=xlPrevious, _
                            MatchCase:=False).Row
    On Error GoTo 0
End Function

Retrieving all Excel file links from a webpage

I'm trying to get all the downloadable Excel file links from the website, but I'm having difficulty. Please help guide me. Thanks.
Sub TYEX()
    Dim internet As Object
    Dim internetdata As Object
    Dim div_result As Object
    Dim header_links As Object
    Dim link As Object
    Dim h As Object
    Dim URL As String
    Set internet = CreateObject("InternetExplorer.Application")
    internet.Visible = True
    URL = "https://www.jpx.co.jp/markets/public/short-selling/index.html"
    internet.Navigate URL
    Do Until internet.ReadyState >= 4
        DoEvents
    Loop
    Application.Wait Now + TimeSerial(0, 0, 5)
    Set internetdata = internet.Document
    Set div_result = internetdata.getElementById("readArea")
    Set header_links = div_result.getElementsByTagName("td")
    For Each h In header_links
        Set link = h.ChildNodes.item(0)
        Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = link.href
    Next
    MsgBox "done"
End Sub
You had the idea down correctly, but here's a different approach:
Sub TYEX()
    Dim ie As Object
    Set ie = CreateObject("InternetExplorer.Application")
    With ie
        .navigate "https://www.jpx.co.jp/markets/public/short-selling/index.html"
        .Visible = True
        Do While .Busy Or .readyState < 4
            DoEvents
        Loop
        Dim doc As Object, tbl As Object
        Set doc = .document
        Set tbl = doc.getElementsByClassName("component-normal-table")(0).Children(0)
        Dim r As Long, xlsArr(), a As Object
        With tbl.Rows
            ReDim xlsArr(1 To .Length - 1)
            For r = 1 To .Length - 1 ' 0 is the table header
                xlsArr(r) = .Item(r).Children(1).innerHTML
            Next r
        End With
        With CreateObject("VBScript.RegExp")
            .Pattern = "<a href=""(\/markets.*?\.xls)"
            For r = 1 To UBound(xlsArr)
                xlsArr(r) = "https://www.jpx.co.jp" & .Execute(xlsArr(r))(0).SubMatches(0)
                Debug.Print xlsArr(r)
            Next
        End With
        .Quit 'close IE once the links are extracted
    End With
    'Add to sheet
    Dim ws As Worksheet, rng As Range
    Set ws = ThisWorkbook.Worksheets(1)
    With ws
        Set rng = .Range(.Cells(NextRow(ws), 1), .Cells(NextRow(ws) + UBound(xlsArr) - 1, 1))
        rng.Value = Application.Transpose(xlsArr)
    End With
End Sub
Public Function NextRow(ByVal ws As Worksheet, Optional ByVal col As Variant = 1) As Long
    With ws
        NextRow = .Cells(.Rows.Count, col).End(xlUp).Row + 1
    End With
End Function
Breaking Down the Code
This loops the HTML table rows. We start at 1 because row 0 is the table header.
With tbl.Rows
    ReDim xlsArr(1 To .Length - 1)
    For r = 1 To .Length - 1 ' 0 is the table header
        xlsArr(r) = .Item(r).Children(1).innerHTML
    Next r
End With
This uses a regular expression to extract the URL from the innerHTML property. You can see how this particular regex works here: Regex101
With CreateObject("VBScript.RegExp")
    .Pattern = "<a href=""(\/markets.*?\.xls)"
    For r = 1 To UBound(xlsArr)
        xlsArr(r) = "https://www.jpx.co.jp" & .Execute(xlsArr(r))(0).SubMatches(0)
        Debug.Print xlsArr(r)
    Next
End With
You size your range to match the array that holds the links, then write the array to the worksheet in one assignment. This is usually much faster than writing cells one by one.
'Add to sheet
Dim ws As Worksheet, rng As Range
Set ws = ThisWorkbook.Worksheets(1)
With ws
    Set rng = .Range(.Cells(NextRow(ws), 1), .Cells(NextRow(ws) + UBound(xlsArr) - 1, 1))
    rng.Value = Application.Transpose(xlsArr)
End With
You can use an attribute = value CSS selector with the $ operator to require that the href value ends with .xls, then use querySelectorAll to retrieve all matched results. CSS selectors are a fast and generally robust method.
Dim list As Object
Set list = ie.document.querySelectorAll("[href$='.xls']")
It is also much faster to use XMLHTTP rather than opening IE. Note that you can then pass these links to a function to perform a binary download, or to URLMon, for downloading.
Option Explicit
Public Sub Links()
    Dim sResponse As String, html As HTMLDocument, list As Object, i As Long
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "https://www.jpx.co.jp/markets/public/short-selling/index.html", False
        .setRequestHeader "If-Modified-Since", "Sat, 1 Jan 2000 00:00:00 GMT"
        .send
        sResponse = StrConv(.responseBody, vbUnicode)
    End With
    Set html = New HTMLDocument
    With html
        .body.innerHTML = sResponse
        Set list = html.querySelectorAll("[href$='.xls']")
    End With
    For i = 0 To list.Length - 1
        Debug.Print Replace$(list.item(i), "about:", "https://www.jpx.co.jp")
    Next
End Sub
Example download function (though you could re-use your existing XMLHTTP object - this is just to illustrate):
Public Function DownloadFile(ByVal downloadFolder As String, ByVal downloadURL As String) As String
    Dim http As Object, tempArr As Variant
    Set http = CreateObject("WinHttp.WinHttpRequest.5.1")
    http.Open "GET", downloadURL, False
    http.send
    On Error GoTo errhand
    With CreateObject("ADODB.Stream")
        .Open
        .Type = 1                                  'binary
        .write http.responseBody
        tempArr = Split(downloadURL, "/")
        tempArr = tempArr(UBound(tempArr))         'file name = last segment of the URL
        .SaveToFile downloadFolder & tempArr, 2    'downloadFolder needs a trailing separator; 2 = overwrite, OK if no file modifications
        .Close
    End With
    DownloadFile = downloadFolder & tempArr
    Exit Function
errhand:
    If Err.Number <> 0 Then
        Debug.Print Err.Number, Err.Description
        MsgBox "Download failed"
    End If
    DownloadFile = vbNullString
End Function
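Alternatively, the URLMon route mentioned above can use the URLDownloadToFile API; a minimal sketch (64-bit VBA7 declaration shown, and DownloadViaURLMon is an illustrative wrapper name):
Private Declare PtrSafe Function URLDownloadToFile Lib "urlmon" Alias "URLDownloadToFileA" _
    (ByVal pCaller As LongPtr, ByVal szURL As String, ByVal szFileName As String, _
     ByVal dwReserved As Long, ByVal lpfnCB As LongPtr) As Long
Public Function DownloadViaURLMon(ByVal downloadURL As String, ByVal savePath As String) As Boolean
    'True when the API reports success (S_OK = 0); savePath is the full target file path.
    DownloadViaURLMon = (URLDownloadToFile(0, downloadURL, savePath, 0, 0) = 0)
End Function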
References (VBE > Tools > References):
Microsoft HTML Object Library

How to download a table from a web with VBA?

I'm trying to download a table from this page to Excel with VBA: http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx --> table "Panel General".
I can download the table "Panel Merval", but I couldn't download the other table.
I use this code for the table "Panel Merval":
Sub GetTable()
    Dim ieApp As InternetExplorer
    Dim ieDoc As Object
    Dim ieTable As Object
    Dim clip As DataObject
    'create a new instance of ie
    Set ieApp = New InternetExplorer
    'you don't need this, but it's good for debugging
    ieApp.Visible = False
    'now that we're in, go to the page we want
    ieApp.Navigate "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
    Do While ieApp.Busy: DoEvents: Loop
    Do Until ieApp.ReadyState = READYSTATE_COMPLETE: DoEvents: Loop
    'get the table based on the table's id
    Set ieDoc = ieApp.Document
    Set ieTable = ieDoc.all.Item("ctl00_ContentCentral_tcAcciones_tpMerval_grdMerval")
    'copy the table's html to the clipboard and paste to the sheet
    If Not ieTable Is Nothing Then
        Set clip = New DataObject
        clip.SetText "" & ieTable.outerHTML & ""
        clip.PutInClipboard
        Sheet1.Select
        Sheet1.Range("b2").Select
        Sheet1.PasteSpecial "Unicode Text"
    End If
    'close 'er up
    ieApp.Quit
    Set ieApp = Nothing
End Sub
or this one
Public Sub PanelLider()
    Dim oDom As Object: Set oDom = CreateObject("htmlFile")
    Dim x As Long, y As Long
    Dim oRow As Object, oCell As Object
    Dim vData As Variant
    Dim link As String
    link = "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
    y = 1: x = 1
    With CreateObject("msxml2.xmlhttp")
        .Open "GET", link, False
        .Send
        oDom.body.innerHTML = .ResponseText
    End With
    With oDom.getElementsByTagName("table")(27)
        Dim dataObj As Object
        Set dataObj = CreateObject("new:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
        dataObj.SetText "<table>" & .innerHTML & "</table>"
        dataObj.PutInClipboard
    End With
    Sheets(2).Paste Sheets(2).Cells(1, 1)
End Sub
Could someone help me to download the table "Panel General"?
Many thanks.
Selenium
The following gets the table using SeleniumBasic.
Option Explicit
Public Sub GetTable()
    Dim html As New HTMLDocument, htable As HTMLTable, headers()
    headers = Array("Especie", "Hora Cotización", "Cierre Anterior", "Precio Apertura", "Precio Máximo", _
                    "Precio Mínimo", "Último Precio", "Variación Diaria", "Volumen Efectivo ($)", "Volumen Nominal", "Precio Prom. Pon")
    With New ChromeDriver
        .get "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
        .FindElementById("__tab_ctl00_ContentCentral_tcAcciones_tpGeneral").Click
        Do
            DoEvents
        Loop While .FindElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral", timeout:=7000).Text = vbNullString
        html.body.innerHTML = .PageSource
        Set htable = html.getElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral")
        WriteTable2 htable, headers, 1, ActiveSheet
        .Quit
    End With
End Sub
Public Sub WriteTable2(ByVal htable As HTMLTable, ByRef headers As Variant, Optional ByVal startRow As Long = 1, Optional ByVal ws As Worksheet)
    If ws Is Nothing Then Set ws = ActiveSheet
    Dim tRow As Object, tCell As Object, tr As Object, td As Object, R As Long, c As Long, tBody As Object
    R = startRow: c = 1
    With ws   'write to the passed-in sheet (ActiveSheet here would ignore the ws argument)
        Set tRow = htable.getElementsByTagName("tr")
        For Each tr In tRow
            Set tCell = tr.getElementsByTagName("td")
            For Each td In tCell
                .Cells(R, c).Value = td.innerText
                c = c + 1
            Next td
            R = R + 1: c = 1
        Next tr
        .Cells(1, 1).Resize(1, UBound(headers) + 1) = headers
    End With
End Sub
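Writing cell-by-cell is slow for large tables; consistent with the array-write advice earlier in this document, a variant (a sketch, assuming a uniform column count across rows; WriteTableArr is an illustrative name) collects the values first and writes them in a single assignment:
Public Sub WriteTableArr(ByVal htable As HTMLTable, Optional ByVal startRow As Long = 1, Optional ByVal ws As Worksheet)
    If ws Is Nothing Then Set ws = ActiveSheet
    Dim trs As Object, tds As Object, r As Long, c As Long, arr() As Variant, cols As Long
    Set trs = htable.getElementsByTagName("tr")
    If trs.Length = 0 Then Exit Sub
    cols = trs.Item(0).getElementsByTagName("td").Length       'assumes every row has this many cells
    If cols = 0 Then Exit Sub
    ReDim arr(1 To trs.Length, 1 To cols)
    For r = 1 To trs.Length
        Set tds = trs.Item(r - 1).getElementsByTagName("td")
        For c = 1 To cols
            If c <= tds.Length Then arr(r, c) = tds.Item(c - 1).innerText
        Next c
    Next r
    ws.Cells(startRow, 1).Resize(trs.Length, cols).Value = arr 'one write instead of R*C writes
End Sub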
References (VBE > Tools > References):
Microsoft HTML Object Library
Selenium Type Library
With IE (Using WriteTable2 sub from above):
Option Explicit
Public Sub GetInfo()
    Dim ie As New InternetExplorer, html As HTMLDocument, hTable As HTMLTable, headers(), a As Object
    headers = Array("Especie", "Hora Cotización", "Cierre Anterior", "Precio Apertura", "Precio Máximo", _
                    "Precio Mínimo", "Último Precio", "Variación Diaria", "Volumen Efectivo ($)", "Volumen Nominal", "Precio Prom. Pon")
    Application.ScreenUpdating = False
    With ie
        .Visible = True
        .navigate "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
        While .Busy Or .readyState < 4: DoEvents: Wend
        .document.getElementById("__tab_ctl00_ContentCentral_tcAcciones_tpGeneral").Click
        Do
            DoEvents
            On Error Resume Next
            Set hTable = .document.getElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral")
            On Error GoTo 0
        Loop While hTable Is Nothing
        WriteTable2 hTable, headers, 1, ActiveSheet
        .Quit '<== Remember to quit application
    End With
    Application.ScreenUpdating = True
End Sub
References (VBE > Tools > References):
Microsoft HTML Object Library
Microsoft Internet Controls

Web-scraping from Excel List of PDGA Numbers using VBA

I have a list of numbers (PDGA Numbers) in MS Excel. I would like to automatically search the PDGA website (https://www.pdga.com/players/) for each number in the list and automatically paste the player's location next to the corresponding PDGA Number. Currently, I am able to search a number and paste the location individually, but not for the entire list.
First I select an excel cell and 'Define Name' as PDGA, and another as Location.
https://imgur.com/AcGtuX8
Then I basically followed this YouTube video. https://www.youtube.com/watch?v=7sZRcaaAVbg
And ultimately got this VBA code to work (make sure the proper VBA references are checked).
https://imgur.com/a/OYSM7Am
Private Sub Worksheet_Change(ByVal Target As Range)
    If Target.Column = Range("PDGA").Column Then
        Dim IE As New InternetExplorer
        IE.Visible = False
        IE.navigate "https://www.pdga.com/player/" & Range("PDGA").Value
        Do
            DoEvents
        Loop Until IE.readyState = READYSTATE_COMPLETE
        Dim Doc As HTMLDocument
        Set Doc = IE.document
        Dim sDD As String
        sDD = Trim(Doc.getElementsByTagName("li")(127).outerText)
        Range("Location").Value = sDD
    End If
End Sub
I think I need some kind of For Each loop, but I'm not sure. It should look like this when completed.
https://imgur.com/a/qOiW4JJ
Thanks in advance for any help.
If you have a specific list of players, you can loop it and issue XHR requests to get the info. Here the PDGA numbers are held in an array, which is looped:
playerPDGA = Array(1, 5, 23, 46, 789, 567)
Code:
Option Explicit
Public Sub GetInfo()
    Dim sResponse As String, html As New HTMLDocument, playerPDGA(), results(), i As Long
    playerPDGA = Array(1, 5, 23, 46, 789, 567)
    ReDim results(0 To UBound(playerPDGA), 0 To 1)
    With CreateObject("MSXML2.XMLHTTP")
        For i = LBound(playerPDGA) To UBound(playerPDGA)
            .Open "GET", "https://www.pdga.com/player/" & playerPDGA(i), False
            .send
            sResponse = StrConv(.responseBody, vbUnicode)
            sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))
            With html
                .body.innerHTML = sResponse
                results(i, 0) = .querySelector(".pane-content > h1").innerText
                results(i, 1) = .querySelector(".location").innerText
            End With
        Next i
    End With
    ActiveSheet.Cells(1, 1).Resize(UBound(results, 1) + 1, UBound(results, 2) + 1) = results
End Sub
For any page listing players:
Option Explicit
Public Sub GetInfo()
    Dim sResponse As String, html As New HTMLDocument
    Application.ScreenUpdating = False
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "https://www.pdga.com/players/", False
        .send
        sResponse = StrConv(.responseBody, vbUnicode)
    End With
    sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))
    Dim nameList As Object, cityList As Object, stateList As Object, countryList As Object, r As Long
    With html
        .body.innerHTML = sResponse
        Set nameList = .querySelectorAll(".views-field.views-field-nothing")
        Set cityList = .querySelectorAll(".views-field.views-field-City.city")
        Set stateList = .querySelectorAll(".views-field.views-field-StateProv.state")
        Set countryList = .querySelectorAll(".views-field.views-field-Country.country")
    End With
    With ActiveSheet
        Dim i As Long
        For i = 0 To nameList.Length - 1
            r = r + 1
            .Cells(r, 1) = nameList.item(i).innerText
            .Cells(r, 2) = Trim$(cityList.item(i).innerText & Chr$(32) & stateList.item(i).innerText & Chr$(32) & countryList.item(i).innerText)
        Next i
    End With
    Application.ScreenUpdating = True
End Sub
Reference (VBE > Tools > References):
Microsoft HTML Object Library
You can achieve your desired output in several ways. Here is one of them.
Sub FetchData()
    Dim Http As New XMLHTTP60, Html As New HTMLDocument
    Dim post As HTMLDivElement, Idic As New Scripting.Dictionary
    Dim key As Variant, N$, CT$, S$, C$, R&    '$ = String, & = Long type-declaration characters
    With Http
        .Open "GET", "https://www.pdga.com/players/", False
        .send
        Html.body.innerHTML = .responseText
    End With
    For Each post In Html.querySelector("table.views-table tbody").getElementsByTagName("tr")
        N = post.querySelector("a[title]").innerText
        CT = post.querySelector(".city").innerText
        S = post.querySelector(".state").innerText
        C = post.querySelector(".country").innerText
        Idic(N & "|" & CT & " " & S & " " & C) = 1  'dictionary keys de-duplicate the rows
    Next post
    For Each key In Idic.Keys
        R = R + 1: Cells(R, 1) = Split(key, "|")(0)
        Cells(R, 2) = Split(key, "|")(1)
    Next key
End Sub
References to add (VBE > Tools > References):
Microsoft XML, v6.0
Microsoft HTML Object Library
Microsoft Scripting Runtime
Sub test()
    Dim IE As New InternetExplorer
    Dim Doc As HTMLDocument
    Dim lastRow As Long, i As Long
    Dim sDD As String
    IE.Visible = False
    lastRow = Range("A" & Rows.Count).End(xlUp).Row
    For i = 1 To lastRow
        IE.navigate "https://www.pdga.com/player/" & Range("PDGA").Cells(i).Value
        Do
            DoEvents
        Loop Until IE.readyState = READYSTATE_COMPLETE
        Set Doc = IE.document
        sDD = Trim(Doc.getElementsByTagName("li")(127).outerText)
        Range("Location").Cells(i) = sDD
    Next
    IE.Quit                 'close the browser instance
    Set IE = Nothing
    Set Doc = Nothing
End Sub