I'm trying to download a table from this page into Excel with VBA: http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx --> table "Panel General".
I can download the table "Panel Merval", but I couldn't download the other one.
I use this code for the "Panel Merval" table:
Sub GetTable()
Dim ieApp As InternetExplorer
Dim ieDoc As Object
Dim ieTable As Object
Dim clip As DataObject
'create a new instance of ie
Set ieApp = New InternetExplorer
'you don’t need this, but it’s good for debugging
ieApp.Visible = False
'now that we’re in, go to the page we want
ieApp.Navigate "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
Do While ieApp.Busy: DoEvents: Loop
Do Until ieApp.ReadyState = READYSTATE_COMPLETE: DoEvents: Loop
'get the table based on the table’s id
Set ieDoc = ieApp.Document
Set ieTable = ieDoc.all.Item("ctl00_ContentCentral_tcAcciones_tpMerval_grdMerval")
'copy the table's HTML to the clipboard and paste to the sheet
If Not ieTable Is Nothing Then
Set clip = New DataObject
clip.SetText "" & ieTable.outerHTML & ""
clip.PutInClipboard
Sheet1.Select
Sheet1.Range("b2").Select
Sheet1.PasteSpecial "Unicode Text"
End If
'close 'er up
ieApp.Quit
Set ieApp = Nothing
End Sub
or this one:
Public Sub PanelLider()
Dim oDom As Object: Set oDom = CreateObject("htmlFile")
Dim x As Long, y As Long
Dim oRow As Object, oCell As Object
Dim vData As Variant
Dim link As String
link = "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
y = 1: x = 1
With CreateObject("msxml2.xmlhttp")
.Open "GET", link, False
.Send
oDom.body.innerHTML = .ResponseText
End With
With oDom.getElementsByTagName("table")(27)
Dim dataObj As Object
Set dataObj = CreateObject("new:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
dataObj.SetText "<table>" & .innerHTML & "</table>"
dataObj.PutInClipboard
End With
Sheets(2).Paste Sheets(2).Cells(1, 1)
End Sub
Could someone help me to download the table "Panel General"?
Many thanks.
Selenium
The following gets the table using Selenium Basic.
Option Explicit
Public Sub GetTable()
Dim html As New HTMLDocument, htable As HTMLTable, headers()
headers = Array("Especie", "Hora Cotización", "Cierre Anterior", "Precio Apertura", "Precio Máximo", _
"Precio Mínimo", "Último Precio", "Variación Diaria", "Volumen Efectivo ($)", "Volumen Nominal", "Precio Prom. Pon")
With New ChromeDriver
.get "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
.FindElementById("__tab_ctl00_ContentCentral_tcAcciones_tpGeneral").Click
Do
DoEvents
Loop While .FindElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral", timeout:=7000).Text = vbNullString
html.body.innerHTML = .PageSource
Set htable = html.getElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral")
WriteTable2 htable, headers, 1, ActiveSheet
.Quit
End With
End Sub
Public Sub WriteTable2(ByVal htable As HTMLTable, ByRef headers As Variant, Optional ByVal startRow As Long = 1, Optional ByVal ws As Worksheet)
If ws Is Nothing Then Set ws = ActiveSheet
Dim tRow As Object, tCell As Object, tr As Object, td As Object, R As Long, c As Long, tBody As Object
R = startRow: c = 1
With ws
Set tRow = htable.getElementsByTagName("tr")
For Each tr In tRow
Set tCell = tr.getElementsByTagName("td")
For Each td In tCell
.Cells(R, c).Value = td.innerText
c = c + 1
Next td
R = R + 1: c = 1
Next tr
.Cells(1, 1).Resize(1, UBound(headers) + 1) = headers
End With
End Sub
References:
HTML Object Library
Selenium Type Library
With IE (using the WriteTable2 sub from above):
Option Explicit
Public Sub GetInfo()
Dim ie As New InternetExplorer, html As HTMLDocument, hTable As HTMLTable, headers(), a As Object
headers = Array("Especie", "Hora Cotización", "Cierre Anterior", "Precio Apertura", "Precio Máximo", _
"Precio Mínimo", "Último Precio", "Variación Diaria", "Volumen Efectivo ($)", "Volumen Nominal", "Precio Prom. Pon")
Application.ScreenUpdating = False
With ie
.Visible = True
.navigate "http://www.merval.sba.com.ar/Vistas/Cotizaciones/Acciones.aspx"
While .Busy Or .readyState < 4: DoEvents: Wend
.document.getElementById("__tab_ctl00_ContentCentral_tcAcciones_tpGeneral").Click
Do
DoEvents
On Error Resume Next
Set hTable = .document.getElementById("ctl00_ContentCentral_tcAcciones_tpGeneral_dgrGeneral")
On Error GoTo 0
Loop While hTable Is Nothing
WriteTable2 hTable, headers, 1, ActiveSheet
.Quit '<== Remember to quit application
Application.ScreenUpdating = True
End With
End Sub
References:
Microsoft Internet Explorer Controls
Related
This is the program I created. The goal is to visit each link in a specific cell range and get the "a href" of each listed link.
I used a list of links, but there's a certain link that ends with .pdf, and from there I get a type mismatch. Is there a way to make my program continue and just skip the error it gets from a specific link?
This is the link that causes the error: https://ir-web-assets-v.s3.amazonaws.com/uploads/nuggets/5d40644eafe17554cf969aab/Islands_Locals_Program_Guest_FAQ.pdf
Sub extensiveScrape()
Dim extractedLinks As Range 'Links taken from RUN
Dim urls As String 'Links taken from Extensive Search
Dim appIE As Object
Dim LastRow As Long 'Number of rows
Dim rCell As Range
Dim rRng As Range
Dim html2 As HTMLDocument
Dim itemEle As Object
Dim linkurl As Object
Dim y As Integer
Dim sht As Worksheet
Application.ScreenUpdating = False
Set appIE = CreateObject("InternetExplorer.Application")
Set sht = ThisWorkbook.Worksheets("results")
LastRow = sht.Cells(sht.Rows.Count, "A").End(xlUp).Row
Set rRng = sht.Range("A1:A" & LastRow)
For Each rCell In rRng.Cells
With appIE
.navigate rCell.Value
.Visible = True
End With
Do While appIE.readyState <> 4: Wait 5: Loop
Application.StatusBar = "Scraping Extensively..."
DoEvents
Set html2 = appIE.document
Set itemEle = html2.getElementsByTagName("a")
y = 1
For Each linkurl In itemEle
Sheets("results").Range("B" & y).Value = linkurl
y = y + 1
Next
'rCell.Offset(0, 1).Value = itemEle
Next rCell
appIE.Quit
Set appIE = Nothing
Application.StatusBar = ""
Application.ScreenUpdating = True
End Sub
Private Sub Wait(ByVal nSec As Long)
nSec = nSec + Timer
While nSec > Timer
DoEvents
Wend
End Sub
I am testing the code below. I think this is very close, but I can't seem to log in to the site for some reason.
Sub Website_Login_Test()
Dim oHTML_Element As IHTMLElement
Dim sURL As String
On Error GoTo Err_Clear
sURL = "https://login.my_site_here.jsp?"
Set oBrowser = New InternetExplorer
oBrowser.Silent = True
oBrowser.timeout = 60
oBrowser.navigate sURL
oBrowser.Visible = True
Do
' Wait till the Browser is loaded
Loop Until oBrowser.readyState = READYSTATE_COMPLETE
Set HTMLDoc = oBrowser.document
For Each oHTML_Element In HTMLDoc
Debug.Print oHTML_Element
Next
HTMLDoc.all.UserId.Value = "my_id"
HTMLDoc.all.Password.Value = "my_pass"
For Each oHTML_Element In HTMLDoc.getElementsByTagName("Button")
Debug.Print oHTML_Element.Name
'oHTML_Element.Click: Exit For
'Debug.Print oHTML_Element.Type
'If oHTML_Element.Type = "submit" Then oHTML_Element.Click: Exit For
Next
' oBrowser.Refresh ' Refresh If Needed
Err_Clear:
If Err <> 0 Then
Err.Clear
Resume Next
End If
Call Test
End Sub
Sub Test()
Dim ie As Object, i As Long, strText As String
Dim doc As Object, hTable As Object, hBody As Object, hTR As Object, hTD As Object
Dim tb As Object, bb As Object, tr As Object, td As Object
Dim y As Long, z As Long, wb As Excel.Workbook, ws As Excel.Worksheet
Set wb = Excel.ActiveWorkbook
Set ws = wb.ActiveSheet
Set ie = CreateObject("InternetExplorer.Application")
ie.Visible = True
y = 1 'Column A in Excel
z = 1 'Row 1 in Excel
ie.navigate "https://after_login_move_to_page_for_scraping.jsp"
Do While ie.busy: DoEvents: Loop
Do While ie.readyState <> 4: DoEvents: Loop
Set doc = ie.document
Set hTable = doc.getElementsByTagName("table")
For Each tb In hTable
Set hBody = tb.getElementsByTagName("tbody")
For Each bb In hBody
Set hTR = bb.getElementsByTagName("tr")
For Each tr In hTR
Set hTD = tr.getElementsByTagName("td")
y = 1 ' Resets back to column A
For Each td In hTD
ws.Cells(z, y).Value = td.innertext
y = y + 1
Next td
DoEvents
z = z + 1
Next tr
Exit For
Next bb
Exit For
Next tb
End Sub
I can't log in to the site, so I can't do the scraping, but I think the code is pretty close. Here is the HTML for the id object, the password object and the button object. What am I doing wrong?
I think you must trigger the keypress event of the input fields. If there are other events you must trigger, have a look here to see how you can find them:
Automate IE via Excel to fill in a dropdown and continue
Sub WebsiteLogin()
Const url As String = "https://login.my_site_here.jsp"
Const userName As String = "Here Your LogIn Name"
Const passWord As String = "Here Your Password"
Dim ie As Object
Dim htmlDoc As Object
Dim nodeInputUserName As Object
Dim nodeInputPassWord As Object
'Initialize Internet Explorer, set visibility,
'call URL and wait until page is fully loaded
Set ie = CreateObject("InternetExplorer.Application")
ie.Visible = True
ie.navigate url
Do Until ie.readyState = 4: DoEvents: Loop
Set htmlDoc = ie.document
'Set the log in name
Set nodeInputUserName = htmlDoc.getElementById("USERID")
nodeInputUserName.Value = userName
Call TriggerEvent(htmlDoc, nodeInputUserName, "onkeypress")
'Set the password
Set nodeInputPassWord = htmlDoc.getElementById("PASSWORD")
nodeInputPassWord.Value = passWord
Call TriggerEvent(htmlDoc, nodeInputPassWord, "onkeypress")
'Click submit button
htmlDoc.querySelector("a[role='button']").Click
End Sub
This is the procedure to trigger events:
Private Sub TriggerEvent(htmlDocument As Object, htmlElementWithEvent As Object, eventType As String)
Dim theEvent As Object
htmlElementWithEvent.Focus
Set theEvent = htmlDocument.createEvent("HTMLEvents")
theEvent.initEvent eventType, True, False
htmlElementWithEvent.dispatchEvent theEvent
End Sub
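If the page listens for a different event, the same procedure can be reused by passing another event name. A minimal usage sketch, assuming a hypothetical "onchange" handler on the user name field from the code above:
Call TriggerEvent(htmlDoc, nodeInputUserName, "onchange")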
I'm trying to get all the downloadable Excel file links from the website, but I'm having difficulty. Please help guide me. Thanks.
Sub TYEX()
Dim internet As Object
Dim internetdata As Object
Dim div_result As Object
Dim header_links As Object
Dim link As Object
Dim URL As String
Set internet = CreateObject("InternetExplorer.Application")
internet.Visible = True
URL = "https://www.jpx.co.jp/markets/public/short-selling/index.html"
internet.Navigate URL
Do Until internet.ReadyState >= 4
DoEvents
Loop
Application.Wait Now + TimeSerial(0, 0, 5)
Set internetdata = internet.Document
Set div_result = internetdata.getElementById("readArea")
Set header_links = div_result.getElementsByTagName("td")
For Each h In header_links
Set link = h.ChildNodes.item(0)
Cells(Range("A" & Rows.Count).End(xlUp).Row + 1, 1) = link.href
Next
MsgBox "done"
End Sub
You had the right idea, but here's a different approach:
Sub TYEX()
Dim ie As Object
Set ie = CreateObject("InternetExplorer.Application")
With ie
.navigate "https://www.jpx.co.jp/markets/public/short-selling/index.html"
.Visible = True
Do While .Busy Or .readyState < 4
DoEvents
Loop
Dim doc As Object, tbl As Object
Set doc = .document
Set tbl = doc.getElementsByClassName("component-normal-table")(0).Children(0)
Dim r As Long, xlsArr(), a As Object
With tbl.Rows
ReDim xlsArr(1 To .Length - 1)
For r = 1 To .Length - 1 ' 0 is the table header
xlsArr(r) = .Item(r).Children(1).innerHTML
Next r
End With
With CreateObject("VBScript.RegExp")
.Pattern = "<a href=""(\/markets.*?\.xls)"
For r = 1 To UBound(xlsArr)
xlsArr(r) = "https://www.jpx.co.jp" & .Execute(xlsArr(r))(0).SubMatches(0)
Debug.Print xlsArr(r)
Next
End With
End With
'Add to sheet
Dim ws As Worksheet, rng As Range
Set ws = ThisWorkbook.Worksheets(1)
With ws
Set rng = .Range(.Cells(NextRow(ws), 1), .Cells(NextRow(ws) + UBound( _
xlsArr) - 1, 1))
rng.Value = Application.Transpose(xlsArr)
End With
End Sub
Public Function NextRow(ByVal ws As Worksheet, Optional ByVal col As Variant = 1) As Long
With ws
NextRow = .Cells(.Rows.Count, col).End(xlUp).Row + 1
End With
End Function
Breaking Down the Code
This loops through your HTML table rows. We start at 1 because index 0 is just the table header.
With tbl.Rows
ReDim xlsArr(1 To .Length - 1)
For r = 1 To .Length - 1 ' 0 is the table header
xlsArr(r) = .Item(r).Children(1).innerHTML
Next r
End With
This uses regular expressions to extract the URL from the innerHTML property. You can see how this particular regex works here: Regex101
With CreateObject("VBScript.RegExp")
.Pattern = "<a href=""(\/markets.*?\.xls)"
For r = 1 To UBound(xlsArr)
xlsArr(r) = "https://www.jpx.co.jp" & .Execute(xlsArr(r))(0).SubMatches(0)
Debug.Print xlsArr(r)
Next
End With
You size the range to match the array that contains the links, then write the array to the worksheet in one operation. This is usually much faster than writing cells one by one.
'Add to sheet
Dim ws As Worksheet, rng As Range
Set ws = ThisWorkbook.Worksheets(1)
With ws
Set rng = .Range(.Cells(NextRow(ws), 1), .Cells(NextRow(ws) + UBound( _
xlsArr) - 1, 1))
rng.Value = Application.Transpose(xlsArr)
End With
You can use an attribute = value CSS selector with the $ operator to say the href value must end with .xls. Then use querySelectorAll to retrieve all matched results. Using CSS selectors is a very fast and generally robust method.
Dim list As Object
Set list = ie.document.querySelectorAll("[href$='.xls']")
It is also much faster to use XMLHTTP rather than opening IE. Note that you can then pass these links to a function to perform a binary download, or to URLMon for downloading (a URLMon sketch follows the download function below).
Option Explicit
Public Sub Links()
Dim sResponse As String, html As HTMLDocument, list As Object, i As Long
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", "https://www.jpx.co.jp/markets/public/short-selling/index.html", False
.setRequestHeader "If-Modified-Since", "Sat, 1 Jan 2000 00:00:00 GMT"
.send
sResponse = StrConv(.responseBody, vbUnicode)
End With
Set html = New HTMLDocument
With html
.body.innerHTML = sResponse
Set list = html.querySelectorAll("[href$='.xls']")
End With
For i = 0 To list.Length - 1
Debug.Print Replace$(list.item(i), "about:", "https://www.jpx.co.jp")
Next
End Sub
Example download function (though you could re-use your existing XMLHTTP object - this is just to illustrate):
Public Function DownloadFile(ByVal downloadFolder As String, ByVal downloadURL As String) As String
Dim http As Object, tempArr As Variant
Set http = CreateObject("WinHttp.WinHttpRequest.5.1")
http.Open "GET", downloadURL, False
http.send
On Error GoTo errhand
With CreateObject("ADODB.Stream")
.Open
.Type = 1
.write http.responseBody
tempArr = Split(downloadURL, "/")
tempArr = tempArr(UBound(tempArr))
.SaveToFile downloadFolder & tempArr, 2 'include a trailing "/" when passing downloadFolder. 2 = adSaveCreateOverWrite, which is OK if no file modifications.
.Close
End With
DownloadFile = downloadFolder & tempArr
Exit Function
errhand:
If Err.Number <> 0 Then
Debug.Print Err.Number, Err.Description
MsgBox "Download failed"
End If
DownloadFile = vbNullString
End Function
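As an alternative to the ADODB.Stream approach, the URLMon route mentioned above can be sketched with the URLDownloadToFile API. A minimal sketch, assuming the Declare lines sit at the top of a standard module and that the file name is simply taken from the end of the URL:
#If VBA7 Then
Private Declare PtrSafe Function URLDownloadToFile Lib "urlmon" Alias "URLDownloadToFileA" _
    (ByVal pCaller As LongPtr, ByVal szURL As String, ByVal szFileName As String, _
     ByVal dwReserved As Long, ByVal lpfnCB As LongPtr) As Long
#Else
Private Declare Function URLDownloadToFile Lib "urlmon" Alias "URLDownloadToFileA" _
    (ByVal pCaller As Long, ByVal szURL As String, ByVal szFileName As String, _
     ByVal dwReserved As Long, ByVal lpfnCB As Long) As Long
#End If
Public Function DownloadViaURLMon(ByVal downloadFolder As String, ByVal downloadURL As String) As Boolean
    Dim parts As Variant
    'Use the last part of the URL as the file name
    parts = Split(downloadURL, "/")
    'URLDownloadToFile returns 0 (S_OK) on success
    DownloadViaURLMon = (URLDownloadToFile(0, downloadURL, downloadFolder & parts(UBound(parts)), 0, 0) = 0)
End Function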
References (VBE > Tools > References):
Microsoft HTML Object Library
I am trying to pull data from some 500 URLs of a website. All the pages have the same structure. I am having trouble understanding the HTML of this particular site:
https://www.coworker.com/s-f/6033/united-states_hawaii_honolulu_impact-hub-honolulu
I want to extract the Name, Address, Tel and website. My current code:
Sub GetData()
Dim wsSheet As Worksheet, Rows As Long, links As Variant, IE As Object, link As Variant
Set wb = ThisWorkbook
Set wsSheet = wb.Sheets("Sheet1")
Set IE = New InternetExplorer
Rows = wsSheet.Cells(wsSheet.Rows.Count, "A").End(xlUp).Row
links = wsSheet.Range("A1:A" & Rows)
With IE
.Visible = True
For Each link In links
.navigate (link)
While .Busy Or .readyState <> 4: DoEvents: Wend
Next
End With
End Sub
Here you go. Without more links to test with, this is very fragile; it relies heavily on consistent styling across pages.
XHR, looping the link list:
Option Explicit
Public Sub GetInfo()
Dim wsSheet As Worksheet, Rows As Long, links(), link As Long, wb As Workbook, html As HTMLDocument
Set wb = ThisWorkbook: Set wsSheet = wb.Sheets("Sheet1")
Application.ScreenUpdating = False
With wsSheet
Rows = .Cells(.Rows.Count, "A").End(xlUp).Row
If Rows = 1 Then
ReDim links(1 To 1, 1 To 1)
links(1, 1) = wsSheet.Range("A1")
Else
links = wsSheet.Range("A1:A" & Rows).Value
End If
Dim r As Long
For link = LBound(links, 1) To UBound(links, 1)
r = r + 1
Set html = GetHTML(links(link, 1))
On Error Resume Next
Dim aNodeList As Object: Set aNodeList = html.querySelectorAll(".col-xs-12.pade_none.muchroom_mail")
.Cells(r, 2) = "Name: " & html.querySelector(".col-sm-9.col-md-9.col-xs-12.pade_none.kohub_space_headings h2").innerText
.Cells(r, 3) = "Address: " & aNodeList.item(0).innerText
.Cells(r, 4) = "Tel: " & aNodeList.item(1).innerText
.Cells(r, 5) = "Website: " & html.querySelector(".website-link-text a[href]").getAttribute("href")
On Error GoTo 0
Next link
End With
Application.ScreenUpdating = True
End Sub
Public Function GetHTML(ByVal url As String) As HTMLDocument
Dim sResponse As String, html As New HTMLDocument
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", url, False
.send
sResponse = StrConv(.responseBody, vbUnicode)
End With
sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))
With html
.body.innerHTML = sResponse
End With
Set GetHTML = html
End Function
Output:
References (VBE>Tools>References):
HTML Object Library
Internet Explorer:
Option Explicit
Public Sub GetInfo()
Dim wsSheet As Worksheet, Rows As Long, links(), link As Long, wb As Workbook, html As HTMLDocument, ie As InternetExplorer
Set wb = ThisWorkbook: Set wsSheet = wb.Sheets("Sheet1")
Application.ScreenUpdating = False
With wsSheet
Rows = .Cells(.Rows.Count, "A").End(xlUp).Row
If Rows = 1 Then
ReDim links(1 To 1, 1 To 1)
links(1, 1) = wsSheet.Range("A1")
Else
links = wsSheet.Range("A1:A" & Rows).Value
End If
Dim r As Long
Set ie = New InternetExplorer
ie.Visible = True
For link = LBound(links, 1) To UBound(links, 1)
ie.navigate links(link, 1)
While ie.Busy Or ie.readyState < 4: DoEvents: Wend
' Application.Wait Now + TimeSerial(0, 0, 10)
On Error Resume Next
r = r + 1: Set html = ie.document
.Cells(r, 2) = "Name: " & html.querySelector(".col-sm-9.col-md-9.col-xs-12.pade_none.kohub_space_headings h2").innerText
.Cells(r, 3) = "Address: " & html.querySelector(".col-xs-12.pade_none.muchroom_mail").innerText
.Cells(r, 4) = "Tel: " & html.querySelector(".fa.fa-phone.fa-rotate-270 ~ a").innerText
.Cells(r, 5) = "Website: " & html.querySelector(".website-link-text a[href]").getAttribute("href")
On Error GoTo 0
Next link
ie.Quit
End With
Application.ScreenUpdating = True
End Sub
References (VBE>Tools>References):
HTML Object Library
Microsoft Internet Controls
I have a list of numbers (PDGA Numbers) in MS Excel. I would like to automatically search the PDGA website (https://www.pdga.com/players/) from the list and automatically paste the player's location next to the corresponding PDGA Number. Currently, I am able to search the number and paste the location individually, but not the entire list.
First I select an Excel cell and use 'Define Name' to name it PDGA, and another as Location.
https://imgur.com/AcGtuX8
Then I basically followed this YouTube video. https://www.youtube.com/watch?v=7sZRcaaAVbg
And ultimately got this VBA code to work. (Make sure the proper VBA References are checked)
https://imgur.com/a/OYSM7Am
Private Sub Worksheet_Change(ByVal Target As Range)
If Target.Column = Range("PDGA").Column Then
Dim IE As New InternetExplorer
IE.Visible = False
IE.navigate "https://www.pdga.com/player/" & Range("PDGA").Value
Do
DoEvents
Loop Until IE.readyState = READYSTATE_COMPLETE
Dim Doc As HTMLDocument
Set Doc = IE.document
Dim sDD As String
sDD = Trim(Doc.getElementsByTagName("li")(127).outerText)
Range("Location").Value = sDD
End If
End Sub
I think I need some For Each loop, but I'm not sure. It should look like this when completed.
https://imgur.com/a/qOiW4JJ
Thanks in advance for any help.
If you have a specific list of players, then you can loop and issue XHR requests to get the info. Here I have the PDGA# values in an array which is looped:
playerPDGA = Array(1, 5, 23, 46, 789, 567)
Code:
Option Explicit
Public Sub GetInfo()
Dim sResponse As String, html As New HTMLDocument, playerPDGA(), results(), i As Long
playerPDGA = Array(1, 5, 23, 46, 789, 567)
ReDim results(0 To UBound(playerPDGA), 0 To 1)
With CreateObject("MSXML2.XMLHTTP")
For i = LBound(playerPDGA) To UBound(playerPDGA)
.Open "GET", "https://www.pdga.com/player/" & playerPDGA(i), False
.send
sResponse = StrConv(.responseBody, vbUnicode)
sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))
With html
.body.innerHTML = sResponse
results(i, 0) = .querySelector(".pane-content > h1").innerText
results(i, 1) = .querySelector(".location").innerText
End With
Next i
End With
ActiveSheet.Cells(1, 1).Resize(UBound(results, 1) + 1, UBound(results, 2) + 1) = results
End Sub
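If the PDGA numbers are kept on the worksheet rather than hard-coded, the array can be filled from the sheet first. A minimal sketch, assuming the numbers sit in column A of the active sheet starting at row 1:
Dim lastRow As Long, i As Long
lastRow = ActiveSheet.Cells(ActiveSheet.Rows.Count, "A").End(xlUp).Row
ReDim playerPDGA(0 To lastRow - 1)
For i = 1 To lastRow
    playerPDGA(i - 1) = ActiveSheet.Cells(i, "A").Value
Next i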
For any page listing players:
Option Explicit
Public Sub GetInfo()
Dim sResponse As String, html As New HTMLDocument
Application.ScreenUpdating = False
With CreateObject("MSXML2.XMLHTTP")
.Open "GET", "https://www.pdga.com/players/", False
.send
sResponse = StrConv(.responseBody, vbUnicode)
End With
sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))
Dim nameList As Object, cityList As Object, stateList As Object, countryList As Object, r As Long
With html
.body.innerHTML = sResponse
Set nameList = .querySelectorAll(".views-field.views-field-nothing")
Set cityList = .querySelectorAll(".views-field.views-field-City.city")
Set stateList = .querySelectorAll(".views-field.views-field-StateProv.state")
Set countryList = .querySelectorAll(".views-field.views-field-Country.country")
End With
With ActiveSheet
Dim i As Long
For i = 0 To nameList.Length - 1
r = r + 1
.Cells(r, 1) = nameList.item(i).innerText
.Cells(r, 2) = Trim$(cityList.item(i).innerText & Chr$(32) & stateList.item(i).innerText & Chr$(32) & countryList.item(i).innerText)
Next i
End With
Application.ScreenUpdating = True
End Sub
Reference:
HTML Object Library
You can achieve your desired output in several ways. Here is one of them.
Sub FetchData()
Dim Http As New XMLHTTP60, Html As New HTMLDocument
Dim post As HTMLDivElement, Idic As New Scripting.Dictionary
Dim key As Variant, N$, CT$, S$, C$, R&
With Http
.Open "GET", "https://www.pdga.com/players/", False
.send
Html.body.innerHTML = .responseText
End With
For Each post In Html.querySelector("table.views-table tbody").getElementsByTagName("tr")
N = post.querySelector("a[title]").innerText
CT = post.querySelector(".city").innerText
S = post.querySelector(".state").innerText
C = post.querySelector(".country").innerText
Idic(N & "|" & CT & " " & S & " " & C) = 1
Next post
For Each key In Idic.Keys
R = R + 1: Cells(R, 1) = Split(key, "|")(0)
Cells(R, 2) = Split(key, "|")(1)
Next key
End Sub
References to add (VBE > Tools > References):
Microsoft XML, v6.0
Microsoft HTML Object Library
Microsoft Scripting Runtime
Sub test()
Dim IE As New InternetExplorer
Dim Doc As HTMLDocument
Dim lastRow As Long, i As Long
Dim sDD As String
IE.Visible = False
lastRow = Range("A" & Rows.Count).End(xlUp).Row
For i = 1 To lastRow
IE.navigate "https://www.pdga.com/player/" & Range("PDGA").Cells(i).Value
Do
DoEvents
Loop Until IE.readyState = READYSTATE_COMPLETE
Set Doc = IE.document
sDD = Trim(Doc.getElementsByTagName("li")(127).outerText)
Range("Location").Cells(i) = sDD
Next
Set IE = Nothing
Set Doc = Nothing
End Sub