Entries
Utility(Event,Mouse): Log Movement X,Y
Utility(txt): Basic Sentence Generator
Utility(Event,Keyboard): Logger
Utility(txt): Expanded Phrase Generator
Scrape(General|Multi): Paginated Crawler
Scrape(Sunbiz|Multi): Get Biz's By Zip
Utility(Time): Test Load Speed
Scrape(General|Single|NLP): Get Keyword Stats
Scrape(General|Single): Get Text Near Links
Utility(Event|Mouse): Press Down,Up, & Dragend
Utility(Number): Between Range
Utility(Event|Window): On Resize
Scrape(Youtube|Single): Crawl Channel Videos
Scrape(General|Multi): Tiny Link Crawler
Utility(Time): Day of Week Report
Utility(Number): Aspect Ratio Size Pairs
Utility(Data): Sort Object Properties
Scrape(General|Single): RiTa Sentences & Words
Scrape(General|Single): Investigate Element Layers
Scrape(NLP|Single): Using Compromise Plugins
Utility(General): Remote Script Loader
Scrape(General|Single|NLP): Compromise RiTa D3
Scrape(General|Multi|Node): Grouped Node Crawler
Scrape(Amazon|Multi): Crawl Product SERPs
Scrape(Amazon|Multi): Get Paginated Brands
Utility (Data): Download JSON in Browser
Utility (Data): JSON AutoTypes
Scrape(YouTube|Single): Video Page
Utility (Text): Make Page Editable
Utility (Text): Article Editor
Scrape(General|Single|Text): Get Text On Click
Utility (File): DnD File Parser (CSV,JSON,TXT)
Scrape(General|Single): Get Links (Examples)
Scrape(General|Single|Text): Get Sentences by Tag
Utility (File): JSON to CSV via D3.js
Scrape(General|Single): Auto Parse by Content Type
Scrape(General|Single): Get Paragraphs & Sentences
Scrape(Amazon|Multi): Get Reviews by ASIN
Scrape(General|Single): Download Images on Page
Utility(Event,Form): Custom Submit Function
Utility (Fetch): Basic API Post Request
Utility (Event,Form,Fetch): Form Data to API
Utility (Time): Async Delay
Utility (Time): Async Repeat Every N Secs
Scrape(coj): Crawl Property SERPs
Utility (Data): Promise Chain
Utility (Fetch): Examples - JSON,Text,HTML
Scrape(Amazon|Single): Product Review NLP
Utility (Nodejs): Streaming Collections
Scrape(Rate My Professor|Multi): Crawl Prof SERPs
Utility (Time): JS Timer
Utility (Text): Proper Case
Scrape(Thingiverse API): Get Things via Search API
Scrape(General|Single|Node): Get Node Attributes
Scrape(General|Single|Node): Node Attributes + Text
Scrape(Thesaurus): Get Words from SERPs
Scrape(Walmart): Crawl Product SERPs
Scrape(free3d): Crawl 3D Model SERPs
Scrape(Aliexpress|Single): Get Products From SERP
Scrape(simplify3d): Crawl Post SERPs
Scrape(Twitter): Crawl Post Feed (infinite scroll)
Scrape(DDuckGo|Single): Get Links from SERP
Scrape(General|Single): Get Tokens String Distance
Scrape(General|Single): Content Report
Scrape(General|Single|Node): Node Recon (CSV)
Utility (File): D3 JSON to CSV
Scrape(coj|Multi): Crawl Property SERPs
Scrape(coj|Single): sidenote
Scrape(General|Single): Recursive Node Crawler
Utility (Event,Window): Scroll to Root ScrollHeight
Scrape(Indeed|Multi): Crawl Job SERPs
Scrape(Thingiverse API): Get Things By Id
Scrape(Thingiverse): Crawl Things by Category
Scrape(Thingiverse API): Get Thing Batches by Id via DnD
Scrape(YouTube|Single): Get Video Playlist
Utility (Data): Join Thing Metrics & Meta
Utility(Data): Get Nested Array Lengths
Utility (Twitter): Hide Followed Profiles
Utility (Time): YYYY-MM-DD HH:MM:SS
Scrape(Thangs|Multi): Crawl 3D Model SERPs
Scrape(PrusaPrints,Multi): Get Prints
Scrape(Reddit,Single): Get Posts
Userscript(Youtube): Scrape Channel Videos
Userscript(Youtube): Tab Manager
Scrape(Sunbiz|Multi): Biz Details
Utility(Data): DnD View Types
Scrape(General|Single|Node): Select Nodes by Attr
Scrape(Aliexpress|Multi): Get Products via API
Utility(Text): Strip Web Page CSS, Script, Events, Media
Scrape(Youtube|Single): Get Subs
Scrape(General|Single): SelectAll ReduceByProp
Scrape(General|Single): SelectAll ReduceMultiProps
Scrape(General|Multi): Tiny Link Crawler + Delay & Node Reports
Scrape(P5|Multi): Get Examples
Scrape(LinkedIn|Single): Find New Connections
UserScript(linkedIn|Single): Get Jobs
Utility (Time): Date From Days Ago
Utility(General|Single): Keep Scrolling
Scrape(YouTube): Videos From Search
Utility(General|Single): getOffset
Utility(Event,Form): Get Data On Form Input
Utility(Event,Element): ResizeObserver
COCO-SSD Object Categories
Scrape(Wikipedia|Multi): What Links Here?
Scrape(DDuckGo|Single): Download Images
Scrape(General|Single|NLP): Compromise nGram
Scrape(coj): Crawl Property SERPs
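Run from the browser console on a coj property-search results page. The script walks every detail link in the results table (anchors matching /Basic/Detail.aspx?RE=), fetches and parses each detail page for its labeled values, building attributes, area totals, and sales history, merges those with the matching results row, and downloads the accumulated records as JSON in 2,000-record batches.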
var collection = [];
var page = 1;

(async () => {
  // Convert currency / numeric strings ("$1,234") to numbers, pass everything else through.
  var toNum = (s) => s.indexOf("$") > -1 || s.match(/^\d+$/) ? +s.replace(/[\$,]+/gim, "") : s;

  // Pull a compact {nodeName, id, text} record out of a labeled <span>.
  var extract = (el) => ({
    nodeName: el.nodeName,
    id: el.id.replace("ctl00_cphBody_lbl", "").replace("NoData", "").split("_").slice(-1)[0].replace("lbl", ""),
    text: el.innerText ? toNum(el.innerText.replace(/[ \n\r\t]+/gim, " ").trim()) : ""
  });

  // Parse a property detail page: labeled spans, building attributes, and building-area totals.
  var getProps = (doc) => {
    var props = [...doc.querySelectorAll("span[id]")]
      .map(extract)
      .filter((el) => el.text)
      .slice(2)                                             // skip the first two page-level labels
      .filter((d) => !isNaN(d.text) || d.text.length < 80)  // drop long free-text blocks
      .reduce((report, el) => ({ ...report, [el.id]: el.text }), {});

    var areaRows = [...doc.querySelectorAll("[id*='gridBuildingArea'] tr")];
    var areaFields = [...areaRows[0].querySelectorAll("th")].slice(1).map((th) => th.innerText);
    var areaTotals = [...areaRows.slice(-1)[0].querySelectorAll("td")].slice(1).map((td) => +td.innerText);

    // Remove header rows, "See below" placeholders, and fields we don't need.
    Object.entries(props)
      .filter((prop) => prop[0].indexOf("Header") == 0 || prop[1] == "See below")
      .forEach((prop) => delete props[prop[0]]);
    delete props.SaleNote2;
    delete props.SaleNote3;
    delete props.LegalDescription1;
    delete props.LegalDescription2;
    delete props.ValueMethodCertified;
    delete props.ValueMethodInProgress;
    delete props.Version;

    var attributes = [...doc.querySelectorAll("[id*='gridBuildingAttributes'] tr")]
      .slice(1)
      .map((row) => [...row.querySelectorAll("th,td")].slice(0, 2).map((d) => d.innerText))
      .reduce((o, arr) => { o[arr[0].replace(/ /gim, "")] = +arr[1]; return o; }, {});

    return {
      ...props,
      ...attributes,
      [areaFields[0]]: areaTotals[0],
      [areaFields[1]]: areaTotals[1],
      [areaFields[2]]: areaTotals[2]
    };
  };

  // Trigger a browser download of the collected records as a JSON file.
  const download = (data, fileName) => {
    var json = JSON.stringify(data);
    var blob = new Blob([json], { type: "application/json;charset=utf-8;" });
    var link = document.createElement("a");
    link.href = window.URL.createObjectURL(blob);
    link.setAttribute("download", fileName + ".json");
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
  };

  // Coerce a table cell: currency -> number, leading date -> Date, otherwise the raw string.
  var parseStr = (str) => {
    var hasCurrency = /\$/gim.test(str);
    var hasNum = /\d/gim.test(str);
    var hasAlpha = /[a-z]/gim.test(str);
    var dateTag = /^\d{1,2}[\/\-]\d\d/gim.test(str);
    if (hasCurrency && !hasAlpha && hasNum) {
      return +str.replace(/[\]$,\(\)]+/gim, "");
    } else if (dateTag) {
      return new Date(str);
    } else {
      return str;
    }
  };

  // Read the sales-history grid into an array of objects keyed by its header row.
  var getSalesHistory = (doc) => {
    var salesHistory = [];
    try {
      var salesHistTable = doc.querySelector("[id*='ctl00_cphBody_gridSalesHistory']");
      var salesTableRows = [...salesHistTable.querySelectorAll("tr")]
        .map((r) => [...r.querySelectorAll("td,th")].map((cell) => cell.innerText));
      var columns = salesTableRows[0];
      salesHistory = salesTableRows.slice(1).map((row) =>
        columns.reduce((obj, key, i) => { obj[key] = parseStr(row[i]) || null; return obj; }, {})
      );
    } catch (e) {
      console.log(e);
    }
    return salesHistory;
  };

  // Column headers of the search-results table on the current page.
  var columns = [...document.querySelectorAll("th")].map((el) => el.innerText);

  // Visit every detail link in the results and merge the row data with the parsed detail page.
  for (let a of [...document.querySelectorAll("a[href*='/Basic/Detail.aspx?RE=']")]) {
    var row = a.parentElement.parentElement;
    var record = [...row.querySelectorAll("td")].reduce((o, el, i) => {
      o[columns[i]] = el.innerText;
      return o;
    }, {});
    try {
      let res = await fetch(a.href);
      let text = await res.text();
      let doc = new DOMParser().parseFromString(text, "text/html");
      record.props = getProps(doc);
      record.salesHistory = getSalesHistory(doc);
      collection.push(record);
    } catch (e) {
      console.log(e);
    }
    // Flush every 2,000 records; number each batch so successive downloads don't collide.
    if (collection.length === 2000) {
      download(collection, "data_" + page);
      page++;
      collection = [];
    }
  }

  // Download whatever is left after the loop.
  if (collection.length) {
    download(collection, "data_final");
    collection = [];
  }
})();
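Because each flush produces a separate JSON download, the batch files may need to be recombined afterwards. A minimal Node.js sketch, assuming the downloaded batches (e.g. data_1.json, data_final.json) are passed on the command line; the script and output names (merge.js, merged.json) are illustrative:

// merge.js - combine JSON batch files into one array (hypothetical helper).
// Usage: node merge.js data_1.json data_2.json data_final.json
const fs = require("fs");

const files = process.argv.slice(2);                                  // batch files to merge
const merged = files.flatMap((f) => JSON.parse(fs.readFileSync(f, "utf8")));

fs.writeFileSync("merged.json", JSON.stringify(merged, null, 2));     // single combined file
console.log("Merged " + files.length + " files, " + merged.length + " records.");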