Mirror of https://github.com/RGBCube/nu_scripts, synced 2025-08-01 22:57:46 +00:00
refactor: ✨ (#418)
* refactor: ✨ move in one commit

  Everything in modules should probably be changed to `exported` defs.
  The idea is to move everything first to keep proper history.

* refactor: 📝 add modules readme (wip)
* refactor: ✨ small move
* refactor: 📝 changed nesting, updated modules readme
* refactor: 📝 to document or not to document
* fix: 🐛 themes

  Replaced the template to use `main` and regenerated the themes from lemnos themes.

* Revert "fix: 🐛 themes"

  This reverts commit 4918d3633c8d2d81950a0ed0cfd9eb84241bc886.

* refactor: ✨ introduce sourced

  - Created a `sourced` root in which sourceable demos are stored. Some might get converted to modules later on.
  - Moved some files to bin too.

* fix: 🐛 fehbg.nu
* fix: 🐛 modules/after.nu
* moved some other stuff around

---------

Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
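The "exported defs" the first bullet refers to are nushell module commands marked with `export`, which makes them importable via `use`. A minimal sketch (the module and command names are made up for illustration):

    # greet.nu - a hypothetical module file
    # `export def` makes the command visible to code that runs `use greet.nu`;
    # a plain `def` would stay private to the module
    export def greet [name: string] {
        $"hello ($name)!"
    }

After `use greet.nu`, `greet world` calls the exported command.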
This commit is contained in:
parent 382696cd21
commit c47ccd42b8
128 changed files with 185 additions and 12 deletions
sourced/webscraping/README.md (new file, +5)
@@ -0,0 +1,5 @@
# Web Scraping

### Definition

Simple scripts to demonstrate how to scrape websites in nushell. Requires the `query web` plugin.
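For context, `query web` is the HTML-scraping command provided by the nu_plugin_query plugin, which has to be registered before any of these scripts run. A minimal sketch of its basic use (the URL and selector here are illustrative, not taken from the scripts below):

    # pull the text of every <h2> heading out of a page;
    # `query web -q` takes a CSS selector and returns the matching text
    http get https://www.nushell.sh
    | query web -q 'h2'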
sourced/webscraping/anagram_unscramble_scrape.nu (new file, +16)
@@ -0,0 +1,16 @@
#!/usr/bin/env nu

# script to get anagrams with scrabble points from unscramble.me
# NOTE: this is just a small showcase of piping `query web` output around
def main [...words: string] {
    let base = "https://www.unscramble.me/"
    $words | par-each { |word|
        http get ($base + $word)
        | query web -q ".mBottom-6" -m # gets the anagram table part of the page
        | drop nth 0 # remove the description/definition of "words"
        | first # we only care about the biggest/first anagrams (which is the length of the input word)
        | query web -q "table" -m # get the html table
        | to text # we need it as raw html to parse it
        | query web --as-table ["Word" "Scrabble points" "Words with friends points"] # parse the html table as a nushell table
    }
}
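Assuming the plugin is registered, a run might look like this (the word is an arbitrary example):

    > ./anagram_unscramble_scrape.nu listen

`par-each` fetches each word's page concurrently, so with several words the per-word tables come back in whatever order the requests finish.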
sourced/webscraping/nuschiit.nu (new file, +21)
@@ -0,0 +1,21 @@
#!/usr/bin/env nu

let baseurl = 'https://www.schiit.co.uk/'
let pages = ['headphone-amps' 'dacs' 'schiit-gaming-products' 'power-amplifiers' 'preamps' 'upgrades' 'accessories-cables' 'schiit%20graded%20stock']

# Simple script to check stock of the https://schiit.co.uk store
def main [] {
    $pages | par-each { |page|
        http get ($baseurl + $page)
        | query web -q '.price, .stock, .product-item h5' # every product yields three strings, in document order
        | str trim
        | group 3 # regroup the flat list into one three-element list per product
        | each { |x|
            {
                name: $x.0,
                avail: $x.1,
                price: $x.2
            }
        }
    }
    | sort-by avail
}
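The `group 3 | each { ... }` step is the trick worth noting: the CSS selector returns one flat list of strings, and grouping by threes rebuilds one record per product. A standalone sketch with dummy data, no network needed (the product names and values are invented; note that on newer nushell releases this chunking command is `chunks` rather than `group`):

    # flat list shaped like the `query web` output: name, stock, price, repeated
    ['Modi' 'In stock' '£99' 'Magni' 'Out of stock' '£109']
    | group 3
    | each { |x| { name: $x.0, avail: $x.1, price: $x.2 } }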
sourced/webscraping/shell_stars.nu (new file, +42)
@@ -0,0 +1,42 @@
let shell_list = [
    [name repo];
    [bash bminor/bash]
    [fish fish-shell/fish-shell]
    [nushell nushell/nushell]
    # [powershell no-github-url]
    [pwsh PowerShell/PowerShell]
    [ksh2020 ksh2020/ksh]
    [ksh93u att/ast]
    # [csh no-github-url]
    # [dash no-github-url]
    # [sh no-github-url]
    # [cmd no-github-url]
    [aws-shell awslabs/aws-shell]
    [azure-cloud-shell Azure/CloudShell]
    [elvish elves/elvish]
    [es wryun/es-shell]
    [ion redox-os/ion]
    [MirBSDksh MirBSD/mksh]
    [ngs ngs-lang/ngs]
    [openbsd_ksh ibara/oksh]
    [oil oilshell/oil]
    [shell++ alexst07/shell-plus-plus]
    [tcsh tcsh-org/tcsh]
    [xonsh xonsh/xonsh]
    [yash magicant/yash]
    [zsh zsh-users/zsh]
]

$shell_list | each { |r|
    print -n $"Working on ($r.name)"
    sleep 250ms
    if ($r.repo | str starts-with no) {
        print "" # finish the progress line first, so the row below stays the branch's return value
        [[shell repo stars]; [($r.name) "no github url" 0]]
    } else {
        let url = $"https://api.github.com/repos/($r.repo)"
        # `fetch` is the old name of today's `http get`; -u/-p pass basic-auth credentials
        let count = (fetch -u $env.GITHUB_USERNAME -p $env.GITHUB_PASSWORD ($url) | get stargazers_count)
        print $" ($count)"
        [[shell repo stars]; [($r.name) ($r.repo) ($count)]]
    }
} | flatten | sort-by -r stars | table -n 1
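On current nushell the `fetch` call would be written with `http get`; a sketch of the same star lookup under that assumption (the GitHub endpoint is real, `GITHUB_TOKEN` is a hypothetical environment variable holding a personal access token):

    # token auth has replaced username/password for the GitHub API
    let repo = "nushell/nushell"
    http get -H [Authorization $"Bearer ($env.GITHUB_TOKEN)"] $"https://api.github.com/repos/($repo)"
    | get stargazers_count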
sourced/webscraping/twitter.nu (new file, +28)
@@ -0,0 +1,28 @@
#!/usr/bin/env nu

# script to get basic info from twitter's unofficial API
def main [...usernames: string] {
    let bearer = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
    let token_endpoint = 'https://api.twitter.com/1.1/guest/activate.json'
    let user_endpoint = 'https://twitter.com/i/api/graphql/gr8Lk09afdgWo7NvzP89iQ/UserByScreenName'

    # obtain the guest token needed to perform the further requests
    let token = (
        post -H [Authorization $bearer] $token_endpoint ''
    ).guest_token

    for $twitter_username in $usernames {
        # get all the useful data from the api
        let variables = {
            screen_name: $twitter_username,
            withSafetyModeUserFields: true,
            withSuperFollowsUserFields: true
        }

        post $user_endpoint -t application/x-www-form-urlencoded [ variables ($variables | to json -r) ] -H [ Authorization $bearer, x-guest-token $token ]
        | get data.user.result
        | flatten
        | select name screen_name description protected verified created_at followers_count rest_id has_nft_avatar
        | get 0
    }
}
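Like `fetch` above, `post` is the pre-rename spelling of today's `http post`; the guest-token request might read like this on a current nushell (same endpoint and header, shown purely as an illustrative sketch):

    # modern spelling of the guest-token step, with $bearer as defined in the script
    let token = (
        http post -H [Authorization $bearer] https://api.twitter.com/1.1/guest/activate.json ''
        | get guest_token
    )

The GraphQL query id embedded in `user_endpoint` is tied to a particular deployment of the web client, so expect it (and the guest-token flow itself) to break as the unofficial API changes.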