refactor(download): Move download-related functions to 'download.ps1' (#6095)

This commit is contained in:
Hsiao-nan Cheung
2024-08-11 17:23:59 +08:00
committed by GitHub
parent 7f99c499d7
commit 79cf33d0b7
18 changed files with 836 additions and 821 deletions

View File

@@ -2,7 +2,11 @@
### Bug Fixes
- **decompress**: `Expand-7zipArchive` Only delete temp dir / `$extractDir` if it is empty ([#6092](https://github.com/ScoopInstaller/Scoop/issues/6092))
- **decompress**: `Expand-7zipArchive` only delete temp dir / `$extractDir` if it is empty ([#6092](https://github.com/ScoopInstaller/Scoop/issues/6092))
### Code Refactoring
- **download:** Move download-related functions to 'download.ps1' ([#6095](https://github.com/ScoopInstaller/Scoop/issues/6095))
## [v0.5.2](https://github.com/ScoopInstaller/Scoop/compare/v0.5.1...v0.5.2) - 2024-07-26

View File

@@ -46,7 +46,7 @@ param(
. "$PSScriptRoot\..\lib\autoupdate.ps1"
. "$PSScriptRoot\..\lib\json.ps1"
. "$PSScriptRoot\..\lib\versions.ps1"
. "$PSScriptRoot\..\lib\install.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
$Dir = Convert-Path $Dir
if ($ForceUpdate) { $Update = $true }

View File

@@ -28,7 +28,7 @@ param(
. "$PSScriptRoot\..\lib\core.ps1"
. "$PSScriptRoot\..\lib\manifest.ps1"
. "$PSScriptRoot\..\lib\install.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
$Dir = Convert-Path $Dir
$Queue = @()

View File

@@ -73,7 +73,7 @@ param(
. "$PSScriptRoot\..\lib\buckets.ps1"
. "$PSScriptRoot\..\lib\json.ps1"
. "$PSScriptRoot\..\lib\versions.ps1"
. "$PSScriptRoot\..\lib\install.ps1" # needed for hash generation
. "$PSScriptRoot\..\lib\download.ps1"
if ($App -ne '*' -and (Test-Path $App -PathType Leaf)) {
$Dir = Split-Path $App

View File

@@ -23,6 +23,7 @@ param(
. "$PSScriptRoot\..\lib\core.ps1"
. "$PSScriptRoot\..\lib\manifest.ps1"
. "$PSScriptRoot\..\lib\description.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
$Dir = Convert-Path $Dir
$Queue = @()

View File

@@ -1,4 +1,17 @@
# Must be included with 'json.ps1'
# Prefix a bare hex digest with its algorithm name, inferred from the
# digest's length. Digests of unknown length yield $null.
function format_hash([String] $hash) {
    $normalized = $hash.ToLower()
    return $(switch ($normalized.Length) {
        32 { "md5:$normalized" } # md5
        40 { "sha1:$normalized" } # sha1
        64 { $normalized } # sha256
        128 { "sha512:$normalized" } # sha512
        default { $null }
    })
}
function find_hash_in_rdf([String] $url, [String] $basename) {
$xml = $null
try {

View File

@@ -63,18 +63,6 @@ function Optimize-SecurityProtocol {
}
}
function Get-Encoding($wc) {
if ($null -ne $wc.ResponseHeaders -and $wc.ResponseHeaders['Content-Type'] -match 'charset=([^;]*)') {
return [System.Text.Encoding]::GetEncoding($Matches[1])
} else {
return [System.Text.Encoding]::GetEncoding('utf-8')
}
}
function Get-UserAgent() {
return "Scoop/1.0 (+http://scoop.sh/) PowerShell/$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor) (Windows NT $([System.Environment]::OSVersion.Version.Major).$([System.Environment]::OSVersion.Version.Minor); $(if(${env:ProgramFiles(Arm)}){'ARM64; '}elseif($env:PROCESSOR_ARCHITECTURE -eq 'AMD64'){'Win64; x64; '})$(if($env:PROCESSOR_ARCHITEW6432 -in 'AMD64','ARM64'){'WOW64; '})$PSEdition)"
}
function Show-DeprecatedWarning {
<#
.SYNOPSIS
@@ -228,35 +216,6 @@ function Complete-ConfigChange {
}
}
function setup_proxy() {
# note: '@' and ':' in password must be escaped, e.g. 'p@ssword' -> p\@ssword'
$proxy = get_config PROXY
if(!$proxy) {
return
}
try {
$credentials, $address = $proxy -split '(?<!\\)@'
if(!$address) {
$address, $credentials = $credentials, $null # no credentials supplied
}
if($address -eq 'none') {
[net.webrequest]::defaultwebproxy = $null
} elseif($address -ne 'default') {
[net.webrequest]::defaultwebproxy = new-object net.webproxy "http://$address"
}
if($credentials -eq 'currentuser') {
[net.webrequest]::defaultwebproxy.credentials = [net.credentialcache]::defaultcredentials
} elseif($credentials) {
$username, $password = $credentials -split '(?<!\\):' | ForEach-Object { $_ -replace '\\([@:])','$1' }
[net.webrequest]::defaultwebproxy.credentials = new-object net.networkcredential($username, $password)
}
} catch {
warn "Failed to use proxy '$proxy': $($_.exception.message)"
}
}
function Invoke-Git {
[CmdletBinding()]
[OutputType([String])]
@@ -584,10 +543,6 @@ function Test-HelperInstalled {
return ![String]::IsNullOrWhiteSpace((Get-HelperPath -Helper $Helper))
}
function Test-Aria2Enabled {
return (Test-HelperInstalled -Helper Aria2) -and (get_config 'aria2-enabled' $true)
}
function app_status($app, $global) {
$status = @{}
$status.installed = installed $app $global
@@ -639,28 +594,6 @@ function fname($path) { split-path $path -leaf }
function strip_ext($fname) { $fname -replace '\.[^\.]*$', '' }
function strip_filename($path) { $path -replace [regex]::escape((fname $path)) }
function strip_fragment($url) { $url -replace (new-object uri $url).fragment }
function url_filename($url) {
(split-path $url -leaf).split('?') | Select-Object -First 1
}
# Unlike url_filename which can be tricked by appending a
# URL fragment (e.g. #/dl.7z, useful for coercing a local filename),
# this function extracts the original filename from the URL.
function url_remote_filename($url) {
$uri = (New-Object URI $url)
$basename = Split-Path $uri.PathAndQuery -Leaf
If ($basename -match ".*[?=]+([\w._-]+)") {
$basename = $matches[1]
}
If (($basename -notlike "*.*") -or ($basename -match "^[v.\d]+$")) {
$basename = Split-Path $uri.AbsolutePath -Leaf
}
If (($basename -notlike "*.*") -and ($uri.Fragment -ne "")) {
$basename = $uri.Fragment.Trim('/', '#')
}
return $basename
}
function ensure($dir) {
if (!(Test-Path -Path $dir)) {
New-Item -Path $dir -ItemType Directory | Out-Null
@@ -1282,112 +1215,6 @@ function substitute($entity, [Hashtable] $params, [Bool]$regexEscape = $false) {
return $newentity
}
function format_hash([String] $hash) {
$hash = $hash.toLower()
switch ($hash.Length)
{
32 { $hash = "md5:$hash" } # md5
40 { $hash = "sha1:$hash" } # sha1
64 { $hash = $hash } # sha256
128 { $hash = "sha512:$hash" } # sha512
default { $hash = $null }
}
return $hash
}
function format_hash_aria2([String] $hash) {
$hash = $hash -split ':' | Select-Object -Last 1
switch ($hash.Length)
{
32 { $hash = "md5=$hash" } # md5
40 { $hash = "sha-1=$hash" } # sha1
64 { $hash = "sha-256=$hash" } # sha256
128 { $hash = "sha-512=$hash" } # sha512
default { $hash = $null }
}
return $hash
}
function get_hash([String] $multihash) {
$type, $hash = $multihash -split ':'
if(!$hash) {
# no type specified, assume sha256
$type, $hash = 'sha256', $multihash
}
if(@('md5','sha1','sha256', 'sha512') -notcontains $type) {
return $null, "Hash type '$type' isn't supported."
}
return $type, $hash.ToLower()
}
function Get-GitHubToken {
return $env:SCOOP_GH_TOKEN, (get_config GH_TOKEN) | Where-Object -Property Length -Value 0 -GT | Select-Object -First 1
}
function handle_special_urls($url)
{
# FossHub.com
if ($url -match "^(?:.*fosshub.com\/)(?<name>.*)(?:\/|\?dwl=)(?<filename>.*)$") {
$Body = @{
projectUri = $Matches.name;
fileName = $Matches.filename;
source = 'CF';
isLatestVersion = $true
}
if ((Invoke-RestMethod -Uri $url) -match '"p":"(?<pid>[a-f0-9]{24}).*?"r":"(?<rid>[a-f0-9]{24})') {
$Body.Add("projectId", $Matches.pid)
$Body.Add("releaseId", $Matches.rid)
}
$url = Invoke-RestMethod -Method Post -Uri "https://api.fosshub.com/download/" -ContentType "application/json" -Body (ConvertTo-Json $Body -Compress)
if ($null -eq $url.error) {
$url = $url.data.url
}
}
# Sourceforge.net
if ($url -match "(?:downloads\.)?sourceforge.net\/projects?\/(?<project>[^\/]+)\/(?:files\/)?(?<file>.*?)(?:$|\/download|\?)") {
# Reshapes the URL to avoid redirections
$url = "https://downloads.sourceforge.net/project/$($matches['project'])/$($matches['file'])"
}
# Github.com
if ($url -match 'github.com/(?<owner>[^/]+)/(?<repo>[^/]+)/releases/download/(?<tag>[^/]+)/(?<file>[^/#]+)(?<filename>.*)' -and ($token = Get-GitHubToken)) {
$headers = @{ "Authorization" = "token $token" }
$privateUrl = "https://api.github.com/repos/$($Matches.owner)/$($Matches.repo)"
$assetUrl = "https://api.github.com/repos/$($Matches.owner)/$($Matches.repo)/releases/tags/$($Matches.tag)"
if ((Invoke-RestMethod -Uri $privateUrl -Headers $headers).Private) {
$url = ((Invoke-RestMethod -Uri $assetUrl -Headers $headers).Assets | Where-Object -Property Name -EQ -Value $Matches.file).Url, $Matches.filename -join ''
}
}
return $url
}
function get_magic_bytes($file) {
if(!(Test-Path $file)) {
return ''
}
if((Get-Command Get-Content).parameters.ContainsKey('AsByteStream')) {
# PowerShell Core (6.0+) '-Encoding byte' is replaced by '-AsByteStream'
return Get-Content $file -AsByteStream -TotalCount 8
}
else {
return Get-Content $file -Encoding byte -TotalCount 8
}
}
function get_magic_bytes_pretty($file, $glue = ' ') {
if(!(Test-Path $file)) {
return ''
}
return (get_magic_bytes $file | ForEach-Object { $_.ToString('x2') }) -join $glue
}
function Out-UTF8File {
param(
[Parameter(Mandatory = $True, Position = 0)]
@@ -1473,6 +1300,3 @@ $scoopPathEnvVar = switch (get_config USE_ISOLATED_PATH) {
# OS information
$WindowsBuild = [System.Environment]::OSVersion.Version.Build
# Setup proxy globally
setup_proxy

758
lib/download.ps1 Normal file
View File

@@ -0,0 +1,758 @@
# Description: Functions for downloading files
## Meta downloader
# Download every URL of a manifest for the given architecture into $dir,
# using the download cache and verifying hashes unless disabled.
# Dispatches to aria2 when enabled, otherwise to the built-in downloader.
# Returns the local filename for each URL.
function Invoke-ScoopDownload ($app, $version, $manifest, $bucket, $architecture, $dir, $use_cache = $true, $check_hash = $true) {
    # we only want to show this warning once
    if (!$use_cache) { warn 'Cache is being ignored.' }
    # can be multiple urls: if there are, then installer should go first to make 'installer.args' section work
    $urls = @(script:url $manifest $architecture)
    # can be multiple cookies: they will be used for all HTTP requests.
    $cookies = $manifest.cookie
    # download first
    if (Test-Aria2Enabled) {
        # aria2 handles all URLs of the manifest in one batch (incl. hash checks)
        Invoke-CachedAria2Download $app $version $manifest $architecture $dir $cookies $use_cache $check_hash
    } else {
        foreach ($url in $urls) {
            $fname = url_filename $url
            try {
                Invoke-CachedDownload $app $version $url "$dir\$fname" $cookies $use_cache
            } catch {
                Write-Host -ForegroundColor DarkRed $_
                abort "URL $url is not valid"
            }
            if ($check_hash) {
                $manifest_hash = hash_for_url $manifest $url $architecture
                $ok, $err = check_hash "$dir\$fname" $manifest_hash $(show_app $app $bucket)
                if (!$ok) {
                    error $err
                    # a corrupt download must not be served from cache next time
                    $cached = cache_path $app $version $url
                    if (Test-Path $cached) {
                        # rm cached file
                        Remove-Item -Force $cached
                    }
                    if ($url.Contains('sourceforge.net')) {
                        Write-Host -ForegroundColor Yellow 'SourceForge.net is known for causing hash validation fails. Please try again before opening a ticket.'
                    }
                    abort $(new_issue_msg $app $bucket 'hash check failed')
                }
            }
        }
    }
    # one local filename per url, for the caller to locate the artifacts
    return $urls.ForEach({ url_filename $_ })
}
## [System.Net] downloader
# Download $url into the cache (unless a cached copy may be reused),
# then place the file at $to: copied when caching, moved otherwise.
# $to may be $null to only populate the cache.
function Invoke-CachedDownload ($app, $version, $url, $to, $cookies = $null, $use_cache = $true) {
    $cacheFile = cache_path $app $version $url
    $haveCached = (Test-Path $cacheFile) -and $use_cache
    if ($haveCached) {
        Write-Host "Loading $(url_remote_filename $url) from cache"
    } else {
        ensure $cachedir | Out-Null
        # download to a temp name first so an aborted transfer never looks cached
        Start-Download $url "$cacheFile.download" $cookies
        Move-Item "$cacheFile.download" $cacheFile -Force
    }
    if ($null -ne $to) {
        if ($use_cache) {
            Copy-Item $cacheFile $to
        } else {
            Move-Item $cacheFile $to -Force
        }
    }
}
# Resolve special-cased URLs, then download $url to $to.
# A progress bar is shown only on an interactive console
# (not when output is redirected, not in the ISE host).
function Start-Download ($url, $to, $cookies) {
    $showProgress = ([console]::IsOutputRedirected -eq $false) -and
        ($Host.Name -ne 'Windows PowerShell ISE Host')
    try {
        $resolved = handle_special_urls $url
        Invoke-Download $resolved $to $cookies $showProgress
    } catch {
        $err = $_.Exception
        if ($err.Response.StatusCode -eq 'Unauthorized') {
            warn 'Token might be misconfigured.'
        }
        # surface the root cause to the caller
        if ($err.InnerException) { $err = $err.InnerException }
        throw $err
    }
}
# Core [System.Net.WebRequest] downloader: streams $url to the file $to,
# optionally rendering an in-place progress bar. Redirects are followed
# manually (recursively) so a '#/rename' fragment survives them, and
# auth / cookie / per-host headers are injected for HTTP requests.
function Invoke-Download ($url, $to, $cookies, $progress) {
    # download with filesize and progress indicator
    $reqUrl = ($url -split '#')[0] # strip any fragment before issuing the request
    $wreq = [Net.WebRequest]::Create($reqUrl)
    if ($wreq -is [Net.HttpWebRequest]) {
        $wreq.UserAgent = Get-UserAgent
        # these hosts misbehave when a Referer header is sent
        if (-not ($url -match 'sourceforge\.net' -or $url -match 'portableapps\.com')) {
            $wreq.Referer = strip_filename $url
        }
        # GitHub asset API: request the raw binary and authenticate
        if ($url -match 'api\.github\.com/repos') {
            $wreq.Accept = 'application/octet-stream'
            $wreq.Headers['Authorization'] = "Bearer $(Get-GitHubToken)"
            $wreq.Headers['X-GitHub-Api-Version'] = '2022-11-28'
        }
        if ($cookies) {
            $wreq.Headers.Add('Cookie', (cookie_header $cookies))
        }
        # extra headers for matching hosts, from the 'private_hosts' config
        get_config PRIVATE_HOSTS | Where-Object { $_ -ne $null -and $url -match $_.match } | ForEach-Object {
            (ConvertFrom-StringData -StringData $_.Headers).GetEnumerator() | ForEach-Object {
                $wreq.Headers[$_.Key] = $_.Value
            }
        }
    }
    try {
        $wres = $wreq.GetResponse()
    } catch [System.Net.WebException] {
        $exc = $_.Exception
        $handledCodes = @(
            [System.Net.HttpStatusCode]::MovedPermanently, # HTTP 301
            [System.Net.HttpStatusCode]::Found, # HTTP 302
            [System.Net.HttpStatusCode]::SeeOther, # HTTP 303
            [System.Net.HttpStatusCode]::TemporaryRedirect # HTTP 307
        )
        # Only handle redirection codes
        $redirectRes = $exc.Response
        if ($handledCodes -notcontains $redirectRes.StatusCode) {
            throw $exc
        }
        # Get the new location of the file
        if ((-not $redirectRes.Headers) -or ($redirectRes.Headers -notcontains 'Location')) {
            throw $exc
        }
        $newUrl = $redirectRes.Headers['Location']
        info "Following redirect to $newUrl..."
        # Handle manual file rename
        if ($url -like '*#/*') {
            # re-attach the '#/name' rename fragment to the redirect target
            $null, $postfix = $url -split '#/'
            $newUrl = "$newUrl#/$postfix"
        }
        # recurse into the redirect target, then stop this invocation
        Invoke-Download $newUrl $to $cookies $progress
        return
    }
    $total = $wres.ContentLength
    # FTP responses may not carry a length; query it explicitly
    if ($total -eq -1 -and $wreq -is [net.ftpwebrequest]) {
        $total = ftp_file_size($url)
    }
    if ($progress -and ($total -gt 0)) {
        [console]::CursorVisible = $false
        # local helper re-defined per call so it closes over $total/$url
        function Trace-DownloadProgress ($read) {
            Write-DownloadProgress $read $total $url
        }
    } else {
        # size unknown or non-interactive: emit a single status line instead
        Write-Host "Downloading $url ($(filesize $total))..."
        function Trace-DownloadProgress {
            #no op
        }
    }
    try {
        $s = $wres.getresponsestream()
        $fs = [io.file]::openwrite($to)
        $buffer = New-Object byte[] 2048
        $totalRead = 0
        $sw = [diagnostics.stopwatch]::StartNew()
        Trace-DownloadProgress $totalRead
        while (($read = $s.read($buffer, 0, $buffer.length)) -gt 0) {
            $fs.write($buffer, 0, $read)
            $totalRead += $read
            # throttle progress redraws to roughly 10 per second
            if ($sw.elapsedmilliseconds -gt 100) {
                $sw.restart()
                Trace-DownloadProgress $totalRead
            }
        }
        $sw.stop()
        Trace-DownloadProgress $totalRead
    } finally {
        if ($progress) {
            [console]::CursorVisible = $true
            Write-Host
        }
        if ($fs) {
            $fs.close()
        }
        if ($s) {
            $s.close()
        }
        $wres.close()
    }
}
# Build one progress-bar line, e.g. 'file.zip (1.2 MB) [====>   ]  42%',
# sized to fit the console's buffer width. Pure formatting: returns the
# string, writes nothing.
function Format-DownloadProgress ($url, $read, $total, $console) {
    $filename = url_remote_filename $url
    # calculate current percentage done
    $p = [math]::Round($read / $total * 100, 0)
    # pre-generate LHS and RHS of progress string
    # so we know how much space we have
    $left = "$filename ($(filesize $total))"
    $right = [string]::Format('{0,3}%', $p)
    # calculate remaining width for progress bar
    $midwidth = $console.BufferSize.Width - ($left.Length + $right.Length + 8)
    # calculate how many characters are completed
    $completed = [math]::Abs([math]::Round(($p / 100) * $midwidth, 0) - 1)
    # generate dashes to symbolise completed
    # (when $completed -le 1, $dashes stays unset and '+=' below starts it)
    if ($completed -gt 1) {
        $dashes = [string]::Join('', ((1..$completed) | ForEach-Object { '=' }))
    }
    # this is why we calculate $completed - 1 above:
    # '>' marks the advancing edge; at 100% it becomes a final '='
    $dashes += switch ($p) {
        100 { '=' }
        default { '>' }
    }
    # the remaining characters are filled with spaces
    $spaces = switch ($dashes.Length) {
        $midwidth { [string]::Empty }
        default {
            [string]::Join('', ((1..($midwidth - $dashes.Length)) | ForEach-Object { ' ' }))
        }
    }
    "$left [$dashes$spaces] $right"
}
# Render the progress line in place: after each write the cursor is put
# back at its starting position so the next call overdraws the old line.
function Write-DownloadProgress ($read, $total, $url) {
    $console = $Host.UI.RawUI
    $left = $console.CursorPosition.X
    $top = $console.CursorPosition.Y
    $width = $console.BufferSize.Width
    if ($read -eq 0) {
        # first call: measure the widest possible line (at 100%) to decide
        # whether the bar fits on the current console line
        $maxOutputLength = $(Format-DownloadProgress $url 100 $total $console).Length
        if (($left + $maxOutputLength) -gt $width) {
            # not enough room to print progress on this line
            # print on new line
            Write-Host
            $left = 0
            $top = $top + 1
            # clamp in case the console scrolled when Write-Host wrapped
            if ($top -gt $console.CursorPosition.Y) { $top = $console.CursorPosition.Y }
        }
    }
    Write-Host $(Format-DownloadProgress $url $read $total $console) -NoNewline
    [console]::SetCursorPosition($left, $top)
}
## Aria2 downloader
# aria2 is used when the helper is installed and the 'aria2-enabled'
# config entry (default: on) has not been switched off.
function Test-Aria2Enabled {
    $helperPresent = Test-HelperInstalled -Helper Aria2
    return $helperPresent -and (get_config 'aria2-enabled' $true)
}
# Translate an aria2c process exit code into its documented meaning
# (see the EXIT STATUS section of the aria2c manual).
# Unrecognized codes map to a generic unknown-error message.
function aria_exit_code($exitcode) {
    $meanings = @{
        0  = 'All downloads were successful'
        1  = 'An unknown error occurred'
        2  = 'Timeout'
        3  = 'Resource was not found'
        4  = 'Aria2 saw the specified number of "resource not found" error. See --max-file-not-found option'
        5  = 'Download aborted because download speed was too slow. See --lowest-speed-limit option'
        6  = 'Network problem occurred.'
        7  = 'There were unfinished downloads. This error is only reported if all finished downloads were successful and there were unfinished downloads in a queue when aria2 exited by pressing Ctrl-C by an user or sending TERM or INT signal'
        8  = 'Remote server did not support resume when resume was required to complete download'
        9  = 'There was not enough disk space available'
        10 = 'Piece length was different from one in .aria2 control file. See --allow-piece-length-change option'
        11 = 'Aria2 was downloading same file at that moment'
        12 = 'Aria2 was downloading same info hash torrent at that moment'
        13 = 'File already existed. See --allow-overwrite option'
        14 = 'Renaming file failed. See --auto-file-renaming option'
        15 = 'Aria2 could not open existing file'
        16 = 'Aria2 could not create new file or truncate existing file'
        17 = 'File I/O error occurred'
        18 = 'Aria2 could not create directory'
        19 = 'Name resolution failed'
        20 = 'Aria2 could not parse Metalink document'
        21 = 'FTP command failed'
        22 = 'HTTP response header was bad or unexpected'
        23 = 'Too many redirects occurred'
        24 = 'HTTP authorization failed'
        25 = 'Aria2 could not parse bencoded file (usually ".torrent" file)'
        26 = '".torrent" file was corrupted or missing information that aria2 needed'
        27 = 'Magnet URI was bad'
        28 = 'Bad/unrecognized option was given or unexpected option argument was given'
        29 = 'The remote server was unable to handle the request due to a temporary overloading or maintenance'
        30 = 'Aria2 could not parse JSON-RPC request'
        31 = 'Reserved. Not used'
        32 = 'Checksum validation failed'
    }
    if ($meanings.ContainsKey($exitcode)) {
        return $meanings[$exitcode]
    }
    return 'An unknown error occurred'
}
# If $file is a metalink XML document, return the filename it declares;
# otherwise (not XML / malformed XML / no file entry) return $null.
function get_filename_from_metalink($file) {
    $bytes = get_magic_bytes_pretty $file ''
    # check if file starts with '<?xml'
    if (!($bytes.StartsWith('3c3f786d6c'))) {
        return $null
    }
    # Add System.Xml for reading metalink files
    Add-Type -AssemblyName 'System.Xml'
    $xr = [System.Xml.XmlReader]::Create($file)
    $filename = $null
    try {
        $xr.ReadStartElement('metalink')
        # the first attribute of the first <file> element holds the name
        if ($xr.ReadToFollowing('file') -and $xr.MoveToFirstAttribute()) {
            $filename = $xr.Value
        }
    } catch [System.Xml.XmlException] {
        return $null
    } finally {
        $xr.Close()
    }
    return $filename
}
# Batch downloader backed by aria2: builds an aria2 input file covering
# every uncached URL of the manifest, runs aria2 once, relays its console
# output, then hash-checks each cached file and copies/moves it to $dir.
function Invoke-CachedAria2Download ($app, $version, $manifest, $architecture, $dir, $cookies = $null, $use_cache = $true, $check_hash = $true) {
    $data = @{}
    $urls = @(script:url $manifest $architecture)
    # aria2 input file
    $urlstxt = Join-Path $cachedir "$app.txt"
    $urlstxt_content = ''
    $download_finished = $true
    # aria2 options
    $options = @(
        "--input-file='$urlstxt'"
        "--user-agent='$(Get-UserAgent)'"
        '--allow-overwrite=true'
        '--auto-file-renaming=false'
        "--retry-wait=$(get_config 'aria2-retry-wait' 2)"
        "--split=$(get_config 'aria2-split' 5)"
        "--max-connection-per-server=$(get_config 'aria2-max-connection-per-server' 5)"
        "--min-split-size=$(get_config 'aria2-min-split-size' '5M')"
        '--console-log-level=warn'
        '--enable-color=false'
        '--no-conf=true'
        '--follow-metalink=true'
        '--metalink-preferred-protocol=https'
        '--min-tls-version=TLSv1.2'
        "--stop-with-process=$PID"
        '--continue'
        '--summary-interval=0'
        '--auto-save-interval=1'
    )
    if ($cookies) {
        $options += "--header='Cookie: $(cookie_header $cookies)'"
    }
    # forward the globally-configured proxy (see setup_proxy) to aria2
    $proxy = get_config PROXY
    if ($proxy -ne 'none') {
        if ([Net.Webrequest]::DefaultWebProxy.Address) {
            $options += "--all-proxy='$([Net.Webrequest]::DefaultWebProxy.Address.Authority)'"
        }
        if ([Net.Webrequest]::DefaultWebProxy.Credentials.UserName) {
            $options += "--all-proxy-user='$([Net.Webrequest]::DefaultWebProxy.Credentials.UserName)'"
        }
        if ([Net.Webrequest]::DefaultWebProxy.Credentials.Password) {
            $options += "--all-proxy-passwd='$([Net.Webrequest]::DefaultWebProxy.Credentials.Password)'"
        }
    }
    # user-supplied extra options ('aria2-options' config) go last so they win
    $more_options = get_config 'aria2-options'
    if ($more_options) {
        $options += $more_options
    }
    foreach ($url in $urls) {
        # per-url bookkeeping: final target path, cache filename, cache path
        $data.$url = @{
            'target' = Join-Path $dir (url_filename $url)
            'cachename' = fname (cache_path $app $version $url)
            'source' = cache_path $app $version $url
        }
        # reuse the cache only when no partial ('.aria2') download is pending
        if ((Test-Path $data.$url.source) -and -not((Test-Path "$($data.$url.source).aria2") -or (Test-Path $urlstxt)) -and $use_cache) {
            Write-Host 'Loading ' -NoNewline
            Write-Host $(url_remote_filename $url) -ForegroundColor Cyan -NoNewline
            Write-Host ' from cache.'
        } else {
            $download_finished = $false
            # create aria2 input file content
            try {
                $try_url = handle_special_urls $url
            } catch {
                if ($_.Exception.Response.StatusCode -eq 'Unauthorized') {
                    warn 'Token might be misconfigured.'
                }
            }
            $urlstxt_content += "$try_url`n"
            if (!$url.Contains('sourceforge.net')) {
                $urlstxt_content += " referer=$(strip_filename $url)`n"
            }
            $urlstxt_content += " dir=$cachedir`n"
            $urlstxt_content += " out=$($data.$url.cachename)`n"
        }
    }
    if (-not($download_finished)) {
        # write aria2 input file
        if ($urlstxt_content -ne '') {
            ensure $cachedir | Out-Null
            # Write aria2 input-file with UTF8NoBOM encoding
            $urlstxt_content | Out-UTF8File -FilePath $urlstxt
        }
        # build aria2 command
        $aria2 = "& '$(Get-HelperPath -Helper Aria2)' $($options -join ' ')"
        # handle aria2 console output
        Write-Host 'Starting download with aria2 ...'
        # Set console output encoding to UTF8 for non-ASCII characters printing
        $oriConsoleEncoding = [Console]::OutputEncoding
        [Console]::OutputEncoding = New-Object System.Text.UTF8Encoding
        Invoke-Command ([scriptblock]::Create($aria2)) | ForEach-Object {
            # Skip blank lines
            if ([String]::IsNullOrWhiteSpace($_)) { return }
            # Prevent potential overlapping of text when one line is shorter
            $len = $Host.UI.RawUI.WindowSize.Width - $_.Length - 20
            $blank = if ($len -gt 0) { ' ' * $len } else { '' }
            $color = 'Gray'
            # progress/status lines are redrawn in place (no newline)
            if ($_.StartsWith('(OK):')) {
                $noNewLine = $true
                $color = 'Green'
            } elseif ($_.StartsWith('[') -and $_.EndsWith(']')) {
                $noNewLine = $true
                $color = 'Cyan'
            } elseif ($_.StartsWith('Download Results:')) {
                $noNewLine = $false
            }
            Write-Host "`rDownload: $_$blank" -ForegroundColor $color -NoNewline:$noNewLine
        }
        Write-Host ''
        if ($lastexitcode -gt 0) {
            error "Download failed! (Error $lastexitcode) $(aria_exit_code $lastexitcode)"
            error $urlstxt_content
            error $aria2
            abort $(new_issue_msg $app $bucket 'download via aria2 failed')
        }
        # remove aria2 input file when done
        # NOTE(review): $url here is the last value left by the loop above,
        # and Test-Path gets an array of two paths — confirm intent.
        if (Test-Path $urlstxt, "$($data.$url.source).aria2*") {
            Remove-Item $urlstxt -Force -ErrorAction SilentlyContinue
            Remove-Item "$($data.$url.source).aria2*" -Force -ErrorAction SilentlyContinue
        }
        # Revert console encoding
        [Console]::OutputEncoding = $oriConsoleEncoding
    }
    foreach ($url in $urls) {
        # aria2 may have followed a metalink; restore the expected cache name
        $metalink_filename = get_filename_from_metalink $data.$url.source
        if ($metalink_filename) {
            Remove-Item $data.$url.source -Force
            Rename-Item -Force (Join-Path -Path $cachedir -ChildPath $metalink_filename) $data.$url.source
        }
        # run hash checks
        if ($check_hash) {
            $manifest_hash = hash_for_url $manifest $url $architecture
            $ok, $err = check_hash $data.$url.source $manifest_hash $(show_app $app $bucket)
            if (!$ok) {
                error $err
                if (Test-Path $data.$url.source) {
                    # rm cached file
                    Remove-Item $data.$url.source -Force -ErrorAction SilentlyContinue
                    Remove-Item "$($data.$url.source).aria2*" -Force -ErrorAction SilentlyContinue
                }
                if ($url.Contains('sourceforge.net')) {
                    Write-Host -f yellow 'SourceForge.net is known for causing hash validation fails. Please try again before opening a ticket.'
                }
                abort $(new_issue_msg $app $bucket 'hash check failed')
            }
        }
        # copy or move file to target location
        if (!(Test-Path $data.$url.source) ) {
            abort $(new_issue_msg $app $bucket 'cached file not found')
        }
        if (!($dir -eq $cachedir)) {
            if ($use_cache) {
                Copy-Item $data.$url.source $data.$url.target
            } else {
                Move-Item $data.$url.source $data.$url.target -Force
            }
        }
    }
}
## Helper functions
### Downloader parameters
# Render a cookies object (the manifest's 'cookie' property) as a single
# 'name=value;name=value' string suitable for an HTTP Cookie header.
function cookie_header($cookies) {
    if (!$cookies) { return }
    $pairs = foreach ($prop in $cookies.PSObject.Properties) {
        "$($prop.Name)=$($prop.Value)"
    }
    [String]::Join(';', $pairs)
}
# Pick the response's text encoding from the Content-Type charset,
# defaulting to UTF-8 when the header is absent or carries no charset.
function Get-Encoding($wc) {
    $headers = $wc.ResponseHeaders
    if ($null -ne $headers -and $headers['Content-Type'] -match 'charset=([^;]*)') {
        return [System.Text.Encoding]::GetEncoding($Matches[1])
    }
    return [System.Text.Encoding]::GetEncoding('utf-8')
}
# HTTP User-Agent identifying Scoop plus the PowerShell version, the
# Windows NT version, the processor architecture, and the PS edition.
function Get-UserAgent() {
    return "Scoop/1.0 (+http://scoop.sh/) PowerShell/$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor) (Windows NT $([System.Environment]::OSVersion.Version.Major).$([System.Environment]::OSVersion.Version.Minor); $(if(${env:ProgramFiles(Arm)}){'ARM64; '}elseif($env:PROCESSOR_ARCHITECTURE -eq 'AMD64'){'Win64; x64; '})$(if($env:PROCESSOR_ARCHITEW6432 -in 'AMD64','ARM64'){'WOW64; '})$PSEdition)"
}
# Configure [Net.WebRequest]::DefaultWebProxy from the 'proxy' config
# entry, formatted as [user:pass@]host:port; the address may also be
# 'none' (no proxy) or 'default' (keep the system proxy), and the
# credentials may be 'currentuser' (use the default credential cache).
function setup_proxy() {
    # note: '@' and ':' in password must be escaped, e.g. 'p@ssword' -> p\@ssword'
    $proxy = get_config PROXY
    if (!$proxy) {
        return
    }
    try {
        # split on the first unescaped '@' into credentials and address
        $credentials, $address = $proxy -split '(?<!\\)@'
        if (!$address) {
            $address, $credentials = $credentials, $null # no credentials supplied
        }
        if ($address -eq 'none') {
            [net.webrequest]::defaultwebproxy = $null
        } elseif ($address -ne 'default') {
            [net.webrequest]::defaultwebproxy = New-Object net.webproxy "http://$address"
        }
        if ($credentials -eq 'currentuser') {
            [net.webrequest]::defaultwebproxy.credentials = [net.credentialcache]::defaultcredentials
        } elseif ($credentials) {
            # split on the first unescaped ':' and unescape \@ and \: in both parts
            $username, $password = $credentials -split '(?<!\\):' | ForEach-Object { $_ -replace '\\([@:])', '$1' }
            [net.webrequest]::defaultwebproxy.credentials = New-Object net.networkcredential($username, $password)
        }
    } catch {
        warn "Failed to use proxy '$proxy': $($_.exception.message)"
    }
}
# GitHub API token: the SCOOP_GH_TOKEN environment variable takes
# precedence over the 'gh_token' config entry. The Where-Object clause
# (Length -GT 0) drops null/empty candidates; the first non-empty wins.
function Get-GitHubToken {
    return $env:SCOOP_GH_TOKEN, (get_config GH_TOKEN) | Where-Object -Property Length -Value 0 -GT | Select-Object -First 1
}
# $true when the GitHub API reports zero remaining requests for this
# client; also prints a hint about configuring a token in that case.
function github_ratelimit_reached {
    $limitInfo = download_json 'https://api.github.com/rate_limit'
    $reached = $limitInfo.rate.remaining -eq 0
    if ($reached) {
        Write-Host "GitHub API rate limit reached.`r`nPlease try again later or configure your API token using 'scoop config gh_token <your token>'."
    }
    $reached
}
### URL handling
# Rewrite URLs of hosts that need special treatment: FossHub (resolve
# the real download via its API), SourceForge (reshape to skip redirect
# pages), and GitHub release assets of private repos (switch to the
# authenticated asset API). Other URLs pass through unchanged.
function handle_special_urls($url) {
    # FossHub.com
    if ($url -match '^(?:.*fosshub.com\/)(?<name>.*)(?:\/|\?dwl=)(?<filename>.*)$') {
        $Body = @{
            projectUri = $Matches.name
            fileName = $Matches.filename
            source = 'CF'
            isLatestVersion = $true
        }
        # scrape project/release ids from the page to address the exact file
        if ((Invoke-RestMethod -Uri $url) -match '"p":"(?<pid>[a-f0-9]{24}).*?"r":"(?<rid>[a-f0-9]{24})') {
            $Body.Add('projectId', $Matches.pid)
            $Body.Add('releaseId', $Matches.rid)
        }
        $url = Invoke-RestMethod -Method Post -Uri 'https://api.fosshub.com/download/' -ContentType 'application/json' -Body (ConvertTo-Json $Body -Compress)
        if ($null -eq $url.error) {
            $url = $url.data.url
        }
    }
    # Sourceforge.net
    if ($url -match '(?:downloads\.)?sourceforge.net\/projects?\/(?<project>[^\/]+)\/(?:files\/)?(?<file>.*?)(?:$|\/download|\?)') {
        # Reshapes the URL to avoid redirections
        $url = "https://downloads.sourceforge.net/project/$($matches['project'])/$($matches['file'])"
    }
    # Github.com
    if ($url -match 'github.com/(?<owner>[^/]+)/(?<repo>[^/]+)/releases/download/(?<tag>[^/]+)/(?<file>[^/#]+)(?<filename>.*)' -and ($token = Get-GitHubToken)) {
        $headers = @{ 'Authorization' = "token $token" }
        $privateUrl = "https://api.github.com/repos/$($Matches.owner)/$($Matches.repo)"
        $assetUrl = "https://api.github.com/repos/$($Matches.owner)/$($Matches.repo)/releases/tags/$($Matches.tag)"
        # only private repos need the asset API (public assets download directly)
        if ((Invoke-RestMethod -Uri $privateUrl -Headers $headers).Private) {
            $url = ((Invoke-RestMethod -Uri $assetUrl -Headers $headers).Assets | Where-Object -Property Name -EQ -Value $Matches.file).Url, $Matches.filename -join ''
        }
    }
    return $url
}
### Remote file information
# Fetch a URL and parse its body as JSON, sending a GitHub token
# (when configured) so api.github.com requests get a higher rate limit.
function download_json($url) {
    $headers = @{}
    $token = Get-GitHubToken
    if ($token) {
        $headers = @{ 'Authorization' = "token $token" }
    }
    # suppress Invoke-WebRequest's progress bar for the duration of the call
    $ProgressPreference = 'SilentlyContinue'
    $response = Invoke-WebRequest $url -UseBasicParsing -Headers $headers
    $ProgressPreference = 'Continue'
    $response.content | ConvertFrom-Json
}
# Read the first 8 bytes of a file ('' when the file does not exist).
function get_magic_bytes($file) {
    if (!(Test-Path $file)) {
        return ''
    }
    # PowerShell Core (6.0+) replaced '-Encoding byte' with '-AsByteStream'
    $hasByteStream = (Get-Command Get-Content).Parameters.ContainsKey('AsByteStream')
    if ($hasByteStream) {
        return Get-Content $file -AsByteStream -TotalCount 8
    }
    return Get-Content $file -Encoding byte -TotalCount 8
}
# Hex-dump the file's first 8 bytes, joined with $glue (default: space).
function get_magic_bytes_pretty($file, $glue = ' ') {
    if (!(Test-Path $file)) {
        return ''
    }
    $hexPairs = foreach ($byte in get_magic_bytes $file) { $byte.ToString('x2') }
    return $hexPairs -join $glue
}
# Content-Length (as [int]) reported by a HEAD request to $Uri.
Function Get-RemoteFileSize ($Uri) {
    $response = Invoke-WebRequest -Uri $Uri -Method HEAD -UseBasicParsing
    # NOTE(review): response headers normally carry no 'StatusCode' key,
    # so this guard looks always-true — confirm intent before changing.
    if (!$response.Headers.StatusCode) {
        $response.Headers.'Content-Length' | ForEach-Object { [int]$_ }
    }
}
# Size in bytes of a remote file, queried via the FTP SIZE command.
function ftp_file_size($url) {
    $ftpRequest = [Net.FtpWebRequest]::Create($url)
    $ftpRequest.Method = [Net.WebRequestMethods+Ftp]::GetFileSize
    $ftpRequest.GetResponse().ContentLength
}
# Local filename for a URL: the last path segment with any query string
# stripped. Note: a '#/name' fragment can deliberately override this.
function url_filename($url) {
    $leaf = Split-Path $url -Leaf
    ($leaf -split '\?')[0]
}
# Extract the original filename from a URL. Unlike url_filename, this
# cannot be tricked by an appended fragment (e.g. '#/dl.7z'): it falls
# back through path-and-query, then the absolute path, then the fragment.
function url_remote_filename($url) {
    $parsed = New-Object URI $url
    $name = Split-Path $parsed.PathAndQuery -Leaf
    # filename hidden in a query parameter, e.g. '?file=app.zip'
    if ($name -match '.*[?=]+([\w._-]+)') {
        $name = $matches[1]
    }
    # no extension, or a bare version string -> use the path component
    if (($name -notlike '*.*') -or ($name -match '^[v.\d]+$')) {
        $name = Split-Path $parsed.AbsolutePath -Leaf
    }
    # still no extension -> fall back to the fragment, if present
    if (($name -notlike '*.*') -and ($parsed.Fragment -ne '')) {
        $name = $parsed.Fragment.Trim('/', '#')
    }
    return $name
}
### Hash-related functions
# Look up the manifest hash corresponding to $url: hashes and urls are
# parallel arrays, so the url's index selects its hash.
# Returns $null when the manifest declares no hashes at all; aborts when
# the url is not part of the manifest.
function hash_for_url($manifest, $url, $arch) {
    $hashes = @(hash $manifest $arch) | Where-Object { $_ -ne $null }
    if ($hashes.length -eq 0) { return $null }
    $urls = @(script:url $manifest $arch)
    $index = [array]::IndexOf($urls, $url)
    if ($index -eq -1) { abort "Couldn't find hash in manifest for '$url'." }
    @($hashes)[$index]
}
# Verify $file against a manifest hash string ('[type:]hex').
# Returns an (ok, err) pair; a missing hash warns and passes.
function check_hash($file, $hash, $app_name) {
    # returns (ok, err)
    if (!$hash) {
        warn "Warning: No hash in manifest. SHA256 for '$(fname $file)' is:`n $((Get-FileHash -Path $file -Algorithm SHA256).Hash.ToLower())"
        return $true, $null
    }
    # NOTE(review): $url is not a parameter — it is resolved dynamically
    # from the caller's scope (the download loop). Confirm before refactoring.
    Write-Host 'Checking hash of ' -NoNewline
    Write-Host $(url_remote_filename $url) -ForegroundColor Cyan -NoNewline
    Write-Host ' ... ' -NoNewline
    $algorithm, $expected = get_hash $hash
    if ($null -eq $algorithm) {
        # get_hash returned ($null, <message>) for an unsupported type
        return $false, "Hash type '$algorithm' isn't supported."
    }
    $actual = (Get-FileHash -Path $file -Algorithm $algorithm).Hash.ToLower()
    $expected = $expected.ToLower()
    if ($actual -ne $expected) {
        $msg = "Hash check failed!`n"
        $msg += "App: $app_name`n"
        $msg += "URL: $url`n"
        if (Test-Path $file) {
            $msg += "First bytes: $((get_magic_bytes_pretty $file ' ').ToUpper())`n"
        }
        if ($expected -or $actual) {
            $msg += "Expected: $expected`n"
            $msg += "Actual: $actual"
        }
        return $false, $msg
    }
    Write-Host 'ok.' -f Green
    return $true, $null
}
# Split a 'type:hash' string into (algorithm, lowercase digest).
# A bare digest is assumed to be sha256. For an unsupported algorithm,
# returns ($null, <error message>) instead.
function get_hash([String] $multihash) {
    $algorithm, $digest = $multihash -split ':'
    if (!$digest) {
        # no type specified, assume sha256
        $algorithm = 'sha256'
        $digest = $multihash
    }
    $supported = 'md5', 'sha1', 'sha256', 'sha512'
    if ($supported -notcontains $algorithm) {
        return $null, "Hash type '$algorithm' isn't supported."
    }
    return $algorithm, $digest.ToLower()
}
# Setup proxy globally
setup_proxy

View File

@@ -81,576 +81,10 @@ function install_app($app, $architecture, $global, $suggested, $use_cache = $tru
show_notes $manifest $dir $original_dir $persist_dir
}
function Invoke-CachedDownload ($app, $version, $url, $to, $cookies = $null, $use_cache = $true) {
    # Download $url into the shared cache (unless a cached copy already
    # exists), then copy/move the cached file to $to when a destination
    # is given. With $use_cache off, the cache entry is always refreshed
    # and moved out rather than copied.
    $cacheFile = cache_path $app $version $url
    if ($use_cache -and (Test-Path $cacheFile)) {
        Write-Host "Loading $(url_remote_filename $url) from cache"
    } else {
        ensure $cachedir | Out-Null
        # Download under a temporary name first so an interrupted transfer
        # never leaves a half-written file at the final cache path.
        Start-Download $url "$cacheFile.download" $cookies
        Move-Item "$cacheFile.download" $cacheFile -Force
    }
    if ($null -ne $to) {
        if ($use_cache) {
            Copy-Item $cacheFile $to
        } else {
            Move-Item $cacheFile $to -Force
        }
    }
}
function Start-Download ($url, $to, $cookies) {
    # Resolve special URL schemes and download $url to $to. A progress bar
    # is shown only when stdout is an interactive console and the host is
    # not the PowerShell ISE.
    $interactive = ![console]::IsOutputRedirected -and
        $Host.Name -ne 'Windows PowerShell ISE Host'
    try {
        $url = handle_special_urls $url
        Invoke-Download $url $to $cookies $interactive
    } catch {
        $err = $_.Exception
        if ($err.Response.StatusCode -eq 'Unauthorized') {
            warn 'Token might be misconfigured.'
        }
        # Surface the root cause rather than the wrapper exception.
        if ($err.InnerException) { $err = $err.InnerException }
        throw $err
    }
}
function aria_exit_code($exitcode) {
    # Translate an aria2c exit code into its documented human-readable
    # meaning; unknown codes map to a generic message.
    $codes = @{
        0 = 'All downloads were successful'
        1 = 'An unknown error occurred'
        2 = 'Timeout'
        3 = 'Resource was not found'
        4 = 'Aria2 saw the specified number of "resource not found" error. See --max-file-not-found option'
        5 = 'Download aborted because download speed was too slow. See --lowest-speed-limit option'
        6 = 'Network problem occurred.'
        7 = 'There were unfinished downloads. This error is only reported if all finished downloads were successful and there were unfinished downloads in a queue when aria2 exited by pressing Ctrl-C by an user or sending TERM or INT signal'
        8 = 'Remote server did not support resume when resume was required to complete download'
        9 = 'There was not enough disk space available'
        10 = 'Piece length was different from one in .aria2 control file. See --allow-piece-length-change option'
        11 = 'Aria2 was downloading same file at that moment'
        12 = 'Aria2 was downloading same info hash torrent at that moment'
        13 = 'File already existed. See --allow-overwrite option'
        14 = 'Renaming file failed. See --auto-file-renaming option'
        15 = 'Aria2 could not open existing file'
        16 = 'Aria2 could not create new file or truncate existing file'
        17 = 'File I/O error occurred'
        18 = 'Aria2 could not create directory'
        19 = 'Name resolution failed'
        20 = 'Aria2 could not parse Metalink document'
        21 = 'FTP command failed'
        22 = 'HTTP response header was bad or unexpected'
        23 = 'Too many redirects occurred'
        24 = 'HTTP authorization failed'
        25 = 'Aria2 could not parse bencoded file (usually ".torrent" file)'
        26 = '".torrent" file was corrupted or missing information that aria2 needed'
        27 = 'Magnet URI was bad'
        28 = 'Bad/unrecognized option was given or unexpected option argument was given'
        29 = 'The remote server was unable to handle the request due to a temporary overloading or maintenance'
        30 = 'Aria2 could not parse JSON-RPC request'
        31 = 'Reserved. Not used'
        32 = 'Checksum validation failed'
    }
    if ($codes.ContainsKey($exitcode)) {
        return $codes[$exitcode]
    }
    return 'An unknown error occurred'
}
# Extract the payload filename from a metalink XML document, or return $null
# when $file is not a metalink (not XML, malformed, or missing the attribute).
function get_filename_from_metalink($file) {
    $bytes = get_magic_bytes_pretty $file ''
    # check if file starts with '<?xml' (hex 3c 3f 78 6d 6c)
    if (!($bytes.StartsWith('3c3f786d6c'))) {
        return $null
    }
    # Add System.Xml for reading metalink files
    Add-Type -AssemblyName 'System.Xml'
    $xr = [System.Xml.XmlReader]::Create($file)
    $filename = $null
    try {
        # Require a <metalink> root; then take the first attribute of the
        # first <file> element as the filename.
        $xr.ReadStartElement('metalink')
        if ($xr.ReadToFollowing('file') -and $xr.MoveToFirstAttribute()) {
            $filename = $xr.Value
        }
    } catch [System.Xml.XmlException] {
        # Malformed XML: not a usable metalink. The finally block below still
        # closes the reader before this return takes effect.
        return $null
    } finally {
        $xr.Close()
    }
    return $filename
}
# Download every manifest URL for an app with aria2c, going through the shared
# cache, then verify hashes and place the files into $dir.
# NOTE(review): relies on $bucket and $cachedir from the surrounding scope —
# confirm callers/dot-sourcing always define them.
function Invoke-CachedAria2Download ($app, $version, $manifest, $architecture, $dir, $cookies = $null, $use_cache = $true, $check_hash = $true) {
    # Per-URL bookkeeping: target path in $dir, cache file name, cache path.
    $data = @{}
    $urls = @(script:url $manifest $architecture)
    # aria2 input file
    $urlstxt = Join-Path $cachedir "$app.txt"
    $urlstxt_content = ''
    $download_finished = $true
    # aria2 options
    $options = @(
        "--input-file='$urlstxt'"
        "--user-agent='$(Get-UserAgent)'"
        '--allow-overwrite=true'
        '--auto-file-renaming=false'
        "--retry-wait=$(get_config 'aria2-retry-wait' 2)"
        "--split=$(get_config 'aria2-split' 5)"
        "--max-connection-per-server=$(get_config 'aria2-max-connection-per-server' 5)"
        "--min-split-size=$(get_config 'aria2-min-split-size' '5M')"
        '--console-log-level=warn'
        '--enable-color=false'
        '--no-conf=true'
        '--follow-metalink=true'
        '--metalink-preferred-protocol=https'
        '--min-tls-version=TLSv1.2'
        "--stop-with-process=$PID"
        '--continue'
        '--summary-interval=0'
        '--auto-save-interval=1'
    )
    if ($cookies) {
        $options += "--header='Cookie: $(cookie_header $cookies)'"
    }
    # Forward the system proxy (and its credentials) to aria2 unless disabled.
    $proxy = get_config PROXY
    if ($proxy -ne 'none') {
        if ([Net.Webrequest]::DefaultWebProxy.Address) {
            $options += "--all-proxy='$([Net.Webrequest]::DefaultWebProxy.Address.Authority)'"
        }
        if ([Net.Webrequest]::DefaultWebProxy.Credentials.UserName) {
            $options += "--all-proxy-user='$([Net.Webrequest]::DefaultWebProxy.Credentials.UserName)'"
        }
        if ([Net.Webrequest]::DefaultWebProxy.Credentials.Password) {
            $options += "--all-proxy-passwd='$([Net.Webrequest]::DefaultWebProxy.Credentials.Password)'"
        }
    }
    # User-supplied extra flags are appended last so they can override defaults.
    $more_options = get_config 'aria2-options'
    if ($more_options) {
        $options += $more_options
    }
    foreach ($url in $urls) {
        $data.$url = @{
            'target' = Join-Path $dir (url_filename $url)
            'cachename' = fname (cache_path $app $version $url)
            'source' = cache_path $app $version $url
        }
        # A cache hit counts only when no partial-download control file and no
        # leftover input file exist (both indicate an interrupted transfer).
        if ((Test-Path $data.$url.source) -and -not((Test-Path "$($data.$url.source).aria2") -or (Test-Path $urlstxt)) -and $use_cache) {
            Write-Host 'Loading ' -NoNewline
            Write-Host $(url_remote_filename $url) -f Cyan -NoNewline
            Write-Host ' from cache.'
        } else {
            $download_finished = $false
            # create aria2 input file content
            try {
                $try_url = handle_special_urls $url
            } catch {
                if ($_.Exception.Response.StatusCode -eq 'Unauthorized') {
                    warn 'Token might be misconfigured.'
                }
            }
            $urlstxt_content += "$try_url`n"
            if (!$url.Contains('sourceforge.net')) {
                $urlstxt_content += "    referer=$(strip_filename $url)`n"
            }
            $urlstxt_content += "    dir=$cachedir`n"
            $urlstxt_content += "    out=$($data.$url.cachename)`n"
        }
    }
    if (-not($download_finished)) {
        # write aria2 input file
        if ($urlstxt_content -ne '') {
            ensure $cachedir | Out-Null
            # Write aria2 input-file with UTF8NoBOM encoding
            $urlstxt_content | Out-UTF8File -FilePath $urlstxt
        }
        # build aria2 command
        $aria2 = "& '$(Get-HelperPath -Helper Aria2)' $($options -join ' ')"
        # handle aria2 console output
        Write-Host 'Starting download with aria2 ...'
        # Set console output encoding to UTF8 for non-ASCII characters printing
        $oriConsoleEncoding = [Console]::OutputEncoding
        [Console]::OutputEncoding = New-Object System.Text.UTF8Encoding
        Invoke-Command ([scriptblock]::Create($aria2)) | ForEach-Object {
            # Skip blank lines
            if ([String]::IsNullOrWhiteSpace($_)) { return }
            # Prevent potential overlapping of text when one line is shorter
            $len = $Host.UI.RawUI.WindowSize.Width - $_.Length - 20
            $blank = if ($len -gt 0) { ' ' * $len } else { '' }
            $color = 'Gray'
            if ($_.StartsWith('(OK):')) {
                $noNewLine = $true
                $color = 'Green'
            } elseif ($_.StartsWith('[') -and $_.EndsWith(']')) {
                $noNewLine = $true
                $color = 'Cyan'
            } elseif ($_.StartsWith('Download Results:')) {
                $noNewLine = $false
            }
            # Carriage return rewrites the current console line in place.
            Write-Host "`rDownload: $_$blank" -ForegroundColor $color -NoNewline:$noNewLine
        }
        Write-Host ''
        if ($lastexitcode -gt 0) {
            error "Download failed! (Error $lastexitcode) $(aria_exit_code $lastexitcode)"
            error $urlstxt_content
            error $aria2
            abort $(new_issue_msg $app $bucket 'download via aria2 failed')
        }
        # remove aria2 input file when done
        # NOTE(review): $url here is the last element of the foreach above, and
        # Test-Path receives a two-element array (true if any path matches) —
        # confirm this cleanup condition matches the original intent.
        if (Test-Path $urlstxt, "$($data.$url.source).aria2*") {
            Remove-Item $urlstxt -Force -ErrorAction SilentlyContinue
            Remove-Item "$($data.$url.source).aria2*" -Force -ErrorAction SilentlyContinue
        }
        # Revert console encoding
        [Console]::OutputEncoding = $oriConsoleEncoding
    }
    foreach ($url in $urls) {
        # If aria2 followed a metalink, the cached file is the metalink payload;
        # rename it back to the expected cache name.
        $metalink_filename = get_filename_from_metalink $data.$url.source
        if ($metalink_filename) {
            Remove-Item $data.$url.source -Force
            Rename-Item -Force (Join-Path -Path $cachedir -ChildPath $metalink_filename) $data.$url.source
        }
        # run hash checks
        if ($check_hash) {
            $manifest_hash = hash_for_url $manifest $url $architecture
            $ok, $err = check_hash $data.$url.source $manifest_hash $(show_app $app $bucket)
            if (!$ok) {
                error $err
                if (Test-Path $data.$url.source) {
                    # rm cached file so the next attempt re-downloads it
                    Remove-Item $data.$url.source -Force -ErrorAction SilentlyContinue
                    Remove-Item "$($data.$url.source).aria2*" -Force -ErrorAction SilentlyContinue
                }
                if ($url.Contains('sourceforge.net')) {
                    Write-Host -f yellow 'SourceForge.net is known for causing hash validation fails. Please try again before opening a ticket.'
                }
                abort $(new_issue_msg $app $bucket 'hash check failed')
            }
        }
        # copy or move file to target location
        if (!(Test-Path $data.$url.source) ) {
            abort $(new_issue_msg $app $bucket 'cached file not found')
        }
        if (!($dir -eq $cachedir)) {
            if ($use_cache) {
                Copy-Item $data.$url.source $data.$url.target
            } else {
                Move-Item $data.$url.source $data.$url.target -Force
            }
        }
    }
}
# download with filesize and progress indicator
# Downloads $url to the local path $to. Follows 301/302/303/307 redirects by
# recursing on the Location header; shows a progress bar when $progress is set.
function Invoke-Download ($url, $to, $cookies, $progress) {
    # Strip any '#...' fragment (used by Scoop for manual renames) before the request.
    $reqUrl = ($url -split '#')[0]
    $wreq = [Net.WebRequest]::Create($reqUrl)
    if ($wreq -is [Net.HttpWebRequest]) {
        $wreq.UserAgent = Get-UserAgent
        # No referer for sourceforge.net / portableapps.com downloads.
        if (-not ($url -match 'sourceforge\.net' -or $url -match 'portableapps\.com')) {
            $wreq.Referer = strip_filename $url
        }
        # GitHub release-asset API endpoints need auth and the octet-stream accept type.
        if ($url -match 'api\.github\.com/repos') {
            $wreq.Accept = 'application/octet-stream'
            $wreq.Headers['Authorization'] = "Bearer $(Get-GitHubToken)"
            $wreq.Headers['X-GitHub-Api-Version'] = '2022-11-28'
        }
        if ($cookies) {
            $wreq.Headers.Add('Cookie', (cookie_header $cookies))
        }
        # Apply extra headers for any configured private host whose pattern matches.
        get_config PRIVATE_HOSTS | Where-Object { $_ -ne $null -and $url -match $_.match } | ForEach-Object {
            (ConvertFrom-StringData -StringData $_.Headers).GetEnumerator() | ForEach-Object {
                $wreq.Headers[$_.Key] = $_.Value
            }
        }
    }
    try {
        $wres = $wreq.GetResponse()
    } catch [System.Net.WebException] {
        $exc = $_.Exception
        $handledCodes = @(
            [System.Net.HttpStatusCode]::MovedPermanently, # HTTP 301
            [System.Net.HttpStatusCode]::Found, # HTTP 302
            [System.Net.HttpStatusCode]::SeeOther, # HTTP 303
            [System.Net.HttpStatusCode]::TemporaryRedirect # HTTP 307
        )
        # Only handle redirection codes
        $redirectRes = $exc.Response
        if ($handledCodes -notcontains $redirectRes.StatusCode) {
            throw $exc
        }
        # Get the new location of the file
        if ((-not $redirectRes.Headers) -or ($redirectRes.Headers -notcontains 'Location')) {
            throw $exc
        }
        $newUrl = $redirectRes.Headers['Location']
        info "Following redirect to $newUrl..."
        # Handle manual file rename: carry the '#/name' fragment over to the new URL.
        if ($url -like '*#/*') {
            $null, $postfix = $url -split '#/'
            $newUrl = "$newUrl#/$postfix"
        }
        # Recurse on the redirect target and abandon this attempt.
        Invoke-Download $newUrl $to $cookies $progress
        return
    }
    $total = $wres.ContentLength
    # FTP responses report -1; fall back to an explicit size request.
    if ($total -eq -1 -and $wreq -is [net.ftpwebrequest]) {
        $total = ftp_file_size($url)
    }
    if ($progress -and ($total -gt 0)) {
        [console]::CursorVisible = $false
        function Trace-DownloadProgress ($read) {
            Write-DownloadProgress $read $total $url
        }
    } else {
        Write-Host "Downloading $url ($(filesize $total))..."
        function Trace-DownloadProgress {
            #no op
        }
    }
    try {
        $s = $wres.getresponsestream()
        $fs = [io.file]::openwrite($to)
        $buffer = New-Object byte[] 2048
        $totalRead = 0
        $sw = [diagnostics.stopwatch]::StartNew()
        Trace-DownloadProgress $totalRead
        while (($read = $s.read($buffer, 0, $buffer.length)) -gt 0) {
            $fs.write($buffer, 0, $read)
            $totalRead += $read
            # Throttle progress redraws to roughly ten per second.
            if ($sw.elapsedmilliseconds -gt 100) {
                $sw.restart()
                Trace-DownloadProgress $totalRead
            }
        }
        $sw.stop()
        Trace-DownloadProgress $totalRead
    } finally {
        # Restore the cursor and close stream/file/response on every exit path.
        if ($progress) {
            [console]::CursorVisible = $true
            Write-Host
        }
        if ($fs) {
            $fs.close()
        }
        if ($s) {
            $s.close()
        }
        $wres.close()
    }
}
function Format-DownloadProgress ($url, $read, $total, $console) {
    # Compose one progress line: "<name> (<size>) [====>     ]  42%".
    $filename = url_remote_filename $url
    # Whole-number percentage complete.
    $percent = [math]::Round($read / $total * 100, 0)
    # Build the fixed left/right labels first so the remaining console
    # width can be given to the bar itself.
    $left = "$filename ($(filesize $total))"
    $right = [string]::Format('{0,3}%', $percent)
    $midwidth = $console.BufferSize.Width - ($left.Length + $right.Length + 8)
    # Filled cells; one is held back for the arrow head appended below.
    $completed = [math]::Abs([math]::Round(($percent / 100) * $midwidth, 0) - 1)
    $bar = ''
    if ($completed -gt 1) {
        $bar = '=' * $completed
    }
    # Arrow head; becomes '=' once the download reaches 100%.
    $bar += if ($percent -eq 100) { '=' } else { '>' }
    # Pad the rest of the bar with spaces.
    $filler = switch ($bar.Length) {
        $midwidth { [string]::Empty }
        default {
            [string]::Join('', ((1..($midwidth - $bar.Length)) | ForEach-Object { ' ' }))
        }
    }
    "$left [$bar$filler] $right"
}
# Draw the progress bar at the current cursor position, then move the cursor
# back so the next call overwrites the same console cells.
function Write-DownloadProgress ($read, $total, $url) {
    $console = $host.UI.RawUI
    $left = $console.CursorPosition.X
    $top = $console.CursorPosition.Y
    $width = $console.BufferSize.Width
    if ($read -eq 0) {
        # On the first call, measure the widest possible output (at 100%).
        $maxOutputLength = $(Format-DownloadProgress $url 100 $total $console).length
        if (($left + $maxOutputLength) -gt $width) {
            # not enough room to print progress on this line
            # print on new line
            Write-Host
            $left = 0
            $top = $top + 1
            # Clamp: if Write-Host scrolled the buffer, stay on the cursor's row.
            if ($top -gt $console.CursorPosition.Y) { $top = $console.CursorPosition.Y }
        }
    }
    Write-Host $(Format-DownloadProgress $url $read $total $console) -NoNewline
    [console]::SetCursorPosition($left, $top)
}
function Invoke-ScoopDownload ($app, $version, $manifest, $bucket, $architecture, $dir, $use_cache = $true, $check_hash = $true) {
    # Fetch every URL declared by the manifest into $dir — via aria2 when
    # enabled, otherwise the built-in downloader — verifying hashes along
    # the way. Returns the local filenames of the downloaded artifacts.
    # we only want to show this warning once
    if (!$use_cache) { warn 'Cache is being ignored.' }
    # Installer URLs come first in the manifest so 'installer.args' lines up.
    $urls = @(script:url $manifest $architecture)
    # Manifest-level cookies apply to every HTTP request.
    $cookies = $manifest.cookie
    if (Test-Aria2Enabled) {
        Invoke-CachedAria2Download $app $version $manifest $architecture $dir $cookies $use_cache $check_hash
    } else {
        foreach ($url in $urls) {
            $localName = url_filename $url
            try {
                Invoke-CachedDownload $app $version $url "$dir\$localName" $cookies $use_cache
            } catch {
                Write-Host -f darkred $_
                abort "URL $url is not valid"
            }
            if ($check_hash) {
                $expectedHash = hash_for_url $manifest $url $architecture
                $ok, $err = check_hash "$dir\$localName" $expectedHash $(show_app $app $bucket)
                if (!$ok) {
                    error $err
                    $cached = cache_path $app $version $url
                    if (Test-Path $cached) {
                        # Drop the cached copy so the next attempt re-downloads it.
                        Remove-Item -Force $cached
                    }
                    if ($url.Contains('sourceforge.net')) {
                        Write-Host -f yellow 'SourceForge.net is known for causing hash validation fails. Please try again before opening a ticket.'
                    }
                    abort $(new_issue_msg $app $bucket 'hash check failed')
                }
            }
        }
    }
    return $urls.ForEach({ url_filename $_ })
}
function cookie_header($cookies) {
    # Render a cookie object's properties as a 'name=value;name=value'
    # HTTP Cookie header value; emits nothing for a null/empty input.
    if (!$cookies) { return }
    $pairs = foreach ($prop in $cookies.PSObject.Properties) {
        '{0}={1}' -f $prop.Name, $prop.Value
    }
    $pairs -join ';'
}
function is_in_dir($dir, $check) {
    # True when $check equals $dir or lies somewhere beneath it
    # (a path-separator or end-of-string must follow the prefix).
    $prefix = [regex]::Escape("$dir")
    $check -match "^$prefix([/\\]|$)"
}
function ftp_file_size($url) {
    # Ask the FTP server for a remote file's size in bytes.
    $request = [Net.FtpWebRequest]::Create($url)
    $request.Method = [Net.WebRequestMethods+Ftp]::GetFileSize
    $request.GetResponse().ContentLength
}
# hashes
# Look up the manifest hash that corresponds to a given download URL.
# Returns $null when the manifest declares no hashes; aborts when hashes
# exist but the URL cannot be matched to one of them.
function hash_for_url($manifest, $url, $arch) {
    $hashes = @(hash $manifest $arch) | Where-Object { $_ -ne $null }
    if ($hashes.length -eq 0) { return $null }
    # Hashes and URLs are parallel arrays; match by position.
    $urls = @(script:url $manifest $arch)
    $index = [array]::indexof($urls, $url)
    if ($index -eq -1) { abort "Couldn't find hash in manifest for '$url'." }
    @($hashes)[$index]
}
# returns (ok, err)
# Verifies a downloaded file against the manifest hash: ($true, $null) on
# success or missing hash; ($false, <message>) on mismatch.
# NOTE(review): $url is not a parameter — it is resolved dynamically from the
# caller's scope; confirm callers always define it.
function check_hash($file, $hash, $app_name) {
    if (!$hash) {
        warn "Warning: No hash in manifest. SHA256 for '$(fname $file)' is:`n    $((Get-FileHash -Path $file -Algorithm SHA256).Hash.ToLower())"
        return $true, $null
    }
    Write-Host 'Checking hash of ' -NoNewline
    Write-Host $(url_remote_filename $url) -f Cyan -NoNewline
    Write-Host ' ... ' -NoNewline
    $algorithm, $expected = get_hash $hash
    if ($null -eq $algorithm) {
        # NOTE(review): $algorithm is null here, so this message renders as
        # "Hash type '' isn't supported." — the actual error from get_hash is
        # in $expected; consider returning that instead.
        return $false, "Hash type '$algorithm' isn't supported."
    }
    $actual = (Get-FileHash -Path $file -Algorithm $algorithm).Hash.ToLower()
    $expected = $expected.ToLower()
    if ($actual -ne $expected) {
        $msg = "Hash check failed!`n"
        $msg += "App: $app_name`n"
        $msg += "URL: $url`n"
        if (Test-Path $file) {
            $msg += "First bytes: $((get_magic_bytes_pretty $file ' ').ToUpper())`n"
        }
        if ($expected -or $actual) {
            $msg += "Expected: $expected`n"
            $msg += "Actual: $actual"
        }
        return $false, $msg
    }
    Write-Host 'ok.' -f Green
    return $true, $null
}
function Invoke-Installer {
[CmdletBinding()]
param (

View File

@@ -23,7 +23,7 @@
. "$PSScriptRoot\..\lib\json.ps1" # 'autoupdate.ps1' (indirectly)
. "$PSScriptRoot\..\lib\autoupdate.ps1" # 'generate_user_manifest' (indirectly)
. "$PSScriptRoot\..\lib\manifest.ps1" # 'generate_user_manifest' 'Get-Manifest'
. "$PSScriptRoot\..\lib\install.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
if (get_config USE_SQLITE_CACHE) {
. "$PSScriptRoot\..\lib\database.ps1"
}

View File

@@ -6,6 +6,7 @@
. "$PSScriptRoot\..\lib\getopt.ps1"
. "$PSScriptRoot\..\lib\manifest.ps1" # 'Get-Manifest'
. "$PSScriptRoot\..\lib\versions.ps1" # 'Get-InstalledVersion'
. "$PSScriptRoot\..\lib\download.ps1" # 'Get-RemoteFileSize'
$opt, $app, $err = getopt $args 'v' 'verbose'
if ($err) { error "scoop info: $err"; exit 1 }
@@ -166,7 +167,7 @@ if ($status.installed) {
$cached = $null
}
$urlLength = (Invoke-WebRequest $url -Method Head).Headers.'Content-Length' | ForEach-Object { [int]$_ }
$urlLength = Get-RemoteFileSize $url
$totalPackage += $urlLength
} catch [System.Management.Automation.RuntimeException] {
$totalPackage = 0

View File

@@ -33,6 +33,7 @@
. "$PSScriptRoot\..\lib\manifest.ps1" # 'generate_user_manifest' 'Get-Manifest' 'Select-CurrentVersion' (indirectly)
. "$PSScriptRoot\..\lib\system.ps1"
. "$PSScriptRoot\..\lib\install.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
. "$PSScriptRoot\..\lib\decompress.ps1"
. "$PSScriptRoot\..\lib\shortcuts.ps1"
. "$PSScriptRoot\..\lib\psmodules.ps1"

View File

@@ -10,15 +10,10 @@ param($query)
. "$PSScriptRoot\..\lib\manifest.ps1" # 'manifest'
. "$PSScriptRoot\..\lib\versions.ps1" # 'Get-LatestVersion'
. "$PSScriptRoot\..\lib\download.ps1"
$list = [System.Collections.Generic.List[PSCustomObject]]::new()
$githubtoken = Get-GitHubToken
$authheader = @{}
if ($githubtoken) {
$authheader = @{'Authorization' = "token $githubtoken" }
}
function bin_match($manifest, $query) {
if (!$manifest.bin) { return $false }
$bins = foreach ($bin in $manifest.bin) {
@@ -122,23 +117,6 @@ function search_bucket_legacy($bucket, $query) {
}
}
function download_json($url) {
    # Fetch a URL and parse the response body as JSON.
    # Progress output is suppressed for speed; the caller's preference is
    # restored in a finally block — the previous code both clobbered any
    # custom setting (forcing 'Continue') and leaked 'SilentlyContinue'
    # when Invoke-WebRequest threw.
    $oldPreference = $ProgressPreference
    $ProgressPreference = 'SilentlyContinue'
    try {
        Invoke-WebRequest $url -UseBasicParsing -Headers $authheader |
            Select-Object -ExpandProperty content | ConvertFrom-Json
    } finally {
        $ProgressPreference = $oldPreference
    }
}
function github_ratelimit_reached {
    # True when the GitHub API rate limit is exhausted; also warns the user.
    $limits = download_json 'https://api.github.com/rate_limit'
    $reached = $limits.rate.remaining -eq 0
    if ($reached) {
        Write-Host "GitHub API rate limit reached.
Please try again later or configure your API token using 'scoop config gh_token <your token>'."
    }
    $reached
}
function search_remote($bucket, $query) {
$uri = [System.Uri](known_bucket_repo $bucket)
if ($uri.AbsolutePath -match '/([a-zA-Z0-9]*)/([a-zA-Z0-9-]*)(?:.git|/)?') {

View File

@@ -24,6 +24,7 @@
. "$PSScriptRoot\..\lib\versions.ps1"
. "$PSScriptRoot\..\lib\depends.ps1"
. "$PSScriptRoot\..\lib\install.ps1"
. "$PSScriptRoot\..\lib\download.ps1"
if (get_config USE_SQLITE_CACHE) {
. "$PSScriptRoot\..\lib\database.ps1"
}

View File

@@ -31,7 +31,7 @@
. "$PSScriptRoot\..\lib\getopt.ps1"
. "$PSScriptRoot\..\lib\manifest.ps1" # 'Get-Manifest'
. "$PSScriptRoot\..\lib\json.ps1" # 'json_path'
. "$PSScriptRoot\..\lib\install.ps1" # 'hash_for_url'
. "$PSScriptRoot\..\lib\download.ps1" # 'hash_for_url'
. "$PSScriptRoot\..\lib\depends.ps1" # 'Get-Dependency'
$opt, $apps, $err = getopt $args 'asnup' @('all', 'scan', 'no-depends', 'no-update-scoop', 'passthru')
@@ -86,11 +86,6 @@ Function ConvertTo-VirusTotalUrlId ($url) {
$url_id
}
Function Get-RemoteFileSize ($url) {
    # Return the remote file size in bytes, taken from the Content-Length
    # header of a HEAD request. Parsed as Int64: Convert.ToInt32 would
    # overflow (and throw) for files of 2 GiB or larger.
    $response = Invoke-WebRequest -Uri $url -Method HEAD -UseBasicParsing
    $response.Headers.'Content-Length' | ForEach-Object { [System.Convert]::ToInt64($_) }
}
Function Get-VirusTotalResultByHash ($hash, $url, $app) {
$hash = $hash.ToLower()
$api_url = "https://www.virustotal.com/api/v3/files/$hash"

View File

@@ -73,28 +73,6 @@ Describe 'Test-HelperInstalled' -Tag 'Scoop' {
}
}
Describe 'Test-Aria2Enabled' -Tag 'Scoop' {
    It 'should return true if aria2 is installed' {
        # Enabled only when both the helper check and the config lookup
        # return truthy values.
        Mock Test-HelperInstalled { $true }
        Mock get_config { $true }
        Test-Aria2Enabled | Should -BeTrue
    }
    It 'should return false if aria2 is not installed' {
        # All three remaining truth-table combinations must be false.
        Mock Test-HelperInstalled { $false }
        Mock get_config { $false }
        Test-Aria2Enabled | Should -BeFalse
        Mock Test-HelperInstalled { $false }
        Mock get_config { $true }
        Test-Aria2Enabled | Should -BeFalse
        Mock Test-HelperInstalled { $true }
        Mock get_config { $false }
        Test-Aria2Enabled | Should -BeFalse
    }
}
Describe 'Test-CommandAvailable' -Tag 'Scoop' {
It 'should return true if command exists' {
Test-CommandAvailable 'Write-Host' | Should -BeTrue

View File

@@ -0,0 +1,49 @@
BeforeAll {
    # Load the library under test ('download.ps1') plus its prerequisites.
    . "$PSScriptRoot\Scoop-TestLib.ps1"
    . "$PSScriptRoot\..\lib\core.ps1"
    . "$PSScriptRoot\..\lib\download.ps1"
}
Describe 'Test-Aria2Enabled' -Tag 'Scoop' {
    It 'should return true if aria2 is installed' {
        # Enabled only when both the helper check and the config lookup
        # return truthy values.
        Mock Test-HelperInstalled { $true }
        Mock get_config { $true }
        Test-Aria2Enabled | Should -BeTrue
    }
    It 'should return false if aria2 is not installed' {
        # All three remaining truth-table combinations must be false.
        Mock Test-HelperInstalled { $false }
        Mock get_config { $false }
        Test-Aria2Enabled | Should -BeFalse
        Mock Test-HelperInstalled { $false }
        Mock get_config { $true }
        Test-Aria2Enabled | Should -BeFalse
        Mock Test-HelperInstalled { $true }
        Mock get_config { $false }
        Test-Aria2Enabled | Should -BeFalse
    }
}
Describe 'url_filename' -Tag 'Scoop' {
    It 'should extract the real filename from an url' {
        url_filename 'http://example.org/foo.txt' | Should -Be 'foo.txt'
        url_filename 'http://example.org/foo.txt?var=123' | Should -Be 'foo.txt'
    }
    It 'can be tricked with a hash to override the real filename' {
        # A '#/name' fragment deliberately overrides the path's basename.
        url_filename 'http://example.org/foo-v2.zip#/foo.zip' | Should -Be 'foo.zip'
    }
}
Describe 'url_remote_filename' -Tag 'Scoop' {
    It 'should extract the real filename from an url' {
        url_remote_filename 'http://example.org/foo.txt' | Should -Be 'foo.txt'
        url_remote_filename 'http://example.org/foo.txt?var=123' | Should -Be 'foo.txt'
    }
    It 'can not be tricked with a hash to override the real filename' {
        # Unlike url_filename, the '#/name' rename fragment is ignored here.
        url_remote_filename 'http://example.org/foo-v2.zip#/foo.zip' | Should -Be 'foo-v2.zip'
    }
}

View File

@@ -12,28 +12,6 @@ Describe 'appname_from_url' -Tag 'Scoop' {
}
}
Describe 'url_filename' -Tag 'Scoop' {
    It 'should extract the real filename from an url' {
        url_filename 'http://example.org/foo.txt' | Should -Be 'foo.txt'
        url_filename 'http://example.org/foo.txt?var=123' | Should -Be 'foo.txt'
    }
    It 'can be tricked with a hash to override the real filename' {
        # A '#/name' fragment deliberately overrides the path's basename.
        url_filename 'http://example.org/foo-v2.zip#/foo.zip' | Should -Be 'foo.zip'
    }
}
Describe 'url_remote_filename' -Tag 'Scoop' {
    It 'should extract the real filename from an url' {
        url_remote_filename 'http://example.org/foo.txt' | Should -Be 'foo.txt'
        url_remote_filename 'http://example.org/foo.txt?var=123' | Should -Be 'foo.txt'
    }
    It 'can not be tricked with a hash to override the real filename' {
        # Unlike url_filename, the '#/name' rename fragment is ignored here.
        url_remote_filename 'http://example.org/foo-v2.zip#/foo.zip' | Should -Be 'foo-v2.zip'
    }
}
Describe 'is_in_dir' -Tag 'Scoop', 'Windows' {
It 'should work correctly' {
is_in_dir 'C:\test' 'C:\foo' | Should -BeFalse