chore: update deps

Rim 2024-05-15 15:20:32 -04:00
parent 77b7f08e35
commit fcb420c576
1954 changed files with 75199 additions and 61443 deletions

deps/curl/.azure-pipelines.yml vendored

@ -29,221 +29,221 @@
trigger:
branches:
include:
- 'master'
- '*/ci'
- 'master'
- '*/ci'
paths:
exclude:
- '.circleci/*'
- '.cirrus.yml'
- '.github/*'
- '.github/workflows/*'
- 'appveyor.yml'
- 'packages/*'
- 'plan9/*'
- '.circleci/*'
- '.cirrus.yml'
- '.github/*'
- '.github/workflows/*'
- 'appveyor.*'
- 'packages/*'
- 'plan9/*'
pr:
branches:
include:
- 'master'
- 'master'
paths:
exclude:
- '.circleci/*'
- '.cirrus.yml'
- '.github/*'
- '.github/workflows/*'
- 'appveyor.yml'
- 'packages/*'
- 'plan9/*'
- '.circleci/*'
- '.cirrus.yml'
- '.github/*'
- '.github/workflows/*'
- 'appveyor.*'
- 'packages/*'
- 'plan9/*'
variables:
MAKEFLAGS: '-j 2'
stages:
##########################################
### Linux jobs first
##########################################
##########################################
### Linux jobs first
##########################################
- stage: linux
dependsOn: []
jobs:
- job: ubuntu
# define defaults to make sure variables are always expanded/replaced
- stage: linux
dependsOn: []
jobs:
- job: ubuntu
# define defaults to make sure variables are always expanded/replaced
variables:
install: ''
configure: ''
tests: '!433'
timeoutInMinutes: 60
pool:
vmImage: 'ubuntu-latest'
strategy:
matrix:
default:
name: default
install:
configure: --enable-debug --with-openssl
disable_ipv6:
name: w/o IPv6
configure: --disable-ipv6 --with-openssl
disable_http_smtp_imap:
name: w/o HTTP/SMTP/IMAP
configure: --disable-http --disable-smtp --disable-imap --without-ssl
disable_thredres:
name: sync resolver
configure: --disable-threaded-resolver --with-openssl
https_only:
name: HTTPS only
configure: --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-pop3 --disable-rtmp --disable-rtsp --disable-scp --disable-sftp --disable-smb --disable-smtp --disable-telnet --disable-tftp --with-openssl
torture:
name: torture
install: libnghttp2-dev
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl
tests: -n -t --shallow=25 !FTP
steps:
- script: sudo apt-get update && sudo apt-get install -y stunnel4 python3-impacket libzstd-dev libbrotli-dev libpsl-dev $(install)
displayName: 'apt install'
retryCountOnTaskFailure: 3
- script: autoreconf -fi && ./configure --enable-warnings --enable-werror $(configure)
displayName: 'configure $(name)'
- script: make V=1 && make V=1 examples && cd tests && make V=1
displayName: 'compile'
- script: make V=1 test-ci
displayName: 'test'
env:
AZURE_ACCESS_TOKEN: "$(System.AccessToken)"
TFLAGS: "-ac /usr/bin/curl -r $(tests)"
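# (Annotation, not part of the commit: in TFLAGS, '!N' skips test N
# entirely, '~N' runs it but ignores its result, and '!FTP' skips every
# test carrying the FTP keyword.)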
- stage: scanbuild
dependsOn: []
jobs:
- job: ubuntu
timeoutInMinutes: 30
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo apt-get update && sudo apt-get install -y clang-tools clang libssl-dev libssh2-1-dev libpsl-dev libbrotli-dev libzstd-dev
displayName: 'apt install'
retryCountOnTaskFailure: 3
- script: autoreconf -fi
displayName: 'autoreconf'
- script: scan-build ./configure --enable-debug --enable-werror --with-openssl --with-libssh2
displayName: 'configure'
env:
CC: "clang"
CCX: "clang++"
- script: scan-build --status-bugs make
displayName: 'make'
- script: scan-build --status-bugs make examples
displayName: 'make examples'
##########################################
### Windows jobs below
##########################################
- stage: windows
dependsOn: []
variables:
install: ''
configure: ''
tests: '!433'
timeoutInMinutes: 60
pool:
vmImage: 'ubuntu-latest'
strategy:
matrix:
default:
name: default
install:
configure: --enable-debug --with-openssl
disable_ipv6:
name: w/o IPv6
configure: --disable-ipv6 --with-openssl
disable_http_smtp_imap:
name: w/o HTTP/SMTP/IMAP
configure: --disable-http --disable-smtp --disable-imap --without-ssl
disable_thredres:
name: sync resolver
configure: --disable-threaded-resolver --with-openssl
https_only:
name: HTTPS only
configure: --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-pop3 --disable-rtmp --disable-rtsp --disable-scp --disable-sftp --disable-smb --disable-smtp --disable-telnet --disable-tftp --with-openssl
torture:
name: torture
install: libnghttp2-dev
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl
tests: -n -t --shallow=25 !FTP
steps:
- script: sudo apt-get update && sudo apt-get install -y stunnel4 python3-impacket libzstd-dev libbrotli-dev $(install)
displayName: 'apt install'
retryCountOnTaskFailure: 3
agent.preferPowerShellOnContainers: true
jobs:
- job: msys2
# define defaults to make sure variables are always expanded/replaced
variables:
container_img: ''
container_cmd: ''
configure: ''
tests: ''
timeoutInMinutes: 120
pool:
vmImage: 'windows-2019'
strategy:
matrix:
mingw32_openssl:
name: 32-bit OpenSSL/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --with-libssh2 --with-openssl --without-libpsl
tests: "~571"
mingw64_openssl:
name: 64-bit OpenSSL/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --with-libssh2 --with-openssl --without-libpsl
tests: "~571"
mingw64_libssh:
name: 64-bit OpenSSL/libssh
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh-devel mingw-w64-x86_64-libssh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --with-libssh --with-openssl --without-libpsl
tests: "~571 ~614"
mingw32:
name: 32-bit w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --without-zlib --without-ssl --without-libpsl
tests: "!203 !1143"
mingw64:
name: 64-bit w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --without-zlib --without-ssl --without-libpsl
tests: "!203 !1143"
mingw32_schannel:
name: 32-bit Schannel/SSPI/WinIDN/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --with-libssh2 --without-libpsl
tests: "~571"
mingw64_schannel:
name: 64-bit Schannel/SSPI/WinIDN/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --with-libssh2 --without-libpsl
tests: "~571"
mingw32_schannel_nozlib:
name: 32-bit Schannel/SSPI/WinIDN w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --without-zlib --without-libpsl
tests: "!203 !1143"
mingw64_schannel_nozlib:
name: 64-bit Schannel/SSPI/WinIDN w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --without-zlib --without-libpsl
tests: "!203 !1143"
container:
image: $(container_img)
env:
MSYS2_PATH_TYPE: inherit
steps:
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && $(prepare)"
displayName: 'prepare'
condition: variables.prepare
retryCountOnTaskFailure: 3
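# (Annotation, not part of the commit: these steps start from cmd.exe,
# which expands %cd% to the Windows working directory; the surrounding
# $(echo ...) is shell command substitution that hands that path to the
# 'cd' running inside the MSYS2 login shell.)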
- script: autoreconf -fi && ./configure --enable-warnings --enable-werror $(configure)
displayName: 'configure $(name)'
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && autoreconf -fi && ./configure $(configure)"
displayName: 'configure $(name)'
- script: make V=1 && make V=1 examples && cd tests && make V=1
displayName: 'compile'
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 && make V=1 examples && cd tests && make V=1"
displayName: 'compile'
- script: make V=1 test-ci
displayName: 'test'
env:
AZURE_ACCESS_TOKEN: "$(System.AccessToken)"
TFLAGS: "-ac /usr/bin/curl -r $(tests)"
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 install && PATH=/usr/bin:/bin find . -type f -path '*/.libs/*.exe' -print -execdir mv -t .. {} \;"
displayName: 'install'
- stage: scanbuild
dependsOn: []
jobs:
- job: ubuntu
timeoutInMinutes: 30
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo apt-get update && sudo apt-get install -y clang-tools clang libssl-dev libssh2-1-dev libpsl-dev libbrotli-dev libzstd-dev
displayName: 'apt install'
retryCountOnTaskFailure: 3
- script: autoreconf -fi
displayName: 'autoreconf'
- script: scan-build ./configure --enable-debug --enable-werror --with-openssl --with-libssh2
displayName: 'configure'
env:
CC: "clang"
CCX: "clang++"
- script: scan-build --status-bugs make
displayName: 'make'
- script: scan-build --status-bugs make examples
displayName: 'make examples'
##########################################
### Windows jobs below
##########################################
- stage: windows
dependsOn: []
variables:
agent.preferPowerShellOnContainers: true
jobs:
- job: msys2
# define defaults to make sure variables are always expanded/replaced
variables:
container_img: ''
container_cmd: ''
configure: ''
tests: ''
timeoutInMinutes: 120
pool:
vmImage: 'windows-2019'
strategy:
matrix:
mingw32_openssl:
name: 32-bit OpenSSL/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --with-libssh2 --with-openssl
tests: "~571"
mingw64_openssl:
name: 64-bit OpenSSL/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --with-libssh2 --with-openssl
tests: "~571"
mingw64_libssh:
name: 64-bit OpenSSL/libssh
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh-devel mingw-w64-x86_64-libssh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --with-libssh --with-openssl
tests: "~571 ~614"
mingw32:
name: 32-bit w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --without-zlib --without-ssl
tests: "!203 !1143"
mingw64:
name: 64-bit w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --without-zlib --without-ssl
tests: "!203 !1143"
mingw32_schannel:
name: 32-bit Schannel/SSPI/WinIDN/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --with-libssh2
tests: "~571"
mingw64_schannel:
name: 64-bit Schannel/SSPI/WinIDN/libssh2
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --with-libssh2
tests: "~571"
mingw32_schannel_nozlib:
name: 32-bit Schannel/SSPI/WinIDN w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw32:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --without-zlib
tests: "!203 !1143"
mingw64_schannel_nozlib:
name: 64-bit Schannel/SSPI/WinIDN w/o zlib
container_img: ghcr.io/mback2k/curl-docker-winbuildenv/msys2-mingw64:ltsc2019
container_cmd: C:\msys64\usr\bin\sh
configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --with-schannel --with-winidn --without-zlib
tests: "!203 !1143"
container:
image: $(container_img)
env:
MSYS2_PATH_TYPE: inherit
steps:
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && $(prepare)"
displayName: 'prepare'
condition: variables.prepare
retryCountOnTaskFailure: 3
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && autoreconf -fi && ./configure $(configure)"
displayName: 'configure $(name)'
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 && make V=1 examples && cd tests && make V=1"
displayName: 'compile'
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 install && PATH=/usr/bin:/bin find . -type f -path '*/.libs/*.exe' -print -execdir mv -t .. {} \;"
displayName: 'install'
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 test-ci"
displayName: 'test'
env:
AZURE_ACCESS_TOKEN: "$(System.AccessToken)"
TFLAGS: "-ac /usr/bin/curl.exe !IDN !SCP ~612 $(tests)"
- script: $(container_cmd) -l -c "cd $(echo '%cd%') && make V=1 test-ci"
displayName: 'test'
env:
AZURE_ACCESS_TOKEN: "$(System.AccessToken)"
TFLAGS: "-ac /usr/bin/curl.exe !IDN !SCP ~612 $(tests)"

deps/curl/.circleci/config.yml vendored

@ -23,6 +23,9 @@
###########################################################################
# View these jobs in the browser: https://app.circleci.com/pipelines/github/curl/curl
#
# The macOS builds use M1 (ARM) machines for platform diversity.
# See https://circleci.com/docs/configuration-reference/#macos-execution-environment
# Use the latest 2.1 version of CircleCI pipeline process engine. See: https://circleci.com/docs/2.0/configuration-reference
version: 2.1
@ -54,84 +57,84 @@ commands:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --without-ssl CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --without-ssl CFLAGS='-Wno-vla -mmacosx-version-min=10.9' CPPFLAGS="-I$(brew --prefix libpsl)/include" LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
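# (Annotation, not part of the commit: Homebrew's libpsl depends on the
# keg-only icu4c, so its include/lib paths and -licuuc/-licudata are
# spelled out for configure and the linker.)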
configure-macos-debug:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --without-ssl --enable-debug CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --without-ssl --enable-debug CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-libssh2:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --without-ssl --with-libssh2=/opt/homebrew/opt/libssh2 --enable-debug CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --without-ssl --with-libssh2=/opt/homebrew/opt/libssh2 --enable-debug CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-libssh-c-ares:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --with-openssl --with-libssh --enable-ares --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --with-openssl --with-libssh --enable-ares --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-libssh:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --with-openssl --with-libssh --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --with-openssl --with-libssh --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata" || tail -1000 config.log
configure-macos-c-ares:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --without-ssl --enable-ares --enable-debug CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --without-ssl --enable-ares --enable-debug CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-http-only:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-maintainer-mode --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-pop3 --disable-rtmp --disable-rtsp --disable-scp --disable-sftp --disable-smb --disable-smtp --disable-telnet --disable-tftp --disable-unix-sockets --disable-shared --without-brotli --without-gssapi --without-libidn2 --without-libpsl --without-librtmp --without-libssh2 --without-nghttp2 --without-ntlm-auth --without-ssl --without-zlib --enable-debug CFLAGS='-Wno-vla -mmacosx-version-min=10.15'
./configure --enable-warnings --enable-maintainer-mode --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-mqtt --disable-pop3 --disable-rtsp --disable-smb --disable-smtp --disable-telnet --disable-tftp --disable-unix-sockets --disable-shared --without-brotli --without-gssapi --without-libidn2 --without-libpsl --without-librtmp --without-libssh2 --without-nghttp2 --without-ssl --without-zlib --enable-debug CFLAGS='-Wno-vla -mmacosx-version-min=10.15'
configure-macos-securetransport-http2:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --with-secure-transport CFLAGS='-Wno-vla -mmacosx-version-min=10.8'
./configure --enable-warnings --enable-websockets --with-secure-transport CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.8' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-openssl-http2:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-libressl-http2:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix libressl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix libressl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-torture:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --disable-shared --disable-threaded-resolver --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --disable-shared --disable-threaded-resolver --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
configure-macos-torture-ftp:
steps:
- run:
command: |
autoreconf -fi
./configure --enable-warnings --enable-websockets --disable-shared --disable-threaded-resolver --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CFLAGS='-Wno-vla -mmacosx-version-min=10.9'
./configure --enable-warnings --enable-websockets --disable-shared --disable-threaded-resolver --with-openssl --enable-debug PKG_CONFIG_PATH="$(brew --prefix openssl)/lib/pkgconfig" CPPFLAGS="-I$(brew --prefix libpsl)/include" CFLAGS='-Wno-vla -mmacosx-version-min=10.9' LDFLAGS="-L$(brew --prefix libpsl)/lib -L$(brew --prefix icu4c)/lib" LIBS="-licuuc -licudata"
install-cares:
steps:
@ -149,7 +152,7 @@ commands:
steps:
- run:
command: |
sudo apt-get update && sudo apt-get install -y libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev python3-pip
sudo apt-get update && sudo apt-get install -y libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev python3-pip libpsl-dev
sudo python3 -m pip install impacket
install-deps-brew:
@ -157,7 +160,7 @@ commands:
- run:
command: |
# Drop libressl as long as we're not trying to build it
echo libtool autoconf automake pkg-config nghttp2 libssh2 openssl libssh c-ares | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
echo libtool autoconf automake pkg-config nghttp2 libssh2 openssl libssh c-ares libpsl icu4c | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
while [ $? -eq 0 ]; do for i in 1 2 3; do brew update && brew bundle install --no-lock --file /tmp/Brewfile && break 2 || { echo Error: wait to try again; sleep 10; } done; false Too many retries; done
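# (Annotation, not part of the commit: the one-liner above retries 'brew
# update && brew bundle install' up to three times with 10-second pauses;
# after the third failure, 'false' ends the loop and fails the step.)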
sudo python3 -m pip install impacket
@ -220,8 +223,8 @@ commands:
build-macos:
steps:
- run: make -j7 V=1
- run: make -j7 V=1 examples
- run: make -j5 V=1
- run: make -j5 V=1 examples
test:
steps:
@ -229,7 +232,7 @@ commands:
test-macos:
steps:
- run: make -j7 V=1 test-ci
- run: make -j5 V=1 test-ci
test-torture:
steps:
@ -237,18 +240,20 @@ commands:
test-torture-ftp:
steps:
- run: make -j5 V=1 test-ci TFLAGS="-n -t --shallow=20 FTP"
# Test 250 takes too long, causing Circle CI to kill the job
- run: make -j5 V=1 test-ci TFLAGS="-n -t --shallow=20 FTP !250 !251"
executors:
ubuntu:
machine:
image: ubuntu-2004:202010-01
image: ubuntu-2004:2024.01.1
jobs:
basic:
executor: ubuntu
steps:
- checkout
- install-deps
- configure
- build
- test
@ -284,6 +289,7 @@ jobs:
executor: ubuntu
steps:
- checkout
- install-deps
- install-cares
- configure-cares
- build
@ -293,6 +299,7 @@ jobs:
executor: ubuntu
steps:
- checkout
- install-deps
- install-libssh
- configure-libssh
- build
@ -300,34 +307,31 @@ jobs:
arm:
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:2024.01.1
resource_class: arm.medium
steps:
- checkout
- install-deps
- configure
- build
- test
arm-cares:
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:2024.01.1
resource_class: arm.medium
steps:
- checkout
- install-deps
- install-cares
- configure-cares-debug
- build
- test
# TODO: All builds with "macos.x86.medium.gen2" must be changed to
# "macos.m1.medium.gen1" in January 2024 because the former will be removed
# (the names should also be changed from macos-x86-* to macos-arm-*). We
# want the M1 (ARM) machines anyway, for platform diversity.
# See https://circleci.com/docs/configuration-reference/#macos-execution-environment
macos-x86-normal:
macos-arm-normal:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -335,10 +339,10 @@ jobs:
- build-macos
- test-macos
macos-x86-debug:
macos-arm-debug:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -346,10 +350,10 @@ jobs:
- build-macos
- test-macos
macos-x86-libssh2:
macos-arm-libssh2:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -357,10 +361,10 @@ jobs:
- build-macos
- test-macos
macos-x86-libssh-c-ares:
macos-arm-libssh-c-ares:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -368,10 +372,10 @@ jobs:
- build-macos
- test-macos
macos-x86-libssh:
macos-arm-libssh:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -379,10 +383,10 @@ jobs:
- build-macos
- test-macos
macos-x86-c-ares:
macos-arm-c-ares:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -390,10 +394,10 @@ jobs:
- build-macos
- test-macos
macos-x86-http-only:
macos-arm-http-only:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -401,10 +405,10 @@ jobs:
- build-macos
- test-macos
macos-x86-http-securetransport-http2:
macos-arm-http-securetransport-http2:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -412,10 +416,10 @@ jobs:
- build-macos
- test-macos
macos-x86-http-openssl-http2:
macos-arm-http-openssl-http2:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -423,10 +427,10 @@ jobs:
- build-macos
- test-macos
macos-x86-http-libressl-http2:
macos-arm-http-libressl-http2:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -434,10 +438,10 @@ jobs:
- build-macos
- test-macos
macos-x86-http-torture:
macos-arm-http-torture:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -445,10 +449,10 @@ jobs:
- build-macos
- test-torture
macos-x86-http-torture-ftp:
macos-arm-http-torture-ftp:
macos:
xcode: 15.0.0
resource_class: macos.x86.medium.gen2
resource_class: macos.m1.medium.gen1
steps:
- checkout
- install-deps-brew
@ -489,52 +493,52 @@ workflows:
jobs:
- arm-cares
macos-x86-normal:
macos-arm-normal:
jobs:
- macos-x86-normal
- macos-arm-normal
macos-x86-debug:
macos-arm-debug:
jobs:
- macos-x86-debug
- macos-arm-debug
macos-x86-libssh2:
macos-arm-libssh2:
jobs:
- macos-x86-libssh2
- macos-arm-libssh2
macos-x86-libssh-c-ares:
macos-arm-libssh-c-ares:
jobs:
- macos-x86-libssh-c-ares
- macos-arm-libssh-c-ares
macos-x86-libssh:
macos-arm-libssh:
jobs:
- macos-x86-libssh
- macos-arm-libssh
macos-x86-c-ares:
macos-arm-c-ares:
jobs:
- macos-x86-c-ares
- macos-arm-c-ares
macos-x86-http-only:
macos-arm-http-only:
jobs:
- macos-x86-http-only
- macos-arm-http-only
macos-x86-http-securetransport-http2:
macos-arm-http-securetransport-http2:
jobs:
- macos-x86-http-securetransport-http2
- macos-arm-http-securetransport-http2
macos-x86-http-openssl-http2:
macos-arm-http-openssl-http2:
jobs:
- macos-x86-http-openssl-http2
- macos-arm-http-openssl-http2
# There are problem linking with LibreSSL on the CI boxes that prevent this
# from working.
#macos-x86-http-libressl-http2:
# jobs:
# - macos-x86-http-libressl-http2
# macos-arm-http-libressl-http2:
# jobs:
# - macos-arm-http-libressl-http2
macos-x86-http-torture:
macos-arm-http-torture:
jobs:
- macos-x86-http-torture
- macos-arm-http-torture
macos-x86-http-torture-ftp:
macos-arm-http-torture-ftp:
jobs:
- macos-x86-http-torture-ftp
- macos-arm-http-torture-ftp

deps/curl/.cirrus.yml vendored

@ -32,7 +32,7 @@ freebsd_task:
'.azure-pipelines.yml',
'.circleci/**',
'.github/**',
'appveyor.yml',
'appveyor.*',
'CMake/**',
'packages/**',
'plan9/**',
@ -54,18 +54,18 @@ freebsd_task:
pkginstall_script:
- pkg update -f
- pkg install -y autoconf automake libtool pkgconf brotli openldap26-client heimdal libpsl libssh2 libidn2 librtmp libnghttp2 nghttp2 stunnel py39-openssl py39-impacket py39-cryptography
- pkg install -y autoconf automake libtool pkgconf brotli openldap26-client heimdal libpsl libssh2 libidn2 librtmp libnghttp2 nghttp2 stunnel py39-openssl py39-impacket py39-cryptography libpsl
- pkg delete -y curl
configure_script:
- autoreconf -fi
# Building with the address sanitizer is causing unexplainable test issues due to timeouts
#- case `uname -r` in
# 12.2*)
# export CC=clang;
# export CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g";
# export CXXFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g";
# export LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer" ;;
# esac
# - case `uname -r` in
# 12.2*)
# export CC=clang;
# export CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g";
# export CXXFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g";
# export LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer" ;;
# esac
- ./configure --prefix="${HOME}"/install --enable-debug --with-openssl --with-libssh2 --with-brotli --with-gssapi --with-libidn2 --enable-manual --enable-ldap --enable-ldaps --with-librtmp --with-libpsl --with-nghttp2 || { tail -300 config.log; false; }
compile_script:
- make V=1 && make V=1 examples && cd tests && make V=1

deps/curl/.github/dependabot.yml vendored Normal file

@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

deps/curl/.github/labeler.yml vendored

@ -13,481 +13,246 @@
# PR is all about that one topic, like HTTP/3), while the second ones are
# "addendums" that give useful information about a PR that's really mostly
# something else (e.g. CI if the PR also touches CI jobs).
#
# N.B. any-glob-to-all-files is misnamed; it acts like one-glob-to-all-files.
# Therefore, to get any-glob-to-all-files semantics, there must be a single glob
# with all matching patterns within braces.
#
# See https://github.com/actions/labeler/ for documentation on this file.
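#
# Illustrative example, not part of this commit: because of that quirk, a
# label meant to fire only when every changed file is documentation would
# need one combined brace-glob (hypothetical label, not defined below):
#
#   docs-only:
#     - all:
#       - changed-files:
#         - any-glob-to-all-files:
#           - '{docs/**,**/*.md}'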
appleOS:
- all:
- changed-files:
- any-glob-to-all-files:
- '.github/workflows/macos.yml'
- 'lib/config-mac.h'
- 'lib/macos*'
- 'lib/vtls/sectransp*'
- 'm4/curl-sectransp.m4'
- 'MacOSX-Framework'
- all:
- changed-files:
- any-glob-to-all-files:
- '{.github/workflows/macos.yml,lib/config-mac.h,lib/macos*,lib/vtls/sectransp*,m4/curl-sectransp.m4,MacOSX-Framework}'
authentication:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/mk-ca-bundle.1'
- 'docs/libcurl/opts/CURLINFO_HTTPAUTH*'
- 'docs/libcurl/opts/CURLINFO_PROXYAUTH*'
- 'docs/libcurl/opts/CURLOPT_KRB*'
- 'docs/libcurl/opts/CURLOPT_SASL*'
- 'docs/libcurl/opts/CURLOPT_SERVICE_NAME*'
- 'docs/libcurl/opts/CURLOPT_USERNAME*'
- 'docs/libcurl/opts/CURLOPT_USERPWD*'
- 'docs/libcurl/opts/CURLOPT_XOAUTH*'
- 'lib/*gssapi*'
- 'lib/*krb5*'
- 'lib/*ntlm*'
- 'lib/curl_sasl.*'
- 'lib/http_aws*'
- 'lib/http_digest.*'
- 'lib/http_negotiate.*'
- 'lib/vauth/**'
- 'tests/server/fake_ntlm.c'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/mk-ca-bundle.1,docs/libcurl/opts/CURLINFO_HTTPAUTH*,docs/libcurl/opts/CURLINFO_PROXYAUTH*,docs/libcurl/opts/CURLOPT_KRB*,docs/libcurl/opts/CURLOPT_SASL*,docs/libcurl/opts/CURLOPT_SERVICE_NAME*,docs/libcurl/opts/CURLOPT_USERNAME*,docs/libcurl/opts/CURLOPT_USERPWD*,docs/libcurl/opts/CURLOPT_XOAUTH*,lib/*gssapi*,lib/*krb5*,lib/*ntlm*,lib/curl_sasl.*,lib/http_aws*,lib/http_digest.*,lib/http_negotiate.*,lib/vauth/**,tests/server/fake_ntlm.c}'
build:
- all:
- changed-files:
- any-glob-to-all-files:
- '**/CMakeLists.txt'
- '**/Makefile.am'
- '**/Makefile.inc'
- '**/Makefile.mk'
- '**/*.m4'
- '**/*.mk'
- '*.m4'
- 'docs/INSTALL.cmake'
- 'lib/curl_config.h.cmake'
- 'lib/libcurl*.in'
- 'CMake/**'
- 'CMakeLists.txt'
- 'configure.ac'
- 'm4/**'
- 'MacOSX-Framework'
- 'Makefile.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- 'libcurl.def'
- all:
- changed-files:
- any-glob-to-all-files:
- '{**/CMakeLists.txt,**/Makefile.am,**/Makefile.inc,**/Makefile.mk,**/*.m4,**/*.mk,*.m4,docs/INSTALL.cmake,lib/curl_config.h.cmake,lib/libcurl*.in,CMake/**,CMakeLists.txt,configure.ac,m4/**,MacOSX-Framework,Makefile.*,packages/**,plan9/**,projects/**,winbuild/**,libcurl.def}'
CI:
- all:
- changed-files:
- any-glob-to-any-file:
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- '.github/**'
- 'appveyor.yml'
- 'scripts/ci*'
- 'tests/azure.pm'
- 'tests/appveyor.pm'
- 'tests/CI.md'
- all:
- changed-files:
- any-glob-to-any-file:
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- '.github/**'
- 'appveyor.*'
- 'scripts/ci*'
- 'tests/azure.pm'
- 'tests/appveyor.pm'
- 'tests/CI.md'
cmake:
- all:
- changed-files:
- any-glob-to-all-files:
- '**/CMakeLists.txt'
- 'CMake/**'
- 'docs/INSTALL.cmake'
- 'lib/curl_config.h.cmake'
- all:
- changed-files:
- any-glob-to-all-files:
- '{**/CMakeLists.txt,CMake/**,docs/INSTALL.cmake,lib/curl_config.h.cmake}'
cmdline tool:
- all:
- changed-files:
- any-glob-to-any-file:
- 'docs/cmdline-opts/**'
- 'src/**'
- all:
- changed-files:
- any-glob-to-any-file:
- 'docs/cmdline-opts/**'
- 'src/**'
connecting & proxies:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/CONNECTION-FILTERS.md'
- 'docs/examples/ipv6.c'
- 'docs/libcurl/opts/CURLINFO_CONNECT*'
- 'docs/libcurl/opts/CURLINFO_PROXY*'
- 'docs/libcurl/opts/CURLOPT_ADDRESS*'
- 'docs/libcurl/opts/CURLOPT_CONNECT*'
- 'docs/libcurl/opts/CURLOPT_HAPROXY*'
- 'docs/libcurl/opts/CURLOPT_OPENSOCKET*'
- 'docs/libcurl/opts/CURLOPT_PRE_PROXY*'
- 'docs/libcurl/opts/CURLOPT_PROXY*'
- 'docs/libcurl/opts/CURLOPT_SOCKOPT*'
- 'docs/libcurl/opts/CURLOPT_SOCKS*'
- 'docs/libcurl/opts/CURLOPT_TCP*'
- 'docs/libcurl/opts/CURLOPT_TIMEOUT*'
- 'lib/cf-*proxy.*'
- 'lib/cf-socket.*'
- 'lib/cfilters.*'
- 'lib/conncache.*'
- 'lib/connect.*'
- 'lib/http_proxy.*'
- 'lib/if2ip.*'
- 'lib/noproxy.*'
- 'lib/socks.*'
- 'tests/server/socksd.c'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/CONNECTION-FILTERS.md,docs/examples/ipv6.c,docs/libcurl/opts/CURLINFO_CONNECT*,docs/libcurl/opts/CURLINFO_PROXY*,docs/libcurl/opts/CURLOPT_ADDRESS*,docs/libcurl/opts/CURLOPT_CONNECT*,docs/libcurl/opts/CURLOPT_HAPROXY*,docs/libcurl/opts/CURLOPT_OPENSOCKET*,docs/libcurl/opts/CURLOPT_PRE_PROXY*,docs/libcurl/opts/CURLOPT_PROXY*,docs/libcurl/opts/CURLOPT_SOCKOPT*,docs/libcurl/opts/CURLOPT_SOCKS*,docs/libcurl/opts/CURLOPT_TCP*,docs/libcurl/opts/CURLOPT_TIMEOUT*,lib/cf-*proxy.*,lib/cf-socket.*,lib/cfilters.*,lib/conncache.*,lib/connect.*,lib/http_proxy.*,lib/if2ip.*,lib/noproxy.*,lib/socks.*,tests/server/socksd.c}'
cookies:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/HTTP-COOKIES.md'
- 'docs/cmdline-opts/cookie*'
- 'docs/cmdline-opts/junk-session-cookies.d'
- 'docs/libcurl/opts/CURLINFO_COOKIE*'
- 'docs/libcurl/opts/CURLOPT_COOKIE*'
- 'docs/examples/cookie_interface.c'
- 'lib/cookie.*'
- 'lib/psl.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/HTTP-COOKIES.md,docs/cmdline-opts/cookie*,docs/cmdline-opts/junk-session-cookies.md,docs/libcurl/opts/CURLINFO_COOKIE*,docs/libcurl/opts/CURLOPT_COOKIE*,docs/examples/cookie_interface.c,lib/cookie.*,lib/psl.*}'
cryptography:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/CIPHERS.md'
- 'docs/RUSTLS.md'
- 'docs/libcurl/opts/CURLOPT_EGDSOCKET*'
- 'lib/*sha256*'
- 'lib/curl_des.*'
- 'lib/curl_hmac.*'
- 'lib/curl_md?.*'
- 'lib/md?.*'
- 'lib/rand.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/CIPHERS.md,docs/RUSTLS.md,docs/libcurl/opts/CURLOPT_EGDSOCKET*,lib/*sha256*,lib/*sha512*,lib/curl_des.*,lib/curl_hmac.*,lib/curl_md?.*,lib/md?.*,lib/rand.*}'
DICT:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/dict.*'
- 'tests/dictserver.py'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/dict.*,tests/dictserver.py}'
documentation:
- all:
- changed-files:
- any-glob-to-all-files:
- '**/*.md'
- '**/*.txt'
- '**/*.1'
- '**/*.3'
- 'CHANGES'
- 'docs/**'
- 'GIT-INFO'
- 'LICENSES/**'
- 'README'
- 'RELEASE-NOTES'
- AllGlobsToAllFiles:
# negative matches
- '!**/CMakeLists.txt'
- '!**/Makefile.am'
- all:
- changed-files:
- any-glob-to-all-files:
- '{.github/workflows/badwords.yml,.github/workflows/man-examples.yml,.github/workflows/synopsis.yml,.github/scripts/badwords.*,.github/scripts/cd2cd,.github/scripts/cd2nroff,.github/scripts/cdall.pl,.github/scripts/nroff2cd,.github/scripts/verify-examples.pl,.github/scripts/verify-synopsis.pl,**/*.md,**/*.txt,**/*.1,CHANGES,docs/**,LICENSES/**,README,RELEASE-NOTES,scripts/cd*}'
- all-globs-to-all-files:
# negative matches
- '!**/CMakeLists.txt'
- '!**/Makefile.am'
FTP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/libcurl/opts/CURLINFO_FTP*'
- 'docs/libcurl/opts/CURLOPT_FTP*'
- 'docs/libcurl/opts/CURLOPT_WILDCARDMATCH*'
- 'docs/examples/ftp*'
- 'lib/curl_fnmatch.*'
- 'lib/curl_range.*'
- 'lib/ftp*'
- 'tests/ftp*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/libcurl/opts/CURLINFO_FTP*,docs/libcurl/opts/CURLOPT_FTP*,docs/libcurl/opts/CURLOPT_WILDCARDMATCH*,docs/examples/ftp*,lib/curl_fnmatch.*,lib/curl_range.*,lib/ftp*,tests/ftp*}'
GOPHER:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/gopher*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/gopher*}'
HTTP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/examples/hsts*'
- 'docs/examples/http-*'
- 'docs/examples/httpput*'
- 'docs/examples/https*'
- 'docs/examples/*post*'
- 'docs/HSTS.md'
- 'docs/HTTP-COOKIES.md'
- 'docs/libcurl/opts/CURLINFO_COOKIE*'
- 'docs/libcurl/opts/CURLOPT_COOKIE*'
- 'docs/libcurl/opts/CURLINFO_HTTP_**'
- 'docs/libcurl/opts/CURLINFO_REDIRECT*'
- 'docs/libcurl/opts/CURLINFO_REFER*'
- 'docs/libcurl/opts/CURLOPT_FOLLOWLOCATION*'
- 'docs/libcurl/opts/CURLOPT_HSTS*'
- 'docs/libcurl/opts/CURLOPT_HTTP*'
- 'docs/libcurl/opts/CURLOPT_POST.*'
- 'docs/libcurl/opts/CURLOPT_POSTFIELD*'
- 'docs/libcurl/opts/CURLOPT_POSTREDIR*'
- 'docs/libcurl/opts/CURLOPT_REDIR*'
- 'docs/libcurl/opts/CURLOPT_REFER*'
- 'docs/libcurl/opts/CURLOPT_TRAILER*'
- 'docs/libcurl/opts/CURLOPT_TRANSFER_ENCODING*'
- 'lib/cf-https*'
- 'lib/cf-h1*'
- 'lib/cf-h2*'
- 'lib/cookie.*'
- 'lib/http*'
- 'tests/http*'
- 'tests/http-server.pl'
- 'tests/http/*'
- 'tests/nghttp*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/examples/hsts*,docs/examples/http-*,docs/examples/httpput*,docs/examples/https*,docs/examples/*post*,docs/HSTS.md,docs/HTTP-COOKIES.md,docs/libcurl/opts/CURLINFO_COOKIE*,docs/libcurl/opts/CURLOPT_COOKIE*,docs/libcurl/opts/CURLINFO_HTTP_**,docs/libcurl/opts/CURLINFO_REDIRECT*,docs/libcurl/opts/CURLINFO_REFER*,docs/libcurl/opts/CURLOPT_FOLLOWLOCATION*,docs/libcurl/opts/CURLOPT_HSTS*,docs/libcurl/opts/CURLOPT_HTTP*,docs/libcurl/opts/CURLOPT_POST.*,docs/libcurl/opts/CURLOPT_POSTFIELD*,docs/libcurl/opts/CURLOPT_POSTREDIR*,docs/libcurl/opts/CURLOPT_REDIR*,docs/libcurl/opts/CURLOPT_REFER*,docs/libcurl/opts/CURLOPT_TRAILER*,docs/libcurl/opts/CURLOPT_TRANSFER_ENCODING*,lib/cf-https*,lib/cf-h1*,lib/cf-h2*,lib/cookie.*,lib/http*,tests/http*,tests/http-server.pl,tests/http/*,tests/nghttp*}'
HTTP/2:
- all:
- changed-files:
- any-glob-to-all-files:
- 'CMake/FindNGHTTP2.cmake'
- 'CMake/FindQUICHE.cmake'
- 'docs/HTTP2.md'
- 'docs/libcurl/opts/CURLOPT_STREAM*'
- 'docs/examples/http2*'
- 'lib/http2*'
- 'tests/http2-server.pl'
- all:
- changed-files:
- any-glob-to-all-files:
- '{CMake/FindNGHTTP2.cmake,CMake/FindQUICHE.cmake,docs/HTTP2.md,docs/libcurl/opts/CURLOPT_STREAM*,docs/examples/http2*,lib/http2*,tests/http2-server.pl}'
HTTP/3:
- all:
- changed-files:
- any-glob-to-all-files:
- '.github/workflows/ngtcp2*'
- '.github/workflows/quiche*'
- 'CMake/FindMSH3.cmake'
- 'CMake/FindNGHTTP3.cmake'
- 'CMake/FindNGTCP2.cmake'
- 'docs/HTTP3.md'
- 'docs/examples/http3*'
- 'lib/vquic/**'
- 'tests/http3-server.pl'
- 'tests/nghttpx.conf'
- all:
- changed-files:
- any-glob-to-all-files:
- '{.github/workflows/ngtcp2*,.github/workflows/quiche*,.github/workflows/osslq*,CMake/FindMSH3.cmake,CMake/FindNGHTTP3.cmake,CMake/FindNGTCP2.cmake,docs/HTTP3.md,docs/examples/http3*,lib/vquic/**,tests/http3-server.pl,tests/nghttpx.conf}'
Hyper:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/HYPER.md'
- 'lib/c-hyper.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/HYPER.md,lib/c-hyper.*}'
IMAP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/imap*'
- 'docs/examples/imap*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/imap*,docs/examples/imap*}'
LDAP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/*ldap*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/*ldap*}'
libcurl API:
- all:
- changed-files:
- any-glob-to-any-file:
- 'docs/libcurl/ABI.md'
- 'docs/libcurl/curl_*.3'
- 'include/curl/**'
- all:
- changed-files:
- any-glob-to-any-file:
- 'docs/libcurl/ABI.md'
- 'docs/libcurl/curl_*.md'
- 'include/curl/**'
logging:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/cmdline-opts/trace*'
- 'docs/libcurl/curl_global_trace*'
- 'lib/curl_trc*'
- 'tests/http/test_15_tracing.py'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/cmdline-opts/trace*,docs/libcurl/curl_global_trace*,lib/curl_trc*,tests/http/test_15_tracing.py}'
MIME:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/libcurl/curl_form*'
- 'docs/libcurl/curl_mime_*'
- 'docs/libcurl/opts/CURLOPT_MIME*'
- 'docs/libcurl/opts/CURLOPT_HTTPPOST*'
- 'lib/formdata*'
- 'lib/mime*'
- 'src/tool_formparse.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/libcurl/curl_form*,docs/libcurl/curl_mime_*,docs/libcurl/opts/CURLOPT_MIME*,docs/libcurl/opts/CURLOPT_HTTPPOST*,lib/formdata*,lib/mime*,src/tool_formparse.*}'
MQTT:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/MQTT.md'
- 'lib/mqtt*'
- 'tests/server/mqttd.c'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/MQTT.md,lib/mqtt*,tests/server/mqttd.c}'
name lookup:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/examples/resolve.c'
- 'docs/libcurl/opts/CURLINFO_NAMELOOKUP*'
- 'docs/libcurl/opts/CURLOPT_DNS*'
- 'docs/libcurl/opts/CURLOPT_DOH*'
- 'docs/libcurl/opts/CURLOPT_RESOLVE*'
- 'lib/asyn*'
- 'lib/curl_gethostname.*'
- 'lib/doh*'
- 'lib/host*'
- 'lib/idn*'
- 'lib/inet_pton.*'
- 'lib/socketpair*'
- 'tests/server/resolve.c'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/examples/resolve.c,docs/libcurl/opts/CURLINFO_NAMELOOKUP*,docs/libcurl/opts/CURLOPT_DNS*,docs/libcurl/opts/CURLOPT_DOH*,docs/libcurl/opts/CURLOPT_RESOLVE*,lib/asyn*,lib/curl_gethostname.*,lib/doh*,lib/host*,lib/idn*,lib/inet_pton.*,lib/socketpair*,tests/server/resolve.c}'
POP3:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/examples/pop3*'
- 'lib/pop3.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/examples/pop3*,lib/pop3.*}'
RTMP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/curl_rtmp.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/curl_rtmp.*}'
RTSP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/libcurl/opts/CURLINFO_RTSP*'
- 'docs/libcurl/opts/CURLOPT_RTSP*'
- 'lib/rtsp.*'
- 'tests/rtspserver.pl'
- 'tests/server/rtspd.c'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/libcurl/opts/CURLINFO_RTSP*,docs/libcurl/opts/CURLOPT_RTSP*,lib/rtsp.*,tests/rtspserver.pl,tests/server/rtspd.c}'
SCP/SFTP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'CMake/FindLibSSH2.cmake'
- 'docs/libcurl/opts/CURLOPT_SSH*'
- 'docs/examples/sftp*'
- 'lib/vssh/**'
- 'tests/sshhelp.pm'
- 'tests/sshserver.pl'
- all:
- changed-files:
- any-glob-to-all-files:
- '{CMake/FindLibSSH2.cmake,docs/libcurl/opts/CURLOPT_SSH*,docs/examples/sftp*,lib/vssh/**,tests/sshhelp.pm,tests/sshserver.pl}'
script:
- all:
- changed-files:
- any-glob-to-all-files:
- '**/*.pl'
- '**/*.sh'
- 'curl-config.in'
- 'docs/curl-config.1'
- 'docs/mk-ca-bundle.1'
- 'docs/THANKS-filter'
- 'scripts/**'
- all:
- changed-files:
- any-glob-to-all-files:
- '{**/*.pl,**/*.sh,curl-config.in,docs/curl-config.1,docs/mk-ca-bundle.1,docs/THANKS-filter,scripts/**}'
SMB:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/smb.*'
- 'tests/smbserver.py'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/smb.*,tests/smbserver.py}'
SMTP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/examples/smtp-*'
- 'docs/libcurl/opts/CURLOPT_MAIL*'
- 'lib/smtp.*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/examples/smtp-*,docs/libcurl/opts/CURLOPT_MAIL*,lib/smtp.*}'
tests:
- all:
- changed-files:
- any-glob-to-any-file:
- 'tests/**'
- all:
- changed-files:
- any-glob-to-any-file:
- 'tests/**'
TFTP:
- all:
- changed-files:
- any-glob-to-all-files:
- 'lib/tftp.*'
- 'tests/tftpserver.pl'
- 'tests/server/tftp*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{lib/tftp.*,tests/tftpserver.pl,tests/server/tftp*}'
TLS:
- all:
- changed-files:
- any-glob-to-all-files:
- 'CMake/FindBearSSL.cmake'
- 'CMake/FindMbedTLS.cmake'
- 'CMake/FindWolfSSL.cmake'
- 'docs/examples/ssl*'
- 'docs/examples/*ssl.*'
- 'docs/examples/*tls.*'
- 'docs/SSL*'
- 'docs/libcurl/curl_global_sslset*'
- 'docs/libcurl/opts/CURLINFO_CA*'
- 'docs/libcurl/opts/CURLINFO_CERT*'
- 'docs/libcurl/opts/CURLINFO_SSL*'
- 'docs/libcurl/opts/CURLINFO_TLS*'
- 'docs/libcurl/opts/CURLOPT_CA*'
- 'docs/libcurl/opts/CURLOPT_CERT*'
- 'docs/libcurl/opts/CURLOPT_PINNEDPUBLICKEY*'
- 'docs/libcurl/opts/CURLOPT_SSL*'
- 'docs/libcurl/opts/CURLOPT_TLS*'
- 'docs/libcurl/opts/CURLOPT_USE_SSL*'
- 'lib/vtls/**'
- 'm4/curl-bearssl.m4'
- 'm4/curl-gnutls.m4'
- 'm4/curl-mbedtls.m4'
- 'm4/curl-openssl.m4'
- 'm4/curl-rustls.m4'
- 'm4/curl-schannel.m4'
- 'm4/curl-sectransp.m4'
- 'm4/curl-wolfssl.m4'
- all:
- changed-files:
- any-glob-to-all-files:
- '{CMake/FindBearSSL.cmake,CMake/FindMbedTLS.cmake,CMake/FindWolfSSL.cmake,docs/examples/ssl*,docs/examples/*ssl.*,docs/examples/*tls.*,docs/SSL*,docs/libcurl/curl_global_sslset*,docs/libcurl/opts/CURLINFO_CA*,docs/libcurl/opts/CURLINFO_CERT*,docs/libcurl/opts/CURLINFO_SSL*,docs/libcurl/opts/CURLINFO_TLS*,docs/libcurl/opts/CURLOPT_CA*,docs/libcurl/opts/CURLOPT_CERT*,docs/libcurl/opts/CURLOPT_PINNEDPUBLICKEY*,docs/libcurl/opts/CURLOPT_SSL*,docs/libcurl/opts/CURLOPT_TLS*,docs/libcurl/opts/CURLOPT_USE_SSL*,lib/vtls/**,m4/curl-bearssl.m4,m4/curl-gnutls.m4,m4/curl-mbedtls.m4,m4/curl-openssl.m4,m4/curl-rustls.m4,m4/curl-schannel.m4,m4/curl-sectransp.m4,m4/curl-wolfssl.m4}'
URL:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/libcurl/curl_url*'
- 'docs/URL-SYNTAX.md'
- 'docs/examples/parseurl*'
- 'include/curl/urlapi.h'
- 'lib/urlapi*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/libcurl/curl_url*,docs/URL-SYNTAX.md,docs/examples/parseurl*,include/curl/urlapi.h,lib/urlapi*}'
WebSocket:
- all:
- changed-files:
- any-glob-to-all-files:
- 'docs/WEBSOCKET.md*'
- 'docs/examples/websocket*'
- 'docs/libcurl/curl_ws_*'
- 'docs/libcurl/libcurl-ws*'
- 'docs/libcurl/opts/CURLOPT_WS_*'
- 'include/curl/websockets.h'
- 'lib/ws.*'
- 'tests/http/clients/ws*'
- 'tests/http/test_20_websockets.py'
- 'tests/http/testenv/ws*'
- all:
- changed-files:
- any-glob-to-all-files:
- '{docs/WEBSOCKET.md*,docs/examples/websocket*,docs/libcurl/curl_ws_*,docs/libcurl/libcurl-ws*,docs/libcurl/opts/CURLOPT_WS_*,include/curl/websockets.h,lib/ws.*,tests/http/clients/ws*,tests/http/test_20_websockets.py,tests/http/testenv/ws*}'
Windows:
- all:
- changed-files:
- any-glob-to-all-files:
- '**/Makefile.mk'
- 'appveyor.yml'
- 'CMake/Platforms/WindowsCache.cmake'
- 'lib/*win32*'
- 'lib/curl_multibyte.*'
- 'lib/rename.*'
- 'lib/vtls/schannel*'
- 'm4/curl-schannel.m4'
- 'projects/**'
- 'src/tool_doswin.c'
- 'winbuild/**'
- 'libcurl.def'
- all:
- changed-files:
- any-glob-to-all-files:
- '{appveyor.*,CMake/Platforms/WindowsCache.cmake,lib/*win32*,lib/curl_multibyte.*,lib/rename.*,lib/vtls/schannel*,m4/curl-schannel.m4,projects/**,src/tool_doswin.c,winbuild/**,libcurl.def}'

deps/curl/.github/scripts/badwords.pl vendored Normal file

@ -0,0 +1,67 @@
#!/usr/bin/perl
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
#
# bad[:=]correct
#
# If separator is '=', the string will be compared case sensitively.
# If separator is ':', the check is done case insensitively.
#
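# For example, the shipped badwords.txt contains both kinds of entry:
#   web page:webpage    flags "web page", "Web Page", "WEB PAGE", ...
#   url =URL            flags only the exact lowercase "url " ('=' form)
#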
my $w;
while(<STDIN>) {
chomp;
if($_ =~ /^#/) {
next;
}
if($_ =~ /^([^:=]*)([:=])(.*)/) {
my ($bad, $sep, $better)=($1, $2, $3);
push @w, $bad;
$alt{$bad} = $better;
if($sep eq "=") {
$exactcase{$bad} = 1;
}
}
}
my $errors;
sub file {
my ($f) = @_;
my $l = 0;
open(F, "<$f");
while(<F>) {
my $in = $_;
$l++;
chomp $in;
if($in =~ /^ /) {
next;
}
# remove the link part
$in =~ s/(\[.*\])\(.*\)/$1/g;
# remove backticked texts
$in =~ s/\`.*\`//g;
foreach my $w (@w) {
my $case = $exactcase{$w};
if(($in =~ /^(.*)$w/i && !$case) ||
($in =~ /^(.*)$w/ && $case) ) {
my $p = $1;
my $c = length($p)+1;
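# (Annotation, not in the commit: the prints below mimic gcc-style
# diagnostics, e.g. "file.md:12:8: error: found bad word ..." with a
# caret/tilde underline and a suggested replacement.)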
print STDERR "$f:$l:$c: error: found bad word \"$w\"\n";
printf STDERR " %4d | $in\n", $l;
printf STDERR " | %*s^%s\n", length($p), " ",
"~" x (length($w)-1);
printf STDERR " maybe use \"%s\" instead?\n", $alt{$w};
$errors++;
}
}
}
close(F);
}
my @files = @ARGV;
foreach my $each (@files) {
file($each);
}
exit $errors;

deps/curl/.github/scripts/badwords.txt vendored Normal file

@ -0,0 +1,50 @@
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
#
back-end:backend
e-mail:email
run-time:runtime
set-up:setup
tool chain:toolchain
tool-chain:toolchain
wild-card:wildcard
wild card:wildcard
i'm:I am
you've:You have
they've:They have
they're:They are
should've:should have
don't:do not
could've:could have
doesn't:does not
isn't:is not
a html: an html
a http: an http
a ftp: an ftp
url =URL
internet\b=Internet
isation:ization
it's:it is
there's:there is
[^.]\. And: Rewrite it somehow?
^(And|So|But) = Rewrite it somehow?
\. But: Rewrite it somehow?
\. So : Rewrite without "so" ?
dir :directory
you'd:you would
you'll:you will
can't:cannot
that's:that is
web page:webpage
host name\b:hostname
host names\b:hostnames
file name\b:filename
file names\b:filenames
\buser name\b:username
\buser names\b:usernames
didn't:did not
doesn't:does not
won't:will not
couldn't:could not
\bwill\b:rewrite to present tense

deps/curl/.github/scripts/cleancmd.pl vendored Normal file

@ -0,0 +1,54 @@
#!/usr/bin/perl
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
#
# Input: a cmdline docs markdown, it gets modified *in place*
#
# The main purpose is to strip off the leading meta-data part, but also to
# clean up whatever else the spell checker might have a problem with that we
# still deem is fine.
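#
# Sketch of the expected input (field values are hypothetical): the file
# begins with a meta-data block delimited by two '---' lines,
#
#   ---
#   c: Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#   SPDX-License-Identifier: curl
#   Long: example
#   ---
#
# and everything up to and including the second '---' is skipped below.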
my $header = 1;
while(1) {
# set this if the markdown has no meta-data header to skip
if($ARGV[0] eq "--no-header") {
shift @ARGV;
$header = 0;
}
else {
last;
}
}
my $f = $ARGV[0];
open(F, "<$f") or die;
my $ignore = $header;
my $sepcount = 0;
my @out;
while(<F>) {
if(/^---/ && $header) {
if(++$sepcount == 2) {
$ignore = 0;
}
next;
}
next if($ignore);
# strip out all long command line options
$_ =~ s/--[a-z0-9-]+//g;
# strip out https URLs, we don't want them spellchecked
$_ =~ s!https://[a-z0-9\#_/.-]+!!gi;
push @out, $_;
}
close(F);
if(!$ignore) {
open(O, ">$f") or die;
print O @out;
close(O);
}


@ -3,38 +3,31 @@
#
# SPDX-License-Identifier: curl
#
# Input: a libcurl nroff man page
# Output: the same file, minus the SYNOPSIS and the EXAMPLE sections
# Given: a libcurl curldown man page
# Outputs: the same file, minus the SYNOPSIS and the EXAMPLE sections
#
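# (Annotation, not in the commit: curldown section headings are markdown
# level-1 headers such as '# SYNOPSIS', which is what the '$ignore'
# toggling below keys on.)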
my $f = $ARGV[0];
my $o = $ARGV[1];
open(F, "<$f") or die;
open(O, ">$o") or die;
my @out;
my $ignore = 0;
while(<F>) {
if($_ =~ /^.SH (SYNOPSIS|EXAMPLE|\"SEE ALSO\"|SEE ALSO)/) {
if($_ =~ /^# (SYNOPSIS|EXAMPLE)/) {
$ignore = 1;
}
elsif($ignore && ($_ =~ /^.SH/)) {
elsif($ignore && ($_ =~ /^# [A-Z]/)) {
$ignore = 0;
}
elsif(!$ignore) {
# filter out mentioned CURLE_ names
# **bold**
$_ =~ s/\*\*(\S.*?)\*\*//g;
# *italics*
$_ =~ s/\*(\S.*?)\*//g;
$_ =~ s/CURL(M|SH|U|H)code//g;
$_ =~ s/CURL_(READ|WRITE)FUNC_[A-Z0-9_]*//g;
$_ =~ s/CURL_CSELECT_[A-Z0-9_]*//g;
$_ =~ s/CURL_DISABLE_[A-Z0-9_]*//g;
$_ =~ s/CURL_FORMADD_[A-Z0-9_]*//g;
$_ =~ s/CURL_HET_DEFAULT//g;
$_ =~ s/CURL_IPRESOLVE_[A-Z0-9_]*//g;
$_ =~ s/CURL_PROGRESSFUNC_CONTINUE//g;
$_ =~ s/CURL_REDIR_[A-Z0-9_]*//g;
$_ =~ s/CURL_RTSPREQ_[A-Z0-9_]*//g;
$_ =~ s/CURL_TIMECOND_[A-Z0-9_]*//g;
$_ =~ s/CURL_VERSION_[A-Z0-9_]*//g;
$_ =~ s/CURL_[A-Z0-9_]*//g;
$_ =~ s/CURLALTSVC_[A-Z0-9_]*//g;
$_ =~ s/CURLAUTH_[A-Z0-9_]*//g;
$_ =~ s/CURLE_[A-Z0-9_]*//g;
@ -56,24 +49,38 @@ while(<F>) {
$_ =~ s/CURLPX_[A-Z0-9_]*//g;
$_ =~ s/CURLSHE_[A-Z0-9_]*//g;
$_ =~ s/CURLSHOPT_[A-Z0-9_]*//g;
$_ =~ s/CURLSSLOPT_[A-Z0-9_]*//g;
$_ =~ s/CURLSSH_[A-Z0-9_]*//g;
$_ =~ s/CURLSSLBACKEND_[A-Z0-9_]*//g;
$_ =~ s/CURLU_[A-Z0-9_]*//g;
$_ =~ s/CURLUPART_[A-Z0-9_]*//g;
#$_ =~ s/\bCURLU\b//g; # stand-alone CURLU
$_ =~ s/CURLUE_[A-Z0-9_]*//g;
$_ =~ s/CURLHE_[A-Z0-9_]*//g;
$_ =~ s/CURLWS_[A-Z0-9_]*//g;
$_ =~ s/CURLKH[A-Z0-9_]*//g;
$_ =~ s/CURLUPART_[A-Z0-9_]*//g;
$_ =~ s/CURLUSESSL_[A-Z0-9_]*//g;
$_ =~ s/curl_global_(init_mem|sslset|cleanup)//g;
$_ =~ s/CURLPAUSE_[A-Z0-9_]*//g;
$_ =~ s/CURLHSTS_[A-Z0-9_]*//g;
$_ =~ s/curl_global_([a-z_]*)//g;
$_ =~ s/curl_(strequal|strnequal|formadd|waitfd|formget|getdate|formfree)//g;
$_ =~ s/curl_easy_(nextheader|duphandle)//g;
$_ =~ s/curl_multi_fdset//g;
$_ =~ s/curl_easy_([a-z]*)//g;
$_ =~ s/curl_multi_([a-z_]*)//g;
$_ =~ s/curl_mime_(subparts|addpart|filedata|data_cb)//g;
$_ =~ s/curl_ws_(send|recv|meta)//g;
$_ =~ s/curl_url_(dup)//g;
$_ =~ s/curl_pushheader_by(name|num)//g;
$_ =~ s/libcurl-(env|ws)//g;
$_ =~ s/(^|\W)((tftp|https|http|ftp):\/\/[a-z0-9\-._~%:\/?\#\[\]\@!\$&'()*+,;=]+)//gi;
print O $_;
$_ =~ s/libcurl\\-(env|ws)//g;
$_ =~ s/(^|\W)((tftp|https|http|ftp):\/\/[a-z0-9\-._~%:\/?\#\[\]\@!\$&'()*+,;=\\]+)//gi;
push @out, $_;
}
}
close(F);
open(O, ">$f") or die;
for my $l (@out) {
print O $l;
}
close(O);


@ -39,7 +39,9 @@ auth
autobuild
autobuilds
Autoconf
autoconf
Automake
automake
Autotools
autotools
AVR
@ -50,6 +52,7 @@ backend
backends
backoff
backticks
balancers
Baratov
basename
bashrc
@ -83,6 +86,7 @@ CCC
CDN
CentOS
CFLAGS
cflags
CGI's
CHACHA
chacha
@ -110,8 +114,10 @@ cliget
closesocket
CMake
cmake
CMake's
cmake's
CMakeLists
CNA
CodeQL
codeql
CODESET
@ -138,10 +144,11 @@ cshrc
CTRL
cURL
CURLcode
curldown
CURLE
CURLH
CURLINFO
curlimages
CURLINFO
curlrc
curltest
customizable
@ -161,8 +168,8 @@ deepcode
DELE
DER
deselectable
Deserialized
deserialization
Deserialized
destructor
detections
dev
@ -279,12 +286,14 @@ GPL
GPLed
Greear
groff
gsasl
GSKit
gskit
GSS
GSSAPI
GTFO
Guenter
GUIs
Gunderson
Gustafsson
gzip
@ -350,11 +359,10 @@ interoperable
interoperates
IoT
ipadOS
IPFS
IPNS
ipld
trustless
IPCXN
IPFS
ipld
IPNS
IPv
IPv4
IPv4/6
@ -420,6 +428,7 @@ libssh
libSSH
libssh2
Libtool
libtool
libuv
libWebSocket
libz
@ -444,6 +453,8 @@ Makefile
makefiles
malloc
mallocs
manpage
manpages
maprintf
Marek
Mavrogiannopoulos
@ -790,6 +801,7 @@ Tekniska
testability
TFTP
tftp
threadsafe
Tizen
TLS
tlsv
@ -804,6 +816,7 @@ TPF
TrackMemory
transcode
Tru
trustless
Tse
Tsujikawa
TTL
@ -826,6 +839,7 @@ unencoded
unencrypted
unescape
Unglobbed
Unicode
UNICOS
unix
UnixSockets
@ -880,6 +894,7 @@ WB
web page
WebDAV
WebOS
webpage
WebSocket
WEBSOCKET
WHATWG
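Several of the hunks above are pure re-alphabetization (IPFS, ipld, IPNS, trustless). A tiny checker sketch that warns when an entry sorts before its predecessor, assuming the case-insensitive ordering the list appears to follow; run it with the wordlist file as its argument:

#!/usr/bin/perl
# sketch: flag out-of-order entries in a wordlist (case-insensitive order assumed)
use strict;
use warnings;

my $prev = "";
while(my $word = <>) {
    chomp $word;
    print "line $.: \"$word\" sorts before \"$prev\"\n"
        if $prev ne "" && lc($word) lt lc($prev);
    $prev = $word;
}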

View File

@ -4,29 +4,29 @@
#
# Docs: https://github.com/UnicornGlobal/spellcheck-github-actions
matrix:
- name: Markdown
expect_match: false
apsell:
mode: en
dictionary:
wordlists:
- wordlist.txt
output: wordlist.dic
encoding: utf-8
pipeline:
- pyspelling.filters.markdown:
markdown_extensions:
- markdown.extensions.extra:
- pyspelling.filters.html:
comments: true
attributes:
- title
- alt
ignores:
- ':matches(code, pre)'
- 'code'
- 'pre'
- 'strong'
- 'em'
sources:
- '**/*.md|!docs/BINDINGS.md'
- name: Markdown
expect_match: false
apsell:
mode: en
dictionary:
wordlists:
- wordlist.txt
output: wordlist.dic
encoding: utf-8
pipeline:
- pyspelling.filters.markdown:
markdown_extensions:
- markdown.extensions.extra:
- pyspelling.filters.html:
comments: true
attributes:
- title
- alt
ignores:
- ':matches(code, pre)'
- 'code'
- 'pre'
- 'strong'
- 'em'
sources:
- '**/*.md|!docs/BINDINGS.md|!docs/DISTROS.md'
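The sources entry uses pyspelling's pattern syntax: '|'-separated globs where a leading '!' excludes a path, so this selects every Markdown file except docs/BINDINGS.md and, newly, docs/DISTROS.md. A small sketch emulating that selection over a list of paths fed in on stdin (for example from find):

#!/usr/bin/perl
# sketch: emulate '**/*.md|!docs/BINDINGS.md|!docs/DISTROS.md' on stdin paths
use strict;
use warnings;

my @exclude = ("docs/BINDINGS.md", "docs/DISTROS.md");
while(my $path = <STDIN>) {
    chomp $path;
    next unless $path =~ /\.md$/;           # the '**/*.md' part
    next if grep { $path eq $_ } @exclude;  # the '!...' parts
    print "$path\n";
}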

View File

@ -26,6 +26,7 @@
my @files = @ARGV;
my $cfile = "test.c";
my $check = "./scripts/checksrc.pl";
my $error;
if($files[0] eq "-h") {
print "Usage: verify-synopsis [man pages]\n";
@ -47,8 +48,9 @@ sub extract {
my $syn = 0;
my $l = 0;
my $iline = 0;
open(F, "<$f");
open(O, ">$cfile");
my $fail = 0;
open(F, "<$f") or die "failed opening input file $f : $!";
open(O, ">$cfile") or die "failed opening output file $cfile : $!";
print O "#include <curl/curl.h>\n";
while(<F>) {
$iline++;
@ -68,6 +70,15 @@ sub extract {
if(/^.fi/) {
last;
}
if(/(?<!\\)(?:\\{2})*\\(?!\\)/) {
print STDERR
"Error while processing file $f line $iline:\n$_" .
"Error: Single backslashes \\ are not properly shown in " .
"manpage EXAMPLE output unless they are escaped \\\\.\n";
$fail = 1;
$error = 1;
last;
}
# two backslashes become one
$_ =~ s/\\\\/\\/g;
print O $_;
@ -77,17 +88,23 @@ sub extract {
close(F);
close(O);
return $l;
return ($fail ? 0 : $l);
}
my $error;
my $count;
for my $m (@files) {
print "Verify $m\n";
#print "Verify $m\n";
my $out = extract($m);
if($out) {
$error |= testcompile($m);
$error |= checksrc($m);
}
$count++;
}
if(!$error) {
print "Verified $count man pages ok\n";
}
else {
print "Detected problems\n";
}
exit $error;
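The new guard above flags EXAMPLE sections containing an odd run of backslashes, which would not render intact in manpage output. The lookaround pattern matches only odd-length runs: (?<!\\) anchors at the start of a run, (?:\\{2})* consumes pairs, and the final \\(?!\\) requires exactly one backslash left over. A self-contained sketch with generated test strings:

#!/usr/bin/perl
# sketch: the odd-backslash detection used by the verify script above
use strict;
use warnings;

for my $n (1..4) {
    my $s = 'a' . ('\\' x $n) . 'b';  # 'a', then $n literal backslashes, then 'b'
    my $odd = ($s =~ /(?<!\\)(?:\\{2})*\\(?!\\)/) ? "flagged (odd)" : "ok (even)";
    printf "%d backslash(es): %s\n", $n, $odd;
}

# once the check passes, pairs are collapsed before compiling, as above:
my $c = 'a' . ('\\' x 2) . 'b';
$c =~ s/\\\\/\\/g;                    # two backslashes become one
print "$c\n";                         # prints a\b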

View File

@ -78,4 +78,3 @@ for my $m (@files) {
$error |= testcompile($m);
}
exit $error;

View File

@ -7,31 +7,31 @@ name: Linux AWS-LC
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
@ -51,58 +51,58 @@ jobs:
timeout-minutes: 30
steps:
- run: |
sudo apt-get update --yes
sudo apt-get install --yes libtool autoconf automake pkg-config stunnel4
# ensure we don't pick up openssl in this build
sudo apt remove --yes libssl-dev
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- run: |
sudo apt-get update --yes
sudo apt-get install --yes libtool autoconf automake pkg-config stunnel4 libpsl-dev
# ensure we don't pick up openssl in this build
sudo apt remove --yes libssl-dev
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- name: cache awslc
uses: actions/cache@v3
id: cache-awslc
env:
cache-name: cache-awslc
with:
path: /home/runner/awslc
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.awslc-version }}
- name: cache awslc
uses: actions/cache@v4
id: cache-awslc
env:
cache-name: cache-awslc
with:
path: /home/runner/awslc
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.awslc-version }}
- name: build awslc
if: steps.cache-awslc.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 \
https://github.com/awslabs/aws-lc/archive/refs/tags/v${{ env.awslc-version }}.tar.gz
tar xzf v${{ env.awslc-version }}.tar.gz
mkdir aws-lc-${{ env.awslc-version }}-build
cd aws-lc-${{ env.awslc-version }}-build
cmake -DCMAKE_INSTALL_PREFIX=$HOME/awslc ../aws-lc-${{ env.awslc-version }}
cmake --build . --parallel
cmake --install .
- name: build awslc
if: steps.cache-awslc.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 \
https://github.com/awslabs/aws-lc/archive/refs/tags/v${{ env.awslc-version }}.tar.gz
tar xzf v${{ env.awslc-version }}.tar.gz
mkdir aws-lc-${{ env.awslc-version }}-build
cd aws-lc-${{ env.awslc-version }}-build
cmake -DCMAKE_INSTALL_PREFIX=$HOME/awslc ../aws-lc-${{ env.awslc-version }}
cmake --build . --parallel
cmake --install .
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: |
mkdir build
cd build
../configure --enable-warnings --enable-werror --with-openssl=$HOME/awslc
cd ..
name: 'configure out-of-tree'
- run: |
mkdir build
cd build
../configure --enable-warnings --enable-werror --with-openssl=$HOME/awslc
cd ..
name: 'configure out-of-tree'
- run: make -C build V=1
name: 'make'
- run: make -C build V=1
name: 'make'
- run: make -C build V=1 examples
name: 'make examples'
- run: make -C build V=1 examples
name: 'make examples'
- run: make -C build V=1 -C tests
name: 'make tests'
- run: make -C build V=1 -C tests
name: 'make tests'
- run: make -C build V=1 test-ci
name: 'run tests'
- run: make -C build V=1 test-ci
name: 'run tests'
cmake:
name: awslc (cmake)
@ -110,43 +110,43 @@ jobs:
timeout-minutes: 15
steps:
- run: |
sudo apt-get update
sudo apt-get install cmake stunnel4
# ensure we don't pick up openssl in this build
sudo apt remove --yes libssl-dev
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- run: |
sudo apt-get update
sudo apt-get install cmake stunnel4
# ensure we don't pick up openssl in this build
sudo apt remove --yes libssl-dev
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- name: cache awslc
uses: actions/cache@v3
id: cache-awslc
env:
cache-name: cache-awslc
with:
path: /home/runner/awslc
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.awslc-version }}
- name: cache awslc
uses: actions/cache@v4
id: cache-awslc
env:
cache-name: cache-awslc
with:
path: /home/runner/awslc
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.awslc-version }}
- name: build awslc
if: steps.cache-awslc.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 \
https://github.com/awslabs/aws-lc/archive/refs/tags/v${{ env.awslc-version }}.tar.gz
tar xzf v${{ env.awslc-version }}.tar.gz
mkdir aws-lc-${{ env.awslc-version }}-build
cd aws-lc-${{ env.awslc-version }}-build
cmake -DCMAKE_INSTALL_PREFIX=$HOME/awslc ../aws-lc-${{ env.awslc-version }}
cmake --build . --parallel
cmake --install .
- name: build awslc
if: steps.cache-awslc.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 \
https://github.com/awslabs/aws-lc/archive/refs/tags/v${{ env.awslc-version }}.tar.gz
tar xzf v${{ env.awslc-version }}.tar.gz
mkdir aws-lc-${{ env.awslc-version }}-build
cd aws-lc-${{ env.awslc-version }}-build
cmake -DCMAKE_INSTALL_PREFIX=$HOME/awslc ../aws-lc-${{ env.awslc-version }}
cmake --build . --parallel
cmake --install .
- uses: actions/checkout@v4
- uses: actions/checkout@v4
# CMAKE_COMPILE_WARNING_AS_ERROR is available in cmake 3.24 or later
- run: cmake -Bbuild -DOPENSSL_ROOT_DIR=$HOME/awslc -DBUILD_SHARED_LIBS=ON -DCMAKE_COMPILE_WARNING_AS_ERROR=ON .
name: 'cmake generate out-of-tree'
# CMAKE_COMPILE_WARNING_AS_ERROR is available in cmake 3.24 or later
- run: cmake -Bbuild -DOPENSSL_ROOT_DIR=$HOME/awslc -DBUILD_SHARED_LIBS=ON -DCMAKE_COMPILE_WARNING_AS_ERROR=ON .
name: 'cmake generate out-of-tree'
- run: cmake --build build --parallel
name: 'cmake build'
- run: cmake --build build --parallel
name: 'cmake build'
- run: cmake --install build --prefix $HOME/curl --strip
name: 'cmake install'
- run: cmake --install build --prefix $HOME/curl --strip
name: 'cmake install'

View File

@ -0,0 +1,27 @@
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
name: badwords
on:
# Trigger the workflow on push or pull requests, but only for the
# master branch
push:
branches:
- master
- '*/ci'
pull_request:
branches:
- master
jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: check
run: ./.github/scripts/badwords.pl < .github/scripts/badwords.txt docs/*.md docs/libcurl/*.md docs/libcurl/opts/*.md docs/cmdline-opts/*.md
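The new workflow passes the word file on stdin and the files to scan as arguments. A minimal sketch of a checker with that calling convention; the one-forbidden-phrase-per-line rule format is an assumption for illustration, not necessarily what badwords.txt actually uses:

#!/usr/bin/perl
# sketch of a bad-word scanner matching the workflow's calling convention:
#   badwords.pl < badwords.txt file1.md file2.md ...
# the one-phrase-per-line rule format is assumed for illustration
use strict;
use warnings;

my @bad;
while(my $w = <STDIN>) {        # forbidden phrases arrive on stdin
    chomp $w;
    next if $w =~ /^\s*(#|$)/;  # skip comments and blank lines
    push @bad, $w;
}

my $errors = 0;
for my $file (@ARGV) {
    open(my $fh, '<', $file) or die "cannot open $file: $!";
    while(my $line = <$fh>) {
        for my $w (@bad) {
            if($line =~ /\Q$w\E/i) {
                print "$file:$.: found \"$w\"\n";
                $errors++;
            }
        }
    }
    close($fh);
}
exit($errors ? 1 : 0);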

View File

@ -7,35 +7,35 @@ name: CodeQL
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'docs/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'docs/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'docs/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'docs/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
schedule:
- cron: '0 0 * * 4'
@ -50,31 +50,31 @@ jobs:
permissions:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: cpp
queries: security-extended
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: cpp
queries: security-extended
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
# - run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

View File

@ -7,25 +7,25 @@ name: Codespell
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths:
- 'lib/**'
- 'src/**'
- 'include/**'
- 'lib/**'
- 'src/**'
- 'include/**'
pull_request:
branches:
- master
- 'lib/**'
- 'src/**'
- 'include/**'
- master
- 'lib/**'
- 'src/**'
- 'include/**'
jobs:
codespell:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: install
run: |

View File

@ -6,23 +6,23 @@ name: configure-vs-cmake
on:
push:
branches:
- master
- master
paths:
- '*.ac'
- '**/*.m4'
- '**/CMakeLists.txt'
- 'lib/curl_config.h.cmake'
- 'scripts/cmp-config.pl'
- '*.ac'
- '**/*.m4'
- '**/CMakeLists.txt'
- 'lib/curl_config.h.cmake'
- 'scripts/cmp-config.pl'
pull_request:
branches:
- master
- master
paths:
- '*.ac'
- '**/*.m4'
- '**/CMakeLists.txt'
- 'lib/curl_config.h.cmake'
- 'scripts/cmp-config.pl'
- '*.ac'
- '**/*.m4'
- '**/CMakeLists.txt'
- 'lib/curl_config.h.cmake'
- 'scripts/cmp-config.pl'
permissions: {}
@ -30,16 +30,16 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: run configure --with-openssl
run: |
autoreconf -fi
./configure --with-openssl
- name: run configure --with-openssl
run: |
autoreconf -fi
./configure --with-openssl --without-libpsl
- name: run cmake
run: |
mkdir build && cd build && cmake ..
- name: run cmake
run: |
mkdir build && cd build && cmake ..
- name: compare generated curl_config.h files
run: ./scripts/cmp-config.pl lib/curl_config.h build/lib/curl_config.h
- name: compare generated curl_config.h files
run: ./scripts/cmp-config.pl lib/curl_config.h build/lib/curl_config.h
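The comparison step exists so the autotools and cmake builds cannot silently drift apart: both generate a curl_config.h, and cmp-config.pl diffs them. A rough sketch of such a comparison; the real script also tolerates a list of known, acceptable differences, which this sketch omits:

#!/usr/bin/perl
# sketch: compare the #define lines of two generated curl_config.h files
# usage: cmp-config.pl lib/curl_config.h build/lib/curl_config.h
use strict;
use warnings;

sub defines {
    my ($path) = @_;
    my %d;
    open(my $fh, '<', $path) or die "cannot open $path: $!";
    while(<$fh>) {
        $d{$1} = $2 if /^#define\s+(\S+)\s*(.*)/;
    }
    close($fh);
    return \%d;
}

my ($left, $right) = (defines($ARGV[0]), defines($ARGV[1]));
my %all = (%$left, %$right);
my $diffs = 0;
for my $name (sort keys %all) {
    my ($l, $r) = ($left->{$name}, $right->{$name});
    next if defined($l) && defined($r) && $l eq $r;
    printf "%-40s %s | %s\n", $name, $l // "(not set)", $r // "(not set)";
    $diffs++;
}
exit($diffs ? 1 : 0);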

View File

@ -7,11 +7,11 @@ name: dist
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
pull_request:
branches:
- master
- master
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -22,101 +22,101 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: sudo apt-get purge -y curl libcurl4 libcurl4-doc
name: 'remove preinstalled curl libcurl4{-doc}'
- run: sudo apt-get purge -y curl libcurl4 libcurl4-doc
name: 'remove preinstalled curl libcurl4{-doc}'
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure --without-ssl
name: 'configure'
- run: ./configure --without-ssl --without-libpsl
name: 'configure'
- run: make V=1 && make V=1 clean
name: 'make and clean'
- run: make V=1 && make V=1 clean
name: 'make and clean'
- run: ./maketgz 99.98.97
name: 'maketgz'
- run: ./maketgz 99.98.97
name: 'maketgz'
- uses: actions/upload-artifact@v3
with:
name: 'release-tgz'
path: 'curl-99.98.97.tar.gz'
- uses: actions/upload-artifact@v4
with:
name: 'release-tgz'
path: 'curl-99.98.97.tar.gz'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
./configure --prefix=$HOME/temp --without-ssl
make
make TFLAGS=1 test
make install
popd
# basic check of the installed files
bash scripts/installcheck.sh $HOME/temp
rm -rf curl-99.98.97
name: 'verify in-tree configure build including install'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
./configure --prefix=$HOME/temp --without-ssl --without-libpsl
make
make test-ci
make install
popd
# basic check of the installed files
bash scripts/installcheck.sh $HOME/temp
rm -rf curl-99.98.97
name: 'verify in-tree configure build including install'
verify-out-of-tree-docs:
runs-on: ubuntu-latest
timeout-minutes: 30
needs: maketgz-and-verify-in-tree
steps:
- uses: actions/download-artifact@v3
with:
name: 'release-tgz'
- uses: actions/download-artifact@v4
with:
name: 'release-tgz'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
touch curl-99.98.97/docs/{cmdline-opts,libcurl}/Makefile.inc
mkdir build
pushd build
../curl-99.98.97/configure --without-ssl
make
make TFLAGS='-p 1 1139' test
popd
rm -rf build
rm -rf curl-99.98.97
name: 'verify out-of-tree configure build including docs'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
touch curl-99.98.97/docs/{cmdline-opts,libcurl}/Makefile.inc
mkdir build
pushd build
../curl-99.98.97/configure --without-ssl --without-libpsl
make
make test-ci
popd
rm -rf build
rm -rf curl-99.98.97
name: 'verify out-of-tree configure build including docs'
verify-out-of-tree-autotools-debug:
runs-on: ubuntu-latest
timeout-minutes: 30
needs: maketgz-and-verify-in-tree
steps:
- uses: actions/download-artifact@v3
with:
name: 'release-tgz'
- uses: actions/download-artifact@v4
with:
name: 'release-tgz'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
mkdir build
pushd build
../configure --without-ssl --enable-debug "--prefix=${PWD}/pkg"
make -j3
make -j3 TFLAGS=1279 test
make -j3 install
name: 'verify out-of-tree autotools debug build'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
mkdir build
pushd build
../configure --without-ssl --enable-debug "--prefix=${PWD}/pkg" --without-libpsl
make -j3
make -j3 test-ci
make -j3 install
name: 'verify out-of-tree autotools debug build'
verify-out-of-tree-cmake:
runs-on: ubuntu-latest
timeout-minutes: 30
needs: maketgz-and-verify-in-tree
steps:
- uses: actions/download-artifact@v3
with:
name: 'release-tgz'
- uses: actions/download-artifact@v4
with:
name: 'release-tgz'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
mkdir build
pushd build
cmake ..
make
name: 'verify out-of-tree cmake build'
- run: |
echo "::stop-commands::$(uuidgen)"
tar xvf curl-99.98.97.tar.gz
pushd curl-99.98.97
mkdir build
pushd build
cmake ..
make
name: 'verify out-of-tree cmake build'
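The in-tree verification earlier in this file ends with scripts/installcheck.sh doing "a basic check of the installed files". A hedged Perl equivalent of that kind of check; the expected paths listed here are illustrative guesses, not the script's actual list:

#!/usr/bin/perl
# sketch: verify a curl install prefix contains the expected entry points
# (the path list below is illustrative, not installcheck.sh's real list)
use strict;
use warnings;

my $prefix = $ARGV[0] or die "usage: installcheck.pl <prefix>\n";
my @expect = (
    "bin/curl",
    "bin/curl-config",
    "include/curl/curl.h",
);
my $missing = 0;
for my $rel (@expect) {
    next if -e "$prefix/$rel";
    print "missing: $prefix/$rel\n";
    $missing++;
}
exit($missing ? 1 : 0);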

View File

@ -7,37 +7,37 @@ name: Fuzzer
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'tests/data/**'
- 'winbuild/**'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}

View File

@ -8,7 +8,7 @@ on:
# this must not ever run on any other branch than master
push:
branches:
- master
- master
concurrency:
# this should not run in parallel, so just run one at a time

View File

@ -21,6 +21,6 @@ jobs:
pull-requests: write
steps:
- uses: actions/labeler@v5
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
- uses: actions/labeler@v5
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"

View File

@ -7,17 +7,17 @@ name: Markdown links
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths:
- '.github/workflows/linkcheck.yml'
- '**.md'
- '.github/workflows/linkcheck.yml'
- '**.md'
pull_request:
branches:
- master
- master
paths:
- '.github/workflows/linkcheck.yml'
- '**.md'
- '.github/workflows/linkcheck.yml'
- '**.md'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -30,7 +30,12 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'
- uses: actions/checkout@v4
name: checkout
- name: trim the cmdline docs markdown files
run: find docs/cmdline-opts -name "*.md" ! -name "_*" ! -name MANPAGE.md | xargs -n1 ./.github/scripts/cleancmd.pl
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'
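The new trim step runs cleancmd.pl over the cmdline-opts markdown files before link checking, presumably to drop the leading meta-data block that is not plain Markdown. A sketch under that assumption; the '---' delimiter convention is assumed here and may not match every detail of the real script:

#!/usr/bin/perl
# sketch: drop a leading '---'-delimited meta-data block from a markdown
# file in place (an assumed format, for illustration only)
use strict;
use warnings;

my $file = $ARGV[0] or die "usage: cleancmd.pl <file.md>\n";
open(my $in, '<', $file) or die "cannot open $file: $!";
my @keep;
my $inmeta = 0;
while(<$in>) {
    if($. == 1 && /^---\s*$/) {
        $inmeta = 1;                 # leading meta-data block opens
        next;
    }
    if($inmeta) {
        $inmeta = 0 if /^---\s*$/;   # block closes; drop this line too
        next;
    }
    push @keep, $_;
}
close($in);
open(my $out, '>', $file) or die "cannot write $file: $!";
print $out @keep;
close($out);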

View File

@ -7,31 +7,31 @@ name: Linux
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -44,11 +44,11 @@ env:
bearssl-version: 0.6
libressl-version: v3.7.3
mbedtls-version: v3.5.0
mod_h2-version: v2.0.25
mod_h2-version: v2.0.26
msh3-version: v0.6.0
openssl3-version: openssl-3.1.3
quictls-version: 3.1.4+quic
rustls-version: v0.10.0
rustls-version: v0.12.0
jobs:
autotools:
@ -60,375 +60,383 @@ jobs:
fail-fast: false
matrix:
build:
- name: bearssl
install_packages: zlib1g-dev valgrind
install_steps: bearssl pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/bearssl/lib" --with-bearssl=$HOME/bearssl --enable-debug
singleuse: --unit
- name: bearssl
install_packages: zlib1g-dev valgrind
install_steps: bearssl pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/bearssl/lib" --with-bearssl=$HOME/bearssl --enable-debug
singleuse: --unit
- name: bearssl-clang
install_packages: zlib1g-dev clang
install_steps: bearssl
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/bearssl/lib" --with-bearssl=$HOME/bearssl --enable-debug
singleuse: --unit
- name: bearssl-clang
install_packages: zlib1g-dev clang
install_steps: bearssl
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/bearssl/lib" --with-bearssl=$HOME/bearssl --enable-debug
singleuse: --unit
- name: libressl
install_packages: zlib1g-dev valgrind
install_steps: libressl pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/libressl/lib" --with-openssl=$HOME/libressl --enable-debug
singleuse: --unit
- name: libressl
install_packages: zlib1g-dev valgrind
install_steps: libressl pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/libressl/lib" --with-openssl=$HOME/libressl --enable-debug
singleuse: --unit
- name: libressl-clang
install_packages: zlib1g-dev clang
install_steps: libressl
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/libressl/lib" --with-openssl=$HOME/libressl --enable-debug
singleuse: --unit
- name: libressl-clang
install_packages: zlib1g-dev clang
install_steps: libressl
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/libressl/lib" --with-openssl=$HOME/libressl --enable-debug
singleuse: --unit
- name: mbedtls
install_packages: libnghttp2-dev valgrind
install_steps: mbedtls pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/mbedtls/lib" --with-mbedtls=$HOME/mbedtls --enable-debug
singleuse: --unit
- name: mbedtls
install_packages: libnghttp2-dev valgrind
install_steps: mbedtls pytest
configure: LDFLAGS="-Wl,-rpath,$HOME/mbedtls/lib" --with-mbedtls=$HOME/mbedtls --enable-debug
singleuse: --unit
- name: mbedtls-clang
install_packages: libnghttp2-dev clang
install_steps: mbedtls
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/mbedtls/lib" --with-mbedtls=$HOME/mbedtls --enable-debug
singleuse: --unit
- name: mbedtls-clang
install_packages: libnghttp2-dev clang
install_steps: mbedtls
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/mbedtls/lib" --with-mbedtls=$HOME/mbedtls --enable-debug
singleuse: --unit
- name: msh3
install_packages: zlib1g-dev valgrind
install_steps: quictls msh3
configure: LDFLAGS="-Wl,-rpath,$HOME/msh3/lib -Wl,-rpath,$HOME/quictls/lib" --with-msh3=$HOME/msh3 --with-openssl=$HOME/quictls --enable-debug
singleuse: --unit
- name: msh3
install_packages: zlib1g-dev valgrind
install_steps: quictls msh3
configure: LDFLAGS="-Wl,-rpath,$HOME/msh3/lib -Wl,-rpath,$HOME/quictls/lib" --with-msh3=$HOME/msh3 --with-openssl=$HOME/quictls --enable-debug
singleuse: --unit
- name: openssl3
install_packages: zlib1g-dev valgrind
install_steps: gcc-11 openssl3 pytest
configure: CFLAGS=-std=gnu89 LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib64" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: openssl3
install_packages: zlib1g-dev valgrind
install_steps: gcc-11 openssl3 pytest
configure: CFLAGS=-std=gnu89 LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: openssl3-O3
install_packages: zlib1g-dev valgrind
install_steps: gcc-11 openssl3
configure: CFLAGS=-O3 LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib64" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: openssl3-O3
install_packages: zlib1g-dev valgrind
install_steps: gcc-11 openssl3
configure: CPPFLAGS=-DCURL_WARN_SIGN_CONVERSION CFLAGS=-O3 LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: openssl3-clang
install_packages: zlib1g-dev clang
install_steps: openssl3
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib64" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: openssl3-clang
install_packages: zlib1g-dev clang
install_steps: openssl3
configure: CC=clang LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib" --with-openssl=$HOME/openssl3 --enable-debug --enable-websockets
singleuse: --unit
- name: address-sanitizer
install_packages: zlib1g-dev libssh2-1-dev clang libssl-dev libubsan1 libasan8 libtsan2
install_steps: pytest
configure: >
CC=clang
CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g"
LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer"
LIBS="-ldl -lubsan"
--with-openssl --enable-debug --enable-websockets
singleuse: --unit
- name: address-sanitizer
install_packages: zlib1g-dev libssh2-1-dev clang libssl-dev libubsan1 libasan8 libtsan2
install_steps: pytest
configure: >
CC=clang
CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g"
LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer"
LIBS="-ldl -lubsan"
--with-openssl --enable-debug --enable-websockets
singleuse: --unit
- name: memory-sanitizer
install_packages: clang
install_steps:
configure: >
CC=clang
CFLAGS="-fsanitize=memory -Wformat -Werror=format-security -Werror=array-bounds -g"
LDFLAGS="-fsanitize=memory"
LIBS="-ldl"
--without-ssl --without-zlib --without-brotli --without-zstd --without-libpsl --without-nghttp2 --enable-debug --enable-websocketsx
singleuse: --unit
- name: memory-sanitizer
install_packages: clang
install_steps:
configure: >
CC=clang
CFLAGS="-fsanitize=memory -Wformat -Werror=format-security -Werror=array-bounds -g"
LDFLAGS="-fsanitize=memory"
LIBS="-ldl"
--without-ssl --without-zlib --without-brotli --without-zstd --without-libpsl --without-nghttp2 --enable-debug --enable-websockets
singleuse: --unit
- name: event-based
install_packages: libssh-dev valgrind
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-libssh --with-openssl
tflags: -n -e '!TLS-SRP'
singleuse: --unit
- name: event-based
install_packages: libssh-dev valgrind
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-libssh --with-openssl
tflags: -n -e '!TLS-SRP'
singleuse: --unit
- name: hyper
install_steps: rust hyper valgrind
configure: LDFLAGS="-Wl,-rpath,$HOME/hyper/target/debug" --with-openssl --with-hyper=$HOME/hyper --enable-debug --enable-websockets
singleuse: --unit
- name: hyper
install_steps: rust hyper valgrind
configure: LDFLAGS="-Wl,-rpath,$HOME/hyper/target/debug" --with-openssl --with-hyper=$HOME/hyper --enable-debug --enable-websockets
singleuse: --unit
- name: rustls
install_steps: rust rustls pytest valgrind
configure: --with-rustls=$HOME/rustls --enable-debug
singleuse: --unit
- name: rustls
install_steps: rust rustls pytest valgrind libpsl-dev
configure: --with-rustls=$HOME/rustls --enable-debug
singleuse: --unit
- name: Intel compiler - without SSL
install_packages: zlib1g-dev valgrind
install_steps: intel
configure: CC=icc --enable-debug --without-ssl
singleuse: --unit
- name: Intel compiler - without SSL
install_packages: zlib1g-dev valgrind
install_steps: intel
configure: CC=icc --enable-debug --without-ssl
singleuse: --unit
- name: Intel compiler - OpenSSL
install_packages: zlib1g-dev libssl-dev valgrind
install_steps: intel
configure: CC=icc --enable-debug --with-openssl
singleuse: --unit
- name: Intel compiler - OpenSSL
install_packages: zlib1g-dev libssl-dev valgrind
install_steps: intel
configure: CC=icc --enable-debug --with-openssl
singleuse: --unit
- name: Slackware-openssl-with-gssapi-gcc
# These are essentially the same flags used to build the curl Slackware package
# https://ftpmirror.infania.net/slackware/slackware64-current/source/n/curl/curl.SlackBuild
configure: --with-openssl --with-libssh2 --with-gssapi --enable-ares --enable-static=no --without-ca-bundle --with-ca-path=/etc/ssl/certs
# Docker Hub image that `container-job` executes in
container: 'andy5995/slackware-build-essential:15.0'
- name: Slackware-openssl-with-gssapi-gcc
# These are essentially the same flags used to build the curl Slackware package
# https://ftpmirror.infania.net/slackware/slackware64-current/source/n/curl/curl.SlackBuild
configure: --with-openssl --with-libssh2 --with-gssapi --enable-ares --enable-static=no --without-ca-bundle --with-ca-path=/etc/ssl/certs
# Docker Hub image that `container-job` executes in
container: 'andy5995/slackware-build-essential:15.0'
- name: Alpine MUSL
configure: --enable-debug --enable-websockets --with-ssl --with-libssh2 --with-libidn2 --with-gssapi --enable-ldap --with-libpsl
container: 'alpine:3.18'
singleuse: --unit
- name: Alpine MUSL
configure: --enable-debug --enable-websockets --with-ssl --with-libssh2 --with-libidn2 --with-gssapi --enable-ldap --with-libpsl
container: 'alpine:3.18'
singleuse: --unit
steps:
- if: matrix.build.container == null
run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 libpsl-dev libbrotli-dev libzstd-dev ${{ matrix.build.install_packages }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- if: matrix.build.container == null
run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 libpsl-dev libbrotli-dev libzstd-dev ${{ matrix.build.install_packages }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- if: startsWith(matrix.build.container, 'alpine')
run: |
apk add --no-cache build-base autoconf automake libtool perl openssl-dev libssh2-dev zlib-dev brotli-dev zstd-dev libidn2-dev openldap-dev heimdal-dev libpsl-dev py3-impacket py3-asn1 py3-six py3-pycryptodomex perl-time-hires openssh stunnel sudo git
name: 'install dependencies'
- if: startsWith(matrix.build.container, 'alpine')
run: |
apk add --no-cache build-base autoconf automake libtool perl openssl-dev libssh2-dev zlib-dev brotli-dev zstd-dev libidn2-dev openldap-dev heimdal-dev libpsl-dev py3-impacket py3-asn1 py3-six py3-pycryptodomex perl-time-hires openssh stunnel sudo git
name: 'install dependencies'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- if: contains(matrix.build.install_steps, 'gcc-11')
run: |
sudo add-apt-repository ppa:ubuntu-toolchain-r/ppa
sudo apt-get update
sudo apt-get install gcc-11
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
sudo update-alternatives --set gcc /usr/bin/gcc-11
gcc --version
name: 'install gcc-11'
- name: Fix kernel mmap rnd bits
# Asan in llvm 14 provided in ubuntu 22.04 is incompatible with
# high-entropy ASLR in much newer kernels that GitHub runners are
# using leading to random crashes: https://reviews.llvm.org/D148280
# See https://github.com/actions/runner-images/issues/9491
continue-on-error: true
run: sudo sysctl vm.mmap_rnd_bits=28
- name: cache bearssl
if: contains(matrix.build.install_steps, 'bearssl')
uses: actions/cache@v3
id: cache-bearssl
env:
cache-name: cache-bearssl
with:
path: /home/runner/bearssl
key: ${{ runner.os }}-build-${{ env.cache-name }}-bearssl-${{ env.bearssl-version }}
- if: contains(matrix.build.install_steps, 'gcc-11')
run: |
sudo add-apt-repository ppa:ubuntu-toolchain-r/ppa
sudo apt-get update
sudo apt-get install gcc-11
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
sudo update-alternatives --set gcc /usr/bin/gcc-11
gcc --version
name: 'install gcc-11'
- name: 'build bearssl'
if: contains(matrix.build.install_steps, 'bearssl') && steps.cache-bearssl.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 https://bearssl.org/bearssl-${{ env.bearssl-version }}.tar.gz
tar -xzf bearssl-${{ env.bearssl-version }}.tar.gz
cd bearssl-${{ env.bearssl-version }}
make
mkdir -p $HOME/bearssl/lib $HOME/bearssl/include
cp inc/*.h $HOME/bearssl/include
cp build/libbearssl.* $HOME/bearssl/lib
- name: cache bearssl
if: contains(matrix.build.install_steps, 'bearssl')
uses: actions/cache@v4
id: cache-bearssl
env:
cache-name: cache-bearssl
with:
path: /home/runner/bearssl
key: ${{ runner.os }}-build-${{ env.cache-name }}-bearssl-${{ env.bearssl-version }}
- name: cache libressl
if: contains(matrix.build.install_steps, 'libressl')
uses: actions/cache@v3
id: cache-libressl
env:
cache-name: cache-libressl
with:
path: /home/runner/libressl
key: ${{ runner.os }}-build-${{ env.cache-name }}-libressl-${{ env.libressl-version }}
- name: 'build bearssl'
if: contains(matrix.build.install_steps, 'bearssl') && steps.cache-bearssl.outputs.cache-hit != 'true'
run: |
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 https://bearssl.org/bearssl-${{ env.bearssl-version }}.tar.gz
tar -xzf bearssl-${{ env.bearssl-version }}.tar.gz
cd bearssl-${{ env.bearssl-version }}
make
mkdir -p $HOME/bearssl/lib $HOME/bearssl/include
cp inc/*.h $HOME/bearssl/include
cp build/libbearssl.* $HOME/bearssl/lib
- name: 'build libressl'
if: contains(matrix.build.install_steps, 'libressl') && steps.cache-libressl.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.libressl-version }} https://github.com/libressl-portable/portable.git libressl-git
cd libressl-git
./autogen.sh
./configure --prefix=$HOME/libressl
make install
- name: cache libressl
if: contains(matrix.build.install_steps, 'libressl')
uses: actions/cache@v4
id: cache-libressl
env:
cache-name: cache-libressl
with:
path: /home/runner/libressl
key: ${{ runner.os }}-build-${{ env.cache-name }}-libressl-${{ env.libressl-version }}
- name: cache mbedtls
if: contains(matrix.build.install_steps, 'mbedtls')
uses: actions/cache@v3
id: cache-mbedtls
env:
cache-name: cache-mbedtls
with:
path: /home/runner/mbedtls
key: ${{ runner.os }}-build-${{ env.cache-name }}-mbedtls-${{ env.mbedtls-version }}
- name: 'build libressl'
if: contains(matrix.build.install_steps, 'libressl') && steps.cache-libressl.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.libressl-version }} https://github.com/libressl-portable/portable.git libressl-git
cd libressl-git
./autogen.sh
./configure --prefix=$HOME/libressl
make install
- name: 'build mbedtls'
if: contains(matrix.build.install_steps, 'mbedtls') && steps.cache-mbedtls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.mbedtls-version }} https://github.com/ARMmbed/mbedtls
cd mbedtls
make DESTDIR=$HOME/mbedtls install
- name: cache mbedtls
if: contains(matrix.build.install_steps, 'mbedtls')
uses: actions/cache@v4
id: cache-mbedtls
env:
cache-name: cache-mbedtls
with:
path: /home/runner/mbedtls
key: ${{ runner.os }}-build-${{ env.cache-name }}-mbedtls-${{ env.mbedtls-version }}
- name: cache openssl3
if: contains(matrix.build.install_steps, 'openssl3')
uses: actions/cache@v3
id: cache-openssl3
env:
cache-name: cache-openssl3
with:
path: /home/runner/openssl3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.openssl3-version }}
- name: 'build mbedtls'
if: contains(matrix.build.install_steps, 'mbedtls') && steps.cache-mbedtls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.mbedtls-version }} https://github.com/ARMmbed/mbedtls
cd mbedtls
make DESTDIR=$HOME/mbedtls install
- name: 'install openssl3'
if: contains(matrix.build.install_steps, 'openssl3') && steps.cache-openssl3.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.openssl3-version }} https://github.com/openssl/openssl
cd openssl
./config enable-tls1_3 --prefix=$HOME/openssl3
make -j1 install_sw
- name: cache openssl3
if: contains(matrix.build.install_steps, 'openssl3')
uses: actions/cache@v4
id: cache-openssl3
env:
cache-name: cache-openssl3
with:
path: /home/runner/openssl3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.openssl3-version }}
- name: cache quictls
if: contains(matrix.build.install_steps, 'quictls')
uses: actions/cache@v3
id: cache-quictls
env:
cache-name: cache-quictls
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-quictls-${{ env.quictls-version }}
- name: 'install openssl3'
if: contains(matrix.build.install_steps, 'openssl3') && steps.cache-openssl3.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.openssl3-version }} https://github.com/openssl/openssl
cd openssl
./config --prefix=$HOME/openssl3 --libdir=$HOME/openssl3/lib
make -j1 install_sw
- name: 'build quictls'
if: contains(matrix.build.install_steps, 'quictls') && steps.cache-quictls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b openssl-${{ env.quictls-version }} https://github.com/quictls/openssl
cd openssl
./config enable-tls1_3 --prefix=$HOME/quictls --libdir=$HOME/quictls/lib
make -j1 install_sw
- name: cache quictls
if: contains(matrix.build.install_steps, 'quictls')
uses: actions/cache@v4
id: cache-quictls
env:
cache-name: cache-quictls
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-quictls-${{ env.quictls-version }}
- name: cache msh3
if: contains(matrix.build.install_steps, 'msh3')
uses: actions/cache@v3
id: cache-msh3
env:
cache-name: cache-msh3
with:
path: /home/runner/msh3
key: ${{ runner.os }}-build-${{ env.cache-name }}-msh3-${{ env.msh3-version }}
- name: 'build quictls'
if: contains(matrix.build.install_steps, 'quictls') && steps.cache-quictls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b openssl-${{ env.quictls-version }} https://github.com/quictls/openssl
cd openssl
./config --prefix=$HOME/quictls --libdir=$HOME/quictls/lib
make -j1 install_sw
- name: 'build msh3'
if: contains(matrix.build.install_steps, 'msh3') && steps.cache-msh3.outputs.cache-hit != 'true'
run: |
git clone --quiet -b ${{ env.msh3-version }} --depth=1 --recursive https://github.com/nibanks/msh3
cd msh3 && mkdir build && cd build
cmake -G 'Unix Makefiles' -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=$HOME/msh3 ..
cmake --build .
cmake --install .
- name: cache msh3
if: contains(matrix.build.install_steps, 'msh3')
uses: actions/cache@v4
id: cache-msh3
env:
cache-name: cache-msh3
with:
path: /home/runner/msh3
key: ${{ runner.os }}-build-${{ env.cache-name }}-msh3-${{ env.msh3-version }}
- if: contains(matrix.build.install_steps, 'rust')
run: |
cd $HOME
curl -sSf --compressed https://sh.rustup.rs/ | sh -s -- -y
source $HOME/.cargo/env
rustup toolchain install nightly
name: 'install rust'
- name: 'build msh3'
if: contains(matrix.build.install_steps, 'msh3') && steps.cache-msh3.outputs.cache-hit != 'true'
run: |
git clone --quiet -b ${{ env.msh3-version }} --depth=1 --recursive https://github.com/nibanks/msh3
cd msh3 && mkdir build && cd build
cmake -G 'Unix Makefiles' -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=$HOME/msh3 ..
cmake --build .
cmake --install .
- name: cache rustls
if: contains(matrix.build.install_steps, 'rustls')
uses: actions/cache@v3
id: cache-rustls
env:
cache-name: cache-rustls
with:
path: /home/runner/rustls
key: ${{ runner.os }}-build-${{ env.cache-name }}-rustls-${{ env.rustls-version }}
- if: contains(matrix.build.install_steps, 'rust')
run: |
cd $HOME
curl -sSf --compressed https://sh.rustup.rs/ | sh -s -- -y
source $HOME/.cargo/env
rustup toolchain install nightly
name: 'install rust'
- name: 'build rustls'
if: contains(matrix.build.install_steps, 'rustls') && steps.cache-rustls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.rustls-version }} --recursive https://github.com/rustls/rustls-ffi.git
cd rustls-ffi
make DESTDIR=$HOME/rustls install
- name: cache rustls
if: contains(matrix.build.install_steps, 'rustls')
uses: actions/cache@v4
id: cache-rustls
env:
cache-name: cache-rustls
with:
path: /home/runner/rustls
key: ${{ runner.os }}-build-${{ env.cache-name }}-rustls-${{ env.rustls-version }}
- if: contains(matrix.build.install_steps, 'hyper')
run: |
cd $HOME
git clone --quiet --depth=1 https://github.com/hyperium/hyper.git
cd $HOME/hyper
RUSTFLAGS="--cfg hyper_unstable_ffi" cargo +nightly rustc --features client,http1,http2,ffi -Z unstable-options --crate-type cdylib
echo "LD_LIBRARY_PATH=$HOME/hyper/target/debug:/usr/local/lib" >> $GITHUB_ENV
name: 'install hyper'
- name: 'build rustls'
if: contains(matrix.build.install_steps, 'rustls') && steps.cache-rustls.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.rustls-version }} --recursive https://github.com/rustls/rustls-ffi.git
cd rustls-ffi
make DESTDIR=$HOME/rustls install
- if: contains(matrix.build.install_steps, 'intel')
run: |
cd /tmp
curl -sSf --compressed https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | sudo apt-key add -
sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
sudo apt install --no-install-recommends intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
source /opt/intel/oneapi/setvars.sh
printenv >> $GITHUB_ENV
name: 'install Intel compilers'
- if: contains(matrix.build.install_steps, 'hyper')
run: |
cd $HOME
git clone --quiet --depth=1 https://github.com/hyperium/hyper.git
cd $HOME/hyper
RUSTFLAGS="--cfg hyper_unstable_ffi" cargo +nightly rustc --features client,http1,http2,ffi -Z unstable-options --crate-type cdylib
echo "LD_LIBRARY_PATH=$HOME/hyper/target/debug:/usr/local/lib" >> $GITHUB_ENV
name: 'install hyper'
- if: contains(matrix.build.install_steps, 'pytest')
run: |
sudo apt-get install apache2 apache2-dev libnghttp2-dev
sudo python3 -m pip install -r tests/http/requirements.txt
name: 'install pytest and apache2-dev'
- if: contains(matrix.build.install_steps, 'intel')
run: |
cd /tmp
curl -sSf --compressed https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | sudo apt-key add -
sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
sudo apt install --no-install-recommends intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
source /opt/intel/oneapi/setvars.sh
printenv >> $GITHUB_ENV
name: 'install Intel compilers'
- name: cache mod_h2
if: contains(matrix.build.install_steps, 'pytest')
uses: actions/cache@v3
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- if: contains(matrix.build.install_steps, 'pytest')
run: |
sudo apt-get install apache2 apache2-dev libnghttp2-dev
sudo python3 -m pip install -r tests/http/requirements.txt
name: 'install pytest and apache2-dev'
- name: 'build mod_h2'
if: contains(matrix.build.install_steps, 'pytest') && steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
- name: cache mod_h2
if: contains(matrix.build.install_steps, 'pytest')
uses: actions/cache@v4
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- name: 'install mod_h2'
if: contains(matrix.build.install_steps, 'pytest')
run: |
cd $HOME/mod_h2
sudo make install
- name: 'build mod_h2'
if: contains(matrix.build.install_steps, 'pytest') && steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
- run: autoreconf -fi
name: 'autoreconf'
- name: 'install mod_h2'
if: contains(matrix.build.install_steps, 'pytest')
run: |
cd $HOME/mod_h2
sudo make install
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: autoreconf -fi
name: 'autoreconf'
- run: make V=1
name: 'make'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: |
git config --global --add safe.directory "*"
./scripts/singleuse.pl ${{ matrix.build.singleuse }} lib/.libs/libcurl.a
name: single-use function check
- run: make V=1
name: 'make'
- run: ./src/curl -V
name: 'check curl -V output'
- run: |
git config --global --add safe.directory "*"
./scripts/singleuse.pl ${{ matrix.build.singleuse }} lib/.libs/libcurl.a
name: single-use function check
- run: make V=1 examples
name: 'make examples'
- run: ./src/curl -V
name: 'check curl -V output'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 -C tests
name: 'make tests'
- if: contains(matrix.build.install_steps, 'pytest')
# run for `tests` directory, so pytest does not pick up any other
# packages we might have built here
run:
pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- if: contains(matrix.build.install_steps, 'pytest')
# run for `tests` directory, so pytest does not pick up any other
# packages we might have built here
run:
pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
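The single-use function check above runs scripts/singleuse.pl against the freshly built static libcurl to flag global symbols that nothing else in the archive references, i.e. candidates for being made static. A rough sketch of the idea using nm output; the real script's --unit mode and its handling of deliberately exported symbols are omitted here:

#!/usr/bin/perl
# sketch: list symbols defined in an archive but never referenced by
# another object in it - the rough idea behind scripts/singleuse.pl
use strict;
use warnings;

my $lib = $ARGV[0] or die "usage: singleuse.pl <libcurl.a>\n";
my (%defined, %used);
open(my $nm, '-|', 'nm', $lib) or die "cannot run nm: $!";
while(<$nm>) {
    # "T name" = defined code symbol, "U name" = referenced, undefined here
    if(/^[0-9a-f]*\s+T\s+(\S+)/) {
        $defined{$1}++;
    }
    elsif(/\s+U\s+(\S+)/) {
        $used{$1}++;
    }
}
close($nm);

for my $sym (sort keys %defined) {
    # note: a real check must exempt the public curl_* API,
    # which is exported on purpose
    print "$sym is defined but never used within the archive\n"
        unless $used{$sym};
}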

View File

@ -7,35 +7,35 @@ name: Linux 32-bit
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -55,39 +55,39 @@ jobs:
fail-fast: false
matrix:
build:
- name: Linux i686
install_packages: gcc-11-i686-linux-gnu libssl-dev:i386 zlib1g-dev:i386 libpsl-dev:i386 libbrotli-dev:i386 libzstd-dev:i386
configure: --enable-debug --enable-websockets --with-openssl --host=i686-linux-gnu CC=i686-linux-gnu-gcc-11 PKG_CONFIG_PATH=/usr/lib/i386-linux-gnu/pkgconfig CPPFLAGS=-I/usr/include/i386-linux-gnu LDFLAGS=-L/usr/lib/i386-linux-gnu
- name: Linux i686
install_packages: gcc-11-i686-linux-gnu libssl-dev:i386 zlib1g-dev:i386 libpsl-dev:i386 libbrotli-dev:i386 libzstd-dev:i386
configure: --enable-debug --enable-websockets --with-openssl --host=i686-linux-gnu CC=i686-linux-gnu-gcc-11 PKG_CONFIG_PATH=/usr/lib/i386-linux-gnu/pkgconfig CPPFLAGS=-I/usr/include/i386-linux-gnu LDFLAGS=-L/usr/lib/i386-linux-gnu
steps:
- run: |
sudo dpkg --add-architecture i386
sudo apt-get update -y
sudo apt-get install -y --no-install-suggests --no-install-recommends libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install_packages }}
sudo python3 -m pip install impacket
name: 'install prereqs'
- run: |
sudo dpkg --add-architecture i386
sudo apt-get update -y
sudo apt-get install -y --no-install-suggests --no-install-recommends libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install_packages }}
sudo python3 -m pip install impacket
name: 'install prereqs'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: ./src/curl -V
name: 'check curl -V output'
- run: ./src/curl -V
name: 'check curl -V output'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"

View File

@ -7,31 +7,31 @@ name: macOS
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -52,141 +52,142 @@ jobs:
fail-fast: false
matrix:
build:
- name: normal
install: nghttp2
configure: --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: debug
install: nghttp2
configure: --enable-debug --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: libssh2
install: nghttp2 libssh2
configure: --enable-debug --with-libssh2 --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: libssh-c-ares
install: openssl nghttp2 libssh
configure: --enable-debug --with-libssh --with-openssl=/usr/local/opt/openssl --enable-ares --enable-websockets
macosx-version-min: 10.9
- name: libssh
install: openssl nghttp2 libssh
configure: --enable-debug --with-libssh --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.9
- name: c-ares
install: nghttp2
configure: --enable-debug --enable-ares --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: HTTP only
install: nghttp2
configure: |
--enable-debug \
--enable-maintainer-mode \
--disable-alt-svc \
--disable-dict \
--disable-file \
--disable-ftp \
--disable-gopher \
--disable-imap \
--disable-ldap \
--disable-pop3 \
--disable-rtmp \
--disable-rtsp \
--disable-scp \
--disable-sftp \
--disable-shared \
--disable-smb \
--disable-smtp \
--disable-telnet \
--disable-tftp \
--disable-unix-sockets \
--without-brotli \
--without-gssapi \
--without-libidn2 \
--without-libpsl \
--without-librtmp \
--without-libssh2 \
--without-nghttp2 \
--without-ntlm-auth \
--without-ssl \
--without-zlib \
--without-zstd
macosx-version-min: 10.15
- name: SecureTransport http2
install: nghttp2
configure: --enable-debug --with-secure-transport --enable-websockets
macosx-version-min: 10.8
- name: gcc SecureTransport
configure: CC=gcc-12 --enable-debug --with-secure-transport --enable-websockets
macosx-version-min: 10.8
- name: OpenSSL http2
install: nghttp2 openssl
configure: --enable-debug --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.9
- name: LibreSSL http2
install: nghttp2 libressl
configure: --enable-debug --with-openssl=/usr/local/opt/libressl --enable-websockets
macosx-version-min: 10.9
- name: torture
install: nghttp2 openssl
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl=/usr/local/opt/openssl --enable-websockets
tflags: -n -t --shallow=25 !FTP
macosx-version-min: 10.9
- name: torture-ftp
install: nghttp2 openssl
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl=/usr/local/opt/openssl --enable-websockets
tflags: -n -t --shallow=20 FTP
macosx-version-min: 10.9
- name: macOS 10.15
install: nghttp2 libssh2 openssl
configure: --enable-debug --disable-ldap --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.15
- name: normal
install: nghttp2
configure: --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: debug
install: nghttp2
configure: --enable-debug --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: libssh2
install: nghttp2 libssh2
configure: --enable-debug --with-libssh2 --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: libssh-c-ares
install: openssl nghttp2 libssh
configure: --enable-debug --with-libssh --with-openssl=/usr/local/opt/openssl --enable-ares --enable-websockets
macosx-version-min: 10.9
- name: libssh
install: openssl nghttp2 libssh
configure: --enable-debug --with-libssh --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.9
- name: c-ares
install: nghttp2
configure: --enable-debug --enable-ares --without-ssl --enable-websockets
macosx-version-min: 10.9
- name: HTTP only
install: nghttp2
configure: |
--enable-debug \
--enable-maintainer-mode \
--disable-alt-svc \
--disable-dict \
--disable-file \
--disable-ftp \
--disable-gopher \
--disable-imap \
--disable-ldap \
--disable-pop3 \
--disable-rtmp \
--disable-rtsp \
--disable-scp \
--disable-sftp \
--disable-shared \
--disable-smb \
--disable-smtp \
--disable-telnet \
--disable-tftp \
--disable-unix-sockets \
--without-brotli \
--without-gssapi \
--without-libidn2 \
--without-libpsl \
--without-librtmp \
--without-libssh2 \
--without-nghttp2 \
--without-ntlm-auth \
--without-ssl \
--without-zlib \
--without-zstd
macosx-version-min: 10.15
- name: SecureTransport http2
install: nghttp2
configure: --enable-debug --with-secure-transport --enable-websockets
macosx-version-min: 10.8
- name: gcc SecureTransport
configure: CC=gcc-12 --enable-debug --with-secure-transport --enable-websockets --without-libpsl
macosx-version-min: 10.8
- name: OpenSSL http2
install: nghttp2 openssl
configure: --enable-debug --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.9
- name: LibreSSL http2
install: nghttp2 libressl
configure: --enable-debug --with-openssl=/usr/local/opt/libressl --enable-websockets
macosx-version-min: 10.9
- name: torture
install: nghttp2 openssl
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl=/usr/local/opt/openssl --enable-websockets
tflags: -n -t --shallow=25 !FTP
macosx-version-min: 10.9
- name: torture-ftp
install: nghttp2 openssl
configure: --enable-debug --disable-shared --disable-threaded-resolver --with-openssl=/usr/local/opt/openssl --enable-websockets
tflags: -n -t --shallow=20 FTP
macosx-version-min: 10.9
- name: macOS 10.15
install: nghttp2 libssh2 openssl
configure: --enable-debug --disable-ldap --with-openssl=/usr/local/opt/openssl --enable-websockets
macosx-version-min: 10.15
steps:
- run: echo libtool autoconf automake pkg-config ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
name: 'brew bundle'
- run: echo libtool autoconf automake pkg-config libpsl ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
name: 'brew bundle'
# Run this command with retries because of spurious failures seen
# while running the tests, for example
# https://github.com/curl/curl/runs/4095721123?check_suite_focus=true
- run: "while [[ $? == 0 ]]; do for i in 1 2 3; do brew update && brew bundle install --no-lock --file /tmp/Brewfile && break 2 || { echo Error: wait to try again; sleep 10; } done; false Too many retries; done"
name: 'brew install'
# Run this command with retries because of spurious failures seen
# while running the tests, for example
# https://github.com/curl/curl/runs/4095721123?check_suite_focus=true
- run: "while [[ $? == 0 ]]; do for i in 1 2 3; do brew update && brew bundle install --no-lock --file /tmp/Brewfile && break 2 || { echo Error: wait to try again; sleep 10; } done; false Too many retries; done"
name: 'brew install'
- run: |
case "${{ matrix.build.install }}" in
*openssl*)
;;
*)
if test -d /usr/local/include/openssl; then
brew unlink openssl
fi;;
esac
name: 'brew unlink openssl'
- run: |
case "${{ matrix.build.install }}" in
*openssl*)
;;
*)
if test -d /usr/local/include/openssl; then
brew unlink openssl
fi;;
esac
name: 'brew unlink openssl'
- run: python3 -m pip install impacket
name: 'pip3 install'
- run: python3 -m pip install impacket
name: 'pip3 install'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
env:
CFLAGS: "-mmacosx-version-min=${{ matrix.build.macosx-version-min }}"
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
env:
CFLAGS: "-mmacosx-version-min=${{ matrix.build.macosx-version-min }}"
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }} ~1452"
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }} ~1452"
cmake:
name: cmake ${{ matrix.compiler.CC }} ${{ matrix.build.name }}
@ -196,47 +197,47 @@ jobs:
fail-fast: false
matrix:
compiler:
- CC: clang
CXX: clang++
CFLAGS: "-mmacosx-version-min=10.15 -Wno-deprecated-declarations"
- CC: gcc-12
CXX: g++-12
CFLAGS: "-mmacosx-version-min=10.15 -Wno-error=undef -Wno-error=conversion"
- CC: clang
CXX: clang++
CFLAGS: "-mmacosx-version-min=10.15 -Wno-deprecated-declarations"
- CC: gcc-12
CXX: g++-12
CFLAGS: "-mmacosx-version-min=10.15 -Wno-error=undef -Wno-error=conversion"
build:
- name: OpenSSL
install: nghttp2 openssl
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9
- name: LibreSSL
install: nghttp2 libressl
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/libressl -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON -DCMAKE_UNITY_BUILD=ON
- name: libssh2
install: nghttp2 openssl libssh2
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCURL_USE_LIBSSH2=ON -DBUILD_SHARED_LIBS=ON -DBUILD_STATIC_LIBS=ON
- name: GnuTLS
install: gnutls
generate: -DCURL_USE_GNUTLS=ON -DCURL_USE_OPENSSL=OFF -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON -DCMAKE_SHARED_LINKER_FLAGS=-L/usr/local/lib -DCMAKE_EXE_LINKER_FLAGS=-L/usr/local/lib
- name: OpenSSL
install: nghttp2 openssl
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9
- name: LibreSSL
install: nghttp2 libressl
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/libressl -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON -DCMAKE_UNITY_BUILD=ON
- name: libssh2
install: nghttp2 openssl libssh2
generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCURL_USE_LIBSSH2=ON -DBUILD_SHARED_LIBS=ON -DBUILD_STATIC_LIBS=ON
- name: GnuTLS
install: gnutls
generate: -DCURL_USE_GNUTLS=ON -DCURL_USE_OPENSSL=OFF -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON -DCMAKE_SHARED_LINKER_FLAGS=-L/usr/local/lib -DCMAKE_EXE_LINKER_FLAGS=-L/usr/local/lib
steps:
- run: echo libtool autoconf automake pkg-config ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
name: 'brew bundle'
- run: echo libtool autoconf automake pkg-config ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile
name: 'brew bundle'
- run: "while [[ $? == 0 ]]; do for i in 1 2 3; do brew update && brew bundle install --no-lock --file /tmp/Brewfile && break 2 || { echo Error: wait to try again; sleep 10; } done; false Too many retries; done"
name: 'brew install'
- run: "while [[ $? == 0 ]]; do for i in 1 2 3; do brew update && brew bundle install --no-lock --file /tmp/Brewfile && break 2 || { echo Error: wait to try again; sleep 10; } done; false Too many retries; done"
name: 'brew install'
- run: |
case "${{ matrix.build.install }}" in
*openssl*)
;;
*)
if test -d /usr/local/include/openssl; then
brew unlink openssl
fi;;
esac
name: 'brew unlink openssl'
- run: |
case "${{ matrix.build.install }}" in
*openssl*)
;;
*)
if test -d /usr/local/include/openssl; then
brew unlink openssl
fi;;
esac
name: 'brew unlink openssl'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: cmake -S. -Bbuild -DCURL_WERROR=ON -DPICKY_COMPILER=ON ${{ matrix.build.generate }}
name: 'cmake generate'
- run: cmake -S. -Bbuild -DCURL_WERROR=ON -DPICKY_COMPILER=ON ${{ matrix.build.generate }}
name: 'cmake generate'
- run: cmake --build build
name: 'cmake build'
- run: cmake --build build
name: 'cmake build'
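
The brew install retry one-liner used twice above is dense. Expanded into plain shell it is roughly the following (an illustrative sketch, not part of the commit): three attempts with a ten-second pause between them, where the original's `break 2` jumps out of both loops on success and the trailing `false` forces a non-zero exit when every attempt fails.

for i in 1 2 3; do
  if brew update && brew bundle install --no-lock --file /tmp/Brewfile; then
    exit 0                      # success: stop retrying
  fi
  echo 'Error: wait to try again'
  sleep 10
done
exit 1                          # all three attempts failed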
View File
@ -7,24 +7,29 @@ name: manpage examples
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths:
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/opts/*.3'
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/opts/*.3'
- '.github/scripts/verify-examples.pl'
pull_request:
branches:
- master
- master
paths:
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/opts/*.3'
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/opts/*.3'
- '.github/scripts/verify-examples.pl'
jobs:
verify:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: render nroff versions
run: autoreconf -fi && ./configure --without-ssl --without-libpsl && make -C docs
- name: verify examples
run: ./.github/scripts/verify-examples.pl docs/libcurl/curl*.3 docs/libcurl/opts/*.3
View File
@ -7,35 +7,35 @@ name: ngtcp2-linux
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
@ -47,12 +47,12 @@ permissions: {}
env:
MAKEFLAGS: -j 3
quictls-version: 3.1.4+quic
gnutls-version: 3.8.0
gnutls-version: 3.8.3
wolfssl-version: master
nghttp3-version: v1.1.0
ngtcp2-version: v1.1.0
nghttp2-version: v1.58.0
mod_h2-version: v2.0.25
ngtcp2-version: v1.2.0
nghttp2-version: v1.59.0
mod_h2-version: v2.0.26
jobs:
autotools:
@ -63,208 +63,208 @@ jobs:
fail-fast: false
matrix:
build:
- name: quictls
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug --disable-ntlm
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-openssl=$HOME/nghttpx
- name: gnutls
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-gnutls=$HOME/nghttpx
- name: wolfssl
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-wolfssl=$HOME/nghttpx
- name: quictls
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug --disable-ntlm
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-openssl=$HOME/nghttpx
- name: gnutls
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-gnutls=$HOME/nghttpx
- name: wolfssl
configure: >-
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib"
--with-ngtcp2=$HOME/nghttpx --enable-warnings --enable-werror --enable-debug
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-wolfssl=$HOME/nghttpx
steps:
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 \
libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libev-dev libc-ares-dev \
nettle-dev libp11-kit-dev libtspi-dev libunistring-dev guile-2.2-dev libtasn1-bin \
libtasn1-6-dev libidn2-0-dev gawk gperf libtss2-dev dns-root-data bison gtk-doc-tools \
texinfo texlive texlive-extra-utils autopoint libev-dev \
apache2 apache2-dev libnghttp2-dev
name: 'install prereqs and impacket, pytest, crypto, apache2'
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 \
libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libev-dev libc-ares-dev \
nettle-dev libp11-kit-dev libtspi-dev libunistring-dev guile-2.2-dev libtasn1-bin \
libtasn1-6-dev libidn2-0-dev gawk gperf libtss2-dev dns-root-data bison gtk-doc-tools \
texinfo texlive texlive-extra-utils autopoint libev-dev \
apache2 apache2-dev libnghttp2-dev
name: 'install prereqs and impacket, pytest, crypto, apache2'
- name: cache quictls
uses: actions/cache@v3
id: cache-quictls-no-deprecated
env:
cache-name: cache-quictls-no-deprecated
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.quictls-version }}
- name: cache quictls
uses: actions/cache@v4
id: cache-quictls-no-deprecated
env:
cache-name: cache-quictls-no-deprecated
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.quictls-version }}
- if: steps.cache-quictls-no-deprecated.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b openssl-${{ env.quictls-version }} https://github.com/quictls/openssl quictls
cd quictls
./config no-deprecated --prefix=$HOME/nghttpx --libdir=$HOME/nghttpx/lib
make
name: 'build quictls'
- if: steps.cache-quictls-no-deprecated.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b openssl-${{ env.quictls-version }} https://github.com/quictls/openssl quictls
cd quictls
./config no-deprecated --prefix=$HOME/nghttpx --libdir=$HOME/nghttpx/lib
make
name: 'build quictls'
- run: |
cd $HOME/quictls
make -j1 install_sw
name: 'install quictls'
- run: |
cd $HOME/quictls
make -j1 install_sw
name: 'install quictls'
- name: cache gnutls
uses: actions/cache@v3
id: cache-gnutls
env:
cache-name: cache-gnutls
with:
path: /home/runner/gnutls
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.gnutls-version }}
- name: cache gnutls
uses: actions/cache@v4
id: cache-gnutls
env:
cache-name: cache-gnutls
with:
path: /home/runner/gnutls
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.gnutls-version }}
- if: steps.cache-gnutls.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.gnutls-version }} https://github.com/gnutls/gnutls.git
cd gnutls
./bootstrap
./configure --prefix=$HOME/nghttpx \
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib -L$HOME/nghttpx/lib" \
--with-included-libtasn1 --with-included-unistring \
--disable-guile --disable-doc --disable-tests --disable-tools
make
name: 'build gnutls'
- if: steps.cache-gnutls.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.gnutls-version }} https://github.com/gnutls/gnutls.git
cd gnutls
./bootstrap
./configure --prefix=$HOME/nghttpx \
PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/nghttpx/lib -L$HOME/nghttpx/lib" \
--with-included-libtasn1 --with-included-unistring \
--disable-guile --disable-doc --disable-tests --disable-tools
make
name: 'build gnutls'
- run: |
cd $HOME/gnutls
make install
name: 'install gnutls'
- run: |
cd $HOME/gnutls
make install
name: 'install gnutls'
- name: cache wolfssl
uses: actions/cache@v3
id: cache-wolfssl
env:
cache-name: cache-wolfssl
with:
path: /home/runner/wolfssl
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.wolfssl-version }}
- name: cache wolfssl
uses: actions/cache@v4
id: cache-wolfssl
env:
cache-name: cache-wolfssl
with:
path: /home/runner/wolfssl
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.wolfssl-version }}
- if: steps.cache-wolfssl.outputs.cache-hit != 'true' || ${{ env.wolfssl-version }} == 'master'
run: |
cd $HOME
rm -rf wolfssl
git clone --quiet --depth=1 -b ${{ env.wolfssl-version }} https://github.com/wolfSSL/wolfssl.git
cd wolfssl
./autogen.sh
./configure --enable-all --enable-quic --prefix=$HOME/nghttpx
make
name: 'build wolfssl'
- if: steps.cache-wolfssl.outputs.cache-hit != 'true' || ${{ env.wolfssl-version }} == 'master'
run: |
cd $HOME
rm -rf wolfssl
git clone --quiet --depth=1 -b ${{ env.wolfssl-version }} https://github.com/wolfSSL/wolfssl.git
cd wolfssl
./autogen.sh
./configure --enable-all --enable-quic --prefix=$HOME/nghttpx
make
name: 'build wolfssl'
- run: |
cd $HOME/wolfssl
make install
name: 'install wolfssl'
- run: |
cd $HOME/wolfssl
make install
name: 'install wolfssl'
- name: cache nghttp3
uses: actions/cache@v3
id: cache-nghttp3
env:
cache-name: cache-nghttp3
with:
path: /home/runner/nghttp3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.nghttp3-version }}
- name: cache nghttp3
uses: actions/cache@v4
id: cache-nghttp3
env:
cache-name: cache-nghttp3
with:
path: /home/runner/nghttp3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.nghttp3-version }}
- if: steps.cache-nghttp3.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.nghttp3-version }} https://github.com/ngtcp2/nghttp3
cd nghttp3
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only
make
name: 'build nghttp3'
- if: steps.cache-nghttp3.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.nghttp3-version }} https://github.com/ngtcp2/nghttp3
cd nghttp3
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only
make
name: 'build nghttp3'
- run: |
cd $HOME/nghttp3
make install
name: 'install nghttp3'
- run: |
cd $HOME/nghttp3
make install
name: 'install nghttp3'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.ngtcp2-version }} https://github.com/ngtcp2/ngtcp2
cd ngtcp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only --with-openssl --with-gnutls --with-wolfssl
make install
name: 'install ngtcp2'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.ngtcp2-version }} https://github.com/ngtcp2/ngtcp2
cd ngtcp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only --with-openssl --with-gnutls --with-wolfssl
make install
name: 'install ngtcp2'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.nghttp2-version }} https://github.com/nghttp2/nghttp2
cd nghttp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-http3
make install
name: 'install nghttp2'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.nghttp2-version }} https://github.com/nghttp2/nghttp2
cd nghttp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-http3
make install
name: 'install nghttp2'
- name: cache mod_h2
uses: actions/cache@v3
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- name: cache mod_h2
uses: actions/cache@v4
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- if: steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
name: 'build mod_h2'
- if: steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
name: 'build mod_h2'
- run: |
cd $HOME/mod_h2
sudo make install
name: 'install mod_h2'
- run: |
cd $HOME/mod_h2
sudo make install
name: 'install mod_h2'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: |
sudo python3 -m pip install -r tests/requirements.txt -r tests/http/requirements.txt
name: 'install python test prereqs'
- run: |
sudo python3 -m pip install -r tests/requirements.txt -r tests/http/requirements.txt
name: 'install python test prereqs'
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure ${{ matrix.build.configure }}
name: 'configure'
- run: ./configure ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
- run: pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
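
Every dependency above installs into $HOME/nghttpx: configure finds the .pc files through PKG_CONFIG_PATH, and -Wl,-rpath bakes the library directory into the produced binaries so the test runs load the freshly built libraries rather than the system ones. A quick sanity check, sketched with illustrative paths (the .libs/ location assumes the usual libtool build layout):

export PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig"
pkg-config --modversion libngtcp2            # should report the just-built version
readelf -d src/.libs/curl | grep -E 'RPATH|RUNPATH'   # should list $HOME/nghttpx/lib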
View File
@ -0,0 +1,233 @@
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
name: osslq-linux
on:
push:
branches:
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
group: osslq-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
permissions: {}
env:
MAKEFLAGS: -j 3
openssl3-version: openssl-3.2.0
quictls-version: 3.1.4+quic
nghttp3-version: v1.1.0
ngtcp2-version: v1.2.0
nghttp2-version: v1.59.0
mod_h2-version: v2.0.26
jobs:
autotools:
name: ${{ matrix.build.name }}
runs-on: 'ubuntu-latest'
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
build:
- name: openssl-quic
configure: >-
PKG_CONFIG_PATH="$HOME/openssl3/lib/pkgconfig" LDFLAGS="-Wl,-rpath,$HOME/openssl3/lib"
--enable-warnings --enable-werror --enable-debug --disable-ntlm
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-openssl=$HOME/openssl3 --with-openssl-quic
--with-nghttp3=$HOME/nghttpx
steps:
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 \
libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libev-dev libc-ares-dev \
nettle-dev libp11-kit-dev libtspi-dev libunistring-dev guile-2.2-dev libtasn1-bin \
libtasn1-6-dev libidn2-0-dev gawk gperf libtss2-dev dns-root-data bison gtk-doc-tools \
texinfo texlive texlive-extra-utils autopoint libev-dev \
apache2 apache2-dev libnghttp2-dev
name: 'install prereqs and impacket, pytest, crypto, apache2'
- name: cache openssl3
if: contains(matrix.build.install_steps, 'openssl3')
uses: actions/cache@v4
id: cache-openssl3
env:
cache-name: cache-openssl3
with:
path: /home/runner/openssl3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.openssl3-version }}
- name: 'install openssl3'
if: steps.cache-openssl3.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.openssl3-version }} https://github.com/openssl/openssl
cd openssl
./config --prefix=$HOME/openssl3 --libdir=$HOME/openssl3/lib
make -j1 install_sw
- name: cache quictls
if: contains(matrix.build.install_steps, 'quictls')
uses: actions/cache@v4
id: cache-quictls
env:
cache-name: cache-quictls
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-quictls-${{ env.quictls-version }}
- name: cache quictls
uses: actions/cache@v4
id: cache-quictls-no-deprecated
env:
cache-name: cache-quictls-no-deprecated
with:
path: /home/runner/quictls
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.quictls-version }}
- if: steps.cache-quictls-no-deprecated.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b openssl-${{ env.quictls-version }} https://github.com/quictls/openssl quictls
cd quictls
./config no-deprecated --prefix=$HOME/nghttpx --libdir=$HOME/nghttpx/lib
make
name: 'build quictls'
- run: |
cd $HOME/quictls
make -j1 install_sw
name: 'install quictls'
- name: cache nghttp3
uses: actions/cache@v4
id: cache-nghttp3
env:
cache-name: cache-nghttp3
with:
path: /home/runner/nghttp3
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.nghttp3-version }}
- if: steps.cache-nghttp3.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.nghttp3-version }} https://github.com/ngtcp2/nghttp3
cd nghttp3
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only
make
name: 'build nghttp3'
- run: |
cd $HOME/nghttp3
make install
name: 'install nghttp3'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.ngtcp2-version }} https://github.com/ngtcp2/ngtcp2
cd ngtcp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only --with-openssl
make install
name: 'install ngtcp2'
# depends on all other cached libs built so far
- run: |
git clone --quiet --depth=1 -b ${{ env.nghttp2-version }} https://github.com/nghttp2/nghttp2
cd nghttp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-http3
make install
name: 'install nghttp2'
- name: cache mod_h2
uses: actions/cache@v4
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- if: steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
name: 'build mod_h2'
- run: |
cd $HOME/mod_h2
sudo make install
name: 'install mod_h2'
- uses: actions/checkout@v4
- run: |
sudo python3 -m pip install -r tests/requirements.txt -r tests/http/requirements.txt
name: 'install python test prereqs'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
# 2500 and 25002 fail atm due to fin handling
TFLAGS: "!http/3"
- run: pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
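
TFLAGS is handed straight to the test harness, where a leading `!` excludes tests matching a keyword; `!http/3` therefore skips the HTTP/3-tagged tests that the comment above notes are failing. Run by hand, the same selection looks roughly like this (illustrative; assumes an already built tree):

cd tests
./runtests.pl '!http/3'        # run everything except tests tagged http/3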
View File
@ -7,17 +7,17 @@ name: proselint
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths:
- '.github/workflows/proselint.yml'
- '**.md'
- '.github/workflows/proselint.yml'
- '**.md'
pull_request:
branches:
- master
- master
paths:
- '.github/workflows/proselint.yml'
- '**.md'
- '.github/workflows/proselint.yml'
- '**.md'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -29,40 +29,41 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: install prereqs
run: sudo apt-get install python3-proselint
- name: install prereqs
run: sudo apt-get install python3-proselint
# config file help: https://github.com/amperser/proselint/
- name: create proselint config
run: |
cat <<JSON > $HOME/.proselintrc
{
"checks": {
"typography.diacritical_marks": false,
"typography.symbols": false,
"annotations.misc": false
# config file help: https://github.com/amperser/proselint/
- name: create proselint config
run: |
cat <<JSON > $HOME/.proselintrc
{
"checks": {
"typography.diacritical_marks": false,
"typography.symbols": false,
"annotations.misc": false,
"security.password": false
}
}
}
JSON
JSON
- name: check prose
run: a=`git ls-files '*.md' | grep -v docs/CHECKSRC.md` && proselint $a README
- name: check prose
run: a=`git ls-files '*.md' | grep -Ev '(docs/CHECKSRC.md|docs/DISTROS.md)'` && proselint $a README
# This is for CHECKSRC and files with aggressive exclamation mark needs
- name: create second proselint config
run: |
cat <<JSON > $HOME/.proselintrc
{
"checks": {
"typography.diacritical_marks": false,
"typography.symbols": false,
"typography.exclamation": false,
"annotations.misc": false
# This is for CHECKSRC and files with aggressive exclamation mark needs
- name: create second proselint config
run: |
cat <<JSON > $HOME/.proselintrc
{
"checks": {
"typography.diacritical_marks": false,
"typography.symbols": false,
"typography.exclamation": false,
"annotations.misc": false
}
}
}
JSON
JSON
- name: check special prose
run: a=docs/CHECKSRC.md && proselint $a
- name: check special prose
run: a=docs/CHECKSRC.md && proselint $a
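
To reproduce the check locally, write the same config and point proselint at a file by hand; a minimal sketch, assuming proselint is installed:

cat <<JSON > $HOME/.proselintrc
{
  "checks": {
    "typography.symbols": false,
    "annotations.misc": false
  }
}
JSON
proselint README.md            # exits non-zero when prose issues are flagged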
View File
@ -7,35 +7,35 @@ name: quiche
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
@ -48,10 +48,10 @@ env:
MAKEFLAGS: -j 3
openssl-version: 3.1.4+quic
nghttp3-version: v1.1.0
ngtcp2-version: v1.1.0
nghttp2-version: v1.58.0
quiche-version: 0.19.0
mod_h2-version: v2.0.25
ngtcp2-version: v1.2.0
nghttp2-version: v1.59.0
quiche-version: 0.20.0
mod_h2-version: v2.0.26
jobs:
autotools:
@ -62,149 +62,149 @@ jobs:
fail-fast: false
matrix:
build:
- name: quiche
install: >-
libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libev-dev libc-ares-dev
install_steps: pytest
configure: >-
LDFLAGS="-Wl,-rpath,/home/runner/quiche/target/release"
--with-openssl=/home/runner/quiche/quiche/deps/boringssl/src
--enable-debug
--with-quiche=/home/runner/quiche/target/release
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-ca-fallback
- name: quiche
install: >-
libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libev-dev libc-ares-dev
install_steps: pytest
configure: >-
LDFLAGS="-Wl,-rpath,/home/runner/quiche/target/release"
--with-openssl=/home/runner/quiche/quiche/deps/boringssl/src
--enable-debug
--with-quiche=/home/runner/quiche/target/release
--with-test-nghttpx="$HOME/nghttpx/bin/nghttpx"
--with-ca-fallback
steps:
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install }}
sudo apt-get install apache2 apache2-dev libnghttp2-dev
name: 'install prereqs and impacket, pytest, crypto'
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install }}
sudo apt-get install apache2 apache2-dev libnghttp2-dev
name: 'install prereqs and impacket, pytest, crypto'
- name: cache nghttpx
uses: actions/cache@v3
id: cache-nghttpx
env:
cache-name: cache-nghttpx
with:
path: /home/runner/nghttpx
key: ${{ runner.os }}-build-${{ env.cache-name }}-openssl-${{ env.openssl-version }}-nghttp3-${{ env.nghttp3-version }}-ngtcp2-${{ env.ngtcp2-version }}-nghttp2-${{ env.nghttp2-version }}
- name: cache nghttpx
uses: actions/cache@v4
id: cache-nghttpx
env:
cache-name: cache-nghttpx
with:
path: /home/runner/nghttpx
key: ${{ runner.os }}-build-${{ env.cache-name }}-openssl-${{ env.openssl-version }}-nghttp3-${{ env.nghttp3-version }}-ngtcp2-${{ env.ngtcp2-version }}-nghttp2-${{ env.nghttp2-version }}
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b openssl-${{ env.openssl-version }} https://github.com/quictls/openssl
cd openssl
./config --prefix=$HOME/nghttpx --libdir=$HOME/nghttpx/lib
make -j1 install_sw
name: 'install quictls'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b openssl-${{ env.openssl-version }} https://github.com/quictls/openssl
cd openssl
./config --prefix=$HOME/nghttpx --libdir=$HOME/nghttpx/lib
make -j1 install_sw
name: 'install quictls'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.nghttp3-version }} https://github.com/ngtcp2/nghttp3
cd nghttp3
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only
make install
name: 'install nghttp3'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.nghttp3-version }} https://github.com/ngtcp2/nghttp3
cd nghttp3
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only
make install
name: 'install nghttp3'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.ngtcp2-version }} https://github.com/ngtcp2/ngtcp2
cd ngtcp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only --with-openssl
make install
name: 'install ngtcp2'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.ngtcp2-version }} https://github.com/ngtcp2/ngtcp2
cd ngtcp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-lib-only --with-openssl
make install
name: 'install ngtcp2'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.nghttp2-version }} https://github.com/nghttp2/nghttp2
cd nghttp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-http3
make install
name: 'install nghttp2'
- if: steps.cache-nghttpx.outputs.cache-hit != 'true'
run: |
git clone --quiet --depth=1 -b ${{ env.nghttp2-version }} https://github.com/nghttp2/nghttp2
cd nghttp2
autoreconf -fi
./configure --prefix=$HOME/nghttpx PKG_CONFIG_PATH="$HOME/nghttpx/lib/pkgconfig" --enable-http3
make install
name: 'install nghttp2'
- name: cache quiche
uses: actions/cache@v3
id: cache-quiche
env:
cache-name: cache-quiche
with:
path: /home/runner/quiche
key: ${{ runner.os }}-build-${{ env.cache-name }}-quiche-${{ env.quiche-version }}
- name: cache quiche
uses: actions/cache@v4
id: cache-quiche
env:
cache-name: cache-quiche
with:
path: /home/runner/quiche
key: ${{ runner.os }}-build-${{ env.cache-name }}-quiche-${{ env.quiche-version }}
- if: steps.cache-quiche.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.quiche-version }} --recursive https://github.com/cloudflare/quiche.git
cd quiche
#### Work-around https://github.com/curl/curl/issues/7927 #######
#### See https://github.com/alexcrichton/cmake-rs/issues/131 ####
sed -i -e 's/cmake = "0.1"/cmake = "=0.1.45"/' quiche/Cargo.toml
- if: steps.cache-quiche.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.quiche-version }} --recursive https://github.com/cloudflare/quiche.git
cd quiche
#### Work-around https://github.com/curl/curl/issues/7927 #######
#### See https://github.com/alexcrichton/cmake-rs/issues/131 ####
sed -i -e 's/cmake = "0.1"/cmake = "=0.1.45"/' quiche/Cargo.toml
cargo build -v --package quiche --release --features ffi,pkg-config-meta,qlog --verbose
mkdir -v quiche/deps/boringssl/src/lib
ln -vnf $(find target/release -name libcrypto.a -o -name libssl.a) quiche/deps/boringssl/src/lib/
cargo build -v --package quiche --release --features ffi,pkg-config-meta,qlog --verbose
mkdir -v quiche/deps/boringssl/src/lib
ln -vnf $(find target/release -name libcrypto.a -o -name libssl.a) quiche/deps/boringssl/src/lib/
# include dir
# /home/runner/quiche/quiche/deps/boringssl/src/include
# lib dir
# /home/runner/quiche/quiche/deps/boringssl/src/lib
name: 'build quiche and boringssl'
# include dir
# /home/runner/quiche/quiche/deps/boringssl/src/include
# lib dir
# /home/runner/quiche/quiche/deps/boringssl/src/lib
name: 'build quiche and boringssl'
- name: cache mod_h2
uses: actions/cache@v3
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- name: cache mod_h2
uses: actions/cache@v4
id: cache-mod_h2
env:
cache-name: cache-mod_h2
with:
path: /home/runner/mod_h2
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.mod_h2-version }}
- if: steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
name: 'build mod_h2'
- if: steps.cache-mod_h2.outputs.cache-hit != 'true'
run: |
cd $HOME
git clone --quiet --depth=1 -b ${{ env.mod_h2-version }} https://github.com/icing/mod_h2
cd mod_h2
autoreconf -fi
./configure
make
name: 'build mod_h2'
- run: |
cd $HOME/mod_h2
sudo make install
name: 'install mod_h2'
- run: |
cd $HOME/mod_h2
sudo make install
name: 'install mod_h2'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: |
sudo python3 -m pip install -r tests/requirements.txt -r tests/http/requirements.txt
name: 'install python test prereqs'
- run: |
sudo python3 -m pip install -r tests/requirements.txt -r tests/http/requirements.txt
name: 'install python test prereqs'
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure ${{ matrix.build.configure }}
name: 'configure'
- run: ./configure ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
- run: pytest -v tests
name: 'run pytest'
env:
TFLAGS: "${{ matrix.build.tflags }}"
CURL_CI: github
View File
@ -8,11 +8,11 @@ name: REUSE compliance
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
pull_request:
branches:
- master
- master
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
@ -24,6 +24,6 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: REUSE Compliance Check
uses: fsfe/reuse-action@v2
- uses: actions/checkout@v4
- name: REUSE Compliance Check
uses: fsfe/reuse-action@v3
View File
@ -6,24 +6,20 @@ name: spell
on:
push:
branches:
- master
- master
paths:
- '**.md'
- '**.3'
- '**.1'
- '**/spellcheck.yml'
- '**/spellcheck.yaml'
- '**/wordlist.txt'
- '**.md'
- '**/spellcheck.yml'
- '**/spellcheck.yaml'
- '.github/scripts/*'
pull_request:
branches:
- master
- master
paths:
- '**.md'
- '**.3'
- '**.1'
- '**/spellcheck.yml'
- '**/spellcheck.yaml'
- '**/wordlist.txt'
- '**.md'
- '**/spellcheck.yml'
- '**/spellcheck.yaml'
- '.github/scripts/*'
permissions: {}
@ -31,36 +27,24 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: install pandoc
run: sudo apt-get install pandoc
- name: trim all man page *.md files
run: find docs -name "*.md" ! -name "_*" | xargs -n1 ./.github/scripts/cleancmd.pl
- name: build curl.1
run: |
autoreconf -fi
./configure --without-ssl
make -C docs
- name: trim libcurl man page *.md files
run: find docs/libcurl -name "curl_*.md" -o -name "libcurl*.md" | xargs -n1 ./.github/scripts/cleanspell.pl
- name: strip "uncheckable" sections from .3 pages
run: find docs -name "*.3" -size +40c | sed 's/\.3//' | xargs -t -n1 -I OO ./.github/scripts/cleanspell.pl OO.3 OO.33
- name: trim libcurl option man page *.md files
run: find docs/libcurl/opts -name "CURL*.md" | xargs -n1 ./.github/scripts/cleanspell.pl
- name: convert .3 man pages to markdown
run: find docs -name "*.33" -size +40c | sed 's/\.33//' | xargs -t -n1 -I OO pandoc -f man -t markdown OO.33 -o OO.md
- name: trim cmdline docs markdown _*.md files
run: find docs/cmdline-opts -name "_*.md" | xargs -n1 ./.github/scripts/cleancmd.pl --no-header
- name: convert .1 man pages to markdown
run: find docs -name "*.1" -size +40c | sed 's/\.1//' | xargs -t -n1 -I OO pandoc OO.1 -o OO.md
- name: setup the custom wordlist
run: grep -v '^#' .github/scripts/spellcheck.words > wordlist.txt
- name: trim the curl.1 markdown file
run: |
perl -pi -e 's/^ .*//' docs/curl.md
perl -pi -e 's/\-\-[\a-z0-9-]*//ig' docs/curl.md
perl -pi -e 's!https://[a-z0-9%/.-]*!!ig' docs/curl.md
- name: setup the custom wordlist
run: grep -v '^#' .github/scripts/spellcheck.words > wordlist.txt
- name: Check Spelling
uses: rojopolis/spellcheck-github-actions@v0
with:
config_path: .github/scripts/spellcheck.yaml
- name: Check Spelling
uses: rojopolis/spellcheck-github-actions@v0
with:
config_path: .github/scripts/spellcheck.yaml
View File
@ -7,22 +7,22 @@ name: SYNOPSIS
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths:
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/curl_*.3'
pull_request:
branches:
- master
- master
paths:
- 'docs/libcurl/curl_*.3'
- 'docs/libcurl/curl_*.3'
jobs:
verify:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: verify-synopsis
run: ./.github/scripts/verify-synopsis.pl docs/libcurl/curl*.3
View File
@ -7,35 +7,35 @@ name: Linux torture
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
@ -56,37 +56,37 @@ jobs:
fail-fast: false
matrix:
build:
- name: torture
install: libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libnghttp2-dev libssh2-1-dev libc-ares-dev
configure: --with-openssl --enable-debug --enable-ares --enable-websockets
tflags: -n -t --shallow=25 !FTP
- name: torture-ftp
install: libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libnghttp2-dev libssh2-1-dev libc-ares-dev
configure: --with-openssl --enable-debug --enable-ares
tflags: -n -t --shallow=20 FTP
- name: torture
install: libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libnghttp2-dev libssh2-1-dev libc-ares-dev
configure: --with-openssl --enable-debug --enable-ares --enable-websockets
tflags: -n -t --shallow=25 !FTP
- name: torture-ftp
install: libpsl-dev libbrotli-dev libzstd-dev zlib1g-dev libnghttp2-dev libssh2-1-dev libc-ares-dev
configure: --with-openssl --enable-debug --enable-ares
tflags: -n -t --shallow=20 FTP
steps:
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-torture
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 test-torture
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
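
In torture mode (-t) the harness reruns each test case, making one internal allocation or call fail at a time, so --shallow=25 caps how many failure-injection points are tried per test (picked at random) to keep runtimes bounded, while the !FTP / FTP split moves the slow FTP cases into their own job. A rough local equivalent of the first matrix entry (illustrative; torture needs an --enable-debug build):

cd tests
./runtests.pl -n -t --shallow=25 '!FTP'    # -n skips valgrind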
View File
@ -7,35 +7,35 @@ name: Linux wolfSSL
on:
push:
branches:
- master
- '*/ci'
- master
- '*/ci'
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
pull_request:
branches:
- master
- master
paths-ignore:
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.yml'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
- '**/*.md'
- '**/CMakeLists.txt'
- '.azure-pipelines.yml'
- '.circleci/**'
- '.cirrus.yml'
- 'appveyor.*'
- 'CMake/**'
- 'packages/**'
- 'plan9/**'
- 'projects/**'
- 'winbuild/**'
concurrency:
# Hardcoded workflow filename as workflow name above is just Linux again
@ -56,50 +56,50 @@ jobs:
fail-fast: false
matrix:
build:
- name: wolfssl (configured with --enable-all)
install:
configure: LDFLAGS="-Wl,-rpath,$HOME/wssl/lib" --with-wolfssl=$HOME/wssl --enable-debug
wolfssl-configure: --enable-all
- name: wolfssl (configured with --enable-opensslextra)
install:
configure: LDFLAGS="-Wl,-rpath,$HOME/wssl/lib" --with-wolfssl=$HOME/wssl --enable-debug
wolfssl-configure: --enable-opensslextra
- name: wolfssl (configured with --enable-all)
install:
configure: LDFLAGS="-Wl,-rpath,$HOME/wssl/lib" --with-wolfssl=$HOME/wssl --enable-debug
wolfssl-configure: --enable-all
- name: wolfssl (configured with --enable-opensslextra)
install:
configure: LDFLAGS="-Wl,-rpath,$HOME/wssl/lib" --with-wolfssl=$HOME/wssl --enable-debug
wolfssl-configure: --enable-opensslextra
steps:
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 ${{ matrix.build.install }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- run: |
sudo apt-get update
sudo apt-get install libtool autoconf automake pkg-config stunnel4 libpsl-dev ${{ matrix.build.install }}
sudo python3 -m pip install impacket
name: 'install prereqs and impacket'
- run: |
WOLFSSL_VER=5.6.3
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 https://github.com/wolfSSL/wolfssl/archive/v$WOLFSSL_VER-stable.tar.gz
tar -xzf v$WOLFSSL_VER-stable.tar.gz
cd wolfssl-$WOLFSSL_VER-stable
./autogen.sh
./configure --enable-tls13 ${{ matrix.build.wolfssl-configure }} --enable-harden --prefix=$HOME/wssl
make install
name: 'install wolfssl'
- run: |
WOLFSSL_VER=5.6.3
curl -LOsSf --retry 6 --retry-connrefused --max-time 999 https://github.com/wolfSSL/wolfssl/archive/v$WOLFSSL_VER-stable.tar.gz
tar -xzf v$WOLFSSL_VER-stable.tar.gz
cd wolfssl-$WOLFSSL_VER-stable
./autogen.sh
./configure --enable-tls13 ${{ matrix.build.wolfssl-configure }} --enable-harden --prefix=$HOME/wssl
make install
name: 'install wolfssl'
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- run: autoreconf -fi
name: 'autoreconf'
- run: autoreconf -fi
name: 'autoreconf'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }}
name: 'configure'
- run: make V=1
name: 'make'
- run: make V=1
name: 'make'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 examples
name: 'make examples'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 -C tests
name: 'make tests'
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
- run: make V=1 test-ci
name: 'run tests'
env:
TFLAGS: "${{ matrix.build.tflags }}"
View File
@ -32,7 +32,7 @@ Files: CHANGES
Copyright: Daniel Stenberg, <daniel@haxx.se>, et al.
License: curl
Files: GIT-INFO
Files: GIT-INFO.md
Copyright: Daniel Stenberg, <daniel@haxx.se>, et al.
License: curl
@ -96,3 +96,7 @@ License: curl
Files: .mailmap
Copyright: Daniel Stenberg, <daniel@haxx.se>, et al.
License: curl
Files: .github/dependabot.yml
Copyright: Daniel Stenberg, <daniel@haxx.se>, et al.
License: curl
View File
@ -164,14 +164,11 @@ int main(void) { ; return 0; }
#ifdef HAVE_IOCTLSOCKET
/* includes start */
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifdef HAVE_WINSOCK2_H
# include <winsock2.h>
# endif
# include <windows.h>
# include <winsock2.h>
#endif
int main(void)
{
@ -186,14 +183,11 @@ int main(void)
#ifdef HAVE_IOCTLSOCKET_CAMEL
/* includes start */
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifdef HAVE_WINSOCK2_H
# include <winsock2.h>
# endif
# include <windows.h>
# include <winsock2.h>
#endif
int main(void)
{
@ -207,14 +201,11 @@ int main(void)
#ifdef HAVE_IOCTLSOCKET_CAMEL_FIONBIO
/* includes start */
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifdef HAVE_WINSOCK2_H
# include <winsock2.h>
# endif
# include <windows.h>
# include <winsock2.h>
#endif
int main(void)
{
@ -229,14 +220,11 @@ int main(void)
#ifdef HAVE_IOCTLSOCKET_FIONBIO
/* includes start */
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifdef HAVE_WINSOCK2_H
# include <winsock2.h>
# endif
# include <windows.h>
# include <winsock2.h>
#endif
int main(void)
{
@ -307,14 +295,11 @@ int main(void)
#ifdef HAVE_SETSOCKOPT_SO_NONBLOCK
/* includes start */
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifdef HAVE_WINSOCK2_H
# include <winsock2.h>
# endif
# include <windows.h>
# include <winsock2.h>
#endif
/* includes start */
#ifdef HAVE_SYS_TYPES_H
View File
@ -45,7 +45,7 @@ macro(curl_internal_test CURL_TEST)
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
endif()
message(STATUS "Performing Curl Test ${CURL_TEST}")
message(STATUS "Performing Test ${CURL_TEST}")
try_compile(${CURL_TEST}
${CMAKE_BINARY_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/CMake/CurlTests.c
@ -54,49 +54,20 @@ macro(curl_internal_test CURL_TEST)
OUTPUT_VARIABLE OUTPUT)
if(${CURL_TEST})
set(${CURL_TEST} 1 CACHE INTERNAL "Curl test ${FUNCTION}")
message(STATUS "Performing Curl Test ${CURL_TEST} - Success")
message(STATUS "Performing Test ${CURL_TEST} - Success")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Performing Curl Test ${CURL_TEST} passed with the following output:\n"
"Performing Test ${CURL_TEST} passed with the following output:\n"
"${OUTPUT}\n")
else()
message(STATUS "Performing Curl Test ${CURL_TEST} - Failed")
message(STATUS "Performing Test ${CURL_TEST} - Failed")
set(${CURL_TEST} "" CACHE INTERNAL "Curl test ${FUNCTION}")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing Curl Test ${CURL_TEST} failed with the following output:\n"
"Performing Test ${CURL_TEST} failed with the following output:\n"
"${OUTPUT}\n")
endif()
endif()
endmacro()
macro(curl_nroff_check)
find_program(NROFF NAMES gnroff nroff)
if(NROFF)
# Need a way to write to stdin, this will do
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/nroff-input.txt" "test")
# Tests for a valid nroff option to generate a manpage
foreach(_MANOPT "-man" "-mandoc")
execute_process(COMMAND "${NROFF}" ${_MANOPT}
OUTPUT_VARIABLE NROFF_MANOPT_OUTPUT
INPUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/nroff-input.txt"
ERROR_QUIET)
# Save the option if it was valid
if(NROFF_MANOPT_OUTPUT)
message("Found *nroff option: -- ${_MANOPT}")
set(NROFF_MANOPT ${_MANOPT})
set(NROFF_USEFUL ON)
break()
endif()
endforeach()
# No need for the temporary file
file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/nroff-input.txt")
if(NOT NROFF_USEFUL)
message(WARNING "Found no *nroff option to get plaintext from man pages")
endif()
else()
message(WARNING "Found no *nroff program")
endif()
endmacro()
macro(optional_dependency DEPENDENCY)
set(CURL_${DEPENDENCY} AUTO CACHE STRING "Build curl with ${DEPENDENCY} support (AUTO, ON or OFF)")
set_property(CACHE CURL_${DEPENDENCY} PROPERTY STRINGS AUTO ON OFF)

View File
###########################################################################
include(CheckCSourceCompiles)
include(CheckCSourceRuns)
# The begin of the sources (macros and includes)
set(_source_epilogue "#undef inline")
include(CheckTypeSize)
macro(add_header_include check header)
if(${check})
set(_source_epilogue "${_source_epilogue}\n#include <${header}>")
set(_source_epilogue "${_source_epilogue}
#include <${header}>")
endif()
endmacro()
set(signature_call_conv)
if(HAVE_WINDOWS_H)
set(_source_epilogue
"${_source_epilogue}\n#ifndef WIN32_LEAN_AND_MEAN\n#define WIN32_LEAN_AND_MEAN\n#endif")
add_header_include(HAVE_WINSOCK2_H "winsock2.h")
add_header_include(HAVE_WINDOWS_H "windows.h")
set(signature_call_conv "PASCAL")
if(WIN32)
set(CMAKE_REQUIRED_LIBRARIES "ws2_32")
endif()
else()
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
endif()
set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
check_c_source_compiles("${_source_epilogue}
int main(void) {
int flag = MSG_NOSIGNAL;
(void)flag;
return 0;
}" HAVE_MSG_NOSIGNAL)
if(NOT HAVE_WINDOWS_H)
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
endif()
check_c_source_compiles("${_source_epilogue}
#include <time.h>
int main(void) {
struct timeval ts;
ts.tv_sec = 0;
ts.tv_usec = 0;
(void)ts;
return 0;
}" HAVE_STRUCT_TIMEVAL)
if(HAVE_WINDOWS_H)
set(CMAKE_EXTRA_INCLUDE_FILES "winsock2.h")
else()
if(NOT DEFINED HAVE_STRUCT_SOCKADDR_STORAGE)
set(CMAKE_EXTRA_INCLUDE_FILES)
if(HAVE_SYS_SOCKET_H)
if(WIN32)
set(CMAKE_EXTRA_INCLUDE_FILES "winsock2.h")
set(CMAKE_REQUIRED_DEFINITIONS "-DWIN32_LEAN_AND_MEAN")
set(CMAKE_REQUIRED_LIBRARIES "ws2_32")
elseif(HAVE_SYS_SOCKET_H)
set(CMAKE_EXTRA_INCLUDE_FILES "sys/socket.h")
endif()
check_type_size("struct sockaddr_storage" SIZEOF_STRUCT_SOCKADDR_STORAGE)
set(HAVE_STRUCT_SOCKADDR_STORAGE ${HAVE_SIZEOF_STRUCT_SOCKADDR_STORAGE})
endif()
check_type_size("struct sockaddr_storage" SIZEOF_STRUCT_SOCKADDR_STORAGE)
if(HAVE_SIZEOF_STRUCT_SOCKADDR_STORAGE)
set(HAVE_STRUCT_SOCKADDR_STORAGE 1)
if(NOT WIN32)
set(_source_epilogue "#undef inline")
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
check_c_source_compiles("${_source_epilogue}
int main(void)
{
int flag = MSG_NOSIGNAL;
(void)flag;
return 0;
}" HAVE_MSG_NOSIGNAL)
endif()
set(_source_epilogue "#undef inline")
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
check_c_source_compiles("${_source_epilogue}
#include <time.h>
int main(void)
{
struct timeval ts;
ts.tv_sec = 0;
ts.tv_usec = 0;
(void)ts;
return 0;
}" HAVE_STRUCT_TIMEVAL)
unset(CMAKE_TRY_COMPILE_TARGET_TYPE)
if(NOT CMAKE_CROSSCOMPILING)
if(NOT ${CMAKE_SYSTEM_NAME} MATCHES "Darwin" AND NOT ${CMAKE_SYSTEM_NAME} MATCHES "iOS")
# only try this on non-apple platforms
if(NOT CMAKE_CROSSCOMPILING AND NOT APPLE)
set(_source_epilogue "#undef inline")
add_header_include(HAVE_SYS_POLL_H "sys/poll.h")
add_header_include(HAVE_POLL_H "poll.h")
check_c_source_runs("${_source_epilogue}
#include <stdlib.h>
#include <sys/time.h>
int main(void)
{
if(0 != poll(0, 0, 10)) {
return 1; /* fail */
}
else {
/* detect the 10.12 poll() breakage */
struct timeval before, after;
int rc;
size_t us;
# if not cross-compilation...
set(CMAKE_REQUIRED_FLAGS "")
if(HAVE_SYS_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_SYS_POLL_H")
elseif(HAVE_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_POLL_H")
endif()
check_c_source_runs("
#include <stdlib.h>
#include <sys/time.h>
gettimeofday(&before, NULL);
rc = poll(NULL, 0, 500);
gettimeofday(&after, NULL);
#ifdef HAVE_SYS_POLL_H
# include <sys/poll.h>
#elif HAVE_POLL_H
# include <poll.h>
#endif
us = (after.tv_sec - before.tv_sec) * 1000000 +
(after.tv_usec - before.tv_usec);
int main(void)
{
if(0 != poll(0, 0, 10)) {
return 1; /* fail */
}
else {
/* detect the 10.12 poll() breakage */
struct timeval before, after;
int rc;
size_t us;
gettimeofday(&before, NULL);
rc = poll(NULL, 0, 500);
gettimeofday(&after, NULL);
us = (after.tv_sec - before.tv_sec) * 1000000 +
(after.tv_usec - before.tv_usec);
if(us < 400000) {
return 1;
}
}
return 0;
if(us < 400000) {
return 1;
}
}
return 0;
}" HAVE_POLL_FINE)
endif()
endif()
# Detect HAVE_GETADDRINFO_THREADSAFE
@ -140,8 +114,8 @@ if(WIN32)
set(HAVE_GETADDRINFO_THREADSAFE ${HAVE_GETADDRINFO})
elseif(NOT HAVE_GETADDRINFO)
set(HAVE_GETADDRINFO_THREADSAFE FALSE)
elseif(CMAKE_SYSTEM_NAME STREQUAL "AIX" OR
CMAKE_SYSTEM_NAME STREQUAL "Darwin" OR
elseif(APPLE OR
CMAKE_SYSTEM_NAME STREQUAL "AIX" OR
CMAKE_SYSTEM_NAME STREQUAL "FreeBSD" OR
CMAKE_SYSTEM_NAME STREQUAL "HP-UX" OR
CMAKE_SYSTEM_NAME STREQUAL "MidnightBSD" OR
@ -153,14 +127,10 @@ elseif(CMAKE_SYSTEM_NAME MATCHES "BSD")
endif()
if(NOT DEFINED HAVE_GETADDRINFO_THREADSAFE)
set(_save_epilogue "${_source_epilogue}")
set(_source_epilogue "#undef inline")
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
add_header_include(HAVE_NETDB_H "netdb.h")
check_c_source_compiles("${_source_epilogue}
int main(void)
{
@ -197,17 +167,12 @@ if(NOT DEFINED HAVE_GETADDRINFO_THREADSAFE)
if(HAVE_H_ERRNO OR HAVE_H_ERRNO_ASSIGNABLE OR HAVE_H_ERRNO_SBS_ISSUE_7)
set(HAVE_GETADDRINFO_THREADSAFE TRUE)
endif()
set(_source_epilogue "${_save_epilogue}")
endif()
if(NOT WIN32 AND NOT DEFINED HAVE_CLOCK_GETTIME_MONOTONIC_RAW)
set(_save_epilogue "${_source_epilogue}")
set(_source_epilogue "#undef inline")
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
check_c_source_compiles("${_source_epilogue}
#include <time.h>
int main(void)
@ -216,6 +181,4 @@ if(NOT WIN32 AND NOT DEFINED HAVE_CLOCK_GETTIME_MONOTONIC_RAW)
(void)clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
return 0;
}" HAVE_CLOCK_GETTIME_MONOTONIC_RAW)
set(_source_epilogue "${_save_epilogue}")
endif()

View File
###########################################################################
include(CheckCCompilerFlag)
unset(WPICKY)
if(CURL_WERROR AND CMAKE_COMPILER_IS_GNUCC AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0)
set(WPICKY "${WPICKY} -pedantic-errors")
endif()
if(PICKY_COMPILER)
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
@ -83,11 +89,12 @@ if(PICKY_COMPILER)
-Wmissing-field-initializers # clang 2.7 gcc 4.1
-Wmissing-noreturn # clang 2.7 gcc 4.1
-Wno-format-nonliteral # clang 1.0 gcc 2.96 (3.0)
-Wno-sign-conversion # clang 2.9 gcc 4.3
-Wno-system-headers # clang 1.0 gcc 3.0
# -Wpadded # clang 2.9 gcc 4.1 # Not used because we cannot change public structs
-Wredundant-decls # clang 2.7 gcc 4.1
-Wold-style-definition # clang 2.7 gcc 3.4
-Wredundant-decls # clang 2.7 gcc 4.1
-Wsign-conversion # clang 2.9 gcc 4.3
-Wno-error=sign-conversion # FIXME
-Wstrict-prototypes # clang 1.0 gcc 3.3
# -Wswitch-enum # clang 2.7 gcc 4.1 # Not used because this basically disallows default case
-Wtype-limits # clang 2.7 gcc 4.3
@ -110,6 +117,7 @@ if(PICKY_COMPILER)
-Wshift-sign-overflow # clang 2.9
-Wshorten-64-to-32 # clang 1.0
-Wlanguage-extension-token # clang 3.0
-Wformat=2 # clang 3.0 gcc 4.8
)
# Enable based on compiler version
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 3.6) OR
@ -135,6 +143,12 @@ if(PICKY_COMPILER)
-Wextra-semi-stmt # clang 7.0 appleclang 10.3
)
endif()
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.0) OR
(CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.4))
list(APPEND WPICKY_ENABLE
-Wimplicit-fallthrough # clang 4.0 gcc 7.0 appleclang 12.4 # we have silencing markup for clang 10.0 and above only
)
endif()
else() # gcc
list(APPEND WPICKY_DETECT
${WPICKY_COMMON}
@ -147,6 +161,7 @@ if(PICKY_COMPILER)
-Wmissing-parameter-type # gcc 4.3
-Wold-style-declaration # gcc 4.3
-Wstrict-aliasing=3 # gcc 4.0
-Wtrampolines # gcc 4.3
)
endif()
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.5 AND MINGW)
@ -156,7 +171,7 @@ if(PICKY_COMPILER)
endif()
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.8)
list(APPEND WPICKY_ENABLE
-Wformat=2 # clang 3.0 gcc 4.8 (clang part-default, enabling it fully causes -Wformat-nonliteral warnings)
-Wformat=2 # clang 3.0 gcc 4.8
)
endif()
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0)
@ -179,6 +194,7 @@ if(PICKY_COMPILER)
-Wduplicated-branches # gcc 7.0
-Wformat-overflow=2 # gcc 7.0
-Wformat-truncation=2 # gcc 7.0
-Wimplicit-fallthrough # clang 4.0 gcc 7.0
-Wrestrict # gcc 7.0
)
endif()
@ -191,8 +207,6 @@ if(PICKY_COMPILER)
#
unset(WPICKY)
foreach(_CCOPT IN LISTS WPICKY_ENABLE)
set(WPICKY "${WPICKY} ${_CCOPT}")
endforeach()
@ -209,8 +223,10 @@ if(PICKY_COMPILER)
set(WPICKY "${WPICKY} ${_CCOPT}")
endif()
endforeach()
message(STATUS "Picky compiler options:${WPICKY}")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${WPICKY}")
endif()
endif()
if(WPICKY)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${WPICKY}")
message(STATUS "Picky compiler options:${WPICKY}")
endif()
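
The hunks above mostly show the WPICKY_ENABLE side (flags assumed safe at the listed compiler versions). For the WPICKY_DETECT side, a sketch of the usual probing loop; the actual loop lives in the elided part of this hunk:

include(CheckCCompilerFlag)
foreach(_CCOPT IN LISTS WPICKY_DETECT)
  # Probe each candidate and append it only when the compiler accepts it.
  # -Wno-* flags are tested in their positive -W* form, since compilers
  # tend to accept unknown -Wno-* options silently.
  string(MAKE_C_IDENTIFIER "OPT${_CCOPT}" _optvarname)
  string(REGEX REPLACE "^-Wno-" "-W" _optname "${_CCOPT}")
  check_c_compiler_flag(${_optname} ${_optvarname})
  if(${_optvarname})
    set(WPICKY "${WPICKY} ${_CCOPT}")
  endif()
endforeach()
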

View File

@ -171,6 +171,7 @@ set(HAVE_POSIX_STRERROR_R 0)
set(HAVE_BUILTIN_AVAILABLE 0)
set(HAVE_MSG_NOSIGNAL 0)
set(HAVE_STRUCT_TIMEVAL 1)
set(HAVE_STRUCT_SOCKADDR_STORAGE 1)
set(HAVE_GETHOSTBYNAME_R_3 0)
set(HAVE_GETHOSTBYNAME_R_3_REENTRANT 0)

View File

@ -23,7 +23,7 @@
###########################################################################
# File containing various utilities
# Returns a list of arguments that evaluate to true
# Returns number of arguments that evaluate to true
function(count_true output_count_var)
set(lst_len 0)
foreach(option_var IN LISTS ARGN)
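
The hunk cuts off before the loop body. Assuming the function counts truthy variables and exports the total via PARENT_SCOPE, a hypothetical call site looks like this (variable names here are illustrative, not from the diff):

# Hypothetical use: refuse a configuration that enables several backends.
count_true(_enabled_backends CURL_USE_OPENSSL CURL_USE_MBEDTLS CURL_USE_BEARSSL)
if(_enabled_backends GREATER 1)
  message(FATAL_ERROR "Pick at most one TLS backend for this sketch")
endif()
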

View File

@ -21,25 +21,8 @@
# SPDX-License-Identifier: curl
#
###########################################################################
# curl/libcurl CMake script
# by Tetetest and Sukender (Benoit Neil)
# TODO:
# The output .so file lacks the soname number which we currently have within the lib/Makefile.am file
# Add full (4 or 5 libs) SSL support
# Add INSTALL target (EXTRA_DIST variables in Makefile.am may be moved to Makefile.inc so that CMake/CPack is aware of what's to include).
# Check on all possible platforms
# Test with as many configurations possible (With or without any option)
# Create scripts that help keeping the CMake build system up to date (to reduce maintenance). According to Tetetest:
# - lists of headers that 'configure' checks for;
# - curl-specific tests (the ones that are in m4/curl-*.m4 files);
# - (most obvious thing:) curl version numbers.
# Add documentation subproject
#
# To check:
# (From Daniel Stenberg) The cmake build selected to run gcc with -fPIC on my box while the plain configure script did not.
# (From Daniel Stenberg) The gcc command line use neither -g nor any -O options. As a developer, I also treasure our configure scripts's --enable-debug option that sets a long range of "picky" compiler options.
# Note: By default this CMake build script detects the version of some
# dependencies using `check_symbol_exists`. Those checks do not work
# in the case that both CURL and its dependency are included as
@ -105,7 +88,7 @@ option(BUILD_SHARED_LIBS "Build shared libraries" ON)
option(BUILD_STATIC_LIBS "Build static libraries" OFF)
option(BUILD_STATIC_CURL "Build curl executable with static libcurl" OFF)
option(ENABLE_ARES "Set to ON to enable c-ares support" OFF)
option(CURL_DISABLE_INSTALL "Set to ON to disable instalation targets" OFF)
option(CURL_DISABLE_INSTALL "Set to ON to disable installation targets" OFF)
if(WIN32)
option(CURL_STATIC_CRT "Set to ON to build libcurl with static CRT on Windows (/MT)." OFF)
@ -321,18 +304,18 @@ if(ENABLE_IPV6 AND NOT WIN32)
endif()
endif()
if(USE_MANUAL)
#nroff is currently only used when USE_MANUAL is set, so we can prevent the warning of no *NROFF if USE_MANUAL is OFF (or not defined), by not even looking for NROFF..
curl_nroff_check()
endif()
find_package(Perl)
cmake_dependent_option(ENABLE_MANUAL "to provide the built-in manual"
ON "NROFF_USEFUL;PERL_FOUND"
OFF)
option(BUILD_LIBCURL_DOCS "to build libcurl man pages" ON)
option(ENABLE_CURL_MANUAL "to build the man page for curl and enable its -M/--manual option" ON)
if(ENABLE_MANUAL)
set(USE_MANUAL ON)
if(ENABLE_CURL_MANUAL OR BUILD_LIBCURL_DOCS)
if(PERL_FOUND)
set(HAVE_MANUAL_TOOLS ON)
endif()
if(NOT HAVE_MANUAL_TOOLS)
message(WARNING "Perl not found. Will not build manuals.")
endif()
endif()
if(CURL_STATIC_CRT)
@ -368,9 +351,6 @@ include(CheckCSourceCompiles)
# On windows preload settings
if(WIN32)
set(HAVE_WINDOWS_H 1)
set(HAVE_WS2TCPIP_H 1)
set(HAVE_WINSOCK2_H 1)
include(${CMAKE_CURRENT_SOURCE_DIR}/CMake/Platforms/WindowsCache.cmake)
endif()
@ -412,8 +392,7 @@ if(APPLE)
endif()
if(WIN32)
cmake_dependent_option(CURL_USE_SCHANNEL "Enable Windows native SSL/TLS" OFF CURL_ENABLE_SSL OFF)
cmake_dependent_option(CURL_WINDOWS_SSPI "Use windows libraries to allow NTLM authentication without OpenSSL" ON
CURL_USE_SCHANNEL OFF)
option(CURL_WINDOWS_SSPI "Enable SSPI on Windows" ${CURL_USE_SCHANNEL})
endif()
cmake_dependent_option(CURL_USE_MBEDTLS "Enable mbedTLS for SSL/TLS" OFF CURL_ENABLE_SSL OFF)
cmake_dependent_option(CURL_USE_BEARSSL "Enable BearSSL for SSL/TLS" OFF CURL_ENABLE_SSL OFF)
@ -736,6 +715,29 @@ if(USE_MSH3)
list(APPEND CURL_LIBS ${MSH3_LIBRARIES})
endif()
option(USE_OPENSSL_QUIC "Use openssl and nghttp3 libraries for HTTP/3 support" OFF)
if(USE_OPENSSL_QUIC)
if(USE_NGTCP2 OR USE_QUICHE OR USE_MSH3)
message(FATAL_ERROR "Only one HTTP/3 backend can be selected!")
endif()
find_package(OpenSSL 3.2.0 REQUIRED)
find_package(NGHTTP3 REQUIRED)
set(USE_NGHTTP3 ON)
include_directories(${NGHTTP3_INCLUDE_DIRS})
list(APPEND CURL_LIBS ${NGHTTP3_LIBRARIES})
endif()
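
A hedged example of selecting this backend at configure time, using the option names defined above (OpenSSL 3.2.0 or newer is demanded by the find_package() call):

# cmake -B build -S . -DCURL_USE_OPENSSL=ON -DUSE_OPENSSL_QUIC=ON
# Only one of USE_NGTCP2, USE_QUICHE, USE_MSH3 and USE_OPENSSL_QUIC may be
# ON at a time, per the FATAL_ERROR guard above.
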
if(USE_MBEDTLS OR
USE_BEARSSL OR
USE_SECTRANSP)
message(WARNING "A selected TLS library does not support TLS 1.3.")
endif()
if(CURL_WITH_MULTI_SSL AND (USE_NGTCP2 OR USE_QUICHE OR USE_MSH3 OR USE_OPENSSL_QUIC))
message(FATAL_ERROR "MultiSSL cannot be enabled with HTTP/3 and vice versa.")
endif()
if(NOT CURL_DISABLE_SRP AND (HAVE_GNUTLS_SRP OR HAVE_OPENSSL_SRP))
set(USE_TLS_SRP 1)
endif()
@ -787,7 +789,7 @@ if(NOT CURL_DISABLE_LDAP)
endif()
set(NEED_LBER_H ON)
set(_HEADER_LIST)
if(HAVE_WINDOWS_H)
if(WIN32)
list(APPEND _HEADER_LIST "windows.h")
endif()
if(HAVE_SYS_TYPES_H)
@ -927,10 +929,8 @@ if(CURL_USE_GSSAPI)
check_include_file_concat("gssapi/gssapi_generic.h" HAVE_GSSAPI_GSSAPI_GENERIC_H)
check_include_file_concat("gssapi/gssapi_krb5.h" HAVE_GSSAPI_GSSAPI_KRB5_H)
if(GSS_FLAVOUR STREQUAL "Heimdal")
set(HAVE_GSSHEIMDAL ON)
else() # MIT
set(HAVE_GSSMIT ON)
if(NOT GSS_FLAVOUR STREQUAL "Heimdal")
# MIT
set(_INCLUDE_LIST "")
if(HAVE_GSSAPI_GSSAPI_H)
list(APPEND _INCLUDE_LIST "gssapi/gssapi.h")
@ -1070,9 +1070,9 @@ endif()
# Check for header files
if(WIN32)
check_include_file_concat("winsock2.h" HAVE_WINSOCK2_H)
check_include_file_concat("ws2tcpip.h" HAVE_WS2TCPIP_H)
check_include_file_concat("windows.h" HAVE_WINDOWS_H)
set(CURL_INCLUDES ${CURL_INCLUDES} "winsock2.h")
set(CURL_INCLUDES ${CURL_INCLUDES} "ws2tcpip.h")
set(CURL_INCLUDES ${CURL_INCLUDES} "windows.h")
endif()
if(WIN32)
@ -1266,7 +1266,7 @@ set(HAVE_SA_FAMILY_T ${HAVE_SIZEOF_SA_FAMILY_T})
set(CMAKE_EXTRA_INCLUDE_FILES "")
if(WIN32)
set(CMAKE_EXTRA_INCLUDE_FILES "ws2def.h")
set(CMAKE_EXTRA_INCLUDE_FILES "winsock2.h")
check_type_size("ADDRESS_FAMILY" SIZEOF_ADDRESS_FAMILY)
set(HAVE_ADDRESS_FAMILY ${HAVE_SIZEOF_ADDRESS_FAMILY})
set(CMAKE_EXTRA_INCLUDE_FILES "")
@ -1406,15 +1406,6 @@ if(CMAKE_COMPILER_IS_GNUCC AND APPLE)
endif()
endif()
# TODO test which of these headers are required
if(WIN32)
set(CURL_PULL_WS2TCPIP_H ${HAVE_WS2TCPIP_H})
else()
set(CURL_PULL_SYS_TYPES_H ${HAVE_SYS_TYPES_H})
set(CURL_PULL_SYS_SOCKET_H ${HAVE_SYS_SOCKET_H})
set(CURL_PULL_SYS_POLL_H ${HAVE_SYS_POLL_H})
endif()
include(CMake/OtherTests.cmake)
add_definitions(-DHAVE_CONFIG_H)
@ -1504,7 +1495,7 @@ set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated")
set(project_config "${generated_dir}/${PROJECT_NAME}Config.cmake")
set(version_config "${generated_dir}/${PROJECT_NAME}ConfigVersion.cmake")
if(USE_MANUAL)
if(HAVE_MANUAL_TOOLS)
add_subdirectory(docs)
endif()
@ -1533,7 +1524,6 @@ if(NOT CURL_DISABLE_INSTALL)
endmacro()
# NTLM support requires crypto function adaptions from various SSL libs
# TODO alternative SSL libs tests for SSP1, GnuTLS, NSS
if(NOT (CURL_DISABLE_NTLM) AND
(USE_OPENSSL OR USE_MBEDTLS OR USE_DARWINSSL OR USE_WIN32_CRYPTO OR USE_GNUTLS))
set(use_curl_ntlm_core ON)
@ -1551,28 +1541,22 @@ if(NOT CURL_DISABLE_INSTALL)
_add_if("IDN" HAVE_LIBIDN2 OR USE_WIN32_IDN)
_add_if("Largefile" (SIZEOF_CURL_OFF_T GREATER 4) AND
((SIZEOF_OFF_T GREATER 4) OR USE_WIN32_LARGE_FILES))
# TODO SSP1 (Schannel) check is missing
_add_if("SSPI" USE_WINDOWS_SSPI)
_add_if("GSS-API" HAVE_GSSAPI)
_add_if("alt-svc" NOT CURL_DISABLE_ALTSVC)
_add_if("HSTS" NOT CURL_DISABLE_HSTS)
# TODO SSP1 missing for SPNEGO
_add_if("SPNEGO" NOT CURL_DISABLE_NEGOTIATE_AUTH AND
(HAVE_GSSAPI OR USE_WINDOWS_SSPI))
_add_if("Kerberos" NOT CURL_DISABLE_KERBEROS_AUTH AND
(HAVE_GSSAPI OR USE_WINDOWS_SSPI))
# NTLM support requires crypto function adaptions from various SSL libs
# TODO alternative SSL libs tests for SSP1, GnuTLS, NSS
_add_if("NTLM" NOT (CURL_DISABLE_NTLM) AND
(use_curl_ntlm_core OR USE_WINDOWS_SSPI))
# TODO missing option (autoconf: --enable-ntlm-wb)
_add_if("NTLM_WB" NOT (CURL_DISABLE_NTLM) AND
(use_curl_ntlm_core OR USE_WINDOWS_SSPI) AND
NOT CURL_DISABLE_HTTP AND NTLM_WB_ENABLED)
_add_if("TLS-SRP" USE_TLS_SRP)
# TODO option --with-nghttp2 tests for nghttp2 lib and nghttp2/nghttp2.h header
_add_if("HTTP2" USE_NGHTTP2)
_add_if("HTTP3" USE_NGTCP2 OR USE_QUICHE)
_add_if("HTTP3" USE_NGTCP2 OR USE_QUICHE OR USE_OPENSSL_QUIC)
_add_if("MultiSSL" CURL_WITH_MULTI_SSL)
# TODO wolfSSL only support this from v5.0.0 onwards
_add_if("HTTPS-proxy" SSL_ENABLED AND (USE_OPENSSL OR USE_GNUTLS
@ -1589,6 +1573,8 @@ if(NOT CURL_DISABLE_INSTALL)
# Clear list and try to detect available protocols
set(_items)
_add_if("HTTP" NOT CURL_DISABLE_HTTP)
_add_if("IPFS" NOT CURL_DISABLE_HTTP)
_add_if("IPNS" NOT CURL_DISABLE_HTTP)
_add_if("HTTPS" NOT CURL_DISABLE_HTTP AND SSL_ENABLED)
_add_if("FTP" NOT CURL_DISABLE_FTP)
_add_if("FTPS" NOT CURL_DISABLE_FTP AND SSL_ENABLED)
@ -1655,6 +1641,30 @@ if(NOT CURL_DISABLE_INSTALL)
set(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS}")
set(LIBCURL_LIBS "")
set(libdir "${CMAKE_INSTALL_PREFIX}/lib")
# For processing full path libraries into -L and -l ld options,
# the directories that go with the -L option are cached, so they
# only get added once per such directory.
set(_libcurl_libs_dirs)
# To avoid getting unnecessary -L options for known system directories,
# _libcurl_libs_dirs is seeded with them.
foreach(_libdir ${CMAKE_SYSTEM_PREFIX_PATH})
if(_libdir MATCHES "/$")
set(_libdir "${_libdir}lib")
else()
set(_libdir "${_libdir}/lib")
endif()
if(IS_DIRECTORY "${_libdir}")
list(APPEND _libcurl_libs_dirs "${_libdir}")
endif()
if(DEFINED CMAKE_LIBRARY_ARCHITECTURE)
set(_libdir "${_libdir}/${CMAKE_LIBRARY_ARCHITECTURE}")
if(IS_DIRECTORY "${_libdir}")
list(APPEND _libcurl_libs_dirs "${_libdir}")
endif()
endif()
endforeach()
foreach(_lib ${CMAKE_C_IMPLICIT_LINK_LIBRARIES} ${CURL_LIBS})
if(TARGET "${_lib}")
set(_libname "${_lib}")
@ -1670,8 +1680,24 @@ if(NOT CURL_DISABLE_INSTALL)
continue()
endif()
endif()
if(_lib MATCHES ".*/.*" OR _lib MATCHES "^-")
if(_lib MATCHES "^-")
set(LIBCURL_LIBS "${LIBCURL_LIBS} ${_lib}")
elseif(_lib MATCHES ".*/.*")
# This gets a bit more complex, because we want to specify the
# directory separately, and only once per directory
string(REGEX REPLACE "^(.*)/[^/]*$" "\\1" _libdir "${_lib}")
string(REGEX REPLACE "^.*/([^/.]*).*$" "\\1" _libname "${_lib}")
if(_libname MATCHES "^lib")
list(FIND _libcurl_libs_dirs "${_libdir}" _libdir_index)
if(_libdir_index LESS 0)
list(APPEND _libcurl_libs_dirs "${_libdir}")
set(LIBCURL_LIBS "${LIBCURL_LIBS} -L${_libdir}")
endif()
string(REGEX REPLACE "^lib" "" _libname "${_libname}")
set(LIBCURL_LIBS "${LIBCURL_LIBS} -l${_libname}")
else()
set(LIBCURL_LIBS "${LIBCURL_LIBS} ${_lib}")
endif()
else()
set(LIBCURL_LIBS "${LIBCURL_LIBS} -l${_lib}")
endif()
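
Illustration (not part of the diff) of what the two regexes above do to a full-path library entry; the path is hypothetical:

set(_lib "/opt/zstd/lib/libzstd.so")
string(REGEX REPLACE "^(.*)/[^/]*$" "\\1" _libdir "${_lib}")      # -> /opt/zstd/lib
string(REGEX REPLACE "^.*/([^/.]*).*$" "\\1" _libname "${_lib}")  # -> libzstd
string(REGEX REPLACE "^lib" "" _libname "${_libname}")            # -> zstd
# Emitted once per newly seen directory: "-L/opt/zstd/lib", then "-lzstd".
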

deps/curl/COPYING vendored
View File

@ -1,6 +1,6 @@
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 1996 - 2023, Daniel Stenberg, <daniel@haxx.se>, and many
Copyright (c) 1996 - 2024, Daniel Stenberg, <daniel@haxx.se>, and many
contributors, see the THANKS file.
All rights reserved.

deps/curl/GIT-INFO vendored
View File

@ -1,44 +0,0 @@
      _   _ ____  _
  ___| | | |  _ \| |
 / __| | | | |_) | |
| (__| |_| |  _ <| |___
 \___|\___/|_| \_\_____|
GIT-INFO
This file is only present in git - never in release archives. It contains
information about other files and things that the git repository keeps in its
inner sanctum.
To build in environments that support configure, after having extracted
everything from git, do this:
autoreconf -fi
./configure --with-openssl
make
Daniel uses a ./configure line similar to this for easier development:
./configure --disable-shared --enable-debug --enable-maintainer-mode
In environments that don't support configure (i.e. Microsoft), do this:
buildconf.bat
REQUIREMENTS
For autoreconf and configure (not buildconf.bat) to work, you need the
following software installed:
o autoconf 2.57 (or later)
o automake 1.7 (or later)
o libtool 1.4.2 (or later)
o GNU m4 (required by autoconf)
o nroff + perl
If you don't have nroff and perl and you for some reason don't want to
install them, you can rename the source file src/tool_hugehelp.c.cvs to
src/tool_hugehelp.c and avoid having to generate this file. This will
give you a stubbed version of the file that doesn't contain actual content.

deps/curl/GIT-INFO.md vendored Normal file
View File

@ -0,0 +1,42 @@
      _   _ ____  _
  ___| | | |  _ \| |
 / __| | | | |_) | |
| (__| |_| |  _ <| |___
 \___|\___/|_| \_\_____|
# GIT-INFO
This file is only present in git - never in release archives. It contains
information about other files and things that the git repository keeps in its
inner sanctum.
To build in environments that support configure, after having extracted
everything from git, do this:
autoreconf -fi
./configure --with-openssl
make
Daniel uses a configure line similar to this for easier development:
./configure --disable-shared --enable-debug --enable-maintainer-mode
In environments that don't support configure (i.e. Windows), do this:
buildconf.bat
## REQUIREMENTS
For `autoreconf` and `configure` (not `buildconf.bat`) to work, you need the
following software installed:
o autoconf 2.57 (or later)
o automake 1.7 (or later)
o libtool 1.4.2 (or later)
o GNU m4 (required by autoconf)
o perl
If you don't have perl and don't want to install it, you can rename the source
file `src/tool_hugehelp.c.cvs` to `src/tool_hugehelp.c` and avoid having to
generate this file. This will give you a stubbed version of the file that
doesn't contain actual content.

deps/curl/Makefile.am vendored
View File

@ -127,13 +127,14 @@ EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \
$(VC_DIST) $(WINBUILD_DIST) $(PLAN9_DIST) lib/libcurl.vers.in buildconf.bat \
libcurl.def
CLEANFILES = $(VC14_LIBVCXPROJ) \
$(VC14_SRCVCXPROJ) $(VC14_10_LIBVCXPROJ) $(VC14_10_SRCVCXPROJ) \
CLEANFILES = $(VC14_LIBVCXPROJ) $(VC14_SRCVCXPROJ) \
$(VC14_10_LIBVCXPROJ) $(VC14_10_SRCVCXPROJ) \
$(VC14_20_LIBVCXPROJ) $(VC14_20_SRCVCXPROJ) \
$(VC14_30_LIBVCXPROJ) $(VC14_30_SRCVCXPROJ)
bin_SCRIPTS = curl-config
SUBDIRS = lib src
SUBDIRS = lib docs src scripts
DIST_SUBDIRS = $(SUBDIRS) tests packages scripts include docs
pkgconfigdir = $(libdir)/pkgconfig
@ -152,12 +153,6 @@ dist-hook:
cp -p $$file $(distdir)$$strip; \
done)
html:
cd docs && $(MAKE) html
pdf:
cd docs && $(MAKE) pdf
check: test examples check-docs
if CROSSCOMPILING
@ -246,10 +241,16 @@ cygwinbin:
$(MAKE) -C packages/Win32/cygwin cygwinbin
# We extend the standard install with a custom hook:
if BUILD_DOCS
install-data-hook:
(cd include && $(MAKE) install)
(cd docs && $(MAKE) install)
(cd docs/libcurl && $(MAKE) install)
else
install-data-hook:
(cd include && $(MAKE) install)
(cd docs && $(MAKE) install)
endif
# We extend the standard uninstall with a custom hook:
uninstall-hook:
@ -509,7 +510,8 @@ function gen_element(type, dir, file)\
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC14_20_SRCTMPL) > $(VC14_20_SRCVCXPROJ) || { exit 1; };) \
"$$awk_code" $(srcdir)/$(VC14_20_SRCTMPL) > $(VC14_20_SRCVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC14_30_LIBVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v lib_srcs="$$sorted_lib_srcs" \

View File

@ -30,27 +30,6 @@ ssl:
./configure --with-openssl
make
mingw32:
$(MAKE) -C lib -f Makefile.mk
$(MAKE) -C src -f Makefile.mk
mingw32-clean:
$(MAKE) -C lib -f Makefile.mk clean
$(MAKE) -C src -f Makefile.mk clean
$(MAKE) -C docs/examples -f Makefile.mk clean
mingw32-vclean mingw32-distclean:
$(MAKE) -C lib -f Makefile.mk vclean
$(MAKE) -C src -f Makefile.mk vclean
$(MAKE) -C docs/examples -f Makefile.mk vclean
mingw32-examples%:
$(MAKE) -C docs/examples -f Makefile.mk CFG=$@
mingw32%:
$(MAKE) -C lib -f Makefile.mk CFG=$@
$(MAKE) -C src -f Makefile.mk CFG=$@
vc:
cd winbuild
nmake /f Makefile.vc MACHINE=x86

View File

@ -1,206 +1,183 @@
curl and libcurl 8.5.0
curl and libcurl 8.7.0
Public curl releases: 253
Public curl releases: 255
Command line options: 258
curl_easy_setopt() options: 303
curl_easy_setopt() options: 304
Public functions in libcurl: 93
Contributors: 3039
Contributors: 3134
This release includes the following changes:
o gnutls: support CURLSSLOPT_NATIVE_CA [31]
o HTTP3: ngtcp2 builds are no longer experimental [77]
o configure: add --disable-docs flag [16]
o CURLINFO_USED_PROXY: return bool whether the proxy was used [24]
o digest: support SHA-512/256 [118]
o DoH: add trace configuration [61]
o write-out: add '%{proxy_used}' [24]
This release includes the following bugfixes:
o appveyor: make VS2008-built curl tool runnable [93]
o asyn-thread: use pipe instead of socketpair for IPC when available [4]
o autotools: accept linker flags via `CURL_LDFLAGS_{LIB,BIN}` [128]
o autotools: avoid passing `LDFLAGS` twice to libcurl [127]
o autotools: delete LCC compiler support bits [137]
o autotools: fix/improve gcc and Apple clang version detection [136]
o autotools: stop setting `-std=gnu89` with `--enable-warnings` [135]
o autotools: update references to deleted `crypt-auth` option [46]
o BINDINGS: add V binding [54]
o build: add `src/.checksrc` to source tarball [1]
o build: add more picky warnings and fix them [172]
o build: always revert `#pragma GCC diagnostic` after use [143]
o build: delete `HAVE_STDINT_H` and `HAVE_INTTYPES_H` [107]
o build: delete support bits for obsolete Windows compilers [106]
o build: fix 'threadsafe' feature detection for older gcc [19]
o build: fix builds that disable protocols but not digest auth [174]
o build: fix compiler warning with auths disabled [85]
o build: fix libssh2 + `CURL_DISABLE_DIGEST_AUTH` + `CURL_DISABLE_AWS` [120]
o build: picky warning updates [125]
o build: require Windows XP or newer [86]
o cfilter: provide call to tell connection to forget a socket [65]
o checksrc.pl: support #line instructions
o CI: add autotools, out-of-tree, debug build to distro check job [14]
o CI: ignore test 286 on Appveyor gcc 9 build [6]
o cmake: add `CURL_DISABLE_BINDLOCAL` option [146]
o cmake: add test for `DISABLE` options, add `CURL_DISABLE_HEADERS_API` [138]
o cmake: dedupe Windows system libs [114]
o cmake: fix `HAVE_H_ERRNO_ASSIGNABLE` detection [2]
o cmake: fix CURL_DISABLE_GETOPTIONS [12]
o cmake: fix multiple include of CURL package [96]
o cmake: fix OpenSSL quic detection in quiche builds [56]
o cmake: option to disable install & drop `curlu` target when unused [72]
o cmake: pre-fill rest of detection values for Windows [50]
o cmake: replace `check_library_exists_concat()` [23]
o cmake: speed up threads setup for Windows [68]
o cmake: speed up zstd detection [69]
o config-win32: set `HAVE_SNPRINTF` for mingw-w64 [123]
o configure: better --disable-http [80]
o configure: check for the fseeko declaration too [55]
o conncache: use the closure handle when disconnecting surplus connections [173]
o content_encoding: make Curl_all_content_encodings allocless [101]
o cookie: lowercase the domain names before PSL checks [160]
o curl.h: delete Symbian OS references [162]
o curl.h: on FreeBSD include sys/param.h instead of osreldate.h [21]
o curl.rc: switch out the copyright symbol for plain ASCII [167]
o curl: improved IPFS and IPNS URL support [87]
o curl_easy_duphandle.3: clarify how HSTS and alt-svc are duped [99]
o Curl_http_body: cleanup properly when Curl_getformdata errors [152]
o curl_setup: disallow Windows IPv6 builds missing getaddrinfo [57]
o curl_sspi: support more revocation error names in error messages [95]
o CURLINFO_PRETRANSFER_TIME_T.3: fix time explanation [181]
o CURLMOPT_MAX_CONCURRENT_STREAMS: make sure the set value is within range [165]
o CURLOPT_CAINFO_BLOB.3: explain what CURL_BLOB_COPY does [113]
o CURLOPT_WRITEFUNCTION.3: clarify libcurl returns for CURL_WRITEFUNC_ERROR [45]
o CURLOPT_POSTFIELDS.3: add CURLOPT_COPYPOSTFIELDS in SEE ALSO
o docs/example/keepalive.c: show TCP keep-alive options [73]
o docs/example/localport.c: show off CURLOPT_LOCALPORT [83]
o docs/examples/interface.c: show CURLOPT_INTERFACE use [84]
o docs/libcurl: fix three minor man page format mistakes [26]
o docs/libcurl: SYNSOPSIS cleanup [150]
o docs: add supported version for the json write-out [92]
o docs: clarify that curl passes on input unfiltered [47]
o docs: fix function typo in curl_easy_option_next.3 [36]
o docs: KNOWN_BUGS cleanup
o docs: make all examples in all libcurl man pages compile [175]
o docs: preserve the modification date when copying the prebuilt man page [89]
o docs: remove bold from some man page SYNOPSIS sections [90]
o docs: use SOURCE_DATE_EPOCH for generated manpages [16]
o doh: provide better return code for responses w/o addresses [133]
o doh: use PIPEWAIT when HTTP/2 is attempted [63]
o duphandle: also free 'outcurl->cookies' in error path [122]
o duphandle: make dupset() not return with pointers to old alloced data [109]
o duphandle: use strdup to clone *COPYPOSTFIELDS if size is not set [132]
o easy: in duphandle, init the cookies for the new handle [131]
o easy: remove duplicate wolfSSH init call [37]
o easy_lock: add a pthread_mutex_t fallback [13]
o examples/rtsp-options.c: add [157]
o fopen: create new file using old file's mode [153]
o fopen: create short(er) temporary file name [155]
o getenv: PlayStation doesn't have getenv() [41]
o GHA: move mod_h2 version in CI to v2.0.25 [43]
o hostip: show the list of IPs when resolving is done [35]
o hostip: silence compiler warning `-Wparentheses-equality` [62]
o hsts: skip single-dot hostname [67]
o HTTP/2, HTTP/3: handle detach of ongoing transfers [134]
o http2: header conversion tightening [33]
o http2: provide an error callback and failf the message [53]
o http2: safer invocation of populate_binsettings [8]
o http: allow longer HTTP/2 request method names [112]
o http: avoid Expect: 100-continue if Upgrade: is used [15]
o http: consider resume with CURLOPT_FAILONERROR and 416 to be fine [81]
o http: fix `-Wunused-parameter` with no auth and no proxy [149]
o http: fix `-Wunused-variable` compiler warning [115]
o http: fix empty-body warning [76]
o http_aws_sigv4: canonicalise valueless query params [88]
o hyper: temporarily remove HTTP/2 support [139]
o INSTALL: update list of ports and CPU archs
o IPFS: fix IPFS_PATH and file parsing [119]
o keylog: disable if unused [145]
o lib: add and use Curl_strndup() [97]
o lib: apache style infof and trace macros/functions [71]
o lib: fix gcc warning in printf call [7]
o libcurl-errors.3: sync with current public headers [156]
o libcurl-thread.3: simplify the TLS section [79]
o Makefile.am: drop vc10, vc11 and vc12 projects from dist [103]
o Makefile.mk: fix `-rtmp` option for non-Windows
o mime: store "form escape" as a single bit [170]
o misc: fix -Walloc-size warnings [118]
o msh3: error when built with CURL_DISABLE_SOCKETPAIR set [61]
o multi: during ratelimit multi_getsock should return no sockets [182]
o multi: use pipe instead of socketpair to *wakeup() [18]
o ngtcp2: fix races in stream handling [178]
o ngtcp2: ignore errors on unknown streams [158]
o ntlm_wb: use pipe instead of socketpair when possible [44]
o openldap: move the alloc of ldapconninfo to *connect() [29]
o openldap: set the callback argument in oldap_do [30]
o openssl: avoid BN_num_bits() NULL pointer derefs [9]
o openssl: fix building with v3 `no-deprecated` + add CI test [161]
o openssl: fix infof() to avoid compiler warning for %s with null [70]
o openssl: identify the "quictls" backend correctly [82]
o openssl: include SIG and KEM algorithms in verbose [52]
o openssl: make CURLSSLOPT_NATIVE_CA import Windows intermediate CAs [58]
o openssl: two multi pointer checks should probably rather be asserts [91]
o openssl: when a session-ID is reused, skip OCSP stapling [142]
o page-footer: clarify exit code 25 [51]
o projects: add VC14.20 project files [104]
o pytest: use lower count in repeat tests [98]
o quic: make eyeballers connect retries stop at weird replies [140]
o quic: manage connection idle timeouts [5]
o quiche: use quiche_conn_peer_transport_params() [116]
o rand: fix build error with autotools + LibreSSL [111]
o resolve.d: drop a multi use-sentence [100]
o RTSP: improved RTP parser [32]
o rustls: implement connect_blocking [154]
o sasl: fix `-Wunused-function` compiler warning [124]
o schannel: add CA cache support for files and memory blobs [121]
o setopt: check CURLOPT_TFTP_BLKSIZE range on set [171]
o setopt: remove outdated cookie comment [64]
o setopt: remove superfluous use of ternary expressions [169]
o socks: better buffer size checks for socks4a user and hostname [20]
o socks: make SOCKS5 use the CURLOPT_IPRESOLVE choice [38]
o symbols-in-versions: the CLOSEPOLICY options are deprecated
o test1683: remove commented-out check alternatives
o test3103: add missing quotes around a test tag attribute
o test613: stop showing an error on missing output file
o tests/README: SOCKS tests are not using OpenSSH, it has its own server [48]
o tests/server: add more SOCKS5 handshake error checking [27]
o tests: Fix Windows test helper tool search & use it for handle64 [17]
o tidy-up: casing typos, delete unused Windows version aliases [144]
o tool: fix --capath when proxy support is disabled [28]
o tool: support bold headers in Windows [117]
o tool_cb_hdr: add an additional parsing check [129]
o tool_cb_prg: make the carriage return fit for wide progress bars [159]
o tool_cb_wrt: fix write output for very old Windows versions [24]
o tool_getparam: limit --rate to be smaller than number of ms [3]
o tool_operate: do not mix memory models [108]
o tool_operate: fix links in ipfs errors [22]
o tool_parsecfg: make warning output propose double-quoting [164]
o tool_urlglob: fix build for old gcc versions [25]
o tool_urlglob: make multiply() bail out on negative values [11]
o tool_writeout_json: fix JSON encoding of non-ascii bytes [179]
o transfer: abort pause send when connection is marked for closing [183]
o transfer: avoid calling the read callback again after EOF [130]
o transfer: only reset the FTP wildcard engine in CLEAR state [42]
o url: don't touch the multi handle when closing internal handles [40]
o url: find scheme with a "perfect hash" [141]
o url: fix `-Wzero-length-array` with no protocols [147]
o url: fix builds with `CURL_DISABLE_HTTP` [148]
o url: protocol handler lookup tidy-up [66]
o url: proxy ssl connection reuse fix [94]
o urlapi: avoid null deref if setting blank host to url encode [75]
o urlapi: skip appending NULL pointer query [74]
o urlapi: when URL encoding the fragment, pass in the right length [59]
o urldata: make maxconnects a 32 bit value [166]
o urldata: move async resolver state from easy handle to connectdata [34]
o urldata: move cookielist from UserDefined to UrlState [126]
o urldata: move hstslist from 'set' to 'state' [105]
o urldata: move the 'internal' boolean to the state struct [39]
o vssh: remove the #ifdef for Curl_ssh_init, use empty macro
o vtls: cleanup SSL config management [78]
o vtls: consistently use typedef names for OpenSSL structs [176]
o vtls: late clone of connection ssl config [60]
o vtls: use ALPN "http/1.1" for HTTP/1.x, including HTTP/1.0 [102]
o VULN-DISCLOSURE-POLICY: escape sequences are not a security flaw [110]
o windows: use built-in `_WIN32` macro to detect Windows [163]
o wolfssh: remove redundant static prototypes [168]
o wolfssl: add default case for wolfssl_connect_step1 switch [49]
o wolfssl: require WOLFSSL_SYS_CA_CERTS for loading system CA [10]
o ALTSVC.md: correct a typo [14]
o asyn-ares: fix data race warning [88]
o asyn-thread: use wakeup_close to close the read descriptor [1]
o badwords: use hostname, not host name [46]
o BINDINGS: add mcurl, the python binding [67]
o bufq: writing into a softlimit queue cannot be partial [49]
o c-hyper: add header collection writer in hyper builds [70]
o cd2nroff: gen: make `\>` in input to render as plain '>' in output
o cd2nroff: remove backticks from titles
o checksrc.pl: fix handling .checksrc with CRLF [43]
o cmake: add USE_OPENSSL_QUIC support [21]
o cmake: add warning for using TLS libraries without 1.3 support [25]
o cmake: enable `ENABLE_CURL_MANUAL` by default [112]
o cmake: fix `CURL_WINDOWS_SSPI=ON` with Schannel disabled [117]
o cmake: fix function description in comment [47]
o cmake: fix install for older CMake versions [53]
o cmake: fix libcurl.pc and curl-config library specifications [115]
o cmdline-docs/Makefile: avoid using a fixed temp file name [5]
o cmdline-docs: quote and angle bracket cleanup [45]
o cmdline-opts/_EXITCODES: sync with libcurl-errors [80]
o cmdline-opts/_VARIABLES.md: improve the description [105]
o cmdline-opts/_VERSION: provide %VERSION correctly [87]
o cmdline-opts: shorter help texts [148]
o configure: add pkg-config support to rustls detection [151]
o configure: add warning for using TLS libraries without 1.3 support [26]
o configure: build & install shell completions when enabled [85]
o configure: do not link with nghttp3 unless necessary [7]
o configure: Don't build shell completions when disabled [68]
o configure: Don't make shell completions without perl [83]
o configure: find libpsl with pkg-config [79]
o connect.c: fix typo [17]
o CONTRIBUTE: update the section on documentation format [96]
o cookie.md: provide an example sending a fixed cookie [13]
o cookie: if psl fails, reject the cookie [107]
o curl: exit on config file parser errors [40]
o curl: make --libcurl output better CURLOPT_*SSLVERSION [127]
o curl: when allocating variables, add the name into the struct [37]
o curl_setup.h: add curl_uint64_t internal type
o curldown: fix email address in Copyright [89]
o CURLMOPT_MAX*: mention what happens if changed mid-transfer [154]
o CURLOPT_INTERFACE.md: remove spurious amp, add see-also [137]
o CURLOPT_POSTQUOTE.md: fix typo [36]
o CURLOPT_SSL_CTX_FUNCTION.md: no promises of lifetime after return [104]
o CURLOPT_WRITEFUNCTION.md: typo fix [41]
o digest: add check for hashing error [111]
o dist: make sure the http tests are in the tarball [29]
o DISTROS: add document with distro pointers [144]
o docs/libcurl: add TLS backend info for all TLS options [155]
o docs/libcurl: generate PROTOCOLS from meta-data [153]
o docs: add missing slashes to SChannel client certificate documentation [11]
o docs: add necessary setup for nghttp3 [51]
o docs: ascii version of manpage without nroff [121]
o docs: dist curl*.1 and install without perl [64]
o docs: make curldown do angle brackets like markdown [54]
o docs: make each libcurl man specify protocol(s) [157]
o docs: make sure curl.1 is included in dist tarballs [35]
o docs: update minimal binary size in INSTALL.md
o docs: use present tense [103]
o examples: use present tense in comments [97]
o file: use xfer buf for file:// transfers [23]
o fopen: fix narrowing conversion warning on 32-bit Android [100]
o form-string.md: correct the example [4]
o ftp: do lineend conversions in client writer [32]
o ftp: fix socket wait activity in ftp_domore_getsock [28]
o ftp: tracing improvements [33]
o ftp: treat a 226 arriving before data as a signal to read data [19]
o gen.pl: make the "manpageification" faster [95]
o gen: make `\>` in input to render as plain '>' in output [78]
o getparam: make --ftp-ssl work again [90]
o GHA/linux: add sysctl trick to work-around GitHub runner issue [129]
o GIT-INFO: convert to markdown [114]
o GOVERNANCE: document the core team [133]
o header.md: remove backslash, make nicer markdown [48]
o HTTP/2: write response directly [12]
o http2, http3: return CURLE_PARTIAL_FILE when bytes were received [160]
o http2: fix push discard [124]
o http2: memory errors in the push callbacks are fatal [132]
o http2: minor tweaks to optimize two struct sizes [130]
o http2: push headers better cleanup [113]
o http2: remove the third (unused) argument from http2_data_done() [159]
o HTTP3.md: adjust the OpenSSL QUIC install instructions [34]
o http: better error message for HTTP/1.x response without status line [86]
o http: improve response header handling, save cpu cycles [138]
o http: move headers collecting to writer [71]
o http: remove stale comment about rewindbeforesend [136]
o http: separate response parsing from response action [158]
o http_chunks: fix the accounting of consumed bytes [22]
o http_chunks: remove unused 'endptr' variable [58]
o https-proxy: use IP address and cert with ip in alt names [50]
o hyper: implement unpausing via client reader [98]
o ipv6.md: mention IPv4 mapped addresses [147]
o KNOWN_BUGS: POP3 issue when reading small chunks [134]
o lib1598: fix `CURLOPT_POSTFIELDSIZE` usage [128]
o lib582: remove code causing warning that is never run [38]
o lib: add `void *ctx` to reader/writer instances [122]
o lib: convert Curl_get_line to use dynbuf [42]
o lib: Curl_read/Curl_write clarifications [101]
o lib: enhance client reader resume + rewind [92]
o lib: initialize output pointers to NULL before calling strto[ff,l,ul] [63]
o lib: keep conn IP information together [109]
o lib: move 'done' parameter to SingleRequests [142]
o lib: remove curl_mimepart object when CURL_DISABLE_MIME [72]
o libcurl-docs: cleanups
o libcurl-security.md: Active FTP passes on the local IP address [6]
o libssh/libssh2: return error on too big range [75]
o MANUAL.md: fix typo [66]
o mbedtls: fix building when MBEDTLS_X509_REMOVE_INFO flag is defined [27]
o mbedtls: fix pytest for newer versions [146]
o mbedtls: properly cleanup the thread-shared entropy [140]
o mbedtls: use mbedtls_ssl_conf_{min|max}_tls_version [59]
o md4: include strdup.h for the memdup proto [10]
o mime: add client reader [126]
o misc: fix typos in docs and lib [84]
o mkhelp: simplify the generated hugehelp program [120]
o mprintf: fix format prefix I32/I64 for windows compilers [77]
o multi: add xfer_buf to multi handle [30]
o multi: fix multi_sock handling of select_bits [81]
o multi: make add_handle free any multi_easy [102]
o ngtcp2: no recvbuf for stream [108]
o ntlm_wb: fix buffer type typo [2]
o OpenSSL QUIC: adapt to v3.3.x [65]
o openssl-quic: check on Windows that socket conv to int is possible [8]
o openssl-quic: fix BIO leak and Windows warning [93]
o openssl-quic: fix unity build, casing, indentation [94]
o OS400: avoid using awk in the build scripts [20]
o paramhlp: fix CRLF-stripping files with "-d @file" [116]
o proxy1.0.md: fix example [15]
o pytest: adapt to API change [106]
o request: clarify message when request has been sent off [143]
o rustls: make curl compile with 0.12.0 [73]
o schannel: fix hang on unexpected server close [57]
o scripts: fix cijobs.pl for Azure and GHA
o sendf: ignore response body to HEAD [18]
o setopt: fix check for CURLOPT_PROXY_TLSAUTH_TYPE value [76]
o setopt: fix disabling all protocols [99]
o sha512_256: add support for GnuTLS and OpenSSL [110]
o smtp: fix STARTTLS [91]
o SPONSORS: describe the basics [131]
o strtoofft: fix the overflow check [74]
o test 1541: verify getinfo values on first header callback [149]
o test1165: improve pattern matching [60]
o tests: support setting/using blank content env variables
o TIMER_STARTTRANSFER: set the same for everyone [82]
o TLS: start shutdown only when peer did not already close [150]
o TODO: update 13.11 with more information [152]
o tool_cb_hdr: only parse etag + content-disposition for 2xx [9]
o tool_getparam: accept a blank -w "" [139]
o tool_getparam: handle non-existing (out of range) short-options [141]
o tool_operate: change precedence of server Retry-After time [44]
o tool_operate: do not set CURLOPT_QUICK_EXIT in debug builds [3]
o trace-config.md: remove the mutexed options list [119]
o transfer.c: break receive loop in speed limited transfers [125]
o transfer: improve Windows SO_SNDBUF update limit [56]
o urldata: move authneg bit from conn to Curl_easy [69]
o version: allow building with ancient libpsl [52]
o vquic-tls: fix the error code returned for bad CA file [135]
o vtls: fix tls proxy peer verification [55]
o vtls: revert "receive max buffer" + add test case [39]
o VULN-DISCLOSURE-POLICY.md: update detail about CVE requests [123]
o websocket: fix curl_ws_recv() [62]
o wolfSSL: do not call the stub function wolfSSL_BIO_set_init() [145]
o write-out.md: clarify error handling details [31]
This release includes the following known bugs:
@ -215,204 +192,188 @@ Planned upcoming removals include:
This release would not have looked like this without help, code, reports and
advice from friends like these:
12932 on github, Alex Bozarth, Alexey Larikov, Alex Klyubin, Ammar Faizi,
Andrew Kurushin, Anubhav Rai, boilingoden, calvin2021y on github,
Carlos Henrique Lima Melara, Casey Bodley, Charlie C, Dan Fandrich,
Daniel Jeliński, Daniel Stenberg, David Benjamin, David Suter, Dmitry Karpov,
eeverettrbx on github, Emanuele Torre, Enno Boland, enWILLYado on github,
Faraz Fallahi, Gisle Vanem, Goro FUJI, Graham Campbell, Harry Mallon,
Harry Sintonen, iconoclasthero, icy17 on github, Jacob Hoffman-Andrews,
Jan Alexander Steffens, Jeroen Ooms, Jiehong on github, Jiri Hruska,
Junho Choi, Kai Pastor, Kareem, Kartatz on Github, kirbyn17 on hackerone,
Lau, lkordos on github, Loïc Yhuel, LoRd_MuldeR, lRoccoon on github,
Maksymilian Arciemowicz, Manfred Schwarb, Marcel Raad, Marcin Rataj,
Mark Gaiser, Martin Schmatz, Michael Kaufmann, Michał Antoniak, Nico Rieck,
Niracler Li, ohyeaah on github, Ophir Lojkine, Paweł Wegner, Philip Heiduck,
Ray Satiro, rilysh, Robert Southee, Romain Geissler, Sam James,
Samuel Henrique, sd0 on hackerone, Smackd0wn, Sohom Datta, Stefan Eissing,
Steven Allen, Tim Hill, Torben Dury, Turiiya, Viktor Szakats,
yushicheng7788 on github, z2_, zhengqwe on github, 積丹尼 Dan Jacobson
(78 contributors)
5533asdg on github, Alan Coopersmith, Andreas Kiefer, Andrew Kaster,
Andy Fiddaman, Arjan van de Ven, av223119 on github, awesomekosm on github,
Boris Verkhovskiy, Brett Buddin, Brian Clemens, chensong1211 on github,
Chris Webb, chrysos349 on github, Dan Fandrich, Daniel Gustafsson,
Daniel Stenberg, Daniel Szmulewicz, Dan McDonald, DasKutti on github,
dependabot[bot], Dexter Gerig, dfdity on github, Dirk Hünniger,
Dmitry Karpov, Dmitry Tretyakov, edmcln on github, Erik Schnetter,
Evgeny Grin (Karlson2k), Fabian Keil, Fabian Vogt, Fabrice Fontaine,
Faraz Fallahi, Gaelan Steele, Geeknik Labs, Gisle Vanem, graywolf on github,
Harry Sintonen, HsiehYuho on github, Jan Macku, Jiawen Geng, Jiří Bok,
Joel Depooter, John Marshall, Jonathan Perkin, Jon Rumsey, Jordan Brown,
Josh Soref, Karthikdasari0423, Karthikdasari0423 on github, Kevin Daudt,
Konstantin Vlasov, kpcyrd, Lars Kellogg-Stedman, LeeRiva, Louis Solofrizzo,
Lukáš Zaoral, Marcel Raad, Marcus Müller, Matt Jolly, Michael Forney,
Michael Kaufmann, Michał Antoniak, Michał Górny, Mohammadreza Hendiani,
Nikita Taranov, Outvi V, Patrick Monnerat, Paweł Witas, Pēteris Caune,
Peter Krefting, RainRat, Ramiro Garcia, Ray Satiro, Richard Levitte,
Robert Moreton, Ross Burton, Rudi Heitbaum, Ryan Carsten Schmidt,
Scott Mutter, Scott Talbert, Sean Molenaar, Sebastian Neubauer,
Sergey Bronnikov, Simon K, Stefan Eissing, Tal Regev, Thomas Pyle,
Till Wegmüller, Viktor Szakats, vulnerabilityspotter on hackerone,
Winni Neessen
(92 contributors)
References to bug reports and discussions on issues:
[1] = https://curl.se/bug/?i=12084
[2] = https://curl.se/bug/?i=12093
[3] = https://curl.se/bug/?i=12116
[4] = https://curl.se/bug/?i=12146
[5] = https://curl.se/bug/?i=12064
[6] = https://curl.se/bug/?i=12040
[7] = https://curl.se/bug/?i=12082
[8] = https://curl.se/bug/?i=12101
[9] = https://curl.se/bug/?i=12099
[10] = https://curl.se/bug/?i=12108
[11] = https://curl.se/bug/?i=12102
[12] = https://curl.se/bug/?i=12091
[13] = https://curl.se/bug/?i=12090
[14] = https://curl.se/bug/?i=12088
[15] = https://curl.se/bug/?i=12022
[16] = https://curl.se/bug/?i=12092
[17] = https://curl.se/bug/?i=12115
[18] = https://curl.se/bug/?i=12142
[19] = https://curl.se/bug/?i=12125
[20] = https://curl.se/bug/?i=12139
[21] = https://curl.se/bug/?i=12107
[22] = https://curl.se/bug/?i=12133
[23] = https://curl.se/bug/?i=11285
[24] = https://curl.se/bug/?i=12131
[25] = https://curl.se/bug/?i=12124
[26] = https://curl.se/bug/?i=12126
[27] = https://curl.se/bug/?i=12117
[28] = https://curl.se/bug/?i=12089
[29] = https://curl.se/bug/?i=12166
[30] = https://curl.se/bug/?i=12166
[31] = https://curl.se/bug/?i=12137
[32] = https://curl.se/bug/?i=12052
[33] = https://curl.se/bug/?i=12097
[34] = https://curl.se/bug/?i=12198
[35] = https://curl.se/bug/?i=12145
[36] = https://curl.se/bug/?i=12170
[37] = https://curl.se/bug/?i=12168
[38] = https://curl.se/bug/?i=11949
[39] = https://curl.se/bug/?i=12165
[40] = https://curl.se/bug/?i=12165
[41] = https://curl.se/bug/?i=12140
[42] = https://curl.se/bug/?i=11775
[43] = https://curl.se/bug/?i=12157
[44] = https://curl.se/bug/?i=12149
[45] = https://curl.se/bug/?i=12201
[46] = https://curl.se/bug/?i=12194
[47] = https://curl.se/bug/?i=12249
[48] = https://curl.se/bug/?i=12195
[49] = https://curl.se/bug/?i=12218
[50] = https://curl.se/bug/?i=12044
[51] = https://curl.se/bug/?i=12189
[52] = https://curl.se/bug/?i=12030
[53] = https://curl.se/bug/?i=12179
[54] = https://curl.se/bug/?i=12182
[55] = https://curl.se/bug/?i=12086
[56] = https://curl.se/bug/?i=12160
[57] = https://curl.se/bug/?i=12221
[58] = https://curl.se/bug/?i=12155
[59] = https://curl.se/bug/?i=12250
[60] = https://curl.se/bug/?i=12237
[61] = https://curl.se/bug/?i=12213
[62] = https://curl.se/bug/?i=12215
[63] = https://curl.se/bug/?i=12214
[64] = https://curl.se/bug/?i=12206
[65] = https://curl.se/bug/?i=12207
[66] = https://curl.se/bug/?i=12216
[67] = https://curl.se/bug/?i=12247
[68] = https://curl.se/bug/?i=12202
[69] = https://curl.se/bug/?i=12200
[70] = https://curl.se/bug/?i=12196
[71] = https://curl.se/bug/?i=12083
[72] = https://curl.se/bug/?i=12287
[73] = https://curl.se/bug/?i=12242
[74] = https://curl.se/bug/?i=12240
[75] = https://curl.se/bug/?i=12240
[76] = https://curl.se/bug/?i=12262
[77] = https://curl.se/bug/?i=12235
[78] = https://curl.se/bug/?i=12204
[79] = https://curl.se/bug/?i=12233
[80] = https://curl.se/bug/?i=12223
[81] = https://curl.se/bug/?i=10521
[82] = https://curl.se/bug/?i=12270
[83] = https://curl.se/bug/?i=12230
[84] = https://curl.se/bug/?i=12229
[85] = https://curl.se/bug/?i=12227
[86] = https://curl.se/bug/?i=12225
[87] = https://curl.se/bug/?i=12148
[88] = https://curl.se/bug/?i=8107
[89] = https://curl.se/bug/?i=12199
[90] = https://curl.se/bug/?i=12267
[91] = https://curl.se/bug/?i=12264
[92] = https://curl.se/bug/?i=12266
[93] = https://curl.se/bug/?i=12263
[94] = https://curl.se/bug/?i=12255
[95] = https://curl.se/bug/?i=12239
[96] = https://curl.se/bug/?i=11913
[97] = https://curl.se/bug/?i=12251
[98] = https://curl.se/bug/?i=12248
[99] = https://curl.se/bug/?i=12315
[100] = https://curl.se/bug/?i=12294
[101] = https://curl.se/bug/?i=12289
[102] = https://curl.se/bug/?i=12259
[103] = https://curl.se/bug/?i=12288
[104] = https://curl.se/bug/?i=12282
[105] = https://curl.se/bug/?i=12315
[106] = https://curl.se/bug/?i=12222
[107] = https://curl.se/bug/?i=12275
[108] = https://curl.se/bug/?i=12280
[109] = https://curl.se/bug/?i=12337
[110] = https://curl.se/bug/?i=12278
[111] = https://curl.se/bug/?i=12257
[112] = https://curl.se/bug/?i=12311
[113] = https://curl.se/bug/?i=12277
[114] = https://curl.se/bug/?i=12307
[115] = https://curl.se/bug/?i=12228
[116] = https://curl.se/bug/?i=12180
[117] = https://curl.se/bug/?i=12321
[118] = https://curl.se/bug/?i=12292
[119] = https://curl.se/bug/?i=12152
[120] = https://curl.se/bug/?i=12273
[121] = https://curl.se/bug/?i=12261
[122] = https://curl.se/bug/?i=12329
[123] = https://curl.se/bug/?i=12325
[124] = https://curl.se/bug/?i=12326
[125] = https://curl.se/bug/?i=12324
[126] = https://curl.se/bug/?i=12323
[127] = https://curl.se/bug/?i=12310
[128] = https://curl.se/bug/?i=12312
[129] = https://curl.se/bug/?i=12320
[130] = https://curl.se/mail/lib-2023-11/0017.html
[131] = https://curl.se/bug/?i=12318
[132] = https://curl.se/bug/?i=12317
[133] = https://curl.se/bug/?i=12365
[134] = https://curl.se/bug/?i=12356
[135] = https://curl.se/bug/?i=12346
[136] = https://curl.se/bug/?i=12362
[137] = https://curl.se/bug/?i=12357
[138] = https://curl.se/bug/?i=12353
[139] = https://curl.se/bug/?i=12191
[140] = https://curl.se/bug/?i=12400
[141] = https://curl.se/bug/?i=12347
[142] = https://curl.se/bug/?i=12399
[143] = https://curl.se/bug/?i=12352
[144] = https://curl.se/bug/?i=12351
[145] = https://curl.se/bug/?i=12350
[146] = https://curl.se/bug/?i=12345
[147] = https://curl.se/bug/?i=12344
[148] = https://curl.se/bug/?i=12343
[149] = https://curl.se/bug/?i=12338
[150] = https://curl.se/bug/?i=12402
[152] = https://curl.se/bug/?i=12410
[153] = https://curl.se/bug/?i=12299
[154] = https://curl.se/bug/?i=11647
[155] = https://curl.se/bug/?i=12388
[156] = https://curl.se/bug/?i=12424
[157] = https://curl.se/bug/?i=12452
[158] = https://curl.se/bug/?i=12449
[159] = https://curl.se/bug/?i=12407
[160] = https://curl.se/bug/?i=12387
[161] = https://curl.se/bug/?i=12384
[162] = https://curl.se/bug/?i=12378
[163] = https://curl.se/bug/?i=12376
[164] = https://curl.se/bug/?i=12409
[165] = https://curl.se/bug/?i=12382
[166] = https://curl.se/bug/?i=12375
[167] = https://curl.se/bug/?i=12403
[168] = https://curl.se/bug/?i=12381
[169] = https://curl.se/bug/?i=12374
[170] = https://curl.se/bug/?i=12374
[171] = https://curl.se/bug/?i=12374
[172] = https://curl.se/bug/?i=12331
[173] = https://curl.se/bug/?i=12367
[174] = https://curl.se/bug/?i=12440
[175] = https://curl.se/bug/?i=12448
[176] = https://curl.se/bug/?i=12439
[178] = https://curl.se/bug/?i=12435
[179] = https://curl.se/bug/?i=12434
[181] = https://curl.se/bug/?i=12431
[182] = https://curl.se/bug/?i=12430
[183] = https://curl.se/bug/?i=12428
[1] = https://curl.se/bug/?i=12836
[2] = https://curl.se/bug/?i=12825
[3] = https://curl.se/bug/?i=12834
[4] = https://curl.se/bug/?i=12822
[5] = https://curl.se/bug/?i=12829
[6] = https://curl.se/bug/?i=12867
[7] = https://curl.se/bug/?i=12833
[8] = https://curl.se/bug/?i=12861
[9] = https://curl.se/bug/?i=12866
[10] = https://curl.se/bug/?i=12849
[11] = https://curl.se/bug/?i=12854
[12] = https://curl.se/bug/?i=12828
[13] = https://curl.se/bug/?i=12868
[14] = https://curl.se/bug/?i=12852
[15] = https://curl.se/bug/?i=12856
[16] = https://curl.se/bug/?i=12832
[17] = https://curl.se/bug/?i=12858
[18] = https://curl.se/mail/lib-2024-02/0000.html
[19] = https://curl.se/bug/?i=12823
[20] = https://curl.se/bug/?i=12826
[21] = https://curl.se/bug/?i=13034
[22] = https://curl.se/bug/?i=12937
[23] = https://curl.se/bug/?i=12750
[24] = https://curl.se/bug/?i=12719
[25] = https://curl.se/bug/?i=12900
[26] = https://curl.se/bug/?i=12900
[27] = https://curl.se/bug/?i=12904
[28] = https://curl.se/bug/?i=12901
[29] = https://curl.se/bug/?i=12914
[30] = https://curl.se/bug/?i=12805
[31] = https://curl.se/bug/?i=12909
[32] = https://curl.se/bug/?i=12878
[33] = https://curl.se/bug/?i=12902
[34] = https://curl.se/bug/?i=12896
[35] = https://curl.se/bug/?i=12892
[36] = https://curl.se/bug/?i=12926
[37] = https://curl.se/bug/?i=12891
[38] = https://curl.se/bug/?i=12890
[39] = https://curl.se/bug/?i=12885
[40] = https://curl.se/mail/archive-2024-02/0008.html
[41] = https://curl.se/bug/?i=12889
[42] = https://curl.se/bug/?i=12846
[43] = https://curl.se/bug/?i=12924
[44] = https://curl.se/mail/archive-2024-01/0022.html
[45] = https://curl.se/bug/?i=12884
[46] = https://curl.se/bug/?i=12888
[47] = https://curl.se/bug/?i=12879
[48] = https://curl.se/bug/?i=12877
[49] = https://curl.se/bug/?i=13020
[50] = https://curl.se/bug/?i=12838
[51] = https://curl.se/bug/?i=12859
[52] = https://curl.se/mail/archive-2024-02/0004.html
[53] = https://curl.se/bug/?i=12920
[54] = https://curl.se/bug/?i=12869
[55] = https://curl.se/bug/?i=12831
[56] = https://curl.se/bug/?i=12911
[57] = https://curl.se/bug/?i=12894
[58] = https://curl.se/bug/?i=12996
[59] = https://curl.se/bug/?i=12905
[60] = https://curl.se/bug/?i=12903
[61] = https://curl.se/bug/?i=12411
[62] = https://curl.se/bug/?i=12945
[63] = https://curl.se/bug/?i=12995
[64] = https://curl.se/bug/?i=12921
[65] = https://curl.se/bug/?i=12933
[66] = https://curl.se/bug/?i=12965
[67] = https://curl.se/bug/?i=12962
[68] = https://curl.se/bug/?i=13027
[69] = https://curl.se/bug/?i=12949
[70] = https://curl.se/bug/?i=12880
[71] = https://curl.se/bug/?i=12880
[72] = https://curl.se/bug/?i=12948
[73] = https://curl.se/bug/?i=12989
[74] = https://curl.se/bug/?i=12990
[75] = https://curl.se/bug/?i=12983
[76] = https://curl.se/bug/?i=12981
[77] = https://curl.se/bug/?i=12944
[78] = https://curl.se/bug/?i=12977
[79] = https://curl.se/bug/?i=12947
[80] = https://curl.se/bug/?i=13015
[81] = https://curl.se/bug/?i=12971
[82] = https://curl.se/bug/?i=13052
[83] = https://curl.se/bug/?i=13022
[84] = https://curl.se/bug/?i=13019
[85] = https://curl.se/bug/?i=12906
[86] = https://curl.se/bug/?i=13045
[87] = https://curl.se/bug/?i=13008
[88] = https://curl.se/bug/?i=13065
[89] = https://curl.se/bug/?i=12997
[90] = https://curl.se/bug/?i=13006
[91] = https://curl.se/bug/?i=13048
[92] = https://curl.se/bug/?i=13026
[93] = https://curl.se/bug/?i=13043
[94] = https://curl.se/bug/?i=13044
[95] = https://curl.se/bug/?i=13041
[96] = https://curl.se/bug/?i=13046
[97] = https://curl.se/bug/?i=13003
[98] = https://curl.se/bug/?i=13075
[99] = https://curl.se/bug/?i=13004
[100] = https://curl.se/bug/?i=12998
[101] = https://curl.se/bug/?i=12964
[102] = https://curl.se/bug/?i=12992
[103] = https://curl.se/bug/?i=13001
[104] = https://curl.se/bug/?i=12999
[105] = https://curl.se/bug/?i=13040
[106] = https://curl.se/bug/?i=13037
[107] = https://curl.se/bug/?i=13033
[108] = https://curl.se/bug/?i=13073
[109] = https://curl.se/bug/?i=13084
[110] = https://curl.se/bug/?i=13070
[111] = https://curl.se/bug/?i=13072
[112] = https://curl.se/bug/?i=13028
[113] = https://curl.se/bug/?i=13054
[114] = https://curl.se/bug/?i=13074
[115] = https://curl.se/bug/?i=6169
[116] = https://curl.se/bug/?i=13063
[117] = https://curl.se/bug/?i=13061
[118] = https://curl.se/bug/?i=12897
[119] = https://curl.se/bug/?i=13031
[120] = https://curl.se/bug/?i=13047
[121] = https://curl.se/bug/?i=13047
[122] = https://curl.se/bug/?i=13035
[123] = https://curl.se/bug/?i=13088
[124] = https://curl.se/bug/?i=13055
[125] = https://curl.se/mail/lib-2024-03/0001.html
[126] = https://curl.se/bug/?i=13039
[127] = https://curl.se/bug/?i=13127
[128] = https://curl.se/bug/?i=13085
[129] = https://curl.se/bug/?i=13124
[130] = https://curl.se/bug/?i=13082
[131] = https://curl.se/bug/?i=13119
[132] = https://curl.se/bug/?i=13081
[133] = https://curl.se/bug/?i=13118
[134] = https://curl.se/bug/?i=12063
[135] = https://curl.se/bug/?i=13115
[136] = https://curl.se/bug/?i=13187
[137] = https://curl.se/bug/?i=13149
[138] = https://curl.se/bug/?i=13143
[139] = https://curl.se/bug/?i=13144
[140] = https://curl.se/bug/?i=11919
[141] = https://curl.se/bug/?i=13101
[142] = https://curl.se/bug/?i=13096
[143] = https://curl.se/bug/?i=13093
[144] = https://curl.se/bug/?i=13178
[145] = https://curl.se/bug/?i=13164
[146] = https://curl.se/bug/?i=13132
[147] = https://curl.se/bug/?i=13112
[148] = https://curl.se/bug/?i=13169
[149] = https://curl.se/bug/?i=13128
[150] = https://curl.se/bug/?i=10290
[151] = https://curl.se/bug/?i=13179
[152] = https://curl.se/bug/?i=13173
[153] = https://curl.se/bug/?i=13175
[154] = https://curl.se/bug/?i=13176
[155] = https://curl.se/bug/?i=13168
[157] = https://curl.se/bug/?i=13166
[158] = https://curl.se/bug/?i=13134
[159] = https://curl.se/bug/?i=13154
[160] = https://curl.se/bug/?i=13151

deps/curl/acinclude.m4 vendored
View File

@ -156,7 +156,6 @@ AC_DEFUN([CURL_CHECK_AIX_ALL_SOURCE], [
#endif])
AC_BEFORE([$0], [AC_SYS_LARGEFILE])dnl
AC_BEFORE([$0], [CURL_CONFIGURE_REENTRANT])dnl
AC_BEFORE([$0], [CURL_CONFIGURE_PULL_SYS_POLL])dnl
AC_MSG_CHECKING([if OS is AIX (to define _ALL_SOURCE)])
AC_EGREP_CPP([yes_this_is_aix],[
#ifdef _AIX
@ -171,159 +170,43 @@ AC_DEFUN([CURL_CHECK_AIX_ALL_SOURCE], [
])
dnl CURL_CHECK_HEADER_WINDOWS
dnl -------------------------------------------------
dnl Check for compilable and valid windows.h header
AC_DEFUN([CURL_CHECK_HEADER_WINDOWS], [
AC_CACHE_CHECK([for windows.h], [curl_cv_header_windows_h], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
]],[[
#if defined(__CYGWIN__) || defined(__CEGCC__)
HAVE_WINDOWS_H shall not be defined.
#else
int dummy=2*WINVER;
#endif
]])
],[
curl_cv_header_windows_h="yes"
],[
curl_cv_header_windows_h="no"
])
])
case "$curl_cv_header_windows_h" in
yes)
AC_DEFINE_UNQUOTED(HAVE_WINDOWS_H, 1,
[Define to 1 if you have the windows.h header file.])
;;
esac
])
dnl CURL_CHECK_NATIVE_WINDOWS
dnl -------------------------------------------------
dnl Check if building a native Windows target
AC_DEFUN([CURL_CHECK_NATIVE_WINDOWS], [
AC_REQUIRE([CURL_CHECK_HEADER_WINDOWS])dnl
AC_CACHE_CHECK([whether build target is a native Windows one], [curl_cv_native_windows], [
if test "$curl_cv_header_windows_h" = "no"; then
curl_cv_native_windows="no"
else
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
]],[[
#if defined(__MINGW32__) || defined(__MINGW32CE__) || \
(defined(_MSC_VER) && (defined(_WIN32) || defined(_WIN64)))
int dummy=1;
#else
Not a native Windows build target.
#endif
]])
],[
curl_cv_native_windows="yes"
],[
curl_cv_native_windows="no"
])
fi
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
]],[[
#ifdef _WIN32
int dummy=1;
#else
Not a native Windows build target.
#endif
]])
],[
curl_cv_native_windows="yes"
],[
curl_cv_native_windows="no"
])
])
AM_CONDITIONAL(DOING_NATIVE_WINDOWS, test "x$curl_cv_native_windows" = xyes)
])
dnl CURL_CHECK_HEADER_WINSOCK2
dnl -------------------------------------------------
dnl Check for compilable and valid winsock2.h header
AC_DEFUN([CURL_CHECK_HEADER_WINSOCK2], [
AC_REQUIRE([CURL_CHECK_HEADER_WINDOWS])dnl
AC_CACHE_CHECK([for winsock2.h], [curl_cv_header_winsock2_h], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#include <winsock2.h>
]],[[
#if defined(__CYGWIN__) || defined(__CEGCC__) || defined(__MINGW32CE__)
HAVE_WINSOCK2_H shall not be defined.
#else
int dummy=2*IPPROTO_ESP;
#endif
]])
],[
curl_cv_header_winsock2_h="yes"
],[
curl_cv_header_winsock2_h="no"
])
])
case "$curl_cv_header_winsock2_h" in
yes)
AC_DEFINE_UNQUOTED(HAVE_WINSOCK2_H, 1,
[Define to 1 if you have the winsock2.h header file.])
;;
esac
])
dnl CURL_CHECK_HEADER_WS2TCPIP
dnl -------------------------------------------------
dnl Check for compilable and valid ws2tcpip.h header
AC_DEFUN([CURL_CHECK_HEADER_WS2TCPIP], [
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK2])dnl
AC_CACHE_CHECK([for ws2tcpip.h], [curl_cv_header_ws2tcpip_h], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#include <winsock2.h>
#include <ws2tcpip.h>
]],[[
#if defined(__CYGWIN__) || defined(__CEGCC__) || defined(__MINGW32CE__)
HAVE_WS2TCPIP_H shall not be defined.
#else
int dummy=2*IP_PKTINFO;
#endif
]])
],[
curl_cv_header_ws2tcpip_h="yes"
],[
curl_cv_header_ws2tcpip_h="no"
])
])
case "$curl_cv_header_ws2tcpip_h" in
yes)
AC_DEFINE_UNQUOTED(HAVE_WS2TCPIP_H, 1,
[Define to 1 if you have the ws2tcpip.h header file.])
;;
esac
])
dnl CURL_CHECK_HEADER_LBER
dnl -------------------------------------------------
dnl Check for compilable and valid lber.h header,
dnl and check if it is needed even with ldap.h
AC_DEFUN([CURL_CHECK_HEADER_LBER], [
AC_REQUIRE([CURL_CHECK_HEADER_WINDOWS])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_CACHE_CHECK([for lber.h], [curl_cv_header_lber_h], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -355,7 +238,7 @@ AC_DEFUN([CURL_CHECK_HEADER_LBER], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -403,7 +286,7 @@ AC_DEFUN([CURL_CHECK_HEADER_LDAP], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -449,7 +332,7 @@ AC_DEFUN([CURL_CHECK_HEADER_LDAP_SSL], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -534,7 +417,7 @@ AC_DEFUN([CURL_CHECK_LIBS_WINLDAP], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -632,7 +515,7 @@ AC_DEFUN([CURL_CHECK_LIBS_LDAP], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
@ -701,14 +584,11 @@ AC_DEFUN([TYPE_SOCKADDR_STORAGE],
[if struct sockaddr_storage is defined]), ,
[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
@ -731,7 +611,7 @@ dnl -------------------------------------------------
dnl Test if the socket recv() function is available,
AC_DEFUN([CURL_CHECK_FUNC_RECV], [
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK2])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_REQUIRE([CURL_INCLUDES_BSDSOCKET])dnl
AC_CHECK_HEADERS(sys/types.h sys/socket.h)
#
@ -739,14 +619,11 @@ AC_DEFUN([CURL_CHECK_FUNC_RECV], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
$curl_includes_bsdsocket
#ifdef HAVE_SYS_TYPES_H
@ -782,7 +659,7 @@ dnl -------------------------------------------------
dnl Test if the socket send() function is available,
AC_DEFUN([CURL_CHECK_FUNC_SEND], [
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK2])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_REQUIRE([CURL_INCLUDES_BSDSOCKET])dnl
AC_CHECK_HEADERS(sys/types.h sys/socket.h)
#
@ -790,14 +667,11 @@ AC_DEFUN([CURL_CHECK_FUNC_SEND], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
$curl_includes_bsdsocket
#ifdef HAVE_SYS_TYPES_H
@ -837,14 +711,11 @@ AC_DEFUN([CURL_CHECK_MSG_NOSIGNAL], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
@ -876,21 +747,18 @@ dnl -------------------------------------------------
dnl Check for timeval struct
AC_DEFUN([CURL_CHECK_STRUCT_TIMEVAL], [
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK2])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_CHECK_HEADERS(sys/types.h sys/time.h sys/socket.h)
AC_CACHE_CHECK([for struct timeval], [curl_cv_struct_timeval], [
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -937,14 +805,11 @@ AC_DEFUN([TYPE_IN_ADDR_T], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
@ -979,14 +844,11 @@ AC_DEFUN([TYPE_IN_ADDR_T], [
esac
],[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
@ -1197,7 +1059,7 @@ AC_DEFUN([CURL_CHECK_LIBS_CONNECT], [
AC_LANG_PROGRAM([[
$curl_includes_winsock2
$curl_includes_bsdsocket
#if !defined(HAVE_WINDOWS_H) && !defined(HAVE_PROTO_BSDSOCKET_H)
#if !defined(_WIN32) && !defined(HAVE_PROTO_BSDSOCKET_H)
int connect(int, void*, int);
#endif
]],[[
@ -1246,40 +1108,6 @@ cat >>confdefs.h <<_EOF
_EOF
])
dnl CURL_CONFIGURE_PULL_SYS_POLL
dnl -------------------------------------------------
dnl The need for the sys/poll.h inclusion arises mainly to properly
dnl interface AIX systems which define macros 'events' and 'revents'.
AC_DEFUN([CURL_CONFIGURE_PULL_SYS_POLL], [
AC_REQUIRE([CURL_INCLUDES_POLL])dnl
#
tst_poll_events_macro_defined="unknown"
#
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
$curl_includes_poll
]],[[
#if defined(events) || defined(revents)
return 0;
#else
force compilation error
#endif
]])
],[
tst_poll_events_macro_defined="yes"
],[
tst_poll_events_macro_defined="no"
])
#
if test "$tst_poll_events_macro_defined" = "yes"; then
if test "x$ac_cv_header_sys_poll_h" = "xyes"; then
CURL_DEFINE_UNQUOTED([CURL_PULL_SYS_POLL_H])
fi
fi
#
])
dnl CURL_CHECK_FUNC_SELECT
dnl -------------------------------------------------
@ -1294,15 +1122,12 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1310,7 +1135,7 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
#include <sys/time.h>
#endif
#include <time.h>
#ifndef HAVE_WINDOWS_H
#ifndef _WIN32
#ifdef HAVE_SYS_SELECT_H
#include <sys/select.h>
#elif defined(HAVE_UNISTD_H)
@ -1547,17 +1372,15 @@ dnl -------------------------------------------------
dnl Check if curl's WIN32 large file will be used
AC_DEFUN([CURL_CHECK_WIN32_LARGEFILE], [
AC_REQUIRE([CURL_CHECK_HEADER_WINDOWS])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_MSG_CHECKING([whether build target supports WIN32 file API])
curl_win32_file_api="no"
if test "$curl_cv_header_windows_h" = "yes"; then
if test "$curl_cv_native_windows" = "yes"; then
if test x"$enable_largefile" != "xno"; then
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
]],[[
#if !defined(_WIN32_WCE) && \
(defined(__MINGW32__) || \
(defined(_MSC_VER) && (defined(_WIN32) || defined(_WIN64))))
#if !defined(_WIN32_WCE) && (defined(__MINGW32__) || defined(_MSC_VER))
int dummy=1;
#else
WIN32 large file API not supported.
@ -1606,10 +1429,10 @@ dnl -------------------------------------------------
dnl Check if curl's WIN32 crypto lib can be used
AC_DEFUN([CURL_CHECK_WIN32_CRYPTO], [
AC_REQUIRE([CURL_CHECK_HEADER_WINDOWS])dnl
AC_REQUIRE([CURL_CHECK_NATIVE_WINDOWS])dnl
AC_MSG_CHECKING([whether build target supports WIN32 crypto API])
curl_win32_crypto_api="no"
if test "$curl_cv_header_windows_h" = "yes"; then
if test "$curl_cv_native_windows" = "yes"; then
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline

deps/curl/appveyor.sh vendored Normal file

@ -0,0 +1,160 @@
#!/usr/bin/env bash
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
# shellcheck disable=SC3040,SC2039
set -eux; [ -n "${BASH:-}${ZSH_NAME:-}" ] && set -o pipefail
# build
if [ "${APPVEYOR_BUILD_WORKER_IMAGE}" = 'Visual Studio 2022' ]; then
openssl_root_win='C:/OpenSSL-v30-Win64'
else
openssl_root_win='C:/OpenSSL-v111-Win64'
fi
openssl_root="$(cygpath -u "${openssl_root_win}")"
if [ "${BUILD_SYSTEM}" = 'CMake' ]; then
options=''
[[ "${TARGET:-}" = *'ARM64'* ]] && SKIP_RUN='ARM64 architecture'
[ "${OPENSSL}" = 'ON' ] && options+=" -DOPENSSL_ROOT_DIR=${openssl_root_win}"
[ "${OPENSSL}" = 'ON' ] && options+=" -DOPENSSL_ROOT_DIR=${openssl_root_win}"
[ "${PRJ_CFG}" = 'Debug' ] && options+=' -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG='
[ "${PRJ_CFG}" = 'Release' ] && options+=' -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE='
[[ "${PRJ_GEN}" = *'Visual Studio'* ]] && options+=' -DCMAKE_VS_GLOBALS=TrackFileAccess=false'
# Fails to run without this due to missing MSVCR90.dll
[ "${PRJ_GEN}" = 'Visual Studio 9 2008' ] && options+=' -DCURL_STATIC_CRT=ON'
# shellcheck disable=SC2086
cmake -B _bld "-G${PRJ_GEN}" ${TARGET:-} ${options} \
"-DCURL_USE_OPENSSL=${OPENSSL}" \
"-DCURL_USE_SCHANNEL=${SCHANNEL}" \
"-DHTTP_ONLY=${HTTP_ONLY}" \
"-DBUILD_SHARED_LIBS=${SHARED}" \
"-DBUILD_TESTING=${TESTING}" \
"-DENABLE_WEBSOCKETS=${WEBSOCKETS:-}" \
"-DCMAKE_UNITY_BUILD=${UNITY}" \
'-DCURL_WERROR=ON' \
"-DENABLE_DEBUG=${DEBUG}" \
"-DENABLE_UNICODE=${ENABLE_UNICODE}" \
'-DCMAKE_INSTALL_PREFIX=C:/CURL' \
"-DCMAKE_BUILD_TYPE=${PRJ_CFG}"
# shellcheck disable=SC2086
cmake --build _bld --config "${PRJ_CFG}" --parallel 2 --clean-first -- ${BUILD_OPT:-}
if [ "${SHARED}" = 'ON' ]; then
cp -f -p _bld/lib/*.dll _bld/src/
fi
if [ "${OPENSSL}" = 'ON' ]; then
cp -f -p "${openssl_root}"/*.dll _bld/src/
fi
curl='_bld/src/curl.exe'
elif [ "${BUILD_SYSTEM}" = 'VisualStudioSolution' ]; then
(
cd projects
./generate.bat "${VC_VERSION}"
msbuild.exe -maxcpucount "-property:Configuration=${PRJ_CFG}" "Windows/${VC_VERSION}/curl-all.sln"
)
curl="build/Win32/${VC_VERSION}/${PRJ_CFG}/curld.exe"
elif [ "${BUILD_SYSTEM}" = 'winbuild_vs2015' ]; then
./buildconf.bat
(
cd winbuild
cat << EOF > _make.bat
call "C:/Program Files/Microsoft SDKs/Windows/v7.1/Bin/SetEnv.cmd" /x64
call "C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" x86_amd64
nmake -f Makefile.vc mode=dll VC=14 "SSL_PATH=${openssl_root_win}" WITH_SSL=dll MACHINE=x64 DEBUG=${DEBUG} ENABLE_UNICODE=${ENABLE_UNICODE}
EOF
./_make.bat
rm _make.bat
)
curl="builds/libcurl-vc14-x64-${PATHPART}-dll-ssl-dll-ipv6-sspi/bin/curl.exe"
elif [ "${BUILD_SYSTEM}" = 'winbuild_vs2017' ]; then
./buildconf.bat
(
cd winbuild
cat << EOF > _make.bat
call "C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvars64.bat"
nmake -f Makefile.vc mode=dll VC=14.10 "SSL_PATH=${openssl_root_win}" WITH_SSL=dll MACHINE=x64 DEBUG=${DEBUG} ENABLE_UNICODE=${ENABLE_UNICODE}
EOF
./_make.bat
rm _make.bat
)
curl="builds/libcurl-vc14.10-x64-${PATHPART}-dll-ssl-dll-ipv6-sspi/bin/curl.exe"
elif [ "${BUILD_SYSTEM}" = 'autotools' ]; then
autoreconf -fi
(
mkdir _bld
cd _bld
# shellcheck disable=SC2086
../configure ${CONFIG_ARGS:-}
make -j2 V=1
make -j2 V=1 examples
cd tests
make -j2 V=1
)
curl='_bld/src/curl.exe'
fi
find . -name '*.exe' -o -name '*.dll'
if [ -z "${SKIP_RUN:-}" ]; then
"${curl}" --version
else
echo "Skip running curl.exe. Reason: ${SKIP_RUN}"
fi
if false; then
for log in CMakeFiles/CMakeConfigureLog.yaml CMakeFiles/CMakeOutput.log CMakeFiles/CMakeError.log; do
[ -r "_bld/${log}" ] && cat "_bld/${log}"
done
fi
if [ "${TESTING}" = 'ON' ] && [ "${BUILD_SYSTEM}" = 'CMake' ]; then
cmake --build _bld --config "${PRJ_CFG}" --parallel 2 --target testdeps
fi
# test
if [ "${TESTING}" = 'ON' ]; then
export TFLAGS=''
if [ -x "$(cygpath -u "${WINDIR}/System32/curl.exe")" ]; then
TFLAGS+=" -ac $(cygpath -u "${WINDIR}/System32/curl.exe")"
elif [ -x "$(cygpath -u "C:/msys64/usr/bin/curl.exe")" ]; then
TFLAGS+=" -ac $(cygpath -u "C:/msys64/usr/bin/curl.exe")"
fi
TFLAGS+=" ${DISABLED_TESTS:-}"
if [ "${BUILD_SYSTEM}" = 'CMake' ]; then
ls _bld/lib/*.dll >/dev/null 2>&1 && cp -f -p _bld/lib/*.dll _bld/tests/libtest/
cmake --build _bld --config "${PRJ_CFG}" --target test-ci
elif [ "${BUILD_SYSTEM}" = 'autotools' ]; then
(
cd _bld
make -j2 V=1 test-ci
)
else
(
TFLAGS="-a -p !flaky -r -rm ${TFLAGS}"
cd _bld/tests
./runtests.pl
)
fi
fi

deps/curl/appveyor.yml vendored

@ -84,8 +84,7 @@ environment:
ENABLE_UNICODE: 'OFF'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1139 !1501'
ADD_PATH: 'C:\msys64\usr\bin'
DISABLED_TESTS: '!1139 !1501 !1140 !1173 !1177 !1477'
- job_name: 'CMake, VS2022, Debug, x64, Schannel, Static, Unicode'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2022'
BUILD_SYSTEM: CMake
@ -96,8 +95,7 @@ environment:
ENABLE_UNICODE: 'ON'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1139 !1501'
ADD_PATH: 'C:\msys64\usr\bin'
DISABLED_TESTS: '!1139 !1501 !1140 !1173 !1177 !1477'
- job_name: 'CMake, VS2022, Debug, x64, no SSL, Static'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2022'
BUILD_SYSTEM: CMake
@ -108,8 +106,7 @@ environment:
ENABLE_UNICODE: 'OFF'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1139 !1501'
ADD_PATH: 'C:\msys64\usr\bin'
DISABLED_TESTS: '!1139 !1501 !1140 !1173 !1177 !1477'
- job_name: 'CMake, VS2022, Debug, x64, no SSL, Static, HTTP only'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2022'
BUILD_SYSTEM: CMake
@ -120,8 +117,7 @@ environment:
ENABLE_UNICODE: 'OFF'
HTTP_ONLY: 'ON'
TESTING: 'ON'
DISABLED_TESTS: '!1139 !1501'
ADD_PATH: 'C:\msys64\usr\bin'
DISABLED_TESTS: '!1139 !1501 !1140 !1173 !1177 !1477'
# generated CMake-based MSYS Makefiles builds (mingw cross-compiling)
- job_name: 'CMake, mingw-w64, gcc 13, Debug, x64, Schannel, Static, Unicode, Unity'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2022'
@ -132,8 +128,8 @@ environment:
ENABLE_UNICODE: 'ON'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1086 !1139 !1451 !1501'
ADD_PATH: 'C:\msys64\mingw64\bin;C:\msys64\usr\bin'
DISABLED_TESTS: '!1086 !1139 !1451 !1501 !1140 !1173 !1177 !1477'
ADD_PATH: 'C:/msys64/mingw64/bin'
MSYS2_ARG_CONV_EXCL: '/*'
BUILD_OPT: -k
UNITY: 'ON'
@ -146,8 +142,8 @@ environment:
ENABLE_UNICODE: 'ON'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1086 !1139 !1451 !1501'
ADD_PATH: 'C:\mingw-w64\x86_64-7.2.0-posix-seh-rt_v5-rev1\mingw64\bin;C:\msys64\usr\bin'
DISABLED_TESTS: '!1086 !1139 !1451 !1501 !1140 !1173 !1177 !1477'
ADD_PATH: 'C:/mingw-w64/x86_64-7.2.0-posix-seh-rt_v5-rev1/mingw64/bin'
MSYS2_ARG_CONV_EXCL: '/*'
BUILD_OPT: -k
- job_name: 'CMake, mingw-w64, gcc 9, Debug, x64, Schannel, Static, Unity'
@ -160,8 +156,8 @@ environment:
HTTP_ONLY: 'OFF'
TESTING: 'ON'
# test 286 disabled due to https://github.com/curl/curl/issues/12040
DISABLED_TESTS: '~286 !1086 !1139 !1451 !1501'
ADD_PATH: 'C:\msys64\mingw64\bin;C:\msys64\usr\bin'
DISABLED_TESTS: '~286 !1086 !1139 !1451 !1501 !1140 !1173 !1177 !1477'
ADD_PATH: 'C:/msys64/mingw64/bin'
MSYS2_ARG_CONV_EXCL: '/*'
BUILD_OPT: -k
UNITY: 'ON'
@ -174,8 +170,8 @@ environment:
ENABLE_UNICODE: 'OFF'
HTTP_ONLY: 'OFF'
TESTING: 'ON'
DISABLED_TESTS: '!1086 !1139 !1451 !1501'
ADD_PATH: 'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin;C:\msys64\usr\bin'
DISABLED_TESTS: '!1086 !1139 !1451 !1501 !1140 !1173 !1177 !1477'
ADD_PATH: 'C:/mingw-w64/i686-6.3.0-posix-dwarf-rt_v5-rev1/mingw32/bin'
MSYS2_ARG_CONV_EXCL: '/*'
BUILD_OPT: -k
# winbuild-based builds
@ -248,176 +244,42 @@ environment:
BUILD_SYSTEM: autotools
TESTING: 'ON'
DISABLED_TESTS: '!19 !1233'
ADD_PATH: 'C:\msys64\usr\bin'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --disable-proxy --without-ssl --enable-websockets'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --disable-proxy --without-ssl --enable-websockets --without-libpsl'
- job_name: 'autotools, msys2, Debug, x86_64, no SSL'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2017'
BUILD_SYSTEM: autotools
TESTING: 'ON'
DISABLED_TESTS: '!19 !504 !704 !705 !1233'
ADD_PATH: 'C:\msys64\usr\bin'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --without-ssl --enable-websockets'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --without-ssl --enable-websockets --without-libpsl'
- job_name: 'autotools, msys2, Release, x86_64, no SSL'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2017'
BUILD_SYSTEM: autotools
TESTING: 'ON'
DISABLED_TESTS: '!19 !504 !704 !705 !1233'
ADD_PATH: 'C:\msys64\usr\bin'
CONFIG_ARGS: '--enable-warnings --enable-werror --without-ssl --enable-websockets'
CONFIG_ARGS: '--enable-warnings --enable-werror --without-ssl --enable-websockets --without-libpsl'
# autotools-based Cygwin build
- job_name: 'autotools, cygwin, Debug, x86_64, no SSL'
APPVEYOR_BUILD_WORKER_IMAGE: 'Visual Studio 2022'
BUILD_SYSTEM: autotools
TESTING: 'ON'
DISABLED_TESTS: ''
ADD_PATH: 'C:\cygwin64\bin'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --without-ssl --enable-websockets'
POSIX_PATH_PREFIX: '/cygdrive'
ADD_SHELL: 'C:/cygwin64/bin'
CONFIG_ARGS: '--enable-debug --enable-werror --disable-threaded-resolver --without-ssl --enable-websockets --without-libpsl'
install:
- ps: |
if($env:ADD_SHELL -ne $null) {
$env:PATH = "$env:ADD_SHELL;$env:PATH"
}
else {
$env:PATH = "C:/msys64/usr/bin;$env:PATH"
}
if($env:ADD_PATH -ne $null) {
$env:PATH = "$env:ADD_PATH;$env:PATH"
}
build_script:
- ps: |
function Pull-BatchFile-Env {
param([string]$Path, [string]$Parameters)
$tempFile = [IO.Path]::GetTempFileName()
cmd.exe /c " `"$Path`" $Parameters && set " > $tempFile
Get-Content $tempFile | ForEach-Object { if($_ -match '^(.*?)=(.*)$') { Set-Content "env:\$($matches[1])" $matches[2] } }
Remove-Item $tempFile
}
$ErrorActionPreference = 'Stop'
if($env:APPVEYOR_BUILD_WORKER_IMAGE -eq 'Visual Studio 2022') {
$openssl_root = 'C:\OpenSSL-v30-Win64'
}
else {
$openssl_root = 'C:\OpenSSL-v111-Win64'
}
if($env:BUILD_SYSTEM -eq 'CMake') {
$options = @('-DCURL_WERROR=ON')
$options += "-G$env:PRJ_GEN"
if($env:TARGET -ne $null) {
$options += "$env:TARGET"
if($env:TARGET.Contains('ARM64')) {
$env:SKIP_RUN = 'ARM64 architecture'
}
}
$options += "-DCURL_USE_OPENSSL=$env:OPENSSL"
if($env:OPENSSL -eq 'ON') {
$options += "-DOPENSSL_ROOT_DIR=$openssl_root"
}
$options += "-DCURL_USE_SCHANNEL=$env:SCHANNEL"
$options += "-DHTTP_ONLY=$env:HTTP_ONLY"
$options += "-DBUILD_SHARED_LIBS=$env:SHARED"
$options += "-DBUILD_TESTING=$env:TESTING"
$options += "-DENABLE_WEBSOCKETS=$env:WEBSOCKETS"
$options += "-DCMAKE_UNITY_BUILD=$env:UNITY"
$options += "-DENABLE_DEBUG=$env:DEBUG"
$options += "-DENABLE_UNICODE=$env:ENABLE_UNICODE"
$options += '-DCMAKE_INSTALL_PREFIX=C:/CURL'
$options += "-DCMAKE_BUILD_TYPE=$env:PRJ_CFG"
if($env:PRJ_CFG -eq 'Debug') {
$options += '-DCMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG='
}
elseif ($env:PRJ_CFG -eq 'Release') {
$options += '-DCMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE='
}
if($env:PRJ_GEN.Contains('Visual Studio')) {
$options += '-DCMAKE_VS_GLOBALS=TrackFileAccess=false'
}
if($env:PRJ_GEN -eq 'Visual Studio 9 2008') {
# Without this it fails to run due to missing MSVCR90.dll
$options += '-DCURL_STATIC_CRT=ON'
}
Write-Host 'CMake options:' $options
cmake . $options
cmake --build . --config $env:PRJ_CFG --parallel 2 --clean-first -- $env:BUILD_OPT
if($env:SHARED -eq 'ON') {
Copy-Item -Path 'C:\Projects\curl\lib\*.dll' -Destination 'C:\projects\curl\src'
Copy-Item -Path 'C:\Projects\curl\lib\*.dll' -Destination 'C:\projects\curl\tests\libtest'
}
if($env:OPENSSL -eq 'ON') {
Copy-Item -Path "$openssl_root\*.dll" -Destination 'C:\projects\curl\src'
}
$curl = '.\src\curl.exe'
}
elseif($env:BUILD_SYSTEM -eq 'VisualStudioSolution') {
cd projects
.\generate.bat $env:VC_VERSION
msbuild.exe -maxcpucount "-property:Configuration=$env:PRJ_CFG" "Windows\$env:VC_VERSION\curl-all.sln"
$curl = "..\build\Win32\$env:VC_VERSION\$env:PRJ_CFG\curld.exe"
}
elseif($env:BUILD_SYSTEM -eq 'winbuild_vs2015') {
.\buildconf.bat
cd winbuild
Pull-BatchFile-Env 'C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd' /x64
Pull-BatchFile-Env 'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat' x86_amd64
nmake /nologo /f Makefile.vc mode=dll VC=14 "SSL_PATH=$openssl_root" WITH_SSL=dll MACHINE=x64 DEBUG=$env:DEBUG ENABLE_UNICODE=$env:ENABLE_UNICODE
$curl = "..\builds\libcurl-vc14-x64-$env:PATHPART-dll-ssl-dll-ipv6-sspi\bin\curl.exe"
}
elseif($env:BUILD_SYSTEM -eq 'winbuild_vs2017') {
.\buildconf.bat
cd winbuild
Pull-BatchFile-Env 'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat'
nmake /nologo /f Makefile.vc mode=dll VC=14.10 "SSL_PATH=$openssl_root" WITH_SSL=dll MACHINE=x64 DEBUG=$env:DEBUG ENABLE_UNICODE=$env:ENABLE_UNICODE
$curl = "..\builds\libcurl-vc14.10-x64-$env:PATHPART-dll-ssl-dll-ipv6-sspi\bin\curl.exe"
}
elseif($env:BUILD_SYSTEM -eq 'autotools') {
& bash -e -c "cd $env:POSIX_PATH_PREFIX/c/projects/curl && autoreconf -fi 2>&1 && ./configure $env:CONFIG_ARGS 2>&1 && make V=1 && make V=1 examples && cd tests && make V=1"
$curl = '.\src\curl.exe'
}
Get-ChildItem -Path C:\projects\curl -Include ('*.exe', '*.dll') -Recurse -Name
if($env:SKIP_RUN -eq $null) {
cmd.exe /c "`"$curl`" -V 2>&1"
if(-not $?) {
Write-Host "Error running curl: '$curl':" ("0x" + $LastExitCode.ToString("X"))
exit 1
}
}
else {
Write-Host "Skip running curl.exe. Reason: $env:SKIP_RUN"
}
if($false) {
if(Test-Path CMakeFiles/CMakeConfigureLog.yaml) { cat CMakeFiles/CMakeConfigureLog.yaml }
if(Test-Path CMakeFiles/CMakeOutput.log) { cat CMakeFiles/CMakeOutput.log }
if(Test-Path CMakeFiles/CMakeError.log) { cat CMakeFiles/CMakeError.log }
if(Test-Path config.log) { cat config.log }
}
if($env:TESTING -eq 'ON' -and $env:BUILD_SYSTEM -eq 'CMake') {
cmake --build . --config $env:PRJ_CFG --parallel 2 --target testdeps
}
test_script:
- ps: |
if(Test-Path 'C:/msys64/usr/bin/curl.exe') {
$acurl="-ac $env:POSIX_PATH_PREFIX/c/msys64/usr/bin/curl.exe"
}
if(Test-Path 'C:/Windows/System32/curl.exe') {
$acurl="-ac $env:POSIX_PATH_PREFIX/c/Windows/System32/curl.exe"
}
if($env:TESTING -eq 'ON') {
if($env:BUILD_SYSTEM -eq 'CMake') {
$env:TFLAGS="$acurl $env:DISABLED_TESTS"
cmake --build . --config $env:PRJ_CFG --target test-ci
}
elseif($env:BUILD_SYSTEM -eq 'autotools') {
& bash -e -c "cd $env:POSIX_PATH_PREFIX/c/projects/curl && make V=1 TFLAGS='$acurl $env:DISABLED_TESTS' test-ci"
}
else {
& bash -e -c "cd $env:POSIX_PATH_PREFIX/c/projects/curl/tests && ./runtests.pl -a -p !flaky -r -rm $acurl $env:DISABLED_TESTS"
}
}
- cmd: sh -c ./appveyor.sh
clone_depth: 10
@ -430,7 +292,7 @@ branches:
skip_commits:
files:
- '.azure-pipelines.yml'
- '.circleci/**/*'
- '.circleci/*'
- '.cirrus.yml'
- '.github/**/*'
- 'packages/**/*'

deps/curl/buildconf.bat vendored

@ -38,19 +38,7 @@ rem
cd /d "%~0\.." 1>NUL 2>&1
rem Check we are running from a curl git repository
if not exist GIT-INFO goto norepo
rem Detect programs. HAVE_<PROGNAME>
rem When not found the variable is set undefined. The undefined pattern
rem allows for statements like "if not defined HAVE_PERL (command)"
groff --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_GROFF=) else (set HAVE_GROFF=Y)
nroff --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_NROFF=) else (set HAVE_NROFF=Y)
perl --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_PERL=) else (set HAVE_PERL=Y)
gzip --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_GZIP=) else (set HAVE_GZIP=Y)
if not exist GIT-INFO.md goto norepo
:parseArgs
if "%~1" == "" goto start
@ -125,15 +113,6 @@ rem
)
cmd /c exit 0
rem Setup c-ares git tree
if exist ares\buildconf.bat (
echo.
echo Configuring c-ares build environment
cd ares
call buildconf.bat
cd ..
)
if "%BASIC_HUGEHELP%" == "1" (
if "%OS%" == "Windows_NT" endlocal
exit /B 1
@ -182,47 +161,20 @@ rem
:genHugeHelp
if "%OS%" == "Windows_NT" setlocal
set LC_ALL=C
set ROFFCMD=
set BASIC=1
if defined HAVE_PERL (
if defined HAVE_GROFF (
set ROFFCMD=groff -mtty-char -Tascii -P-c -man
) else if defined HAVE_NROFF (
set ROFFCMD=nroff -c -Tascii -man
)
)
if defined ROFFCMD (
if exist src\tool_hugehelp.c.cvs (
copy /Y src\tool_hugehelp.c.cvs src\tool_hugehelp.c 1>NUL 2>&1
) else (
echo #include "tool_setup.h"> src\tool_hugehelp.c
echo #include "tool_hugehelp.h">> src\tool_hugehelp.c
if defined HAVE_GZIP (
echo #ifndef HAVE_LIBZ>> src\tool_hugehelp.c
)
%ROFFCMD% docs\curl.1 2>NUL | perl src\mkhelp.pl docs\MANUAL >> src\tool_hugehelp.c
if defined HAVE_GZIP (
echo #else>> src\tool_hugehelp.c
%ROFFCMD% docs\curl.1 2>NUL | perl src\mkhelp.pl -c docs\MANUAL >> src\tool_hugehelp.c
echo #endif /^* HAVE_LIBZ ^*/>> src\tool_hugehelp.c
)
set BASIC=0
) else (
if exist src\tool_hugehelp.c.cvs (
copy /Y src\tool_hugehelp.c.cvs src\tool_hugehelp.c 1>NUL 2>&1
) else (
echo #include "tool_setup.h"> src\tool_hugehelp.c
echo #include "tool_hugehelp.h">> src\tool_hugehelp.c
echo.>> src\tool_hugehelp.c
echo void hugehelp(void^)>> src\tool_hugehelp.c
echo {>> src\tool_hugehelp.c
echo #ifdef USE_MANUAL>> src\tool_hugehelp.c
echo fputs("Built-in manual not included\n", stdout^);>> src\tool_hugehelp.c
echo #endif>> src\tool_hugehelp.c
echo }>> src\tool_hugehelp.c
)
echo.>> src\tool_hugehelp.c
echo void hugehelp(void^)>> src\tool_hugehelp.c
echo {>> src\tool_hugehelp.c
echo #ifdef USE_MANUAL>> src\tool_hugehelp.c
echo fputs("Built-in manual not included\n", stdout^);>> src\tool_hugehelp.c
echo #endif>> src\tool_hugehelp.c
echo }>> src\tool_hugehelp.c
)
findstr "/C:void hugehelp(void)" src\tool_hugehelp.c 1>NUL 2>&1
@ -244,13 +196,8 @@ rem Windows 9x as setlocal isn't available until Windows NT
rem
:dosCleanup
set MODE=
set HAVE_GROFF=
set HAVE_NROFF=
set HAVE_PERL=
set HAVE_GZIP=
set BASIC_HUGEHELP=
set LC_ALL
set ROFFCMD=
set BASIC=
exit /B
@ -296,10 +243,9 @@ rem
:warning
echo.
echo Warning: The curl manual could not be integrated in the source. This means when
echo you build curl the manual will not be available (curl --man^). Integration of
echo you build curl the manual will not be available (curl --manual^). Integration of
echo the manual is not required and a summary of the options will still be available
echo (curl --help^). To integrate the manual your PATH is required to have
echo groff/nroff, perl and optionally gzip for compression.
echo (curl --help^). To integrate the manual build with configure or cmake.
goto success
:error

deps/curl/configure.ac vendored

@ -159,6 +159,7 @@ curl_tls_srp_msg="no (--enable-tls-srp)"
curl_ipv6_msg="no (--enable-ipv6)"
curl_unix_sockets_msg="no (--enable-unix-sockets)"
curl_idn_msg="no (--with-{libidn2,winidn})"
curl_docs_msg="enabled (--disable-docs)"
curl_manual_msg="no (--enable-manual)"
curl_libcurl_msg="enabled (--disable-libcurl-option)"
curl_verbose_msg="enabled (--disable-verbose)"
@ -175,7 +176,7 @@ curl_headers_msg="enabled (--disable-headers-api)"
ssl_backends=
curl_h1_msg="enabled (internal)"
curl_h2_msg="no (--with-nghttp2)"
curl_h3_msg="no (--with-ngtcp2 --with-nghttp3, --with-quiche, --with-msh3)"
curl_h3_msg="no (--with-ngtcp2 --with-nghttp3, --with-quiche, --with-openssl-quic, --with-msh3)"
enable_altsvc="yes"
hsts="yes"
@ -503,6 +504,7 @@ dnl platform/compiler/architecture specific checks/flags
dnl **********************************************************************
CURL_CHECK_COMPILER
CURL_CHECK_NATIVE_WINDOWS
CURL_SET_COMPILER_BASIC_OPTS
CURL_SET_COMPILER_DEBUG_OPTS
CURL_SET_COMPILER_OPTIMIZE_OPTS
@ -583,25 +585,6 @@ dnl **********************************************************************
dnl Compilation based checks should not be done before this point.
dnl **********************************************************************
dnl **********************************************************************
dnl Make sure that our checks for headers windows.h winsock2.h
dnl and ws2tcpip.h take precedence over any other further checks which
dnl could be done later using AC_CHECK_HEADER or AC_CHECK_HEADERS for
dnl this specific header files. And do them before its results are used.
dnl **********************************************************************
CURL_CHECK_HEADER_WINDOWS
CURL_CHECK_NATIVE_WINDOWS
case X-"$curl_cv_native_windows" in
X-yes)
CURL_CHECK_HEADER_WINSOCK2
CURL_CHECK_HEADER_WS2TCPIP
;;
*)
curl_cv_header_winsock2_h="no"
curl_cv_header_ws2tcpip_h="no"
;;
esac
CURL_CHECK_WIN32_LARGEFILE
CURL_CHECK_WIN32_CRYPTO
@ -1015,6 +998,28 @@ AS_HELP_STRING([--disable-mqtt],[Disable MQTT support]),
AC_MSG_RESULT(no)
)
dnl **********************************************************************
dnl Check whether to build documentation
dnl **********************************************************************
AC_MSG_CHECKING([whether to build documentation])
AC_ARG_ENABLE(docs,
AS_HELP_STRING([--enable-docs],[Enable documentation])
AS_HELP_STRING([--disable-docs],[Disable documentation]),
[ case "$enableval" in
no)
AC_MSG_RESULT(no)
BUILD_DOCS=0
curl_docs_msg="no"
;;
*) AC_MSG_RESULT(yes)
BUILD_DOCS=1
;;
esac ],
AC_MSG_RESULT(yes)
BUILD_DOCS=1
)
dnl **********************************************************************
dnl Check for built-in manual
dnl **********************************************************************
@ -1130,24 +1135,19 @@ fi
if test "$HAVE_GETHOSTBYNAME" != "1"
then
dnl This is for winsock systems
if test "$curl_cv_header_windows_h" = "yes"; then
if test "$curl_cv_header_winsock2_h" = "yes"; then
winsock_LIB="-lws2_32"
fi
if test "$curl_cv_native_windows" = "yes"; then
winsock_LIB="-lws2_32"
if test ! -z "$winsock_LIB"; then
my_ac_save_LIBS=$LIBS
LIBS="$winsock_LIB $LIBS"
AC_MSG_CHECKING([for gethostbyname in $winsock_LIB])
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#ifdef HAVE_WINDOWS_H
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#endif
#include <windows.h>
#endif
]],[[
gethostbyname("localhost");
@ -1668,7 +1668,7 @@ AS_HELP_STRING([--disable-ipv6],[Disable IPv6 support]),
AC_RUN_IFELSE([AC_LANG_SOURCE([[
/* are AF_INET6 and sockaddr_in6 available? */
#include <sys/types.h>
#ifdef HAVE_WINSOCK2_H
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#else
@ -1678,15 +1678,12 @@ AS_HELP_STRING([--disable-ipv6],[Disable IPv6 support]),
# include <netinet/in6.h>
#endif
#endif
#include <stdlib.h> /* for exit() */
main()
int main(void)
{
struct sockaddr_in6 s;
(void)s;
if (socket(AF_INET6, SOCK_STREAM, 0) < 0)
exit(1);
else
exit(0);
return socket(AF_INET6, SOCK_STREAM, 0) < 0;
}
]])
],
@ -1707,7 +1704,7 @@ if test "$ipv6" = yes; then
AC_MSG_CHECKING([if struct sockaddr_in6 has sin6_scope_id member])
AC_COMPILE_IFELSE([ AC_LANG_PROGRAM([[
#include <sys/types.h>
#ifdef HAVE_WINSOCK2_H
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#else
@ -1850,10 +1847,7 @@ AC_INCLUDES_DEFAULT
if test "x$not_mit" = "x1"; then
dnl MIT not found, check for Heimdal
AC_CHECK_HEADER(gssapi.h,
[
dnl found
AC_DEFINE(HAVE_GSSHEIMDAL, 1, [if you have Heimdal])
],
[],
[
dnl no header found, disabling GSS
want_gss=no
@ -1862,7 +1856,6 @@ AC_INCLUDES_DEFAULT
)
else
dnl MIT found
AC_DEFINE(HAVE_GSSMIT, 1, [if you have MIT Kerberos])
dnl check if we have a really old MIT Kerberos version (<= 1.2)
AC_MSG_CHECKING([if GSS-API headers declare GSS_C_NT_HOSTBASED_SERVICE])
AC_COMPILE_IFELSE([
@ -2082,20 +2075,74 @@ dnl **********************************************************************
dnl Check for libpsl
dnl **********************************************************************
AC_ARG_WITH(libpsl,
AS_HELP_STRING([--without-libpsl],
[disable support for libpsl cookie checking]),
with_libpsl=$withval,
with_libpsl=yes)
if test $with_libpsl != "no"; then
AC_SEARCH_LIBS(psl_builtin, psl,
[curl_psl_msg="enabled";
AC_DEFINE([USE_LIBPSL], [1], [PSL support enabled])
],
[curl_psl_msg="no (libpsl not found)";
AC_MSG_WARN([libpsl was not found])
]
dnl Default to compiler & linker defaults for LIBPSL files & libraries.
OPT_LIBPSL=off
AC_ARG_WITH(libpsl,dnl
AS_HELP_STRING([--with-libpsl=PATH],[Where to look for libpsl, PATH points to the LIBPSL installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option])
AS_HELP_STRING([--without-libpsl], [disable LIBPSL]),
OPT_LIBPSL=$withval)
if test X"$OPT_LIBPSL" != Xno; then
dnl backup the pre-libpsl variables
CLEANLDFLAGS="$LDFLAGS"
CLEANCPPFLAGS="$CPPFLAGS"
CLEANLIBS="$LIBS"
case "$OPT_LIBPSL" in
yes)
dnl --with-libpsl (without path) used
CURL_CHECK_PKGCONFIG(libpsl)
if test "$PKGCONFIG" != "no" ; then
LIB_PSL=`$PKGCONFIG --libs-only-l libpsl`
LD_PSL=`$PKGCONFIG --libs-only-L libpsl`
CPP_PSL=`$PKGCONFIG --cflags-only-I libpsl`
else
dnl no libpsl pkg-config found
LIB_PSL="-lpsl"
fi
;;
off)
dnl no --with-libpsl option given, just check default places
LIB_PSL="-lpsl"
;;
*)
dnl use the given --with-libpsl spot
LIB_PSL="-lpsl"
PREFIX_PSL=$OPT_LIBPSL
;;
esac
dnl if given with a prefix, we set -L and -I based on that
if test -n "$PREFIX_PSL"; then
LD_PSL=-L${PREFIX_PSL}/lib$libsuff
CPP_PSL=-I${PREFIX_PSL}/include
fi
LDFLAGS="$LDFLAGS $LD_PSL"
CPPFLAGS="$CPPFLAGS $CPP_PSL"
LIBS="$LIB_PSL $LIBS"
AC_CHECK_LIB(psl, psl_builtin,
[
AC_CHECK_HEADERS(libpsl.h,
curl_psl_msg="enabled"
LIBPSL_ENABLED=1
AC_DEFINE(USE_LIBPSL, 1, [if libpsl is in use])
AC_SUBST(USE_LIBPSL, [1])
)
],
dnl not found, revert back to clean variables
LDFLAGS=$CLEANLDFLAGS
CPPFLAGS=$CLEANCPPFLAGS
LIBS=$CLEANLIBS
)
if test X"$OPT_LIBPSL" != Xoff &&
test "$LIBPSL_ENABLED" != "1"; then
AC_MSG_ERROR([libpsl libs and/or directories were not found where specified!])
fi
fi
AM_CONDITIONAL([USE_LIBPSL], [test "$curl_psl_msg" = "enabled"])
@ -2807,6 +2854,11 @@ esac
curl_tcp2_msg="no (--with-ngtcp2)"
if test X"$want_tcp2" != Xno; then
if test "$QUIC_ENABLED" != "yes"; then
AC_MSG_ERROR([the detected TLS library does not support QUIC, making --with-ngtcp2 a no-no])
fi
dnl backup the pre-ngtcp2 variables
CLEANLDFLAGS="$LDFLAGS"
CLEANCPPFLAGS="$CPPFLAGS"
@ -2862,7 +2914,7 @@ if test X"$want_tcp2" != Xno; then
fi
if test "x$NGTCP2_ENABLED" = "x1" -a "x$OPENSSL_ENABLED" = "x1"; then
if test "x$NGTCP2_ENABLED" = "x1" -a "x$OPENSSL_ENABLED" = "x1" -a "x$OPENSSL_IS_BORINGSSL" != "x1"; then
dnl backup the pre-ngtcp2_crypto_quictls variables
CLEANLDFLAGS="$LDFLAGS"
CLEANCPPFLAGS="$CPPFLAGS"
@ -2917,6 +2969,61 @@ if test "x$NGTCP2_ENABLED" = "x1" -a "x$OPENSSL_ENABLED" = "x1"; then
fi
fi
if test "x$NGTCP2_ENABLED" = "x1" -a "x$OPENSSL_ENABLED" = "x1" -a "x$OPENSSL_IS_BORINGSSL" = "x1"; then
dnl backup the pre-ngtcp2_crypto_boringssl variables
CLEANLDFLAGS="$LDFLAGS"
CLEANCPPFLAGS="$CPPFLAGS"
CLEANLIBS="$LIBS"
CURL_CHECK_PKGCONFIG(libngtcp2_crypto_boringssl, $want_tcp2_path)
if test "$PKGCONFIG" != "no" ; then
LIB_NGTCP2_CRYPTO_BORINGSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path])
$PKGCONFIG --libs-only-l libngtcp2_crypto_boringssl`
AC_MSG_NOTICE([-l is $LIB_NGTCP2_CRYPTO_BORINGSSL])
CPP_NGTCP2_CRYPTO_BORINGSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path]) dnl
$PKGCONFIG --cflags-only-I libngtcp2_crypto_boringssl`
AC_MSG_NOTICE([-I is $CPP_NGTCP2_CRYPTO_BORINGSSL])
LD_NGTCP2_CRYPTO_BORINGSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path])
$PKGCONFIG --libs-only-L libngtcp2_crypto_boringssl`
AC_MSG_NOTICE([-L is $LD_NGTCP2_CRYPTO_BORINGSSL])
LDFLAGS="$LDFLAGS $LD_NGTCP2_CRYPTO_BORINGSSL"
CPPFLAGS="$CPPFLAGS $CPP_NGTCP2_CRYPTO_BORINGSSL"
LIBS="$LIB_NGTCP2_CRYPTO_BORINGSSL $LIBS"
if test "x$cross_compiling" != "xyes"; then
DIR_NGTCP2_CRYPTO_BORINGSSL=`echo $LD_NGTCP2_CRYPTO_BORINGSSL | $SED -e 's/^-L//'`
fi
AC_CHECK_LIB(ngtcp2_crypto_boringssl, ngtcp2_crypto_recv_client_initial_cb,
[
AC_CHECK_HEADERS(ngtcp2/ngtcp2_crypto.h,
NGTCP2_ENABLED=1
AC_DEFINE(USE_NGTCP2_CRYPTO_BORINGSSL, 1, [if ngtcp2_crypto_boringssl is in use])
AC_SUBST(USE_NGTCP2_CRYPTO_BORINGSSL, [1])
CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_NGTCP2_CRYPTO_BORINGSSL"
export CURL_LIBRARY_PATH
AC_MSG_NOTICE([Added $DIR_NGTCP2_CRYPTO_BORINGSSL to CURL_LIBRARY_PATH])
)
],
dnl not found, revert back to clean variables
LDFLAGS=$CLEANLDFLAGS
CPPFLAGS=$CLEANCPPFLAGS
LIBS=$CLEANLIBS
)
else
dnl no ngtcp2_crypto_boringssl pkg-config found, deal with it
if test X"$want_tcp2" != Xdefault; then
dnl To avoid link errors, we do not allow --with-ngtcp2 without
dnl a pkgconfig file
AC_MSG_ERROR([--with-ngtcp2 was specified but could not find ngtcp2_crypto_boringssl pkg-config file.])
fi
fi
fi
if test "x$NGTCP2_ENABLED" = "x1" -a "x$GNUTLS_ENABLED" = "x1"; then
dnl backup the pre-ngtcp2_crypto_gnutls variables
CLEANLDFLAGS="$LDFLAGS"
@ -3027,15 +3134,55 @@ if test "x$NGTCP2_ENABLED" = "x1" -a "x$WOLFSSL_ENABLED" = "x1"; then
fi
fi
dnl **********************************************************************
dnl Check for OpenSSL QUIC
dnl **********************************************************************
OPT_OPENSSL_QUIC="no"
if test "x$disable_http" = "xyes" -o "x$OPENSSL_ENABLED" != "x1"; then
# without HTTP or without openssl, no use
OPT_OPENSSL_QUIC="no"
fi
AC_ARG_WITH(openssl-quic,
AS_HELP_STRING([--with-openssl-quic],[Enable OpenSSL QUIC usage])
AS_HELP_STRING([--without-openssl-quic],[Disable OpenSSL QUIC usage]),
[OPT_OPENSSL_QUIC=$withval])
case "$OPT_OPENSSL_QUIC" in
no)
dnl --without-openssl-quic option used
want_openssl_quic="no"
;;
yes)
dnl --with-openssl-quic option used
want_openssl_quic="yes"
;;
esac
curl_openssl_quic_msg="no (--with-openssl-quic)"
if test "x$want_openssl_quic" = "xyes"; then
if test "$NGTCP2_ENABLED" = 1; then
AC_MSG_ERROR([--with-openssl-quic and --with-ngtcp2 are mutually exclusive])
fi
if test "$HAVE_OPENSSL_QUIC" != 1; then
AC_MSG_ERROR([--with-openssl-quic requires quic support in OpenSSL])
fi
AC_DEFINE(USE_OPENSSL_QUIC, 1, [if openssl QUIC is in use])
AC_SUBST(USE_OPENSSL_QUIC, [1])
fi
dnl **********************************************************************
dnl Check for nghttp3 (HTTP/3 with ngtcp2)
dnl **********************************************************************
OPT_NGHTTP3="yes"
if test "x$NGTCP2_ENABLED" = "x"; then
# without ngtcp2, nghttp3 is of no use for us
if test "x$USE_NGTCP2" != "x1" -a "x$USE_OPENSSL_QUIC" != "x1"; then
# without ngtcp2 or openssl quic, nghttp3 is of no use for us
OPT_NGHTTP3="no"
want_nghttp3="no"
fi
AC_ARG_WITH(nghttp3,
@ -3061,6 +3208,7 @@ esac
curl_http3_msg="no (--with-nghttp3)"
if test X"$want_nghttp3" != Xno; then
dnl backup the pre-nghttp3 variables
CLEANLDFLAGS="$LDFLAGS"
CLEANCPPFLAGS="$CPPFLAGS"
@ -3091,8 +3239,6 @@ if test X"$want_nghttp3" != Xno; then
AC_CHECK_LIB(nghttp3, nghttp3_conn_client_new_versioned,
[
AC_CHECK_HEADERS(nghttp3/nghttp3.h,
curl_h3_msg="enabled (ngtcp2 + nghttp3)"
NGHTTP3_ENABLED=1
AC_DEFINE(USE_NGHTTP3, 1, [if nghttp3 is in use])
AC_SUBST(USE_NGHTTP3, [1])
CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_NGHTTP3"
@ -3117,6 +3263,29 @@ if test X"$want_nghttp3" != Xno; then
fi
dnl **********************************************************************
dnl Check for ngtcp2 and nghttp3 (HTTP/3 with ngtcp2 + nghttp3)
dnl **********************************************************************
if test "x$NGTCP2_ENABLED" = "x1" -a "x$USE_NGHTTP3" = "x1"; then
AC_DEFINE(USE_NGTCP2_H3, 1, [if ngtcp2 + nghttp3 is in use])
AC_SUBST(USE_NGTCP2_H3, [1])
AC_MSG_NOTICE([HTTP3 support is experimental])
curl_h3_msg="enabled (ngtcp2 + nghttp3)"
fi
dnl **********************************************************************
dnl Check for OpenSSL and nghttp3 (HTTP/3 with nghttp3 using OpenSSL QUIC)
dnl **********************************************************************
if test "x$USE_OPENSSL_QUIC" = "x1" -a "x$USE_NGHTTP3" = "x1"; then
experimental="$experimental HTTP3"
AC_DEFINE(USE_OPENSSL_H3, 1, [if openssl quic + nghttp3 is in use])
AC_SUBST(USE_OPENSSL_H3, [1])
AC_MSG_NOTICE([HTTP3 support is experimental])
curl_h3_msg="enabled (openssl + nghttp3)"
fi
dnl **********************************************************************
dnl Check for quiche (QUIC)
dnl **********************************************************************
@ -3151,6 +3320,10 @@ esac
if test X"$want_quiche" != Xno; then
if test "$QUIC_ENABLED" != "yes"; then
AC_MSG_ERROR([the detected TLS library does not support QUIC, making --with-quiche a no-no])
fi
if test "$NGHTTP3_ENABLED" = 1; then
AC_MSG_ERROR([--with-quiche and --with-ngtcp2 are mutually exclusive])
fi
@ -3249,9 +3422,22 @@ esac
if test X"$want_msh3" != Xno; then
dnl msh3 on non-Windows needs an OpenSSL with the QUIC API
if test "$curl_cv_native_windows" != "yes"; then
if test "$QUIC_ENABLED" != "yes"; then
AC_MSG_ERROR([the detected TLS library does not support QUIC, making --with-msh3 a no-no])
fi
if test "$OPENSSL_ENABLED" != "1"; then
AC_MSG_ERROR([msh3 requires OpenSSL])
fi
fi
if test "$NGHTTP3_ENABLED" = 1; then
AC_MSG_ERROR([--with-msh3 and --with-ngtcp2 are mutually exclusive])
fi
if test "$QUICHE_ENABLED" = 1; then
AC_MSG_ERROR([--with-msh3 and --with-quiche are mutually exclusive])
fi
dnl backup the pre-msh3 variables
CLEANLDFLAGS="$LDFLAGS"
@ -3297,10 +3483,10 @@ AS_HELP_STRING([--with-zsh-functions-dir=PATH],[Install zsh completions to PATH]
AS_HELP_STRING([--without-zsh-functions-dir],[Do not install zsh completions]),
[OPT_ZSH_FPATH=$withval])
case "$OPT_ZSH_FPATH" in
no)
default|no)
dnl --without-zsh-functions-dir option used
;;
default|yes)
yes)
dnl --with-zsh-functions-dir option used without path
ZSH_FUNCTIONS_DIR="$datarootdir/zsh/site-functions"
AC_SUBST(ZSH_FUNCTIONS_DIR)
@ -3311,6 +3497,7 @@ case "$OPT_ZSH_FPATH" in
AC_SUBST(ZSH_FUNCTIONS_DIR)
;;
esac
AM_CONDITIONAL(USE_ZSH_COMPLETION, test x"$ZSH_FUNCTIONS_DIR" != x)
dnl **********************************************************************
dnl Check for fish completion path
@ -3322,10 +3509,10 @@ AS_HELP_STRING([--with-fish-functions-dir=PATH],[Install fish completions to PAT
AS_HELP_STRING([--without-fish-functions-dir],[Do not install fish completions]),
[OPT_FISH_FPATH=$withval])
case "$OPT_FISH_FPATH" in
no)
default|no)
dnl --without-fish-functions-dir option used
;;
default|yes)
yes)
dnl --with-fish-functions-dir option used without path
CURL_CHECK_PKGCONFIG(fish)
if test "$PKGCONFIG" != "no" ; then
@ -3341,6 +3528,7 @@ case "$OPT_FISH_FPATH" in
AC_SUBST(FISH_FUNCTIONS_DIR)
;;
esac
AM_CONDITIONAL(USE_FISH_COMPLETION, test x"$FISH_FUNCTIONS_DIR" != x)
dnl Now check for the very most basic headers. Then we can use these
dnl ones as default-headers when checking for the rest!
@ -3473,6 +3661,12 @@ AC_CHECK_TYPE(sa_family_t,
AC_DEFINE(CURL_SA_FAMILY_T, ADDRESS_FAMILY, [IP address type in sockaddr]),
AC_DEFINE(CURL_SA_FAMILY_T, unsigned short, [IP address type in sockaddr]),
[
#ifdef _WIN32
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <winsock2.h>
#endif
#ifdef HAVE_SYS_SOCKET_H
#include <sys/socket.h>
#endif
@ -3516,8 +3710,6 @@ CURL_RUN_IFELSE(
AC_MSG_RESULT([no])
])
CURL_CONFIGURE_PULL_SYS_POLL
TYPE_IN_ADDR_T
TYPE_SOCKADDR_STORAGE
@ -3634,47 +3826,19 @@ AC_CHECK_DECL([fseeko],
CURL_CHECK_NONBLOCKING_SOCKET
dnl ************************************************************
dnl nroff tool stuff
dnl
AC_PATH_PROG( PERL, perl, ,
$PATH:/usr/local/bin/perl:/usr/bin/:/usr/local/bin )
AC_SUBST(PERL)
AC_PATH_PROGS( NROFF, gnroff nroff, ,
$PATH:/usr/bin/:/usr/local/bin )
AC_SUBST(NROFF)
if test -n "$NROFF"; then
dnl only check for nroff options if an nroff command was found
AC_MSG_CHECKING([how to use *nroff to get plain text from man pages])
MANOPT="-man"
mancheck=`echo foo | $NROFF $MANOPT 2>/dev/null`
if test -z "$mancheck"; then
MANOPT="-mandoc"
mancheck=`echo foo | $NROFF $MANOPT 2>/dev/null`
if test -z "$mancheck"; then
MANOPT=""
AC_MSG_RESULT([failed])
AC_MSG_WARN([found no *nroff option to get plaintext from man pages])
else
AC_MSG_RESULT([$MANOPT])
fi
else
AC_MSG_RESULT([$MANOPT])
fi
AC_SUBST(MANOPT)
if test -z "$PERL"; then
dnl if perl was not found then disable building docs
AC_MSG_WARN([disabling documentation])
BUILD_DOCS=0
curl_docs_msg="no"
fi
if test -z "$MANOPT"
then
dnl if no nroff tool was found, or no option that could convert man pages
dnl was found, then disable the built-in manual stuff
AC_MSG_WARN([disabling built-in manual])
USE_MANUAL="no";
fi
dnl set variable for use in automakefile(s)
AM_CONDITIONAL(BUILD_DOCS, test x"$BUILD_DOCS" = x1)
dnl *************************************************************************
dnl If the manual variable still is set, then we go with providing a built-in
@ -4578,8 +4742,11 @@ if test "x$USE_NGHTTP2" = "x1"; then
SUPPORT_FEATURES="$SUPPORT_FEATURES HTTP2"
fi
if test "x$USE_NGTCP2" = "x1" -o "x$USE_QUICHE" = "x1" \
-o "x$USE_MSH3" = "x1"; then
if test "x$USE_NGTCP2_H3" = "x1" -o "x$USE_QUICHE" = "x1" \
-o "x$USE_OPENSSL_H3" = "x1" -o "x$USE_MSH3" = "x1"; then
if test "x$CURL_WITH_MULTI_SSL" = "x1"; then
AC_MSG_ERROR([MultiSSL cannot be enabled with HTTP/3 and vice versa])
fi
SUPPORT_FEATURES="$SUPPORT_FEATURES HTTP3"
fi
@ -4653,7 +4820,7 @@ AC_SUBST(SUPPORT_FEATURES)
dnl For supported protocols in pkg-config file
if test "x$CURL_DISABLE_HTTP" != "x1"; then
SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS HTTP"
SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS HTTP IPFS IPNS"
if test "x$SSL_ENABLED" = "x1"; then
SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS HTTPS"
fi
@ -4825,6 +4992,7 @@ AC_MSG_NOTICE([Configured to build curl/libcurl:
IPv6: ${curl_ipv6_msg}
Unix sockets: ${curl_unix_sockets_msg}
IDN: ${curl_idn_msg}
Build docs: ${curl_docs_msg}
Build libcurl: Shared=${enable_shared}, Static=${enable_static}
Built-in manual: ${curl_manual_msg}
--libcurl option: ${curl_libcurl_msg}
@ -4850,6 +5018,14 @@ AC_MSG_NOTICE([Configured to build curl/libcurl:
Protocols: ${SUPPORT_PROTOCOLS}
Features: ${SUPPORT_FEATURES}
])
non13=`echo "$TLSCHOICE" | grep -Ei 'bearssl|secure-transport|mbedtls'`;
if test -n "$non13"; then
cat >&2 << _EOF
WARNING: A selected TLS library ($TLSCHOICE) does not support TLS 1.3!
_EOF
fi
if test -n "$experimental"; then
cat >&2 << _EOF
WARNING: $experimental enabled but marked EXPERIMENTAL. Use with caution!

deps/curl/docs/.gitignore vendored

@ -2,7 +2,5 @@
#
# SPDX-License-Identifier: curl
*.html
*.pdf
curl.1
*.1.dist
*.1
*.3

deps/curl/docs/ALTSVC.md vendored

@ -24,16 +24,16 @@ space separated fields.
## Fields
1. The ALPN id for the source origin
2. The host name for the source origin
2. The hostname for the source origin
3. The port number for the source origin
4. The ALPN id for the destination host
5. The host name for the destination host
6. The host number for the destination host
5. The hostname for the destination host
6. The port number for the destination host
7. The expiration date and time of this entry within double quotes. The date format is "YYYYMMDD HH:MM:SS" and the time zone is GMT.
8. Boolean (1 or 0) if "persist" was set for this entry
9. Integer priority value (not currently used)
If the host name is an IPv6 numerical address, it is stored with brackets such
If the hostname is an IPv6 numerical address, it is stored with brackets such
as `[::1]`.
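For illustration, a hypothetical entry following the nine fields above could
look like this (values invented, not taken from an actual cache file):

```
h2 example.org 443 h3 example.org 443 "20240515 00:00:00" 1 0
```

Read left to right: the origin `h2://example.org:443` maps to the destination
`h3://example.org:443`, the mapping expires 2024-05-15 00:00:00 GMT, "persist"
was set (1) and the priority value is 0.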
# TODO

deps/curl/docs/BINDINGS.md vendored

@ -96,6 +96,8 @@ Bailiff and Bálint Szilakszi,
[Python](http://pycurl.io/) PycURL by Kjetil Jacobsen
[Python](https://pypi.org/project/pymcurl/) mcurl by Ganesh Viswanathan
[Q](https://q-lang.sourceforge.net/) The libcurl module is part of the default install
[R](https://cran.r-project.org/package=curl)

deps/curl/docs/BUFQ.md vendored

@ -13,12 +13,12 @@ as many internal Curl read and write ones.
ssize_t Curl_bufq_write(struct bufq *q, const unsigned char *buf, size_t len, CURLcode *err);
- returns the length written into `q` or -1 on error.
- writing to a full `q` will return -1 and set *err to CURLE_AGAIN
- writing to a full `q` returns -1 and set *err to CURLE_AGAIN
ssize_t Curl_bufq_read(struct bufq *q, unsigned char *buf, size_t len, CURLcode *err);
- returns the length read from `q` or -1 on error.
- reading from an empty `q` will return -1 and set *err to CURLE_AGAIN
- reading from an empty `q` returns -1 and set *err to CURLE_AGAIN
```
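A minimal sketch of the pair in use (not part of the diff; it assumes curl's
internal `bufq.h` header and that a `Curl_bufq_free()` cleanup counterpart
exists, see the "lifetime" section):

```c
#include "bufq.h" /* curl-internal header, assumed include path */

static CURLcode roundtrip(void)
{
  struct bufq q;
  unsigned char out[256];
  CURLcode result = CURLE_OK;
  ssize_t n;

  Curl_bufq_init(&q, 1024, 4); /* chunks of 1024 bytes, at most 4 */

  n = Curl_bufq_write(&q, (const unsigned char *)"hello", 5, &result);
  if(n < 0 && result != CURLE_AGAIN) /* CURLE_AGAIN only means: full */
    goto out;

  n = Curl_bufq_read(&q, out, sizeof(out), &result);
  if(n < 0 && result == CURLE_AGAIN) /* CURLE_AGAIN only means: empty */
    result = CURLE_OK;               /* not an error, just try again later */

out:
  Curl_bufq_free(&q); /* assumed cleanup counterpart to Curl_bufq_init() */
  return result;
}
```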
@ -32,10 +32,11 @@ ssize_t Curl_bufq_slurp(struct bufq *q, Curl_bufq_reader *reader, void *reader_c
CURLcode *err);
```
`Curl_bufq_slurp()` will invoke the given `reader` callback, passing it its own internal
buffer memory to write to. It may invoke the `reader` several times, as long as it has space
and while the `reader` always returns the length that was requested. There are variations of `slurp` that call the `reader` at most once or only read in a
maximum amount of bytes.
`Curl_bufq_slurp()` invokes the given `reader` callback, passing it its own
internal buffer memory to write to. It may invoke the `reader` several times,
as long as it has space and while the `reader` always returns the length that
was requested. There are variations of `slurp` that call the `reader` at most
once or only read in a maximum amount of bytes.
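As a sketch, a `reader` that wraps a file descriptor could look like below.
The callback shape is an assumption derived from the `Curl_bufq_slurp()`
prototype above (context pointer, destination buffer, length, `CURLcode`
out-parameter), and `fd_reader` is a hypothetical name:

```c
#include <unistd.h> /* read() */

static ssize_t fd_reader(void *reader_ctx, unsigned char *buf, size_t len,
                         CURLcode *err)
{
  int fd = *(int *)reader_ctx;  /* the context carries the descriptor */
  ssize_t n = read(fd, buf, len);
  if(n < 0) {
    *err = CURLE_RECV_ERROR;    /* report failure via *err and -1 */
    return -1;
  }
  *err = CURLE_OK;
  return n; /* a short read ends slurp's invocation loop */
}
```

It would then be invoked as `Curl_bufq_slurp(&q, fd_reader, &fd, &result);`.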
The analogous mechanism for writing out buffer data is:
@ -47,8 +48,8 @@ ssize_t Curl_bufq_pass(struct bufq *q, Curl_bufq_writer *writer, void *writer_ct
CURLcode *err);
```
`Curl_bufq_pass()` will invoke the `writer`, passing its internal memory and remove the
amount that `writer` reports.
`Curl_bufq_pass()` invokes the `writer`, passing its internal memory and
removes the amount that `writer` reports.
## peek and skip
@ -58,8 +59,8 @@ It is possible to get access to the memory of data stored in a `bufq` with:
bool Curl_bufq_peek(const struct bufq *q, const unsigned char **pbuf, size_t *plen);
```
On returning TRUE, `pbuf` will point to internal memory with `plen` bytes that one may read. This will only
be valid until another operation on `bufq` is performed.
On returning TRUE, `pbuf` points to internal memory with `plen` bytes that one
may read. This is only valid until another operation on `bufq` is performed.
Instead of reading `bufq` data, one may simply skip it:
@ -67,20 +68,22 @@ Instead of reading `bufq` data, one may simply skip it:
void Curl_bufq_skip(struct bufq *q, size_t amount);
```
This will remove `amount` number of bytes from the `bufq`.
This removes `amount` number of bytes from the `bufq`.
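A short sketch combining the two calls; `consume()` stands in for a
hypothetical handler that returns how many bytes it accepted, and returning
FALSE from `Curl_bufq_peek()` is assumed to mean there is nothing buffered:

```c
const unsigned char *buf;
size_t blen;

while(Curl_bufq_peek(&q, &buf, &blen)) {
  size_t used = consume(buf, blen); /* hypothetical consumer */
  Curl_bufq_skip(&q, used);         /* drop only what was handled */
  if(used < blen)
    break;                          /* consumer cannot take more right now */
}
```

The peeked pointer is only valid until the next operation on the `bufq`, so
the data is consumed before `Curl_bufq_skip()` gets to invalidate it.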
## lifetime
`bufq` is initialized and freed similar to the `dynbuf` module. Code using `bufq` will
hold a `struct bufq` somewhere. Before it uses it, it invokes:
`bufq` is initialized and freed similarly to the `dynbuf` module. Code using
`bufq` holds a `struct bufq` somewhere. Before it uses it, it invokes:
```
void Curl_bufq_init(struct bufq *q, size_t chunk_size, size_t max_chunks);
```
The `bufq` is told how many "chunks" of data it shall hold at maximum and how large those
"chunks" should be. There are some variants of this, allowing for more options. How "chunks" are handled in a `bufq` is presented in the section about memory management.
The `bufq` is told how many "chunks" of data it shall hold at maximum and how
large those "chunks" should be. There are some variants of this, allowing for
more options. How "chunks" are handled in a `bufq` is presented in the section
about memory management.
The user of the `bufq` has the responsibility to call:
@ -95,37 +98,58 @@ void Curl_bufq_reset(struct bufq *q);
## memory management
Internally, a `bufq` uses allocation of fixed size, e.g. the "chunk_size", up to a maximum number, e.g. "max_chunks". These chunks are allocated on demand, therefore writing to a `bufq` may return `CURLE_OUT_OF_MEMORY`. Once the max number of chunks are used, the `bufq` will report that it is "full".
Internally, a `bufq` uses allocation of fixed size, e.g. the "chunk_size", up
to a maximum number, e.g. "max_chunks". These chunks are allocated on demand,
therefore writing to a `bufq` may return `CURLE_OUT_OF_MEMORY`. Once the max
number of chunks are used, the `bufq` reports that it is "full".
Each chunks has a `read` and `write` index. A `bufq` keeps its chunks in a list. Reading happens always at the head chunk, writing always goes to the tail chunk. When the head chunk becomes empty, it is removed. When the tail chunk becomes full, another chunk is added to the end of the list, becoming the new tail.
Each chunk has a `read` and `write` index. A `bufq` keeps its chunks in a
list. Reading happens always at the head chunk, writing always goes to the
tail chunk. When the head chunk becomes empty, it is removed. When the tail
chunk becomes full, another chunk is added to the end of the list, becoming
the new tail.
Chunks that are no longer used are returned to a `spare` list by default. If the `bufq` is created with option `BUFQ_OPT_NO_SPARES` those chunks will be freed right away.
Chunks that are no longer used are returned to a `spare` list by default. If
the `bufq` is created with option `BUFQ_OPT_NO_SPARES` those chunks are freed
right away.
If a `bufq` is created with a `bufc_pool`, the no longer used chunks are returned to the pool. Also `bufq` will ask the pool for a chunk when it needs one. More in section "pools".
If a `bufq` is created with a `bufc_pool`, the no longer used chunks are
returned to the pool. Also `bufq` asks the pool for a chunk when it needs one.
More in section "pools".
## empty, full and overflow
One can ask about the state of a `bufq` with methods such as `Curl_bufq_is_empty(q)`,
`Curl_bufq_is_full(q)`, etc. The amount of data held by a `bufq` is the sum of the data in all its chunks. This is what is reported by `Curl_bufq_len(q)`.
One can ask about the state of a `bufq` with methods such as
`Curl_bufq_is_empty(q)`, `Curl_bufq_is_full(q)`, etc. The amount of data held
by a `bufq` is the sum of the data in all its chunks. This is what is reported
by `Curl_bufq_len(q)`.
Note that a `bufq` length and it being "full" are only loosely related. A simple example:
Note that a `bufq` length and it being "full" are only loosely related. A
simple example:
* create a `bufq` with chunk_size=1000 and max_chunks=4.
* write 4000 bytes to it, it will report "full"
* read 1 bytes from it, it will still report "full"
* read 999 more bytes from it, and it will no longer be "full"
* write 4000 bytes to it, it reports "full"
* read 1 byte from it, it still reports "full"
* read 999 more bytes from it, and it is no longer "full"
The reason for this is that full really means: *bufq uses max_chunks and the last one cannot be written to*.
The reason for this is that full really means: *bufq uses max_chunks and the
last one cannot be written to*.
So when you read 1 byte from the head chunk in the example above, the head still hold 999 unread bytes. Only when those are also read, can the head chunk be removed and a new tail be added.
When you read 1 byte from the head chunk in the example above, the head still
holds 999 unread bytes. Only when those are also read can the head chunk be
removed and a new tail be added.
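The same walk-through as a sketch in code, assuming `data4k` points to 4000
bytes and using the `Curl_bufq_is_full()` check mentioned above:

```c
struct bufq q;
unsigned char tmp[1000];
CURLcode result;

Curl_bufq_init(&q, 1000, 4);                /* chunk_size=1000, max_chunks=4 */
Curl_bufq_write(&q, data4k, 4000, &result); /* fills all 4 chunks */
assert(Curl_bufq_is_full(&q));
Curl_bufq_read(&q, tmp, 1, &result);        /* head chunk keeps 999 bytes */
assert(Curl_bufq_is_full(&q));              /* still reports "full" */
Curl_bufq_read(&q, tmp, 999, &result);      /* head chunk emptied, removed */
assert(!Curl_bufq_is_full(&q));
```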
There is another variation to this. If you initialized a `bufq` with option `BUFQ_OPT_SOFT_LIMIT`, it will allow writes **beyond** the `max_chunks`. It will report **full**, but one can **still** write. This option is necessary, if partial writes need to be avoided. But it means that you will need other checks to keep the `bufq` from growing ever larger and larger.
There is another variation to this. If you initialized a `bufq` with option
`BUFQ_OPT_SOFT_LIMIT`, it allows writes **beyond** the `max_chunks`. It
reports **full**, but one can **still** write. This option is necessary if
partial writes need to be avoided. It means that you need other checks to keep
the `bufq` from growing ever larger and larger.
## pools
A `struct bufc_pool` may be used to create chunks for a `bufq` and keep spare
ones around. It is initialized and used via:
```
void Curl_bufcp_init(struct bufc_pool *pool, size_t chunk_size, size_t spare_max);
void Curl_bufq_initp(struct bufq *q, struct bufc_pool *pool, size_t max_chunks, int opts);
```
The pool gets the size and the amount of spares to keep. The `bufq` gets the
pool and the `max_chunks`. It no longer needs to know the chunk sizes, as
those are managed by the pool.
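A sketch of that setup, using the calls above (sizes and counts made up for
illustration):

```
struct bufc_pool pool;
struct bufq q1, q2;

Curl_bufcp_init(&pool, 16 * 1024, 8);  /* 16KB chunks, keep up to 8 spares */
Curl_bufq_initp(&q1, &pool, 64, BUFQ_OPT_NONE);
Curl_bufq_initp(&q2, &pool, 64, BUFQ_OPT_NONE);
/* ... both queues draw chunks from and return them to the pool ... */
Curl_bufq_free(&q1);
Curl_bufq_free(&q2);
Curl_bufcp_free(&pool);
```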
A pool can be shared between many `bufq`s, as long as all of them operate in
the same thread. In curl that would be true for all transfers using the same
multi handle. The advantages of a pool are:
* when all `bufq`s are empty, only memory for `max_spare` chunks in the pool
  is used. Empty `bufq`s hold no memory.
* the latest spare chunk is the first to be handed out again, no matter which
  `bufq` needs it. This keeps the footprint of "recently used" memory smaller.
View File
@ -44,8 +44,7 @@ void Curl_bufref_set(struct bufref *br, const void *buffer, size_t length,
Releases the previously referenced buffer, then assigns the new `buffer` to
the structure, associated with its `destructor` function. The latter can be
specified as `NULL`: this is the case when the referenced buffer is static.
If `buffer` is NULL, `length` must be zero.
View File
@ -21,8 +21,8 @@ security vulnerabilities. The amount of money that is rewarded depends on how
serious the flaw is determined to be.
Since 2021, the Bug Bounty is managed in association with the Internet Bug
Bounty and they set the reward amounts. If it would turn out that they set
amounts that are way lower than we can accept, the curl project intends to
"top up" rewards.
In 2022, typical "Medium" rated vulnerabilities have been rewarded 2,400 USD
@ -40,7 +40,7 @@ Vulnerabilities in features that are off by default and documented as
experimental are not eligible for a reward.
The vulnerability has to be fixed and publicly announced (by the curl project)
before a bug bounty is considered.
Once the vulnerability has been published by curl, the researcher can request
their bounty from the [Internet Bug Bounty](https://hackerone.com/ibb).
@ -48,6 +48,9 @@ their bounty from the [Internet Bug Bounty](https://hackerone.com/ibb).
Bounties need to be requested within twelve months from the publication of the
vulnerability.
The curl security team reserves the right to deny or allow bug bounty payouts
at its own discretion. There is no appeals process.
## Product vulnerabilities only
This bug bounty only concerns the curl and libcurl products and thus their
@ -60,9 +63,9 @@ bounty or not.
## How are vulnerabilities graded?
The grading of each reported vulnerability that makes a reward claim is
performed by the curl security team. The grading is based on the CVSS (Common
Vulnerability Scoring System) 3.0.
## How are reward amounts determined?
View File
@ -3,9 +3,9 @@
## There are still bugs
Curl and libcurl keep being developed. Adding features and changing code
means that bugs sneak in, no matter how hard we try to keep them out.
Of course there are lots of bugs left. Not to mention misfeatures.
To help us make curl the stable and solid product we want it to be, we need
bug reports and bug fixes.
@ -34,16 +34,16 @@
HackerOne](https://hackerone.com/curl).
This ensures that the report reaches the curl security team so that they
first can deal with the report away from the public to minimize the harm and
impact it has on existing users out there who might be using the vulnerable
versions.
The curl project's process for handling security related issues is
[documented separately](https://curl.se/dev/secprocess.html).
## What to report
When reporting a bug, you should include all information to help us
understand what is wrong, what you expected to happen and how to repeat the
bad behavior. You therefore need to tell us:
@ -58,8 +58,8 @@
and anything and everything else you think matters. Tell us what you expected
to happen, tell us what did happen, tell us how you could make it work
another way. Dig around, try out, test. Then include all the tiny bits and
pieces in your report. You benefit from this yourself, as it enables us to
help you quicker and more accurately.
Since curl deals with networks, it often helps us if you include a protocol
debug dump with your bug report. The output you get by using the `-v` or
@ -84,15 +84,15 @@
SCP, the libssh2 version is relevant etc.
Showing us a real source code example repeating your problem is the best way
to get our attention and it greatly increases our chances to understand your
problem and to work on a fix (if we agree it truly is a problem).
Lots of problems that appear to be libcurl problems are actually just abuses
of the libcurl API or other malfunctions in your applications. It is advised
that you run your problematic program using a memory debug tool like valgrind
or similar before you post memory-related or "crashing" problems to us.
## Who fixes the problems
If the problems or bugs you describe are considered to be bugs, we want to
have the problems fixed.
@ -102,11 +102,11 @@
it out of an ambition to keep curl and libcurl excellent products and out of
pride.
Please do not assume that you can just lump over something to us and it then
magically gets fixed after some given time. Most often we need feedback and
help to understand what you have experienced and how to repeat a problem.
Then we may only be able to assist YOU to debug the problem and to track down
the proper fix.
We get reports from many people every month and each report can take a
considerable amount of time to really get to the bottom of.
@ -119,23 +119,23 @@
Run the program until it cores.
Run your debugger on the core file, like `<debugger> curl core`. `<debugger>`
should be replaced with the name of your debugger, in most cases that is
`gdb`, but `dbx` and others also occur.
When the debugger has finished loading the core file and presents you a
prompt, enter `where` (without quotes) and press return.
The list that is presented is the stack trace. If everything worked, it is
supposed to contain the chain of functions that were called when curl
crashed. Include the stack trace with your detailed bug report, it helps a
lot.
## Bugs in libcurl bindings
There are of course bugs in libcurl bindings. You should then primarily
approach the team that works on that particular binding and see what you can
do to help them fix the problem.
If you suspect that the problem exists in the underlying libcurl, then please
convert your program over to plain C and follow the steps outlined above.
@ -181,13 +181,13 @@
maybe they are off in the woods hunting. Have patience. Allow at least a few
days before expecting someone to have responded.
In the issue tracker, you can expect that some labels are set on the issue to
help categorize it.
## First response
If your issue/bug report was not perfect at once (and few are), chances are
that someone asks follow-up questions. Which version did you use? Which
options did you use? How often does the problem occur? How can we reproduce
this problem? Which protocols does it involve? Or perhaps much more specific
and deep diving questions. It all depends on your specific issue.
@ -210,8 +210,8 @@
for discussing possible ways to move forward with the task, we take that as a
strong suggestion that the bug is unimportant.
Unimportant issues are closed as inactive sooner or later as they cannot be
fixed. The inactivity period (waiting for responses) should not be shorter
than two weeks but may extend months.
## Lack of time/interest
@ -240,9 +240,8 @@
Issues that are filed or reported that are not really bugs but more missing
features or ideas for future improvements and so on are marked as
*enhancement* or *feature-request* and get added to the `TODO` document and
the issues are closed. We do not keep TODO items open in the issue tracker.
The `TODO` document is full of ideas and suggestions of what we can add or
fix one day. You are always encouraged and free to grab one of those items and
@ -255,11 +254,11 @@
## Closing off stalled bugs
The [issue and pull request trackers](https://github.com/curl/curl) only hold
"active" entries open (using a non-precise definition of what active actually
is, but they are at least not completely dead). Those that are abandoned or
in other ways dormant are closed and sometimes added to `TODO` and
`KNOWN_BUGS` instead.
This way, we only have "active" issues open on GitHub. Irrelevant issues and
pull requests do not distract developers or casual visitors.
View File
@ -12,7 +12,7 @@ check that it adheres to our [Source Code Style guide](CODE_STYLE.md).
`-W[file]` skip that file and exclude it from being checked. Helpful
when, for example, one of the files is generated.
`-D[dir]` directory name to prepend to filenames when accessing them.
`-h` shows the help output, that also lists all recognized warnings
@ -73,7 +73,7 @@ warnings are:
- `FOPENMODE`: `fopen()` needs a macro for the mode string, use it
- `INDENTATION`: detected a wrong start column for code. Note that this
  warning only checks some specific places and can certainly miss many bad
indentations.
- `LONGLINE`: A line is longer than 79 columns.
@ -158,21 +158,21 @@ Example
/* !checksrc! disable LONGLINE all */
This ignores the warning for overly long lines until it is re-enabled with:
/* !checksrc! enable LONGLINE */
If the enabling is not performed before the end of the file, it is enabled
again automatically for the next file.
You can also opt to ignore just N violations so that if you have a single long
line that you just cannot shorten and that is agreed to be fine anyway:
/* !checksrc! disable LONGLINE 1 */
... and the warning for long lines is enabled again automatically after it has
ignored that single warning. The number `1` can of course be changed to any
other integer number. It can be used to make sure only the exact intended
instances are ignored and nothing extra.
### Directory wide ignore patterns
View File
@ -290,9 +290,9 @@ next section.
There is also the case that the selected algorithm is not supported by the
protocol or does not match the ciphers offered by the server during the SSL
negotiation. In this case curl returns error
`CURLE_SSL_CONNECT_ERROR (35) SEC_E_ALGORITHM_MISMATCH`
and the request fails.
`CALG_MD2`,
`CALG_MD4`,
@ -353,7 +353,7 @@ are running an outdated OS you might still be supporting weak ciphers.
You can set TLS 1.3 ciphers for Schannel by using `CURLOPT_TLS13_CIPHERS` or
`--tls13-ciphers` with the names below.
If TLS 1.3 cipher suites are set then libcurl adds or restricts Schannel TLS
1.3 algorithms automatically. Essentially, libcurl is emulating support for
individual TLS 1.3 cipher suites since Schannel does not support it directly.
@ -363,10 +363,10 @@ individual TLS 1.3 cipher suites since Schannel does not support it directly.
`TLS_AES_128_CCM_8_SHA256`
`TLS_AES_128_CCM_SHA256`
Note if you set TLS 1.3 ciphers without also setting the minimum TLS version
to 1.3 then it is possible Schannel may negotiate an earlier TLS version and
cipher suite if your libcurl and OS settings allow it. You can set the minimum
TLS version by using `CURLOPT_SSLVERSION` or `--tlsv1.3`.
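In libcurl terms, that combination could look like this (a sketch using the
public options mentioned above):

```
/* pin the connection to TLS 1.3 and one specific cipher suite */
curl_easy_setopt(curl, CURLOPT_SSLVERSION, (long)CURL_SSLVERSION_TLSv1_3);
curl_easy_setopt(curl, CURLOPT_TLS13_CIPHERS, "TLS_AES_256_GCM_SHA384");
```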
## BearSSL
deps/curl/docs/CLIENT-READERS.md vendored Normal file
View File
@ -0,0 +1,126 @@
# curl client readers
Client readers is a design in the internals of libcurl, not visible in its public API. They were started
in curl v8.7.0. This document describes the concepts, their high level implementation and the motivations.
## Naming
`libcurl` operates between clients and servers. A *client* is the application using libcurl, like the command line tool `curl` itself. Data to be uploaded to a server is **read** from the client and **sent** to the server; the server's response is **received** by `libcurl` and then **written** to the client.
With this naming established, client readers are concerned with providing data from the application to the server. Applications register callbacks via `CURLOPT_READFUNCTION`, data via `CURLOPT_POSTFIELDS` and other options to be used by `libcurl` when the request is sent.
## Invoking
The transfer loop that sends and receives uses `Curl_client_read()` to get more data to send for a transfer. If no specific reader has been installed yet, the default one that uses `CURLOPT_READFUNCTION` is added. The prototype is
```
CURLcode Curl_client_read(struct Curl_easy *data, char *buf, size_t blen,
                          size_t *nread, bool *eos);
```
The arguments are the transfer to read for, a buffer to hold the read data, its length, the actual number of bytes placed into the buffer and the `eos` (*end of stream*) flag indicating that no more data is available. The `eos` flag may be set for a read amount, if that amount was the last. That way curl can avoid reading an additional time.
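A sketch of how a caller might use it (buffer size made up):

```
char buf[16 * 1024];
size_t nread;
bool eos = FALSE;
CURLcode result = Curl_client_read(data, buf, sizeof(buf), &nread, &eos);
if(!result && nread) {
  /* send the `nread` bytes; when `eos` is TRUE, no further read is needed */
}
```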
The implementation of `Curl_client_read()` uses a chain of *client reader* instances to get the data. This is similar to the design of *client writers*. The chain of readers allows processing of the data to send.
The definition of a reader is:
```
struct Curl_crtype {
  const char *name;        /* reader name. */
  CURLcode (*do_init)(struct Curl_easy *data, struct Curl_creader *reader);
  CURLcode (*do_read)(struct Curl_easy *data, struct Curl_creader *reader,
                      char *buf, size_t blen, size_t *nread, bool *eos);
  void (*do_close)(struct Curl_easy *data, struct Curl_creader *reader);
  bool (*needs_rewind)(struct Curl_easy *data, struct Curl_creader *reader);
  curl_off_t (*total_length)(struct Curl_easy *data,
                             struct Curl_creader *reader);
  CURLcode (*resume_from)(struct Curl_easy *data,
                          struct Curl_creader *reader, curl_off_t offset);
  CURLcode (*rewind)(struct Curl_easy *data, struct Curl_creader *reader);
};

struct Curl_creader {
  const struct Curl_crtype *crt;  /* type implementation */
  struct Curl_creader *next;  /* Downstream reader. */
  Curl_creader_phase phase; /* phase at which it operates */
};
```
`Curl_creader` is a reader instance with a `next` pointer to form the chain. It has a type `crt` which provides the implementation. The main callback is `do_read()` which provides the data to the caller. The others are for setup and tear down. `needs_rewind()` is explained further below.
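For illustration, the `do_read` callback of a hypothetical reader that serves a short fixed string in one go might look like this (setup, tear down and registration omitted):

```
static CURLcode cr_hello_read(struct Curl_easy *data,
                              struct Curl_creader *reader,
                              char *buf, size_t blen,
                              size_t *nread, bool *eos)
{
  static const char hello[] = "hello";
  size_t n = sizeof(hello) - 1;
  (void)data;
  (void)reader;
  if(blen < n)
    n = blen; /* a real reader would remember what remains for later calls */
  memcpy(buf, hello, n);
  *nread = n;
  *eos = (n == sizeof(hello) - 1); /* everything delivered in one go? */
  return CURLE_OK;
}
```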
## Phases and Ordering
Since client readers may transform the data being read through the chain, the order in which they are called is relevant for the outcome. When a reader is created, it gets the `phase` property in which it operates. Reader phases are defined like:
```
typedef enum {
  CURL_CR_NET,  /* data sent to the network (connection filters) */
  CURL_CR_TRANSFER_ENCODE, /* add transfer-encodings */
  CURL_CR_PROTOCOL, /* before transfer, but after content decoding */
  CURL_CR_CONTENT_ENCODE, /* add content-encodings */
  CURL_CR_CLIENT /* data read from client */
} Curl_creader_phase;
```
If a reader for phase `PROTOCOL` is added to the chain, it is always added *after* any `NET` or `TRANSFER_ENCODE` readers and *before* any `CONTENT_ENCODE` and `CLIENT` readers. If there is already a reader for the same phase, the new reader is added before the existing one(s).
### Example: `chunked` reader
In `http_chunks.c` a client reader for chunked uploads is implemented. This one operates at phase `CURL_CR_TRANSFER_ENCODE`. Any data coming from the reader "below" gets the HTTP/1.1 chunk handling applied and is returned to the caller.
When this reader sees an `eos` from below, it generates the terminal chunk, adding trailers if provided by the application. When that last chunk is fully returned, it also sets `eos` to the caller.
### Example: `lineconv` reader
In `sendf.c` a client reader that does line-end conversions is implemented. It operates at `CURL_CR_CONTENT_ENCODE` and converts any "\n" to "\r\n". This is used for FTP ASCII uploads or when the general `crlf` option has been set.
### Example: `null` reader
Implemented in `sendf.c` for phase `CURL_CR_CLIENT`, this reader has the simple job of providing transfer bytes of length 0 to the caller, immediately indicating an `eos`. This reader is installed by HTTP for all GET/HEAD requests and when authentication is being negotiated.
### Example: `buf` reader
Implemented in `sendf.c` for phase `CURL_CR_CLIENT`, this reader gets a buffer pointer and a length and provides exactly these bytes. This one is used in HTTP for sending `postfields` provided by the application.
## Request retries
Sometimes it is necessary to send a request with client data again. Transfer handling can inquire via `Curl_client_read_needs_rewind()` if a rewind (e.g. a reset of the client data) is necessary. This asks all installed readers if they need it and gives `FALSE` if none does.
## Upload Size
Many protocols need to know the amount of bytes delivered by the client readers in advance. They may invoke `Curl_creader_total_length(data)` to retrieve that. However, not all reader chains know the exact value beforehand. In that case, the call returns `-1` for "unknown".
Even if the length of the "raw" data is known, the length that is sent may not be. Example: with option `--crlf` the uploaded content undergoes line-end conversion. The line converting reader does not know in advance how many newlines it may encounter. Therefore it must return `-1` for any positive raw content length.
In HTTP, once the correct client readers are installed, the protocol asks the readers for the total length. If that is known, it can set `Content-Length:` accordingly. If not, it may choose to add an HTTP "chunked" reader.
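That decision could be sketched like this (simplified; the real HTTP code is more involved):

```
curl_off_t req_clen = Curl_creader_total_length(data);
if(req_clen >= 0) {
  /* length known in advance: send a "Content-Length: <req_clen>" header */
}
else {
  /* length unknown: install the "chunked" transfer-encoding reader */
}
```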
In addition, there is `Curl_creader_client_length(data)` which gives the total length as reported by the reader in phase `CURL_CR_CLIENT` without asking other readers that may transform the raw data. This is useful in estimating the size of an upload. The HTTP protocol uses this to determine if `Expect: 100-continue` shall be done.
## Resuming
Uploads can start at a specific offset, if so requested; they then "resume from" that offset. This applies to the reader in phase `CURL_CR_CLIENT` that delivers the "raw" content. Resumption can fail if the installed reader does not support it or if the offset is too large.
The total length reported by the reader changes when resuming. Example: resuming an upload of 100 bytes by 25 reports a total length of 75 afterwards.
If `resume_from()` is invoked twice, it is additive. There is currently no way to undo a resume.
## Rewinding
When a request is retried, installed client readers are discarded and replaced by new ones. This works only if the new readers upload the same data. For many readers, this is not an issue. The "null" reader always does the same. Also the `buf` reader, initialized with the same buffer, does this.
Readers operating on callbacks to the application need to "rewind" the underlying content. For example, when reading from a `FILE*`, the reader needs to `fseek()` to the beginning. The following methods are used:
1. `Curl_creader_needs_rewind(data)`: tells if a rewind is necessary, given the current state of the reader chain. If nothing really has been read so far, this returns `FALSE`.
2. `Curl_creader_will_rewind(data)`: tells if the reader chain rewinds at the start of the next request.
3. `Curl_creader_set_rewind(data, TRUE)`: marks the reader chain for rewinding at the start of the next request.
4. `Curl_client_start(data)`: tells the readers that a new request starts and they need to rewind if requested.
## Summary and Outlook
By adding the client reader interface, any protocol can control how/if it wants the curl transfer to send bytes for a request. The transfer loop then becomes blissfully ignorant of the specifics.
The protocols on the other hand no longer have to care about packaging data most efficiently. At any time, should more data be needed, it can be read from the client. This is used when sending HTTP request headers to add as much request body data to the initial sending as there is room for.
Future enhancements based on the client readers:
* `expect-100` handling: place that into an HTTP specific reader at `CURL_CR_PROTOCOL` and eliminate the checks in the generic transfer parts.
* `eos forwarding`: transfer should forward an `eos` flag to the connection filters. Filters like HTTP/2 and HTTP/3 can make use of that, terminating streams early. This would also eliminate length checks in stream handling.
deps/curl/docs/CLIENT-WRITERS.md vendored Normal file
View File
@ -0,0 +1,117 @@
# curl client writers
Client writers is a design in the internals of libcurl, not visible in its public API. They were started
in curl v8.5.0. This document describes the concepts, their high level implementation and the motivations.
## Naming
`libcurl` operates between clients and servers. A *client* is the application using libcurl, like the command line tool `curl` itself. Data to be uploaded to a server is **read** from the client and **sent** to the server; the server's response is **received** by `libcurl` and then **written** to the client.
With this naming established, client writers are concerned with writing responses from the server to the application. Applications register callbacks via `CURLOPT_WRITEFUNCTION` and `CURLOPT_HEADERFUNCTION` to be invoked by `libcurl` when the response is received.
## Invoking
All code in `libcurl` that handles response data is ultimately expected to forward this data via `Curl_client_write()` to the application. The exact prototype of this function is:
```
CURLcode Curl_client_write(struct Curl_easy *data, int type, const char *buf, size_t blen);
```
The `type` argument specifies what the bytes in `buf` actually are. The following bits are defined:
```
#define CLIENTWRITE_BODY (1<<0) /* non-meta information, BODY */
#define CLIENTWRITE_INFO (1<<1) /* meta information, not a HEADER */
#define CLIENTWRITE_HEADER (1<<2) /* meta information, HEADER */
#define CLIENTWRITE_STATUS (1<<3) /* a special status HEADER */
#define CLIENTWRITE_CONNECT (1<<4) /* a CONNECT related HEADER */
#define CLIENTWRITE_1XX (1<<5) /* a 1xx response related HEADER */
#define CLIENTWRITE_TRAILER (1<<6) /* a trailer HEADER */
```
The main types here are `CLIENTWRITE_BODY` and `CLIENTWRITE_HEADER`. They are
mutually exclusive. The other bits are enhancements to `CLIENTWRITE_HEADER` to
specify what the header is about. They are only used in HTTP and related
protocols (RTSP and WebSocket).
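For example, a protocol handler that wants to pass a piece of response body to the application forwards it like this (sketch):

```
CURLcode result = Curl_client_write(data, CLIENTWRITE_BODY, buf, blen);
if(result)
  return result; /* e.g. the application callback signaled an error */
```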
The implementation of `Curl_client_write()` uses a chain of *client writer* instances to process the call and make sure that the bytes reach the proper application callbacks. This is similar to the design of connection filters: client writers can be chained to process the bytes written through them. The definition is:
```
struct Curl_cwtype {
  const char *name;
  CURLcode (*do_init)(struct Curl_easy *data,
                      struct Curl_cwriter *writer);
  CURLcode (*do_write)(struct Curl_easy *data,
                       struct Curl_cwriter *writer, int type,
                       const char *buf, size_t nbytes);
  void (*do_close)(struct Curl_easy *data,
                   struct Curl_cwriter *writer);
};

struct Curl_cwriter {
  const struct Curl_cwtype *cwt;  /* type implementation */
  struct Curl_cwriter *next;  /* Downstream writer. */
  Curl_cwriter_phase phase; /* phase at which it operates */
};
```
`Curl_cwriter` is a writer instance with a `next` pointer to form the chain. It has a type `cwt` which provides the implementation. The main callback is `do_write()` that processes the data and then calls the `next` writer. The others are for setup and tear down.
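To illustrate, a hypothetical pass-through writer that merely counts body bytes could implement `do_write` like this (assuming a helper `Curl_cwriter_write()` that hands data to a given writer, as in curl's `sendf.h`; details simplified):

```
struct cw_count_ctx {
  struct Curl_cwriter super; /* instance data placed first, by convention */
  curl_off_t body_bytes;
};

static CURLcode cw_count_write(struct Curl_easy *data,
                               struct Curl_cwriter *writer, int type,
                               const char *buf, size_t blen)
{
  struct cw_count_ctx *ctx = (struct cw_count_ctx *)writer;
  if(type & CLIENTWRITE_BODY)
    ctx->body_bytes += (curl_off_t)blen;
  /* pass everything on, unchanged, to the next writer in the chain */
  return Curl_cwriter_write(data, writer->next, type, buf, blen);
}
```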
## Phases and Ordering
Since client writers may transform the bytes written through them, the order in which they are called is relevant for the outcome. When a writer is created, one property it gets is the `phase` in which it operates. Writer phases are defined like:
```
typedef enum {
  CURL_CW_RAW,  /* raw data written, before any decoding */
  CURL_CW_TRANSFER_DECODE, /* remove transfer-encodings */
  CURL_CW_PROTOCOL, /* after transfer, but before content decoding */
  CURL_CW_CONTENT_DECODE, /* remove content-encodings */
  CURL_CW_CLIENT /* data written to client */
} Curl_cwriter_phase;
```
If a writer for phase `PROTOCOL` is added to the chain, it is always added *after* any `RAW` or `TRANSFER_DECODE` and *before* any `CONTENT_DECODE` and `CLIENT` phase writer. If there is already a writer for the same phase present, the new writer is inserted just before that one.
All transfers have a chain of 3 writers by default. A specific protocol handler may alter that by adding additional writers. The 3 standard writers are (name, phase):
1. `"raw", CURL_CW_RAW `: if the transfer is verbose, it forwards the body data to the debug function.
1. `"download", CURL_CW_PROTOCOL`: checks that protocol limits are kept and updates progress counters. When a download has a known length, it checks that it is not exceeded and errors otherwise.
1. `"client", CURL_CW_CLIENT`: the main work horse. It invokes the application callbacks or writes to the configured file handles. It chops large writes into smaller parts, as documented for `CURLOPT_WRITEFUNCTION`. If also handles *pausing* of transfers when the application callback returns `CURL_WRITEFUNC_PAUSE`.
With these writers always in place, libcurl's protocol handlers automatically have these implemented.
## Enhanced Use
HTTP is the protocol in curl that makes use of the client writer chain by
adding writers to it. When the `libcurl` application sets
`CURLOPT_ACCEPT_ENCODING` (as `curl` does with `--compressed`), the server is
offered an `Accept-Encoding` header with the algorithms supported. The server
then may choose to send the response body compressed. For example using `gzip`
or `brotli` or even both.
The server's response may then carry a `Content-Encoding` header listing the
encodings applied. If supported by `libcurl`, it then decompresses the content
before writing it out to the client. How does it do that?
The HTTP protocol adds client writers in phase `CURL_CW_CONTENT_DECODE` on
seeing such a header. For each encoding listed, it adds the corresponding
writer. The response from the server is then passed through
`Curl_client_write()` to the writers that decode it. If several encodings had
been applied the writer chain decodes them in the proper order.
When the server provides a `Content-Length` header, that value applies to the
*compressed* content. Length checks on the response bytes must happen *before*
it gets decoded. That is why this check happens in phase `CURL_CW_PROTOCOL`
which always is ordered before writers in phase `CURL_CW_CONTENT_DECODE`.
What else?
Well, HTTP servers may also apply a `Transfer-Encoding` to the body of a response. The most well-known one is `chunked`, but algorithms like `gzip` and friends could also be applied. The difference to content encodings is that decoding needs to happen *before* protocol checks, for example on length, are done.
That is why transfer decoding writers are added for phase `CURL_CW_TRANSFER_DECODE`. Which makes their operation happen *before* phase `CURL_CW_PROTOCOL` where length may be checked.
## Summary
By adding the common behavior of all protocols into `Curl_client_write()` we make sure that it applies everywhere. Protocol handlers have less to worry about. Changes to default behavior can be done without affecting handler implementations.
Having a writer chain as implementation allows protocol handlers with extra needs, like HTTP, to add to this for special behavior. The common way of writing the actual response data stays the same.
View File
@ -22,5 +22,9 @@
#
###########################################################################
#add_subdirectory(examples)
if(BUILD_LIBCURL_DOCS)
  add_subdirectory(libcurl)
endif()
if(ENABLE_CURL_MANUAL AND BUILD_CURL_EXE)
  add_subdirectory(cmdline-opts)
endif()
View File
@ -19,7 +19,7 @@ particularly unusual rules in our set of rules.
We also work hard on writing code that is warning-free on all the major
platforms and in general on as many platforms as possible. Code that obviously
causes warnings is not accepted as-is.
## Naming
@ -218,10 +218,10 @@ int size = sizeof(int);
Some statements cannot be completed on a single line because the line would be
too long, the statement too hard to read, or due to other style guidelines
above. In such a case the statement spans multiple lines.
If a continuation line is part of an expression or sub-expression then you
should align on the appropriate column so that it is easy to tell what part of
the statement it is. Operators should not start continuation lines. In other
cases follow the 2-space indent guideline. Here are some examples from
libcurl:
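A made-up call in that spirit, aligning the continuation on the opening
parenthesis:

```
result = some_function(argument_one, argument_two,
                       argument_three, argument_four);
```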
View File
@ -1,16 +1,26 @@
# curl connection filters
Connection filters is a design in the internals of curl, not visible in its
public API. They were added in curl v7.87.0. This document describes the
concepts, their high level implementation and the motivations.
## Filters
A "connection filter" is a piece of code that is responsible for handling a range of operations
of curl's connections: reading, writing, waiting on external events, connecting and closing down - to name the most important ones.
A "connection filter" is a piece of code that is responsible for handling a
range of operations of curl's connections: reading, writing, waiting on
external events, connecting and closing down - to name the most important
ones.
The most important feat of connection filters is that they can be stacked on
top of each other (or "chained" if you prefer that metaphor). In the common
scenario that you want to retrieve a `https:` URL with curl, you need 2 basic
things to send the request and get the response: a TCP connection, represented
by a `socket`, and an SSL instance to en- and decrypt over that socket. You
write your request to the SSL instance, which encrypts and writes that data to
the socket, which then sends the bytes over the network.
With connection filters, curl's internal setup looks something like this (cf
for connection filter):
```
Curl_easy *data connectdata *conn cf-ssl cf-socket
---> conn->filter->write(conn->filter, data, buffer)
```
While connection filters all do different things, they look the same from the
"outside". The code in `data` and `conn` does not really know **which**
filters are installed. `conn` just writes into the first filter, whatever that
is.
The same is true for filters. Each filter has a pointer to the `next` filter.
When SSL has encrypted the data, it does not write to a socket, it writes to
the next filter. Whether that is indeed a socket, or a file, or an HTTP/2
connection, is of no concern to the SSL filter.
This allows stacking, as in:
```
Direct:
@ -45,7 +61,12 @@ Via http proxy tunnel via SOCKS proxy:
### Connecting/Closing
Before `Curl_easy` can send the request, the connection needs to be
established. This means that all connection filters have done whatever they
need to do: waiting for the socket to be connected, doing the TLS handshake,
performing the HTTP tunnel request, etc. This has to be done in reverse order:
the last filter has to do its connect first, then the one above can start,
etc.
Each filter does in principle the following:
@ -72,12 +93,14 @@ myfilter_cf_connect(struct Curl_cfilter *cf,
}
```
Closing a connection then works similarly. The `conn` tells the first filter
to close. Contrary to connecting, the filter does its own things first, before
telling the next filter to close.
### Efficiency
There are two things curl is concerned about: efficient memory use and fast
transfers.
The memory footprint of a filter is relatively small:
@ -91,13 +114,24 @@ struct Curl_cfilter {
BIT(connected); /* != 0 iff this filter is connected */
};
```
The filter type `cft` is a singleton, one static struct for each type of
filter. The `ctx` is where a filter holds its specific data. That varies by
filter type. An http-proxy filter keeps the ongoing state of the CONNECT here,
and frees it after the tunnel has been established. The SSL filter keeps the
`SSL*` (if OpenSSL is used) here until the connection is closed. So, this
varies.

`conn` is a reference to the connection this filter belongs to, so nothing
extra besides the pointer itself.

Several things that were previously kept in `struct connectdata` now go into
the `filter->ctx` *when needed*. So, the memory footprint for connections that
do *not* use an http proxy, or socks, or https is lower.

As to transfer efficiency, writing and reading through a filter comes at near
zero cost *if the filter does not transform the data*. An http proxy or socks
filter, once it is connected, just passes the calls through. Those filter
implementations look like this:
```
ssize_t Curl_cf_def_send(struct Curl_cfilter *cf, struct Curl_easy *data,
@ -110,18 +144,159 @@ The `recv` implementation is equivalent.
## Filter Types
The currently existing filter types (curl 8.5.0) are:
* `TCP`, `UDP`, `UNIX`: filters that operate on a socket, providing raw I/O.
* `SOCKET-ACCEPT`: a special TCP socket filter for a socket that has been
  `accept()`ed on a listening socket
* `SSL`: filter that applies TLS en-/decryption and handshake. Manages the
underlying TLS backend implementation.
* `HTTP-PROXY`, `H1-PROXY`, `H2-PROXY`: the first manages the connection to an
HTTP proxy server and uses the other depending on which ALPN protocol has
been negotiated.
* `SOCKS-PROXY`: filter for the various SOCKS proxy protocol variations
* `HAPROXY`: filter for the protocol of the same name, providing client IP
information to a server.
* `HTTP/2`: filter for handling multiplexed transfers over an HTTP/2
connection
* `HTTP/3`: filter for handling multiplexed transfers over an HTTP/3+QUIC
connection
* `HAPPY-EYEBALLS`: meta filter that implements IPv4/IPv6 "happy eyeballing".
It creates up to 2 sub-filters that race each other for a connection.
* `SETUP`: meta filter that manages the creation of sub-filter chains for a
specific transport (e.g. TCP or QUIC).
* `HTTPS-CONNECT`: meta filter that races a TCP+TLS and a QUIC connection
against each other to determine if HTTP/1.1, HTTP/2 or HTTP/3 shall be used
for a transfer.
Meta filters combine other filters for a specific purpose, mostly during
connection establishment. Other filters like `TCP`, `UDP` and `UNIX` are only
to be found at the end of filter chains. SSL filters provide encryption, of
course. Protocol filters change the bytes sent and received.
## Filter Flags

Filter types carry flags that inform what they do. These are (for now):
* `CF_TYPE_IP_CONNECT`: this filter type talks directly to a server. This does
not have to be the server the transfer wants to talk to. For example when a
proxy server is used.
* `CF_TYPE_SSL`: this filter type provides encryption.
* `CF_TYPE_MULTIPLEX`: this filter type can manage multiple transfers in parallel.
Filter types can combine these flags. For example, the HTTP/3 filter types
have `CF_TYPE_IP_CONNECT`, `CF_TYPE_SSL` and `CF_TYPE_MULTIPLEX` set.
Flags are useful to extrapolate properties of a connection. To check if a
connection is encrypted, libcurl inspects the filter chain in place, top down,
for `CF_TYPE_SSL`. If it finds `CF_TYPE_IP_CONNECT` before any `CF_TYPE_SSL`,
the connection is not encrypted.
For example, `conn1` is for a `http:` request using a tunnel through an HTTP/2
`https:` proxy. `conn2` is a `https:` HTTP/2 connection to the same proxy.
`conn3` uses HTTP/3 without proxy. The filter chains would look like this
(simplified):
```
conn1 --> `HTTP-PROXY` --> `H2-PROXY` --> `SSL` --> `TCP`
flags: `IP_CONNECT` `SSL` `IP_CONNECT`
conn2 --> `HTTP/2` --> `SSL` --> `HTTP-PROXY` --> `H2-PROXY` --> `SSL` --> `TCP`
flags: `SSL` `IP_CONNECT` `SSL` `IP_CONNECT`
conn3 --> `HTTP/3`
flags: `SSL|IP_CONNECT`
```
Inspecting the filter chains, `conn1` is seen as unencrypted, since it
contains an `IP_CONNECT` filter before any `SSL`. `conn2` is clearly encrypted
as an `SSL` flagged filter is seen first. `conn3` is also encrypted as the
`SSL` flag is checked before the presence of `IP_CONNECT`.
Similar checks can determine if a connection is multiplexed or not.
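Such a check could be sketched like this (field names assumed from curl's
`cfilters.h`):

```
static bool conn_is_encrypted(struct connectdata *conn, int sockindex)
{
  struct Curl_cfilter *cf;
  for(cf = conn->cfilter[sockindex]; cf; cf = cf->next) {
    if(cf->cft->flags & CF_TYPE_SSL)
      return TRUE;  /* encryption found above any server link */
    if(cf->cft->flags & CF_TYPE_IP_CONNECT)
      return FALSE; /* talking to a server with no encryption above it */
  }
  return FALSE;
}
```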
## Filter Tracing
Filters may make use of special trace macros like `CURL_TRC_CF(data, cf, msg,
...)`, with `data` being the transfer and `cf` being the filter instance.
These traces are normally not active and their execution is guarded so that
they are cheap to ignore.
Users of `curl` may activate them by adding the name of the filter type to the
`--trace-config` argument. For example, in order to get more detailed tracing
of an HTTP/2 request, invoke curl with:
```
> curl -v --trace-config ids,time,http/2 https://curl.se
```
This gives you trace output with time information, transfer+connection ids
and details from the `HTTP/2` filter. Filter type names in the trace config
are case insensitive. You may use `all` to enable tracing for all filter
types. When using `libcurl` you may call `curl_global_trace(config_string)` at
the start of your application to enable filter details.
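In code, that is a single call made before any transfers start:

```
/* enable verbose details from the HTTP/2 filter, plus ids and timestamps */
curl_global_trace("ids,time,http/2");
```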
## Meta Filters
Meta filters is a catch-all name for filter types that do not change the
transfer data in any way but provide other important services to curl. In
general, it is possible to do all sorts of silly things with them. One of the
commonly used, important things is "eyeballing".
The `HAPPY-EYEBALLS` filter is involved in the connect phase. Its job is to
try the various IPv4 and IPv6 addresses that are known for a server. If only
one address family is known (or configured), it tries the addresses one after
the other with timeouts calculated from the amount of addresses and the
overall connect timeout.
When more than one address family is to be tried, it splits the address list
into IPv4 and IPv6 and makes parallel attempts. The connection filter chain
looks like this:
```
* create connection for http://curl.se
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
* start connect
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
- ballerv4 --> TCP[151.101.1.91]:443
- ballerv6 --> TCP[2a04:4e42:c00::347]:443
* v6 answers, connected
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> TCP[2a04:4e42:c00::347]:443
* transfer
```
The modular design of connection filters, and the fact that we can plug them into each other, is used to control the parallel attempts. When a `TCP` filter does not connect (in time), it is torn down and another one is created for the next address. This keeps the `TCP` filter simple.
The `HAPPY-EYEBALLS` filter on the other hand stays focused on its side of the problem. We can also use it to make other types of connections by just giving it another filter type to try, for example to have happy eyeballing for QUIC:
```
* create connection for --http3-only https://curl.se
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
* start connect
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
- ballerv4 --> HTTP/3[151.101.1.91]:443
- ballerv6 --> HTTP/3[2a04:4e42:c00::347]:443
* v6 answers, connected
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> HTTP/3[2a04:4e42:c00::347]:443
* transfer
```
When we plug these two variants together, we get the `HTTPS-CONNECT` filter
type that is used for `--http3` when **both** HTTP/3 and HTTP/2 or HTTP/1.1
shall be attempted:
```
* create connection for --http3 https://curl.se
conn[curl.se] --> HTTPS-CONNECT --> NULL
* start connect
conn[curl.se] --> HTTPS-CONNECT --> NULL
- SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
- ballerv4 --> HTTP/3[151.101.1.91]:443
- ballerv6 --> HTTP/3[2a04:4e42:c00::347]:443
- SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
- ballerv4 --> TCP[151.101.1.91]:443
- ballerv6 --> TCP[2a04:4e42:c00::347]:443
* v4 QUIC answers, connected
conn[curl.se] --> HTTPS-CONNECT --> SETUP[QUIC] --> HAPPY-EYEBALLS --> HTTP/3[151.101.1.91]:443
* transfer
```
View File
@ -35,14 +35,14 @@ must use "GPL compatible" licenses (as we want to allow users to use libcurl
properly in GPL licensed environments).
When changing existing source code, you do not alter the copyright of the
original file(s). The copyright is still owned by the original creator(s) or
those who have been assigned copyright by the original author(s).
By submitting a patch to the curl project, you are assumed to have the right
to the code and to be allowed by your employer or whatever to hand over that
patch/code to us. We credit you for your changes as far as possible, to give
credit but also to keep a trace back to who made what changes. Please always
provide us with your full real name when contributing.
## What To Read
@ -50,10 +50,10 @@ Source code, the man pages, the [INTERNALS
document](https://curl.se/dev/internals.html),
[TODO](https://curl.se/docs/todo.html),
[KNOWN_BUGS](https://curl.se/docs/knownbugs.html) and the [most recent
changes](https://curl.se/dev/sourceactivity.html) in git. Just lurking on the
[curl-library mailing list](https://curl.se/mail/list.cgi?list=curl-library)
gives you a lot of insights on what's going on right now. Asking there is a
good idea too.
## Write a good patch
@ -101,22 +101,23 @@ archive is quite OK as well.
### Documentation
Writing docs is dead boring and one of the big problems with many open source
projects but someone's gotta do it. It makes things a lot easier if you submit
a small description of your fix or your new features with every contribution
so that it can be swiftly added to the package documentation.
Documentation is mostly provided as manpages or plain ASCII files. The
manpages are rendered from their source files that are usually written using
markdown. Most HTML files on the website and in the release archives are
generated from corresponding markdown and ASCII files.
### Test Cases
Since the introduction of the test suite, we can quickly verify that the main
features are working as they are supposed to. To maintain this situation and
improve it, all new features and functions that are added need to be tested in
the test suite. Every feature that is added should get at least one valid test
case that verifies that it works as documented. If every submitter also posts
a few test cases, it does not end up as a heavy burden on a single person.
If you do not have test cases or perhaps you have done something that is hard
to write tests for, do explain exactly how you have otherwise tested and
@ -131,19 +132,19 @@ GitHub](https://github.com/curl/curl/pulls), but you can also send your plain
patch to [the curl-library mailing
list](https://curl.se/mail/list.cgi?list=curl-library).
If you opt to post a patch on the mailing list, chances are someone converts
it into a pull request for you, to have the CI jobs verify it properly before
it can be merged. Be prepared that some feedback on the proposed change might
then come on GitHub.
Your changes are reviewed and discussed and you are expected to correct flaws
pointed out and update accordingly, or the change risks stalling and
eventually just getting deleted without action. As a submitter of a change,
you are the owner of that change until it has been merged.
Respond on the list or on GitHub about the change and answer questions and/or
fix nits/flaws. This is important. We take lack of replies as a sign that you
are not anxious to get your patch accepted and we tend to simply drop such
changes.
### About pull requests
@ -157,7 +158,7 @@ git commit that is easy to merge and they are easy to track and not that easy
to lose in the flood of many emails, like they sometimes do on the mailing
lists.
Every pull request submitted is automatically tested in several different
ways. [See the CI document for more
information](https://github.com/curl/curl/blob/master/tests/CI.md).
@ -219,10 +220,10 @@ A short guide to how to write git commit messages in the curl project.
has already been closed]
[Ref: URL to more information about the commit; use Bug: instead for
a reference to a bug on another bug tracker]
[Fixes #1234 - if this closes a GitHub issue; GitHub closes the issue once
this commit is merged]
[Closes #1234 - if this closes a GitHub PR; GitHub closes the PR once this
commit is merged]
---- stop ----
The first line is a succinct description of the change:
@ -240,18 +241,18 @@ make sure that you have your own user and email setup correctly in git before
you commit.
Add whichever header lines as appropriate, with one line per person if more
than one person was involved. There is no need to credit yourself unless you
are using --author=... which hides your identity. Do not include people's
email addresses in headers to avoid spam, unless they are already public from
a previous commit; saying `{userid} on github` is OK.
### Write Access to git Repository
If you are a frequent contributor, you may be given push access to the git
repository and then you are able to push your changes straight into the git
repo instead of sending changes as pull requests or by mail as patches.
Just ask if this is what you would want. You are required to have posted
several high quality patches first, before you can be granted push access.
### How To Make a Patch with git
@ -302,9 +303,9 @@ all kinds of Unixes and Windows.
## Update copyright and license information
There is a CI job called **REUSE compliance / check** that will run on every
pull request and commit to verify that the *REUSE state* of all files are
still fine.
There is a CI job called **REUSE compliance / check** that runs on every pull
request and commit to verify that the *REUSE state* of all files is still
fine.
This means that all files need to have their license and copyright information
clearly stated. Ideally by having the standard curl source code header, with
153
deps/curl/docs/CURLDOWN.md vendored Normal file
View File
@ -0,0 +1,153 @@
# curldown
A markdown-like syntax for libcurl man pages.
## Purpose
A text format for writing libcurl documentation in the shape of man pages.
Make it easier for users to contribute and write documentation. A format that
is easier on the eye in its source format.
Make it harder to make syntactical mistakes.
Use a format that allows creating man pages that end up looking exactly like
the man pages did when we wrote them in nroff format.
Take advantage of the fact that people these days are accustomed to markdown
by using a markdown-like syntax.
This allows us to fix issues in the nroff format more easily since we now
generate those files. For example: escaping minus characters to prevent them
from being turned into Unicode by man.
Generate nroff output that looks (next to) *identical* to the previous files,
so that the look, existing test cases, HTML conversions, existing
infrastructure etc remain mostly intact.
Contains meta-data in a structured way to allow better output (for example the
see also information) and general awareness of what the file is about.
## File extension
Since curldown looks similar to markdown, we use `.md` extensions on the
files.
## Conversion
Convert **from curldown to nroff** with `cd2nroff`. Generates nroff man pages.
Convert **from nroff to curldown** with `nroff2cd`. This is only meant to be
used for the initial conversion to curldown and should ideally never be needed
again.
Convert, check or clean up an existing curldown to nicer, better, cleaner
curldown with **cd2cd**.
Mass-convert all curldown files to nroff in specified directories with
`cdall`:
cdall [dir1] [dir2] [dir3] ..
## Known issues
The `cd2nroff` tool does not yet handle *italics* or **bold** where the start
and the end markers are used on separate lines.
The `nroff2cd` tool generates code style quotes for all `.fi` sections since
the nroff format does not carry a distinction.
# Format
Each curldown starts with a header with meta-data:
---
c: Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
SPDX-License-Identifier: curl
Title: CURLOPT_AWS_SIGV4
Section: 3
Source: libcurl
Protocol:
- HTTP
See-also:
- CURLOPT_HEADEROPT (3)
- CURLOPT_HTTPAUTH (3)
TLS-backend:
- [name]
---
All curldown files *must* have all the headers present and at least one
`See-also:` entry specified.
If the man page is for section 3 (library related), the `Protocol` list must
contain at least one protocol, which can be `*` if the option is virtually for
everything. If `*` is used, it must be the only listed protocol. Recognized
protocols are either URL schemes (in uppercase), `TLS` or `TCP`.
If the `Protocol` list contains `TLS`, then there must also be a `TLS-backend`
list, specifying `All` or a list of the TLS backends that work with this
option. The available TLS backends are:
- `BearSSL`
- `GnuTLS`
- `mbedTLS`
- `OpenSSL` (also covers BoringSSL, libressl, quictls, AWS-LC and AmiSSL)
- `rustls`
- `Schannel`
- `Secure Transport`
- `wolfSSL`
- `All`: all TLS backends
Following the header in the file is the manual page using markdown-like
syntax:
~~~
# NAME
a page - this is a page describing something
# SYNOPSIS
~~~c
#include <curl/curl.h>
CURLcode curl_easy_setopt(CURL *handle, CURLOPT_AWS_SIGV4, char *param);
~~~
~~~
Quoted source code should start with `~~~c` and end with `~~~` while regular
quotes can start with `~~~` or just be indented with 4 spaces.
Headers at top-level `#` get converted to `.SH`.
`nroff2cd` supports the `##` next level header which gets converted to `.IP`.
Write bold words or phrases within `**` like:
This is a **bold** word.
Write italics like:
This is *italics*.
Since man pages cannot render backticks with special formatting, such
occurrences in the source are instead rendered in italics in the generated
output:
This `word` appears in italics.
When generating the nroff output, the tooling removes superfluous newlines,
meaning they can be used freely in the source file to make the text more
readable.
To make sure curldown documents render correctly as markdown, all literal
occurrences of `<` or `>` need to be escaped by a leading backslash.
## symbols
All mentioned curl symbols that have their own man pages, like
`curl_easy_perform(3)` are automatically rendered using italics in the output
without having to enclose them with asterisks. This helps ensure that they
get converted to links properly later in the HTML version on the website, as
converted with `roffit`. This makes the curldown text easier to read even when
mentioning many curl symbols.
This auto-linking works for patterns matching `(lib|)curl[^ ]*(3)`.
View File
@ -10,12 +10,16 @@ how your use case cannot be satisfied properly using a workaround.
This NTLM authentication method is powered by a separate tool,
`ntlm_auth`. Barely anyone uses this method. It was always a quirky
implementation (including fork + exec), it has limited portability and we
don't test it in the test suite and CI.
implementation (including fork + exec), it has limited portability and we do
not test it in the test suite and CI.
We keep the native NTLM implementation.
curl will remove the support for NTLM_WB auth in June 2024.
Due to a mistake, the `NTLM_WB` functionality is missing in builds since 8.4.0
(October 2023). It needs to be manually patched to work. See [PR
12479](https://github.com/curl/curl/pull/12479).
curl removes the support for NTLM_WB auth in April 2024.
## space-separated `NOPROXY` patterns
@ -34,7 +38,7 @@ variable but do not consider a space to be a valid separator. Using spaces for
separator is probably less portable and might cause more friction than commas
do. Users should use commas for this for greater portability.
curl will remove the support for space-separated names in July 2024.
curl removes the support for space-separated names in July 2024.
## past removals
233
deps/curl/docs/DISTROS.md vendored Normal file
View File
@ -0,0 +1,233 @@
# curl distros
<!-- markdown-link-check-disable -->
Lots of organizations distribute curl packages to end users. This is a
collection of pointers to where to learn more about curl on and with each
distro.
We discuss curl distro issues, patches and collaboration on the [curl-distros
mailing list](https://lists.haxx.se/listinfo/curl-distros).
## AlmaLinux
- curl package source and patches: curl package source and patches
- curl issues: https://bugs.almalinux.org/view_all_bug_page.php click Category and choose curl
- curl security: https://errata.almalinux.org/ search for curl
## Alpine Linux
- curl: https://pkgs.alpinelinux.org/package/edge/main/x86_64/curl
- curl issues: https://gitlab.alpinelinux.org/alpine/aports/-/issues
- curl security: https://security.alpinelinux.org/srcpkg/curl
- curl package source and patches: https://gitlab.alpinelinux.org/alpine/aports/-/tree/master/main/curl
## Alt Linux
- curl: http://www.sisyphus.ru/srpm/Sisyphus/curl
- curl patches: http://www.sisyphus.ru/ru/srpm/Sisyphus/curl/patches
- curl issues: http://www.sisyphus.ru/ru/srpm/Sisyphus/curl/bugs
## Arch Linux
- curl: https://archlinux.org/packages/core/x86_64/curl/
- curl issues: https://gitlab.archlinux.org/archlinux/packaging/packages/curl/-/issues
- curl security: https://security.archlinux.org/package/curl
- curl wiki: https://wiki.archlinux.org/title/CURL
## Buildroot
- curl package source and patches: https://git.buildroot.net/buildroot/tree/package/libcurl
- curl issues: https://bugs.buildroot.org/buglist.cgi?quicksearch=curl
## Chimera
- curl package source and patches: https://github.com/chimera-linux/cports/tree/master/main/curl
## Clear Linux
- curl: https://github.com/clearlinux-pkgs/curl
- curl issues: https://github.com/clearlinux/distribution/issues
## Conan
- curl: https://github.com/conan-io/conan-center-index/tree/master/recipes/libcurl
- curl issues: https://github.com/conan-io/conan-center-index/issues
- curl patches: https://github.com/conan-io/conan-center-index/tree/master/recipes/libcurl (in `all/patches/*`, if any)
## conda-forge
- curl: https://github.com/conda-forge/curl-feedstock
- curl issues: https://github.com/conda-forge/curl-feedstock/issues
## CRUX
- curl: https://crux.nu/portdb/?a=search&q=curl
- curl issues: https://git.crux.nu/ports/core/issues/?type=all&state=open&q=curl
## curl-for-win
(this is the official curl binaries for Windows shipped by the curl project)
- curl: https://curl.se/windows/
Issues and patches for this are managed in the main curl project.
## Cygwin
- curl: https://cygwin.com/cgit/cygwin-packages/curl/tree/curl.cygport
- curl patches: https://cygwin.com/cgit/cygwin-packages/curl/tree
## Debian
- curl: https://tracker.debian.org/pkg/curl
- curl issues: https://bugs.debian.org/cgi-bin/pkgreport.cgi?pkg=curl
- curl patches: https://udd.debian.org/patches.cgi?src=curl
- curl patches: https://salsa.debian.org/debian/curl (in debian/* branches, inside the folder debian/patches)
## Fedora
- curl: https://src.fedoraproject.org/rpms/curl
- curl issues: [bugzilla](https://bugzilla.redhat.com/buglist.cgi?bug_status=NEW&bug_status=ASSIGNED&classification=Fedora&product=Fedora&product=Fedora%20EPEL&component=curl)
- curl patches: [list of patches in package git](https://src.fedoraproject.org/rpms/curl/tree/rawhide)
## FreeBSD
- curl: https://cgit.freebsd.org/ports/tree/ftp/curl
- curl patches: https://cgit.freebsd.org/ports/tree/ftp/curl
- curl issues: https://bugs.freebsd.org/bugzilla/buglist.cgi?bug_status=__open__&order=Importance&product=Ports%20%26%20Packages&query_format=advanced&short_desc=curl&short_desc_type=allwordssubstr
## Gentoo Linux
- curl: https://packages.gentoo.org/packages/net-misc/curl
- curl issues: https://bugs.gentoo.org/buglist.cgi?quicksearch=net-misc/curl
- curl package sources and patches: https://gitweb.gentoo.org/repo/gentoo.git/tree/net-misc/curl/
## GNU Guix
- curl: https://git.savannah.gnu.org/gitweb/?p=guix.git;a=blob;f=gnu/packages/curl.scm;hb=HEAD
- curl issues: https://issues.guix.gnu.org/search?query=curl
## Homebrew
- curl: https://formulae.brew.sh/formula/curl
Homebrew's policy is that all patches and issues should be submitted upstream
unless they are very specific to Homebrew's way of packaging software.
## MacPorts
- curl: https://github.com/macports/macports-ports/tree/master/net/curl
- curl issues: https://trac.macports.org/query?0_port=curl&0_port_mode=%7E&0_status=%21closed
- curl patches: https://github.com/macports/macports-ports/tree/master/net/curl/files
## Mageia
- curl: https://svnweb.mageia.org/packages/cauldron/curl/current/SPECS/curl.spec?view=markup
- curl issues: https://bugs.mageia.org/buglist.cgi?bug_status=NEW&bug_status=UNCONFIRMED&bug_status=NEEDINFO&bug_status=UPSTREAM&bug_status=ASSIGNED&component=RPM%20Packages&f1=cf_rpmpkg&list_id=176576&o1=casesubstring&product=Mageia&query_format=advanced&v1=curl
- curl patches: https://svnweb.mageia.org/packages/cauldron/curl/current/SOURCES/
- curl patches in stable distro releases: https://svnweb.mageia.org/packages/updates/<STABLE_VERSION>/curl/current/SOURCES/
- curl security: https://advisories.mageia.org/src_curl.html
## MSYS2
- curl: https://github.com/msys2/MINGW-packages/tree/master/mingw-w64-curl
- curl issues: https://github.com/msys2/MINGW-packages/issues
- curl patches: https://github.com/msys2/MINGW-packages/tree/master/mingw-w64-curl (`*.patch`)
## Muldersoft
- curl: https://github.com/lordmulder/cURL-build-win32
- curl issues: https://github.com/lordmulder/cURL-build-win32/issues
- curl patches: https://github.com/lordmulder/cURL-build-win32/tree/master/patch
## NixOS
- curl: https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/networking/curl/default.nix
- curl issues: https://github.com/NixOS/nixpkgs
nixpkgs is the package repository used by the NixOS Linux distribution, but
can also be used on other distributions
## OmniOS
- curl: https://github.com/omniosorg/omnios-build/tree/master/build/curl
- curl issues: https://github.com/omniosorg/omnios-build/issues
- curl patches: https://github.com/omniosorg/omnios-build/tree/master/build/curl/patches
## OpenIndiana
- curl: https://github.com/OpenIndiana/oi-userland/tree/oi/hipster/components/web/curl
- curl issues: https://www.illumos.org/projects/openindiana/issues
- curl patches: https://github.com/OpenIndiana/oi-userland/tree/oi/hipster/components/web/curl/patches
## OpenSUSE
- curl source and patches: https://build.opensuse.org/package/show/openSUSE%3AFactory/curl
## Oracle Solaris
- curl: https://github.com/oracle/solaris-userland/tree/master/components/curl
- curl issues: https://support.oracle.com/ (requires support contract)
- curl patches: https://github.com/oracle/solaris-userland/tree/master/components/curl/patches
## OpenEmbedded / Yocto Project
- curl: https://layers.openembedded.org/layerindex/recipe/5765/
- curl issues: https://bugzilla.yoctoproject.org/
- curl patches: https://git.openembedded.org/openembedded-core/tree/meta/recipes-support/curl
## PLD Linux
- curl package source and patches: https://github.com/pld-linux/curl
- curl issues: https://bugs.launchpad.net/pld-linux?field.searchtext=curl&search=Search&field.status%3Alist=NEW&field.status%3Alist=INCOMPLETE_WITH_RESPONSE&field.status%3Alist=INCOMPLETE_WITHOUT_RESPONSE&field.status%3Alist=CONFIRMED&field.status%3Alist=TRIAGED&field.status%3Alist=INPROGRESS&field.status%3Alist=FIXCOMMITTED&field.assignee=&field.bug_reporter=&field.omit_dupes=on&field.has_patch=&field.has_no_package=
## pkgsrc
- curl: https://github.com/NetBSD/pkgsrc/tree/trunk/www/curl
- curl issues: https://github.com/NetBSD/pkgsrc/issues
- curl patches: https://github.com/NetBSD/pkgsrc/tree/trunk/www/curl/patches
## Red Hat Enterprise Linux / CentOS Stream
- curl: https://kojihub.stream.centos.org/koji/packageinfo?packageID=217
- curl issues: https://issues.redhat.com/secure/CreateIssueDetails!init.jspa?pid=12332745&issuetype=1&components=12377466&priority=10300
- curl patches: https://gitlab.com/redhat/centos-stream/rpms/curl
## Rocky Linux
- curl: https://git.rockylinux.org/staging/rpms/curl/-/blob/r9/SPECS/curl.spec
- curl issues: https://bugs.rockylinux.org
- curl patches: https://git.rockylinux.org/staging/rpms/curl/-/tree/r9/SOURCES
## SerenityOS
- curl: https://github.com/SerenityOS/serenity/tree/master/Ports/curl
- curl issues: https://github.com/SerenityOS/serenity/issues?q=label%3Aports
- curl patches: https://github.com/SerenityOS/serenity/tree/master/Ports/curl/patches
## SmartOS
- curl: https://github.com/TritonDataCenter/illumos-extra/tree/master/curl
- curl issues: https://github.com/TritonDataCenter/illumos-extra/issues
- curl patches: https://github.com/TritonDataCenter/illumos-extra/tree/master/curl/Patches
## SPACK
- curl package source and patches: https://github.com/spack/spack/tree/develop/var/spack/repos/builtin/packages/curl
## vcpkg
- curl: https://github.com/microsoft/vcpkg/tree/master/ports/curl
- curl issues: https://github.com/microsoft/vcpkg/issues
- curl patches: https://github.com/microsoft/vcpkg/tree/master/ports/curl (`*.patch`)
## Void Linux
- curl: https://github.com/void-linux/void-packages/tree/master/srcpkgs/curl
- curl issues: https://github.com/void-linux/void-packages/issues
- curl patches: https://github.com/void-linux/void-packages/tree/master/srcpkgs/curl/patches
## Wolfi
- curl: https://github.com/wolfi-dev/os/blob/main/curl.yaml
View File
@ -3,7 +3,7 @@
This is the internal module for creating and handling "dynamic buffers". This
means buffers that can be appended to, dynamically and grow to adapt.
There will always be a terminating zero put at the end of the dynamic buffer.
There is always a terminating zero put at the end of the dynamic buffer.
The `struct dynbuf` is used to hold data for each instance of a dynamic
buffer. The members of that struct **MUST NOT** be accessed or modified
@ -17,8 +17,8 @@ void Curl_dyn_init(struct dynbuf *s, size_t toobig);
This initializes a struct to use for dynbuf and it cannot fail. The `toobig`
value **must** be set to the maximum size we allow this buffer instance to
grow to. The functions below will return `CURLE_OUT_OF_MEMORY` when hitting
this limit.
grow to. The functions below return `CURLE_OUT_OF_MEMORY` when hitting this
limit.
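As an illustration, a minimal sketch of the intended call pattern, assuming
the sibling functions `Curl_dyn_add`, `Curl_dyn_ptr`, `Curl_dyn_len` and
`Curl_dyn_free` from this same internal API:
```c
#include <stdio.h>
#include "dynbuf.h" /* curl's internal header */

static CURLcode append_greeting(void)
{
  struct dynbuf buf;
  CURLcode result;

  /* never allow this buffer instance to grow beyond 256 bytes */
  Curl_dyn_init(&buf, 256);

  result = Curl_dyn_add(&buf, "hello ");
  if(!result)
    result = Curl_dyn_add(&buf, "world");
  if(!result) /* the buffer is always zero-terminated */
    printf("%s (%zu bytes)\n", Curl_dyn_ptr(&buf), Curl_dyn_len(&buf));

  Curl_dyn_free(&buf); /* release the buffer, needed on error too */
  return result;
}
```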
## `Curl_dyn_free`
View File
@ -28,7 +28,7 @@ big and we never release just a patch. There is only "release".
- Is there a security advisory rated high or critical?
- Is there a data corruption bug?
- Did the bug cause an API/ABI breakage?
- Will the problem annoy a significant share of the user population?
- Does the problem annoy a significant share of the user population?
If the answer is yes to one or more of the above, an early release might be
warranted.
View File
@ -8,8 +8,8 @@ Experimental support in curl means:
1. Experimental features are provided to allow users to try them out and
provide feedback on functionality and API etc before they ship and get
"carved in stone".
2. You must enable the feature when invoking configure as otherwise curl will
not be built with the feature present.
2. You must enable the feature when invoking configure as otherwise curl is
not built with the feature present.
3. We strongly advise against using this feature in production.
4. **We reserve the right to change behavior** of the feature without sticking
to our API/ABI rules as we do for regular features, as long as it is marked
View File
@ -16,7 +16,7 @@
- custom maximum download time
- custom least download speed acceptable
- custom output result after completion
- guesses protocol from host name unless specified
- guesses protocol from hostname unless specified
- uses .netrc
- progress bar with time statistics while downloading
- "standard" proxy environment variables support
@ -26,7 +26,7 @@
- happy eyeballs dual-stack connects
- persistent connections
- SOCKS 4 + 5 support, with or without local name resolving
- supports user name and password in proxy environment variables
- supports username and password in proxy environment variables
- operations through HTTP proxy "tunnel" (using CONNECT)
- replaceable memory functions (malloc, free, realloc, etc)
- asynchronous name resolving (6)
@ -82,8 +82,8 @@
- active/passive using PORT, EPRT, PASV or EPSV
- single file size information (compare to HTTP HEAD)
- 'type=' URL support
- dir listing
- dir listing names-only
- directory listing
- directory listing names-only
- upload
- upload append
- upload via http-proxy as HTTP PUT
@ -94,7 +94,7 @@
- via HTTP proxy, HTTPS proxy or SOCKS proxy
- all operations can be tunneled through proxy
- customizable to retrieve file modification date
- no dir depth limit
- no directory depth limit
## FTPS (1)
View File
@ -10,9 +10,8 @@ BDFL (Benevolent Dictator For Life) style project.
This setup has been used due to convenience and the fact that it has worked
fine this far. It is not because someone thinks of it as a superior project
leadership model. It will also only continue working as long as Daniel manages
to listen in to what the project and the general user population wants and
expects from us.
leadership model. It also only works as long as Daniel manages to listen in to
what the project and the general user population wants and expects from us.
## Legal entity
@ -29,13 +28,13 @@ that wrote those parts of the code.
The curl project is not a democracy, but everyone is entitled to state their
opinion and may argue for their sake within the community.
All and any changes that have been done or will be done are eligible to bring
up for discussion, to object to or to praise. Ideally, we find consensus for
the appropriate way forward in any given situation or challenge.
All and any changes that have been done or are done are eligible to bring up
for discussion, to object to or to praise. Ideally, we find consensus for the
appropriate way forward in any given situation or challenge.
If there is no obvious consensus, a maintainer who's knowledgeable in the
specific area will take an "executive" decision that they think is the right
for the project.
specific area takes an "executive" decision that they think is the right for
the project.
## Donations
@ -81,17 +80,17 @@ curl source code repository. Committers are recorded as `Author` in git.
A maintainer in the curl project is an individual who has been given
permissions to push commits to one of the git repositories.
Maintainers are free to push commits to the repositories at their own will.
Maintainers are free to push commits to the repositories as they see fit.
Maintainers are however expected to listen to feedback from users and any
change that is non-trivial in size or nature *should* be brought to the
project as a Pull-Request (PR) to allow others to comment/object before merge.
## Former maintainers
A maintainer who stops being active in the project will at some point get
their push permissions removed. We do this for security reasons but also to
make sure that we always have the list of maintainers as "the team that push
stuff to curl".
A maintainer who stops being active in the project gets their push permissions
removed at some point. We do this for security reasons but also to make sure
that we always have the list of maintainers as "the team that push stuff to
curl".
Getting push permissions removed is not a punishment. Everyone who ever worked
on maintaining curl is considered a hero, for all time hereafter.
@ -100,7 +99,7 @@ on maintaining curl is considered a hero, for all time hereafter.
We have a security team. That is the team of people who are subscribed to the
curl-security mailing list; the receivers of security reports from users and
developers. This list of people will vary over time but should be skilled
developers. This list of people varies over time but they are all skilled
developers familiar with the curl project.
The security team works best when it consists of a small set of active
@ -108,6 +107,22 @@ persons. We invite new members when the team seems to need it, and we also
expect to retire security team members as they "drift off" from the project or
just find themselves unable to perform their duties there.
## Core team
There is a curl core team. It currently has the same set of members as the
security team. It can also be reached on the security email address.
The core team nominates and invites new members to the team when it sees fit.
There is no open member voting or formal ways to be a candidate. Active
participants in the curl project who want to join the core team can ask to
join.
The core team is a board of advisors. It deals with project management
subjects that need confidentiality or for other reasons cannot be dealt with
and discussed in the open (for example reports of code of conduct violations).
Project matters should always, as far as possible, be discussed on open
mailing lists.
## Server admins
We run a web server, a mailing list and more on the curl project's primary
@ -172,11 +187,10 @@ different individuals and over time.
If you think you can help make the project better by shouldering some
maintaining responsibilities, then please get in touch.
You will be expected to be familiar with the curl project and its ways of
working. You need to have gotten a few quality patches merged as a proof of
this.
You are expected to be familiar with the curl project and its ways of working.
You need to have gotten a few quality patches merged as a proof of this.
### Stop being a maintainer
If you are (or appear to be) no longer active in the project, you may be removed as
a maintainer. Thank you for your service!
a maintainer. Thank you for your service.
View File
@ -40,8 +40,8 @@ In the issue tracker we occasionally mark bugs with [help
wanted](https://github.com/curl/curl/labels/help%20wanted), as a sign that the
bug is acknowledged to exist and that there is nobody known to work on this
issue for the moment. Those are bugs that are fine to "grab" and provide a
pull request for. The complexity level of these will of course vary, so pick
one that piques your interest.
pull request for. The complexity level of these of course varies, so pick one
that piques your interest.
## Work on known bugs
@ -77,13 +77,12 @@ brainstorming on specific ways to do the implementation etc.
You can also come up with a completely new thing you think we should do. Or
not do. Or fix. Or add to the project. You then either bring it to the mailing
list first to see if people will shoot down the idea at once, or you bring a
first draft of the idea as a pull request and take the discussion there around
the specific implementation. Either way is fine.
list first to see if people shoot down the idea at once, or you bring a first
draft of the idea as a pull request and take the discussion there around the
specific implementation. Either way is fine.
## CONTRIBUTE
We offer [guidelines](https://curl.se/dev/contribute.html) that are
suitable to be familiar with before you decide to contribute to curl. If
you are used to open source development, you will probably not find many
surprises there.
We offer [guidelines](https://curl.se/dev/contribute.html) that are suitable
to be familiar with before you decide to contribute to curl. If you are used
to open source development, you probably do not find many surprises there.
View File
@ -327,7 +327,7 @@ April: added the cyassl backend (later renamed to WolfSSL)
January: the curl tool defaults to HTTP/2 for HTTPS URLs
December: curl 7.52.0 introduced support for HTTPS-proxy!
December: curl 7.52.0 introduced support for HTTPS-proxy
First TLS 1.3 support
View File
@ -10,19 +10,19 @@ HTTP Strict-Transport-Security. Added as experimental in curl
## Behavior
libcurl features an in-memory cache for HSTS hosts, so that subsequent
HTTP-only requests to a host name present in the cache will get internally
HTTP-only requests to a hostname present in the cache get internally
"redirected" to the HTTPS version.
## `curl_easy_setopt()` options:
- `CURLOPT_HSTS_CTRL` - enable HSTS for this easy handle
- `CURLOPT_HSTS` - specify file name where to store the HSTS cache on close
- `CURLOPT_HSTS` - specify filename where to store the HSTS cache on close
(and possibly read from at startup)
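As an illustration, a minimal sketch combining the two options (the cache
filename is a placeholder):
```c
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    /* enable the HSTS engine for this handle */
    curl_easy_setopt(curl, CURLOPT_HSTS_CTRL, (long)CURLHSTS_ENABLE);
    /* read the HSTS cache from this file at start, store it on close */
    curl_easy_setopt(curl, CURLOPT_HSTS, "hsts-cache.txt");
    /* this plain HTTP request is upgraded to HTTPS if the host is cached */
    curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}
```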
## curl command line options
- `--hsts [filename]` - enable HSTS, use the file as HSTS cache. If filename
is `""` (no length) then no file will be used, only in-memory cache.
is `""` (no length) then no file is used, only in-memory cache.
## HSTS cache file format
@ -38,5 +38,5 @@ The time stamp is when the entry expires.
## Possible future additions
- `CURLOPT_HSTS_PRELOAD` - provide a set of HSTS host names to load first
- `CURLOPT_HSTS_PRELOAD` - provide a set of HSTS hostnames to load first
- ability to save to something else than a file
View File
@ -9,7 +9,7 @@
Cookies are either "session cookies", which typically are forgotten when the
session is over (often taken to mean when the browser quits), or cookies
with expiration dates after which
the client will throw them away.
the client throws them away.
Cookies are set to the client with the Set-Cookie: header and are sent to
servers with the Cookie: header.
@ -30,9 +30,28 @@
implemented by curl.
curl considers `http://localhost` to be a *secure context*, meaning that it
will allow and use cookies marked with the `secure` keyword even when done
over plain HTTP for this host. curl does this to match how popular browsers
work with secure cookies.
allows and uses cookies marked with the `secure` keyword even when done over
plain HTTP for this host. curl does this to match how popular browsers work
with secure cookies.
## Super cookies
A single cookie can be set for a domain that matches multiple hosts. Like if
set for `example.com` it gets sent to both `aa.example.com` and
`bb.example.com`.
A challenge with this concept is that there are certain domains for which
cookies should not be allowed at all, because they are *Public
Suffixes*. Similarly, a client never accepts cookies set directly for the
top-level domain like for example `.com`. Cookies set for *too broad*
domains are generally referred to as *super cookies*.
If curl is built with PSL (**Public Suffix List**) support, it detects and
discards cookies that are specified for such suffix domains that should not
be allowed to have cookies.
If curl is *not* built with PSL support, it has no ability to stop super
cookies.
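Whether a given libcurl build has PSL support can be checked at runtime via
the `CURL_VERSION_PSL` feature bit; a small sketch:
```c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  curl_version_info_data *info = curl_version_info(CURLVERSION_NOW);
  if(info->features & CURL_VERSION_PSL)
    printf("PSL support: super cookies get detected and discarded\n");
  else
    printf("no PSL support: nothing stops super cookies\n");
  return 0;
}
```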
## Cookies saved to disk
@ -46,8 +65,7 @@
TAB. That file is called the cookie jar in curl terminology.
When libcurl saves a cookie jar, it creates a file header of its own in
which there is a URL mention that will link to the web version of this
document.
which there is a URL mention that links to the web version of this document.
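A minimal libcurl sketch of the cookie engine with a jar (`cookies.txt` is a
placeholder path):
```c
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    /* start the cookie engine, loading any cookies found in the file */
    curl_easy_setopt(curl, CURLOPT_COOKIEFILE, "cookies.txt");
    /* write all known cookies to this jar when the handle is cleaned up */
    curl_easy_setopt(curl, CURLOPT_COOKIEJAR, "cookies.txt");
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl); /* the cookie jar gets written here */
  }
  return 0;
}
```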
## Cookie file format
@ -82,13 +100,13 @@
`-b, --cookie`
tell curl a file to read cookies from and start the cookie engine, or if it
is not a file it will pass on the given string. `-b name=var` works and so
does `-b cookiefile`.
is not a file it passes on the given string. `-b name=var` works and so does
`-b cookiefile`.
`-j, --junk-session-cookies`
when used in combination with -b, it will skip all "session cookies" on load
so as to appear to start a new cookie session.
when used in combination with -b, it skips all "session cookies" on load so
as to appear to start a new cookie session.
`-c, --cookie-jar`
@ -140,7 +158,7 @@
can also set and access cookies.
Since curl and libcurl are plain HTTP clients without any knowledge of or
capability to handle JavaScript, such cookies will not be detected or used.
capability to handle JavaScript, such cookies are not detected or used.
Often, if you want to mimic what a browser does on such websites, you can
record web browser HTTP traffic when using such a site and then repeat the
View File
@ -23,20 +23,20 @@ We require at least version 1.12.0.
Over an http:// URL
-------------------
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl will
include an upgrade header in the initial request to the host to allow
upgrading to HTTP/2.
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl includes
an upgrade header in the initial request to the host to allow upgrading to
HTTP/2.
Possibly we can later introduce an option that will cause libcurl to fail if
Possibly we can later introduce an option that causes libcurl to fail if it is
not possible to upgrade. Possibly we introduce an option that makes libcurl
use HTTP/2 at once over http://
Over an https:// URL
--------------------
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl will use
ALPN to negotiate which protocol to continue with. Possibly introduce an
option that will cause libcurl to fail if not possible to use HTTP/2.
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl uses ALPN
to negotiate which protocol to continue with. Possibly introduce an option
that causes libcurl to fail if not possible to use HTTP/2.
`CURL_HTTP_VERSION_2TLS` was added in 7.47.0 as a way to ask libcurl to prefer
HTTP/2 for HTTPS but stick to 1.1 by default for plain old HTTP connections.
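For illustration, a sketch asking for `CURL_HTTP_VERSION_2TLS` (HTTP/2 for
HTTPS, HTTP/1.1 for plain HTTP):
```c
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* negotiate HTTP/2 via ALPN over TLS, stay on 1.1 for plain HTTP */
    curl_easy_setopt(curl, CURLOPT_HTTP_VERSION,
                     (long)CURL_HTTP_VERSION_2TLS);
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}
```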
@ -54,15 +54,15 @@ term for doing multiple independent transfers over the same physical TCP
connection.
To take advantage of multiplexing, you need to use the multi interface and set
`CURLMOPT_PIPELINING` to `CURLPIPE_MULTIPLEX`. With that bit set, libcurl will
attempt to reuse existing HTTP/2 connections and just add a new stream over
`CURLMOPT_PIPELINING` to `CURLPIPE_MULTIPLEX`. With that bit set, libcurl
attempts to reuse existing HTTP/2 connections and just add a new stream over
that when doing subsequent parallel requests.
While libcurl sets up a connection to an HTTP server there is a period during
which it does not know if it can pipeline or do multiplexing and if you add
new transfers in that period, libcurl will default to start new connections
for those transfers. With the new option `CURLOPT_PIPEWAIT` (added in 7.43.0),
you can ask that a transfer should rather wait and see in case there is a
new transfers in that period, libcurl defaults to starting new connections for
those transfers. With the new option `CURLOPT_PIPEWAIT` (added in 7.43.0), you
can ask that a transfer should rather wait and see in case there is a
connection for the same host in progress that might end up being possible to
multiplex on. It favors keeping the number of connections low to the cost of
slightly longer time to first byte transferred.
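A sketch of that multi interface setup (error handling and the transfer loop
are left out):
```c
#include <curl/curl.h>

int main(void)
{
  CURLM *multi = curl_multi_init();
  CURL *easy = curl_easy_init();

  /* let transfers share one HTTP/2 connection as multiplexed streams */
  curl_multi_setopt(multi, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX);

  curl_easy_setopt(easy, CURLOPT_URL, "https://example.com/");
  /* rather wait for a multiplexable connection than open a new one */
  curl_easy_setopt(easy, CURLOPT_PIPEWAIT, 1L);
  curl_multi_add_handle(multi, easy);

  /* ... drive the transfer with curl_multi_perform()/curl_multi_poll() ... */

  curl_multi_remove_handle(multi, easy);
  curl_easy_cleanup(easy);
  curl_multi_cleanup(multi);
  return 0;
}
```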
View File
@ -15,6 +15,8 @@ QUIC libraries we are using:
[quiche](https://github.com/cloudflare/quiche) - **EXPERIMENTAL**
[OpenSSL 3.2+ QUIC](https://github.com/openssl/openssl) - **EXPERIMENTAL**
[msh3](https://github.com/nibanks/msh3) (with [msquic](https://github.com/microsoft/msquic)) - **EXPERIMENTAL**
## Experimental
@ -23,19 +25,19 @@ HTTP/3 support in curl is considered **EXPERIMENTAL** until further notice
when built to use *quiche* or *msh3*. Only the *ngtcp2* backend is not
experimental.
Further development and tweaking of the HTTP/3 support in curl will happen in
the master branch using pull-requests, just like ordinary changes.
Further development and tweaking of the HTTP/3 support in curl happens in the
master branch using pull-requests, just like ordinary changes.
To fix before we remove the experimental label:
- the used QUIC library needs to consider itself non-beta
- it's fine to "leave" individual backends as experimental if necessary
- it is fine to "leave" individual backends as experimental if necessary
# ngtcp2 version
Building curl with ngtcp2 involves 3 components: `ngtcp2` itself, `nghttp3` and a QUIC supporting TLS library. The supported TLS libraries are covered below.
* `ngtcp2`: v1.1.0
* `ngtcp2`: v1.2.0
* `nghttp3`: v1.1.0
## Build with quictls
@ -56,6 +58,7 @@ Build nghttp3
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
% cd nghttp3
% git submodule update --init
% autoreconf -fi
% ./configure --prefix=<somewhere2> --enable-lib-only
% make
@ -64,7 +67,7 @@ Build nghttp3
Build ngtcp2
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/ngtcp2
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
% cd ngtcp2
% autoreconf -fi
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only
@ -99,6 +102,7 @@ Build nghttp3
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
% cd nghttp3
% git submodule update --init
% autoreconf -fi
% ./configure --prefix=<somewhere2> --enable-lib-only
% make
@ -107,7 +111,7 @@ Build nghttp3
Build ngtcp2
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/ngtcp2
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
% cd ngtcp2
% autoreconf -fi
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only --with-gnutls
@ -140,6 +144,7 @@ Build nghttp3
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
% cd nghttp3
% git submodule update --init
% autoreconf -fi
% ./configure --prefix=<somewhere2> --enable-lib-only
% make
@ -148,7 +153,7 @@ Build nghttp3
Build ngtcp2
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/ngtcp2
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
% cd ngtcp2
% autoreconf -fi
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only --with-wolfssl
@ -175,7 +180,7 @@ Since the quiche build manages its dependencies, curl can be built against the l
Build quiche and BoringSSL:
% git clone --recursive https://github.com/cloudflare/quiche
% git clone --recursive -b 0.20.0 https://github.com/cloudflare/quiche
% cd quiche
% cargo build --package quiche --release --features ffi,pkg-config-meta,qlog
% mkdir quiche/deps/boringssl/src/lib
@ -191,7 +196,54 @@ Build curl:
% make
% make install
If `make install` results in `Permission denied` error, you will need to prepend it with `sudo`.
If `make install` results in `Permission denied` error, you need to prepend
it with `sudo`.
# OpenSSL version
QUIC support is **EXPERIMENTAL**
Build OpenSSL 3.2.0
% cd ..
% git clone -b openssl-3.2.0 https://github.com/openssl/openssl
% cd openssl
% ./config enable-tls1_3 --prefix=<somewhere> --libdir=<somewhere>/lib
% make
% make install
Build nghttp3
% cd ..
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
% cd nghttp3
% git submodule update --init
% autoreconf -fi
% ./configure --prefix=<somewhere2> --enable-lib-only
% make
% make install
Build curl:
% cd ..
% git clone https://github.com/curl/curl
% cd curl
% autoreconf -fi
% LDFLAGS="-Wl,-rpath,<somewhere>/lib" ./configure --with-openssl=<somewhere> --with-openssl-quic --with-nghttp3=<somewhere2>
% make
% make install
You can build curl with cmake:
% cd ..
% git clone https://github.com/curl/curl
% cd curl
% cmake . -B build -DCURL_USE_OPENSSL=ON -DUSE_OPENSSL_QUIC=ON
% cmake --build build
% cmake --install build
If `make install` results in `Permission denied` error, you need to prepend
it with `sudo`.
# msh3 (msquic) version
@ -232,11 +284,10 @@ Build msh3:
% cmake --build . --config Release
% cmake --install . --config Release
**Note** - On Windows, Schannel will be used for TLS support by default. If
you with to use (the quictls fork of) OpenSSL, specify the
`-DQUIC_TLS=openssl` option to the generate command above. Also note that
OpenSSL brings with it an additional set of build dependencies not specified
here.
**Note** - On Windows, Schannel is used for TLS support by default. If you
wish to use (the quictls fork of) OpenSSL, specify the `-DQUIC_TLS=openssl`
option to the generate command above. Also note that OpenSSL brings with it an
additional set of build dependencies not specified here.
Build curl (in [Visual Studio Command
prompt](../winbuild/README.md#open-a-command-prompt)):
@ -273,10 +324,10 @@ See this [list of public HTTP/3 servers](https://bagder.github.io/HTTP3-test/)
### HTTPS eyeballing
With option `--http3` curl will attempt earlier HTTP versions as well should
the connect attempt via HTTP/3 not succeed "fast enough". This strategy is
similar to IPv4/6 happy eyeballing where the alternate address family is used
in parallel after a short delay.
With option `--http3` curl attempts earlier HTTP versions as well should the
connect attempt via HTTP/3 not succeed "fast enough". This strategy is similar
to IPv4/6 happy eyeballing where the alternate address family is used in
parallel after a short delay.
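The libcurl equivalent of this try-with-fallback behavior is
`CURL_HTTP_VERSION_3` (a sketch, assuming a libcurl built with HTTP/3
support):
```c
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* attempt HTTP/3, but allow fallback to earlier HTTP versions */
    curl_easy_setopt(curl, CURLOPT_HTTP_VERSION,
                     (long)CURL_HTTP_VERSION_3);
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}
```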
The IPv4/6 eyeballing has a default of 200ms and you may override that via
`--happy-eyeballs-timeout-ms value`. Since HTTP/3 is still relatively new, we
@ -295,8 +346,8 @@ So, without you specifying anything, the hard timeout is 200ms and the soft is 1
in less than 100ms.
* When QUIC is not supported (or UDP does not work for this network path), no
reply is seen and the HTTP/2 TLS+TCP connection starts 100ms later.
* In the worst case, UDP replies start before 100ms, but drag on. This will
start the TLS+TCP connection after 200ms.
* In the worst case, UDP replies start before 100ms, but drag on. This starts
the TLS+TCP connection after 200ms.
* When the QUIC handshake fails, the TLS+TCP connection is attempted right
away. For example, when the QUIC server presents the wrong certificate.
@ -304,10 +355,10 @@ The whole transfer only fails, when **both** QUIC and TLS+TCP fail to
handshake or time out.
Note that all this happens in addition to IP version happy eyeballing. If the
name resolution for the server gives more than one IP address, curl will try
all those until one succeeds - just as with all other protocols. And if those
IP addresses contain both IPv6 and IPv4, those attempts will happen, delayed,
in parallel (the actual eyeballing).
name resolution for the server gives more than one IP address, curl tries all
those until one succeeds - just as with all other protocols. If those IP
addresses contain both IPv6 and IPv4, those attempts happen, delayed, in
parallel (the actual eyeballing).
## Known Bugs
View File
@ -8,9 +8,8 @@ library as a backend to deal with HTTP.
Hyper support in curl is considered **EXPERIMENTAL** until further notice. It
needs to be explicitly enabled at build-time.
Further development and tweaking of the Hyper backend support in curl will
happen in the master branch using pull-requests, just like ordinary
changes.
Further development and tweaking of the Hyper backend support in curl happens
in the master branch using pull-requests, just like ordinary changes.
## Hyper version
133
deps/curl/docs/INSTALL-CMAKE.md vendored Normal file
View File
@ -0,0 +1,133 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
How To Compile with CMake
# Building with CMake
This document describes how to configure, build and install curl and libcurl
from source code using the CMake build tool. To build with CMake, you of
course first have to install CMake. The minimum required version of CMake is
specified in the file `CMakeLists.txt` found in the top of the curl source
tree. Once the correct version of CMake is installed you can follow the
instructions below for the platform you are building on.
CMake builds can be configured either from the command line, or from one of
CMake's GUIs.
# Current flaws in the curl CMake build
Missing features in the CMake build:
- Builds libcurl without large file support
- Does not support all SSL libraries (only OpenSSL, Schannel, Secure
  Transport, mbedTLS, and wolfSSL)
- Does not allow different resolver backends (no c-ares build support)
- No RTMP support built
- Does not allow building curl and libcurl with debug enabled
- Does not allow a custom CA bundle path
- Does not allow you to disable specific protocols from the build
- Does not find or use krb4 or GSS
- Rebuilds test files too eagerly, but still cannot run the tests
- Does not detect the correct `strerror_r` flavor when cross-compiling
(issue #1123)
# Configuring
A CMake configuration of curl is similar to the autotools build of curl.
It consists of the following steps after you have unpacked the source.
## Using `cmake`
You can configure for in-source-tree builds or for a build tree that is
separate from the source tree.
- Build in the source tree.
$ cmake -B .
- Build in a separate directory (parallel to the curl source tree in this
example). The build directory is created for you.
$ cmake -B ../curl-build
### Fallback for CMake before version 3.13
CMake before version 3.13 does not support the `-B` option. In that case,
you must create the build directory yourself, `cd` to it and run `cmake`
from there:
$ mkdir ../curl-build
$ cd ../curl-build
$ cmake ../curl
If you want to build in the source tree, it is enough to do this:
$ cmake .
## Using `ccmake`
CMake comes with a curses based interface called `ccmake`. To run `ccmake`
on curl, use the instructions for the command line cmake, but substitute
`ccmake` for `cmake`.
This brings up a curses interface with instructions on the bottom of the
screen. You can press the "c" key to configure the project, and the "g" key to
generate the project. After the project is generated, you can run make.
## Using `cmake-gui`
CMake also comes with a Qt based GUI called `cmake-gui`. To configure with
`cmake-gui`, you run `cmake-gui` and follow these steps:
1. Fill in the "Where is the source code" combo box with the path to
the curl source tree.
2. Fill in the "Where to build the binaries" combo box with the path to
the directory for your build tree, ideally this should not be the same
as the source tree, but a parallel directory called curl-build or
something similar.
3. Once the source and binary directories are specified, press the
"Configure" button.
4. Select the native build tool that you want to use.
5. At this point you can change any of the options presented in the GUI.
Once you have selected all the options you want, click the "Generate"
button.
# Building
Build (you have to specify the build directory).
$ cmake --build ../curl-build
### Fallback for CMake before version 3.13
CMake before version 3.13 does not support the `--build` option. In that
case, you have to `cd` to the build directory and use the building tool that
corresponds to the build files that CMake generated for you. This example
assumes that CMake generates `Makefile`:
$ cd ../curl-build
$ make
# Testing
(The test suite does not yet work with the cmake build)
# Installing
Install to default location (you have to specify the build directory).
$ cmake --install ../curl-build
### Fallback for CMake before version 3.15
CMake before version 3.15 does not support the `--install` option. In that
case, you have to `cd` to the build directory and use the building tool that
corresponds to the build files that CMake generated for you. This example
assumes that CMake generates `Makefile`:
$ cd ../curl-build
$ make install
View File
@ -1,89 +0,0 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
How To Compile with CMake
Building with CMake
==========================
This document describes how to compile, build and install curl and libcurl
from source code using the CMake build tool. To build with CMake, you will
of course have to first install CMake. The minimum required version of
CMake is specified in the file CMakeLists.txt found in the top of the curl
source tree. Once the correct version of CMake is installed you can follow
the instructions below for the platform you are building on.
CMake builds can be configured either from the command line, or from one
of CMake's GUI's.
Current flaws in the curl CMake build
=====================================
Missing features in the cmake build:
- Builds libcurl without large file support
- Does not support all SSL libraries (only OpenSSL, Schannel,
Secure Transport, and mbedTLS, WolfSSL)
- Does not allow different resolver backends (no c-ares build support)
- No RTMP support built
- Does not allow build curl and libcurl debug enabled
- Does not allow a custom CA bundle path
- Does not allow you to disable specific protocols from the build
- Does not find or use krb4 or GSS
- Rebuilds test files too eagerly, but still cannot run the tests
- Does not detect the correct strerror_r flavor when cross-compiling (issue #1123)
Command Line CMake
==================
A CMake build of curl is similar to the autotools build of curl. It
consists of the following steps after you have unpacked the source.
1. Create an out of source build tree parallel to the curl source
tree and change into that directory
$ mkdir curl-build
$ cd curl-build
2. Run CMake from the build tree, giving it the path to the top of
the curl source tree. CMake will pick a compiler for you. If you
want to specify the compile, you can set the CC environment
variable prior to running CMake.
$ cmake ../curl
$ make
3. Install to default location:
$ make install
(The test suite does not work with the cmake build)
ccmake
=========
CMake comes with a curses based interface called ccmake. To run ccmake on
a curl use the instructions for the command line cmake, but substitute
ccmake ../curl for cmake ../curl. This will bring up a curses interface
with instructions on the bottom of the screen. You can press the "c" key
to configure the project, and the "g" key to generate the project. After
the project is generated, you can run make.
cmake-gui
=========
CMake also comes with a Qt based GUI called cmake-gui. To configure with
cmake-gui, you run cmake-gui and follow these steps:
1. Fill in the "Where is the source code" combo box with the path to
the curl source tree.
2. Fill in the "Where to build the binaries" combo box with the path
to the directory for your build tree, ideally this should not be the
same as the source tree, but a parallel directory called curl-build or
something similar.
3. Once the source and binary directories are specified, press the
"Configure" button.
4. Select the native build tool that you want to use.
5. At this point you can change any of the options presented in the
GUI. Once you have selected all the options you want, click the
"Generate" button.
6. Run the native build tool that you used CMake to generate.
View File
@ -24,8 +24,8 @@ or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
## Building from git
If you get your code off a git repository instead of a release tarball, see
the `GIT-INFO` file in the root directory for specific instructions on how to
proceed.
the `GIT-INFO.md` file in the root directory for specific instructions on how
to proceed.
# Unix
@ -110,10 +110,10 @@ Figuring out all the dependency libraries for a given library is hard, as it
might involve figuring out the dependencies of the dependencies and they vary
between platforms and change between versions.
When using static dependencies, the build scripts will mostly assume that you,
the user, will provide all the necessary additional dependency libraries as
additional arguments in the build. With configure, by setting `LIBS` or
`LDFLAGS` on the command line.
When using static dependencies, the build scripts mostly assume that you, the
user, provide all the necessary additional dependency libraries as additional
arguments in the build. With configure, by setting `LIBS` or `LDFLAGS` on the
command line.
Building statically is not for the faint of heart.
@ -123,8 +123,8 @@ If you are a curl developer and use gcc, you might want to enable more debug
options with the `--enable-debug` option.
curl can be built to use a whole range of libraries to provide various useful
services, and configure will try to auto-detect a decent default. But if you
want to alter it, you can select how to deal with each individual library.
services, and configure tries to auto-detect a decent default. If you want to
alter it, you can select how to deal with each individual library.
## Select TLS backend
@ -146,7 +146,7 @@ you cannot add another OpenSSL fork (or wolfSSL) simply because they have
conflicting identical symbol names.
When you build with multiple TLS backends, you can select the active one at
run-time when curl starts up.
runtime when curl starts up.
## configure finding libs in wrong directory
@ -174,8 +174,8 @@ Building for Windows XP is required as a minimum.
KB140584 is a must for any Windows developer. Especially important is full
understanding if you are not going to follow the advice given above.
- [How To Use the C Run-Time](https://support.microsoft.com/help/94248/how-to-use-the-c-run-time)
- [Run-Time Library Compiler Options](https://docs.microsoft.com/cpp/build/reference/md-mt-ld-use-run-time-library)
- [How To Use the C Runtime](https://support.microsoft.com/help/94248/how-to-use-the-c-run-time)
- [Runtime Library Compiler Options](https://docs.microsoft.com/cpp/build/reference/md-mt-ld-use-run-time-library)
- [Potential Errors Passing CRT Objects Across DLL Boundaries](https://docs.microsoft.com/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries)
If your app is misbehaving in some strange way, or it is suffering from memory
@ -185,59 +185,11 @@ multi-threaded dynamic C runtime.
If you get linkage errors read section 5.7 of the FAQ document.
## mingw-w64
Make sure that mingw-w64's bin directory is in the search path, for example:
```cmd
set PATH=c:\mingw-w64\bin;%PATH%
```
then run `mingw32-make mingw32` in the root dir. There are other
make targets available to build libcurl with more features, use:
- `mingw32-make mingw32-zlib` to build with Zlib support;
- `mingw32-make mingw32-ssl-zlib` to build with SSL and Zlib enabled;
- `mingw32-make mingw32-ssh2-ssl-zlib` to build with SSH2, SSL, Zlib;
- `mingw32-make mingw32-ssh2-ssl-sspi-zlib` to build with SSH2, SSL, Zlib
and SSPI support.
If you have any problems linking libraries or finding header files, be sure
to verify that the provided `Makefile.mk` files use the proper paths, and
adjust as necessary. It is also possible to override these paths with
environment variables, for example:
```cmd
set ZLIB_PATH=c:\zlib-1.2.12
set OPENSSL_PATH=c:\openssl-3.0.5
set LIBSSH2_PATH=c:\libssh2-1.10.0
```
It is also possible to build with other LDAP installations than MS LDAP;
currently it is possible to build with native Win32 OpenLDAP, or with the
*Novell CLDAP* SDK. If you want to use these you need to set these vars:
```cmd
set CPPFLAGS=-Ic:/openldap/include -DCURL_HAS_OPENLDAP_LDAPSDK
set LDFLAGS=-Lc:/openldap/lib
set LIBS=-lldap -llber
```
or for using the Novell SDK:
```cmd
set CPPFLAGS=-Ic:/openldapsdk/inc -DCURL_HAS_NOVELL_LDAPSDK
set LDFLAGS=-Lc:/openldapsdk/lib/mscvc
set LIBS=-lldapsdk -lldapssl -lldapx
```
If you want to enable LDAPS support then append `-ldaps` to the make target.
## Cygwin
Almost identical to the Unix installation. Run the configure script in the
curl source tree root with `sh configure`. Make sure you have the `sh`
executable in `/bin/` or you will see the configure fail toward the end.
executable in `/bin/` or you see configure fail toward the end.
Run `make`
@ -315,16 +267,16 @@ might yet need some additional adjustment.
## Important static libcurl usage note
When building an application that uses the static libcurl library on Windows,
you must add `-DCURL_STATICLIB` to your `CFLAGS`. Otherwise the linker will
look for dynamic import symbols.
you must add `-DCURL_STATICLIB` to your `CFLAGS`. Otherwise the linker looks
for dynamic import symbols.
## Legacy Windows and SSL
Schannel (from Windows SSPI), is the native SSL library in Windows. However,
Schannel in Windows <= XP is unable to connect to servers that
no longer support the legacy handshakes and algorithms used by those
versions. If you will be using curl in one of those earlier versions of
Windows you should choose another SSL backend such as OpenSSL.
Schannel in Windows <= XP is unable to connect to servers that no longer
support the legacy handshakes and algorithms used by those versions. If you
are using curl in one of those earlier versions of Windows you should choose
another SSL backend such as OpenSSL.
# Apple Platforms (macOS, iOS, tvOS, watchOS, and their simulator counterparts)
@ -333,10 +285,10 @@ implementation, Secure Transport, instead of OpenSSL. To build with Secure
Transport for SSL/TLS, use the configure option `--with-secure-transport`.
When Secure Transport is in use, the curl options `--cacert` and `--capath`
and their libcurl equivalents, will be ignored, because Secure Transport uses
the certificates stored in the Keychain to evaluate whether or not to trust
the server. This, of course, includes the root certificates that ship with the
OS. The `--cert` and `--engine` options, and their libcurl equivalents, are
and their libcurl equivalents, are ignored, because Secure Transport uses the
certificates stored in the Keychain to evaluate whether or not to trust the
server. This, of course, includes the root certificates that ship with the OS.
The `--cert` and `--engine` options, and their libcurl equivalents, are
currently unimplemented in curl with Secure Transport.
In general, a curl build for an Apple `ARCH/SDK/DEPLOYMENT_TARGET` combination
@ -355,7 +307,8 @@ make -j8
make install
```
Above will build curl for macOS platform with `x86_64` architecture and `10.8` as deployment target.
The above command lines build curl for the macOS platform with `x86_64`
architecture and `10.8` as deployment target.
Here is an example for iOS device:
@ -388,14 +341,14 @@ In all above, the built libraries and executables can be found in the
# Android
When building curl for Android it's recommended to use a Linux/macOS environment
since using curl's `configure` script is the easiest way to build curl
for Android. Before you can build curl for Android, you need to install the
Android NDK first. This can be done using the SDK Manager that is part of
Android Studio. Once you have installed the Android NDK, you need to figure out
where it has been installed and then set up some environment variables before
launching `configure`. On macOS, those variables could look like this to compile
for `aarch64` and API level 29:
When building curl for Android it is recommended to use a Linux/macOS
environment since using curl's `configure` script is the easiest way to build
curl for Android. Before you can build curl for Android, you need to install
the Android NDK first. This can be done using the SDK Manager that is part of
Android Studio. Once you have installed the Android NDK, you need to figure
out where it has been installed and then set up some environment variables
before launching `configure`. On macOS, those variables could look like this
to compile for `aarch64` and API level 29:
```bash
export ANDROID_NDK_HOME=~/Library/Android/sdk/ndk/25.1.8937393 # Point into your NDK.
@ -415,16 +368,16 @@ to adjust those variables accordingly. After that you can build curl like this:
./configure --host aarch64-linux-android --with-pic --disable-shared
Note that this does not give you SSL/TLS support. If you need SSL/TLS, you
have to build curl with a SSL/TLS library, e.g. OpenSSL, because it is
impossible for curl to access Android's native SSL/TLS layer. To build curl
for Android using OpenSSL, follow the OpenSSL build instructions and then
install `libssl.a` and `libcrypto.a` to `$TOOLCHAIN/sysroot/usr/lib` and copy
`include/openssl` to `$TOOLCHAIN/sysroot/usr/include`. Now you can build curl
for Android using OpenSSL like this:
```bash
LIBS="-lssl -lcrypto -lc++" # For OpenSSL/BoringSSL. In general, you need to link the SSL/TLS layer's transitive dependencies if you are linking statically.
./configure --host aarch64-linux-android --with-pic --disable-shared --with-openssl="$TOOLCHAIN/sysroot/usr"
```
@ -434,22 +387,22 @@ For IBM i (formerly OS/400), you can use curl in two different ways:
- Natively, running in the **ILE**. The obvious use is being able to call curl
from ILE C or RPG applications.
- You need to build this from source. See `packages/OS400/README` for the ILE
specific build instructions.
- In the **PASE** environment, which runs AIX programs. curl is built as it
would be on AIX.
- IBM provides builds of curl in their Yum repository for PASE software.
- To build from source, follow the Unix instructions.
There are some additional limitations and quirks with curl on this platform;
they affect both environments.
## Multi-threading notes
By default, jobs in IBM i do not start with threading enabled. (Exceptions
include interactive PASE sessions started by `QP2TERM` or SSH.) If you use
curl in an environment without threading when options like asynchronous DNS
were enabled, you get messages like:
```
getaddrinfo() thread failed to start
@ -494,9 +447,9 @@ export NM=ppc_405-nm
You may also need to provide a parameter like `--with-random=/dev/urandom` to
configure as it cannot detect the presence of a random number generating
device for a target system. The `--prefix` parameter specifies where curl gets
installed. If `configure` completes successfully, do `make` and `make install`
as usual.
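Tying those pieces together, a cross-compile configure invocation might look
like this sketch (the host triplet and prefix are assumptions matching the
example toolchain above):

```bash
./configure --host=powerpc-hardhat-linux \
            --with-random=/dev/urandom \
            --prefix=/opt/hardhat/devkit/ppc/405/target/usr
make
make install
```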
In some cases, you may be able to simplify the above commands to as little as:
@ -508,10 +461,16 @@ There are a number of configure options that can be used to reduce the size of
libcurl for embedded applications where binary size is an important factor.
First, be sure to set the `CFLAGS` variable when configuring with any relevant
compiler optimization flags to reduce the size of the binary. For gcc, this
would mean at minimum the `-Os` option, and others like the following that
may be relevant in some environments: `-march=X`, `-mthumb`, `-m32`,
`-mdynamic-no-pic`, `-flto`, `-fdata-sections`, `-ffunction-sections`,
`-fno-unwind-tables`, `-fno-asynchronous-unwind-tables`,
`-fno-record-gcc-switches`, `-fsection-anchors`, `-fno-plt`,
`-Wl,--gc-sections`, `-Wl,-Bsymbolic`, and `-Wl,-s`.
For example, this is how to combine a few of these options:
./configure CC=gcc CFLAGS='-Os -ffunction-sections' LDFLAGS='-Wl,--gc-sections'...
Note that newer compilers often produce smaller code than older versions
due to improved optimization.
@ -519,9 +478,9 @@ due to improved optimization.
Be sure to specify as many `--disable-` and `--without-` flags on the
configure command-line as you can to disable all the libcurl features that you
know your application is not going to need. Besides specifying the
`--disable-PROTOCOL` flags for all the types of URLs your application does not
use, here are some other flags that can reduce the size of the library by
disabling support for some feature (run `./configure --help` to see them all;
a combined example follows the list):
- `--disable-alt-svc` (HTTP Alt-Svc)
- `--disable-ares` (the C-ARES DNS library)
@ -535,13 +494,17 @@ disabling support for some feature:
- `--disable-dateparse` (date parsing for time conditionals)
- `--disable-dnsshuffle` (internal server load spreading)
- `--disable-doh` (DNS-over-HTTP)
- `--disable-form-api` (POST form API)
- `--disable-get-easy-options` (lookup easy options at runtime)
- `--disable-headers-api` (API to access headers)
- `--disable-hsts` (HTTP Strict Transport Security)
- `--disable-http-auth` (all HTTP authentication)
- `--disable-ipv6` (IPv6)
- `--disable-libcurl-option` (--libcurl C code generation support)
- `--disable-manual` (built-in documentation)
- `--disable-manual` (--manual built-in documentation)
- `--disable-mime` (MIME API)
- `--disable-netrc` (.netrc file)
- `--disable-ntlm` (NTLM authentication)
- `--disable-ntlm-wb` (NTLM WinBind)
- `--disable-progress-meter` (graphical progress meter in library)
- `--disable-proxy` (HTTP and SOCKS proxies)
@ -563,30 +526,21 @@ disabling support for some feature:
- `--without-ssl` (SSL/TLS)
- `--without-zlib` (on-the-fly decompression)
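As referenced above, here is a sketch of a combined invocation for a
trimmed-down, HTTPS-oriented build (the selection of flags is illustrative,
not a recommendation):

```bash
./configure --with-openssl \
  --disable-ftp --disable-dict --disable-telnet --disable-tftp \
  --disable-manual --disable-netrc --disable-proxy --disable-ipv6
```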
Be sure also to strip debugging symbols from your binaries after compiling
using 'strip' or an option like `-s`. If space is really tight, you may be able
to gain a few bytes by removing some unneeded sections of the shared library
using the -R option to objcopy (e.g. the .comment section).
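A sketch of that post-build trimming (the library filename is illustrative):

```bash
# Drop symbol and debug information, then remove the .comment section
strip libcurl.so.4
objcopy -R .comment libcurl.so.4
```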
Using these techniques it is possible to create a basic HTTP-only libcurl
shared library for i386 Linux platforms that is only 130 KiB in size
(as of libcurl version 8.6.0, using gcc 13.2.0).
You may find that statically linking libcurl to your application results in a
lower total size than dynamically linking.
The curl test harness can detect the use of some, but not all, of the
`--disable` statements suggested above. Use of these can cause tests relying
on those features to fail. The test harness can be manually forced to skip the
relevant tests by specifying certain key words on the `runtests.pl` command
line. Following is a list of appropriate key words for those configure options
that are not automatically detected:
@ -43,7 +43,6 @@ versions of libs and build tools.
- GNU M4 1.4
- perl 5.6
- roffit 0.5
- nroff any version that supports `-man [in] [out]`
- cmake 3.7
Library Symbols
deps/curl/docs/IPFS.md vendored
@ -19,12 +19,29 @@ By explicitly requesting [application/vnd.ipld.raw](https://www.iana.org/assignm
This enables users to use untrusted, public gateways without worrying they might return invalid/malicious bytes.
## IPFS and IPNS protocol handling
There are various ways to access data from the IPFS network. One such way is
through the concept of public
"[gateways](https://docs.ipfs.tech/concepts/ipfs-gateway/#overview)". The
short version is that entities can offer gateway services. An example here
that is hosted by Protocol Labs (who also makes IPFS) is `dweb.link` and
`ipfs.io`. Both sites expose gateway functionality. Getting a file through
`ipfs.io` looks like this:
`https://ipfs.io/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi`
If you were to be [running your own IPFS
node](https://docs.ipfs.tech/how-to/command-line-quick-start/) then you, by
default, also have a [local gateway](https://specs.ipfs.tech/http-gateways/)
running. In its default configuration the earlier example would then also work
in this link:
`http://127.0.0.1:8080/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi`
## cURL handling of the IPFS protocols
The IPFS integration in cURL hides this gateway logic for you. Instead of
providing a full URL to a file on IPFS like this:
```
curl http://127.0.0.1:8080/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```
@ -34,49 +51,77 @@ You can provide it with the IPFS protocol instead:
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```
With the IPFS protocol way of asking a file, cURL still needs to know the
gateway. curl essentially just rewrites the IPFS based URL to a gateway URL.
### IPFS_GATEWAY environment variable
If the `IPFS_GATEWAY` environment variable is found, its value is used as
gateway.
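For example (the gateway address is illustrative):

```
export IPFS_GATEWAY=http://127.0.0.1:8080
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```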
### Automatic gateway detection
When you provide no additional details to cURL then it:
1. First looks for the `IPFS_GATEWAY` environment variable and use that if it
is set.
2. Looks for the file: `~/.ipfs/gateway`. If it can find that file then it
means that you have a local gateway running and that file contains the URL
to your local gateway.
If cURL fails, you are presented with an error message and a link to this
page, pointing to the option most applicable to solving the issue.
### `--ipfs-gateway` argument
You can also provide a `--ipfs-gateway` argument to cURL. This overrules any
other gateway setting. curl does not fall back to the other options if the
provided gateway did not work.
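For example:

```
curl --ipfs-gateway https://ipfs.io ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```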
## Gateway redirects
A gateway could redirect to another place. For example, `dweb.link` redirects
[path based](https://docs.ipfs.tech/how-to/address-ipfs-on-web/#path-gateway)
requests to [subdomain
based](https://docs.ipfs.tech/how-to/address-ipfs-on-web/#subdomain-gateway)
ones. A request using:
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi --ipfs-gateway https://dweb.link
Which would be translated to:
https://dweb.link/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
redirects to:
https://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi.ipfs.dweb.link
If you trust this behavior from your gateway of choice then passing the `-L`
option follows the redirect.
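Putting it together:

```
curl -L --ipfs-gateway https://dweb.link ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```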
## Error messages and hints
Depending on the arguments, cURL could present the user with an error.
### Gateway file and environment variable
cURL tried to look for the file: `~/.ipfs/gateway` but could not find it. It
also tried to look for the `IPFS_GATEWAY` environment variable but could not
find that either. This happens when no extra arguments are passed to cURL,
letting it try to figure it out [automatically](#automatic-gateway-detection).
Any IPFS implementation that has gateway support should expose its URL in
`~/.ipfs/gateway`. If you are already running a gateway, make sure it exposes
the file where cURL expects to find it.
Alternatively you could set the `IPFS_GATEWAY` environment variable or pass
the `--ipfs-gateway` flag to the cURL command.
### Malformed gateway URL
The command executed evaluates to an invalid URL. This could be anywhere in
the URL, but a likely point is a wrong gateway URL.
Inspect the URL set via the `IPFS_GATEWAY` environment variable or passed with
the `--ipfs-gateway` flag. Alternatively opt to go for the
[automatic](#automatic-gateway-detection) gateway detection.
@ -16,12 +16,12 @@ problems may have been fixed or changed somewhat since this was written.
1.5 Expect-100 meets 417
2. TLS
2.1 IMAPS connection fails with rustls error
2.3 Unable to use PKCS12 certificate with Secure Transport
2.4 Secure Transport will not import PKCS#12 client certificates without a password
2.5 Client cert handling with Issuer DN differs between backends
2.7 Client cert (MTLS) issues with Schannel
2.11 Schannel TLS 1.2 handshake bug in old Windows versions
2.12 FTPS with Schannel times out file list operation
2.13 CURLOPT_CERTINFO results in CURLE_OUT_OF_MEMORY with Schannel
3. Email protocols
@ -30,6 +30,7 @@ problems may have been fixed or changed somewhat since this was written.
3.3 POP3 expects "CRLF.CRLF" eob for some single-line responses
3.4 AUTH PLAIN for SMTP is not working on all servers
3.5 APOP authentication fails on POP3
3.6 POP3 issue when reading small chunks
4. Command line
@ -44,6 +45,7 @@ problems may have been fixed or changed somewhat since this was written.
5.12 flaky CI builds
5.13 long paths are not fully supported on Windows
5.14 Windows Unicode builds use homedir in current locale
5.15 Unicode on Windows
6. Authentication
6.1 NTLM authentication and unicode
@ -59,8 +61,11 @@ problems may have been fixed or changed somewhat since this was written.
6.13 Negotiate against Hadoop HDFS
7. FTP
7.1 FTP upload fails if remembered dir is deleted
7.2 Implicit FTPS upload timeout
7.3 FTP with NOBODY and FAILONERROR
7.4 FTP with ACCT
7.5 FTPS upload, FileZilla, GnuTLS and close_notify
7.11 FTPS upload data loss with TLS 1.3
7.12 FTPS directory listing hangs on Windows with Schannel
@ -92,10 +97,8 @@ problems may have been fixed or changed somewhat since this was written.
15.1 cmake outputs: no version information available
15.2 support build with GnuTLS
15.3 unusable tool_hugehelp.c with MinGW
15.4 build docs/curl.1
15.6 uses -lpthread instead of Threads::Threads
15.7 generated .pc file contains strange entries
15.8 libcurl.pc uses absolute library paths
15.11 ExternalProject_Add does not set CURL_CA_PATH
15.13 CMake build with MIT Kerberos does not work
@ -104,12 +107,16 @@ problems may have been fixed or changed somewhat since this was written.
16.6 aws-sigv4 does not behave well with AWS VPC Lattice
17. HTTP/2
17.1 HTTP/2 prior knowledge over proxy
17.2 HTTP/2 frames while in the connection pool kill reuse
17.3 ENHANCE_YOUR_CALM causes infinite retries
18. HTTP/3
18.1 connection migration does not work
19. RTSP
19.1 Some methods do not support response bodies
==============================================================================
1. HTTP
@ -129,6 +136,10 @@ problems may have been fixed or changed somewhat since this was written.
2. TLS
2.1 IMAPS connection fails with rustls error
https://github.com/curl/curl/issues/10457
2.3 Unable to use PKCS12 certificate with Secure Transport
See https://github.com/curl/curl/issues/5403
@ -159,12 +170,6 @@ problems may have been fixed or changed somewhat since this was written.
https://github.com/curl/curl/issues/5488
2.12 FTPS with Schannel times out file list operation
"Instead of the command completing, it just sits there until the timeout
expires." - the same command line seems to work with other TLS backends and
other operating systems. See https://github.com/curl/curl/issues/5284.
2.13 CURLOPT_CERTINFO results in CURLE_OUT_OF_MEMORY with Schannel
https://github.com/curl/curl/issues/8741
@ -200,6 +205,12 @@ problems may have been fixed or changed somewhat since this was written.
See https://github.com/curl/curl/issues/10073
3.6 POP3 issue when reading small chunks
CURL_DBG_SOCK_RMAX=4 ./runtests.pl -v 982
See https://github.com/curl/curl/issues/12063
4. Command line
5. Build and portability issues
@ -281,6 +292,16 @@ problems may have been fixed or changed somewhat since this was written.
See https://github.com/curl/curl/pull/7252 and
https://github.com/curl/curl/pull/7281
5.15 Unicode on Windows
Passing in a unicode filename with -o:
https://github.com/curl/curl/issues/11461
Passing in unicode character with -d:
https://github.com/curl/curl/issues/12231
6. Authentication
6.1 NTLM authentication and unicode
@ -360,6 +381,18 @@ problems may have been fixed or changed somewhat since this was written.
7. FTP
7.1 FTP upload fails if remembered dir is deleted
curl's FTP code assumes that the directory it entered in a previous transfer
still exists when it comes back to do a second transfer, and does not respond
well if it was indeed deleted in the mean time.
https://github.com/curl/curl/issues/12181
7.2 Implicit FTPS upload timeout
https://github.com/curl/curl/issues/11720
7.3 FTP with NOBODY and FAILONERROR
It seems sensible to be able to use CURLOPT_NOBODY and CURLOPT_FAILONERROR
@ -373,6 +406,13 @@ problems may have been fixed or changed somewhat since this was written.
thus fails to issue the correct command:
https://curl.se/bug/view.cgi?id=635
7.5 FTPS upload, FileZilla, GnuTLS and close_notify
An issue where curl does not send the TLS alert close_notify, which triggers
the wrath of GnuTLS in FileZilla server, and a FTP reply 426 ECONNABORTED.
https://github.com/curl/curl/issues/11383
7.11 FTPS upload data loss with TLS 1.3
During FTPS upload curl does not attempt to read TLS handshake messages sent
@ -390,9 +430,16 @@ problems may have been fixed or changed somewhat since this was written.
https://github.com/curl/curl/issues/6149
7.12 FTPS directory listing hangs on Windows with Schannel
7.12 FTPS server compatibility on Windows with Schannel
https://github.com/curl/curl/issues/9161
FTPS is not widely used with the Schannel TLS backend and so there may be more
bugs compared to other TLS backends such as OpenSSL. In the past users have
reported hanging and failed connections. It's very likely some changes to curl
since then fixed the issues. None of the reported issues can be reproduced any
longer.
If you encounter an issue connecting to your server via FTPS with the latest
curl and Schannel then please search for open issues or file a new issue.
9. SFTP and SCP
@ -525,12 +572,6 @@ problems may have been fixed or changed somewhat since this was written.
see https://github.com/curl/curl/issues/3125
15.4 build docs/curl.1
The cmake build does not create the docs/curl.1 file and therefore must rely on
it being there already. This makes the --manual option not work and test
cases like 1139 cannot function.
15.6 uses -lpthread instead of Threads::Threads
See https://github.com/curl/curl/issues/6166
@ -542,14 +583,6 @@ problems may have been fixed or changed somewhat since this was written.
See https://github.com/curl/curl/issues/6167
15.8 libcurl.pc uses absolute library paths
The libcurl.pc file generated by cmake contains things like Libs.private:
/usr/lib64/libssl.so /usr/lib64/libcrypto.so /usr/lib64/libz.so. The
autotools equivalent would say Libs.private: -lssl -lcrypto -lz
See https://github.com/curl/curl/issues/6169
15.11 ExternalProject_Add does not set CURL_CA_PATH
CURL_CA_BUNDLE and CURL_CA_PATH are not set properly when cmake's
@ -580,6 +613,10 @@ problems may have been fixed or changed somewhat since this was written.
17. HTTP/2
17.1 HTTP/2 prior knowledge over proxy
https://github.com/curl/curl/issues/12641
17.2 HTTP/2 frames while in the connection pool kill reuse
If the server sends HTTP/2 frames (like for example an HTTP/2 PING frame) to
@ -602,3 +639,13 @@ problems may have been fixed or changed somewhat since this was written.
18.1 connection migration does not work
https://github.com/curl/curl/issues/7695
19. RTSP
19.1 Some methods do not support response bodies
The RTSP implementation is written to assume that a number of RTSP methods
will always get responses without bodies, even though there seems to be no
indication in the RFC that this is always the case.
https://github.com/curl/curl/issues/12414
@ -40,13 +40,13 @@ MAIL ETIQUETTE
please use the one or the ones that suit you the most.
Each mailing list has hundreds up to thousands of readers, meaning that each
mail sent is received and read by a large number of people. People from
various cultures, regions, religions and continents.
1.2 Netiquette
Netiquette is a common term for how to behave on the Internet. Of course, in
each particular group and subculture there are differences in what is
acceptable and what is considered good manners.
This document outlines what we in the curl project consider to be good
@ -71,8 +71,8 @@ MAIL ETIQUETTE
through to all the subscribers.
If you post without being subscribed (or from a different mail address than
the one you are subscribed with), your mail is simply silently discarded.
You have to subscribe first, then post.
The reason for this unfortunate and strict subscription policy is of course
to stop spam from pestering the lists.
@ -80,14 +80,13 @@ MAIL ETIQUETTE
1.5 Moderation of new posters
Several of the curl mailing lists automatically make all posts from new
subscribers be moderated. After you have subscribed and sent your first mail
to a list, that mail is not let through to the list until a mailing list
administrator has verified that it is OK and permits it to get posted.
Once a first post has been made that proves the sender is actually talking
about curl-related subjects, the moderation "flag" is switched off and
future posts go through without being moderated.
The reason for this moderation policy is that we do suffer from spammers who
actually subscribe and send spam to our lists.
@ -95,8 +94,8 @@ MAIL ETIQUETTE
1.6 Handling trolls and spam
Despite our good intentions and hard work to keep spam off the lists and to
maintain a friendly and positive atmosphere, there are times when spam
and/or trolls get through.
Troll - "someone who posts inflammatory, extraneous, or off-topic messages
in an online community"
@ -106,10 +105,10 @@ MAIL ETIQUETTE
No matter what, we NEVER EVER respond to trolls or spammers on the list. If
you believe the list admin should do something in particular, contact them
off-list. The subject is taken care of as much as possible to prevent
repeated offenses, but responding on the list to such messages never leads
to anything good and only puts the light even more on the offender: which
was the entire purpose of it getting sent to the list in the first place.
Do not feed the trolls.
@ -130,7 +129,7 @@ MAIL ETIQUETTE
1.8 I posted, now what?
If you are not subscribed with the same email address that you used to send
the email, your post is silently discarded.
If you posted for the first time to the mailing list, you first need to wait
for an administrator to allow your email to go through (moderated). This
@ -151,28 +150,28 @@ MAIL ETIQUETTE
what you did with details enough to allow others to help point out the
problem or repeat the steps in their locations.
Failing to include details only delays responses and makes people respond
and ask for more details and you have to send follow-up emails that include
them.
Expect the responses to primarily help YOU debug the issue, or ask YOU
questions that can lead you or others towards a solution or explanation to
whatever you experience.
If you are a repeat offender to the guidelines outlined in this document,
chances are that people ignore you and your chances to get responses in the
future greatly diminish.
1.9 Your emails are public
Your email, its contents and all its headers and the details in those
headers are received by every subscriber of the mailing list that you send
your email to.
Your email as sent to a curl mailing list ends up in mail archives, on the
curl website and elsewhere, for others to see and read. Today and in the
future. In addition to the archives, the mail is sent out to thousands of
individuals. There is no way to undo a sent email.
When sending emails to a curl mailing list, do not include sensitive
information such as user names and passwords; use fake ones, temporary ones
@ -10,7 +10,7 @@ Get a README file from an FTP server:
curl ftp://ftp.example.com/README
Get a webpage from a server using port 8000:
curl http://www.example.com:8000/
@ -63,13 +63,12 @@ Get a file from an SMB server:
## Download to a File
Get a webpage and store in a local file with a specific name:
curl -o thatpage.html http://www.example.com/
Get a webpage and store in a local file, make the local file get the name of
the remote document (if no filename part is specified in the URL, this fails):
curl -O http://www.example.com/index.html
@ -104,7 +103,7 @@ This is similar to FTP, but you can use the `--key` option to specify a
private key to use instead of a password. Note that the private key may itself
be protected by a password that is unrelated to the login password of the
remote system; this password is specified using the `--pass` option.
Typically, curl automatically extracts the public key from the private key
file, but in cases where curl does not have the proper library support, a
matching public key file must be specified using the `--pubkey` option.
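For example (the key paths and passphrase are illustrative):

    curl --key ~/.ssh/id_rsa --pubkey ~/.ssh/id_rsa.pub --pass mysecret \
        sftp://user@example.com/~/README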
@ -126,7 +125,7 @@ secure ones out of the ones that the server accepts for the given URL, by
using `--anyauth`.
**Note**! According to the URL specification, HTTP URLs can not contain a user
and password, so that style does not work when using curl via a proxy, even
though curl allows it at other times. When using a proxy, you _must_ use the
`-u` style for user and password.
@ -161,7 +160,7 @@ specified as:
curl --noproxy example.com -x my-proxy:888 http://www.example.com/
If the proxy is specified with `--proxy1.0` instead of `--proxy` or `-x`, then
curl uses HTTP/1.0 instead of HTTP/1.1 for any `CONNECT` attempts.
curl also supports SOCKS4 and SOCKS5 proxies with `--socks4` and `--socks5`.
@ -224,7 +223,7 @@ Upload data from a specified file, login with user and password:
curl -T uploadfile -u user:passwd ftp://ftp.example.com/myfile
Upload a local file to the remote site, and use the local filename at the
remote site too:
curl -T uploadfile -u user:passwd ftp://ftp.example.com/
@ -257,16 +256,16 @@ For other ways to do HTTP data upload, see the POST section below.
## Verbose / Debug
If curl fails where it is not supposed to, if the servers do not let you in,
if you cannot understand the responses: use the `-v` flag to get verbose
fetching. Curl outputs lots of info and what it sends and receives in order to
let the user see all client-server interaction (but it does not show you the
actual data).
curl -v ftp://ftp.example.com/
To get even more details and information on what curl does, try using the
`--trace` or `--trace-ascii` options with a given filename to log to, like
this:
curl --trace trace.txt www.haxx.se
@ -283,7 +282,7 @@ extensive.
For HTTP, you can get the header information (the same as `-I` would show)
shown before the data by using `-i`/`--include`. Curl understands the
`-D`/`--dump-header` option when getting files from both FTP and HTTP, and it
then stores the headers in the specified file.
Store the HTTP headers in a separate file (headers.txt in the example):
@ -347,20 +346,20 @@ multipart/form-data type. This latter type supports things like file upload.
`-F` accepts parameters like `-F "name=contents"`. If you want the contents to
be read from a file, use `@filename` as contents. When specifying a file, you
can also specify the file content type by appending `;type=<mime type>` to the
filename. You can also post the contents of several files in one field. For
example, the field name `coolfiles` is used to send three files, with
different content types using the following syntax:
curl -F "coolfiles=@fil1.gif;type=image/gif,fil2.txt,fil3.html"
http://www.example.com/postit.cgi
If the content-type is not specified, curl tries to guess from the file
extension (it only knows a few), or use the previously specified type (from an
earlier file if several files are specified in a list) or else it uses the
default type `application/octet-stream`.
Emulate a fill-in form with `-F`. Let's say you fill in three fields in a
form. One field is a filename to post, one field is your name and one
field is a file description. We want to post the file we have written named
`cooltext.txt`. To let curl do the posting of this data instead of your
favorite browser, you have to read the HTML source of the form page and find
@ -395,7 +394,7 @@ used on the command line. It is especially useful to fool or trick stupid
servers or CGI scripts that rely on that information being available or
contain certain data.
curl -e www.example.org http://www.example.com/
## User Agent
@ -475,11 +474,11 @@ non-existing file to trigger the cookie awareness like:
curl -L -b empty.txt www.example.com
The file to read cookies from must be formatted using plain HTTP headers OR as
Netscape's cookie file. Curl determines what kind it is based on the file
contents. In the above command, curl parses the header and stores the cookies
received from www.example.com. curl sends the stored cookies which match the
request to the server as it follows the location. The file `empty.txt` may be
a nonexistent file.
To read and write cookies from a Netscape cookie file, you can set both `-b`
and `-c` to use the same file:
@ -511,8 +510,8 @@ From left-to-right:
- `Curr.Speed` - the average transfer speed the last 5 seconds (the first
5 seconds of a transfer is based on less time of course.)
The `-#` option displays a totally different progress bar that does not need
much explanation!
## Speed Limit
@ -549,14 +548,14 @@ Or prevent curl from uploading data faster than 1 megabyte per second:
curl -T upload --limit-rate 1M ftp://uploads.example.com
When using the `--limit-rate` option, the transfer rate is regulated on a
per-second basis, which causes the total transfer speed to become lower than
the given number. Sometimes of course substantially lower, if your transfer
stalls during periods.
## Config File
Curl automatically tries to read the `.curlrc` file (or `_curlrc` file on
Microsoft Windows systems) from the user's home directory on startup.
The config file could be made up with normal command line switches, but you
can also specify the long options without the dashes to make it more
@ -592,16 +591,16 @@ URL by making a config file similar to:
url = "http://help.with.curl.example.com/curlhelp.html"
You can specify another config file to be read by using the `-K`/`--config`
flag. If you set config filename to `-` it reads the config from stdin, which
can be handy if you want to hide options from being visible in process tables
etc:
echo "user = user:passwd" | curl -K - http://that.secret.example.com
## Extra Headers
When using curl in your own programs, you may end up needing to pass on your
own custom headers when getting a webpage. You can do this by using the `-H`
flag.
Example, send the header `X-you-and-me: yes` to the server when getting a
@ -626,11 +625,11 @@ directory at your ftp site, do:
curl ftp://user:passwd@my.example.com/README
If you want the README file from the root directory of that same site, you
need to specify the absolute filename:
curl ftp://user:passwd@my.example.com//README
(I.e. with an extra slash in front of the filename.)
## SFTP and SCP and Path Names
@ -676,7 +675,7 @@ Download with `PORT` but use 192.168.0.10 as our IP address to use:
## Network Interface
Get a webpage from a server using a specified port for the interface:
curl --interface eth0:1 http://www.example.com/
@ -707,8 +706,8 @@ personal password:
curl -E /path/to/cert.pem:password https://secure.example.com/
If you neglect to specify the password on the command line, you are prompted
for the correct password before any data can be received.
Many older HTTPS servers have problems with specific SSL or TLS versions,
which newer versions of OpenSSL etc use, therefore it is sometimes useful to
@ -716,7 +715,7 @@ specify what TLS version curl should use.:
curl --tlsv1.0 https://secure.example.com/
Otherwise, curl attempts to use a sensible TLS default version.
## Resuming File Transfers
@ -783,11 +782,11 @@ Authentication support is still missing
## LDAP
If you have installed the OpenLDAP library, curl can take advantage of it and
offer `ldap://` support. On Windows, curl uses WinLDAP from Platform SDK by
default.
Default protocol version used by curl is LDAP version 3. Version 2 is used as
a fallback mechanism in case version 3 fails to connect.
LDAP is a complex thing and writing an LDAP query is not an easy
task. Familiarize yourself with the exact syntax description elsewhere. One
@ -804,14 +803,14 @@ You also can use authentication when accessing LDAP catalog:
curl -u user:passwd "ldap://ldap.example.com/o=frontec??sub?mail=*"
curl "ldap://user:passwd@ldap.example.com/o=frontec??sub?mail=*"
By default, if user and password are provided, OpenLDAP/WinLDAP uses basic
authentication. On Windows you can control this behavior by providing one of
`--basic`, `--ntlm` or `--digest` option in curl command line
curl --ntlm "ldap://user:passwd@ldap.example.com/o=frontec??sub?mail=*"
On Windows, if no user/password specified, auto-negotiation mechanism is used
with current logon credentials (SSPI/SPNEGO).
## Environment Variables
@ -824,17 +823,17 @@ with
ALL_PROXY
A comma-separated list of hostnames that should not go through any proxy is
set in (only an asterisk, `*` matches all hosts)
NO_PROXY
If the hostname matches one of these strings, or the host is within the domain
of one of these strings, transactions with that node are not done over the
proxy. When a domain is used, it needs to start with a period. A user can
specify that both www.example.com and foo.example.com should not use a proxy
by setting `NO_PROXY` to `.example.com`. By including the full name you can
exclude specific hostnames, so to make `www.example.com` not use a proxy but
still have `foo.example.com` do it, set `NO_PROXY` to `www.example.com`.
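For example, with a proxy set for everything else, this exempts all of
`example.com` (the proxy address is illustrative):

    ALL_PROXY=http://my-proxy:888 NO_PROXY=.example.com curl http://www.example.com/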
The usage of the `-x`/`--proxy` flag overrides the environment variables.
@ -845,7 +844,7 @@ Unix introduced the `.netrc` concept a long time ago. It is a way for a user
to specify name and password for commonly visited FTP sites in a file so that
you do not have to type them in each time you visit those sites. You realize
this is a big security risk if someone else gets hold of your passwords,
therefore most Unix programs do not read this file unless it is only readable
by yourself (curl does not care though).
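A minimal `.netrc` entry looks like this (hostname and credentials are
illustrative):

    machine ftp.example.com login myname password mysecret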
Curl supports `.netrc` files if told to (using the `-n`/`--netrc` and
@ -877,8 +876,8 @@ Then use curl in way similar to:
curl --krb private ftp://krb4site.example.com -u username:fakepwd
There is no use for a password on the `-u` switch, but a blank one makes curl
ask for one and you already entered the real password to `kinit`/`kauth`.
## TELNET
@ -888,8 +887,8 @@ command line similar to:
curl telnet://remote.example.com
Enter the data to pass to the server on stdin. The result is sent to stdout or
to the file you specify with `-o`.
You might want the `-N`/`--no-buffer` option to switch off the buffered output
for slow connections or similar.
@ -911,20 +910,20 @@ accordingly.
## Persistent Connections
Specifying multiple files on a single command line makes curl transfer all of
them, one after the other in the specified order.
libcurl attempts to use persistent connections for the transfers so that the
second transfer to the same host can use the same connection that was already
initiated and was left open in the previous transfer. This greatly decreases
connection time for all but the first transfer and it makes a far better use
of the network.
Note that curl cannot use persistent connections for transfers that are used
in subsequent curl invokes. Try to stuff as many URLs as possible on the same
command line if they are using the same host, as that makes the transfers
faster. If you use an HTTP proxy for file transfers, practically all transfers
are persistent.
## Multiple Transfers With A Single Command Line
@ -945,11 +944,10 @@ You can also upload multiple files in a similar fashion:
## IPv6
curl connects to a server with IPv6 when a host lookup returns an IPv6 address
and falls back to IPv4 if the connection fails. The `--ipv4` and `--ipv6`
options can specify which address to use when both are available. IPv6
addresses can also be specified directly in URLs using the syntax:
http://[2001:1890:1112:1::20]/overview.html
@ -24,4 +24,4 @@ Remaining limitations:
- Only QoS level 0 is implemented for publish
- No way to set retain flag for publish
- No TLS (mqtts) support
- Naive EAGAIN handling does not handle split messages
@ -24,26 +24,25 @@
AUTOMAKE_OPTIONS = foreign no-dependencies
# EXTRA_DIST breaks with $(abs_builddir) so build it using this variable
# but distribute it (using the relative file name) in the next variable
man_MANS = $(abs_builddir)/curl.1
noinst_man_MANS = curl.1 mk-ca-bundle.1
dist_man_MANS = curl-config.1
GENHTMLPAGES = curl.html curl-config.html mk-ca-bundle.html
PDFPAGES = curl.pdf curl-config.pdf mk-ca-bundle.pdf
MANDISTPAGES = curl.1.dist curl-config.1.dist
if BUILD_DOCS
# if we disable man page building, ignore these
MK_CA_DOCS = mk-ca-bundle.1
CURLCONF_DOCS = curl-config.1
endif
HTMLPAGES = $(GENHTMLPAGES)
man_MANS = curl-config.1
CURLPAGES = curl-config.md mk-ca-bundle.md
# Build targets in this file (.) before cmdline-opts to ensure that
# the curl.1 rule below runs first
SUBDIRS = . cmdline-opts
DIST_SUBDIRS = $(SUBDIRS) examples libcurl
SUBDIRS = . cmdline-opts libcurl
DIST_SUBDIRS = $(SUBDIRS) examples
CLEANFILES = $(GENHTMLPAGES) $(PDFPAGES) $(MANDISTPAGES) curl.1
if BUILD_DOCS
CLEANFILES = mk-ca-bundle.1 curl-config.1
endif
EXTRA_DIST = \
$(noinst_man_MANS) \
$(CURLPAGES) \
$(CURLCONF_DOCS) \
ALTSVC.md \
BINDINGS.md \
BUFREF.md \
@ -55,10 +54,14 @@ EXTRA_DIST = \
CODE_OF_CONDUCT.md \
CODE_REVIEW.md \
CODE_STYLE.md \
CLIENT-READERS.md \
CLIENT-WRITERS.md \
CONNECTION-FILTERS.md \
CONTRIBUTE.md \
CURL-DISABLE.md \
CURLDOWN.md \
DEPRECATE.md \
DISTROS.md \
DYNBUF.md \
EARLY-RELEASE.md \
EXPERIMENTAL.md \
@ -73,7 +76,7 @@ EXTRA_DIST = \
HTTP3.md \
HYPER.md \
INSTALL \
INSTALL.cmake \
INSTALL-CMAKE.md \
INSTALL.md \
INTERNALS.md \
KNOWN_BUGS \
@ -87,6 +90,7 @@ EXTRA_DIST = \
RUSTLS.md \
ROADMAP.md \
SECURITY-ADVISORY.md \
SPONSORS.md \
SSL-PROBLEMS.md \
SSLCERTS.md \
THANKS \
@ -97,39 +101,23 @@ EXTRA_DIST = \
VULN-DISCLOSURE-POLICY.md \
WEBSOCKET.md
MAN2HTML= roffit $< >$@
CD2NROFF = $(top_srcdir)/scripts/cd2nroff $< >$@
SUFFIXES = .1 .html .pdf
CD2 = $(CD2_$(V))
CD2_0 = @echo " RENDER " $@;
CD2_1 =
CD2_ = $(CD2_0)
# $(abs_builddir) is to disable VPATH when searching for this file, which
# would otherwise find the copy in $(srcdir) which breaks the $(HUGE)
# rule in src/Makefile.am in out-of-tree builds that references the file in the
# build directory.
#
# First, seed the used copy of curl.1 with the prebuilt copy (in an out-of-tree
# build), then run make recursively to rebuild it only if its dependencies
# have changed.
$(abs_builddir)/curl.1:
if test "$(top_builddir)x" != "$(top_srcdir)x" -a -e "$(srcdir)/curl.1"; then \
$(INSTALL_DATA) "$(srcdir)/curl.1" $@ \
&& touch -r "$(srcdir)/curl.1" $@; fi
cd cmdline-opts && $(MAKE)
SUFFIXES = .1 .md
html: $(HTMLPAGES)
cd libcurl && $(MAKE) html
all: $(MK_CA_DOCS)
pdf: $(PDFPAGES)
cd libcurl && $(MAKE) pdf
.md.1:
$(CD2)$(CD2NROFF)
.1.html:
$(MAN2HTML)
curl-config.1: curl-config.md
.1.pdf:
@(foo=`echo $@ | sed -e 's/\.[0-9]$$//g'`; \
groff -Tps -man $< >$$foo.ps; \
ps2pdf $$foo.ps $@; \
rm $$foo.ps; \
echo "converted $< to $@")
mk-ca-bundle.1: mk-ca-bundle.md
distclean:
rm -f $(CLEANFILES)
@ -7,16 +7,16 @@ protocols and it is the Internet transfer machine for the world.
In the curl project we love protocols and we love supporting many protocols
and doing it well.
How do you proceed to add a new protocol and what are the requirements?
## No fixed set of requirements
This document is an attempt to describe things to consider. There is no
checklist of the twenty-seven things you need to cross off. We view the entire
effort as a whole and then judge if it seems to be the right thing - for now.
The more things that look right, fit our patterns and are done in ways that
align with our thinking, the better are the chances that we agree that
supporting this protocol is a grand idea.
## Mutual benefit is preferred
@ -93,18 +93,18 @@ protocol - but it might require a bit of an effort to make it happen.
We cannot assume that users are particularly familiar with details and
peculiarities of the protocol. It needs documentation.
Maybe it even needs some internal documentation so that the developers who try
to debug something five years from now can figure out functionality a little
easier!
The protocol specification itself should be freely available without requiring
a non-disclosure agreement or similar.
## Do not compare
We are constantly raising the bar and we are constantly improving the project.
A lot of things we did in the past would not be acceptable if done today.
Therefore, you might be tempted to use shortcuts or "hacks" you can spot
other - existing - protocol implementations have used, but there is nothing to
gain from that. The bar has been raised. Former "cheats" are not tolerated
anymore.
@ -5,9 +5,9 @@ parallel.
## -Z, --parallel
When this command line option is used, curl performs the transfers given to it
at the same time. It does up to `--parallel-max` concurrent transfers, with a
default value of 50.
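For example, this sketch fetches 100 files with up to 50 transfers running at
any one time (the URL is illustrative):

    curl -Z -O "https://example.com/file[1-100].jpg"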
## Progress meter
@ -38,9 +38,9 @@ Example:
## Behavior differences
Connections are shared fine between different easy handles, but the
"authentication contexts" are not. For example, doing HTTP Digest auth with
one handle for a particular transfer and then continuing with another handle
that reuses the same connection, the second handle cannot send the necessary
Authorization header at once since the context is only kept in the original
easy handle.
@ -2,9 +2,9 @@
# Documentation
You find a mix of various documentation in this directory and subdirectories,
using several different formats. Some of them are not ideal for reading
directly in your browser.
If you would rather see the rendered version of the documentation, check out the
curl website's [documentation section](https://curl.se/docs/) for
@ -17,9 +17,8 @@ in the source code repo
the tag is GPG signed (using -s).
- run `./maketgz 7.34.0` to build the release tarballs. It is important that
you run this on a machine with the correct set of autotools etc installed as
this is what is shipped and used by most users on \*nix like systems.
- push the git commits and the new tag
@ -107,11 +106,11 @@ Coming dates
Based on the description above, here are some planned release dates (at the
time of this writing):
- May 22, 2024
- July 17, 2024
- September 11, 2024
- November 6, 2024
- January 8, 2025
- March 5, 2025
- April 30, 2025
- June 25, 2025
@ -3,7 +3,7 @@
[Rustls is a TLS backend written in Rust](https://docs.rs/rustls/). Curl can
be built to use it as an alternative to OpenSSL or other TLS backends. We use
the [rustls-ffi C bindings](https://github.com/rustls/rustls-ffi/). This
version of curl depends on version v0.12.0 of rustls-ffi.
# Building with rustls
@ -12,7 +12,7 @@ First, [install Rust](https://rustup.rs/).
Next, check out, build, and install the appropriate version of rustls-ffi:
% cargo install cbindgen
% git clone https://github.com/rustls/rustls-ffi -b v0.12.0
% cd rustls-ffi
% make
% make DESTDIR=${HOME}/rustls-ffi-built/ install