-rw-r--r--  autobuild.xml                            117
-rw-r--r--  indra/cmake/CMakeLists.txt                 1
-rw-r--r--  indra/cmake/Copy3rdPartyLibs.cmake         4
-rw-r--r--  indra/cmake/DeploySharedLibs.cmake         5
-rw-r--r--  indra/cmake/GetPrerequisites_2_8.cmake   786
-rw-r--r--  indra/cmake/NGHTTP2.cmake                 20
-rw-r--r--  indra/llcorehttp/CMakeLists.txt            4
-rw-r--r--  indra/llcorehttp/_httplibcurl.cpp         33
-rw-r--r--  indra/llcorehttp/_httpoprequest.cpp      157
-rw-r--r--  indra/llmessage/CMakeLists.txt             4
-rw-r--r--  indra/llplugin/CMakeLists.txt              2
-rw-r--r--  indra/newview/CMakeLists.txt               1
-rw-r--r--  indra/newview/lltexturefetch.cpp           2
-rw-r--r--  indra/newview/llviewerregion.cpp          22
-rwxr-xr-x  indra/newview/viewer_manifest.py          24
-rwxr-xr-x  scripts/packages-formatter.py             83
16 files changed, 313 insertions(+), 952 deletions(-)
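
Note on the diff below: the headline change is HTTP/2 support. curl is bumped to 7.54.1 and built against a new nghttp2 package, openssl moves to 1.0.2l, and _httpoprequest.cpp asks libcurl for HTTP/2 on pipelined requests -- for test purposes only when the VIEWERASSET environment variable is set. The following is a minimal standalone sketch of that request path, assuming a libcurl built with nghttp2; the URL and the reuse of the VIEWERASSET gate are illustrative, not the viewer's code.

#include <curl/curl.h>
#include <cstdlib>

int main()
{
    curl_global_init(CURL_GLOBAL_ALL);
    CURL * handle = curl_easy_init();

    curl_easy_setopt(handle, CURLOPT_URL, "https://example.org/");

    // Mirror the test gate used in the diff: only ask for HTTP/2 when the
    // override environment variable is present.
    if (std::getenv("VIEWERASSET"))
    {
        // "Try" HTTP/2: libcurl falls back to HTTP/1.1 if the server, or a
        // build without nghttp2, cannot negotiate it.
        curl_easy_setopt(handle, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
    }

    CURLcode code = curl_easy_perform(handle);
    int rc = (code == CURLE_OK) ? 0 : 1;

    curl_easy_cleanup(handle);
    curl_global_cleanup();
    return rc;
}
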
diff --git a/autobuild.xml b/autobuild.xml index 11214d3485..4a51588fca 100644 --- a/autobuild.xml +++ b/autobuild.xml @@ -374,9 +374,9 @@ <key>archive</key> <map> <key>hash</key> - <string>99fbc15f514be77c36280f300d257d5a</string> + <string>f426c56252c70fe38fcb2251f7c1d762</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1077/2495/curl-7.47.0.501064-darwin64-501064.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9265/41615/curl-7.54.1.509254-darwin64-509254.tar.bz2</string> </map> <key>name</key> <string>darwin64</string> @@ -398,9 +398,9 @@ <key>archive</key> <map> <key>hash</key> - <string>e5d1b0439235f5f3829662a45e566743</string> + <string>630a2ddf43bba6e5b6e171dc68921dcb</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1075/2484/curl-7.47.0.501064-linux64-501064.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/8663/36142/curl-7.54.1.508652-linux64-508652.tar.bz2</string> </map> <key>name</key> <string>linux64</string> @@ -410,11 +410,11 @@ <key>archive</key> <map> <key>hash</key> - <string>bda9f237d3e8238440bc5bedc2657547</string> + <string>4c7a960e1ee518acceac6a0c65495800</string> <key>hash_algorithm</key> <string>md5</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1078/2487/curl-7.47.0.501064-windows-501064.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9268/41606/curl-7.54.1.509254-windows-509254.tar.bz2</string> </map> <key>name</key> <string>windows</string> @@ -424,16 +424,16 @@ <key>archive</key> <map> <key>hash</key> - <string>f7fa42c7157cd5007283ae7a5d97393d</string> + <string>32df7cce1658ccec893fb46cd476c024</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1079/2490/curl-7.47.0.501064-windows64-501064.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9267/41607/curl-7.54.1.509254-windows64-509254.tar.bz2</string> </map> <key>name</key> <string>windows64</string> </map> </map> <key>version</key> - <string>7.47.0.501064</string> + <string>7.54.1.509254</string> </map> <key>db</key> <map> @@ -2343,6 +2343,87 @@ <key>version</key> <string>7.11.1.297294</string> </map> + <key>nghttp2</key> + <map> + <key>copyright</key> + <string>Copyright (c) 2012, 2014, 2015, 2016 Tatsuhiro Tsujikawa +Copyright (c) 2012, 2014, 2015, 2016 nghttp2 contributors</string> + <key>description</key> + <string>Library providing HTTP 2 support for libcurl</string> + <key>license</key> + <string>MIT</string> + <key>license_file</key> + <string>LICENSES/nghttp2.txt</string> + <key>name</key> + <string>nghttp2</string> + <key>platforms</key> + <map> + <key>darwin64</key> + <map> + <key>archive</key> + <map> + <key>hash</key> + <string>f51bcd9245ed4e4ca1fa250ba9b112ce</string> + <key>url</key> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9259/41575/nghttp2-1.25.0.509246-darwin64-509246.tar.bz2</string> + </map> + <key>name</key> + <string>darwin64</string> + </map> + <key>linux</key> + <map> + <key>archive</key> + <map> + <key>hash</key> + <string>079c1a1bdb3ce1cda8ce3d7f75eeced3</string> + <key>url</key> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9258/41585/nghttp2-1.25.0.509246-linux-509246.tar.bz2</string> + </map> + <key>name</key> + <string>linux</string> + </map> + <key>linux64</key> + <map> + <key>archive</key> + <map> + 
<key>hash</key> + <string>c3c5ff7d2f7ac1143ef8d888192d4a53</string> + <key>url</key> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9257/41579/nghttp2-1.25.0.509246-linux64-509246.tar.bz2</string> + </map> + <key>name</key> + <string>linux64</string> + </map> + <key>windows</key> + <map> + <key>archive</key> + <map> + <key>hash</key> + <string>8367d6743356ad637e61335ee319f7a7</string> + <key>url</key> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9261/41597/nghttp2-1.25.0.509246-windows-509246.tar.bz2</string> + </map> + <key>name</key> + <string>windows</string> + </map> + <key>windows64</key> + <map> + <key>archive</key> + <map> + <key>hash</key> + <string>3267acca5dbfe6b8770deeebd548ee6a</string> + <key>url</key> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/9260/41591/nghttp2-1.25.0.509246-windows64-509246.tar.bz2</string> + </map> + <key>name</key> + <string>windows64</string> + </map> + </map> + <key>source_type</key> + <string>hg</string> + <key>version</key> + <string>1.25.0.509246</string> + </map> <key>nvapi</key> <map> <key>copyright</key> @@ -2696,9 +2777,9 @@ <key>archive</key> <map> <key>hash</key> - <string>7011482b07b32b375ecc3e891c37f469</string> + <string>6c28cce95e3576daf66252b07d9d151f</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1063/2462/openssl-1.0.1u.501051-darwin64-501051.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/8340/33489/openssl-1.0.2l.508328-darwin64-508328.tar.bz2</string> </map> <key>name</key> <string>darwin64</string> @@ -2720,9 +2801,9 @@ <key>archive</key> <map> <key>hash</key> - <string>50bb9aca605a599cb1a46da3c45f78ac</string> + <string>d50ccfbf0c1d249392919e2c46ad8d5c</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1062/2473/openssl-1.0.1u.501051-linux64-501051.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/8339/33495/openssl-1.0.2l.508328-linux64-508328.tar.bz2</string> </map> <key>name</key> <string>linux64</string> @@ -2732,9 +2813,9 @@ <key>archive</key> <map> <key>hash</key> - <string>7d9801f9ce049ffd30fbe06b6bac4c91</string> + <string>ffdb11a4c7aff72086c01555f931c918</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1064/2465/openssl-1.0.1u.501051-windows-501051.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/8341/33481/openssl-1.0.2l.508328-windows-508328.tar.bz2</string> </map> <key>name</key> <string>windows</string> @@ -2744,16 +2825,16 @@ <key>archive</key> <map> <key>hash</key> - <string>c3e404a9cc51d64c71ec13dbd44b8409</string> + <string>d875fc7d1f3a7bd9f85cfde05d9ae3dc</string> <key>url</key> - <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/1065/2468/openssl-1.0.1u.501051-windows64-501051.tar.bz2</string> + <string>http://automated-builds-secondlife-com.s3.amazonaws.com/ct2/8342/33480/openssl-1.0.2l.508328-windows64-508328.tar.bz2</string> </map> <key>name</key> <string>windows64</string> </map> </map> <key>version</key> - <string>1.0.1u.501051</string> + <string>1.0.2l.508328</string> </map> <key>pcre</key> <map> diff --git a/indra/cmake/CMakeLists.txt b/indra/cmake/CMakeLists.txt index 100645611e..4a3ebe4835 100644 --- a/indra/cmake/CMakeLists.txt +++ b/indra/cmake/CMakeLists.txt @@ -44,7 +44,6 @@ set(cmake_SOURCE_FILES GLH.cmake GLOD.cmake ## GStreamer010Plugin.cmake - 
GetPrerequisites_2_8.cmake GoogleBreakpad.cmake GoogleMock.cmake Havok.cmake diff --git a/indra/cmake/Copy3rdPartyLibs.cmake b/indra/cmake/Copy3rdPartyLibs.cmake index 5ccbe7d1d8..ce9f005e9f 100644 --- a/indra/cmake/Copy3rdPartyLibs.cmake +++ b/indra/cmake/Copy3rdPartyLibs.cmake @@ -37,6 +37,7 @@ if(WINDOWS) libapriconv-1.dll ssleay32.dll libeay32.dll + nghttp2.dll glod.dll libhunspell.dll ) @@ -169,6 +170,9 @@ elseif(DARWIN) ${EXPAT_COPY} libGLOD.dylib libndofdev.dylib + libnghttp2.dylib + libnghttp2.14.dylib + libnghttp2.14.14.0.dylib ) if (FMODEX) diff --git a/indra/cmake/DeploySharedLibs.cmake b/indra/cmake/DeploySharedLibs.cmake index e57fd5eee3..9d6cf0cb35 100644 --- a/indra/cmake/DeploySharedLibs.cmake +++ b/indra/cmake/DeploySharedLibs.cmake @@ -7,10 +7,7 @@ # SEARCH_DIRS= The full paths to dirs to search for dependencies. # DST_PATH= The full path where the dependecies will be copied. -# *FIX:Mani - I pulled in the CMake 2.8 GetPrerequisites.cmake script here, because it works on windows where 2.6 did not. -# Once we have officially upgraded to 2.8 we can just use that version of GetPrerequisites.cmake. -get_filename_component(current_dir ${CMAKE_CURRENT_LIST_FILE} PATH) -include(${current_dir}/GetPrerequisites_2_8.cmake) +include(GetPrerequisites) message(STATUS "Getting recursive dependencies for file: ${BIN_NAME}") diff --git a/indra/cmake/GetPrerequisites_2_8.cmake b/indra/cmake/GetPrerequisites_2_8.cmake deleted file mode 100644 index 05ec1539ba..0000000000 --- a/indra/cmake/GetPrerequisites_2_8.cmake +++ /dev/null @@ -1,786 +0,0 @@ -# GetPrerequisites.cmake -# -# This script provides functions to list the .dll, .dylib or .so files that an -# executable or shared library file depends on. (Its prerequisites.) -# -# It uses various tools to obtain the list of required shared library files: -# dumpbin (Windows) -# ldd (Linux/Unix) -# otool (Mac OSX) -# -# The following functions are provided by this script: -# gp_append_unique -# is_file_executable -# gp_item_default_embedded_path -# (projects can override with gp_item_default_embedded_path_override) -# gp_resolve_item -# (projects can override with gp_resolve_item_override) -# gp_resolved_file_type -# gp_file_type -# get_prerequisites -# list_prerequisites -# list_prerequisites_by_glob -# -# Requires CMake 2.6 or greater because it uses function, break, return and -# PARENT_SCOPE. - -#============================================================================= -# Copyright 2008-2009 Kitware, Inc. -# -# Distributed under the OSI-approved BSD License (the "License"); -# see accompanying file Copyright.txt for details. -# -# This software is distributed WITHOUT ANY WARRANTY; without even the -# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the License for more information. -#============================================================================= -# (To distributed this file outside of CMake, substitute the full -# License text for the above reference.) - -# gp_append_unique list_var value -# -# Append value to the list variable ${list_var} only if the value is not -# already in the list. 
-# -function(gp_append_unique list_var value) - set(contains 0) - - foreach(item ${${list_var}}) - if("${item}" STREQUAL "${value}") - set(contains 1) - break() - endif("${item}" STREQUAL "${value}") - endforeach(item) - - if(NOT contains) - set(${list_var} ${${list_var}} "${value}" PARENT_SCOPE) - endif(NOT contains) -endfunction(gp_append_unique) - - -# is_file_executable file result_var -# -# Return 1 in ${result_var} if ${file} is a binary executable. -# -# Return 0 in ${result_var} otherwise. -# -function(is_file_executable file result_var) - # - # A file is not executable until proven otherwise: - # - set(${result_var} 0 PARENT_SCOPE) - - get_filename_component(file_full "${file}" ABSOLUTE) - string(TOLOWER "${file_full}" file_full_lower) - - # If file name ends in .exe on Windows, *assume* executable: - # - if(WIN32) - if("${file_full_lower}" MATCHES "\\.exe$") - set(${result_var} 1 PARENT_SCOPE) - return() - endif("${file_full_lower}" MATCHES "\\.exe$") - - # A clause could be added here that uses output or return value of dumpbin - # to determine ${result_var}. In 99%+? practical cases, the exe name - # match will be sufficient... - # - endif(WIN32) - - # Use the information returned from the Unix shell command "file" to - # determine if ${file_full} should be considered an executable file... - # - # If the file command's output contains "executable" and does *not* contain - # "text" then it is likely an executable suitable for prerequisite analysis - # via the get_prerequisites macro. - # - if(UNIX) - if(NOT file_cmd) - find_program(file_cmd "file") - endif(NOT file_cmd) - - if(file_cmd) - execute_process(COMMAND "${file_cmd}" "${file_full}" - OUTPUT_VARIABLE file_ov - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - - # Replace the name of the file in the output with a placeholder token - # (the string " _file_full_ ") so that just in case the path name of - # the file contains the word "text" or "executable" we are not fooled - # into thinking "the wrong thing" because the file name matches the - # other 'file' command output we are looking for... - # - string(REPLACE "${file_full}" " _file_full_ " file_ov "${file_ov}") - string(TOLOWER "${file_ov}" file_ov) - - #message(STATUS "file_ov='${file_ov}'") - if("${file_ov}" MATCHES "executable") - #message(STATUS "executable!") - if("${file_ov}" MATCHES "text") - #message(STATUS "but text, so *not* a binary executable!") - else("${file_ov}" MATCHES "text") - set(${result_var} 1 PARENT_SCOPE) - return() - endif("${file_ov}" MATCHES "text") - endif("${file_ov}" MATCHES "executable") - else(file_cmd) - message(STATUS "warning: No 'file' command, skipping execute_process...") - endif(file_cmd) - endif(UNIX) -endfunction(is_file_executable) - - -# gp_item_default_embedded_path item default_embedded_path_var -# -# Return the path that others should refer to the item by when the item -# is embedded inside a bundle. -# -# Override on a per-project basis by providing a project-specific -# gp_item_default_embedded_path_override function. -# -function(gp_item_default_embedded_path item default_embedded_path_var) - - # On Windows and Linux, "embed" prerequisites in the same directory - # as the executable by default: - # - set(path "@executable_path") - set(overridden 0) - - # On the Mac, relative to the executable depending on the type - # of the thing we are embedding: - # - if(APPLE) - # - # The assumption here is that all executables in the bundle will be - # in same-level-directories inside the bundle. 
The parent directory - # of an executable inside the bundle should be MacOS or a sibling of - # MacOS and all embedded paths returned from here will begin with - # "@executable_path/../" and will work from all executables in all - # such same-level-directories inside the bundle. - # - - # By default, embed things right next to the main bundle executable: - # - set(path "@executable_path/../../Contents/MacOS") - - # Embed .dylibs right next to the main bundle executable: - # - if(item MATCHES "\\.dylib$") - set(path "@executable_path/../MacOS") - set(overridden 1) - endif(item MATCHES "\\.dylib$") - - # Embed frameworks in the embedded "Frameworks" directory (sibling of MacOS): - # - if(NOT overridden) - if(item MATCHES "[^/]+\\.framework/") - set(path "@executable_path/../Frameworks") - set(overridden 1) - endif(item MATCHES "[^/]+\\.framework/") - endif(NOT overridden) - endif() - - # Provide a hook so that projects can override the default embedded location - # of any given library by whatever logic they choose: - # - if(COMMAND gp_item_default_embedded_path_override) - gp_item_default_embedded_path_override("${item}" path) - endif(COMMAND gp_item_default_embedded_path_override) - - set(${default_embedded_path_var} "${path}" PARENT_SCOPE) -endfunction(gp_item_default_embedded_path) - - -# gp_resolve_item context item exepath dirs resolved_item_var -# -# Resolve an item into an existing full path file. -# -# Override on a per-project basis by providing a project-specific -# gp_resolve_item_override function. -# -function(gp_resolve_item context item exepath dirs resolved_item_var) - set(resolved 0) - set(resolved_item "${item}") - - # Is it already resolved? - # - if(EXISTS "${resolved_item}") - set(resolved 1) - endif(EXISTS "${resolved_item}") - - if(NOT resolved) - if(item MATCHES "@executable_path") - # - # @executable_path references are assumed relative to exepath - # - string(REPLACE "@executable_path" "${exepath}" ri "${item}") - get_filename_component(ri "${ri}" ABSOLUTE) - - if(EXISTS "${ri}") - #message(STATUS "info: embedded item exists (${ri})") - set(resolved 1) - set(resolved_item "${ri}") - else(EXISTS "${ri}") - message(STATUS "warning: embedded item does not exist '${ri}'") - endif(EXISTS "${ri}") - endif(item MATCHES "@executable_path") - endif(NOT resolved) - - if(NOT resolved) - if(item MATCHES "@loader_path") - # - # @loader_path references are assumed relative to the - # PATH of the given "context" (presumably another library) - # - get_filename_component(contextpath "${context}" PATH) - string(REPLACE "@loader_path" "${contextpath}" ri "${item}") - get_filename_component(ri "${ri}" ABSOLUTE) - - if(EXISTS "${ri}") - #message(STATUS "info: embedded item exists (${ri})") - set(resolved 1) - set(resolved_item "${ri}") - else(EXISTS "${ri}") - message(STATUS "warning: embedded item does not exist '${ri}'") - endif(EXISTS "${ri}") - endif(item MATCHES "@loader_path") - endif(NOT resolved) - - if(NOT resolved) - set(ri "ri-NOTFOUND") - find_file(ri "${item}" ${exepath} ${dirs} NO_DEFAULT_PATH) - find_file(ri "${item}" ${exepath} ${dirs} /usr/lib) - if(ri) - #message(STATUS "info: 'find_file' in exepath/dirs (${ri})") - set(resolved 1) - set(resolved_item "${ri}") - set(ri "ri-NOTFOUND") - endif(ri) - endif(NOT resolved) - - if(NOT resolved) - if(item MATCHES "[^/]+\\.framework/") - set(fw "fw-NOTFOUND") - find_file(fw "${item}" - "~/Library/Frameworks" - "/Library/Frameworks" - "/System/Library/Frameworks" - ) - if(fw) - #message(STATUS "info: 'find_file' found framework 
(${fw})") - set(resolved 1) - set(resolved_item "${fw}") - set(fw "fw-NOTFOUND") - endif(fw) - endif(item MATCHES "[^/]+\\.framework/") - endif(NOT resolved) - - # Using find_program on Windows will find dll files that are in the PATH. - # (Converting simple file names into full path names if found.) - # - if(WIN32) - if(NOT resolved) - set(ri "ri-NOTFOUND") - find_program(ri "${item}" PATHS "${exepath};${dirs}" NO_DEFAULT_PATH) - find_program(ri "${item}" PATHS "${exepath};${dirs}") - if(ri) - #message(STATUS "info: 'find_program' in exepath/dirs (${ri})") - set(resolved 1) - set(resolved_item "${ri}") - set(ri "ri-NOTFOUND") - endif(ri) - endif(NOT resolved) - endif(WIN32) - - # Provide a hook so that projects can override item resolution - # by whatever logic they choose: - # - if(COMMAND gp_resolve_item_override) - gp_resolve_item_override("${context}" "${item}" "${exepath}" "${dirs}" resolved_item resolved) - endif(COMMAND gp_resolve_item_override) - - if(NOT resolved) - message(STATUS " -warning: cannot resolve item '${item}' - - possible problems: - need more directories? - need to use InstallRequiredSystemLibraries? - run in install tree instead of build tree? -") -# message(STATUS " -#****************************************************************************** -#warning: cannot resolve item '${item}' -# -# possible problems: -# need more directories? -# need to use InstallRequiredSystemLibraries? -# run in install tree instead of build tree? -# -# context='${context}' -# item='${item}' -# exepath='${exepath}' -# dirs='${dirs}' -# resolved_item_var='${resolved_item_var}' -#****************************************************************************** -#") - endif(NOT resolved) - - set(${resolved_item_var} "${resolved_item}" PARENT_SCOPE) -endfunction(gp_resolve_item) - - -# gp_resolved_file_type original_file file exepath dirs type_var -# -# Return the type of ${file} with respect to ${original_file}. String -# describing type of prerequisite is returned in variable named ${type_var}. -# -# Use ${exepath} and ${dirs} if necessary to resolve non-absolute ${file} -# values -- but only for non-embedded items. 
-# -# Possible types are: -# system -# local -# embedded -# other -# -function(gp_resolved_file_type original_file file exepath dirs type_var) - #message(STATUS "**") - - if(NOT IS_ABSOLUTE "${original_file}") - message(STATUS "warning: gp_resolved_file_type expects absolute full path for first arg original_file") - endif() - - set(is_embedded 0) - set(is_local 0) - set(is_system 0) - - set(resolved_file "${file}") - - if("${file}" MATCHES "^@(executable|loader)_path") - set(is_embedded 1) - endif() - - if(NOT is_embedded) - if(NOT IS_ABSOLUTE "${file}") - gp_resolve_item("${original_file}" "${file}" "${exepath}" "${dirs}" resolved_file) - endif() - - string(TOLOWER "${original_file}" original_lower) - string(TOLOWER "${resolved_file}" lower) - - if(UNIX) - if(resolved_file MATCHES "^(/lib/|/lib32/|/lib64/|/usr/lib/|/usr/lib32/|/usr/lib64/|/usr/X11R6/)") - set(is_system 1) - endif() - endif() - - if(APPLE) - if(resolved_file MATCHES "^(/System/Library/|/usr/lib/)") - set(is_system 1) - endif() - endif() - - if(WIN32) - string(TOLOWER "$ENV{SystemRoot}" sysroot) - string(REGEX REPLACE "\\\\" "/" sysroot "${sysroot}") - - string(TOLOWER "$ENV{windir}" windir) - string(REGEX REPLACE "\\\\" "/" windir "${windir}") - - if(lower MATCHES "^(${sysroot}/system|${windir}/system|${sysroot}/syswow|${windir}/syswow|(.*/)*msvc[^/]+dll)") - set(is_system 1) - endif() - endif() - - if(NOT is_system) - get_filename_component(original_path "${original_lower}" PATH) - get_filename_component(path "${lower}" PATH) - if("${original_path}" STREQUAL "${path}") - set(is_local 1) - endif() - endif() - endif() - - # Return type string based on computed booleans: - # - set(type "other") - - if(is_system) - set(type "system") - elseif(is_embedded) - set(type "embedded") - elseif(is_local) - set(type "local") - endif() - - #message(STATUS "gp_resolved_file_type: '${file}' '${resolved_file}'") - #message(STATUS " type: '${type}'") - - if(NOT is_embedded) - if(NOT IS_ABSOLUTE "${resolved_file}") - if(lower MATCHES "^msvc[^/]+dll" AND is_system) - message(STATUS "info: non-absolute msvc file '${file}' returning type '${type}'") - else() - message(STATUS "warning: gp_resolved_file_type non-absolute file '${file}' returning type '${type}' -- possibly incorrect") - endif() - endif() - endif() - - set(${type_var} "${type}" PARENT_SCOPE) - - #message(STATUS "**") -endfunction() - - -# gp_file_type original_file file type_var -# -# Return the type of ${file} with respect to ${original_file}. String -# describing type of prerequisite is returned in variable named ${type_var}. -# -# Possible types are: -# system -# local -# embedded -# other -# -function(gp_file_type original_file file type_var) - if(NOT IS_ABSOLUTE "${original_file}") - message(STATUS "warning: gp_file_type expects absolute full path for first arg original_file") - endif() - - get_filename_component(exepath "${original_file}" PATH) - - set(type "") - gp_resolved_file_type("${original_file}" "${file}" "${exepath}" "" type) - - set(${type_var} "${type}" PARENT_SCOPE) -endfunction(gp_file_type) - - -# get_prerequisites target prerequisites_var exclude_system recurse dirs -# -# Get the list of shared library files required by ${target}. The list in -# the variable named ${prerequisites_var} should be empty on first entry to -# this function. On exit, ${prerequisites_var} will contain the list of -# required shared library files. 
-# -# target is the full path to an executable file -# -# prerequisites_var is the name of a CMake variable to contain the results -# -# exclude_system is 0 or 1: 0 to include "system" prerequisites , 1 to -# exclude them -# -# recurse is 0 or 1: 0 for direct prerequisites only, 1 for all prerequisites -# recursively -# -# exepath is the path to the top level executable used for @executable_path -# replacment on the Mac -# -# dirs is a list of paths where libraries might be found: these paths are -# searched first when a target without any path info is given. Then standard -# system locations are also searched: PATH, Framework locations, /usr/lib... -# -function(get_prerequisites target prerequisites_var exclude_system recurse exepath dirs) - set(verbose 0) - set(eol_char "E") - - if(NOT IS_ABSOLUTE "${target}") - message("warning: target '${target}' is not absolute...") - endif(NOT IS_ABSOLUTE "${target}") - - if(NOT EXISTS "${target}") - message("warning: target '${target}' does not exist...") - endif(NOT EXISTS "${target}") - - # <setup-gp_tool-vars> - # - # Try to choose the right tool by default. Caller can set gp_tool prior to - # calling this function to force using a different tool. - # - if("${gp_tool}" STREQUAL "") - set(gp_tool "ldd") - if(APPLE) - set(gp_tool "otool") - endif(APPLE) - if(WIN32) - set(gp_tool "dumpbin") - endif(WIN32) - endif("${gp_tool}" STREQUAL "") - - set(gp_tool_known 0) - - if("${gp_tool}" STREQUAL "ldd") - set(gp_cmd_args "") - set(gp_regex "^[\t ]*[^\t ]+ => ([^\t ]+).*${eol_char}$") - set(gp_regex_cmp_count 1) - set(gp_tool_known 1) - endif("${gp_tool}" STREQUAL "ldd") - - if("${gp_tool}" STREQUAL "otool") - set(gp_cmd_args "-L") - set(gp_regex "^\t([^\t]+) \\(compatibility version ([0-9]+.[0-9]+.[0-9]+), current version ([0-9]+.[0-9]+.[0-9]+)\\)${eol_char}$") - set(gp_regex_cmp_count 3) - set(gp_tool_known 1) - endif("${gp_tool}" STREQUAL "otool") - - if("${gp_tool}" STREQUAL "dumpbin") - set(gp_cmd_args "/dependents") - set(gp_regex "^ ([^ ].*[Dd][Ll][Ll])${eol_char}$") - set(gp_regex_cmp_count 1) - set(gp_tool_known 1) - set(ENV{VS_UNICODE_OUTPUT} "") # Block extra output from inside VS IDE. - endif("${gp_tool}" STREQUAL "dumpbin") - - if(NOT gp_tool_known) - message(STATUS "warning: gp_tool='${gp_tool}' is an unknown tool...") - message(STATUS "CMake function get_prerequisites needs more code to handle '${gp_tool}'") - message(STATUS "Valid gp_tool values are dumpbin, ldd and otool.") - return() - endif(NOT gp_tool_known) - - set(gp_cmd_paths ${gp_cmd_paths} - "C:/Program Files/Microsoft Visual Studio 9.0/VC/bin" - "C:/Program Files (x86)/Microsoft Visual Studio 9.0/VC/bin" - "C:/Program Files/Microsoft Visual Studio 8/VC/BIN" - "C:/Program Files (x86)/Microsoft Visual Studio 8/VC/BIN" - "C:/Program Files/Microsoft Visual Studio .NET 2003/VC7/BIN" - "C:/Program Files (x86)/Microsoft Visual Studio .NET 2003/VC7/BIN" - "/usr/local/bin" - "/usr/bin" - ) - - find_program(gp_cmd ${gp_tool} PATHS ${gp_cmd_paths}) - - if(NOT gp_cmd) - message(STATUS "warning: could not find '${gp_tool}' - cannot analyze prerequisites...") - return() - endif(NOT gp_cmd) - - if("${gp_tool}" STREQUAL "dumpbin") - # When running dumpbin, it also needs the "Common7/IDE" directory in the - # PATH. It will already be in the PATH if being run from a Visual Studio - # command prompt. Add it to the PATH here in case we are running from a - # different command prompt. 
- # - get_filename_component(gp_cmd_dir "${gp_cmd}" PATH) - get_filename_component(gp_cmd_dlls_dir "${gp_cmd_dir}/../../Common7/IDE" ABSOLUTE) - if(EXISTS "${gp_cmd_dlls_dir}") - # only add to the path if it is not already in the path - if(NOT "$ENV{PATH}" MATCHES "${gp_cmd_dlls_dir}") - set(ENV{PATH} "$ENV{PATH};${gp_cmd_dlls_dir}") - endif(NOT "$ENV{PATH}" MATCHES "${gp_cmd_dlls_dir}") - endif(EXISTS "${gp_cmd_dlls_dir}") - endif("${gp_tool}" STREQUAL "dumpbin") - # - # </setup-gp_tool-vars> - - if("${gp_tool}" STREQUAL "ldd") - set(old_ld_env "$ENV{LD_LIBRARY_PATH}") - foreach(dir ${exepath} ${dirs}) - set(ENV{LD_LIBRARY_PATH} "${dir}:$ENV{LD_LIBRARY_PATH}") - endforeach(dir) - endif("${gp_tool}" STREQUAL "ldd") - - - # Track new prerequisites at each new level of recursion. Start with an - # empty list at each level: - # - set(unseen_prereqs) - - # Run gp_cmd on the target: - # - execute_process( - COMMAND ${gp_cmd} ${gp_cmd_args} ${target} - OUTPUT_VARIABLE gp_cmd_ov - ) - - if("${gp_tool}" STREQUAL "ldd") - set(ENV{LD_LIBRARY_PATH} "${old_ld_env}") - endif("${gp_tool}" STREQUAL "ldd") - - if(verbose) - message(STATUS "<RawOutput cmd='${gp_cmd} ${gp_cmd_args} ${target}'>") - message(STATUS "gp_cmd_ov='${gp_cmd_ov}'") - message(STATUS "</RawOutput>") - endif(verbose) - - get_filename_component(target_dir "${target}" PATH) - - # Convert to a list of lines: - # - string(REGEX REPLACE ";" "\\\\;" candidates "${gp_cmd_ov}") - string(REGEX REPLACE "\n" "${eol_char};" candidates "${candidates}") - - # Analyze each line for file names that match the regular expression: - # - foreach(candidate ${candidates}) - if("${candidate}" MATCHES "${gp_regex}") - # Extract information from each candidate: - string(REGEX REPLACE "${gp_regex}" "\\1" raw_item "${candidate}") - - if(gp_regex_cmp_count GREATER 1) - string(REGEX REPLACE "${gp_regex}" "\\2" raw_compat_version "${candidate}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\1" compat_major_version "${raw_compat_version}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\2" compat_minor_version "${raw_compat_version}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\3" compat_patch_version "${raw_compat_version}") - endif(gp_regex_cmp_count GREATER 1) - - if(gp_regex_cmp_count GREATER 2) - string(REGEX REPLACE "${gp_regex}" "\\3" raw_current_version "${candidate}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\1" current_major_version "${raw_current_version}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\2" current_minor_version "${raw_current_version}") - string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" "\\3" current_patch_version "${raw_current_version}") - endif(gp_regex_cmp_count GREATER 2) - - # Use the raw_item as the list entries returned by this function. Use the - # gp_resolve_item function to resolve it to an actual full path file if - # necessary. - # - set(item "${raw_item}") - - # Add each item unless it is excluded: - # - set(add_item 1) - - if(${exclude_system}) - set(type "") - gp_resolved_file_type("${target}" "${item}" "${exepath}" "${dirs}" type) - if("${type}" STREQUAL "system") - set(add_item 0) - endif("${type}" STREQUAL "system") - endif(${exclude_system}) - - if(add_item) - list(LENGTH ${prerequisites_var} list_length_before_append) - gp_append_unique(${prerequisites_var} "${item}") - list(LENGTH ${prerequisites_var} list_length_after_append) - - if(${recurse}) - # If item was really added, this is the first time we have seen it. 
- # Add it to unseen_prereqs so that we can recursively add *its* - # prerequisites... - # - # But first: resolve its name to an absolute full path name such - # that the analysis tools can simply accept it as input. - # - if(NOT list_length_before_append EQUAL list_length_after_append) - gp_resolve_item("${target}" "${item}" "${exepath}" "${dirs}" resolved_item) - set(unseen_prereqs ${unseen_prereqs} "${resolved_item}") - endif(NOT list_length_before_append EQUAL list_length_after_append) - endif(${recurse}) - endif(add_item) - else("${candidate}" MATCHES "${gp_regex}") - if(verbose) - message(STATUS "ignoring non-matching line: '${candidate}'") - endif(verbose) - endif("${candidate}" MATCHES "${gp_regex}") - endforeach(candidate) - - list(LENGTH ${prerequisites_var} prerequisites_var_length) - if(prerequisites_var_length GREATER 0) - list(SORT ${prerequisites_var}) - endif(prerequisites_var_length GREATER 0) - if(${recurse}) - set(more_inputs ${unseen_prereqs}) - foreach(input ${more_inputs}) - get_prerequisites("${input}" ${prerequisites_var} ${exclude_system} ${recurse} "${exepath}" "${dirs}") - endforeach(input) - endif(${recurse}) - - set(${prerequisites_var} ${${prerequisites_var}} PARENT_SCOPE) -endfunction(get_prerequisites) - - -# list_prerequisites target all exclude_system verbose -# -# ARGV0 (target) is the full path to an executable file -# -# optional ARGV1 (all) is 0 or 1: 0 for direct prerequisites only, -# 1 for all prerequisites recursively -# -# optional ARGV2 (exclude_system) is 0 or 1: 0 to include "system" -# prerequisites , 1 to exclude them -# -# optional ARGV3 (verbose) is 0 or 1: 0 to print only full path -# names of prerequisites, 1 to print extra information -# -function(list_prerequisites target) - if("${ARGV1}" STREQUAL "") - set(all 1) - else("${ARGV1}" STREQUAL "") - set(all "${ARGV1}") - endif("${ARGV1}" STREQUAL "") - - if("${ARGV2}" STREQUAL "") - set(exclude_system 0) - else("${ARGV2}" STREQUAL "") - set(exclude_system "${ARGV2}") - endif("${ARGV2}" STREQUAL "") - - if("${ARGV3}" STREQUAL "") - set(verbose 0) - else("${ARGV3}" STREQUAL "") - set(verbose "${ARGV3}") - endif("${ARGV3}" STREQUAL "") - - set(count 0) - set(count_str "") - set(print_count "${verbose}") - set(print_prerequisite_type "${verbose}") - set(print_target "${verbose}") - set(type_str "") - - get_filename_component(exepath "${target}" PATH) - - set(prereqs "") - get_prerequisites("${target}" prereqs ${exclude_system} ${all} "${exepath}" "") - - if(print_target) - message(STATUS "File '${target}' depends on:") - endif(print_target) - - foreach(d ${prereqs}) - math(EXPR count "${count} + 1") - - if(print_count) - set(count_str "${count}. ") - endif(print_count) - - if(print_prerequisite_type) - gp_file_type("${target}" "${d}" type) - set(type_str " (${type})") - endif(print_prerequisite_type) - - message(STATUS "${count_str}${d}${type_str}") - endforeach(d) -endfunction(list_prerequisites) - - -# list_prerequisites_by_glob glob_arg glob_exp -# -# glob_arg is GLOB or GLOB_RECURSE -# -# glob_exp is a globbing expression used with "file(GLOB" to retrieve a list -# of matching files. If a matching file is executable, its prerequisites are -# listed. -# -# Any additional (optional) arguments provided are passed along as the -# optional arguments to the list_prerequisites calls. 
-# -function(list_prerequisites_by_glob glob_arg glob_exp) - message(STATUS "=============================================================================") - message(STATUS "List prerequisites of executables matching ${glob_arg} '${glob_exp}'") - message(STATUS "") - file(${glob_arg} file_list ${glob_exp}) - foreach(f ${file_list}) - is_file_executable("${f}" is_f_executable) - if(is_f_executable) - message(STATUS "=============================================================================") - list_prerequisites("${f}" ${ARGN}) - message(STATUS "") - endif(is_f_executable) - endforeach(f) -endfunction(list_prerequisites_by_glob) diff --git a/indra/cmake/NGHTTP2.cmake b/indra/cmake/NGHTTP2.cmake new file mode 100644 index 0000000000..df191ff3c1 --- /dev/null +++ b/indra/cmake/NGHTTP2.cmake @@ -0,0 +1,20 @@ +include(Prebuilt) + +set(NGHTTP2_FIND_QUIETLY ON) +set(NGHTTP2_FIND_REQUIRED ON) + +if (USESYSTEMLIBS) + include(FindNGHTTP2) +else (USESYSTEMLIBS) + use_prebuilt_binary(nghttp2) + if (WINDOWS) + set(NGHTTP2_LIBRARIES + ${ARCH_PREBUILT_DIRS_RELEASE}/nghttp2.lib + ) + elseif (DARWIN) + set(NGHTTP2_LIBRARIES libnghttp2.dylib) + else (WINDOWS) + set(NGHTTP2_LIBRARIES libnghttp2.a) + endif (WINDOWS) + set(NGHTTP2_INCLUDE_DIR ${LIBS_PREBUILT_DIR}/include/nghttp2) +endif (USESYSTEMLIBS) diff --git a/indra/llcorehttp/CMakeLists.txt b/indra/llcorehttp/CMakeLists.txt index 435fb09aa4..40ec836f12 100644 --- a/indra/llcorehttp/CMakeLists.txt +++ b/indra/llcorehttp/CMakeLists.txt @@ -6,6 +6,7 @@ include(00-Common) include(GoogleMock) include(CURL) include(OpenSSL) +include(NGHTTP2) include(ZLIB) include(LLCoreHttp) include(LLAddBuildTest) @@ -94,6 +95,7 @@ target_link_libraries( ${CURL_LIBRARIES} ${OPENSSL_LIBRARIES} ${CRYPTO_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${BOOST_THREAD_LIBRARY} ${BOOST_SYSTEM_LIBRARY} ) @@ -132,6 +134,7 @@ if (LL_TESTS) ${CURL_LIBRARIES} ${OPENSSL_LIBRARIES} ${CRYPTO_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${BOOST_THREAD_LIBRARY} ${BOOST_SYSTEM_LIBRARY} ) @@ -202,6 +205,7 @@ endif (DARWIN) ${CURL_LIBRARIES} ${OPENSSL_LIBRARIES} ${CRYPTO_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${BOOST_THREAD_LIBRARY} ${BOOST_SYSTEM_LIBRARY} ) diff --git a/indra/llcorehttp/_httplibcurl.cpp b/indra/llcorehttp/_httplibcurl.cpp index c25e01a318..abd304f6a5 100644 --- a/indra/llcorehttp/_httplibcurl.cpp +++ b/indra/llcorehttp/_httplibcurl.cpp @@ -40,6 +40,15 @@ namespace void check_curl_multi_code(CURLMcode code); void check_curl_multi_code(CURLMcode code, int curl_setopt_option); +// This is a template because different 'option' values require different +// types for 'ARG'. Just pass them through unchanged (by value). 
+template <typename ARG> +void check_curl_multi_setopt(CURLM* handle, CURLMoption option, ARG argument) +{ + CURLMcode code = curl_multi_setopt(handle, option, argument); + check_curl_multi_code(code, option); +} + static const char * const LOG_CORE("CoreHttp"); } // end anonymous namespace @@ -461,46 +470,38 @@ void HttpLibcurl::policyUpdated(int policy_class) HttpPolicyClass & options(policy.getClassOptions(policy_class)); CURLM * multi_handle(mMultiHandles[policy_class]); - CURLMcode code; // Enable policy if stalled policy.stallPolicy(policy_class, false); mDirtyPolicy[policy_class] = false; - + if (options.mPipelining > 1) { // We'll try to do pipelining on this multihandle - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_PIPELINING, 1L); - check_curl_multi_code(code, CURLMOPT_PIPELINING); - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_MAX_PIPELINE_LENGTH, long(options.mPipelining)); - check_curl_multi_code(code, CURLMOPT_MAX_PIPELINE_LENGTH); - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_MAX_HOST_CONNECTIONS, long(options.mPerHostConnectionLimit)); - check_curl_multi_code(code, CURLMOPT_MAX_HOST_CONNECTIONS); - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_MAX_TOTAL_CONNECTIONS, long(options.mConnectionLimit)); - check_curl_multi_code(code, CURLMOPT_MAX_TOTAL_CONNECTIONS); } else { - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_PIPELINING, 0L); - check_curl_multi_code(code, CURLMOPT_PIPELINING); - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_MAX_HOST_CONNECTIONS, 0L); - check_curl_multi_code(code, CURLMOPT_MAX_HOST_CONNECTIONS); - code = curl_multi_setopt(multi_handle, + check_curl_multi_setopt(multi_handle, CURLMOPT_MAX_TOTAL_CONNECTIONS, long(options.mConnectionLimit)); - check_curl_multi_code(code, CURLMOPT_MAX_TOTAL_CONNECTIONS); } } else if (! mDirtyPolicy[policy_class]) diff --git a/indra/llcorehttp/_httpoprequest.cpp b/indra/llcorehttp/_httpoprequest.cpp index 721147ffb0..cc49a2af80 100644 --- a/indra/llcorehttp/_httpoprequest.cpp +++ b/indra/llcorehttp/_httpoprequest.cpp @@ -108,6 +108,15 @@ void os_strlower(char * str); // Error testing and reporting for libcurl status codes void check_curl_easy_code(CURLcode code, int curl_setopt_option); +// This is a template because different 'option' values require different +// types for 'ARG'. Just pass them through unchanged (by value). +template <typename ARG> +void check_curl_easy_setopt(CURL* handle, CURLoption option, ARG argument) +{ + CURLcode code = curl_easy_setopt(handle, option, argument); + check_curl_easy_code(code, option); +} + static const char * const LOG_CORE("CoreHttp"); } // end anonymous namespace @@ -456,8 +465,6 @@ void HttpOpRequest::setupCommon(HttpRequest::policy_t policy_id, // *TODO: Move this to _httplibcurl where it belongs. 
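
Aside on the refactor running through _httplibcurl.cpp and _httpoprequest.cpp: every curl_easy_setopt/curl_multi_setopt call used to be followed by an explicit check_curl_*_code() call, and both files now route through small template helpers instead, as shown in the hunks above and below. Here is a self-contained sketch of the same pattern; the names checked_setopt and report_setopt_failure are illustrative stand-ins, not the viewer's helpers.

#include <curl/curl.h>
#include <iostream>

// Simplified stand-in for the viewer's check_curl_easy_code() reporter.
static void report_setopt_failure(CURLcode code, int option)
{
    if (code != CURLE_OK)
    {
        std::cerr << "curl_easy_setopt(" << option << ") failed: "
                  << curl_easy_strerror(code) << std::endl;
    }
}

// Same shape as the helper added in the diff: a template because different
// CURLOPT_* options take different argument types (long, char *, callbacks);
// the argument is passed through unchanged, by value.
template <typename ARG>
void checked_setopt(CURL * handle, CURLoption option, ARG argument)
{
    report_setopt_failure(curl_easy_setopt(handle, option, argument), option);
}

int main()
{
    curl_global_init(CURL_GLOBAL_ALL);
    CURL * handle = curl_easy_init();

    // One call per option replaces the old setopt-then-check pairs.
    checked_setopt(handle, CURLOPT_URL, "https://example.org/");
    checked_setopt(handle, CURLOPT_NOSIGNAL, 1L);
    checked_setopt(handle, CURLOPT_FOLLOWLOCATION, 1L);

    curl_easy_cleanup(handle);
    curl_global_cleanup();
    return 0;
}
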
HttpStatus HttpOpRequest::prepareRequest(HttpService * service) { - CURLcode code; - // Scrub transport and result data for retried op case mCurlActive = false; mCurlHandle = NULL; @@ -496,45 +503,28 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) return HttpStatus(HttpStatus::LLCORE, HE_BAD_ALLOC); } - code = curl_easy_setopt(mCurlHandle, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4); - check_curl_easy_code(code, CURLOPT_IPRESOLVE); - code = curl_easy_setopt(mCurlHandle, CURLOPT_NOSIGNAL, 1); - check_curl_easy_code(code, CURLOPT_NOSIGNAL); - code = curl_easy_setopt(mCurlHandle, CURLOPT_NOPROGRESS, 1); - check_curl_easy_code(code, CURLOPT_NOPROGRESS); - code = curl_easy_setopt(mCurlHandle, CURLOPT_URL, mReqURL.c_str()); - check_curl_easy_code(code, CURLOPT_URL); - code = curl_easy_setopt(mCurlHandle, CURLOPT_PRIVATE, getHandle()); - check_curl_easy_code(code, CURLOPT_PRIVATE); - code = curl_easy_setopt(mCurlHandle, CURLOPT_ENCODING, ""); - check_curl_easy_code(code, CURLOPT_ENCODING); - - code = curl_easy_setopt(mCurlHandle, CURLOPT_AUTOREFERER, 1); - check_curl_easy_code(code, CURLOPT_AUTOREFERER); - code = curl_easy_setopt(mCurlHandle, CURLOPT_MAXREDIRS, HTTP_REDIRECTS_DEFAULT); - check_curl_easy_code(code, CURLOPT_MAXREDIRS); - code = curl_easy_setopt(mCurlHandle, CURLOPT_WRITEFUNCTION, writeCallback); - check_curl_easy_code(code, CURLOPT_WRITEFUNCTION); - code = curl_easy_setopt(mCurlHandle, CURLOPT_WRITEDATA, getHandle()); - check_curl_easy_code(code, CURLOPT_WRITEDATA); - code = curl_easy_setopt(mCurlHandle, CURLOPT_READFUNCTION, readCallback); - check_curl_easy_code(code, CURLOPT_READFUNCTION); - code = curl_easy_setopt(mCurlHandle, CURLOPT_READDATA, getHandle()); - check_curl_easy_code(code, CURLOPT_READDATA); - code = curl_easy_setopt(mCurlHandle, CURLOPT_SEEKFUNCTION, seekCallback); - check_curl_easy_code(code, CURLOPT_SEEKFUNCTION); - code = curl_easy_setopt(mCurlHandle, CURLOPT_SEEKDATA, getHandle()); - check_curl_easy_code(code, CURLOPT_SEEKDATA); - - code = curl_easy_setopt(mCurlHandle, CURLOPT_COOKIEFILE, ""); - check_curl_easy_code(code, CURLOPT_COOKIEFILE); + check_curl_easy_setopt(mCurlHandle, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4); + check_curl_easy_setopt(mCurlHandle, CURLOPT_NOSIGNAL, 1); + check_curl_easy_setopt(mCurlHandle, CURLOPT_NOPROGRESS, 1); + check_curl_easy_setopt(mCurlHandle, CURLOPT_URL, mReqURL.c_str()); + check_curl_easy_setopt(mCurlHandle, CURLOPT_PRIVATE, getHandle()); + check_curl_easy_setopt(mCurlHandle, CURLOPT_ENCODING, ""); + + check_curl_easy_setopt(mCurlHandle, CURLOPT_AUTOREFERER, 1); + check_curl_easy_setopt(mCurlHandle, CURLOPT_MAXREDIRS, HTTP_REDIRECTS_DEFAULT); + check_curl_easy_setopt(mCurlHandle, CURLOPT_WRITEFUNCTION, writeCallback); + check_curl_easy_setopt(mCurlHandle, CURLOPT_WRITEDATA, getHandle()); + check_curl_easy_setopt(mCurlHandle, CURLOPT_READFUNCTION, readCallback); + check_curl_easy_setopt(mCurlHandle, CURLOPT_READDATA, getHandle()); + check_curl_easy_setopt(mCurlHandle, CURLOPT_SEEKFUNCTION, seekCallback); + check_curl_easy_setopt(mCurlHandle, CURLOPT_SEEKDATA, getHandle()); + + check_curl_easy_setopt(mCurlHandle, CURLOPT_COOKIEFILE, ""); if (gpolicy.mSslCtxCallback) { - code = curl_easy_setopt(mCurlHandle, CURLOPT_SSL_CTX_FUNCTION, curlSslCtxCallback); - check_curl_easy_code(code, CURLOPT_SSL_CTX_FUNCTION); - code = curl_easy_setopt(mCurlHandle, CURLOPT_SSL_CTX_DATA, getHandle()); - check_curl_easy_code(code, CURLOPT_SSL_CTX_DATA); + check_curl_easy_setopt(mCurlHandle, CURLOPT_SSL_CTX_FUNCTION, curlSslCtxCallback); 
+ check_curl_easy_setopt(mCurlHandle, CURLOPT_SSL_CTX_DATA, getHandle()); mCallbackSSLVerify = gpolicy.mSslCtxCallback; } @@ -552,16 +542,12 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) dnsCacheTimeout = mReqOptions->getDNSCacheTimeout(); nobody = mReqOptions->getHeadersOnly() ? 1L : 0L; } - code = curl_easy_setopt(mCurlHandle, CURLOPT_FOLLOWLOCATION, follow_redirect); - check_curl_easy_code(code, CURLOPT_FOLLOWLOCATION); + check_curl_easy_setopt(mCurlHandle, CURLOPT_FOLLOWLOCATION, follow_redirect); - code = curl_easy_setopt(mCurlHandle, CURLOPT_SSL_VERIFYPEER, sslPeerV); - check_curl_easy_code(code, CURLOPT_SSL_VERIFYPEER); - code = curl_easy_setopt(mCurlHandle, CURLOPT_SSL_VERIFYHOST, sslHostV); - check_curl_easy_code(code, CURLOPT_SSL_VERIFYHOST); + check_curl_easy_setopt(mCurlHandle, CURLOPT_SSL_VERIFYPEER, sslPeerV); + check_curl_easy_setopt(mCurlHandle, CURLOPT_SSL_VERIFYHOST, sslHostV); - code = curl_easy_setopt(mCurlHandle, CURLOPT_NOBODY, nobody); - check_curl_easy_code(code, CURLOPT_NOBODY); + check_curl_easy_setopt(mCurlHandle, CURLOPT_NOBODY, nobody); // The Linksys WRT54G V5 router has an issue with frequent // DNS lookups from LAN machines. If they happen too often, @@ -569,8 +555,7 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) // about 700 or so requests and starts issuing TCP RSTs to // new connections. Reuse the DNS lookups for even a few // seconds and no RSTs. - code = curl_easy_setopt(mCurlHandle, CURLOPT_DNS_CACHE_TIMEOUT, dnsCacheTimeout); - check_curl_easy_code(code, CURLOPT_DNS_CACHE_TIMEOUT); + check_curl_easy_setopt(mCurlHandle, CURLOPT_DNS_CACHE_TIMEOUT, dnsCacheTimeout); if (gpolicy.mUseLLProxy) { @@ -593,81 +578,66 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) { // *TODO: This is fine for now but get fuller socks5/ // authentication thing going later.... 
- code = curl_easy_setopt(mCurlHandle, CURLOPT_PROXY, gpolicy.mHttpProxy.c_str()); - check_curl_easy_code(code, CURLOPT_PROXY); - code = curl_easy_setopt(mCurlHandle, CURLOPT_PROXYTYPE, CURLPROXY_HTTP); - check_curl_easy_code(code, CURLOPT_PROXYTYPE); + check_curl_easy_setopt(mCurlHandle, CURLOPT_PROXY, gpolicy.mHttpProxy.c_str()); + check_curl_easy_setopt(mCurlHandle, CURLOPT_PROXYTYPE, CURLPROXY_HTTP); } if (gpolicy.mCAPath.size()) { - code = curl_easy_setopt(mCurlHandle, CURLOPT_CAPATH, gpolicy.mCAPath.c_str()); - check_curl_easy_code(code, CURLOPT_CAPATH); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CAPATH, gpolicy.mCAPath.c_str()); } if (gpolicy.mCAFile.size()) { - code = curl_easy_setopt(mCurlHandle, CURLOPT_CAINFO, gpolicy.mCAFile.c_str()); - check_curl_easy_code(code, CURLOPT_CAINFO); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CAINFO, gpolicy.mCAFile.c_str()); } switch (mReqMethod) { case HOR_GET: if (nobody == 0) - code = curl_easy_setopt(mCurlHandle, CURLOPT_HTTPGET, 1); - check_curl_easy_code(code, CURLOPT_HTTPGET); + check_curl_easy_setopt(mCurlHandle, CURLOPT_HTTPGET, 1); break; case HOR_POST: { - code = curl_easy_setopt(mCurlHandle, CURLOPT_POST, 1); - check_curl_easy_code(code, CURLOPT_POST); - code = curl_easy_setopt(mCurlHandle, CURLOPT_ENCODING, ""); - check_curl_easy_code(code, CURLOPT_ENCODING); + check_curl_easy_setopt(mCurlHandle, CURLOPT_POST, 1); + check_curl_easy_setopt(mCurlHandle, CURLOPT_ENCODING, ""); long data_size(0); if (mReqBody) { data_size = mReqBody->size(); } - code = curl_easy_setopt(mCurlHandle, CURLOPT_POSTFIELDS, static_cast<void *>(NULL)); - check_curl_easy_code(code, CURLOPT_POSTFIELDS); - code = curl_easy_setopt(mCurlHandle, CURLOPT_POSTFIELDSIZE, data_size); - check_curl_easy_code(code, CURLOPT_POSTFIELDSIZE); + check_curl_easy_setopt(mCurlHandle, CURLOPT_POSTFIELDS, static_cast<void *>(NULL)); + check_curl_easy_setopt(mCurlHandle, CURLOPT_POSTFIELDSIZE, data_size); mCurlHeaders = curl_slist_append(mCurlHeaders, "Expect:"); } break; case HOR_PATCH: - code = curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "PATCH"); - check_curl_easy_code(code, CURLOPT_CUSTOMREQUEST); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "PATCH"); // fall through. 
The rest is the same as PUT case HOR_PUT: { - code = curl_easy_setopt(mCurlHandle, CURLOPT_UPLOAD, 1); - check_curl_easy_code(code, CURLOPT_UPLOAD); + check_curl_easy_setopt(mCurlHandle, CURLOPT_UPLOAD, 1); long data_size(0); if (mReqBody) { data_size = mReqBody->size(); } - code = curl_easy_setopt(mCurlHandle, CURLOPT_INFILESIZE, data_size); - check_curl_easy_code(code, CURLOPT_INFILESIZE); + check_curl_easy_setopt(mCurlHandle, CURLOPT_INFILESIZE, data_size); mCurlHeaders = curl_slist_append(mCurlHeaders, "Expect:"); } break; case HOR_DELETE: - code = curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "DELETE"); - check_curl_easy_code(code, CURLOPT_CUSTOMREQUEST); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "DELETE"); break; case HOR_COPY: - code = curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "COPY"); - check_curl_easy_code(code, CURLOPT_CUSTOMREQUEST); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "COPY"); break; case HOR_MOVE: - code = curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "MOVE"); - check_curl_easy_code(code, CURLOPT_CUSTOMREQUEST); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CUSTOMREQUEST, "MOVE"); break; default: @@ -685,12 +655,9 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) // Tracing if (mTracing >= HTTP_TRACE_CURL_HEADERS) { - code = curl_easy_setopt(mCurlHandle, CURLOPT_VERBOSE, 1); - check_curl_easy_code(code, CURLOPT_VERBOSE); - code = curl_easy_setopt(mCurlHandle, CURLOPT_DEBUGDATA, this); - check_curl_easy_code(code, CURLOPT_DEBUGDATA); - code = curl_easy_setopt(mCurlHandle, CURLOPT_DEBUGFUNCTION, debugCallback); - check_curl_easy_code(code, CURLOPT_DEBUGFUNCTION); + check_curl_easy_setopt(mCurlHandle, CURLOPT_VERBOSE, 1); + check_curl_easy_setopt(mCurlHandle, CURLOPT_DEBUGDATA, this); + check_curl_easy_setopt(mCurlHandle, CURLOPT_DEBUGFUNCTION, debugCallback); } // There's a CURLOPT for this now... @@ -767,6 +734,13 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) // // xfer_timeout *= cpolicy.mPipelining; xfer_timeout *= 2L; + + // Also try requesting HTTP/2. 
+/******************************/ + // but for test purposes, only if overriding VIEWERASSET + if (getenv("VIEWERASSET")) +/******************************/ + check_curl_easy_setopt(mCurlHandle, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0); } // *DEBUG: Enable following override for timeout handling and "[curl:bugs] #1420" tests //if (cpolicy.mPipelining) @@ -774,10 +748,8 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) // xfer_timeout = 1L; // timeout = 1L; //} - code = curl_easy_setopt(mCurlHandle, CURLOPT_TIMEOUT, xfer_timeout); - check_curl_easy_code(code, CURLOPT_TIMEOUT); - code = curl_easy_setopt(mCurlHandle, CURLOPT_CONNECTTIMEOUT, timeout); - check_curl_easy_code(code, CURLOPT_CONNECTTIMEOUT); + check_curl_easy_setopt(mCurlHandle, CURLOPT_TIMEOUT, xfer_timeout); + check_curl_easy_setopt(mCurlHandle, CURLOPT_CONNECTTIMEOUT, timeout); // Request headers if (mReqHeaders) @@ -785,15 +757,12 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service) // Caller's headers last to override mCurlHeaders = append_headers_to_slist(mReqHeaders, mCurlHeaders); } - code = curl_easy_setopt(mCurlHandle, CURLOPT_HTTPHEADER, mCurlHeaders); - check_curl_easy_code(code, CURLOPT_HTTPHEADER); + check_curl_easy_setopt(mCurlHandle, CURLOPT_HTTPHEADER, mCurlHeaders); if (mProcFlags & (PF_SCAN_RANGE_HEADER | PF_SAVE_HEADERS | PF_USE_RETRY_AFTER)) { - code = curl_easy_setopt(mCurlHandle, CURLOPT_HEADERFUNCTION, headerCallback); - check_curl_easy_code(code, CURLOPT_HEADERFUNCTION); - code = curl_easy_setopt(mCurlHandle, CURLOPT_HEADERDATA, this); - check_curl_easy_code(code, CURLOPT_HEADERDATA); + check_curl_easy_setopt(mCurlHandle, CURLOPT_HEADERFUNCTION, headerCallback); + check_curl_easy_setopt(mCurlHandle, CURLOPT_HEADERDATA, this); } if (status) diff --git a/indra/llmessage/CMakeLists.txt b/indra/llmessage/CMakeLists.txt index 87bec60d95..e0922c0667 100644 --- a/indra/llmessage/CMakeLists.txt +++ b/indra/llmessage/CMakeLists.txt @@ -214,6 +214,7 @@ target_link_libraries( ${JSONCPP_LIBRARIES} ${OPENSSL_LIBRARIES} ${CRYPTO_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${XMLRPCEPI_LIBRARIES} ${LLCOREHTTP_LIBRARIES} ${BOOST_COROUTINE_LIBRARY} @@ -231,6 +232,7 @@ target_link_libraries( ${JSONCPP_LIBRARIES} ${OPENSSL_LIBRARIES} ${CRYPTO_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${XMLRPCEPI_LIBRARIES} ${LLCOREHTTP_LIBRARIES} ${BOOST_COROUTINE_LIBRARY} @@ -257,6 +259,7 @@ if (LINUX) ${LLVFS_LIBRARIES} ${LLMATH_LIBRARIES} ${CURL_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${LLCOMMON_LIBRARIES} ${LLMESSAGE_LIBRARIES} ${LLCOREHTTP_LIBRARIES} @@ -272,6 +275,7 @@ else (LINUX) ${LLVFS_LIBRARIES} ${LLMATH_LIBRARIES} ${CURL_LIBRARIES} + ${NGHTTP2_LIBRARIES} ${LLCOMMON_LIBRARIES} ${LLMESSAGE_LIBRARIES} ${LLCOREHTTP_LIBRARIES} diff --git a/indra/llplugin/CMakeLists.txt b/indra/llplugin/CMakeLists.txt index 2cdea67723..129efeb529 100644 --- a/indra/llplugin/CMakeLists.txt +++ b/indra/llplugin/CMakeLists.txt @@ -82,7 +82,7 @@ if (LL_TESTS) set_source_files_properties( llplugincookiestore.cpp PROPERTIES - LL_TEST_ADDITIONAL_LIBRARIES "${CURL_LIBRARIES}" + LL_TEST_ADDITIONAL_LIBRARIES "${CURL_LIBRARIES};${NGHTTP2_LIBRARIES}" ) LL_ADD_PROJECT_UNIT_TESTS(llplugin "${llplugin_TEST_SOURCE_FILES}") diff --git a/indra/newview/CMakeLists.txt b/indra/newview/CMakeLists.txt index f353109deb..2100c91cfe 100644 --- a/indra/newview/CMakeLists.txt +++ b/indra/newview/CMakeLists.txt @@ -2219,6 +2219,7 @@ if (LL_TESTS) ${LLCOMMON_LIBRARIES} ${JSONCPP_LIBRARIES} ${CURL_LIBRARIES} + ${NGHTTP2_LIBRARIES} ) set_source_files_properties( diff --git 
a/indra/newview/lltexturefetch.cpp b/indra/newview/lltexturefetch.cpp index 84a2f1b597..8447cf893e 100644 --- a/indra/newview/lltexturefetch.cpp +++ b/indra/newview/lltexturefetch.cpp @@ -1746,7 +1746,7 @@ bool LLTextureFetchWorker::doWork(S32 param) // In case of a partial response, our offset may // not be trivially contiguous with the data we have. // Get back into alignment. - if (mHttpReplyOffset > cur_size) + if ( (mHttpReplyOffset > cur_size) || (cur_size > mHttpReplyOffset + append_size)) { LL_WARNS(LOG_TXT) << "Partial HTTP response produces break in image data for texture " << mID << ". Aborting load." << LL_ENDL; diff --git a/indra/newview/llviewerregion.cpp b/indra/newview/llviewerregion.cpp index 3fd2af87de..06b5c48e6c 100644 --- a/indra/newview/llviewerregion.cpp +++ b/indra/newview/llviewerregion.cpp @@ -2965,6 +2965,17 @@ void LLViewerRegion::setCapability(const std::string& name, const std::string& u mImpl->mCapabilities[name] = url; if(name == "ViewerAsset") { + /*==============================================================*/ + // The following inserted lines are a hack for testing MAINT-7081, + // which is why the indentation and formatting are left ugly. + const char* VIEWERASSET = getenv("VIEWERASSET"); + if (VIEWERASSET) + { + mImpl->mCapabilities[name] = VIEWERASSET; + mViewerAssetUrl = VIEWERASSET; + } + else + /*==============================================================*/ mViewerAssetUrl = url; } } @@ -2978,6 +2989,17 @@ void LLViewerRegion::setCapabilityDebug(const std::string& name, const std::stri mImpl->mSecondCapabilitiesTracker[name] = url; if(name == "ViewerAsset") { + /*==============================================================*/ + // The following inserted lines are a hack for testing MAINT-7081, + // which is why the indentation and formatting are left ugly. + const char* VIEWERASSET = getenv("VIEWERASSET"); + if (VIEWERASSET) + { + mImpl->mSecondCapabilitiesTracker[name] = VIEWERASSET; + mViewerAssetUrl = VIEWERASSET; + } + else + /*==============================================================*/ mViewerAssetUrl = url; } } diff --git a/indra/newview/viewer_manifest.py b/indra/newview/viewer_manifest.py index 9778ceb8fa..5cee88ca32 100755 --- a/indra/newview/viewer_manifest.py +++ b/indra/newview/viewer_manifest.py @@ -449,6 +449,9 @@ class WindowsManifest(ViewerManifest): self.path("ssleay32.dll") self.path("libeay32.dll") + # HTTP/2 + self.path("nghttp2.dll") + # Hunspell self.path("libhunspell.dll") @@ -880,10 +883,19 @@ class DarwinManifest(ViewerManifest): or a list containing dst (present). Concatenate these return values to get a list of all libs that are present. """ - if self.path(src, dst): - return [dst] - print "Skipping %s" % dst - return [] + # This was simple before we started needing to pass + # wildcards. Fortunately, self.path() ends up appending a + # (source, dest) pair to self.file_list for every expanded + # file processed. Remember its size before the call. + oldlen = len(self.file_list) + self.path(src, dst) + # The dest appended to self.file_list has been prepended + # with self.get_dst_prefix(). Strip it off again. + added = [os.path.relpath(d, self.get_dst_prefix()) + for s, d in self.file_list[oldlen:]] + if not added: + print "Skipping %s" % dst + return added # dylibs is a list of all the .dylib files we expect to need # in our bundled sub-apps. 
For each of these we'll create a @@ -904,6 +916,10 @@ class DarwinManifest(ViewerManifest): "libexpat.1.dylib", "libexception_handler.dylib", "libGLOD.dylib", + # libnghttp2.dylib is a symlink to + # libnghttp2.major.dylib, which is a symlink + # to libnghttp2.version.dylib. Get all of them. + "libnghttp2.*dylib", ): dylibs += path_optional(os.path.join(relpkgdir, libfile), libfile) diff --git a/scripts/packages-formatter.py b/scripts/packages-formatter.py index f91f5819b7..b1eef3c721 100755 --- a/scripts/packages-formatter.py +++ b/scripts/packages-formatter.py @@ -62,39 +62,68 @@ def autobuild(*args): # no exceptions yet, let caller read stdout return child.stdout -version={} -versions=autobuild('install', '--versions') -for line in versions: - pkg_info = pkg_line.match(line) - if pkg_info: - pkg = pkg_info.group(1) - if pkg not in version: - version[pkg] = pkg_info.group(2).strip() - else: - sys.exit("Duplicate version for %s" % pkg) +info=dict(versions={}, copyrights={}) +dups=dict(versions=set(), copyrights=set()) + +def add_info(key, pkg, lines): + if pkg not in info[key]: + info[key][pkg] = '\n'.join(lines) else: - sys.exit("Unrecognized --versions output: %s" % line) + dups[key].add(pkg) -copyright={} +versions=autobuild('install', '--versions') copyrights=autobuild('install', '--copyrights') viewer_copyright = copyrights.readline() # first line is the copyright for the viewer itself -for line in copyrights: - pkg_info = pkg_line.match(line) - if pkg_info: - pkg = pkg_info.group(1) - if pkg not in copyright: - copyright[pkg] = pkg_info.group(2).strip() - else: - sys.exit("Duplicate copyright for %s" % pkg) + +# Two different autobuild outputs, but we treat them essentially the same way: +# populating each into a dict; each a subdict of 'info'. +for key, rawdata in ("versions", versions), ("copyrights", copyrights): + lines = iter(rawdata) + try: + line = next(lines) + except StopIteration: + # rawdata is completely empty? okay... + pass else: - sys.exit("Unrecognized --copyrights output: %s" % line) + pkg_info = pkg_line.match(line) + # The first line for each package must match pkg_line. + if not pkg_info: + sys.exit("Unrecognized --%s output: %r" % (key, line)) + # Only the very first line in rawdata MUST match; for the rest of + # rawdata, matching the regexp is how we recognize the start of the + # next package. + while True: # iterate over packages in rawdata + pkg = pkg_info.group(1) + pkg_lines = [pkg_info.group(2).strip()] + for line in lines: + pkg_info = pkg_line.match(line) + if pkg_info: + # we hit the start of the next package data + add_info(key, pkg, pkg_lines) + break + else: + # no package prefix: additional line for same package + pkg_lines.append(line.rstrip()) + else: + # last package in the output -- finished 'lines' + add_info(key, pkg, pkg_lines) + break + +# Now that we've run through all of both outputs -- are there duplicates? +if any(pkgs for pkgs in dups.values()): + for key, pkgs in dups.items(): + if pkgs: + print >>sys.stderr, "Duplicate %s for %s" % (key, ", ".join(pkgs)) + sys.exit(1) print "%s %s" % (args.channel, args.version) print viewer_copyright -for pkg in sorted(version): - print ': '.join([pkg, version[pkg]]) - if pkg in copyright: - print copyright[pkg] - else: +version = list(info['versions'].items()) +version.sort() +for pkg, pkg_version in version: + print ': '.join([pkg, pkg_version]) + try: + print info['copyrights'][pkg] + except KeyError: sys.exit("No copyright for %s" % pkg) - print '' + print |
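
Closing note on the _httplibcurl.cpp policyUpdated() hunk earlier in this diff: when a policy class enables pipelining, the multi handle's pipelining mode and connection limits are reconfigured through the new check_curl_multi_setopt helper. A minimal sketch of that multi-handle configuration follows, using plain curl_multi_setopt and illustrative limit values rather than the viewer's per-policy settings.

#include <curl/curl.h>

// Configure a multi handle along the lines of policyUpdated(): enable or
// disable pipelining and set connection limits. Values are illustrative.
static void configure_multi(CURLM * multi, long pipeline_depth,
                            long per_host_limit, long total_limit)
{
    if (pipeline_depth > 1)
    {
        curl_multi_setopt(multi, CURLMOPT_PIPELINING, 1L);
        curl_multi_setopt(multi, CURLMOPT_MAX_PIPELINE_LENGTH, pipeline_depth);
        curl_multi_setopt(multi, CURLMOPT_MAX_HOST_CONNECTIONS, per_host_limit);
        curl_multi_setopt(multi, CURLMOPT_MAX_TOTAL_CONNECTIONS, total_limit);
    }
    else
    {
        curl_multi_setopt(multi, CURLMOPT_PIPELINING, 0L);
        curl_multi_setopt(multi, CURLMOPT_MAX_HOST_CONNECTIONS, 0L);
        curl_multi_setopt(multi, CURLMOPT_MAX_TOTAL_CONNECTIONS, total_limit);
    }
}

int main()
{
    curl_global_init(CURL_GLOBAL_ALL);
    CURLM * multi = curl_multi_init();
    configure_multi(multi, 5L, 8L, 32L);   // illustrative limits
    curl_multi_cleanup(multi);
    curl_global_cleanup();
    return 0;
}
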