extern crate bindgen;
extern crate duct;
use duct::cmd;
use flate2::read::GzDecoder;
use std::error::Error as StdError;
use std::ffi::OsString;
use std::fs::{read_to_string, File};
use std::io::ErrorKind::NotFound;
use std::io::{Error as IoError, Write};
use std::path::{Path, PathBuf};
use std::process::Output;
use std::{env, thread};
use tar::Archive;
use which::which;
const ZLIB_DEFAULT_VERSION: &str = "1.3";
const ZLIB_GPG_SERVER_AND_KEY_ID: (&str, &str) = ("keyserver.ubuntu.com", "783FCD8E58BCAFBA");
const ZLIB_DOWNLOAD_URL_PREFIX: &str = "https://www.zlib.net";
const PCRE2_DEFAULT_VERSION: &str = "10.42";
const PCRE2_GPG_SERVER_AND_KEY_ID: (&str, &str) = ("keyserver.ubuntu.com", "9766E084FB0F43D8");
const PCRE2_DOWNLOAD_URL_PREFIX: &str = "https://github.com/PCRE2Project/pcre2/releases/download";
const OPENSSL_DEFAULT_VERSION: &str = "3.0.7";
const OPENSSL_GPG_SERVER_AND_KEY_IDS: (&str, &str) = (
"keys.openpgp.org",
"\
A21FAB74B0088AA361152586B8EF1A6BA9DA2D5C \
8657ABB260F056B1E5190839D9C4D26D0E604491 \
B7C1C14360F353A36862E4D5231C84CDDCC69C45 \
95A9908DDFA16830BE9FB9003D30A3A9FF1360DC \
7953AC1FBC3DC8B3B292393ED5E9E43F7DF9EE8C",
);
const OPENSSL_DOWNLOAD_URL_PREFIX: &str = "https://www.openssl.org/source/";
const NGX_DEFAULT_VERSION: &str = "1.23.3";
const NGX_GPG_SERVER_AND_KEY_ID: (&str, &str) = ("keyserver.ubuntu.com", "A0EA981B66B0D967");
const NGX_DOWNLOAD_URL_PREFIX: &str = "https://nginx.org/download";
const ALL_SERVERS_AND_PUBLIC_KEY_IDS: [(&str, &str); 4] = [
ZLIB_GPG_SERVER_AND_KEY_ID,
PCRE2_GPG_SERVER_AND_KEY_ID,
OPENSSL_GPG_SERVER_AND_KEY_IDS,
NGX_GPG_SERVER_AND_KEY_ID,
];
const NGX_BASE_MODULES: [&str; 20] = [
"--with-compat",
"--with-http_addition_module",
"--with-http_auth_request_module",
"--with-http_flv_module",
"--with-http_gunzip_module",
"--with-http_gzip_static_module",
"--with-http_random_index_module",
"--with-http_realip_module",
"--with-http_secure_link_module",
"--with-http_slice_module",
"--with-http_slice_module",
"--with-http_ssl_module",
"--with-http_stub_status_module",
"--with-http_sub_module",
"--with-http_v2_module",
"--with-stream_realip_module",
"--with-stream_ssl_module",
"--with-stream_ssl_preread_module",
"--with-stream",
"--with-threads",
];
const NGX_LINUX_ADDITIONAL_OPTS: [&str; 3] = [
"--with-file-aio",
"--with-cc-opt=-g -fstack-protector-strong -Wformat -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -fPIC",
"--with-ld-opt=-Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -pie",
];
const ENV_VARS_TRIGGERING_RECOMPILE: [&str; 9] = [
"DEBUG",
"OUT_DIR",
"ZLIB_VERSION",
"PCRE2_VERSION",
"OPENSSL_VERSION",
"NGX_VERSION",
"CARGO_CFG_TARGET_OS",
"CARGO_MANIFEST_DIR",
"CARGO_TARGET_TMPDIR",
];
fn main() -> Result<(), Box<dyn StdError>> {
let cache_dir = make_cache_dir()?;
import_gpg_keys(&cache_dir)?;
let (_nginx_install_dir, nginx_src_dir) = compile_nginx()?;
for var in ENV_VARS_TRIGGERING_RECOMPILE {
println!("cargo:rerun-if-env-changed={var}");
}
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=wrapper.h");
generate_binding(nginx_src_dir);
Ok(())
}
fn generate_binding(nginx_source_dir: PathBuf) {
let autoconf_makefile_path = nginx_source_dir.join("objs").join("Makefile");
let clang_args: Vec<String> = parse_includes_from_makefile(&autoconf_makefile_path)
.into_iter()
.map(|path| format!("-I{}", path.to_string_lossy()))
.collect();
let bindings = bindgen::Builder::default()
.blocklist_item("IPPORT_RESERVED")
.header("wrapper.h")
.clang_args(clang_args)
.layout_tests(false)
.generate()
.expect("Unable to generate bindings");
let out_dir_env = env::var("OUT_DIR").expect("The required environment variable OUT_DIR was not set");
let out_path = PathBuf::from(out_dir_env);
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
fn zlib_archive_url() -> String {
let version = env::var("ZLIB_VERSION").unwrap_or_else(|_| ZLIB_DEFAULT_VERSION.to_string());
format!("{ZLIB_DOWNLOAD_URL_PREFIX}/zlib-{version}.tar.gz")
}
fn pcre2_archive_url() -> String {
let version = env::var("PCRE2_VERSION").unwrap_or_else(|_| PCRE2_DEFAULT_VERSION.to_string());
format!("{PCRE2_DOWNLOAD_URL_PREFIX}/pcre2-{version}/pcre2-{version}.tar.gz")
}
fn openssl_archive_url() -> String {
let version = env::var("OPENSSL_VERSION").unwrap_or_else(|_| OPENSSL_DEFAULT_VERSION.to_string());
format!("{OPENSSL_DOWNLOAD_URL_PREFIX}/openssl-{version}.tar.gz")
}
fn nginx_archive_url() -> String {
let version = env::var("NGX_VERSION").unwrap_or_else(|_| NGX_DEFAULT_VERSION.to_string());
format!("{NGX_DOWNLOAD_URL_PREFIX}/nginx-{version}.tar.gz")
}
fn all_archives() -> Vec<(String, String)> {
vec![
(zlib_archive_url(), format!("{}.asc", zlib_archive_url())),
(pcre2_archive_url(), format!("{}.sig", pcre2_archive_url())),
(openssl_archive_url(), format!("{}.asc", openssl_archive_url())),
(nginx_archive_url(), format!("{}.asc", nginx_archive_url())),
]
}
fn gpg_path() -> Option<PathBuf> {
which::which("gpg").ok()
}
fn source_output_dir(cache_dir: &Path) -> PathBuf {
env::var("CARGO_TARGET_TMPDIR").map(PathBuf::from).unwrap_or_else(|_| {
cache_dir
.join("src")
.join(format!("{}-{}", env::consts::OS, env::consts::ARCH))
})
}
#[allow(clippy::ptr_arg)]
fn nginx_install_dir(base_dir: &PathBuf) -> PathBuf {
let nginx_version = env::var("NGX_VERSION").unwrap_or_else(|_| NGX_DEFAULT_VERSION.to_string());
let platform = format!("{}-{}", env::consts::OS, env::consts::ARCH);
base_dir.join("nginx").join(nginx_version).join(platform)
}
fn import_gpg_keys(cache_dir: &Path) -> Result<(), Box<dyn StdError>> {
if let Some(gpg) = gpg_path() {
let gnupghome = cache_dir.join(".gnupg");
if !gnupghome.exists() {
std::fs::create_dir_all(&gnupghome)?;
}
let keys_to_import = ALL_SERVERS_AND_PUBLIC_KEY_IDS.iter().filter(|(_, key_id)| {
let key_id_record_file = gnupghome.join(format!("{key_id}.key"));
!key_id_record_file.exists()
});
for (server, key_ids) in keys_to_import {
for key_id in key_ids.split_whitespace() {
let output = cmd!(
&gpg,
"--homedir",
&gnupghome,
"--keyserver",
server,
"--recv-keys",
key_id
)
.stderr_to_stdout()
.stderr_capture()
.run()?;
if !output.status.success() {
return Err(format!(
"Failed to import GPG key {} from server {}: {}",
key_id,
server,
String::from_utf8_lossy(&output.stdout)
)
.into());
}
println!("Imported GPG key: {key_id}");
let key_id_record_file = gnupghome.join(format!("{key_ids}.key"));
File::create(key_id_record_file).expect("Unable to create key id record file");
}
}
}
Ok(())
}
fn make_cache_dir() -> Result<PathBuf, Box<dyn StdError>> {
let base_dir = env::var("CARGO_MANIFEST_DIR")
.map(PathBuf::from)
.unwrap_or_else(|_| env::current_dir().expect("Failed to get current directory"));
let cache_dir = base_dir
.parent()
.expect("Failed to find parent directory of manifest directory")
.join(".cache");
if !cache_dir.exists() {
std::fs::create_dir_all(&cache_dir)?;
}
Ok(cache_dir)
}
fn download(cache_dir: &Path, url: &str) -> Result<PathBuf, Box<dyn StdError>> {
fn proceed_with_download(file_path: &Path) -> bool {
!file_path.exists() || file_path.metadata().map_or(false, |m| m.len() < 1)
}
let filename = url.split('/').last().unwrap();
let file_path = cache_dir.join(filename);
if proceed_with_download(&file_path) {
let mut reader = ureq::get(url).call()?.into_reader();
let mut file = std::fs::File::create(&file_path)?;
std::io::copy(&mut reader, &mut file)?;
}
Ok(file_path)
}
fn verify_signature_file(cache_dir: &Path, signature_path: &Path) -> Result<(), Box<dyn StdError>> {
if let Some(gpg) = gpg_path() {
let gnupghome = cache_dir.join(".gnupg");
let output = cmd!(gpg, "--homedir", &gnupghome, "--list-packets", signature_path)
.stderr_to_stdout()
.stdout_capture()
.run()?;
if !output.status.success() {
eprintln!("{}", String::from_utf8_lossy(&output.stdout));
return Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
format!(
"GPG signature file verification failed for signature: {}",
signature_path.display()
),
)));
}
} else {
println!("GPG not found, skipping signature file verification");
}
Ok(())
}
fn verify_archive_signature(
cache_dir: &Path,
archive_path: &Path,
signature_path: &Path,
) -> Result<(), Box<dyn StdError>> {
if let Some(gpg) = gpg_path() {
let gnupghome = cache_dir.join(".gnupg");
let output = cmd!(gpg, "--homedir", &gnupghome, "--verify", signature_path, archive_path)
.stderr_to_stdout()
.stdout_capture()
.run()?;
if !output.status.success() {
eprintln!("{}", String::from_utf8_lossy(&output.stdout));
return Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
format!(
"GPG signature verification failed of archive failed [{}]",
archive_path.display()
),
)));
}
} else {
println!("GPG not found, skipping signature verification");
}
Ok(())
}
fn get_archive(cache_dir: &Path, archive_url: &str, signature_url: &str) -> Result<PathBuf, Box<dyn StdError>> {
let signature_path = download(cache_dir, signature_url)?;
if let Err(e) = verify_signature_file(cache_dir, &signature_path) {
std::fs::remove_file(&signature_path)?;
return Err(e);
}
let archive_path = download(cache_dir, archive_url)?;
match verify_archive_signature(cache_dir, &archive_path, &signature_path) {
Ok(_) => Ok(archive_path),
Err(e) => {
std::fs::remove_file(&archive_path)?;
Err(e)
}
}
}
fn extract_archive(
archive_path: &Path,
extract_output_base_dir: &Path,
) -> Result<(String, PathBuf), Box<dyn StdError>> {
if !extract_output_base_dir.exists() {
std::fs::create_dir_all(extract_output_base_dir)?;
}
let archive_file =
File::open(archive_path).unwrap_or_else(|_| panic!("Unable to open archive file: {}", archive_path.display()));
let stem = archive_path
.file_name()
.and_then(|s| s.to_str())
.and_then(|s| s.rsplitn(3, '.').last())
.expect("Unable to determine archive file name stem");
let dependency_name = stem
.split_once('-')
.map(|(s, _)| s.to_owned())
.unwrap_or_else(|| panic!("Unable to determine dependency name based on stem: {stem}"));
let extract_output_dir = extract_output_base_dir.to_owned();
let archive_output_dir = extract_output_dir.join(stem);
if !archive_output_dir.exists() {
Archive::new(GzDecoder::new(archive_file))
.entries()?
.filter_map(|e| e.ok())
.for_each(|mut entry| {
let path = entry.path().unwrap();
let stripped_path = path.components().skip(1).collect::<PathBuf>();
entry.unpack(&archive_output_dir.join(stripped_path)).unwrap();
});
} else {
println!(
"Archive [{}] already extracted to directory: {}",
stem,
archive_output_dir.display()
);
}
Ok((dependency_name, archive_output_dir))
}
fn extract_all_archives(cache_dir: &Path) -> Result<Vec<(String, PathBuf)>, Box<dyn StdError>> {
let archives = all_archives();
let mut sources = Vec::new();
let extract_output_base_dir = source_output_dir(cache_dir);
if !extract_output_base_dir.exists() {
std::fs::create_dir_all(&extract_output_base_dir)?;
}
for (archive_url, signature_url) in archives {
let archive_path = get_archive(cache_dir, &archive_url, &signature_url)?;
let (name, output_dir) = extract_archive(&archive_path, &extract_output_base_dir)?;
sources.push((name, output_dir));
}
Ok(sources)
}
fn compile_nginx() -> Result<(PathBuf, PathBuf), Box<dyn StdError>> {
fn find_dependency_path<'a>(sources: &'a [(String, PathBuf)], name: &str) -> &'a PathBuf {
sources
.iter()
.find(|(n, _)| n == name)
.map(|(_, p)| p)
.unwrap_or_else(|| panic!("Unable to find dependency [{name}] path"))
}
let cache_dir = make_cache_dir()?;
let nginx_install_dir = nginx_install_dir(&cache_dir);
let sources = extract_all_archives(&cache_dir)?;
let zlib_src_dir = find_dependency_path(&sources, "zlib");
let openssl_src_dir = find_dependency_path(&sources, "openssl");
let pcre2_src_dir = find_dependency_path(&sources, "pcre2");
let nginx_src_dir = find_dependency_path(&sources, "nginx");
let nginx_configure_flags = nginx_configure_flags(&nginx_install_dir, zlib_src_dir, openssl_src_dir, pcre2_src_dir);
let nginx_binary_exists = nginx_install_dir.join("sbin").join("nginx").exists();
let autoconf_makefile_exists = nginx_src_dir.join("Makefile").exists();
let build_info_path = nginx_src_dir.join("last-build-info");
let current_build_info = build_info(&nginx_configure_flags);
let build_info_no_change = if build_info_path.exists() {
read_to_string(&build_info_path).map_or(false, |s| s == current_build_info)
} else {
false
};
println!("NGINX already installed: {nginx_binary_exists}");
println!("NGINX autoconf makefile already created: {autoconf_makefile_exists}");
println!("NGINX build info changed: {}", !build_info_no_change);
if !nginx_binary_exists || !autoconf_makefile_exists || !build_info_no_change {
std::fs::create_dir_all(&nginx_install_dir)?;
configure(nginx_configure_flags, nginx_src_dir)?;
make(nginx_src_dir, "install")?;
let mut output = File::create(build_info_path)?;
output.write_all(current_build_info.as_bytes())?;
}
Ok((nginx_install_dir, nginx_src_dir.to_owned()))
}
fn build_info(nginx_configure_flags: &[String]) -> String {
nginx_configure_flags.join(" ")
}
fn nginx_configure_flags(
nginx_install_dir: &Path,
zlib_src_dir: &Path,
openssl_src_dir: &Path,
pcre2_src_dir: &Path,
) -> Vec<String> {
fn format_source_path(flag: &str, path: &Path) -> String {
format!(
"{}={}",
flag,
path.as_os_str().to_str().expect("Unable to read source path as string")
)
}
let modules = || -> Vec<String> {
let mut modules = vec![
format_source_path("--with-zlib", zlib_src_dir),
format_source_path("--with-pcre", pcre2_src_dir),
format_source_path("--with-openssl", openssl_src_dir),
];
for module in NGX_BASE_MODULES {
modules.push(module.to_string());
}
modules
};
let mut nginx_opts = vec![format_source_path("--prefix", nginx_install_dir)];
if env::var("NGX_DEBUG").map_or(false, |s| s == "true") {
println!("Enabling --with-debug");
nginx_opts.push("--with-debug".to_string());
}
if env::var("CARGO_CFG_TARGET_OS").map_or(env::consts::OS == "linux", |s| s == "linux") {
for flag in NGX_LINUX_ADDITIONAL_OPTS {
nginx_opts.push(flag.to_string());
}
}
for flag in modules() {
nginx_opts.push(flag);
}
nginx_opts
}
fn configure(nginx_configure_flags: Vec<String>, nginx_src_dir: &Path) -> std::io::Result<Output> {
let flags = nginx_configure_flags
.iter()
.map(OsString::from)
.collect::<Vec<OsString>>();
let configure_executable = nginx_src_dir.join("configure");
if !configure_executable.exists() {
panic!(
"Unable to find NGINX configure script at: {}",
configure_executable.to_string_lossy()
);
}
println!(
"Running NGINX configure script with flags: {:?}",
nginx_configure_flags.join(" ")
);
duct::cmd(configure_executable, flags)
.dir(nginx_src_dir)
.stderr_to_stdout()
.run()
}
fn make(nginx_src_dir: &Path, arg: &str) -> std::io::Result<Output> {
let make_bin_path = match (which("gmake"), which("make")) {
(Ok(path), _) => Ok(path),
(_, Ok(path)) => Ok(path),
_ => Err(IoError::new(NotFound, "Unable to find make in path (gmake or make)")),
}?;
let num_jobs = match env::var("NUM_JOBS") {
Ok(s) => s.parse::<usize>().ok(),
Err(_) => thread::available_parallelism().ok().map(|n| n.get()),
}
.unwrap_or(1);
cmd!(make_bin_path, "-j", num_jobs.to_string(), arg)
.dir(nginx_src_dir)
.stderr_to_stdout()
.run()
}
fn parse_includes_from_makefile(nginx_autoconf_makefile_path: &PathBuf) -> Vec<PathBuf> {
fn extract_include_part(line: &str) -> &str {
line.strip_suffix('\\').map_or(line, |s| s.trim())
}
fn extract_after_i_flag(line: &str) -> Option<&str> {
let mut parts = line.split("-I ");
match parts.next() {
Some(_) => parts.next().map(extract_include_part),
None => None,
}
}
let mut includes = vec![];
let makefile_contents = match std::fs::read_to_string(nginx_autoconf_makefile_path) {
Ok(path) => path,
Err(e) => {
panic!(
"Unable to read makefile from path [{}]. Error: {}",
nginx_autoconf_makefile_path.to_string_lossy(),
e
);
}
};
let mut includes_lines = false;
for line in makefile_contents.lines() {
if !includes_lines {
if let Some(stripped) = line.strip_prefix("ALL_INCS") {
includes_lines = true;
if let Some(part) = extract_after_i_flag(stripped) {
includes.push(part);
}
continue;
}
}
if includes_lines {
if let Some(part) = extract_after_i_flag(line) {
includes.push(part);
} else {
break;
}
}
}
let makefile_dir = nginx_autoconf_makefile_path
.parent()
.expect("makefile path has no parent")
.parent()
.expect("objs dir has no parent")
.to_path_buf()
.canonicalize()
.expect("Unable to canonicalize makefile path");
includes
.into_iter()
.map(PathBuf::from)
.map(|path| {
if path.is_absolute() {
path
} else {
makefile_dir.join(path)
}
})
.collect()
}