use {
crate::{
ebpf,
elf::ElfError,
vm::{Config, ContextObject, EbpfVm},
},
std::collections::{btree_map::Entry, BTreeMap},
};
/// Revisions of the SBPF bytecode dialect.
///
/// Variants are declared oldest-first so the derived `PartialOrd` lets the
/// feature predicates below compare revisions directly.
#[derive(Debug, PartialEq, PartialOrd, Eq, Clone, Copy)]
pub enum SBPFVersion {
    V0,
    V1,
    V2,
    V3,
    Reserved,
}

impl SBPFVersion {
    /// True when this revision is `min` or newer.
    fn at_least(self, min: SBPFVersion) -> bool {
        min <= self
    }

    /// V1+: dynamic stack frames.
    pub fn dynamic_stack_frames(self) -> bool {
        self.at_least(SBPFVersion::V1)
    }

    /// V2+: PQR instruction group enabled.
    pub fn enable_pqr(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: results are sign-extended explicitly.
    pub fn explicit_sign_extension_of_results(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: swapped operand order for `sub` with an immediate.
    pub fn swap_sub_reg_imm_operands(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: `neg` is removed from the dialect.
    pub fn disable_neg(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: `callx` takes its target from the source register.
    pub fn callx_uses_src_reg(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: `lddw` is removed from the dialect.
    pub fn disable_lddw(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: `le` is removed from the dialect.
    pub fn disable_le(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V2+: memory instructions use different opcode classes.
    pub fn move_memory_instruction_classes(self) -> bool {
        self.at_least(SBPFVersion::V2)
    }

    /// V3+: syscalls are static and `call` immediates are pc-relative.
    pub fn static_syscalls(self) -> bool {
        self.at_least(SBPFVersion::V3)
    }

    /// V3+: stricter validation of ELF headers.
    pub fn enable_stricter_elf_headers(self) -> bool {
        self.at_least(SBPFVersion::V3)
    }

    /// V3+: bytecode may live at a lower virtual address.
    pub fn enable_lower_bytecode_vaddr(self) -> bool {
        self.at_least(SBPFVersion::V3)
    }

    /// Everything after V0: rodata overlapping the stack is rejected.
    pub fn reject_rodata_stack_overlap(self) -> bool {
        !matches!(self, SBPFVersion::V0)
    }

    /// Everything after V0: ELF virtual addresses are honored.
    pub fn enable_elf_vaddr(self) -> bool {
        !matches!(self, SBPFVersion::V0)
    }

    /// Resolves the target pc of a `call` immediate.
    ///
    /// With static syscalls (V3+) the immediate is relative to the next
    /// instruction (`pc + imm + 1`, saturating); older dialects use the
    /// immediate verbatim. The result is truncated to `u32` either way.
    pub fn calculate_call_imm_target_pc(self, pc: usize, imm: i64) -> u32 {
        if !self.static_syscalls() {
            return imm as u32;
        }
        (pc as i64).saturating_add(imm).saturating_add(1) as u32
    }
}
/// Registry of function symbols: a `u32` key mapped to the symbol's name
/// bytes and an associated payload (e.g. a target pc or a function pointer).
#[derive(Debug, PartialEq, Eq)]
pub struct FunctionRegistry<T> {
    // BTreeMap keeps keys ordered, so iteration order is deterministic.
    pub(crate) map: BTreeMap<u32, (Vec<u8>, T)>,
}

impl<T> Default for FunctionRegistry<T> {
    /// Creates an empty registry.
    fn default() -> Self {
        FunctionRegistry {
            map: BTreeMap::default(),
        }
    }
}
impl<T: Copy + PartialEq> FunctionRegistry<T> {
    /// Inserts `value` under `key`, labeled with `name`.
    ///
    /// Registering the same key again with an equal value is a no-op; a
    /// different value for an occupied key is reported as a hash collision.
    pub fn register_function(
        &mut self,
        key: u32,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<(), ElfError> {
        match self.map.entry(key) {
            Entry::Vacant(slot) => {
                slot.insert((name.into(), value));
                Ok(())
            }
            Entry::Occupied(slot) if slot.get().1 == value => Ok(()),
            Entry::Occupied(_) => Err(ElfError::SymbolHashCollision(key)),
        }
    }

    /// Registers a function the way legacy (pre static-syscall) ELFs do.
    ///
    /// With `hash_symbol_name` set, the key is the hash of the literal name
    /// for `"entrypoint"`, but the hash of the little-endian bytes of the
    /// value for every other symbol; otherwise the value itself (truncated
    /// to `u32`) is used directly. A key already present in the loader's
    /// builtin registry is rejected as a collision.
    pub(crate) fn register_function_hashed_legacy<C: ContextObject>(
        &mut self,
        loader: &BuiltinProgram<C>,
        hash_symbol_name: bool,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<u32, ElfError>
    where
        usize: From<T>,
    {
        let name = name.into();
        let config = loader.get_config();
        let key = if hash_symbol_name {
            let hash = if name == b"entrypoint" {
                ebpf::hash_symbol_name(b"entrypoint")
            } else {
                ebpf::hash_symbol_name(&usize::from(value).to_le_bytes())
            };
            if loader.get_function_registry().lookup_by_key(hash).is_some() {
                return Err(ElfError::SymbolHashCollision(hash));
            }
            hash
        } else {
            usize::from(value) as u32
        };
        // Names are only retained when labels are enabled, except for the
        // entrypoint, whose name is always kept.
        let stored_name = if config.enable_symbol_and_section_labels || name == b"entrypoint" {
            name
        } else {
            Vec::default()
        };
        self.register_function(key, stored_name, value)?;
        Ok(key)
    }

    /// Removes the entry stored under `key`, if any.
    pub fn unregister_function(&mut self, key: u32) {
        self.map.remove(&key);
    }

    /// Iterates over all registered keys in ascending order.
    pub fn keys(&self) -> impl Iterator<Item = u32> + '_ {
        self.map.keys().copied()
    }

    /// Iterates over `(key, (name, value))` entries in ascending key order.
    pub fn iter(&self) -> impl Iterator<Item = (u32, (&[u8], T))> + '_ {
        self.map
            .iter()
            .map(|(key, (name, value))| (*key, (name.as_slice(), *value)))
    }

    /// Looks up an entry by its key.
    pub fn lookup_by_key(&self, key: u32) -> Option<(&[u8], T)> {
        let (name, value) = self.map.get(&key)?;
        Some((name.as_slice(), *value))
    }

    /// Looks up an entry by its symbol name (linear scan over all entries).
    pub fn lookup_by_name(&self, name: &[u8]) -> Option<(&[u8], T)> {
        self.map.values().find_map(|(stored, value)| {
            (stored.as_slice() == name).then(|| (stored.as_slice(), *value))
        })
    }

    /// Approximate memory footprint of the registry in bytes
    /// (saturating, so it never overflows).
    pub fn mem_size(&self) -> usize {
        let entries = self.map.values().fold(0usize, |total, (name, value)| {
            let entry = std::mem::size_of_val(value)
                .saturating_add(std::mem::size_of_val(name))
                .saturating_add(name.capacity());
            total.saturating_add(entry)
        });
        std::mem::size_of::<Self>().saturating_add(entries)
    }
}
/// Signature of a builtin (syscall) entry point: a raw pointer to the VM plus
/// the five `u64` argument registers. There is no return value; the trampoline
/// generated by `declare_builtin_function!` stores the result into the VM.
pub type BuiltinFunction<C> = fn(*mut EbpfVm<C>, u64, u64, u64, u64, u64);
/// A loader's VM configuration together with its registry of builtin
/// (syscall) functions callable from guest programs.
#[derive(Eq)]
pub struct BuiltinProgram<C: ContextObject> {
    // `None` when constructed via `new_builtin`; in that case `get_config`
    // panics, so only loader-style instances may be queried for a config.
    config: Option<Box<Config>>,
    // Builtin functions keyed by the hash of their symbol name
    // (see `register_function` below).
    sparse_registry: FunctionRegistry<BuiltinFunction<C>>,
}
impl<C: ContextObject> PartialEq for BuiltinProgram<C> {
    /// Two programs are equal when both their configs and their registries
    /// compare equal.
    fn eq(&self, other: &Self) -> bool {
        self.config == other.config && self.sparse_registry == other.sparse_registry
    }
}
impl<C: ContextObject> BuiltinProgram<C> {
pub fn new_loader(config: Config) -> Self {
Self {
config: Some(Box::new(config)),
sparse_registry: FunctionRegistry::default(),
}
}
pub fn new_builtin() -> Self {
Self {
config: None,
sparse_registry: FunctionRegistry::default(),
}
}
pub fn new_mock() -> Self {
Self {
config: Some(Box::default()),
sparse_registry: FunctionRegistry::default(),
}
}
pub fn get_config(&self) -> &Config {
self.config.as_ref().unwrap()
}
pub fn get_function_registry(&self) -> &FunctionRegistry<BuiltinFunction<C>> {
&self.sparse_registry
}
pub fn mem_size(&self) -> usize {
std::mem::size_of::<Self>()
.saturating_add(if self.config.is_some() {
std::mem::size_of::<Config>()
} else {
0
})
.saturating_add(self.sparse_registry.mem_size())
}
pub fn register_function(
&mut self,
name: &str,
value: BuiltinFunction<C>,
) -> Result<(), ElfError> {
let key = ebpf::hash_symbol_name(name.as_bytes());
self.sparse_registry
.register_function(key, name, value)
.map(|_| ())
}
}
impl<C: ContextObject> std::fmt::Debug for BuiltinProgram<C> {
    /// Debug-prints only the function registry, with each builtin's `fn`
    /// pointer rendered as a plain integer by viewing the registry as a
    /// `FunctionRegistry<usize>`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        unsafe {
            // SAFETY: both instantiations wrap a `BTreeMap<u32, (Vec<u8>, _)>`
            // whose payload (`fn` pointer vs `usize`) is pointer-sized.
            // NOTE(review): `repr(Rust)` gives no formal layout guarantee that
            // the two generic instantiations match — confirm this transmute is
            // sound, or replace it with a field-wise Debug implementation.
            writeln!(
                f,
                "registry: {:?}",
                std::mem::transmute::<
                    &FunctionRegistry<BuiltinFunction<C>>,
                    &FunctionRegistry<usize>,
                >(&self.sparse_registry),
            )?;
        }
        Ok(())
    }
}
/// Declares a builtin function (syscall) type.
///
/// Generates a unit struct `$name` with two associated functions:
/// * `rust` — the typed implementation supplied by the macro user;
/// * `vm`   — a trampoline matching the `BuiltinFunction` ABI, which recovers
///   the `EbpfVm` from the offset pointer, updates the instruction meter,
///   invokes `rust`, and stores the converted result in `vm.program_result`.
#[macro_export]
macro_rules! declare_builtin_function {
    ($(#[$attr:meta])* $name:ident $(<$($generic_ident:tt : $generic_type:tt),+>)?, fn rust(
        $vm:ident : &mut $ContextObject:ty,
        $arg_a:ident : u64,
        $arg_b:ident : u64,
        $arg_c:ident : u64,
        $arg_d:ident : u64,
        $arg_e:ident : u64,
        $memory_mapping:ident : &mut $MemoryMapping:ty,
    ) -> $Result:ty { $($rust:tt)* }) => {
        $(#[$attr])*
        pub struct $name {}
        impl $name {
            // Typed implementation of the builtin, as written by the user.
            pub fn rust $(<$($generic_ident : $generic_type),+>)? (
                $vm: &mut $ContextObject,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
                $memory_mapping: &mut $MemoryMapping,
            ) -> $Result {
                $($rust)*
            }
            // VM-facing trampoline with the `BuiltinFunction` signature.
            #[allow(clippy::too_many_arguments)]
            pub fn vm $(<$($generic_ident : $generic_type),+>)? (
                $vm: *mut $crate::vm::EbpfVm<$ContextObject>,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
            ) {
                use $crate::vm::ContextObject;
                // The pointer handed to a builtin is offset by the runtime
                // environment key; undo the offset to reach the real VM.
                let vm = unsafe {
                    &mut *($vm.cast::<u64>().offset(-($crate::vm::get_runtime_environment_key() as isize)).cast::<$crate::vm::EbpfVm<$ContextObject>>())
                };
                let config = vm.loader.get_config();
                if config.enable_instruction_meter {
                    // Charge the instructions executed since the last sync.
                    // NOTE(review): assumes previous_instruction_meter >=
                    // due_insn_count; underflow would panic in debug builds.
                    vm.context_object_pointer.consume(vm.previous_instruction_meter - vm.due_insn_count);
                }
                // Run the typed implementation and convert its result into
                // the VM's ProgramResult (errors become SyscallError).
                let converted_result: $crate::error::ProgramResult = Self::rust $(::<$($generic_ident),+>)?(
                    vm.context_object_pointer, $arg_a, $arg_b, $arg_c, $arg_d, $arg_e, &mut vm.memory_mapping,
                ).map_err(|err| $crate::error::EbpfError::SyscallError(err)).into();
                vm.program_result = converted_result;
                if config.enable_instruction_meter {
                    // Resynchronize the meter snapshot after the builtin ran.
                    vm.previous_instruction_meter = vm.context_object_pointer.get_remaining();
                }
            }
        }
    };
}