first commit

This commit is contained in:
root
2025-12-05 07:14:11 +00:00
commit 2ed4393eb9
129 changed files with 20524 additions and 0 deletions

View File

@@ -0,0 +1,3 @@
{
"editor.tabSize": 4
}

3814
aws_sigma_service/executor/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,23 @@
# Sandboxed JavaScript executor: embeds the boa engine and exposes optional
# fs / network / PostgreSQL builtins to guest scripts.
[package]
name = "executor"
version = "0.1.0"
edition = "2024"
[dependencies]
anyhow = "1.0.100"
# NOTE(review): axum does not appear in the visible sources — confirm it is
# actually used before keeping the dependency.
axum = "0.8.7"
base64 = "0.22.1"
# The boa_* crates must stay on the same minor version.
boa_engine = "0.21.0"
boa_gc = "0.21.0"
boa_runtime = { version = "0.21.0", features = ["reqwest-blocking"] }
# NOTE(review): `default-features = true` is already chrono's default —
# confirm whether `false` was intended here.
chrono = { version = "0.4.38", default-features = true }
clap = { version = "4.5.53", features = ["derive"] }
envy = "0.4.2"
futures-concurrency = "7.6.3"
futures-lite = "2.6.1"
memory-stats = "1.2.0"
rlimit = "0.10.2"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.132"
sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "json", "chrono"] }
tokio = { version = "1.40.0", features = ["rt-multi-thread"] }

View File

@@ -0,0 +1,58 @@
/**
 * Synchronous file-system builtin (available with --enable-fs).
 * Only UTF-8 text is supported by the native binding.
 */
declare namespace fs {
  /** Reads the entire file at `path` and returns it as a string. */
  function readFileSync(
    path: string,
    options?: { encoding?: null } | "utf-8"
  ): string;
  /** Writes `data` to `path`, replacing any existing content. */
  function writeFileSync(
    path: string,
    data: string,
    options?: { encoding?: null } | "utf-8"
  ): void;
  /** Returns true when `path` exists. */
  function existsSync(path: string): boolean;
  /** Creates a directory; `recursive: true` also creates missing parents. */
  function mkdirSync(path: string, options?: { recursive?: boolean }): void;
  /**
   * Lists the entry names of a directory.
   * Fixed: the native side registers this as `readdirSync` (lowercase `d`);
   * the previous `readDirSync` declaration described a function that does
   * not exist at runtime.
   */
  function readdirSync(path: string): string[];
  /** Returns metadata for `path`. */
  function statSync(path: string): {
    isFile: boolean;
    isDirectory: boolean;
    size: number;
  };
}
/**
 * Read-only view of the incoming request. The prelude populates
 * `globalThis.req` from ./event.json before the user script runs.
 */
declare namespace req {
  // HTTP method (e.g. "GET").
  const method: string;
  // Request path as routed to this function.
  const path: string;
  // Original URL as received by the host.
  const originalUrl: string;
  // NOTE(review): presumably the route prefix that addressed this
  // function — confirm against the host that writes event.json.
  const functionPath: string;
  const headers: Record<string, string | string[]>;
  const query: Record<string, string | string[]>;
  const params: Record<string, string | string[]>;
  // Request body; shape depends on the caller's payload.
  const body: unknown;
}
/**
 * Mutable response object; its final state is serialized by the executor
 * after the script finishes. All setters return `res` for chaining.
 */
declare namespace res {
  let statusCode: number;
  let headers: Record<string, string | string[]>;
  let body: unknown;
  // Sets the status code; non-finite values are ignored, fractions truncated.
  function setStatus(code: number): typeof res;
  function setHeader(name: string, value: string | string[]): typeof res;
  // Merges all own properties of `map` into the header set.
  function setHeaders(map: Record<string, string | string[]>): typeof res;
  function removeHeader(name: string): typeof res;
  // Restores status 200, empty headers, and a null body.
  function reset(): typeof res;
  // Stores `payload` as the body unchanged.
  function send(payload: unknown): typeof res;
  // JSON-stringifies `payload` and defaults content-type to application/json.
  function json(payload: unknown): typeof res;
}
/**
 * Parameterized PostgreSQL access (available with --enable-sql).
 * Parameters are bound server-side, never interpolated into the text.
 */
declare namespace sql {
  // Runs a row-returning statement; resolves to one plain object per row.
  function query<T = unknown>(query: string, params?: unknown[]): Promise<T[]>;
  // Runs a statement for its side effects only.
  function execute(
    query: string,
    params?: unknown[]
  ): Promise<{ rowsAffected: number }>;
}

View File

@@ -0,0 +1,252 @@
use std::fs;
use std::path::PathBuf;
use boa_engine::error::JsNativeError;
use boa_engine::js_string;
use boa_engine::native_function::NativeFunction;
use boa_engine::object::{JsObject, ObjectInitializer};
use boa_engine::property::Attribute;
use boa_engine::{Context, JsError, JsResult, JsString, JsValue};
/// Text encodings accepted by the `fs` builtin.
///
/// Only UTF-8 is currently supported; any other requested encoding is
/// rejected with a JS RangeError when parsed.
#[derive(Debug, Clone, Copy)]
enum Encoding {
    Utf8,
}
/// The `fs` builtin: a small synchronous file-system API exposed to guest
/// JavaScript, registered under the global name given by [`Fs::NAME`].
pub(crate) struct Fs;
impl Fs {
    /// Global property name the module is registered under.
    pub const NAME: JsString = js_string!("fs");
    /// Builds the `fs` namespace object and attaches every native function.
    pub fn init(context: &mut Context) -> JsObject {
        ObjectInitializer::new(context)
            .property(
                js_string!("name"),
                JsString::from(Self::NAME),
                Attribute::READONLY,
            )
            .function(
                NativeFunction::from_fn_ptr(Self::read_file_sync),
                js_string!("readFileSync"),
                2,
            )
            .function(
                NativeFunction::from_fn_ptr(Self::write_file_sync),
                js_string!("writeFileSync"),
                2,
            )
            .function(
                NativeFunction::from_fn_ptr(Self::exists_sync),
                js_string!("existsSync"),
                1,
            )
            .function(
                NativeFunction::from_fn_ptr(Self::mkdir_sync),
                js_string!("mkdirSync"),
                1,
            )
            // NOTE(review): registered as "readdirSync" while the TS
            // declaration file spells it "readDirSync" — confirm which
            // spelling guest code actually uses.
            .function(
                NativeFunction::from_fn_ptr(Self::readdir_sync),
                js_string!("readdirSync"),
                1,
            )
            .function(
                NativeFunction::from_fn_ptr(Self::stat_sync),
                js_string!("statSync"),
                1,
            )
            .build()
    }
    /// `fs.readFileSync(path, encoding?)`: reads the whole file and returns
    /// it as a string. Non-UTF-8 content is a JS error.
    fn read_file_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "readFileSync", context)?;
        let encoding = Self::encoding_from_arg(args.get(1), context)?;
        let content = fs::read(&path).map_err(JsError::from_rust)?;
        match encoding {
            Encoding::Utf8 => {
                let text = String::from_utf8(content).map_err(JsError::from_rust)?;
                Ok(JsValue::new(JsString::from(text)))
            }
        }
    }
    /// `fs.writeFileSync(path, data, encoding?)`: writes `data`, replacing
    /// any existing file content. Returns `undefined`.
    fn write_file_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "writeFileSync", context)?;
        let data = Self::string_from_arg(args, 1, "data", "writeFileSync", context)?;
        let encoding = Self::encoding_from_arg(args.get(2), context)?;
        match encoding {
            Encoding::Utf8 => {
                fs::write(path, data).map_err(JsError::from_rust)?;
                Ok(JsValue::undefined())
            }
        }
    }
    /// `fs.existsSync(path)`: true when the path exists.
    fn exists_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "existsSync", context)?;
        Ok(JsValue::new(path.exists()))
    }
    /// `fs.mkdirSync(path, {recursive}?)`: creates a directory, optionally
    /// with all missing parents.
    fn mkdir_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "mkdirSync", context)?;
        let recursive = Self::recursive_from_arg(args.get(1), context)?;
        if recursive {
            fs::create_dir_all(path).map_err(JsError::from_rust)?;
        } else {
            fs::create_dir(path).map_err(JsError::from_rust)?;
        }
        Ok(JsValue::undefined())
    }
    /// `fs.readdirSync(path)`: returns an array of entry names (lossy UTF-8).
    fn readdir_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "readdirSync", context)?;
        let entries = fs::read_dir(path).map_err(JsError::from_rust)?;
        let names = entries
            .map(|entry| {
                entry
                    .map(|dir_entry| {
                        let name = dir_entry.file_name();
                        JsValue::new(JsString::from(name.to_string_lossy().into_owned()))
                    })
                    .map_err(JsError::from_rust)
            })
            .collect::<JsResult<Vec<_>>>()?;
        Self::array_from_values(names, context)
    }
    /// `fs.statSync(path)`: returns `{size, isFile, isDirectory}`.
    /// Uses `fs::metadata`, which follows symlinks.
    fn stat_sync(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        let path = Self::path_from_args(args, 0, "path", "statSync", context)?;
        let metadata = fs::metadata(path).map_err(JsError::from_rust)?;
        let mut stats = ObjectInitializer::new(context);
        stats
            .property(
                js_string!("size"),
                metadata.len() as f64,
                Attribute::READONLY | Attribute::ENUMERABLE,
            )
            .property(
                js_string!("isFile"),
                metadata.is_file(),
                Attribute::READONLY | Attribute::ENUMERABLE,
            )
            .property(
                js_string!("isDirectory"),
                metadata.is_dir(),
                Attribute::READONLY | Attribute::ENUMERABLE,
            );
        Ok(stats.build().into())
    }
    /// Coerces the argument at `index` to a path; `undefined`/`null` or a
    /// missing argument is a TypeError naming `method` and `name`.
    fn path_from_args(
        args: &[JsValue],
        index: usize,
        name: &str,
        method: &str,
        context: &mut Context,
    ) -> JsResult<PathBuf> {
        let value = args
            .get(index)
            .ok_or_else(|| Self::missing_argument(name, method))?;
        if value.is_undefined() || value.is_null() {
            return Err(Self::missing_argument(name, method));
        }
        let string = value.to_string(context)?.to_std_string_escaped();
        Ok(PathBuf::from(string))
    }
    /// Coerces the argument at `index` to a Rust string (same missing-value
    /// rules as `path_from_args`).
    fn string_from_arg(
        args: &[JsValue],
        index: usize,
        name: &str,
        method: &str,
        context: &mut Context,
    ) -> JsResult<String> {
        let value = args
            .get(index)
            .ok_or_else(|| Self::missing_argument(name, method))?;
        if value.is_undefined() || value.is_null() {
            return Err(Self::missing_argument(name, method));
        }
        Ok(value.to_string(context)?.to_std_string_escaped())
    }
    /// Resolves the optional encoding argument. Accepts a string or an
    /// options object with an `encoding` property; absent/undefined means
    /// UTF-8.
    fn encoding_from_arg(arg: Option<&JsValue>, context: &mut Context) -> JsResult<Encoding> {
        match arg {
            None => Ok(Encoding::Utf8),
            Some(value) if value.is_undefined() => Ok(Encoding::Utf8),
            Some(value) => {
                if let Some(object) = value.as_object() {
                    let enc_value = object.get(JsString::from("encoding"), context)?;
                    if enc_value.is_undefined() {
                        return Ok(Encoding::Utf8);
                    }
                    return Self::encoding_from_value(&enc_value, context);
                }
                Self::encoding_from_value(value, context)
            }
        }
    }
    /// Parses an encoding name; anything other than a UTF-8 spelling is a
    /// RangeError.
    fn encoding_from_value(value: &JsValue, context: &mut Context) -> JsResult<Encoding> {
        if value.is_undefined() {
            return Ok(Encoding::Utf8);
        }
        let requested = value
            .to_string(context)?
            .to_std_string_escaped()
            .to_ascii_lowercase();
        match requested.as_str() {
            "utf8" | "utf-8" | "utf" => Ok(Encoding::Utf8),
            other => Err(JsNativeError::range()
                .with_message(format!("Unsupported encoding '{other}'"))
                .into()),
        }
    }
    /// Reads the `recursive` flag from an options object or a bare value;
    /// absent means `false`.
    fn recursive_from_arg(option: Option<&JsValue>, context: &mut Context) -> JsResult<bool> {
        match option {
            None => Ok(false),
            Some(value) if value.is_object() => {
                let object = value.as_object().expect("checked object");
                let flag = object.get(JsString::from("recursive"), context)?;
                Ok(flag.to_boolean())
            }
            Some(value) => Ok(value.to_boolean()),
        }
    }
    /// TypeError for a missing required argument.
    fn missing_argument(name: &str, method: &str) -> JsError {
        JsNativeError::typ()
            .with_message(format!("{method}: missing required argument `{name}`"))
            .into()
    }
    /// Builds a JS Array from `values` via the intrinsic Array constructor.
    fn array_from_values(values: Vec<JsValue>, context: &mut Context) -> JsResult<JsValue> {
        let constructor = context
            .intrinsics()
            .constructors()
            .array()
            .constructor()
            .clone();
        let array_value = constructor.construct(&[], None, context)?;
        for (index, value) in values.into_iter().enumerate() {
            array_value.create_data_property_or_throw(index, value, context)?;
        }
        Ok(array_value.into())
    }
}

View File

@@ -0,0 +1,2 @@
pub(crate) mod fs;
pub(crate) mod sql;

View File

@@ -0,0 +1,78 @@
// Bootstrap: builds `globalThis.req` from ./event.json and installs the
// mutable `globalThis.res` response object before user code runs.
(function () {
  const EVENT_FILENAME = "./event.json";

  // True for non-null objects that are not arrays.
  // Fix: this helper was referenced by setHeaders() but never defined,
  // so any call to res.setHeaders() threw a ReferenceError.
  function isPlainObject(value) {
    return typeof value === "object" && value !== null && !Array.isArray(value);
  }

  // Parses the event payload; returns undefined (and logs) when the file
  // is missing or contains invalid JSON.
  function initRequest() {
    if (!fs.existsSync(EVENT_FILENAME)) {
      console.error("Event payload file does not exist:", EVENT_FILENAME);
      return;
    }
    const raw = fs.readFileSync(EVENT_FILENAME, "utf-8");
    try {
      return JSON.parse(raw);
    } catch (error) {
      console.error("Error parsing event payload:", error);
    }
  }

  // Builds the chainable response object serialized by the host afterwards.
  function initResponse() {
    return {
      statusCode: 200,
      headers: {},
      body: null,
      setStatus(code) {
        // Ignore NaN/Infinity; truncate fractional codes.
        if (Number.isFinite(code)) {
          this.statusCode = Math.trunc(code);
        }
        return this;
      },
      setHeader(name, value) {
        if (typeof name === "string") {
          this.headers[name] = value;
        }
        return this;
      },
      setHeaders(map) {
        // Copy only own properties; silently ignore non-object input.
        if (isPlainObject(map)) {
          for (const key in map) {
            if (Object.prototype.hasOwnProperty.call(map, key)) {
              this.headers[key] = map[key];
            }
          }
        }
        return this;
      },
      removeHeader(name) {
        if (typeof name === "string") {
          delete this.headers[name];
        }
        return this;
      },
      reset() {
        this.statusCode = 200;
        this.headers = {};
        this.body = null;
        return this;
      },
      send(payload) {
        this.body = payload;
        return this;
      },
      json(payload) {
        // Unserializable payloads (circular, BigInt) fall back to null.
        try {
          this.body = JSON.stringify(payload);
        } catch (_error) {
          this.body = null;
        }
        if (!("content-type" in this.headers)) {
          this.headers["content-type"] = "application/json";
        }
        return this;
      },
    };
  }

  globalThis.req = initRequest();
  globalThis.res = initResponse();
})();

View File

@@ -0,0 +1,453 @@
use core::cell::RefCell;
use std::sync::OnceLock;
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD as BASE64_STANDARD;
use boa_engine::error::JsNativeError;
use boa_engine::js_string;
use boa_engine::native_function::NativeFunction;
use boa_engine::object::{JsObject, ObjectInitializer};
use boa_engine::property::Attribute;
use boa_engine::{Context, JsError, JsResult, JsString, JsValue};
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
use serde_json::Value as JsonValue;
use sqlx::postgres::{PgArguments, PgQueryResult, PgRow, PgTypeInfo};
use sqlx::types::Json;
use sqlx::{Column, PgPool, Postgres, Row};
/// The `sql` builtin: parameterized PostgreSQL access for guest scripts,
/// registered under the global name given by [`Sql::NAME`].
pub(crate) struct Sql;
/// Shared connection pool, installed once by `Sql::init`.
static POOL: OnceLock<PgPool> = OnceLock::new();
/// Identifier every query must mention, installed once by `Sql::init`.
static SCOPE: OnceLock<String> = OnceLock::new();
/// System catalogs/views guest queries may never mention. Matching is a
/// plain substring test, so entries also block names containing them.
/// (`&'static` was redundant on a `static` item and has been dropped.)
static BLACKLIST: &[&str] = &[
    "pg_authid",
    "pg_shadow",
    "pg_user",
    "pg_roles",
    "pg_auth_members",
    "pg_database",
    "pg_tablespace",
    "pg_settings",
    "pg_file_settings",
    "pg_hba_file_rules",
    "pg_stat_activity",
    "pg_stat_replication",
    "pg_replication_slots",
    "pg_config",
    "pg_backend_memory_contexts",
];
impl Sql {
    /// Global property name the module is registered under.
    pub const NAME: JsString = js_string!("sql");
    /// Builds the `sql` namespace object and installs the shared pool and
    /// table scope.
    ///
    /// Panics if called a second time in the same process: `POOL` and
    /// `SCOPE` are `OnceLock`s and can only be set once.
    pub fn init(pool: PgPool, scope: String, context: &mut Context) -> JsObject {
        POOL.set(pool).unwrap();
        SCOPE.set(scope).unwrap();
        ObjectInitializer::new(context)
            .property(
                js_string!("name"),
                JsString::from(Self::NAME),
                Attribute::READONLY,
            )
            .function(
                NativeFunction::from_async_fn(Self::query),
                js_string!("query"),
                2,
            )
            .function(
                NativeFunction::from_async_fn(Self::execute),
                js_string!("execute"),
                2,
            )
            .build()
    }
    /// Returns a clone of the shared pool, or a JS error if `init` never ran.
    fn pool() -> JsResult<PgPool> {
        POOL.get().cloned().ok_or_else(|| {
            JsNativeError::error()
                .with_message("sql: module not initialized")
                .into()
        })
    }
    /// `sql.query(text, params?)`: runs a row-returning statement and
    /// resolves to an array of plain objects, one per row.
    async fn query(
        _: &JsValue,
        args: &[JsValue],
        context: &RefCell<&mut Context>,
    ) -> JsResult<JsValue> {
        let sql = Self::string_from_arg(args, 0, "query", "sql.query", &mut context.borrow_mut())?;
        Self::check_query(&*sql)?;
        let params = Self::params_from_js_value(args.get(1), &mut context.borrow_mut())?;
        let rows = Self::fetch_rows(Self::pool()?, sql, params).await?;
        Self::rows_to_js(rows, &mut context.borrow_mut())
    }
    /// `sql.execute(text, params?)`: runs a statement for its side effects
    /// and resolves to `{rowsAffected}`.
    async fn execute(
        _: &JsValue,
        args: &[JsValue],
        context: &RefCell<&mut Context>,
    ) -> JsResult<JsValue> {
        let sql =
            Self::string_from_arg(args, 0, "query", "sql.execute", &mut context.borrow_mut())?;
        Self::check_query(&*sql)?;
        let params = Self::params_from_js_value(args.get(1), &mut context.borrow_mut())?;
        let result = Self::execute_query(Self::pool()?, sql, params).await?;
        Self::result_to_js(result, &mut context.borrow_mut())
    }
    /// Rejects queries that mention a blacklisted system table or that do
    /// not mention the configured scope. Both checks are case-insensitive
    /// substring tests, so they deliberately over-reject (e.g. `pg_user`
    /// also matches `pg_user_mappings`).
    fn check_query(query: &str) -> JsResult<()> {
        let lowered = query.to_ascii_lowercase();
        for &blacklisted in BLACKLIST {
            if lowered.contains(blacklisted) {
                return Err(JsNativeError::error()
                    .with_message(format!(
                        "sql: use of the system table `{blacklisted}` is prohibited"
                    ))
                    .into());
            }
        }
        if let Some(scope) = SCOPE.get() {
            // Fix: `lowered` is lowercased, so the scope must be lowercased
            // too — previously a mixed-case scope rejected every query.
            let scope_lowered = scope.to_ascii_lowercase();
            if !scope_lowered.is_empty() && !lowered.contains(&scope_lowered) {
                return Err(JsNativeError::error()
                    .with_message(format!(
                        "sql: query must only reference the configured scope `{scope}`"
                    ))
                    .into());
            }
        }
        Ok(())
    }
    /// Binds `params` in order and fetches every result row.
    async fn fetch_rows(
        pool: PgPool,
        sql: String,
        params: Vec<SqlParam>,
    ) -> Result<Vec<PgRow>, SqlExecutionError> {
        let mut query = sqlx::query(&sql);
        for param in params {
            query = param.bind(query);
        }
        query
            .fetch_all(&pool)
            .await
            .map_err(SqlExecutionError::from)
    }
    /// Binds `params` in order and executes the statement.
    async fn execute_query(
        pool: PgPool,
        sql: String,
        params: Vec<SqlParam>,
    ) -> Result<PgQueryResult, SqlExecutionError> {
        let mut query = sqlx::query(&sql);
        for param in params {
            query = param.bind(query);
        }
        query.execute(&pool).await.map_err(SqlExecutionError::from)
    }
    /// Converts the optional JS params argument to a parameter list.
    /// Absent/`undefined`/`null` means no parameters; anything else must be
    /// array-like (read via its `length` property).
    fn params_from_js_value(
        arg: Option<&JsValue>,
        context: &mut Context,
    ) -> JsResult<Vec<SqlParam>> {
        let Some(value) = arg else {
            return Ok(Vec::new());
        };
        if value.is_undefined() || value.is_null() {
            return Ok(Vec::new());
        }
        let object = value.as_object().ok_or_else(|| {
            JsError::from(
                JsNativeError::typ().with_message("sql: parameters must be provided as an array"),
            )
        })?;
        let length_value = object.get(js_string!("length"), context)?;
        let length_number = length_value.to_number(context)?;
        // Clamp NaN/negative to 0 and cap at u32::MAX.
        let length = if length_number.is_nan() || length_number.is_sign_negative() {
            0
        } else {
            length_number.floor().min(u32::MAX as f64) as u32
        };
        let mut params = Vec::with_capacity(length as usize);
        for index in 0..length {
            let element = object.get(index, context)?;
            params.push(Self::param_from_js_value(&element, context)?);
        }
        Ok(params)
    }
    /// Maps one JS value onto a [`SqlParam`]. Integral numbers in i64 range
    /// bind as integers, other numbers as floats; BigInts bind as text;
    /// objects are serialized to JSON; Symbols are a TypeError.
    fn param_from_js_value(value: &JsValue, context: &mut Context) -> JsResult<SqlParam> {
        if value.is_undefined() || value.is_null() {
            return Ok(SqlParam::Null);
        }
        if value.is_boolean() {
            return Ok(SqlParam::Bool(value.to_boolean()));
        }
        if value.is_number() {
            let number = value.to_number(context)?;
            if number.fract() == 0.0 && number >= i64::MIN as f64 && number <= i64::MAX as f64 {
                return Ok(SqlParam::Int(number as i64));
            }
            return Ok(SqlParam::Float(number));
        }
        if value.is_string() {
            return Ok(SqlParam::Text(
                value.to_string(context)?.to_std_string_escaped(),
            ));
        }
        if value.is_bigint() {
            return Ok(SqlParam::Text(
                value.to_string(context)?.to_std_string_escaped(),
            ));
        }
        if value.is_symbol() {
            return Err(JsNativeError::typ()
                .with_message("sql: Symbols cannot be sent as parameters")
                .into());
        }
        let Some(json) = value.to_json(context)? else {
            return Ok(SqlParam::Null);
        };
        Ok(SqlParam::Json(json))
    }
    /// Coerces the argument at `index` to a Rust string; a missing,
    /// `undefined` or `null` argument is a TypeError naming `method`.
    fn string_from_arg(
        args: &[JsValue],
        index: usize,
        name: &str,
        method: &str,
        context: &mut Context,
    ) -> JsResult<String> {
        let value = args
            .get(index)
            .ok_or_else(|| Self::missing_argument(name, method))?;
        if value.is_undefined() || value.is_null() {
            return Err(Self::missing_argument(name, method));
        }
        Ok(value.to_string(context)?.to_std_string_escaped())
    }
    /// Builds a JS Array of row objects via the intrinsic Array constructor.
    fn rows_to_js(rows: Vec<PgRow>, context: &mut Context) -> JsResult<JsValue> {
        let constructor = context
            .intrinsics()
            .constructors()
            .array()
            .constructor()
            .clone();
        let array_value = constructor.construct(&[], None, context)?;
        for (index, row) in rows.iter().enumerate() {
            let js_row = Self::row_to_object(row, context)?;
            let row_value: JsValue = js_row.into();
            array_value.create_data_property_or_throw(index, row_value, context)?;
        }
        Ok(array_value.into())
    }
    /// Converts one row into a null-prototype object keyed by column name.
    fn row_to_object(row: &PgRow, context: &mut Context) -> JsResult<JsObject> {
        let object = JsObject::with_null_proto();
        for (index, column) in row.columns().iter().enumerate() {
            let value = Self::value_to_js(row, index, column.type_info(), context)?;
            object.create_data_property_or_throw(JsString::from(column.name()), value, context)?;
        }
        Ok(object)
    }
    /// Converts one column value into a JS value based on the Postgres type
    /// name. Unknown types fall back to a text read; SQL NULL becomes JS
    /// `null`; BYTEA is base64-encoded.
    ///
    /// NOTE(review): INT8/NUMERIC go through f64, so integers above 2^53
    /// lose precision — confirm that is acceptable for the schemas in use.
    fn value_to_js(
        row: &PgRow,
        index: usize,
        type_info: &PgTypeInfo,
        context: &mut Context,
    ) -> JsResult<JsValue> {
        let type_name = type_info.to_string().to_ascii_uppercase();
        // Reads an Option<$ty> column and maps it to a JS number or null.
        macro_rules! optional_number {
            ($ty:ty) => {{
                let value: Option<$ty> = row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|inner| JsValue::new(inner as f64))
                    .unwrap_or_else(JsValue::null))
            }};
        }
        match type_name.as_str() {
            "BOOL" => {
                let value: Option<bool> = row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value.map(JsValue::new).unwrap_or_else(JsValue::null))
            }
            "INT2" | "INT4" => optional_number!(i32),
            "INT8" => optional_number!(i64),
            "FLOAT4" | "FLOAT8" => {
                let value: Option<f64> = row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value.map(JsValue::new).unwrap_or_else(JsValue::null))
            }
            "NUMERIC" | "DECIMAL" => {
                let value: Option<f64> = row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value.map(JsValue::new).unwrap_or_else(JsValue::null))
            }
            "TEXT" | "VARCHAR" | "BPCHAR" | "CHAR" | "UUID" | "INET" | "CIDR" => {
                let value: Option<String> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|text| JsValue::from(JsString::from(text)))
                    .unwrap_or_else(JsValue::null))
            }
            "JSON" | "JSONB" => {
                let value: Option<JsonValue> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                match value {
                    Some(json) => JsValue::from_json(&json, context),
                    None => Ok(JsValue::null()),
                }
            }
            "TIMESTAMP" => {
                // Naive timestamps are interpreted as UTC and rendered RFC 3339.
                let value: Option<NaiveDateTime> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|ts| {
                        let dt = DateTime::<Utc>::from_naive_utc_and_offset(ts, Utc);
                        JsValue::from(JsString::from(dt.to_rfc3339()))
                    })
                    .unwrap_or_else(JsValue::null))
            }
            "TIMESTAMPTZ" => {
                let value: Option<DateTime<Utc>> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|ts| JsValue::from(JsString::from(ts.to_rfc3339())))
                    .unwrap_or_else(JsValue::null))
            }
            "DATE" => {
                let value: Option<NaiveDate> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|date| JsValue::from(JsString::from(date.to_string())))
                    .unwrap_or_else(JsValue::null))
            }
            "TIME" | "TIMETZ" => {
                let value: Option<NaiveTime> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|time| JsValue::from(JsString::from(time.to_string())))
                    .unwrap_or_else(JsValue::null))
            }
            "BYTEA" => {
                let value: Option<Vec<u8>> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|bytes| {
                        let encoded = BASE64_STANDARD.encode(bytes);
                        JsValue::from(JsString::from(encoded))
                    })
                    .unwrap_or_else(JsValue::null))
            }
            _ => {
                let value: Option<String> =
                    row.try_get(index).map_err(Self::column_access_error)?;
                Ok(value
                    .map(|text| JsValue::from(JsString::from(text)))
                    .unwrap_or_else(JsValue::null))
            }
        }
    }
    /// Wraps an execute result as `{rowsAffected}`.
    fn result_to_js(result: PgQueryResult, context: &mut Context) -> JsResult<JsValue> {
        let mut initializer = ObjectInitializer::new(context);
        initializer.property(
            js_string!("rowsAffected"),
            result.rows_affected() as f64,
            Attribute::READONLY | Attribute::ENUMERABLE,
        );
        Ok(initializer.build().into())
    }
    /// TypeError for a missing required argument.
    fn missing_argument(name: &str, method: &str) -> JsError {
        JsNativeError::typ()
            .with_message(format!("{method}: missing required argument `{name}`"))
            .into()
    }
    /// Wraps a column decode failure in a JS error.
    fn column_access_error(err: sqlx::Error) -> JsError {
        JsNativeError::error()
            .with_message(format!("sql: failed to read column value: {err}"))
            .into()
    }
}
/// A single query parameter after conversion from a JS value.
#[derive(Debug, Clone)]
enum SqlParam {
    Int(i64),
    Float(f64),
    Bool(bool),
    Text(String),
    // Arbitrary JS objects/arrays, bound as Postgres JSON.
    Json(JsonValue),
    Null,
}
impl SqlParam {
    /// Binds this parameter onto `query`, consuming self.
    /// NULL is bound as `Option::<String>::None` so it carries a concrete
    /// SQL type.
    fn bind<'q>(
        self,
        query: sqlx::query::Query<'q, Postgres, PgArguments>,
    ) -> sqlx::query::Query<'q, Postgres, PgArguments> {
        match self {
            SqlParam::Int(value) => query.bind(value),
            SqlParam::Float(value) => query.bind(value),
            SqlParam::Bool(value) => query.bind(value),
            SqlParam::Text(value) => query.bind(value),
            SqlParam::Json(value) => query.bind(Json(value)),
            SqlParam::Null => query.bind(Option::<String>::None),
        }
    }
}
/// Error raised while running a statement against Postgres.
/// Currently a single-variant wrapper around `sqlx::Error`.
#[derive(Debug)]
enum SqlExecutionError {
    Sql(sqlx::Error),
}
impl From<sqlx::Error> for SqlExecutionError {
    fn from(value: sqlx::Error) -> Self {
        Self::Sql(value)
    }
}
impl SqlExecutionError {
    /// Converts into a JS error, prefixing the message with `method`.
    fn into_js_error(self, method: &str) -> JsError {
        match self {
            Self::Sql(err) => JsNativeError::error()
                .with_message(format!("{method}: database error: {err}"))
                .into(),
        }
    }
}
impl From<SqlExecutionError> for JsError {
    // Generic conversion (used by `?`) attributes the error to "sql".
    fn from(value: SqlExecutionError) -> Self {
        value.into_js_error("sql")
    }
}

View File

@@ -0,0 +1,28 @@
use boa_engine::{Context, JsResult, Trace};
use boa_gc::Finalize;
use boa_runtime::{ConsoleState, Logger};
/// Console logger that mirrors every level to stderr, keeping guest-script
/// output separate from the JSON result the executor prints on stdout.
#[derive(Trace, Finalize, Debug)]
pub(crate) struct StderrLogger;

impl StderrLogger {
    /// Writes one `[LEVEL] message` line to stderr.
    fn emit(level: &str, msg: &str) {
        eprintln!("[{level}] {msg}");
    }
}

impl Logger for StderrLogger {
    fn log(&self, msg: String, _state: &ConsoleState, _context: &mut Context) -> JsResult<()> {
        Self::emit("LOG", &msg);
        Ok(())
    }
    fn info(&self, msg: String, _state: &ConsoleState, _context: &mut Context) -> JsResult<()> {
        Self::emit("INFO", &msg);
        Ok(())
    }
    fn warn(&self, msg: String, _state: &ConsoleState, _context: &mut Context) -> JsResult<()> {
        Self::emit("WARN", &msg);
        Ok(())
    }
    fn error(&self, msg: String, _state: &ConsoleState, _context: &mut Context) -> JsResult<()> {
        Self::emit("ERROR", &msg);
        Ok(())
    }
}

View File

@@ -0,0 +1,217 @@
use core::cell::RefCell;
use core::u64;
use std::io::{self, Read};
use std::process;
use std::rc::Rc;
use anyhow::Context as _;
use anyhow::anyhow;
use boa_engine::context::ContextBuilder;
use boa_engine::job::JobExecutor;
use boa_engine::property::Attribute;
use boa_engine::vm::RuntimeLimits;
use boa_engine::{Context, Script, Source};
use boa_runtime::fetch::BlockingReqwestFetcher;
use boa_runtime::{Console, fetch};
use clap::Parser;
use clap::builder::NonEmptyStringValueParser;
use rlimit::Resource;
use serde::Deserialize;
use sqlx::postgres::PgPoolOptions;
use crate::builtins::fs::Fs;
use crate::builtins::sql::Sql;
use crate::logger::StderrLogger;
use crate::queue::Queue;
pub(crate) mod builtins;
pub(crate) mod logger;
pub(crate) mod queue;
/// Bootstrap script evaluated before the user script; it installs the
/// `req`/`res` globals (see builtins/prelude.js).
static JS_PRELUDE: &str = include_str!("builtins/prelude.js");
// Command-line options for the executor. Plain `//` comments are used here
// so clap's doc-comment help derivation is not affected.
#[derive(Parser, Debug, Clone)]
struct Args {
    // Maximum interpreter recursion depth; unlimited when absent.
    #[clap(long, help = "Set the recursion limit")]
    recursion_limit: Option<usize>,
    // Maximum loop iterations; unlimited when absent.
    #[clap(long, help = "Set the loop iteration limit")]
    loop_limit: Option<u64>,
    // Extra virtual memory (MB) the script may use beyond current usage.
    #[clap(long, help = "Set the virtual memory limit (in megabytes)")]
    virtual_memory_limit: Option<u64>,
    #[clap(long, help = "Enable file system access", default_value_t = false)]
    enable_fs: bool,
    #[clap(long, help = "Enable network access", default_value_t = false)]
    enable_net: bool,
    // Value is the table scope every query must reference.
    // Fixed help-text typo ("acress") and aligned wording with the flags above.
    #[clap(long, help = "Enable PostgreSQL access limited to a single table")]
    enable_sql: Option<String>,
    #[clap(
        help = "Input filename. Use - to read from stdin",
        value_parser = NonEmptyStringValueParser::new(),
    )]
    filename: String,
}
/// Database settings deserialized from the environment via `envy`.
/// NOTE(review): presumably read from the `POSTGRES_URL` env var (envy's
/// uppercase-field convention) — confirm against deployment config.
#[derive(Deserialize, Debug, Clone)]
struct DatabaseConfig {
    // Full PostgreSQL connection string.
    postgres_url: String,
}
/// Entry point: parses flags, builds the JS context, evaluates the prelude
/// and the user script under the configured limits, then prints the final
/// `globalThis.res` as JSON on stdout.
///
/// Exit code 129 signals that serializing the response itself threw.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();
    let script_source = read_script(&args)?;
    let (mut context, queue) = init_context(&args).await?;
    // Parse both scripts up front so syntax errors surface before anything runs.
    let prelude = Script::parse(
        Source::from_bytes(JS_PRELUDE.as_bytes()),
        None,
        &mut context,
    )
    .map_err(|_| anyhow!("Failed to parse prelude"))?;
    let script = Script::parse(
        Source::from_bytes(script_source.as_bytes()),
        None,
        &mut context,
    )
    .map_err(|_| anyhow!("Failed to parse input script"))?;
    prelude
        .evaluate_async(&mut context)
        .await
        .map_err(|e| anyhow!("Prelude: Uncaught {e}"))?;
    // The memory cap applies to the user script only, not to the prelude.
    limit_additional_memory(&args)?;
    script
        .evaluate_async(&mut context)
        .await
        .map_err(|e| anyhow!("Uncaught {e}"))?;
    // Drain any async/promise/timeout jobs the script left pending.
    queue
        .run_jobs_async(&RefCell::new(&mut context))
        .await
        .map_err(|e| anyhow!("Uncaught {e}"))?;
    // Serialization below may allocate freely again.
    unlimit_memory()?;
    let serialize_code = r#"
JSON.stringify(globalThis.res)
"#;
    match context.eval(Source::from_bytes(serialize_code.as_bytes())) {
        Ok(result) => match result.to_string(&mut context) {
            Ok(output) => println!("{}", output.to_std_string_lossy()),
            Err(err) => {
                eprintln!("Unexpected serialization output: {err}")
            }
        },
        Err(err) => {
            // e.g. a circular `res.body` makes JSON.stringify throw.
            eprintln!("Uncaught {err}");
            process::exit(129);
        }
    }
    Ok(())
}
/// Applies the optional virtual-memory rlimit on top of what the process
/// already maps, so the cap bounds only the *additional* memory the guest
/// script may allocate.
///
/// Fix: memory statistics are now sampled only when a limit was actually
/// requested — previously a platform without `memory_stats` support failed
/// the whole run even with no `--virtual-memory-limit` flag.
fn limit_additional_memory(args: &Args) -> anyhow::Result<()> {
    let Some(limit_mb) = args.virtual_memory_limit else {
        return Ok(());
    };
    let usage = memory_stats::memory_stats().context("Failed to obtain memory usage statistics")?;
    // Allow `limit_mb` megabytes beyond the currently mapped virtual memory.
    let limit = limit_mb * 1024 * 1024 + usage.virtual_mem as u64;
    Resource::AS
        .set(limit, limit)
        .context("Failed to enforce virtual memory limit")?;
    Ok(())
}
/// Lifts the address-space limit again once the guest script has finished.
/// NOTE(review): assumes `u64::MAX` maps to RLIM_INFINITY in the rlimit
/// crate — confirm against its documentation.
fn unlimit_memory() -> anyhow::Result<()> {
    Resource::AS
        .set(u64::MAX, u64::MAX)
        .context("Failed to remove memory limit")
}
/// Installs the recursion/loop limits derived from the CLI flags.
fn init_runtime_limits(context: &mut Context, args: &Args) {
    let limits = get_runtime_limits(args);
    context.set_runtime_limits(limits);
}
/// Translates the CLI flags into boa `RuntimeLimits`, leaving the engine
/// defaults untouched for any flag that was not supplied.
fn get_runtime_limits(args: &Args) -> RuntimeLimits {
    let mut runtime_limits = RuntimeLimits::default();
    if let Some(max_depth) = args.recursion_limit {
        runtime_limits.set_recursion_limit(max_depth);
    }
    if let Some(max_iterations) = args.loop_limit {
        runtime_limits.set_loop_iteration_limit(max_iterations);
    }
    runtime_limits
}
/// Registers the builtin modules on the JS context: console always, and
/// fs / fetch / sql according to the CLI flags.
///
/// The `register_global_property` unwraps are considered infallible here:
/// the context is freshly built and no conflicting globals exist yet.
async fn init_builtins(context: &mut Context, args: &Args) -> anyhow::Result<()> {
    let console = Console::init_with_logger(StderrLogger, context);
    context
        .register_global_property(Console::NAME, console, Attribute::all())
        .unwrap();
    if args.enable_fs {
        // Use the imported `Fs` directly, consistent with `Fs::NAME` below
        // (previously spelled out as `builtins::fs::Fs::init`).
        let fs = Fs::init(context);
        context
            .register_global_property(Fs::NAME, fs, Attribute::all())
            .unwrap();
    }
    if args.enable_net {
        let fetcher = BlockingReqwestFetcher::default();
        fetch::register(fetcher, None, context).unwrap();
    }
    if let Some(ref scope) = args.enable_sql {
        // Connection string comes from the environment (see DatabaseConfig).
        let db_config = envy::from_env::<DatabaseConfig>()?;
        // Pool is capped at a single connection.
        let pool = PgPoolOptions::default()
            .min_connections(0)
            .max_connections(1)
            .connect(&db_config.postgres_url)
            .await?;
        let sql = Sql::init(pool, scope.clone(), context);
        context
            .register_global_property(Sql::NAME, sql, Attribute::all())
            .unwrap();
    }
    Ok(())
}
/// Builds the boa context wired to our custom job queue, then applies the
/// runtime limits and registers the enabled builtins.
async fn init_context(args: &Args) -> anyhow::Result<(Context, Rc<Queue>)> {
    let job_queue = Rc::new(Queue::new());
    let mut js_context = ContextBuilder::new()
        .job_executor(job_queue.clone())
        .build()
        .unwrap();
    init_runtime_limits(&mut js_context, args);
    init_builtins(&mut js_context, args).await?;
    Ok((js_context, job_queue))
}
/// Loads the script source: from stdin when the filename is `-`, otherwise
/// from the named file.
fn read_script(args: &Args) -> anyhow::Result<String> {
    match args.filename.as_str() {
        "-" => {
            let mut source = String::new();
            io::stdin()
                .read_to_string(&mut source)
                .context("Failed to read input script from stdin")?;
            Ok(source)
        }
        path => std::fs::read_to_string(path).context("Failed to read the script file"),
    }
}

View File

@@ -0,0 +1,112 @@
use core::cell::RefCell;
use core::ops::DerefMut;
use std::collections::{BTreeMap, VecDeque};
use std::rc::Rc;
use boa_engine::context::time::JsInstant;
use boa_engine::job::{GenericJob, Job, JobExecutor, NativeAsyncJob, PromiseJob, TimeoutJob};
use boa_engine::{Context, JsResult};
use futures_concurrency::future::FutureGroup;
use futures_lite::{StreamExt, future};
use tokio::task;
/// Single-threaded job executor for the boa context: buffers promise,
/// async, timeout, and generic jobs and drains them cooperatively.
pub(crate) struct Queue {
    // Native async jobs waiting to be polled by `run_jobs_async`.
    async_jobs: RefCell<VecDeque<NativeAsyncJob>>,
    // Pending promise reaction jobs, run in FIFO order.
    promise_jobs: RefCell<VecDeque<PromiseJob>>,
    // Timeout jobs keyed by their absolute deadline.
    timeout_jobs: RefCell<BTreeMap<JsInstant, TimeoutJob>>,
    // All other job kinds; drained one per pass (see `drain_jobs`).
    generic_jobs: RefCell<VecDeque<GenericJob>>,
}
impl Queue {
    /// Creates an empty queue.
    pub(crate) fn new() -> Self {
        Self {
            async_jobs: RefCell::default(),
            promise_jobs: RefCell::default(),
            timeout_jobs: RefCell::default(),
            generic_jobs: RefCell::default(),
        }
    }
    /// Runs every timeout job whose deadline is strictly before `now`,
    /// dropping cancelled ones and keeping future-dated jobs queued.
    pub(crate) fn drain_timeout_jobs(&self, context: &mut Context) {
        let now = context.clock().now();
        let mut timeouts_borrow = self.timeout_jobs.borrow_mut();
        // `split_off(&now)` returns the tail with deadlines >= now (kept),
        // leaving the due jobs behind in `timeouts_borrow`.
        let mut jobs_to_keep = timeouts_borrow.split_off(&now);
        jobs_to_keep.retain(|_, job| !job.is_cancelled());
        let jobs_to_run = std::mem::replace(timeouts_borrow.deref_mut(), jobs_to_keep);
        // Release the borrow before calling into JS, which may enqueue more jobs.
        drop(timeouts_borrow);
        for job in jobs_to_run.into_values() {
            if let Err(e) = job.call(context) {
                eprintln!("Uncaught {e}");
            }
        }
    }
    /// One cooperative drain pass: due timeout jobs, then at most one
    /// generic job, then every currently queued promise job.
    pub(crate) fn drain_jobs(&self, context: &mut Context) {
        self.drain_timeout_jobs(context);
        // NOTE(review): only a single generic job runs per pass —
        // presumably to interleave with promise jobs; confirm intent.
        let job = self.generic_jobs.borrow_mut().pop_front();
        if let Some(generic) = job
            && let Err(err) = generic.call(context)
        {
            eprintln!("Uncaught {err}");
        }
        // Take the whole batch so jobs enqueued while running land in the
        // next pass instead of extending this one.
        let jobs = std::mem::take(&mut *self.promise_jobs.borrow_mut());
        for job in jobs {
            if let Err(e) = job.call(context) {
                eprintln!("Uncaught {e}");
            }
        }
        context.clear_kept_objects();
    }
}
impl JobExecutor for Queue {
    /// Routes a newly created job into the matching internal queue.
    fn enqueue_job(self: Rc<Self>, job: Job, context: &mut Context) {
        match job {
            Job::PromiseJob(job) => self.promise_jobs.borrow_mut().push_back(job),
            Job::AsyncJob(job) => self.async_jobs.borrow_mut().push_back(job),
            Job::TimeoutJob(t) => {
                // Store under the absolute deadline (now + requested delay).
                let now = context.clock().now();
                self.timeout_jobs.borrow_mut().insert(now + t.timeout(), t);
            }
            Job::GenericJob(g) => self.generic_jobs.borrow_mut().push_back(g),
            // Fail loudly on any job kind this executor does not handle.
            _ => panic!("unsupported job type"),
        }
    }
    /// Blocking wrapper: spins up a current-thread tokio runtime and drives
    /// the async drain loop to completion.
    fn run_jobs(self: Rc<Self>, context: &mut Context) -> JsResult<()> {
        let runtime = tokio::runtime::Builder::new_current_thread()
            .enable_time()
            .build()
            .unwrap();
        task::LocalSet::default().block_on(&runtime, self.run_jobs_async(&RefCell::new(context)))
    }
    /// Drives all queues until every kind is empty: polls async jobs via a
    /// `FutureGroup` while repeatedly draining the synchronous queues.
    async fn run_jobs_async(self: Rc<Self>, context: &RefCell<&mut Context>) -> JsResult<()> {
        let mut group = FutureGroup::new();
        loop {
            for job in std::mem::take(&mut *self.async_jobs.borrow_mut()) {
                group.insert(job.call(context));
            }
            // Done only when no job of any kind remains.
            if group.is_empty()
                && self.promise_jobs.borrow().is_empty()
                && self.timeout_jobs.borrow().is_empty()
                && self.generic_jobs.borrow().is_empty()
            {
                return Ok(());
            }
            // Poll the async jobs once without blocking the drain loop.
            if let Some(Err(err)) = future::poll_once(group.next()).await.flatten() {
                eprintln!("Uncaught {err}");
            };
            self.drain_jobs(&mut context.borrow_mut());
            // NOTE(review): when only future-dated timeout jobs remain, this
            // loop busy-spins on yield_now — consider sleeping until the
            // next deadline.
            task::yield_now().await
        }
    }
}