diff --git a/src/bin/solang.rs b/src/bin/solang.rs
index 40112b584..4f1ed8ebb 100644
--- a/src/bin/solang.rs
+++ b/src/bin/solang.rs
@@ -11,13 +11,15 @@ use solang::{
     emit::Generate,
     file_resolver::FileResolver,
     sema::{ast::Namespace, file::PathDisplay},
-    standard_json::{EwasmContract, JsonContract, JsonResult},
+    standard_json::{
+        EwasmContract, JsonContract, JsonResult, OutputJson, StandardJsonInput, StandardJsonSource,
+    },
 };
 use std::{
     collections::{HashMap, HashSet},
     ffi::OsString,
     fs::{self, create_dir, create_dir_all, File},
-    io::prelude::*,
+    io::{self, prelude::*},
     path::{Path, PathBuf},
     process::exit,
 };
@@ -187,6 +189,7 @@ fn compile(compile_args: &Compile) {
     let mut namespaces = Vec::new();
     let mut errors = false;
+    let std_json = compile_args.compiler_output.std_json_output;
 
     // Build a map of requested contract names, and a flag specifying whether it was found or not
     let contract_names: HashSet<&str> = if let Some(values) = &compile_args.package.contracts {
@@ -195,7 +198,13 @@ fn compile(compile_args: &Compile) {
         HashSet::new()
     };
 
-    for filename in compile_args.package.get_input() {
+    let input_files = if std_json && compile_args.package.input.is_none() {
+        load_standard_json_input(&mut resolver, &mut json)
+    } else {
+        compile_args.package.get_input().clone()
+    };
+
+    for filename in &input_files {
         // TODO: this could be parallelized using e.g. rayon
         let ns = process_file(
             filename,
@@ -210,8 +219,6 @@ fn compile(compile_args: &Compile) {
 
     let mut json_contracts = HashMap::new();
 
-    let std_json = compile_args.compiler_output.std_json_output;
-
     for ns in &namespaces {
         if std_json {
             let mut out = ns.diagnostics_as_json(&resolver);
@@ -224,7 +231,6 @@ fn compile(compile_args: &Compile) {
             errors = true;
         }
     }
-
     if let Some("ast-dot") = compile_args.compiler_output.emit.as_deref() {
         exit(0);
     }
@@ -285,6 +291,8 @@ fn compile(compile_args: &Compile) {
         }
     }
 
+    json.contracts = json_contracts;
+
     if std_json {
         println!("{}", serde_json::to_string(&json).unwrap());
         exit(0);
@@ -354,11 +362,103 @@ fn process_file(
     ns
 }
 
+fn load_standard_json_input(resolver: &mut FileResolver, json: &mut JsonResult) -> Vec<PathBuf> {
+    let mut stdin = String::new();
+
+    if let Err(err) = io::stdin().read_to_string(&mut stdin) {
+        standard_json_exit_with_error(json, format!("failed to read stdin: {err}"));
+    }
+
+    let input = match serde_json::from_str::<StandardJsonInput>(&stdin) {
+        Ok(input) => input,
+        Err(err) => standard_json_exit_with_error(
+            json,
+            format!("failed to parse standard json input: {err}"),
+        ),
+    };
+
+    if let Some(language) = &input.language {
+        if language != "Solidity" {
+            standard_json_exit_with_error(
+                json,
+                format!("unsupported language '{language}', expected 'Solidity'"),
+            );
+        }
+    }
+
+    for remapping in &input.settings.remappings {
+        let (prefix, path) = match remapping.split_once('=') {
+            Some((prefix, path)) => (prefix, path),
+            None => standard_json_exit_with_error(
+                json,
+                format!("invalid remapping '{remapping}', expected '<prefix>=<path>'"),
+            ),
+        };
+
+        resolver.add_import_map(prefix.into(), PathBuf::from(path));
+    }
+
+    if input.sources.is_empty() {
+        standard_json_exit_with_error(json, "standard json input did not contain any sources");
+    }
+
+    let mut input_files = Vec::with_capacity(input.sources.len());
+
+    for (source_name, source) in input.sources {
+        let contents = load_standard_json_source(&source_name, source, json);
+
+        resolver.set_file_contents(&source_name, contents);
+        input_files.push(PathBuf::from(source_name));
+    }
+
+    input_files
+}
+
+fn load_standard_json_source(
+    source_name: &str,
+    source: StandardJsonSource,
+    json: &mut JsonResult,
+) -> String {
+    if let Some(content) = source.content {
+        return content;
+    }
+
+    for url in source.urls {
+        match fs::read_to_string(&url) {
+            Ok(contents) => return contents,
+            Err(_) => continue,
+        }
+    }
+
+    standard_json_exit_with_error(
+        json,
+        format!(
+            "source '{source_name}' is missing inline content and no readable urls were provided"
+        ),
+    );
+}
+
+fn standard_json_exit_with_error(json: &mut JsonResult, message: impl Into<String>) -> ! {
+    let message = message.into();
+
+    json.errors.push(OutputJson {
+        sourceLocation: None,
+        ty: "JSONError".to_owned(),
+        component: "solang".to_owned(),
+        severity: "error".to_owned(),
+        formattedMessage: message.clone(),
+        message,
+    });
+
+    println!("{}", serde_json::to_string(json).unwrap());
+    exit(1);
+}
+
 fn contract_results(
     contract_no: usize,
     compiler_output: &CompilerOutput,
     ns: &mut Namespace,
-    json_contracts: &mut HashMap<String, JsonContract>,
+    json_contracts: &mut HashMap<String, HashMap<String, JsonContract>>,
     seen_contracts: &mut HashMap<String, String>,
     opt: &Options,
     default_authors: &[String],
@@ -431,7 +531,12 @@ fn contract_results(
     }
 
     if std_json {
-        json_contracts.insert(
+        let source_name = ns.files[ns.top_file_no()]
+            .path
+            .to_string_lossy()
+            .into_owned();
+
+        json_contracts.entry(source_name).or_default().insert(
             bin.name,
             JsonContract {
                 abi: abi::ethereum::gen_abi(contract_no, ns),
diff --git a/src/standard_json.rs b/src/standard_json.rs
index fea82522f..6ba7852c4 100644
--- a/src/standard_json.rs
+++ b/src/standard_json.rs
@@ -3,8 +3,8 @@
 //! This module defines the json format for `solang compile --standard-json`.
 
 use crate::abi::ethereum::ABI;
-use serde::Serialize;
-use std::collections::HashMap;
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap};
 
 #[derive(Serialize)]
 pub struct EwasmContract {
@@ -29,6 +29,30 @@ pub struct JsonResult {
     pub contracts: HashMap<String, HashMap<String, JsonContract>>,
 }
 
+#[derive(Deserialize)]
+pub struct StandardJsonInput {
+    #[serde(default)]
+    pub language: Option<String>,
+    #[serde(default)]
+    pub sources: BTreeMap<String, StandardJsonSource>,
+    #[serde(default)]
+    pub settings: StandardJsonSettings,
+}
+
+#[derive(Deserialize)]
+pub struct StandardJsonSource {
+    #[serde(default)]
+    pub content: Option<String>,
+    #[serde(default)]
+    pub urls: Vec<String>,
+}
+
+#[derive(Default, Deserialize)]
+pub struct StandardJsonSettings {
+    #[serde(default)]
+    pub remappings: Vec<String>,
+}
+
 #[derive(Serialize)]
 pub struct LocJson {
     pub file: String,
diff --git a/tests/cli.rs b/tests/cli.rs
index 2b5443aa5..c0ff0f2d4 100644
--- a/tests/cli.rs
+++ b/tests/cli.rs
@@ -1,6 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0
 
 use assert_cmd::cargo_bin_cmd;
+use serde_json::Value;
 use std::fs::File;
 use tempfile::TempDir;
 
@@ -129,3 +130,28 @@ fn basic_compilation_from_toml() {
 
     compile_cmd.current_dir(polkadot_test).assert().success();
 }
+
+#[test]
+fn standard_json_reads_sources_from_stdin() {
+    let input = r#"{
+        "language": "Solidity",
+        "sources": {
+            "input.sol": {
+                "content": "contract flipper { function flip() public {} }"
+            }
+        }
+    }"#;
+
+    let assert = cargo_bin_cmd!("solang")
+        .args(["compile", "--target", "polkadot", "--standard-json"])
+        .write_stdin(input)
+        .assert()
+        .success();
+
+    let output = String::from_utf8_lossy(&assert.get_output().stdout);
+    let json: Value = serde_json::from_str(&output).unwrap();
+
+    assert_eq!(json["target"], "Polkadot");
+    assert!(json["contracts"]["input.sol"]["flipper"].is_object());
+    assert_eq!(json["errors"].as_array().unwrap().len(), 0);
+}