Update how schemas are generated and stored and loaded into C++
See #20
ianhbell committed Sep 7, 2023
1 parent 38e2424 commit 554afed
Showing 5 changed files with 75 additions and 32 deletions.
5 changes: 4 additions & 1 deletion .gitignore
@@ -14,4 +14,7 @@
 /**/.DS_Store
 doc/source/_static/doxygen
 doc/source/api/*.rst
-doc/source/examples/*.pdf
+doc/source/examples/*.pdf
+interface/CPP/model_schemas.cpp
+
+interface/CPP/model_schemas.json
19 changes: 11 additions & 8 deletions CMakeLists.txt
@@ -23,6 +23,15 @@ else()
 message(FATAL_ERROR "tar.xz of boost sources needed for teqp cannot be found")
 endif()
 
+set(ZIPFN "${CMAKE_CURRENT_SOURCE_DIR}/dev/model_schemas.tar.xz")
+if (EXISTS ${ZIPFN})
+    execute_process(COMMAND ${CMAKE_COMMAND} -E tar -xJf ${ZIPFN}
+                    WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/interface/CPP")
+    message(STATUS "Untarred model schemas")
+else()
+    message(FATAL_ERROR "tar.xz of thermodynamic model schemas needed for teqp cannot be found")
+endif()
+
 add_library(teqpinterface INTERFACE)
 
 target_include_directories(teqpinterface INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}/include")
@@ -101,14 +110,8 @@ if (NOT TEQP_NO_TEQPCPP)
 target_compile_definitions(teqpcpp PRIVATE -DMULTICOMPLEX_NO_MULTIPRECISION)
 target_compile_definitions(teqpcpp PUBLIC -DUSE_AUTODIFF)
 
-# Populate the model schema cpp file with the contents
-file(READ "${CMAKE_CURRENT_SOURCE_DIR}/notebooks/schemas.json" MODEL_SCHEMA_CONTENTS)
-file(READ "${CMAKE_CURRENT_SOURCE_DIR}/dev/templates/model_schema.cpp.in" MODEL_JSON_SCHEMA_TEMPLATE)
-file(CONFIGURE
-     OUTPUT model_schemas.cpp
-     CONTENT ${MODEL_JSON_SCHEMA_TEMPLATE}
-     @ONLY)
-target_sources(teqpcpp PRIVATE model_schemas.cpp)
+# Add the schema file
+target_sources(teqpcpp PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/interface/CPP/model_schemas.cpp")
 
 if (TEQP_TESTTEQPCPP)
     add_executable(test_teqpcpp "${CMAKE_CURRENT_SOURCE_DIR}/interface/CPP/test/test_teqpcpp.cpp")
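For orientation, a hypothetical consumer-side sketch (not part of this commit): once the pre-generated interface/CPP/model_schemas.cpp is compiled into teqpcpp, any linked translation unit can reach the embedded schemas through the global symbol it defines. The sketch assumes the schemas JSON is an object keyed by model class name, as the notebook below builds it.

    #include "nlohmann/json.hpp"
    #include <iostream>

    // Declaration matching the definition that the notebook emits into
    // interface/CPP/model_schemas.cpp (auto there deduces to nlohmann::json)
    extern const nlohmann::json model_schema_library;

    int main() {
        // Print the name of every model kind with an embedded JSON Schema
        for (const auto& [name, schema] : model_schema_library.items()) {
            std::cout << name << std::endl;
        }
        return 0;
    }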
Binary file added dev/model_schemas.tar.xz
Binary file not shown.
9 changes: 0 additions & 9 deletions dev/templates/model_schema.cpp.in

This file was deleted.
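The deleted template's contents are not shown here, but judging from the removed CMake logic (file(READ) into MODEL_SCHEMA_CONTENTS, then file(CONFIGURE ... @ONLY)), it presumably pasted the whole schemas JSON into a single string literal, along the lines of this hypothetical reconstruction. That also illustrates why the approach was abandoned: MSVC caps the length of string literals, which the schemas JSON can exceed.

    // Hypothetical reconstruction, not the actual deleted file: the
    // @MODEL_SCHEMA_CONTENTS@ placeholder is what file(CONFIGURE ... @ONLY)
    // would replace with the JSON read from notebooks/schemas.json
    #include "nlohmann/json.hpp"

    extern const auto model_schema_library = nlohmann::json::parse(R"json(
    @MODEL_SCHEMA_CONTENTS@
    )json");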

74 changes: 60 additions & 14 deletions notebooks/Schema Builder.ipynb
@@ -289,18 +289,7 @@
"execution_count": 9,
"id": "e83a283c",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"SAFTVRMieWithCoeffs(kmat=None, polar_model='GrayGubbins+GubbinsTwu', SAFTVRMie_flags=None, polar_flags=None, coeffs=[SAFTVRMieCoeffsigmaAngstrom(name='R1234YF', m=1.3656, epsilon_over_k=299.424, BibTeXKey='Paricaud', lambda_r=21.7779, lambda_a=6.0, mu_Cm=None, mu_D=2.2814, Q_Cm2=None, Q_DA=1.4151, nmu=1.0, nQ=1.0, sigma_Angstrom=4.5307)])"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"class SAFTVRMieCoeffBase(NoExtraBaseModel):\n",
" name: str\n",
@@ -338,7 +327,7 @@
" \n",
"SAFTVRMie = TypeAdapter(Union[SAFTVRMieWithNames, SAFTVRMieWithCoeffs])\n",
"# SAFTVRMieWithCoeffs.validate({\"coeffs\":[{\"BibTeXKey\":\"me\",\"epsilon_over_k\":100,\"lambda_a\":6,\"lambda_r\": 12,\"m\":1.0,\"name\":\"Stockmayer126\",\"sigma_m\":3e-10}]})\n",
"SAFTVRMie.validate_python({\"coeffs\":[{\"BibTeXKey\":\"Paricaud\",\"Q_DA\":1.4151,\"epsilon_over_k\":299.424, \"lambda_a\":6.0,\"lambda_r\":21.7779,\"m\":1.3656,\"mu_D\":2.2814,\"nQ\":1.0,\"name\":\"R1234YF\",\"nmu\":1.0,\"sigma_Angstrom\":4.5307}],\"polar_model\":\"GrayGubbins+GubbinsTwu\"})"
"# SAFTVRMie.validate_python({\"coeffs\":[{\"BibTeXKey\":\"Paricaud\",\"Q_DA\":1.4151,\"epsilon_over_k\":299.424, \"lambda_a\":6.0,\"lambda_r\":21.7779,\"m\":1.3656,\"mu_D\":2.2814,\"nQ\":1.0,\"name\":\"R1234YF\",\"nmu\":1.0,\"sigma_Angstrom\":4.5307}],\"polar_model\":\"GrayGubbins+GubbinsTwu\"})"
]
},
{
@@ -387,9 +376,66 @@
" else:\n",
" schemas[klass.__name__] = klass.model_json_schema()\n",
" \n",
"with open(\"schemas.json\", 'w') as fp:\n",
"with open(\"model_schemas.json\", 'w') as fp:\n",
" fp.write(json.dumps(schemas, indent=2))"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "463514bd",
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"def string_to_hexchunk(jsonstring):\n",
" \n",
" def to_chunks(l, n):\n",
" if n < 1:\n",
" n = 1\n",
" return [l[i:i + n] for i in range(0, len(l), n)]\n",
" \n",
" try:\n",
" h = [\"0x{:02x}\".format(ord(b)) for b in jsonstring] + ['0x00']\n",
" except TypeError:\n",
" h = [\"0x{:02x}\".format(int(b)) for b in jsonstring] + ['0x00']\n",
"\n",
" # Break up the file into lines of 16 hex characters\n",
" # because some compilers don't like VERY long lines\n",
" chunks = to_chunks(h, 16)\n",
"\n",
" # Put the lines back together again\n",
" # The chunks are joined together with commas, and then EOL are used to join the rest\n",
" hex_string = '{' + ',\\n'.join([', '.join(chunk) for chunk in chunks]) + '}'\n",
" return hex_string\n",
" \n",
"chunk = string_to_hexchunk(json.dumps(schemas, indent=0))\n",
"INFO = \"\"\"// Due to limitations in MSVC, very long string literals are \n",
"// not allowed. Thus the string must be re-encoded as binary. The \n",
"// contents of the string are in the JSON file next to this file\n",
"\n",
"#include \"nlohmann/json.hpp\"\n",
"#include <string>\n",
"\"\"\"\n",
"with open('model_schemas.cpp','w') as fp:\n",
" fp.write(INFO)\n",
" fp.write('extern const auto model_schema_library = nlohmann::json::parse(std::string(\\n' + chunk + '\\n)); ')"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "d141efd0",
"metadata": {},
"outputs": [],
"source": [
"import tarfile\n",
"\n",
"path = '../dev/model_schemas.tar.xz'\n",
"with tarfile.open(path, mode='w:xz') as tar:\n",
" tar.add('model_schemas.json')\n",
" tar.add('model_schemas.cpp')"
]
}
],
"metadata": {
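Aside: the first new notebook cell above works around MSVC's limit on string literal length by re-encoding the schemas JSON as a brace-initialized byte array. A minimal, self-contained sketch of the same pattern, using a tiny stand-in payload ({"a": 1}) rather than the real schemas:

    #include "nlohmann/json.hpp"
    #include <iostream>
    #include <iterator>

    // Hex-encoded bytes of the text {"a": 1}, in the form string_to_hexchunk
    // emits (the generator additionally appends a terminating 0x00 byte)
    static const char payload[] = {0x7b, 0x22, 0x61, 0x22, 0x3a, 0x20, 0x31, 0x7d};

    int main() {
        // Parse the byte range back into a JSON object
        auto j = nlohmann::json::parse(std::begin(payload), std::end(payload));
        std::cout << j["a"] << std::endl; // prints 1
        return 0;
    }

The trailing 0x00 that string_to_hexchunk appends is omitted in this sketch because the byte range is parsed with explicit begin/end iterators rather than as a NUL-terminated C string.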
