├── .gitmodules ├── test ├── cbmc │ ├── .gitignore │ ├── output │ │ └── latest │ │ │ └── html │ │ │ └── README.md │ ├── sources │ │ └── README.md │ ├── include │ │ ├── README.md │ │ └── jobs_annex.h │ ├── stubs │ │ ├── strnlen.c │ │ ├── JSON_Validate.c │ │ └── JSON_SearchConst.c │ ├── run_proofs.sh │ ├── lib │ │ └── summarize.py │ └── proofs.c ├── unit-test │ ├── catch_assert.h │ ├── CMakeLists.txt │ ├── ota_job_handler_utest.c │ └── job_parser_utest.c └── CMakeLists.txt ├── manifest.yml ├── .gitignore ├── .github ├── workflows │ ├── doxygen.yml │ ├── ci.yml │ └── release.yml ├── memory_statistics_config.json ├── .cSpellWords.txt └── CONTRIBUTING.md ├── CODE_OF_CONDUCT.md ├── SECURITY.md ├── MISRA.md ├── cspell.config.yaml ├── jobsFilePaths.cmake ├── tools ├── cmock │ ├── project.yml │ ├── coverage.cmake │ └── create_test.cmake └── coverity │ ├── misra.config │ └── README.md ├── docs └── doxygen │ ├── include │ └── size_table.md │ ├── style.css │ ├── pages.dox │ └── layout.xml ├── LICENSE ├── source ├── otaJobParser │ ├── include │ │ ├── ota_job_processor.h │ │ └── job_parser.h │ ├── ota_job_handler.c │ └── job_parser.c └── jobs.c ├── CHANGELOG.md ├── MigrationGuide.md └── README.md /.gitmodules: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/cbmc/.gitignore: -------------------------------------------------------------------------------- 1 | # Emitted when running CBMC proofs 2 | coreJSON 3 | run.json 4 | -------------------------------------------------------------------------------- /test/cbmc/output/latest/html/README.md: -------------------------------------------------------------------------------- 1 | CBMC proof output 2 | ================= 3 | 4 | This directory contains the output from a run of cbmc. -------------------------------------------------------------------------------- /manifest.yml: -------------------------------------------------------------------------------- 1 | name : "Jobs-for-AWS-IoT-embedded-sdk" 2 | version: "v2.0.0" 3 | description: | 4 | "Library for using the AWS IoT Jobs service on embedded devices.\n" 5 | license: "MIT" 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore documentation output. 2 | **/docs/**/output/* 3 | 4 | # Ignore CMake build directory. 5 | build/ 6 | 7 | # Ignore build artifacts 8 | *.o 9 | 10 | # Ignore code coverage artifacts 11 | *.gcda 12 | *.gcno 13 | *.gcov 14 | -------------------------------------------------------------------------------- /test/cbmc/sources/README.md: -------------------------------------------------------------------------------- 1 | CBMC proof source code 2 | ====================== 3 | 4 | This directory contains source code written for CBMC proofs. It is 5 | common to write some code to model aspects of the system under test, 6 | and this code goes here. 7 | -------------------------------------------------------------------------------- /test/cbmc/include/README.md: -------------------------------------------------------------------------------- 1 | CBMC proof include files 2 | ======================== 3 | 4 | This directory contains include files written for CBMC proof. It is 5 | common to write some code to model aspects of the system under test, 6 | and the header files for this code go here. 
7 | -------------------------------------------------------------------------------- /.github/workflows/doxygen.yml: -------------------------------------------------------------------------------- 1 | name: Doxygen Generation 2 | on: 3 | push: 4 | branches: [main] 5 | workflow_dispatch: 6 | jobs: 7 | doxygen-generation: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Doxygen generation 11 | uses: FreeRTOS/CI-CD-Github-Actions/doxygen-generation@main 12 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Reporting a Vulnerability 2 | 3 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security 4 | via our [vulnerability reporting page](https://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. 5 | Please do **not** create a public github issue. 6 | -------------------------------------------------------------------------------- /.github/memory_statistics_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "lib_name": "AWS IoT Jobs", 3 | "src": [ 4 | "source/jobs.c", 5 | "source/otaJobParser/job_parser.c", 6 | "source/otaJobParser/ota_job_handler.c", 7 | "coreJSON/source/core_json.c" 8 | ], 9 | "include": [ 10 | "source/include", 11 | "source/otaJobParser/include", 12 | "coreJSON/source/include" 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /test/cbmc/stubs/strnlen.c: -------------------------------------------------------------------------------- 1 | #include <stddef.h> 2 | #include <string.h> 3 | 4 | size_t strnlen( const char * s, 5 | size_t maxlen ) 6 | { 7 | __CPROVER_assert( s != NULL, "String pointer must not be null." ); 8 | 9 | size_t result; 10 | 11 | result = ( size_t ) strlen( s ); 12 | 13 | if( result > maxlen ) 14 | { 15 | result = maxlen; 16 | } 17 | 18 | return result; 19 | } 20 | -------------------------------------------------------------------------------- /test/cbmc/stubs/JSON_Validate.c: -------------------------------------------------------------------------------- 1 | #include <stddef.h> 2 | #include "../coreJSON/source/include/core_json.h" 3 | 4 | JSONStatus_t JSON_Validate( const char * buf, 5 | size_t max ) 6 | { 7 | __CPROVER_assert( buf != NULL, "Buffer must not be null." ); 8 | 9 | JSONStatus_t ret = 0; 10 | 11 | __CPROVER_assume( ( ret == JSONNullParameter ) || ( ret == JSONBadParameter ) || 12 | ( ret == JSONSuccess ) || ( ret == JSONIllegalDocument ) ); 13 | 14 | return ret; 15 | } 16 | -------------------------------------------------------------------------------- /MISRA.md: -------------------------------------------------------------------------------- 1 | # MISRA Compliance 2 | 3 | The jobs library files conform to the [MISRA C:2012](https://www.misra.org.uk) 4 | guidelines, with some noted exceptions.
Compliance is checked with Coverity static analysis 5 | version 2023.6.1. The specific deviations, suppressed inline, are listed below. 6 | 7 | Additionally, [MISRA configuration file](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/blob/main/tools/coverity/misra.config) contains the project wide deviations. 8 | 9 | ### Suppressed with Coverity Comments 10 | To find the violation references in the source files run grep on the source code 11 | with ( Assuming rule 11.4 violation; with justification in point 2 ): 12 | ``` 13 | grep 'MISRA Ref 11.1.4' . -rI 14 | ``` 15 | 16 | *None.* 17 | -------------------------------------------------------------------------------- /.github/.cSpellWords.txt: -------------------------------------------------------------------------------- 1 | ASDFLKJ 2 | cbmc 3 | CBMC 4 | cbor 5 | CBOR 6 | cmock 7 | Cmock 8 | CMock 9 | CMOCK 10 | coremqtt 11 | coverity 12 | Coverity 13 | CSDK 14 | ctest 15 | DCMOCK 16 | DCOV 17 | decihours 18 | Decihours 19 | DECIHOURS 20 | DNDEBUG 21 | DUNITY 22 | getpacketid 23 | isjobupdatestatus 24 | isystem 25 | jobz 26 | KQERL 27 | lcov 28 | litani 29 | MEQCIGOTD 30 | MEYCIQCV 31 | misra 32 | Misra 33 | MISRA 34 | MQTT 35 | Mrcd 36 | mypy 37 | nondet 38 | Nondet 39 | NONDET 40 | notifyzz 41 | otaparser 42 | pylint 43 | pytest 44 | pyyaml 45 | rejectedzz 46 | sinclude 47 | strn 48 | strnn 49 | thingz 50 | Uhyc 51 | UNACKED 52 | unpadded 53 | Unpadded 54 | UNPADDED 55 | UNSUB 56 | UNSUBACK 57 | unsubscriptions 58 | unwindings 59 | utest 60 | vect 61 | Vect 62 | VECT 63 | Wunused 64 | -------------------------------------------------------------------------------- /cspell.config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | $schema: https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json 3 | version: '0.2' 4 | # Allows things like stringLength 5 | allowCompoundWords: true 6 | 7 | # Read files not to spell check from the git ignore 8 | useGitignore: true 9 | 10 | # Language settings for C 11 | languageSettings: 12 | - caseSensitive: false 13 | enabled: true 14 | languageId: c 15 | locale: "*" 16 | 17 | # Add a dictionary, and the path to the word list 18 | dictionaryDefinitions: 19 | - name: freertos-words 20 | path: '.github/.cSpellWords.txt' 21 | addWords: true 22 | 23 | dictionaries: 24 | - freertos-words 25 | 26 | # Paths and files to ignore 27 | ignorePaths: 28 | - 'dependency' 29 | - 'docs' 30 | - 'ThirdParty' 31 | - 'History.txt' 32 | -------------------------------------------------------------------------------- /jobsFilePaths.cmake: -------------------------------------------------------------------------------- 1 | # This file adds source files and include directories 2 | # into variables for use from different repositories 3 | # in their Cmake based build systems. Only the library 4 | # files are added. 5 | 6 | # JOBS library source files. 7 | set( JOBS_SOURCES 8 | ${CMAKE_CURRENT_LIST_DIR}/source/jobs.c ) 9 | 10 | # JOBS library Public Include directories. 11 | set( JOBS_INCLUDE_PUBLIC_DIRS 12 | ${CMAKE_CURRENT_LIST_DIR}/source/include ) 13 | 14 | # OTA Parser source files 15 | set( OTA_HANDLER_SOURCES 16 | ${CMAKE_CURRENT_LIST_DIR}/source/otaJobParser/job_parser.c 17 | ${CMAKE_CURRENT_LIST_DIR}/source/otaJobParser/ota_job_handler.c ) 18 | 19 | # OTA Parser Public Include directories. 
20 | set( OTA_HANDLER_INCLUDES 21 | ${CMAKE_CURRENT_LIST_DIR}/source/otaJobParser/include ) 22 | -------------------------------------------------------------------------------- /test/cbmc/stubs/JSON_SearchConst.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include "../coreJSON/source/include/core_json.h" 3 | 4 | JSONStatus_t JSON_SearchConst( char * buf, 5 | size_t max, 6 | const char * query, 7 | size_t queryLength, 8 | char ** outValue, 9 | size_t * outValueLength, 10 | JSONTypes_t * outType ) 11 | { 12 | __CPROVER_assert( buf != NULL, "Buffer is not NULL" ); 13 | __CPROVER_assert( query != NULL, "query is not NULL" ); 14 | 15 | JSONStatus_t ret = 0; 16 | 17 | __CPROVER_assume( ( ret == JSONNullParameter ) || ( ret == JSONBadParameter ) || 18 | ( ret == JSONSuccess ) || ( ret == JSONNotFound ) ); 19 | 20 | return ret; 21 | } 22 | -------------------------------------------------------------------------------- /tools/cmock/project.yml: -------------------------------------------------------------------------------- 1 | # Taken from amazon-freertos repository 2 | :cmock: 3 | :mock_prefix: mock_ 4 | :when_no_prototypes: :warn 5 | :enforce_strict_ordering: TRUE 6 | :plugins: 7 | - :ignore 8 | - :ignore_arg 9 | - :expect_any_args 10 | - :array 11 | - :callback 12 | - :return_thru_ptr 13 | :callback_include_count: true # include a count arg when calling the callback 14 | :callback_after_arg_check: false # check arguments before calling the callback 15 | :treat_as: 16 | uint8: HEX8 17 | uint16: HEX16 18 | uint32: UINT32 19 | int8: INT8 20 | bool: UINT8 21 | :includes: # This will add these includes to each mock. 22 | - 23 | - 24 | :treat_externs: :exclude # Now the extern-ed functions will be mocked. 25 | :weak: __attribute__((weak)) 26 | :treat_externs: :include -------------------------------------------------------------------------------- /docs/doxygen/include/size_table.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 |
**Code Size of AWS IoT Jobs (example generated with GCC for ARM Cortex-M)**

| File | With -O1 Optimization | With -Os Optimization |
| ------------------- | ------- | ------- |
| jobs.c | 2.9K | 2.6K |
| job_parser.c | 0.9K | 0.9K |
| ota_job_handler.c | 0.2K | 0.2K |
| core_json.c | 2.9K | 2.4K |
| **Total estimates** | **6.9K** | **6.1K** |
36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /tools/coverity/misra.config: -------------------------------------------------------------------------------- 1 | { 2 | "version" : "2.0", 3 | "standard" : "c2012", 4 | "title": "Coverity MISRA Configuration", 5 | "deviations" : [ 6 | { 7 | "deviation": "Directive 4.8", 8 | "category": "Advisory", 9 | "reason": "AfrOtaJobDocumentFields_t struct must be externally visible in able to be used by the application." 10 | }, 11 | { 12 | "deviation": "Directive 4.9", 13 | "category": "Advisory", 14 | "reason": "Allow inclusion of function like macros." 15 | }, 16 | { 17 | "deviation": "Rule 2.5", 18 | "category": "Advisory", 19 | "reason": "Allow unused macros. Library headers may define macros intended for the application's use, but not used by a specific file." 20 | }, 21 | { 22 | "deviation": "Rule 3.1", 23 | "category": "Required", 24 | "reason": "Allow nested comments. Documentation blocks contain comments for example code." 25 | }, 26 | { 27 | "deviation": "Rule 8.7", 28 | "reason": "API functions are not used by library. They must be externally visible in order to be used by the application." 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /test/cbmc/run_proofs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | UNWIND_COUNT=${UNWIND_COUNT:-10} 4 | 5 | JobsSourceDir="../../source" 6 | coreJSONSourceDir="coreJSON/source" 7 | OTAJobParserSourceDir="../../source/otaJobParser" 8 | 9 | #If coreJSON not found, clone it 10 | if [ ! 
-d "coreJSON" ]; then 11 | git clone https://github.com/FreeRTOS/coreJSON.git --depth 1 --branch v3.2.0 12 | fi 13 | 14 | exec cbmc proofs.c $JobsSourceDir/jobs.c stubs/strnlen.c \ 15 | stubs/JSON_Validate.c stubs/JSON_SearchConst.c \ 16 | $OTAJobParserSourceDir/job_parser.c $OTAJobParserSourceDir/ota_job_handler.c \ 17 | -I $JobsSourceDir/include -I $coreJSONSourceDir/include \ 18 | -I $OTAJobParserSourceDir/include -I include \ 19 | --unwindset strnAppend.0:26 --unwindset strnEq.0:26 \ 20 | --unwindset matchIdApi.0:84 --unwindset isValidID.0:65 \ 21 | --unwindset strlen.0:51 --unwindset strncpy.0:16 \ 22 | --bounds-check --pointer-check --memory-cleanup-check --div-by-zero-check \ 23 | --signed-overflow-check --unsigned-overflow-check --pointer-overflow-check \ 24 | --conversion-check --undefined-shift-check --enum-range-check \ 25 | --pointer-primitive-check --drop-unused-functions --nondet-static \ 26 | --unwinding-assertions --c99 "$@" --unwind "$UNWIND_COUNT" --json-ui \ 27 | -DUNWIND_COUNT="$UNWIND_COUNT" >&1 | tee output/latest/html/run.json 28 | -------------------------------------------------------------------------------- /source/otaJobParser/include/ota_job_processor.h: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License. 9 | */ 10 | 11 | #ifndef OTA_JOB_PROCESSOR_H 12 | #define OTA_JOB_PROCESSOR_H 13 | 14 | #include <stddef.h> 15 | 16 | #include <stdbool.h> 17 | #include <stdint.h> 18 | 19 | #include "job_parser.h" 20 | 21 | /** 22 | * @brief Signals if the job document provided is an AWS IoT Core OTA update document 23 | * 24 | * @param jobDoc The job document contained in the AWS IoT Job 25 | * @param jobDocLength The length of the job document 26 | * @param fields A pointer to a job document fields structure populated by call 27 | * @return int8_t The next file index in the job. Returns 0 if no additional files are available. Returns -1 if error. 28 | * 29 | * Example 30 | * @code{c} 31 | * 32 | * // The following example shows how to use the otaParser_parseJobDocFile API 33 | * // to parse out the fields of a received Job Document and populate the fields 34 | * // of the AfrOtaJobDocumentFields_t structure.
35 | * 36 | * const char * jobDoc; // Populated by call to Jobs_GetJobDocument 37 | * size_t jobDocLength; // Return value of Jobs_GetJobDocument 38 | * int8_t fileIndex = 0; 39 | * AfrOtaJobDocumentFields_t fields = {0}; // populated by API 40 | * 41 | * do 42 | * { 43 | * fileIndex = otaParser_parseJobDocFile(jobDoc, 44 | * jobDocLength, 45 | * fileIndex, 46 | * &fields); 47 | * } while(fileIndex > 0); 48 | * // File index will be -1 if an error occurred, and 0 if all files were 49 | * // processed 50 | * @endcode 51 | */ 52 | /* @[declare_otaparser_parsejobdocfile] */ 53 | int8_t otaParser_parseJobDocFile( const char * jobDoc, 54 | const size_t jobDocLength, 55 | const uint8_t fileIndex, 56 | AfrOtaJobDocumentFields_t * fields ); 57 | /* @[declare_otaparser_parsejobdocfile] */ 58 | 59 | #endif /*OTA_JOB_PROCESSOR_H*/ 60 | -------------------------------------------------------------------------------- /tools/cmock/coverage.cmake: -------------------------------------------------------------------------------- 1 | # Taken from amazon-freertos repository 2 | cmake_minimum_required(VERSION 3.13) 3 | set(BINARY_DIR ${CMAKE_BINARY_DIR}) 4 | # reset coverage counters 5 | execute_process( 6 | COMMAND lcov --directory ${CMAKE_BINARY_DIR} --base-directory 7 | ${CMAKE_BINARY_DIR} --zerocounters 8 | COMMAND mkdir -p ${CMAKE_BINARY_DIR}/coverage) 9 | # make the initial/baseline capture zeroed out files 10 | execute_process( 11 | COMMAND 12 | lcov --directory ${CMAKE_BINARY_DIR} --base-directory ${CMAKE_BINARY_DIR} 13 | --initial --capture --rc lcov_branch_coverage=1 --rc 14 | genhtml_branch_coverage=1 15 | --output-file=${CMAKE_BINARY_DIR}/base_coverage.info) 16 | file(GLOB files "${CMAKE_BINARY_DIR}/bin/tests/*") 17 | 18 | set(REPORT_FILE ${CMAKE_BINARY_DIR}/utest_report.txt) 19 | file(WRITE ${REPORT_FILE} "") 20 | # execute all files in bin directory, gathering the output to show it in CI 21 | foreach(testname ${files}) 22 | get_filename_component(test ${testname} NAME_WLE) 23 | message("Running ${testname}") 24 | execute_process(COMMAND ${testname} 25 | OUTPUT_FILE ${CMAKE_BINARY_DIR}/${test}_out.txt) 26 | 27 | file(READ ${CMAKE_BINARY_DIR}/${test}_out.txt CONTENTS) 28 | file(APPEND ${REPORT_FILE} "${CONTENTS}") 29 | endforeach() 30 | 31 | # generate Junit style xml output 32 | execute_process(COMMAND ruby ${CMOCK_DIR}/vendor/unity/auto/parse_output.rb -xml 33 | ${REPORT_FILE} WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) 34 | 35 | # capture data after running the tests 36 | execute_process( 37 | COMMAND 38 | lcov --capture --rc lcov_branch_coverage=1 --rc genhtml_branch_coverage=1 39 | --base-directory ${CMAKE_BINARY_DIR} --directory ${CMAKE_BINARY_DIR} 40 | --output-file ${CMAKE_BINARY_DIR}/second_coverage.info) 41 | 42 | # combine baseline results (zeros) with the one after running the tests 43 | execute_process( 44 | COMMAND 45 | lcov --base-directory ${CMAKE_BINARY_DIR} --directory ${CMAKE_BINARY_DIR} 46 | --add-tracefile ${CMAKE_BINARY_DIR}/base_coverage.info --add-tracefile 47 | ${CMAKE_BINARY_DIR}/second_coverage.info --output-file 48 | ${CMAKE_BINARY_DIR}/coverage.info --no-external --rc lcov_branch_coverage=1) 49 | execute_process( 50 | COMMAND 51 | genhtml --rc lcov_branch_coverage=1 --branch-coverage --output-directory 52 | ${CMAKE_BINARY_DIR}/coverage ${CMAKE_BINARY_DIR}/coverage.info) -------------------------------------------------------------------------------- /source/otaJobParser/include/job_parser.h: -------------------------------------------------------------------------------- 1
| /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License. 9 | */ 10 | 11 | #ifndef JOB_PARSER_H 12 | #define JOB_PARSER_H 13 | 14 | #include 15 | #include 16 | #include 17 | 18 | /** 19 | * @ingroup jobs_structs 20 | * @brief struct containing the fields of an AFR OTA Job Document 21 | */ 22 | typedef struct 23 | { 24 | /** @brief Code Signing Signature */ 25 | const char * signature; 26 | 27 | /** @brief Length of signature */ 28 | size_t signatureLen; 29 | 30 | /** @brief File path to store OTA Update on device */ 31 | const char * filepath; 32 | 33 | /** @brief Length of filepath */ 34 | size_t filepathLen; 35 | 36 | /** @brief Path to Code Signing Certificate on Device */ 37 | const char * certfile; 38 | 39 | /** @brief Length of certfile */ 40 | size_t certfileLen; 41 | 42 | /** @brief Authentication Scheme for HTTP URL ( null for MQTT ) */ 43 | const char * authScheme; 44 | 45 | /** @brief Length of authScheme */ 46 | size_t authSchemeLen; 47 | 48 | /** @brief MQTT Stream or HTTP URL */ 49 | const char * imageRef; 50 | 51 | /** @brief Length of imageRef */ 52 | size_t imageRefLen; 53 | 54 | /** @brief File ID */ 55 | uint32_t fileId; 56 | 57 | /** @brief Size of the OTA Update */ 58 | uint32_t fileSize; 59 | 60 | /** @brief File Type */ 61 | uint32_t fileType; 62 | } AfrOtaJobDocumentFields_t; 63 | 64 | /** 65 | * @brief Populate the fields of 'result', returning 66 | * true if successful. 67 | * 68 | * @param jobDoc FreeRTOS OTA job document 69 | * @param jobDocLength OTA job document length 70 | * @param fileIndex The index of the file to use properties of 71 | * @param result Job document structure to populate 72 | * @return true Job document fields were parsed from the document 73 | * @return false Job document fields were not parsed from the document 74 | */ 75 | /* @[declare_populatejobdocfields] */ 76 | bool populateJobDocFields( const char * jobDoc, 77 | const size_t jobDocLength, 78 | int32_t fileIndex, 79 | AfrOtaJobDocumentFields_t * result ); 80 | /* @[declare_populatejobdocfields] */ 81 | 82 | #endif /* JOB_PARSER_H */ 83 | -------------------------------------------------------------------------------- /test/cbmc/include/jobs_annex.h: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v1.3.0 3 | * Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | * 5 | * SPDX-License-Identifier: MIT 6 | * 7 | * Permission is hereby granted, free of charge, to any person obtaining a copy of 8 | * this software and associated documentation files (the "Software"), to deal in 9 | * the Software without restriction, including without limitation the rights to 10 | * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 11 | * the Software, and to permit persons to whom the Software is furnished to do so, 12 | * subject to the following conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be included in all 15 | * copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 19 | * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 20 | * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 21 | * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 22 | * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | */ 24 | 25 | #ifndef JOBS_ANNEX_H_ 26 | #define JOBS_ANNEX_H_ 27 | 28 | #include "jobs.h" 29 | 30 | /** 31 | * @brief check that an enum belongs to JobsTopic_t 32 | */ 33 | #define jobsTopicEnum( x ) ( ( x >= JobsInvalidTopic ) && ( x < JobsMaxTopic ) ) 34 | 35 | /** 36 | * @brief check that an enum belongs to a subset of JobsStatus_t 37 | * returned by the named function 38 | */ 39 | #define parameterEnum( x ) ( x == JobsBadParameter ) 40 | #define strnAppendFailEnum( x ) ( x == JobsBufferTooSmall ) 41 | #define strnAppendEnum( x ) ( ( x == JobsSuccess ) || strnAppendFailEnum( x ) ) 42 | #define jobsCommonEnum( x ) ( parameterEnum( x ) || strnAppendEnum( x ) ) 43 | #define jobsGetTopicEnum( x ) jobsCommonEnum( x ) 44 | #define jobsGetPendingEnum( x ) jobsCommonEnum( x ) 45 | #define jobsStartNextEnum( x ) jobsCommonEnum( x ) 46 | #define jobsDescribeEnum( x ) jobsCommonEnum( x ) 47 | #define jobsUpdateEnum( x ) jobsCommonEnum( x ) 48 | #define strnEqFailEnum( x ) ( x == JobsNoMatch ) 49 | #define strnEqEnum( x ) ( ( x == JobsSuccess ) || strnEqFailEnum( x ) ) 50 | #define jobsMatchTopicEnum( x ) ( parameterEnum( x ) || strnEqEnum( x ) ) 51 | 52 | /* 53 | * These are declarations for the (normally) static functions from jobs.c. 54 | * Please see jobs.c for documentation. 55 | */ 56 | 57 | JobsStatus_t strnAppend( char * buffer, 58 | size_t * start, 59 | size_t max, 60 | const char * value, 61 | size_t valueLength ); 62 | 63 | JobsStatus_t strnEq( const char * a, 64 | const char * b, 65 | size_t n ); 66 | 67 | #endif /* ifndef JOBS_ANNEX_H_ */ 68 | -------------------------------------------------------------------------------- /docs/doxygen/style.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Stylesheet for Doxygen HTML output. 3 | * 4 | * This file defines styles for custom elements in the header/footer and 5 | * overrides some of the default Doxygen styles. 6 | * 7 | * Styles in this file do not affect the treeview sidebar. 8 | */ 9 | 10 | /* Set the margins to place a small amount of whitespace on the left and right 11 | * side of the page. */ 12 | div.contents { 13 | margin-left:4em; 14 | margin-right:4em; 15 | } 16 | 17 | /* Justify text in paragraphs. */ 18 | p { 19 | text-align: justify; 20 | } 21 | 22 | /* Style of section headings. */ 23 | h1 { 24 | border-bottom: 1px solid #879ECB; 25 | color: #354C7B; 26 | font-size: 160%; 27 | font-weight: normal; 28 | padding-bottom: 4px; 29 | padding-top: 8px; 30 | } 31 | 32 | /* Style of subsection headings. */ 33 | h2:not(.memtitle):not(.groupheader) { 34 | font-size: 125%; 35 | margin-bottom: 0px; 36 | margin-top: 16px; 37 | padding: 0px; 38 | } 39 | 40 | /* Style of paragraphs immediately after subsection headings. */ 41 | h2 + p { 42 | margin: 0px; 43 | padding: 0px; 44 | } 45 | 46 | /* Style of subsection headings. */ 47 | h3 { 48 | font-size: 100%; 49 | margin-bottom: 0px; 50 | margin-left: 2em; 51 | margin-right: 2em; 52 | } 53 | 54 | /* Style of paragraphs immediately after subsubsection headings. */ 55 | h3 + p { 56 | margin-top: 0px; 57 | margin-left: 2em; 58 | margin-right: 2em; 59 | } 60 | 61 | /* Style of the prefix "AWS IoT Device SDK C" that appears in the header. 
*/ 62 | #csdkprefix { 63 | color: #757575; 64 | } 65 | 66 | /* Style of the "Return to main page" link that appears in the header. */ 67 | #returntomain { 68 | padding: 0.5em; 69 | } 70 | 71 | /* Style of the dividers on Configuration Settings pages. */ 72 | div.configpagedivider { 73 | margin-left: 0px !important; 74 | margin-right: 0px !important; 75 | margin-top: 20px !important; 76 | } 77 | 78 | /* Style of configuration setting names. */ 79 | dl.section.user ~ h1 { 80 | border-bottom: none; 81 | color: #000000; 82 | font-family: monospace, fixed; 83 | font-size: 16px; 84 | margin-bottom: 0px; 85 | margin-left: 2em; 86 | margin-top: 1.5em; 87 | } 88 | 89 | /* Style of paragraphs on a configuration settings page. */ 90 | dl.section.user ~ * { 91 | margin-bottom: 10px; 92 | margin-left: 4em; 93 | margin-right: 4em; 94 | margin-top: 0px; 95 | } 96 | 97 | /* Hide the configuration setting marker. */ 98 | dl.section.user { 99 | display: none; 100 | } 101 | 102 | /* Overrides for code fragments and lines. */ 103 | div.fragment { 104 | background: #ffffff; 105 | border: none; 106 | padding: 5px; 107 | } 108 | 109 | div.line { 110 | color: #3a3a3a; 111 | } 112 | 113 | /* Overrides for code syntax highlighting colors. */ 114 | span.comment { 115 | color: #008000; 116 | } 117 | 118 | span.keyword, span.keywordtype, span.keywordflow { 119 | color: #0000ff; 120 | } 121 | 122 | span.preprocessor { 123 | color: #50015a; 124 | } 125 | 126 | span.stringliteral, span.charliteral { 127 | color: #800c0c; 128 | } 129 | 130 | a.code, a.code:visited, a.line, a.line:visited { 131 | color: #496194; 132 | } 133 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog for AWS IoT Jobs Library 2 | 3 | ## v2.0.0 4 | - [#105](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/105), [#109](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/109) Change the `Jobs_UpdateMsg` API signature to include optional status parameters. 5 | - [#99](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/99) Update CBMC version to 5.95.1 6 | - [#113](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/113) Update links for coverity related information. 
7 | - [#115](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/115) Add support for optional job document fields 8 | 9 | ## v1.5.1 (June 2024) 10 | - Fix doxygen deployment on Github 11 | 12 | ## v1.5.0 (May 2024) 13 | - [#95](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/95) Fix misra violations 14 | - [#93](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/93) Performance and safety improvements 15 | 16 | ## v1.4.0 (November 2023) 17 | - [#90](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/90) Update doxygen to reflect addition of OTA Job Parser 18 | - [#88](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/88) ADD OTA Job Parser for parsing out the fields of an incoming Job Document sent to a device from AWS IoT 19 | - [#86](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/85) MISRA C:2012 Compliance update 20 | - [#85](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/85) Add API's to jobs.c for interacting with the AWS IoT Jobs Service 21 | 22 | ## v1.3.0 (October 2022) 23 | 24 | ### Other Updates: 25 | - [#67](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/67) MISRA C:2012 Compliance update 26 | - [#65](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/65) Update CBMC starter kit 27 | - [#61](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/61) Loop invariant update 28 | 29 | ## v1.2.0 (November 2021) 30 | 31 | ### Other Updates: 32 | - [#57](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/57) Update litani and template submodules for cbmc 33 | - [#55](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/55) Update Doxygen version to 1.9.2 34 | - [#54](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/54) Updated aws-templates-for-cbmc-proofs submodule 35 | 36 | ## v1.1.1 (July 2021) 37 | 38 | ### API Changes: 39 | - [#50](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/50) Move declaration of temporary variables after length check 40 | 41 | ## v1.1.0 (March 2021) 42 | 43 | ### API Changes: 44 | - [#34](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/34) and [#35](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/35) Add support for generating and matching DescribeJobExecution API topics with `$next` as a jobID. 45 | - [#38](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/38) Add new utility macros to API for compile-time generation of AWS IoT Jobs API topics. 46 | 47 | ### Other Updates: 48 | - [#37](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/37) Add code examples in documentation of API functions. 49 | - [#43](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/pull/43) Add MISRA rule 2.5 in list of ignored violations in MISRA.md file. 50 | 51 | ## v1.0.0 (November 2020) 52 | 53 | This is the first release of the AWS IoT Jobs Library in this 54 | repository. 55 | -------------------------------------------------------------------------------- /test/unit-test/catch_assert.h: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
4 | * 5 | * SPDX-License-Identifier: MIT 6 | * 7 | * Permission is hereby granted, free of charge, to any person obtaining a copy of 8 | * this software and associated documentation files (the "Software"), to deal in 9 | * the Software without restriction, including without limitation the rights to 10 | * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 11 | * the Software, and to permit persons to whom the Software is furnished to do so, 12 | * subject to the following conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be included in all 15 | * copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 19 | * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 20 | * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 21 | * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 22 | * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | */ 24 | 25 | /* 26 | * How to catch an assert: 27 | * - save a jump buffer where execution will resume after the assert 28 | * - setup a handler for the abort signal, call longjmp within 29 | * - optional - close stderr ( fd 2 ) to discard the assert message 30 | * 31 | * Unity also does a longjmp within its TEST_ASSERT* macros, 32 | * so the macro below restores stderr and the prior abort handler 33 | * before calling the Unity macro. 34 | */ 35 | 36 | #ifndef CATCH_ASSERT_H_ 37 | #define CATCH_ASSERT_H_ 38 | 39 | #include 40 | #include 41 | #include 42 | 43 | #ifndef CATCH_JMPBUF 44 | #define CATCH_JMPBUF waypoint_ 45 | #endif 46 | 47 | extern jmp_buf CATCH_JMPBUF; 48 | 49 | #pragma GCC diagnostic push 50 | #pragma GCC diagnostic ignored "-Wunused-function" 51 | static void catchHandler_( int signal ) 52 | { 53 | longjmp( CATCH_JMPBUF, signal ); 54 | } 55 | #pragma GCC diagnostic pop 56 | 57 | #define catch_assert( x ) \ 58 | do { \ 59 | int try = 0, catch = 0; \ 60 | int saveFd = dup( 2 ); \ 61 | struct sigaction sa = { 0 }, saveSa; \ 62 | sa.sa_handler = catchHandler_; \ 63 | sigaction( SIGABRT, &sa, &saveSa ); \ 64 | close( 2 ); \ 65 | if( setjmp( CATCH_JMPBUF ) == 0 ) \ 66 | { \ 67 | try++; \ 68 | x; \ 69 | } \ 70 | else \ 71 | { \ 72 | catch++; \ 73 | } \ 74 | sigaction( SIGABRT, &saveSa, NULL ); \ 75 | dup2( saveFd, 2 ); \ 76 | close( saveFd ); \ 77 | TEST_ASSERT_EQUAL( try, catch ); \ 78 | } while( 0 ) 79 | 80 | #endif /* ifndef CATCH_ASSERT_H_ */ 81 | -------------------------------------------------------------------------------- /MigrationGuide.md: -------------------------------------------------------------------------------- 1 | # Migration guide for Jobs-for-AWS-IoT-embedded-sdk version $\geq$ v2.0.0 2 | 3 | With Jobs-for-AWS-IoT-embedded-sdk versions $\geq$ v2.0.0, there are some breaking changes that need to be addressed when upgrading. 4 | 5 | ## Breaking Changes 6 | 7 | The signature of the `Jobs_UpdateMsg` API has been changed from 8 | ```c 9 | size_t Jobs_UpdateMsg( JobCurrentStatus_t status, 10 | const char * expectedVersion, 11 | size_t expectedVersionLength, 12 | char * buffer, 13 | size_t bufferSize ) 14 | ``` 15 | to 16 | ```c 17 | size_t Jobs_UpdateMsg( JobsUpdateRequest_t request, 18 | char * buffer, 19 | size_t bufferSize ); 20 | ``` 21 | A new structure `JobsUpdateRequest_t` has been introduced. 
This struct is now passed as a parameter to the API to include values like `status`, `expectedVersion` and `expectedVersionLength` which were earlier direct inputs to the API. All values in the struct except `status` are now optional. Following is the definition of the `JobsUpdateRequest_t` struct. 22 | ```c 23 | typedef struct 24 | { 25 | JobCurrentStatus_t status; /**< Status to update the job to. */ 26 | const char * expectedVersion; /**< Expected version, optional. */ 27 | size_t expectedVersionLength; /**< Expected version length, optional. */ 28 | const char * statusDetails; /**< JSON key-value pair, optional. */ 29 | size_t statusDetailsLength; /**< JSON key-value pair length, optional. */ 30 | } JobsUpdateRequest_t; 31 | ``` 32 | ### Old Code Snippet 33 | ```c 34 | const char * expectedVersion = "2"; 35 | size_t expectedVersionLength = strlen( expectedVersion ); 36 | JobCurrentStatus_t status = Succeeded; 37 | char messageBuffer[ UPDATE_JOB_MSG_LENGTH ] = {0}; 38 | size_t messageLength = 0U; 39 | 40 | messageLength = Jobs_UpdateMsg( status, 41 | expectedVersion, 42 | expectedVersionLength, 43 | messageBuffer, 44 | UPDATE_JOB_MSG_LENGTH ); 45 | 46 | if( messageLength > 0 ) 47 | { 48 | // The message string of length, messageLength, has been 49 | // generated in the buffer, messageBuffer, for the UpdateJobExecution API 50 | // Publish this message to the topic generated by Jobs_Update using an 51 | // MQTT client of your choice. 52 | } 53 | ``` 54 | 55 | ### New Code Snippet 56 | ```c 57 | const char * expectedVersion = "2"; 58 | const char * statusDetails = "{\"key\":\"value\"}"; // This can be any user defined JSON key value pair 59 | char messageBuffer[ UPDATE_JOB_MSG_LENGTH ] = {0}; 60 | size_t messageLength = 0U; 61 | 62 | JobsUpdateRequest_t request; 63 | request.status = Succeeded; 64 | request.expectedVersion = expectedVersion; 65 | request.expectedVersionLength = strlen( expectedVersion ); 66 | request.statusDetails = statusDetails; 67 | request.statusDetailsLength = strlen( statusDetails ); 68 | 69 | messageLength = Jobs_UpdateMsg( request, 70 | messageBuffer, 71 | UPDATE_JOB_MSG_LENGTH ); 72 | 73 | if( messageLength > 0 ) 74 | { 75 | // The message string of length, messageLength, has been 76 | // generated in the buffer, messageBuffer, for the UpdateJobExecution API 77 | // Publish this message to the topic generated by Jobs_Update using an 78 | // MQTT client of your choice. 79 | } 80 | ``` 81 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 | 14 | When filing an issue, please check [existing open](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/issues), or [recently closed](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/issues?q=is%3Aissue+is%3Aclosed), issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 1. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 1. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 1. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 1. Ensure that your contributions conform to the [style guide](https://docs.aws.amazon.com/embedded-csdk/202011.00/lib-ref/docs/doxygen/output/html/guide_developer_styleguide.html). 35 | 1. Format your code with uncrustify, using the config available in [FreeRTOS/CI-CD-Github-Actions](https://github.com/FreeRTOS/CI-CD-Github-Actions/blob/main/formatting/uncrustify.cfg). 36 | 1. Ensure local tests pass. 37 | 1. Commit to your fork using clear commit messages. 38 | 1. Send us a pull request, answering any default questions in the pull request interface. 39 | 1. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 40 | 41 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 42 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 43 | 44 | 45 | ## Finding contributions to work on 46 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels ((enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/labels?q=help+wanted) issues is a great place to start. 47 | 48 | 49 | ## Code of Conduct 50 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 51 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 52 | opensource-codeofconduct@amazon.com with any additional questions or comments. 53 | 54 | 55 | ## Security issue notifications 56 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](https://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 57 | 58 | 59 | ## Licensing 60 | 61 | See the [LICENSE](../LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 
62 | 63 | We may ask you to sign a [Contributor License Agreement (CLA)](https://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 64 | -------------------------------------------------------------------------------- /source/otaJobParser/ota_job_handler.c: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License. 9 | */ 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | #include "core_json.h" 16 | 17 | #include "job_parser.h" 18 | #include "ota_job_processor.h" 19 | 20 | static bool isFreeRTOSOtaJob( const char * jobDoc, 21 | const size_t jobDocLength ); 22 | static bool isJobFileIndexValid( const char * jobDoc, 23 | const size_t jobDocLength, 24 | const uint8_t fileIndex ); 25 | 26 | /** 27 | * @brief Signals if the job document provided is a FreeRTOS OTA update document 28 | * 29 | * @param jobDoc The job document contained in the AWS IoT Job 30 | * @param jobDocLength The length of the job document 31 | * @param fields A pointer to an job document fields structure populated by call 32 | * @return int8_t The next file index in the job. Returns 0 if no additional files are available. Returns -1 if error. 33 | */ 34 | int8_t otaParser_parseJobDocFile( const char * jobDoc, 35 | const size_t jobDocLength, 36 | const uint8_t fileIndex, 37 | AfrOtaJobDocumentFields_t * fields ) 38 | { 39 | bool fieldsPopulated = false; 40 | int8_t nextFileIndex = -1; 41 | 42 | if( ( jobDoc != NULL ) && ( jobDocLength > 0U ) ) 43 | { 44 | if( isFreeRTOSOtaJob( jobDoc, jobDocLength ) && isJobFileIndexValid( jobDoc, jobDocLength, fileIndex ) ) 45 | { 46 | fieldsPopulated = populateJobDocFields( jobDoc, 47 | jobDocLength, 48 | ( int32_t ) fileIndex, 49 | fields ); 50 | } 51 | 52 | if( fieldsPopulated ) 53 | { 54 | nextFileIndex = ( isJobFileIndexValid( jobDoc, jobDocLength, fileIndex + 1U ) ) ? ( int8_t ) ( ( int8_t ) fileIndex + 1 ) : ( int8_t ) 0; 55 | } 56 | } 57 | 58 | return nextFileIndex; 59 | } 60 | 61 | static bool isFreeRTOSOtaJob( const char * jobDoc, 62 | const size_t jobDocLength ) 63 | { 64 | JSONStatus_t isFreeRTOSOta = JSONIllegalDocument; 65 | const char * afrOtaDocHeader; 66 | size_t afrOtaDocHeaderLength = 0U; 67 | 68 | /* FreeRTOS OTA updates have a top level "afr_ota" job document key. 
69 | * Check for this to ensure the document is an FreeRTOS OTA update */ 70 | isFreeRTOSOta = JSON_SearchConst( jobDoc, 71 | jobDocLength, 72 | "afr_ota", 73 | 7U, 74 | &afrOtaDocHeader, 75 | &afrOtaDocHeaderLength, 76 | NULL ); 77 | 78 | return( JSONSuccess == isFreeRTOSOta ); 79 | } 80 | 81 | static bool isJobFileIndexValid( const char * jobDoc, 82 | const size_t jobDocLength, 83 | const uint8_t fileIndex ) 84 | { 85 | JSONStatus_t isFreeRTOSOta = JSONIllegalDocument; 86 | const char * fileValue; 87 | size_t fileValueLength = 0U; 88 | char file[ 17U ] = "afr_ota.files[i]"; 89 | 90 | if( fileIndex <= 9U ) 91 | { 92 | int32_t index = ( ( int32_t ) '0' + ( int32_t ) fileIndex ); 93 | file[ 14U ] = ( char ) index; 94 | 95 | isFreeRTOSOta = JSON_SearchConst( jobDoc, 96 | jobDocLength, 97 | file, 98 | 16U, 99 | &fileValue, 100 | &fileValueLength, 101 | NULL ); 102 | } 103 | 104 | return( JSONSuccess == isFreeRTOSOta ); 105 | } 106 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI Checks 2 | on: 3 | push: 4 | branches: ["**"] 5 | pull_request: 6 | branches: [main] 7 | workflow_dispatch: 8 | jobs: 9 | unittest: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Clone This Repo 13 | uses: actions/checkout@v3 14 | - name: Build 15 | run: | 16 | sudo apt-get install -y lcov sed 17 | cmake -S test -B build/ \ 18 | -G "Unix Makefiles" \ 19 | -DCMAKE_BUILD_TYPE=Debug \ 20 | -DUNITTEST=1 \ 21 | -DCMAKE_C_FLAGS='-Wall -Wextra -Werror' 22 | make -C build/ all 23 | - name: Test 24 | run: | 25 | cd build/ 26 | ctest -E system --output-on-failure 27 | cd .. 28 | - name: Run Coverage 29 | run: | 30 | make -C build/ coverage 31 | declare -a EXCLUDE=("\*_deps\*") 32 | echo ${EXCLUDE[@]} | xargs lcov --rc lcov_branch_coverage=1 -r build/coverage.info -o build/coverage.info 33 | lcov --rc branch_coverage=1 --summary build/coverage.info 34 | - name: Check Coverage 35 | uses: FreeRTOS/CI-CD-Github-Actions/coverage-cop@main 36 | with: 37 | coverage-file: ./build/coverage.info 38 | complexity: 39 | runs-on: ubuntu-latest 40 | steps: 41 | - uses: actions/checkout@v3 42 | - name: Check complexity 43 | uses: FreeRTOS/CI-CD-Github-Actions/complexity@main 44 | with: 45 | path: ./ 46 | doxygen: 47 | runs-on: ubuntu-latest 48 | steps: 49 | - uses: actions/checkout@v3 50 | - name: Run doxygen build 51 | uses: FreeRTOS/CI-CD-Github-Actions/doxygen@main 52 | with: 53 | path: ./ 54 | spell-check: 55 | runs-on: ubuntu-latest 56 | steps: 57 | - name: Clone This Repo 58 | uses: actions/checkout@v3 59 | - name: Run spellings check 60 | uses: FreeRTOS/CI-CD-Github-Actions/spellings@main 61 | with: 62 | path: ./ 63 | formatting: 64 | runs-on: ubuntu-20.04 65 | steps: 66 | - uses: actions/checkout@v3 67 | - name: Check formatting 68 | uses: FreeRTOS/CI-CD-Github-Actions/formatting@main 69 | with: 70 | path: ./ 71 | git-secrets: 72 | runs-on: ubuntu-latest 73 | steps: 74 | - uses: actions/checkout@v3 75 | - name: Checkout awslabs/git-secrets 76 | uses: actions/checkout@v3 77 | with: 78 | repository: awslabs/git-secrets 79 | ref: master 80 | path: git-secrets 81 | - name: Install git-secrets 82 | run: cd git-secrets && sudo make install && cd .. 
83 | - name: Run git-secrets 84 | run: | 85 | git-secrets --register-aws 86 | git-secrets --scan 87 | memory_statistics: 88 | runs-on: ubuntu-latest 89 | steps: 90 | - uses: actions/checkout@v3 91 | - name: Clone coreJSON 92 | run: git clone https://github.com/FreeRTOS/coreJSON.git --depth 1 --branch v3.2.0 93 | - name: Install Python3 94 | uses: actions/setup-python@v3 95 | with: 96 | python-version: "3.11.0" 97 | - name: Measure sizes 98 | uses: FreeRTOS/CI-CD-Github-Actions/memory_statistics@main 99 | with: 100 | config: .github/memory_statistics_config.json 101 | check_against: docs/doxygen/include/size_table.md 102 | 103 | link-verifier: 104 | runs-on: ubuntu-latest 105 | steps: 106 | - uses: actions/checkout@v3 107 | - name: Check Links 108 | env: 109 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 110 | uses: FreeRTOS/CI-CD-Github-Actions/link-verifier@main 111 | with: 112 | exclude-urls: https://s3.region.amazonaws.com/joe-ota, https://www.misra.org.uk 113 | 114 | verify-manifest: 115 | runs-on: ubuntu-latest 116 | steps: 117 | - uses: actions/checkout@v3 118 | with: 119 | sparse-checkout: . 120 | 121 | - name: Run manifest verifier 122 | uses: FreeRTOS/CI-CD-GitHub-Actions/manifest-verifier@main 123 | with: 124 | path: ./ 125 | fail-on-incorrect-version: true 126 | 127 | proof_ci: 128 | if: ${{ github.event.pull_request }} 129 | runs-on: cbmc_ubuntu-latest_16-core 130 | steps: 131 | - name: Set up CBMC runner 132 | uses: FreeRTOS/CI-CD-Github-Actions/set_up_cbmc_runner@main 133 | with: 134 | cbmc_version: "5.95.1" 135 | - name: Run CBMC 136 | uses: FreeRTOS/CI-CD-Github-Actions/run_cbmc@main 137 | with: 138 | proofs_dir: test/cbmc 139 | run_cbmc_proofs_command: ./run_proofs.sh 140 | -------------------------------------------------------------------------------- /tools/coverity/README.md: -------------------------------------------------------------------------------- 1 | # Static code analysis for Jobs-for-AWS-IoT-embedded-sdk library 2 | This directory is made for the purpose of statically testing the MISRA C:2012 compliance of Jobs-for-AWS-IoT-embedded-sdk using 3 | [Synopsys Coverity](https://www.synopsys.com/software-integrity/security-testing/static-analysis-sast.html) static analysis tool. 4 | To that end, this directory provides a [configuration file](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/blob/main/tools/coverity/misra.config) to use when 5 | building a binary for the tool to analyze. 6 | 7 | > **Note** 8 | For generating the report as outlined below, we have used Coverity version 2023.6.1. 9 | 10 | For details regarding the suppressed violations in the report (which can be generated using the instructions described below), please 11 | see the [MISRA.md](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/blob/main/MISRA.md) file. 12 | 13 | ## Getting Started 14 | ### Prerequisites 15 | You can run this on a platform supported by Coverity. The list and other details can be found [here](https://documentation.blackduck.com/bundle/coverity-docs-2024.9/page/deploy-install-guide/topics/supported_platforms_for_coverity_analysis.html). 16 | To compile and run the Coverity target successfully, you must have the following: 17 | 18 | 1. CMake version > 3.13.0 (You can check whether you have this by typing `cmake --version`) 19 | 2. GCC compiler 20 | - You can see the downloading and installation instructions [here](https://gcc.gnu.org/install/). 21 | 3. Download the repo and include the submodules using the following commands. 
22 | - `git clone --recurse-submodules git@github.com:aws/Jobs-for-AWS-IoT-embedded-sdk.git ./Jobs-for-AWS-IoT-embedded-sdk` 23 | - `cd ./Jobs-for-AWS-IoT-embedded-sdk` 24 | - `git submodule update --checkout --init --recursive` 25 | 26 | ### To build and run coverity: 27 | Go to the root directory of the library and run the following commands in terminal: 28 | 1. Update the compiler configuration in Coverity 29 | ~~~ 30 | cov-configure --force --compiler cc --comptype gcc 31 | ~~~ 32 | 2. Create the build files using CMake in a `build` directory 33 | ~~~ 34 | cmake -B build -S test -DCOV_ANALYSIS 35 | ~~~ 36 | 3. Go to the build directory and copy the coverity configuration file 37 | ~~~ 38 | cd build/ 39 | ~~~ 40 | 4. Build the static analysis target 41 | ~~~ 42 | cov-build --emit-complementary-info --dir cov-out make coverity_analysis 43 | ~~~ 44 | 5. Go to the Coverity output directory (`cov-out`) and begin Coverity static analysis 45 | ~~~ 46 | cd cov-out/ 47 | cov-analyze --dir . --coding-standard-config ../../tools/coverity/misra.config --tu-pattern "file('.*/source/.*')" 48 | ~~~ 49 | 6. Format the errors in HTML format so that it is more readable while removing the test and build directory from the report 50 | ~~~ 51 | cov-format-errors --dir . --file "source" --exclude-files '(/build/|/test/)' --html-output html-out; 52 | ~~~ 53 | 7. Format the errors in JSON format to perform a jq query to get a simplified list of any exceptions. 54 | NOTE: A blank output means there are no defects that aren't being suppressed by the config or inline comments. 55 | ~~~ 56 | cov-format-errors --dir . --file "source" --exclude-files '(/build/|/test/)' --json-output-v2 defects.json; 57 | echo -e "\n-------------------------Non-Suppresed Deviations, if any, Listed Below-------------------------\n"; 58 | jq '.issues[] | .events[] | .eventTag ' defects.json | sort | uniq -c | sort -nr; 59 | echo -e "\n-------------------------Non-Suppresed Deviations, if any, Listed Above-------------------------\n"; 60 | ~~~ 61 | 62 | For your convenience the commands above are below to be copy/pasted into a UNIX command friendly terminal. 63 | ~~~ 64 | cov-configure --force --compiler cc --comptype gcc; 65 | cmake -B build -S test -DCOV_ANALYSIS; 66 | cd build/; 67 | cov-build --emit-complementary-info --dir cov-out make coverity_analysis; 68 | cd cov-out/ 69 | cov-analyze --dir . --coding-standard-config ../../tools/coverity/misra.config; 70 | cov-format-errors --dir . --file "source" --exclude-files '(/build/|/test/)' --html-output html-out; 71 | cov-format-errors --dir . --file "source" --exclude-files '(/build/|/test/)' --json-output-v2 defects.json; 72 | echo -e "\n-------------------------Non-Suppresed Deviations, if any, Listed Below-------------------------\n"; 73 | jq '.issues[] | .events[] | .eventTag ' defects.json | sort | uniq -c | sort -nr; 74 | echo -e "\n-------------------------Non-Suppresed Deviations, if any, Listed Above-------------------------\n"; 75 | cd ../../; 76 | ~~~ 77 | 78 | You should now have the HTML formatted violations list in a directory named `build/cov-out/html-output`. 79 | With the current configuration and the provided project, you should not see any deviations. 
-------------------------------------------------------------------------------- /test/unit-test/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.16.0) 2 | project( 3 | "AWS IoT Jobs Tests" 4 | VERSION 1.5.0 5 | LANGUAGES C) 6 | 7 | # Include filepaths for source and include. 8 | include(${MODULE_ROOT_DIR}/jobsFilePaths.cmake) 9 | include("${corejson_SOURCE_DIR}/jsonFilePaths.cmake") 10 | 11 | # ==================== Define your project name (edit) ======================== 12 | set(project_name "jobs") 13 | 14 | # ===================== Create your mock here (edit) ======================== 15 | 16 | # list the files to mock here 17 | list(APPEND mock_list "${MODULE_ROOT_DIR}/source/otaJobParser/include/job_parser.h") 18 | 19 | # list the directories your mocks need 20 | list(APPEND mock_include_list "${MODULE_ROOT_DIR}/source/otaJobParser/include") 21 | # list the definitions of your mocks to control what to be included 22 | list(APPEND mock_define_list "") 23 | 24 | # ================= Create the library under test here (edit) ================== 25 | 26 | # Base name for temporary files 27 | set( TEMP_BASE ${CMAKE_BINARY_DIR}/${project_name} ) 28 | 29 | # Strip static constraints so unit tests may call internal functions 30 | execute_process( COMMAND sed "s/^static //" 31 | WORKING_DIRECTORY ${CMAKE_BINARY_DIR} 32 | INPUT_FILE ${JOBS_SOURCES} 33 | OUTPUT_FILE ${TEMP_BASE}.c 34 | ) 35 | 36 | # Generate a header file for internal functions 37 | execute_process( COMMAND sed -n "/^static.*(/,/^{\$/{s/^static //; s/)\$/&;/; /{/d; p;}" 38 | WORKING_DIRECTORY ${CMAKE_BINARY_DIR} 39 | INPUT_FILE ${JOBS_SOURCES} 40 | OUTPUT_FILE ${TEMP_BASE}_annex.h 41 | ) 42 | 43 | execute_process(COMMAND cp ./source/otaJobParser/job_parser.c ./build/job_parser.c ) 44 | 45 | execute_process(COMMAND cp ./source/otaJobParser/ota_job_handler.c ./build/ota_job_handler.c ) 46 | 47 | set(OTA_HANDLER_TEST_SOURCES 48 | ${MODULE_ROOT_DIR}/build/job_parser.c 49 | ${MODULE_ROOT_DIR}/build/ota_job_handler.c) 50 | 51 | # list the files you would like to test here 52 | list(APPEND real_source_files ${OTA_HANDLER_TEST_SOURCES} ${JSON_SOURCES}) 53 | # list the directories the module under test includes 54 | list(APPEND real_include_directories . ${OTA_HANDLER_INCLUDES} 55 | ${JSON_INCLUDE_PUBLIC_DIRS}) 56 | # ===================== Create UnitTest Code here (edit) ===================== 57 | 58 | # list the directories your test needs to include 59 | list(APPEND test_include_directories . 
${OTA_HANDLER_INCLUDES}) 60 | 61 | # ============================= (end edit) =================================== 62 | 63 | # Create ota_job_handler unit test 64 | set(real_name "ota_job_handler_real") 65 | set(utest_name "ota_job_handler_utest") 66 | set(utest_source "ota_job_handler_utest.c") 67 | set(mock_name "ota_job_handler_mock") 68 | set(real_name "ota_job_handler_real") 69 | 70 | create_mock_list( 71 | ${mock_name} "${mock_list}" "${MODULE_ROOT_DIR}/tools/cmock/project.yml" 72 | "${mock_include_list}" "${mock_define_list}") 73 | 74 | create_real_library(${real_name} "${real_source_files}" 75 | "${real_include_directories}" "${mock_name}") 76 | 77 | list(APPEND utest_link_list -l${mock_name} lib${real_name}.a) 78 | 79 | list(APPEND utest_dep_list ${real_name}) 80 | 81 | create_test(${utest_name} ${utest_source} "${utest_link_list}" 82 | "${utest_dep_list}" "${test_include_directories}") 83 | 84 | # # Create job parser unit test 85 | set(real_name "job_parser_real") 86 | set(utest_name "job_parser_utest") 87 | set(utest_source "job_parser_utest.c") 88 | # No need to redefine mocks since we do not need any 89 | 90 | set(utest_link_list "") 91 | list(APPEND utest_dep_list ${real_name}) 92 | 93 | create_real_library(${real_name} "${real_source_files}" 94 | "${real_include_directories}" "") 95 | 96 | # Redefine the linked files to ignore the mock files 97 | list(APPEND utest_link_list lib${real_name}.a) 98 | 99 | create_test(${utest_name} ${utest_source} "${utest_link_list}" 100 | "${utest_dep_list}" "${test_include_directories}") 101 | 102 | # Create jobs unit test 103 | list(APPEND real_source_files ${TEMP_BASE}.c) 104 | list(APPEND real_include_directories ${JOBS_INCLUDE_PUBLIC_DIRS}) 105 | list(APPEND test_include_directories ${JOBS_INCLUDE_PUBLIC_DIRS} 106 | ${CMAKE_BINARY_DIR}) 107 | set(real_name "jobs_real") 108 | 109 | create_real_library(${real_name} 110 | "${real_source_files}" 111 | "${real_include_directories}" 112 | "" 113 | ) 114 | 115 | list(APPEND utest_link_list 116 | lib${real_name}.a 117 | ) 118 | 119 | list(APPEND utest_dep_list 120 | ${real_name} 121 | ) 122 | 123 | set(utest_name "jobs_utest") 124 | set(utest_source "jobs_utest.c") 125 | create_test(${utest_name} 126 | ${utest_source} 127 | "${utest_link_list}" 128 | "${utest_dep_list}" 129 | "${test_include_directories}" 130 | ) 131 | -------------------------------------------------------------------------------- /test/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.22.0) 2 | 3 | project( 4 | "AWS IoT Jobs Tests" 5 | VERSION 1.5.1 6 | LANGUAGES C) 7 | 8 | # Allow the project to be organized into folders. 9 | set_property(GLOBAL PROPERTY USE_FOLDERS ON) 10 | 11 | if(NOT DEFINED CMAKE_C_STANDARD) 12 | set(CMAKE_C_STANDARD 99) 13 | endif() 14 | if(NOT DEFINED CMAKE_C_STANDARD_REQUIRED) 15 | set(CMAKE_C_STANDARD_REQUIRED ON) 16 | endif() 17 | 18 | # If no configuration is defined, turn everything on. 19 | if( NOT DEFINED COV_ANALYSIS AND NOT DEFINED UNITTEST ) 20 | set( COV_ANALYSIS TRUE ) 21 | set( UNITTEST TRUE ) 22 | endif() 23 | 24 | # Do not allow in-source build. 25 | if(${PROJECT_SOURCE_DIR} STREQUAL ${PROJECT_BINARY_DIR}) 26 | message( 27 | FATAL_ERROR 28 | "In-source build is not allowed. Please build in a separate directory, such as ${PROJECT_SOURCE_DIR}/build." 29 | ) 30 | endif() 31 | 32 | # Set global path variables. 33 | get_filename_component(__MODULE_ROOT_DIR "${CMAKE_CURRENT_LIST_DIR}/.." 
34 | ABSOLUTE) 35 | set(MODULE_ROOT_DIR 36 | ${__MODULE_ROOT_DIR} 37 | CACHE INTERNAL "Jobs repository root.") 38 | 39 | include(FetchContent) 40 | 41 | FetchContent_Declare( 42 | CMock 43 | GIT_REPOSITORY https://github.com/ThrowTheSwitch/CMock.git 44 | GIT_TAG ed29ce388f4478bda1bdfb274d371e961b671c2b) 45 | 46 | FetchContent_Declare( 47 | CoreJSON 48 | # hash: sha256-r0lJff61NK2rPtO7Wr6RudFNQiLt1D4M30V7/p60Zi0= 49 | GIT_REPOSITORY https://github.com/FreeRTOS/coreJSON.git 50 | GIT_TAG dc1ab9130a1fb99b801a2a1fa8e9f42239f752be) 51 | 52 | FetchContent_MakeAvailable(CMock CoreJSON) 53 | 54 | # Add coreJSON library 55 | include("${corejson_SOURCE_DIR}/jsonFilePaths.cmake") 56 | add_library(coreJSON ${JSON_SOURCES}) 57 | target_include_directories(coreJSON PUBLIC ${JSON_INCLUDE_PUBLIC_DIRS}) 58 | 59 | # ================================ Coverity Analysis Configuration ================================= 60 | 61 | if( COV_ANALYSIS ) 62 | # Include filepaths for source and include. 63 | include( ${MODULE_ROOT_DIR}/jobsFilePaths.cmake ) 64 | # Target for Coverity analysis that builds the library. 65 | add_library( coverity_analysis 66 | ${JOBS_SOURCES} 67 | ${OTA_HANDLER_SOURCES} ) 68 | # JOBS public include path. 69 | target_include_directories( coverity_analysis PUBLIC ${JOBS_INCLUDE_PUBLIC_DIRS} 70 | ${OTA_HANDLER_INCLUDES} ) 71 | 72 | target_link_libraries(coverity_analysis PUBLIC coreJSON) 73 | 74 | # Build HTTP library target without logging 75 | target_compile_options(coverity_analysis PUBLIC -DNDEBUG ) 76 | endif() 77 | 78 | # ================================================================================================== 79 | if( UNITTEST ) 80 | add_library( 81 | unity STATIC 82 | "${cmock_SOURCE_DIR}/vendor/unity/src/unity.c" 83 | "${cmock_SOURCE_DIR}/vendor/unity/extras/fixture/src/unity_fixture.c" 84 | "${cmock_SOURCE_DIR}/vendor/unity/extras/memory/src/unity_memory.c") 85 | target_include_directories( 86 | unity 87 | PUBLIC "${cmock_SOURCE_DIR}/vendor/unity/src" 88 | "${cmock_SOURCE_DIR}/vendor/unity/extras/fixture/src" 89 | "${cmock_SOURCE_DIR}/vendor/unity/extras/memory/src" 90 | "${cmock_SOURCE_DIR}/src") 91 | 92 | set_target_properties( 93 | unity PROPERTIES ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib 94 | POSITION_INDEPENDENT_CODE ON) 95 | 96 | add_library(cmock STATIC) 97 | target_sources(cmock PRIVATE ${cmock_SOURCE_DIR}/src/cmock.c) 98 | target_include_directories( 99 | cmock 100 | PUBLIC "${cmock_SOURCE_DIR}/src" 101 | "${cmock_SOURCE_DIR}/vendor/unity/src/" 102 | "${cmock_SOURCE_DIR}/examples" 103 | "${cmock_SOURCE_DIR}/vendor/unity/extras/fixture/src" 104 | "${cmock_SOURCE_DIR}/vendor/unity/extras/memory/src") 105 | set_target_properties( 106 | cmock 107 | PROPERTIES ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib 108 | POSITION_INDEPENDENT_CODE ON 109 | COMPILE_FLAGS "-Og") 110 | 111 | # ==================================== Test Configuration ======================================== 112 | 113 | # Use CTest utility for managing test runs. This has to be added BEFORE defining 114 | # test targets with add_test() 115 | enable_testing() 116 | 117 | # Add function to enable CMock based tests and coverage. 118 | include(${MODULE_ROOT_DIR}/tools/cmock/create_test.cmake) 119 | 120 | # Include build configuration for unit tests. 121 | add_subdirectory(unit-test) 122 | 123 | # ==================================== Coverage Analysis configuration ======================================== 124 | 125 | # Add a target for running coverage on tests. 
126 | add_custom_target( 127 | coverage 128 | COMMAND ${CMAKE_COMMAND} -DCMOCK_DIR=${cmock_SOURCE_DIR} -P 129 | ${MODULE_ROOT_DIR}/tools/cmock/coverage.cmake 130 | DEPENDS cmock unity jobs_utest ota_job_handler_utest job_parser_utest 131 | WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) 132 | endif() 133 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## AWS IoT Jobs library 2 | 3 | **[API Documentation Pages for current and previous releases of this library can be found here](https://aws.github.io/Jobs-for-AWS-IoT-embedded-sdk/)** 4 | 5 | The AWS IoT Jobs library helps you notify connected IoT devices of a pending 6 | [Job](https://freertos.org/Documentation/03-Libraries/04-AWS-libraries/04-AWS-IoT-Jobs/02-Jobs-terminology). A Job can be used to 7 | manage your fleet of devices, update firmware and security certificates on your 8 | devices, or perform administrative tasks such as restarting devices and 9 | performing diagnostics. It interacts with the 10 | [AWS IoT Jobs service](https://docs.aws.amazon.com/iot/latest/developerguide/iot-jobs.html) 11 | using MQTT, a lightweight publish-subscribe protocol. This library provides a 12 | convenience API to compose and recognize the MQTT topic strings used by the Jobs 13 | service. The library is written in C compliant with ISO C90 and MISRA C:2012, 14 | and is distributed under the [MIT Open Source License](LICENSE). 15 | 16 | This library has gone through code quality checks including verification that no 17 | function has a 18 | [GNU Complexity](https://www.gnu.org/software/complexity/manual/complexity.html) 19 | score over 10, and checks against deviations from mandatory rules in the 20 | [MISRA coding standard](https://www.misra.org.uk). Deviations from the MISRA 21 | C:2012 guidelines are documented under [MISRA Deviations](MISRA.md). This 22 | library has also undergone both static code analysis from 23 | [Coverity](https://scan.coverity.com/), and validation of memory safety with the 24 | [CBMC bounded model checker](https://www.cprover.org/cbmc/). 25 | 26 | See memory requirements for this library 27 | [here](./docs/doxygen/include/size_table.md). 28 | 29 | **AWS IoT Jobs v1.5.1 30 | [source code](https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk/tree/v1.5.1/source) 31 | is part of the 32 | [FreeRTOS 202406.00 LTS](https://github.com/FreeRTOS/FreeRTOS-LTS/tree/202406.00-LTS) 33 | release.** 34 | 35 | ## Building the Jobs library 36 | 37 | A compiler that supports **C99 or later** such as _gcc_ is required to build the 38 | library. 39 | 40 | Additionally, coreJSON is required for parsing. To build the library, first run: 41 | ```bash 42 | git clone https://github.com/FreeRTOS/coreJSON.git --depth 1 --branch v3.2.0 43 | ``` 44 | 45 | Given an application in a file named `example.c`, _gcc_ can be used like so: 46 | 47 | ```bash 48 | gcc -I source/include -I coreJSON/source/include example.c coreJSON/source/core_json.c source/jobs.c -o example 49 | ``` 50 | 51 | _gcc_ can also produce an object file to be linked later: 52 | 53 | ```bash 54 | gcc -I source/include -I coreJSON/source/include -c source/jobs.c 55 | ``` 56 | 57 | ## CBMC 58 | 59 | To learn more about CBMC and proofs specifically, review the training material 60 | [here](https://model-checking.github.io/cbmc-training). 61 | 62 | The `test/cbmc` directory contains CBMC proofs. 
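The CI workflow earlier in this listing runs these proofs with the bundled `run_proofs.sh` script. A minimal local invocation might look like the sketch below, assuming CBMC and the rest of the proof tooling are installed as described in the next paragraph:

```bash
# Sketch: invoke the proof runner the same way CI does.
cd test/cbmc
./run_proofs.sh
```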
63 | 64 | In order to run these proofs you will need to install CBMC and other tools by 65 | following the instructions 66 | [here](https://model-checking.github.io/cbmc-training/installation.html). 67 | 68 | ## Reference example 69 | 70 | The AWS IoT Device SDK for Embedded C repository contains a demo using the jobs 71 | library on a POSIX platform. 72 | https://github.com/aws/aws-iot-device-sdk-embedded-C/tree/main/demos/jobs/jobs_demo_mosquitto 73 | 74 | ## Documentation 75 | 76 | ### Existing Documentation 77 | 78 | For pre-generated documentation, please see the documentation linked in the 79 | locations below: 80 | 81 | | Location | 82 | | :--------------------------------------------------------------------------------------------------------------------------: | 83 | | [AWS IoT Device SDK for Embedded C](https://github.com/aws/aws-iot-device-sdk-embedded-C#releases-and-documentation) | 84 | | [FreeRTOS.org](https://aws.github.io/Jobs-for-AWS-IoT-embedded-sdk/latest/) | 85 | 86 | Note that the latest included version of the AWS IoT Jobs library may differ 87 | across repositories. 88 | 89 | ### Generating Documentation 90 | 91 | The Doxygen references were created using Doxygen version 1.9.2. To generate the 92 | Doxygen pages, please run the following command from the root of this 93 | repository: 94 | 95 | ```shell 96 | doxygen docs/doxygen/config.doxyfile 97 | ``` 98 | 99 | ## Building unit tests 100 | 101 | ### Platform Prerequisites 102 | 103 | - For running unit tests 104 | - C99 compiler like gcc 105 | - CMake 3.13.0 or later 106 | - Ruby 2.0.0 or later is additionally required for the Unity test framework 107 | (that we use). 108 | - For running the coverage target, lcov is additionally required. 109 | 110 | ### Steps to build Unit Tests 111 | 112 | 1. Create build directory: `mkdir build` 113 | 114 | 1. Run _cmake_ while inside build directory: `cmake -S test/ -B build/` 115 | 116 | 1. Change to build directory: `cd build` 117 | 118 | 1. Run this command to build the library and unit tests: `make all` 119 | 120 | 1. The generated test executables will be present in `build/bin/tests` folder. 121 | 122 | 1. Run `ctest` to execute all tests and view the test run summary. 123 | 124 | ## Contributing 125 | 126 | See [CONTRIBUTING.md](./.github/CONTRIBUTING.md) for information on 127 | contributing. 128 | -------------------------------------------------------------------------------- /test/cbmc/lib/summarize.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | import argparse 5 | import json 6 | import logging 7 | import os 8 | import sys 9 | 10 | 11 | DESCRIPTION = """Print 2 tables in GitHub-flavored Markdown that summarize 12 | an execution of CBMC proofs.""" 13 | 14 | 15 | def get_args(): 16 | """Parse arguments for summarize script.""" 17 | parser = argparse.ArgumentParser(description=DESCRIPTION) 18 | for arg in [{ 19 | "flags": ["--run-file"], 20 | "help": "path to the Litani run.json file", 21 | "required": True, 22 | }]: 23 | flags = arg.pop("flags") 24 | parser.add_argument(*flags, **arg) 25 | return parser.parse_args() 26 | 27 | 28 | def _get_max_length_per_column_list(data): 29 | ret = [len(item) + 1 for item in data[0]] 30 | for row in data[1:]: 31 | for idx, item in enumerate(row): 32 | ret[idx] = max(ret[idx], len(item) + 1) 33 | return ret 34 | 35 | 36 | def _get_table_header_separator(max_length_per_column_list): 37 | line_sep = "" 38 | for max_length_of_word_in_col in max_length_per_column_list: 39 | line_sep += "|" + "-" * (max_length_of_word_in_col + 1) 40 | line_sep += "|\n" 41 | return line_sep 42 | 43 | 44 | def _get_entries(max_length_per_column_list, row_data): 45 | entries = [] 46 | for row in row_data: 47 | entry = "" 48 | for idx, word in enumerate(row): 49 | max_length_of_word_in_col = max_length_per_column_list[idx] 50 | space_formatted_word = (max_length_of_word_in_col - len(word)) * " " 51 | entry += "| " + word + space_formatted_word 52 | entry += "|\n" 53 | entries.append(entry) 54 | return entries 55 | 56 | 57 | def _get_rendered_table(data): 58 | table = [] 59 | max_length_per_column_list = _get_max_length_per_column_list(data) 60 | entries = _get_entries(max_length_per_column_list, data) 61 | for idx, entry in enumerate(entries): 62 | if idx == 1: 63 | line_sep = _get_table_header_separator(max_length_per_column_list) 64 | table.append(line_sep) 65 | table.append(entry) 66 | table.append("\n") 67 | return "".join(table) 68 | 69 | 70 | def _get_status_and_proof_summaries(run_dict): 71 | """Parse a dict representing a Litani run and create lists summarizing the 72 | proof results. 73 | 74 | Parameters 75 | ---------- 76 | run_dict 77 | A dictionary representing a Litani run. 78 | 79 | 80 | Returns 81 | ------- 82 | A list of 2 lists. 83 | The first sub-list maps a status to the number of proofs with that status. 84 | The second sub-list maps each proof to its function and status. 85 | """ 86 | count_statuses = {} 87 | proofs = [["Function","Proof", "Status"]] 88 | for proof_pipeline in run_dict: 89 | status_pretty_name = proof_pipeline['status'] 90 | try: 91 | count_statuses[status_pretty_name] += 1 92 | except KeyError: 93 | count_statuses[status_pretty_name] = 1 94 | proofs.append([proof_pipeline["sourceLocation"]["function"], proof_pipeline["property"], status_pretty_name]) 95 | statuses = [["Status", "Count"]] 96 | for status, count in count_statuses.items(): 97 | statuses.append([status, str(count)]) 98 | return [statuses, proofs] 99 | 100 | 101 | def print_proof_results(out_file): 102 | """ 103 | Print 2 strings that summarize the proof results. 104 | When printing, each string will render as a GitHub flavored Markdown table. 
105 | """ 106 | output = "## Summary of CBMC proof results\n\n" 107 | with open(out_file, encoding='utf-8') as run_json: 108 | run_dict = json.load(run_json) 109 | 110 | """ 111 | Iterate through the json output until we get to the proof results 112 | Before this is info about loop unwinding, etc, which is not relevant 113 | to the summary page 114 | """ 115 | for proof_pipeline in run_dict: 116 | if ('result' not in proof_pipeline): 117 | continue 118 | else: 119 | #Set the run_dict to be only the proof results. 120 | run_dict = proof_pipeline['result'] 121 | 122 | status_table, proof_table = _get_status_and_proof_summaries(run_dict) 123 | for summary in (status_table, proof_table): 124 | output += _get_rendered_table(summary) 125 | 126 | print(output) 127 | sys.stdout.flush() 128 | 129 | github_summary_file = os.getenv("GITHUB_STEP_SUMMARY") 130 | if github_summary_file: 131 | with open(github_summary_file, "a") as handle: 132 | print(output, file=handle) 133 | handle.flush() 134 | else: 135 | logging.warning( 136 | "$GITHUB_STEP_SUMMARY not set, not writing summary file") 137 | 138 | msg = ( 139 | "Click the 'Summary' button to view a Markdown table " 140 | "summarizing all proof results") 141 | if run_dict["status"] != "success": 142 | logging.error("Not all proofs passed.") 143 | logging.error(msg) 144 | sys.exit(1) 145 | logging.info(msg) 146 | 147 | 148 | if __name__ == '__main__': 149 | args = get_args() 150 | logging.basicConfig(format="%(levelname)s: %(message)s") 151 | try: 152 | print_proof_results(args.run_file) 153 | except Exception as ex: # pylint: disable=broad-except 154 | logging.critical("Could not print results. Exception: %s", str(ex)) -------------------------------------------------------------------------------- /tools/cmock/create_test.cmake: -------------------------------------------------------------------------------- 1 | # Taken from amazon-freertos repository 2 | 3 | # function to create the test executable 4 | function(create_test test_name test_src link_list dep_list include_list) 5 | set(mocks_dir "${CMAKE_CURRENT_BINARY_DIR}/mocks") 6 | include(CTest) 7 | get_filename_component(test_src_absolute ${test_src} ABSOLUTE) 8 | add_custom_command( 9 | OUTPUT ${test_name}_runner.c 10 | COMMAND 11 | ruby ${cmock_SOURCE_DIR}/vendor/unity/auto/generate_test_runner.rb 12 | ${MODULE_ROOT_DIR}/tools/cmock/project.yml ${test_src_absolute} 13 | ${test_name}_runner.c 14 | DEPENDS ${test_src}) 15 | add_executable(${test_name} ${test_src} ${test_name}_runner.c) 16 | set_target_properties( 17 | ${test_name} 18 | PROPERTIES COMPILE_FLAG "-O0 -ggdb" 19 | RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin/tests" 20 | INSTALL_RPATH_USE_LINK_PATH TRUE 21 | LINK_FLAGS " \ 22 | -Wl,-rpath,${CMAKE_BINARY_DIR}/lib \ 23 | -Wl,-rpath,${CMAKE_CURRENT_BINARY_DIR}/lib") 24 | target_include_directories(${test_name} PUBLIC ${mocks_dir} ${include_list}) 25 | 26 | target_link_directories(${test_name} PUBLIC ${CMAKE_CURRENT_BINARY_DIR}) 27 | 28 | # link all libraries sent through parameters 29 | foreach(link IN LISTS link_list) 30 | target_link_libraries(${test_name} ${link}) 31 | endforeach() 32 | 33 | # add dependency to all the dep_list parameter 34 | foreach(dependency IN LISTS dep_list) 35 | add_dependencies(${test_name} ${dependency}) 36 | target_link_libraries(${test_name} ${dependency}) 37 | endforeach() 38 | target_link_libraries(${test_name} -lgcov unity) 39 | target_link_directories(${test_name} PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/lib) 40 | add_test( 41 | NAME ${test_name} 42 | 
COMMAND ${CMAKE_BINARY_DIR}/bin/tests/${test_name} 43 | WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) 44 | endfunction() 45 | 46 | # Run the C preprocessor on target files. Takes a CMAKE list of arguments to 47 | # pass to the C compiler 48 | function(preprocess_mock_list mock_name file_list compiler_args) 49 | set_property(GLOBAL PROPERTY ${mock_name}_processed TRUE) 50 | foreach(target_file IN LISTS file_list) 51 | # Has to be TARGET ALL so the file is pre-processed before CMOCK is executed 52 | # on the file. 53 | add_custom_command( 54 | OUTPUT ${target_file}.backup 55 | COMMAND scp ${target_file} ${target_file}.backup 56 | VERBATIM 57 | COMMAND ${CMAKE_C_COMPILER} -E ${compiler_args} ${target_file} > 58 | ${target_file}.out) 59 | add_custom_target( 60 | pre_${mock_name} 61 | COMMAND mv ${target_file}.out ${target_file} 62 | DEPENDS ${target_file}.backup) 63 | endforeach() 64 | 65 | # Clean up temporary files that were created. First we test to see if the 66 | # backup file still exists. If it does we revert the change made to the 67 | # original file. 68 | foreach(target_file IN LISTS file_list) 69 | add_custom_command( 70 | TARGET ${mock_name} 71 | POST_BUILD 72 | COMMAND test ! -e ${target_file}.backup || mv ${target_file}.backup 73 | ${target_file}) 74 | endforeach() 75 | endfunction() 76 | 77 | # Generates a mock library based on a module's header file places the generated 78 | # source file in the build directory @param mock_name: name of the target name 79 | # @param mock_list list of header files to mock @param cmock_config 80 | # configuration file of the cmock framework @param mock_include_list include 81 | # list for the target @param mock_define_list special definitions to control 82 | # compilation 83 | function(create_mock_list mock_name mock_list cmock_config mock_include_list 84 | mock_define_list) 85 | set(mocks_dir "${CMAKE_CURRENT_BINARY_DIR}/mocks") 86 | add_library(${mock_name} SHARED) 87 | foreach(mock_file IN LISTS mock_list) 88 | get_filename_component(mock_file_abs ${mock_file} ABSOLUTE) 89 | get_filename_component(mock_file_name ${mock_file} NAME_WLE) 90 | get_filename_component(mock_file_dir ${mock_file} DIRECTORY) 91 | add_custom_command( 92 | OUTPUT ${mocks_dir}/mock_${mock_file_name}.c 93 | COMMAND ruby ${cmock_SOURCE_DIR}/lib/cmock.rb -o${cmock_config} ${mock_file_abs} 94 | WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) 95 | target_sources(${mock_name} PUBLIC ${mocks_dir}/mock_${mock_file_name}.c) 96 | 97 | target_include_directories(${mock_name} PUBLIC ${mock_file_dir}) 98 | endforeach() 99 | target_include_directories(${mock_name} PUBLIC ${mocks_dir} 100 | ${mock_include_list}) 101 | set_target_properties( 102 | ${mock_name} 103 | PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib 104 | POSITION_INDEPENDENT_CODE ON) 105 | target_compile_definitions(${mock_name} PUBLIC ${mock_define_list}) 106 | target_link_libraries(${mock_name} cmock unity) 107 | endfunction() 108 | 109 | function(create_real_library target src_file real_include_list mock_name) 110 | add_library(${target} STATIC ${src_file}) 111 | target_include_directories(${target} PUBLIC ${real_include_list}) 112 | set_target_properties( 113 | ${target} 114 | PROPERTIES COMPILE_FLAGS "-Wextra -Wpedantic \ 115 | -fprofile-arcs -ftest-coverage -fprofile-generate \ 116 | -Wno-unused-but-set-variable" 117 | LINK_FLAGS "-fprofile-arcs -ftest-coverage \ 118 | -fprofile-generate " 119 | ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib) 120 | if(NOT (mock_name STREQUAL "")) 121 | 
add_dependencies(${target} ${mock_name}) 122 | target_link_libraries(${target} -l${mock_name} -lgcov) 123 | endif() 124 | endfunction() 125 | -------------------------------------------------------------------------------- /docs/doxygen/pages.dox: -------------------------------------------------------------------------------- 1 | /** 2 | @mainpage Overview 3 | @anchor jobs 4 | @brief AWS IoT Jobs client library, part of the AWS IoT Device SDK for Embedded C 5 | 6 |

7 | AWS IoT jobs can be used to define a set of remote operations that are sent to and executed on one or more devices connected to AWS IoT. 8 | For documentation of the service, please see the [AWS IoT Developer Guide](https://docs.aws.amazon.com/iot/latest/developerguide/iot-jobs.html). 9 | Interactions with the jobs service use MQTT, a lightweight publish-subscribe protocol. 10 | This library provides a convenience API to compose and recognize the MQTT topic strings used by the jobs service. 11 | The library is written in C and designed to be compliant with ISO C99 and MISRA C:2012. 12 | It has proofs showing safe memory use and no heap allocation, making it suitable for IoT microcontrollers, but also fully portable to other platforms. 13 |
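For illustration only, here is a minimal sketch of composing a topic with `Jobs_GetTopic`; the signature and enum value match those exercised by the CBMC proofs later in this listing, while the buffer size and thing name are arbitrary:

```c
#include <string.h>
#include <stdint.h>
#include "jobs.h"   /* from source/include */

void exampleComposeTopic( void )
{
    char topic[ 128 ];        /* caller-owned buffer; the library never allocates */
    size_t topicLength = 0U;
    const char * thingName = "MyThing";   /* illustrative thing name */

    JobsStatus_t status = Jobs_GetTopic( topic, sizeof( topic ),
                                         thingName, ( uint16_t ) strlen( thingName ),
                                         JobsStartNextSuccess,
                                         &topicLength );

    if( status == JobsSuccess )
    {
        /* topic now holds a NUL-terminated MQTT topic string of topicLength bytes. */
    }
}
```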

14 | 15 |

16 | AWS IoT OTA Job Parser can be used to parse out the fields of an incoming Job Document sent to a device from AWS IoT. 17 | The library is written in C and designed to be compliant with ISO C99 and MISRA C:2012. 18 | It has proofs showing safe memory use and no heap allocation, making it suitable for IoT microcontrollers, but also fully portable to other platforms. 19 |
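As a sketch of how this parser might be driven over a job document, the loop below follows the return convention shown by the unit tests later in this listing: the next file index while more files remain, 0 after the last file, and -1 on error or for non-OTA documents (error handling omitted):

```c
#include <stddef.h>
#include <stdint.h>
#include "ota_job_processor.h"   /* from source/otaJobParser/include */

void exampleParseJobDocument( const char * jobDoc, size_t jobDocLength )
{
    int8_t nextIndex = 0;

    do
    {
        AfrOtaJobDocumentFields_t fields = { 0 };

        /* Parse the file entry at nextIndex; on success the struct is populated. */
        nextIndex = otaParser_parseJobDocFile( jobDoc,
                                               jobDocLength,
                                               ( uint8_t ) nextIndex,
                                               &fields );

        if( nextIndex >= 0 )
        {
            /* Use fields.filepath, fields.fileSize, fields.signature, ... for this file. */
        }
    } while( nextIndex > 0 );
}
```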

20 | 21 | > **NOTE**: 22 | > If your application uses both AWS IoT Jobs library and [OTA library](https://github.com/aws/ota-for-aws-iot-embedded-sdk) to communicate with AWS IoT through a shared MQTT connection, we suggest that you keep the application logic that uses these libraries within a single task/thread. 23 | > As the OTA agent also makes calls to the AWS IoT Jobs service, keeping the use of libraries within the same thread context will avoid complexity of synchronizing communication with AWS IoT Jobs topics between multiple tasks/threads. 24 | > However, if you choose to use different tasks/threads for calling these libraries, please be aware that the OTA library will subscribe and configurably, unsubscribe from AWS IoT Jobs topics, and also attempt to send status updates for incoming non-OTA jobs, if your application configures the OTA library to handle custom jobs. 25 | 26 | @section jobs_memory_requirements Memory Requirements 27 | @brief Memory requirements of the jobs library. 28 | 29 | @include{doc} size_table.md 30 | */ 31 | 32 | /** 33 | @page jobs_features Features 34 | Jobs Library Features 35 | 36 |

Memory Usage

37 |

38 | All functions in the jobs library operate only on the buffers provided, and use only 39 | local variables on the stack. 40 |

41 | 42 |

Compliance & Coverage

43 |

44 | The jobs library is designed to be compliant with ISO C90 and MISRA C:2012. 45 | All functions are written to have minimal complexity. 46 | Unit tests and [CBMC](https://www.cprover.org/cbmc/) proofs are written to cover every path of execution and achieve 100% branch coverage. 47 |

48 | */ 49 | 50 | /** 51 | @page jobs_config Configurations 52 | @brief Configurations of the Jobs Library. 53 | 54 | @par configpagestyle 55 | 56 | These configuration settings are C pre-processor constants; they are set using a compiler option such as -D in gcc. 57 | 58 | @section THINGNAME_MAX_LENGTH 59 | @copydoc THINGNAME_MAX_LENGTH 60 | 61 | @section JOBID_MAX_LENGTH 62 | @copydoc JOBID_MAX_LENGTH 63 | */ 64 | 65 | /** 66 | @page jobs_functions Jobs Functions 67 | @brief Primary functions of the Jobs library:

68 | @subpage jobs_gettopic_function
69 | @subpage jobs_matchtopic_function
70 | @subpage jobs_getpending_function
71 | @subpage jobs_startnext_function
72 | @subpage jobs_startnextmsg_function
73 | @subpage jobs_describe_function
74 | @subpage jobs_update_function
75 | @subpage jobs_updatemsg_function
76 | @subpage jobs_getjobid_function
77 | @subpage jobs_getjobdocument_function
78 | @subpage jobs_isstartnextaccepted_function
79 | @subpage jobs_isjobupdatestatus_function
80 | 81 | @page jobs_gettopic_function Jobs_GetTopic 82 | @snippet jobs.h declare_jobs_gettopic 83 | @copydoc Jobs_GetTopic 84 | 85 | @page jobs_matchtopic_function Jobs_MatchTopic 86 | @snippet jobs.h declare_jobs_matchtopic 87 | @copydoc Jobs_MatchTopic 88 | 89 | @page jobs_getpending_function Jobs_GetPending 90 | @snippet jobs.h declare_jobs_getpending 91 | @copydoc Jobs_GetPending 92 | 93 | @page jobs_startnext_function Jobs_StartNext 94 | @snippet jobs.h declare_jobs_startnext 95 | @copydoc Jobs_StartNext 96 | 97 | @page jobs_startnextmsg_function Jobs_StartNextMsg 98 | @snippet jobs.h declare_jobs_startnextmsg 99 | @copydoc Jobs_StartNextMsg 100 | 101 | @page jobs_describe_function Jobs_Describe 102 | @snippet jobs.h declare_jobs_describe 103 | @copydoc Jobs_Describe 104 | 105 | @page jobs_update_function Jobs_Update 106 | @snippet jobs.h declare_jobs_update 107 | @copydoc Jobs_Update 108 | 109 | @page jobs_updatemsg_function Jobs_UpdateMsg 110 | @snippet jobs.h declare_jobs_updatemsg 111 | @copydoc Jobs_UpdateMsg 112 | 113 | @page jobs_getjobid_function Jobs_GetJobId 114 | @snippet jobs.h declare_jobs_getjobid 115 | @copydoc Jobs_GetJobId 116 | 117 | @page jobs_getjobdocument_function Jobs_GetJobDocument 118 | @snippet jobs.h declare_jobs_getjobdocument 119 | @copydoc Jobs_GetJobDocument 120 | 121 | @page jobs_isstartnextaccepted_function Jobs_IsStartNextAccepted 122 | @snippet jobs.h declare_jobs_isstartnextaccepted 123 | @copydoc Jobs_IsStartNextAccepted 124 | 125 | @page jobs_isjobupdatestatus_function Jobs_IsJobUpdateStatus 126 | @snippet jobs.h declare_jobs_isjobupdatestatus 127 | @copydoc Jobs_IsJobUpdateStatus 128 | */ 129 | 130 | /** 131 | @page ota_parser_functions OTA Job Parser Functions 132 | @brief Primary Functions of the OTA Job Parser library:

133 | @subpage populatejobdocfields_function
134 | @subpage otaparser_parsejobdocfile_function
135 | 136 | @page populatejobdocfields_function populateJobDocFields 137 | @snippet job_parser.h declare_populatejobdocfields 138 | @copydoc populateJobDocFields 139 | 140 | @page otaparser_parsejobdocfile_function otaParser_parseJobDocFile 141 | @snippet ota_job_processor.h declare_otaparser_parsejobdocfile 142 | @copydoc otaParser_parseJobDocFile 143 | */ 144 | 145 | /** 146 | @defgroup jobs_enum_types Enumerated Types 147 | @brief Enumerated types of the Jobs library 148 | */ 149 | 150 | /** 151 | @defgroup jobs_constants Constants 152 | @brief Constants defined in the Jobs library 153 | */ 154 | 155 | /** 156 | @defgroup jobs_structs Structs 157 | @brief Structs defined in the Jobs Library 158 | */ 159 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release automation 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | commit_id: 7 | description: 'Commit ID to tag and create a release for' 8 | required: true 9 | version_number: 10 | description: 'Release Version Number (Eg, v1.0.0)' 11 | required: true 12 | 13 | jobs: 14 | tag-commit: 15 | name: Tag commit 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout code 19 | uses: actions/checkout@v4 20 | with: 21 | ref: ${{ github.event.inputs.commit_id }} 22 | - name: Configure git identity 23 | run: | 24 | git config --global user.name ${{ github.actor }} 25 | git config --global user.email ${{ github.actor }}@users.noreply.github.com 26 | - name: create a new branch that references commit id 27 | run: git checkout -b ${{ github.event.inputs.version_number }} ${{ github.event.inputs.commit_id }} 28 | - name: Generate SBOM 29 | uses: FreeRTOS/CI-CD-Github-Actions/sbom-generator@main 30 | with: 31 | repo_path: ./ 32 | source_path: ./source 33 | - name: commit SBOM file 34 | run: | 35 | git add . 36 | git commit -m 'Update SBOM' 37 | git push -u origin ${{ github.event.inputs.version_number }} 38 | - name: Tag Commit and Push to remote 39 | run: | 40 | git tag ${{ github.event.inputs.version_number }} -a -m "AWS IoT Jobs ${{ github.event.inputs.version_number }}" 41 | git push origin --tags 42 | - name: Verify tag on remote 43 | run: | 44 | git tag -d ${{ github.event.inputs.version_number }} 45 | git remote update 46 | git checkout tags/${{ github.event.inputs.version_number }} 47 | git diff ${{ github.event.inputs.commit_id }} tags/${{ github.event.inputs.version_number }} 48 | create-zip: 49 | needs: tag-commit 50 | name: Create ZIP and verify package for release asset. 
51 | runs-on: ubuntu-latest 52 | steps: 53 | - name: Install ZIP tools 54 | run: sudo apt-get install zip unzip 55 | - name: Checkout code 56 | uses: actions/checkout@v4 57 | with: 58 | ref: ${{ github.event.inputs.commit_id }} 59 | path: Jobs-for-AWS-IoT-embedded-sdk 60 | submodules: recursive 61 | - name: Checkout disabled submodules 62 | run: | 63 | cd Jobs-for-AWS-IoT-embedded-sdk 64 | git submodule update --init --checkout --recursive 65 | - name: Create ZIP 66 | run: | 67 | zip -r Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip Jobs-for-AWS-IoT-embedded-sdk -x "*.git*" 68 | ls ./ 69 | - name: Validate created ZIP 70 | run: | 71 | mkdir zip-check 72 | mv Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip zip-check 73 | cd zip-check 74 | unzip Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip -d Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }} 75 | ls Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }} 76 | diff -r -x "*.git*" Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}/Jobs-for-AWS-IoT-embedded-sdk/ ../Jobs-for-AWS-IoT-embedded-sdk/ 77 | cd ../ 78 | - name: Build 79 | run: | 80 | cd zip-check/Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}/Jobs-for-AWS-IoT-embedded-sdk 81 | sudo apt-get install -y lcov 82 | cmake -S test -B build/ \ 83 | -G "Unix Makefiles" \ 84 | -DCMAKE_BUILD_TYPE=Debug \ 85 | -DBUILD_CLONE_SUBMODULES=ON \ 86 | -DCMAKE_C_FLAGS='--coverage -Wall -Wextra -Werror' 87 | make -C build/ all 88 | - name: Test 89 | run: | 90 | cd zip-check/Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}/Jobs-for-AWS-IoT-embedded-sdk/build/ 91 | ctest -E system --output-on-failure 92 | cd .. 93 | - name: Create artifact of ZIP 94 | uses: actions/upload-artifact@v4 95 | with: 96 | name: Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 97 | path: zip-check/Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 98 | deploy-doxygen: 99 | needs: tag-commit 100 | name: Deploy doxygen documentation 101 | runs-on: ubuntu-latest 102 | steps: 103 | - name: Doxygen generation 104 | uses: FreeRTOS/CI-CD-Github-Actions/doxygen-generation@main 105 | with: 106 | ref: ${{ github.event.inputs.version_number }} 107 | add_release: "true" 108 | create-release: 109 | permissions: 110 | id-token: write 111 | needs: 112 | - create-zip 113 | - deploy-doxygen 114 | name: Create Release and Upload Release Asset 115 | runs-on: ubuntu-latest 116 | steps: 117 | - name: Create Release 118 | id: create_release 119 | uses: actions/create-release@v1 120 | env: 121 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 122 | with: 123 | tag_name: ${{ github.event.inputs.version_number }} 124 | release_name: ${{ github.event.inputs.version_number }} 125 | body: Release ${{ github.event.inputs.version_number }} of AWS IoT Jobs. 
126 | draft: false 127 | prerelease: false 128 | - name: Download ZIP artifact 129 | uses: actions/download-artifact@v4.1.7 130 | with: 131 | name: Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 132 | - name: Upload Release Asset 133 | id: upload-release-asset 134 | uses: actions/upload-release-asset@v1 135 | env: 136 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 137 | with: 138 | upload_url: ${{ steps.create_release.outputs.upload_url }} 139 | asset_path: ./Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 140 | asset_name: Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 141 | asset_content_type: application/zip 142 | - name: Backup Release Asset 143 | uses: FreeRTOS/CI-CD-Github-Actions/artifact-backup@main 144 | with: 145 | artifact_path: ./Jobs-for-AWS-IoT-embedded-sdk-${{ github.event.inputs.version_number }}.zip 146 | release_tag: ${{ github.event.inputs.version_number }} 147 | 148 | -------------------------------------------------------------------------------- /docs/doxygen/layout.xml: -------------------------------------------------------------------------------- (Doxygen navigation layout file; its XML markup did not survive this listing, so the content is omitted here.) -------------------------------------------------------------------------------- /test/unit-test/ota_job_handler_utest.c: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License.
9 | */ 10 | 11 | #include 12 | 13 | #include "unity.h" 14 | 15 | #include "mock_job_parser.h" 16 | #include "ota_job_processor.h" 17 | 18 | #define JOB_DOC_ID "jobDocId" 19 | #define JOB_DOC_ID_LEN 8U 20 | #define AFR_OTA_DOCUMENT "{\"afr_ota\":{\"files\":[{\"filesize\":123456789}]}}" 21 | #define AFR_OTA_DOCUMENT_LENGTH ( sizeof( AFR_OTA_DOCUMENT ) - 1U ) 22 | #define MULTI_FILE_OTA_DOCUMENT \ 23 | "{\"afr_ota\":{\"files\":[{\"filesize\":1},{\"filesize\":2},{" \ 24 | "\"filesize\":3}]}}" 25 | #define MULTI_FILE_OTA_DOCUMENT_LENGTH \ 26 | ( sizeof( MULTI_FILE_OTA_DOCUMENT ) - 1U ) 27 | #define TOO_MANY_FILES_OTA_DOCUMENT \ 28 | "{\"afr_ota\":{\"files\":[{\"filesize\":1},{\"filesize\":2},{" \ 29 | "\"filesize\":3},{\"filesize\":4},{\"filesize\":5},{\"filesize\":6},{" \ 30 | "\"filesize\":7},{\"filesize\":8},{\"filesize\":9},{\"filesize\":10}]}}" 31 | #define TOO_MANY_FILES_OTA_DOCUMENT_LENGTH \ 32 | ( sizeof( TOO_MANY_FILES_OTA_DOCUMENT ) - 1U ) 33 | #define CUSTOM_DOCUMENT "{\"custom_job\":\"test\"}" 34 | #define CUSTOM_DOCUMENT_LENGTH ( sizeof( CUSTOM_DOCUMENT ) - 1U ) 35 | 36 | AfrOtaJobDocumentFields_t parsedFields; 37 | 38 | /* =========================== UNITY FIXTURES ============================ */ 39 | 40 | /* Called before each test method. */ 41 | void setUp() 42 | { 43 | parsedFields.signature = "expectedSignature"; 44 | parsedFields.signatureLen = strlen( "expectedSignature" ); 45 | parsedFields.filepath = "expectedFilepath"; 46 | parsedFields.filepathLen = strlen( "expectedFilepath" ); 47 | parsedFields.certfile = "expectedCertfile"; 48 | parsedFields.certfileLen = strlen( "expectedCertfile" ); 49 | parsedFields.authScheme = "expectedAuthScheme"; 50 | parsedFields.authSchemeLen = strlen( "expectedAuthScheme" ); 51 | parsedFields.imageRef = "expectedImageRef"; 52 | parsedFields.imageRefLen = strlen( "expectedImageRef" ); 53 | parsedFields.fileId = UINT32_MAX; 54 | parsedFields.fileSize = UINT32_MAX; 55 | parsedFields.fileType = UINT32_MAX; 56 | } 57 | 58 | /* Called after each test method. */ 59 | void tearDown() 60 | { 61 | } 62 | 63 | /* Called at the beginning of the whole suite. */ 64 | void suiteSetUp() 65 | { 66 | } 67 | 68 | /* Called at the end of the whole suite. */ 69 | int suiteTearDown( int numFailures ) 70 | { 71 | return numFailures; 72 | } 73 | 74 | /* 75 | * NOTE: In production, the string fields would not be null-terminated strings, 76 | * however since we're mocking the return we can force them to be 77 | * null-terminated for easier validation. 
78 | */ 79 | void verifyCallbackValues( AfrOtaJobDocumentFields_t * params ) 80 | { 81 | TEST_ASSERT_EQUAL_STRING( "expectedSignature", params->signature ); 82 | TEST_ASSERT_EQUAL( strlen( "expectedSignature" ), params->signatureLen ); 83 | TEST_ASSERT_EQUAL_STRING( "expectedFilepath", params->filepath ); 84 | TEST_ASSERT_EQUAL( strlen( "expectedFilepath" ), params->filepathLen ); 85 | TEST_ASSERT_EQUAL_STRING( "expectedCertfile", params->certfile ); 86 | TEST_ASSERT_EQUAL( strlen( "expectedCertfile" ), params->certfileLen ); 87 | TEST_ASSERT_EQUAL_STRING( "expectedAuthScheme", params->authScheme ); 88 | TEST_ASSERT_EQUAL( strlen( "expectedAuthScheme" ), params->authSchemeLen ); 89 | TEST_ASSERT_EQUAL_STRING( "expectedImageRef", params->imageRef ); 90 | TEST_ASSERT_EQUAL( strlen( "expectedImageRef" ), params->imageRefLen ); 91 | TEST_ASSERT_EQUAL( UINT32_MAX, params->fileId ); 92 | TEST_ASSERT_EQUAL( UINT32_MAX, params->fileSize ); 93 | TEST_ASSERT_EQUAL( UINT32_MAX, params->fileType ); 94 | } 95 | 96 | static void expectPopulateJobDocWithFileIndex( const char * document, 97 | size_t docLength, 98 | int index ) 99 | { 100 | populateJobDocFields_ExpectAndReturn( document, 101 | docLength, 102 | index, 103 | NULL, 104 | true ); 105 | populateJobDocFields_IgnoreArg_result(); 106 | populateJobDocFields_ReturnThruPtr_result( &parsedFields ); 107 | } 108 | 109 | /* =============================== TESTS =============================== */ 110 | 111 | void test_parseJobDocFile_returnsZero_whenSingleFileJob( void ) 112 | { 113 | expectPopulateJobDocWithFileIndex( AFR_OTA_DOCUMENT, 114 | AFR_OTA_DOCUMENT_LENGTH, 115 | 0 ); 116 | 117 | int8_t result = otaParser_parseJobDocFile( AFR_OTA_DOCUMENT, 118 | AFR_OTA_DOCUMENT_LENGTH, 119 | 0U, 120 | &parsedFields ); 121 | 122 | TEST_ASSERT_EQUAL( 0, result ); 123 | } 124 | 125 | void test_parseJobDocFile_returnsNextIndex_whenMultiFileIOTOtaJob( void ) 126 | { 127 | expectPopulateJobDocWithFileIndex( MULTI_FILE_OTA_DOCUMENT, 128 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 129 | 0 ); 130 | expectPopulateJobDocWithFileIndex( MULTI_FILE_OTA_DOCUMENT, 131 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 132 | 1 ); 133 | 134 | int8_t result = otaParser_parseJobDocFile( MULTI_FILE_OTA_DOCUMENT, 135 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 136 | 0U, 137 | &parsedFields ); 138 | 139 | TEST_ASSERT_EQUAL( 1, result ); 140 | 141 | result = otaParser_parseJobDocFile( MULTI_FILE_OTA_DOCUMENT, 142 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 143 | 1U, 144 | &parsedFields ); 145 | 146 | TEST_ASSERT_EQUAL( 2, result ); 147 | } 148 | 149 | void test_parseJobDocFile_returnsZero_whenLastFileIndex( void ) 150 | { 151 | expectPopulateJobDocWithFileIndex( MULTI_FILE_OTA_DOCUMENT, 152 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 153 | 2 ); 154 | 155 | int8_t result = otaParser_parseJobDocFile( MULTI_FILE_OTA_DOCUMENT, 156 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 157 | 2U, 158 | &parsedFields ); 159 | 160 | TEST_ASSERT_EQUAL( 0, result ); 161 | } 162 | 163 | void test_parseJobDocFile_returnsNegativeOne_whenIndexOutOfRange( void ) 164 | { 165 | int8_t result = otaParser_parseJobDocFile( AFR_OTA_DOCUMENT, 166 | AFR_OTA_DOCUMENT_LENGTH, 167 | 1U, 168 | &parsedFields ); 169 | 170 | TEST_ASSERT_EQUAL( -1, result ); 171 | } 172 | 173 | void test_parseJobDocFile_returnsNegativeOne_whenParsingFails( void ) 174 | { 175 | populateJobDocFields_ExpectAndReturn( AFR_OTA_DOCUMENT, 176 | AFR_OTA_DOCUMENT_LENGTH, 177 | 0, 178 | NULL, 179 | false ); 180 | populateJobDocFields_IgnoreArg_result(); 181 | 182 | int8_t result = otaParser_parseJobDocFile( 
AFR_OTA_DOCUMENT, 183 | AFR_OTA_DOCUMENT_LENGTH, 184 | 0U, 185 | &parsedFields ); 186 | 187 | TEST_ASSERT_EQUAL( -1, result ); 188 | } 189 | 190 | void test_parseJobDocFile_returnsNegativeOne_whenMultiFileParsingFails( void ) 191 | { 192 | populateJobDocFields_ExpectAndReturn( MULTI_FILE_OTA_DOCUMENT, 193 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 194 | 0, 195 | NULL, 196 | false ); 197 | populateJobDocFields_IgnoreArg_result(); 198 | 199 | int8_t result = otaParser_parseJobDocFile( MULTI_FILE_OTA_DOCUMENT, 200 | MULTI_FILE_OTA_DOCUMENT_LENGTH, 201 | 0, 202 | &parsedFields ); 203 | 204 | TEST_ASSERT_EQUAL( -1, result ); 205 | } 206 | 207 | void test_parseJobDocFile_returnsNegativeOne_whenCustomJob( void ) 208 | { 209 | int8_t result = otaParser_parseJobDocFile( CUSTOM_DOCUMENT, 210 | CUSTOM_DOCUMENT_LENGTH, 211 | 0U, 212 | &parsedFields ); 213 | 214 | TEST_ASSERT_EQUAL( -1, result ); 215 | } 216 | 217 | void test_parseJobDocFile_returnsFalse_givenNullJobDocument( void ) 218 | { 219 | int8_t result = otaParser_parseJobDocFile( NULL, 220 | CUSTOM_DOCUMENT_LENGTH, 221 | 0U, 222 | &parsedFields ); 223 | 224 | TEST_ASSERT_EQUAL( -1, result ); 225 | } 226 | 227 | void test_parseJobDocFile_returnsFalse_givenZeroDocumentLength( void ) 228 | { 229 | int8_t result = otaParser_parseJobDocFile( AFR_OTA_DOCUMENT, 230 | 0U, 231 | 0U, 232 | &parsedFields ); 233 | 234 | TEST_ASSERT_EQUAL( -1, result ); 235 | } 236 | -------------------------------------------------------------------------------- /test/cbmc/proofs.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | #include "jobs.h" 8 | #include "core_json.h" 9 | #include "jobs_annex.h" 10 | #include "../../source/otaJobParser/include/job_parser.h" 11 | #include "../../source/otaJobParser/include/ota_job_processor.h" 12 | 13 | #ifndef UNWIND_COUNT 14 | #define UNWIND_COUNT 10 15 | #endif 16 | 17 | #define CBMC_MAX_OBJECT_SIZE ( PTRDIFF_MAX ) 18 | #define CBMC_MAX_BUFSIZE ( UNWIND_COUNT ) 19 | #define CBMC_THINGNAME_MAX_LEN ( UNWIND_COUNT - 1 ) 20 | #define CBMC_JOBID_MAX_LEN ( UNWIND_COUNT - 1 ) 21 | #define CBMC_JOBDOC_MAX_LEN ( UNWIND_COUNT - 1 ) 22 | #define CBMC_TOPIC_MAX_LEN ( UNWIND_COUNT - 1 ) 23 | 24 | /* utils */ 25 | int nondet_int( void ); 26 | 27 | JobCurrentStatus_t nondet_JobCurrentStatus( void ) 28 | { 29 | int jobStatus[] = { Queued, InProgress, Failed, Succeeded, Rejected }; 30 | 31 | int index = nondet_int(); 32 | 33 | __CPROVER_assume( ( index >= 0 ) && ( index <= ( sizeof( jobStatus ) / sizeof( jobStatus[ 0 ] ) ) - 1 ) ); 34 | 35 | return jobStatus[ index ]; 36 | } 37 | 38 | JobUpdateStatus_t nondet_JobUpdateStatus( void ) 39 | { 40 | int updateStatus[] = { JobUpdateStatus_Accepted, JobUpdateStatus_Rejected }; 41 | 42 | int index = nondet_int(); 43 | 44 | __CPROVER_assume( ( index >= 0 ) && ( index <= ( sizeof( updateStatus ) / sizeof( updateStatus[ 0 ] ) ) - 1 ) ); 45 | 46 | return updateStatus[ index ]; 47 | } 48 | 49 | JobsTopic_t nondet_JobsTopic_t( void ) 50 | { 51 | int jobsTopics[] = 52 | { 53 | JobsInvalidTopic, JobsJobsChanged, JobsNextJobChanged, 54 | JobsGetPendingSuccess, JobsGetPendingFailed, JobsStartNextSuccess, 55 | JobsStartNextFailed, JobsDescribeSuccess, JobsDescribeFailed, 56 | JobsUpdateSuccess, JobsUpdateFailed, JobsMaxTopic 57 | }; 58 | 59 | int index = nondet_int(); 60 | 61 | __CPROVER_assume( ( index >= 0 ) && ( index <= ( sizeof( jobsTopics ) / sizeof( jobsTopics[ 0 ] ) ) - 1 ) ); 62 | 63 | return jobsTopics[ index 
]; 64 | } 65 | /* end of utils */ 66 | 67 | void proof_Jobs_Describe( void ) 68 | { 69 | char * buffer; 70 | size_t bufferLength; 71 | const char * thingName; 72 | uint16_t thingNameLength; 73 | const char * jobId; 74 | uint16_t jobIdLength; 75 | size_t * outLength; 76 | JobsStatus_t ret; 77 | 78 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 79 | __CPROVER_assume( bufferLength < CBMC_MAX_OBJECT_SIZE ); 80 | buffer = malloc( bufferLength ); 81 | 82 | /* The thing name length must not exceed unwindings. */ 83 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 84 | thingName = malloc( thingNameLength ); 85 | 86 | /* The job ID length must not exceed unwindings. */ 87 | __CPROVER_assume( jobIdLength <= CBMC_JOBID_MAX_LEN ); 88 | jobId = malloc( jobIdLength ); 89 | 90 | outLength = malloc( sizeof( *outLength ) ); 91 | 92 | ret = Jobs_Describe( buffer, 93 | bufferLength, 94 | thingName, 95 | thingNameLength, 96 | jobId, 97 | jobIdLength, 98 | outLength ); 99 | 100 | __CPROVER_assert( jobsDescribeEnum( ret ), "The return value is a subset of JobsStatus_t." ); 101 | 102 | if( ( ret != JobsBadParameter ) && ( outLength != NULL ) ) 103 | { 104 | __CPROVER_assert( ( *outLength < bufferLength ), "Buffer writes do not exceed buffer length." ); 105 | 106 | __CPROVER_assert( ( buffer[ *outLength ] == '\0' ), "Buffer is NULL terminated." ); 107 | } 108 | } 109 | 110 | void proof_Jobs_GetPending( void ) 111 | { 112 | char * buffer; 113 | size_t bufferLength; 114 | const char * thingName; 115 | uint16_t thingNameLength; 116 | size_t * outLength; 117 | JobsStatus_t ret; 118 | 119 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 120 | __CPROVER_assume( bufferLength < CBMC_MAX_OBJECT_SIZE ); 121 | buffer = malloc( bufferLength ); 122 | 123 | /* The thing name length must not exceed unwindings. */ 124 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 125 | thingName = malloc( thingNameLength ); 126 | 127 | outLength = malloc( sizeof( *outLength ) ); 128 | 129 | ret = Jobs_GetPending( buffer, 130 | bufferLength, 131 | thingName, 132 | thingNameLength, 133 | outLength ); 134 | 135 | __CPROVER_assert( jobsGetPendingEnum( ret ), "The return value is a subset of JobsStatus_t." ); 136 | 137 | if( ( ret != JobsBadParameter ) && ( outLength != NULL ) ) 138 | { 139 | __CPROVER_assert( ( *outLength < bufferLength ), "Buffer writes do not exceed buffer length." ); 140 | 141 | __CPROVER_assert( ( buffer[ *outLength ] == '\0' ), "Buffer is NUL terminated." ); 142 | } 143 | } 144 | 145 | void proof_Jobs_GetTopic( void ) 146 | { 147 | char * buffer; 148 | size_t bufferLength; 149 | const char * thingName; 150 | uint16_t thingNameLength; 151 | JobsTopic_t api = nondet_JobsTopic_t(); 152 | size_t * outLength; 153 | JobsStatus_t ret; 154 | 155 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 156 | __CPROVER_assume( bufferLength < CBMC_MAX_OBJECT_SIZE ); 157 | buffer = malloc( bufferLength ); 158 | 159 | /* The thing name length must not exceed unwindings. */ 160 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 161 | thingName = malloc( thingNameLength ); 162 | 163 | outLength = malloc( sizeof( *outLength ) ); 164 | 165 | ret = Jobs_GetTopic( buffer, 166 | bufferLength, 167 | thingName, 168 | thingNameLength, 169 | api, 170 | outLength ); 171 | 172 | __CPROVER_assert( jobsGetTopicEnum( ret ), "The return value is a subset of JobsStatus_t." 
); 173 | 174 | if( ( ret != JobsBadParameter ) && ( outLength != NULL ) ) 175 | { 176 | __CPROVER_assert( ( *outLength < bufferLength ), "Buffer writes do not exceed buffer length." ); 177 | 178 | __CPROVER_assert( ( buffer[ *outLength ] == '\0' ), "Buffer is NUL terminated." ); 179 | } 180 | } 181 | 182 | void proof_Jobs_MatchTopic( void ) 183 | { 184 | char * topic; 185 | size_t topicLength; 186 | const char * thingName; 187 | uint16_t thingNameLength; 188 | JobsTopic_t * outApi; 189 | char ** outJobId; 190 | uint16_t * outJobIdLength; 191 | JobsStatus_t ret; 192 | 193 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 194 | __CPROVER_assume( topicLength < CBMC_TOPIC_MAX_LEN ); 195 | topic = malloc( topicLength ); 196 | 197 | /* The thing name length must not exceed unwindings. */ 198 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 199 | thingName = malloc( thingNameLength ); 200 | 201 | outApi = malloc( sizeof( *outApi ) ); 202 | outJobId = malloc( sizeof( *outJobId ) ); 203 | outJobIdLength = malloc( sizeof( *outJobIdLength ) ); 204 | 205 | ret = Jobs_MatchTopic( topic, 206 | topicLength, 207 | thingName, 208 | thingNameLength, 209 | outApi, 210 | outJobId, 211 | outJobIdLength ); 212 | 213 | __CPROVER_assert( jobsMatchTopicEnum( ret ), "The return value is a subset of JobsStatus_t." ); 214 | 215 | if( ret == JobsSuccess ) 216 | { 217 | if( outApi != NULL ) 218 | { 219 | __CPROVER_assert( jobsTopicEnum( *outApi ), "The API value is a JobsTopic_t enum." ); 220 | } 221 | 222 | if( ( outJobId != NULL ) && ( *outJobId != NULL ) ) 223 | { 224 | __CPROVER_assert( ( ( *outJobId > topic ) && ( *outJobId < ( topic + topicLength ) ) ), 225 | "The output parameter for jobId points within the topic string." ); 226 | } 227 | 228 | if( ( outJobIdLength != NULL ) && ( *outJobIdLength > 0 ) ) 229 | { 230 | __CPROVER_assert( ( *outJobIdLength < topicLength ), 231 | "The length of the jobId part of the topic is less than the length of the topic." ); 232 | } 233 | } 234 | } 235 | 236 | void proof_Jobs_StartNext( void ) 237 | { 238 | char * buffer; 239 | size_t bufferLength; 240 | const char * thingName; 241 | uint16_t thingNameLength; 242 | size_t * outLength; 243 | JobsStatus_t ret; 244 | 245 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 246 | __CPROVER_assume( bufferLength < CBMC_MAX_OBJECT_SIZE ); 247 | buffer = malloc( bufferLength ); 248 | 249 | /* The thing name length must not exceed unwindings. */ 250 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 251 | thingName = malloc( thingNameLength ); 252 | 253 | outLength = malloc( sizeof( *outLength ) ); 254 | 255 | ret = Jobs_StartNext( buffer, 256 | bufferLength, 257 | thingName, 258 | thingNameLength, 259 | outLength ); 260 | 261 | __CPROVER_assert( jobsStartNextEnum( ret ), "The return value is a subset of JobsStatus_t." ); 262 | 263 | if( ( ret != JobsBadParameter ) && ( outLength != NULL ) ) 264 | { 265 | __CPROVER_assert( ( *outLength < bufferLength ), "Buffer writes do not exceed buffer length." ); 266 | 267 | __CPROVER_assert( ( buffer[ *outLength ] == '\0' ), "Buffer is NULL terminated." 
); 268 | } 269 | } 270 | 271 | void proof_Jobs_Update( void ) 272 | { 273 | char * buffer; 274 | size_t bufferLength; 275 | const char * thingName; 276 | uint16_t thingNameLength; 277 | const char * jobId; 278 | uint16_t jobIdLength; 279 | size_t * outLength; 280 | JobsStatus_t ret; 281 | 282 | /* The buffer length must not exceed the maximum object size supported by CBMC. */ 283 | __CPROVER_assume( bufferLength < CBMC_MAX_OBJECT_SIZE ); 284 | buffer = malloc( bufferLength ); 285 | 286 | /* The thing name length must not exceed unwindings. */ 287 | __CPROVER_assume( thingNameLength <= CBMC_THINGNAME_MAX_LEN ); 288 | thingName = malloc( thingNameLength ); 289 | 290 | /* The job ID length must not exceed unwindings. */ 291 | __CPROVER_assume( jobIdLength <= CBMC_JOBID_MAX_LEN ); 292 | jobId = malloc( jobIdLength ); 293 | 294 | outLength = malloc( sizeof( *outLength ) ); 295 | 296 | ret = Jobs_Update( buffer, 297 | bufferLength, 298 | thingName, 299 | thingNameLength, 300 | jobId, 301 | jobIdLength, 302 | outLength ); 303 | 304 | __CPROVER_assert( jobsUpdateEnum( ret ), "The return value is a subset of JobsStatus_t." ); 305 | 306 | if( ( ret != JobsBadParameter ) && ( outLength != NULL ) ) 307 | { 308 | __CPROVER_assert( ( *outLength < bufferLength ), "Buffer writes do not exceed buffer length." ); 309 | 310 | __CPROVER_assert( ( buffer[ *outLength ] == '\0' ), "Buffer is NUL terminated." ); 311 | } 312 | } 313 | 314 | void proof_Jobs_IsStartNextAccepted( void ) 315 | { 316 | bool ret; 317 | const char * topic; 318 | const size_t topicLength; 319 | const char * thingName; 320 | const size_t thingNameLength; 321 | 322 | __CPROVER_assume( topicLength < CBMC_TOPIC_MAX_LEN ); 323 | topic = malloc( topicLength ); 324 | 325 | __CPROVER_assume( thingNameLength < CBMC_THINGNAME_MAX_LEN ); 326 | thingName = malloc( thingNameLength ); 327 | 328 | 329 | ret = Jobs_IsStartNextAccepted( topic, 330 | topicLength, 331 | thingName, 332 | thingNameLength ); 333 | 334 | __CPROVER_assert( ( ret == 0 || ret == 1 ), "Return value is bool" ); 335 | } 336 | 337 | void proof_Jobs_IsJobUpdateStatus( void ) 338 | { 339 | bool ret; 340 | const char * topic; 341 | const size_t topicLength; 342 | const char * thingName; 343 | const size_t thingNameLength; 344 | const char * jobId; 345 | const size_t jobIdLength; 346 | JobUpdateStatus_t expectedStatus = nondet_JobUpdateStatus(); 347 | 348 | __CPROVER_assume( topicLength < CBMC_TOPIC_MAX_LEN ); 349 | topic = malloc( topicLength ); 350 | 351 | __CPROVER_assume( thingNameLength < CBMC_THINGNAME_MAX_LEN ); 352 | thingName = malloc( thingNameLength ); 353 | 354 | __CPROVER_assume( jobIdLength < CBMC_JOBID_MAX_LEN ); 355 | jobId = malloc( jobIdLength ); 356 | 357 | ret = Jobs_IsJobUpdateStatus( topic, 358 | topicLength, 359 | jobId, 360 | jobIdLength, 361 | thingName, 362 | thingNameLength, 363 | expectedStatus ); 364 | 365 | __CPROVER_assert( ( ret == 0 || ret == 1 ), "Return value is bool" ); 366 | } 367 | 368 | void proof_Jobs_GetJobId( void ) 369 | { 370 | const char * message; 371 | const size_t messageLength; 372 | char * jobId = NULL; 373 | size_t ret; 374 | 375 | __CPROVER_assume( messageLength <= CBMC_MAX_OBJECT_SIZE ); 376 | message = malloc( messageLength ); 377 | 378 | ret = Jobs_GetJobId( message, 379 | messageLength, 380 | &jobId ); 381 | } 382 | 383 | void proof_Jobs_GetJobDocument( void ) 384 | { 385 | const char * message; 386 | size_t messageLength; 387 | char * jobdoc; 388 | size_t ret; 389 | 390 | __CPROVER_assume( messageLength <= CBMC_MAX_OBJECT_SIZE ); 
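    /* The message buffer allocated below is filled with unconstrained
     * (nondeterministic) bytes, so CBMC checks Jobs_GetJobDocument against
     * every possible message of the assumed length. */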
391 | message = malloc( messageLength ); 392 | 393 | ret = Jobs_GetJobDocument( message, 394 | messageLength, 395 | &jobdoc ); 396 | } 397 | 398 | 399 | void proof_Jobs_StartNextMsg( void ) 400 | { 401 | const char * clientToken; 402 | size_t clientTokenLength; 403 | const char * buffer; 404 | size_t bufferLength; 405 | size_t ret; 406 | 407 | __CPROVER_assume( clientTokenLength <= CBMC_MAX_BUFSIZE ); 408 | clientToken = malloc( clientTokenLength ); 409 | 410 | __CPROVER_assume( bufferLength <= CBMC_MAX_OBJECT_SIZE ); 411 | buffer = malloc( bufferLength ); 412 | 413 | ret = Jobs_StartNextMsg( clientToken, 414 | clientTokenLength, 415 | buffer, 416 | bufferLength ); 417 | } 418 | 419 | void proof_Jobs_UpdateMsg( void ) 420 | { 421 | JobCurrentStatus_t status = nondet_JobCurrentStatus(); 422 | char * expectedVersion; 423 | size_t expectedVersionLength; 424 | char * buffer; 425 | size_t bufferLength; 426 | size_t ret; 427 | 428 | __CPROVER_assume( expectedVersionLength <= CBMC_THINGNAME_MAX_LEN ); 429 | expectedVersion = malloc( expectedVersionLength ); 430 | 431 | __CPROVER_assume( bufferLength <= 64 ); 432 | buffer = malloc( bufferLength ); 433 | 434 | ret = Jobs_UpdateMsg( status, 435 | expectedVersion, 436 | expectedVersionLength, 437 | buffer, 438 | bufferLength ); 439 | } 440 | 441 | void proof_populateJobDocFields( void ) 442 | { 443 | const char * jobDoc; 444 | const size_t jobDocLength; 445 | int fileIndex; 446 | AfrOtaJobDocumentFields_t result = { 0 }; 447 | bool ret; 448 | 449 | __CPROVER_assume( jobDocLength <= CBMC_JOBDOC_MAX_LEN ); 450 | jobDoc = malloc( jobDocLength ); 451 | 452 | __CPROVER_assume( fileIndex >= 0 ); 453 | 454 | ret = populateJobDocFields( jobDoc, 455 | jobDocLength, 456 | fileIndex, 457 | &result ); 458 | } 459 | 460 | void proof_otaParser_parseJobDocFile( void ) 461 | { 462 | const char * jobDoc; 463 | const size_t jobDocLength; 464 | const uint8_t fileIndex; 465 | AfrOtaJobDocumentFields_t fields = { 0 }; 466 | int8_t ret; 467 | 468 | __CPROVER_assume( jobDocLength <= CBMC_JOBDOC_MAX_LEN ); 469 | jobDoc = malloc( jobDocLength ); 470 | 471 | ret = otaParser_parseJobDocFile( jobDoc, 472 | jobDocLength, 473 | fileIndex, 474 | &fields ); 475 | } 476 | 477 | int main() 478 | { 479 | proof_Jobs_Describe(); 480 | proof_Jobs_GetPending(); 481 | proof_Jobs_GetTopic(); 482 | proof_Jobs_MatchTopic(); 483 | proof_Jobs_StartNext(); 484 | proof_Jobs_Update(); 485 | proof_Jobs_IsStartNextAccepted(); 486 | proof_Jobs_IsJobUpdateStatus(); 487 | proof_Jobs_StartNextMsg(); 488 | proof_Jobs_UpdateMsg(); 489 | proof_Jobs_GetJobId(); 490 | proof_Jobs_GetJobDocument(); 491 | proof_populateJobDocFields(); 492 | proof_otaParser_parseJobDocFile(); 493 | } 494 | -------------------------------------------------------------------------------- /source/otaJobParser/job_parser.c: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License. 
9 | */ 10 | 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | #include "core_json.h" 17 | #include "job_parser.h" 18 | 19 | /** 20 | * @brief Populates common job document fields in result 21 | * 22 | * @param jobDoc FreeRTOS OTA job document 23 | * @param jobDocLength OTA job document length 24 | * @param fileIndex The index of the file to use 25 | * @param result Job document structure to populate 26 | * @return JSONStatus_t JSON parsing status 27 | */ 28 | static JSONStatus_t populateCommonFields( const char * jobDoc, 29 | const size_t jobDocLength, 30 | int32_t fileIndex, 31 | AfrOtaJobDocumentFields_t * result ); 32 | 33 | /** 34 | * @brief Populates optional, common job document fields in result 35 | * 36 | * @param jobDoc FreeRTOS OTA job document 37 | * @param jobDocLength OTA job document length 38 | * @param fileIndex The index of the file to use 39 | * @param result Job document structure to populate 40 | * @return JSONStatus_t JSON parsing status 41 | */ 42 | static JSONStatus_t populateOptionalCommonFields( const char * jobDoc, 43 | const size_t jobDocLength, 44 | int32_t fileIndex, 45 | AfrOtaJobDocumentFields_t * result ); 46 | 47 | /** 48 | * @brief Populates MQTT job document fields in result 49 | * 50 | * @param jobDoc FreeRTOS OTA job document 51 | * @param jobDocLength OTA job document length 52 | * @param result Job document structure to populate 53 | * @return JSONStatus_t JSON parsing status 54 | */ 55 | static JSONStatus_t populateMqttStreamingFields( const char * jobDoc, 56 | const size_t jobDocLength, 57 | AfrOtaJobDocumentFields_t * result ); 58 | 59 | /** 60 | * @brief Populates HTTP job document fields in result 61 | * 62 | * @param jobDoc FreeRTOS OTA job document 63 | * @param jobDocLength OTA job document length 64 | * @param fileIndex The index of the file to use 65 | * @param result Job document structure to populate 66 | * @return JSONStatus_t JSON parsing status 67 | */ 68 | static JSONStatus_t populateHttpStreamingFields( const char * jobDoc, 69 | const size_t jobDocLength, 70 | int32_t fileIndex, 71 | AfrOtaJobDocumentFields_t * result ); 72 | 73 | /** 74 | * @brief Assembles an indexed OTA file query 75 | * 76 | * @param fileIndex The file index 77 | * @param queryString The JSON element inside of the File JSON structure to 78 | * search for 79 | * @param queryStringLength The length of the query 80 | * @param result The resulting value of the query key 81 | * @param resultLength The length of the value 82 | */ 83 | static void buildIndexedFileQueryString( int32_t fileIndex, 84 | const char * queryString, 85 | size_t queryStringLength, 86 | char * result, 87 | size_t * resultLength ); 88 | 89 | /** 90 | * @brief Searches the JSON document for the uint32_t value 91 | * 92 | * @param jobDoc FreeRTOS OTA job document 93 | * @param jobDocLength OTA job document length 94 | * @param query The JSON path to query 95 | * @param queryLength The length of the JSON path 96 | * @param value Pointer to set uint32_t value 97 | * @return JSONStatus_t JSON parsing status 98 | */ 99 | static JSONStatus_t searchUintValue( const char * jobDoc, 100 | const size_t jobDocLength, 101 | const char * query, 102 | const size_t queryLength, 103 | uint32_t * value ); 104 | 105 | /** 106 | * @brief Convert a non-null terminated string to a unsigned 32-bit integer 107 | * 108 | * @param string String representation of 32-bit unsigned integer 109 | * @param length Length of the integer when represented as a string 110 | * @param value Unsigned 32-bit integer 
representation of the value 111 | * @return true Successfully converted to uint32 112 | * @return false Unsuccessfully converted to uint32 113 | */ 114 | static bool uintFromString( const char * string, 115 | const uint32_t length, 116 | uint32_t * value ); 117 | 118 | /** 119 | * @brief Check if a character is a digit 120 | * 121 | * @param c Character to validate 122 | * @return true Character is a 0-9 digit 123 | * @return false Character is not a digit 124 | */ 125 | static bool charIsDigit( const char c ); 126 | 127 | /** 128 | * @brief Check for multiplication overflow between two uint32 values 129 | * 130 | * @param a First uint32 value 131 | * @param b Second uint32 value 132 | * @return true If overflow will occur 133 | * @return false If overflow will not occur 134 | */ 135 | static bool multOverflowUnit32( const uint32_t a, 136 | const uint32_t b ); 137 | 138 | /** 139 | * @brief Check for addition overflow between two uint32 values 140 | * 141 | * @param a First uint32 value 142 | * @param b Second uint32 value 143 | * @return true If overflow will occur 144 | * @return false If overflow will not occur 145 | */ 146 | static bool addOverflowUint32( const uint32_t a, 147 | const uint32_t b ); 148 | 149 | bool populateJobDocFields( const char * jobDoc, 150 | const size_t jobDocLength, 151 | int32_t fileIndex, 152 | AfrOtaJobDocumentFields_t * result ) 153 | { 154 | bool populatedJobDocFields = false; 155 | JSONStatus_t jsonResult = JSONNotFound; 156 | const char * protocol = NULL; 157 | size_t protocolLength = 0U; 158 | 159 | /* TODO - Add assertions for NULL job docs or 0 length documents*/ 160 | jsonResult = populateCommonFields( jobDoc, jobDocLength, fileIndex, result ); 161 | 162 | if( jsonResult == JSONSuccess ) 163 | { 164 | jsonResult = JSON_SearchConst( jobDoc, 165 | jobDocLength, 166 | "afr_ota.protocols[0]", 167 | 20U, 168 | &protocol, 169 | &protocolLength, 170 | NULL ); 171 | } 172 | 173 | /* Determine if the supported protocol is MQTT or HTTP */ 174 | if( ( jsonResult == JSONSuccess ) && ( protocolLength == 4U ) ) 175 | { 176 | if( strncmp( "MQTT", protocol, protocolLength ) == 0 ) 177 | { 178 | jsonResult = populateMqttStreamingFields( jobDoc, 179 | jobDocLength, 180 | result ); 181 | } 182 | else 183 | { 184 | jsonResult = populateHttpStreamingFields( jobDoc, 185 | jobDocLength, 186 | fileIndex, 187 | result ); 188 | } 189 | } 190 | 191 | populatedJobDocFields = ( jsonResult == JSONSuccess ); 192 | 193 | /* Should this nullify the fields which have been populated before 194 | * returning? 
*/ 195 | return populatedJobDocFields; 196 | } 197 | 198 | static JSONStatus_t populateCommonFields( const char * jobDoc, 199 | const size_t jobDocLength, 200 | int32_t fileIndex, 201 | AfrOtaJobDocumentFields_t * result ) 202 | { 203 | JSONStatus_t jsonResult = JSONNotFound; 204 | const char * jsonValue = NULL; 205 | size_t jsonValueLength = 0U; 206 | char queryString[ 33 ]; 207 | size_t queryStringLength; 208 | 209 | if( fileIndex <= 9 ) 210 | { 211 | buildIndexedFileQueryString( fileIndex, 212 | "filesize", 213 | 8U, 214 | queryString, 215 | &queryStringLength ); 216 | jsonResult = searchUintValue( jobDoc, 217 | jobDocLength, 218 | queryString, 219 | queryStringLength, 220 | &( result->fileSize ) ); 221 | } 222 | else 223 | { 224 | jsonResult = JSONIllegalDocument; 225 | } 226 | 227 | if( jsonResult == JSONSuccess ) 228 | { 229 | buildIndexedFileQueryString( fileIndex, 230 | "fileid", 231 | 6U, 232 | queryString, 233 | &queryStringLength ); 234 | jsonResult = searchUintValue( jobDoc, 235 | jobDocLength, 236 | queryString, 237 | queryStringLength, 238 | &( result->fileId ) ); 239 | } 240 | 241 | if( jsonResult == JSONSuccess ) 242 | { 243 | buildIndexedFileQueryString( fileIndex, 244 | "filepath", 245 | 8U, 246 | queryString, 247 | &queryStringLength ); 248 | jsonResult = JSON_SearchConst( jobDoc, 249 | jobDocLength, 250 | queryString, 251 | queryStringLength, 252 | &jsonValue, 253 | &jsonValueLength, 254 | NULL ); 255 | result->filepath = jsonValue; 256 | result->filepathLen = ( uint32_t ) jsonValueLength; 257 | } 258 | 259 | if( jsonResult == JSONSuccess ) 260 | { 261 | buildIndexedFileQueryString( fileIndex, 262 | "certfile", 263 | 8U, 264 | queryString, 265 | &queryStringLength ); 266 | jsonResult = JSON_SearchConst( jobDoc, 267 | jobDocLength, 268 | queryString, 269 | queryStringLength, 270 | &jsonValue, 271 | &jsonValueLength, 272 | NULL ); 273 | result->certfile = jsonValue; 274 | result->certfileLen = ( uint32_t ) jsonValueLength; 275 | } 276 | 277 | if( jsonResult == JSONSuccess ) 278 | { 279 | buildIndexedFileQueryString( fileIndex, 280 | "sig-sha256-ecdsa", 281 | 16U, 282 | queryString, 283 | &queryStringLength ); 284 | jsonResult = JSON_SearchConst( jobDoc, 285 | jobDocLength, 286 | queryString, 287 | queryStringLength, 288 | &jsonValue, 289 | &jsonValueLength, 290 | NULL ); 291 | result->signature = jsonValue; 292 | result->signatureLen = ( uint32_t ) jsonValueLength; 293 | } 294 | 295 | if( jsonResult == JSONSuccess ) 296 | { 297 | jsonResult = populateOptionalCommonFields( jobDoc, 298 | jobDocLength, 299 | fileIndex, 300 | result ); 301 | } 302 | 303 | return jsonResult; 304 | } 305 | 306 | static JSONStatus_t populateOptionalCommonFields( const char * jobDoc, 307 | const size_t jobDocLength, 308 | int32_t fileIndex, 309 | AfrOtaJobDocumentFields_t * result ) 310 | { 311 | JSONStatus_t jsonResult = JSONNotFound; 312 | char queryString[ 33 ]; 313 | size_t queryStringLength; 314 | 315 | buildIndexedFileQueryString( fileIndex, 316 | "fileType", 317 | 8U, 318 | queryString, 319 | &queryStringLength ); 320 | jsonResult = searchUintValue( jobDoc, 321 | jobDocLength, 322 | queryString, 323 | queryStringLength, 324 | &( result->fileType ) ); 325 | 326 | return ( jsonResult == JSONBadParameter ) ? 
jsonResult : JSONSuccess; 327 | } 328 | 329 | static JSONStatus_t populateMqttStreamingFields( const char * jobDoc, 330 | const size_t jobDocLength, 331 | AfrOtaJobDocumentFields_t * result ) 332 | { 333 | JSONStatus_t jsonResult = JSONNotFound; 334 | const char * jsonValue = NULL; 335 | size_t jsonValueLength = 0U; 336 | 337 | jsonResult = JSON_SearchConst( jobDoc, 338 | jobDocLength, 339 | "afr_ota.streamname", 340 | 18U, 341 | &jsonValue, 342 | &jsonValueLength, 343 | NULL ); 344 | result->imageRef = jsonValue; 345 | result->imageRefLen = ( uint32_t ) jsonValueLength; 346 | 347 | /* If the stream name is empty, consider this an error */ 348 | if( jsonValueLength == 0U ) 349 | { 350 | jsonResult = JSONNotFound; 351 | } 352 | 353 | return jsonResult; 354 | } 355 | 356 | static JSONStatus_t populateHttpStreamingFields( const char * jobDoc, 357 | const size_t jobDocLength, 358 | int32_t fileIndex, 359 | AfrOtaJobDocumentFields_t * result ) 360 | { 361 | JSONStatus_t jsonResult = JSONNotFound; 362 | const char * jsonValue = NULL; 363 | size_t jsonValueLength = 0U; 364 | char queryString[ 33 ]; 365 | size_t queryStringLength; 366 | 367 | buildIndexedFileQueryString( fileIndex, 368 | "auth_scheme", 369 | 11U, 370 | queryString, 371 | &queryStringLength ); 372 | jsonResult = JSON_SearchConst( jobDoc, 373 | jobDocLength, 374 | queryString, 375 | queryStringLength, 376 | &jsonValue, 377 | &jsonValueLength, 378 | NULL ); 379 | result->authScheme = jsonValue; 380 | result->authSchemeLen = ( uint32_t ) jsonValueLength; 381 | 382 | if( jsonResult == JSONSuccess ) 383 | { 384 | buildIndexedFileQueryString( fileIndex, 385 | "update_data_url", 386 | 15U, 387 | queryString, 388 | &queryStringLength ); 389 | jsonResult = JSON_SearchConst( jobDoc, 390 | jobDocLength, 391 | queryString, 392 | queryStringLength, 393 | &jsonValue, 394 | &jsonValueLength, 395 | NULL ); 396 | result->imageRef = jsonValue; 397 | result->imageRefLen = ( uint32_t ) jsonValueLength; 398 | 399 | /* If the url is empty, consider this an error */ 400 | if( jsonValueLength == 0U ) 401 | { 402 | jsonResult = JSONNotFound; 403 | } 404 | } 405 | 406 | return jsonResult; 407 | } 408 | 409 | static void buildIndexedFileQueryString( int32_t fileIndex, 410 | const char * queryString, 411 | size_t queryStringLength, 412 | char * result, 413 | size_t * resultLength ) 414 | { 415 | /*TODO: Should there be a check on the length of the result buffer? */ 416 | ( void ) strncpy( result, ( const char * ) "afr_ota.files[", 15U ); 417 | int32_t index = ( fileIndex + ( int32_t ) '0' ); 418 | result[ 14 ] = ( char ) index; 419 | ( void ) strncpy( &result[ 15 ], ( const char * ) "].", 3U ); 420 | ( void ) memcpy( &result[ 17 ], queryString, queryStringLength ); 421 | 422 | *resultLength = 17U + queryStringLength; 423 | } 424 | 425 | static JSONStatus_t searchUintValue( const char * jobDoc, 426 | const size_t jobDocLength, 427 | const char * query, 428 | const size_t queryLength, 429 | uint32_t * value ) 430 | { 431 | bool numConversionSuccess = true; 432 | JSONStatus_t jsonResult = JSONNotFound; 433 | const char * jsonValue = NULL; 434 | size_t jsonValueLength = 0U; 435 | 436 | jsonResult = JSON_SearchConst( jobDoc, 437 | jobDocLength, 438 | query, 439 | queryLength, 440 | &jsonValue, 441 | &jsonValueLength, 442 | NULL ); 443 | 444 | if( jsonResult == JSONSuccess ) 445 | { 446 | numConversionSuccess = uintFromString( jsonValue, 447 | ( const uint32_t ) 448 | jsonValueLength, 449 | value ); 450 | } 451 | 452 | return ( numConversionSuccess ) ? 
jsonResult : JSONBadParameter; 453 | } 454 | 455 | static bool uintFromString( const char * string, 456 | const uint32_t length, 457 | uint32_t * value ) 458 | { 459 | bool ret = false; 460 | bool overflow = false; 461 | uint32_t retVal = 0U; 462 | size_t i; 463 | 464 | if( ( string != NULL ) && ( value != NULL ) ) 465 | { 466 | for( i = 0U; ( i < length ) && !overflow; i++ ) 467 | { 468 | char c = string[ i ]; 469 | 470 | if( !charIsDigit( c ) ) 471 | { 472 | break; 473 | } 474 | else 475 | { 476 | if( !multOverflowUnit32( retVal, 10U ) ) 477 | { 478 | retVal *= 10U; 479 | 480 | if( !addOverflowUint32( retVal, ( ( uint32_t ) c - ( uint32_t ) '0' ) ) ) 481 | { 482 | retVal += ( ( uint32_t ) c - ( uint32_t ) '0' ); 483 | } 484 | else 485 | { 486 | overflow = true; 487 | } 488 | } 489 | else 490 | { 491 | overflow = true; 492 | } 493 | } 494 | } 495 | 496 | if( ( length > 0U ) && ( i == length ) ) 497 | { 498 | *value = retVal; 499 | ret = true; 500 | } 501 | } 502 | 503 | return ret; 504 | } 505 | 506 | static bool charIsDigit( const char c ) 507 | { 508 | return ( c >= '0' ) && ( c <= '9' ); 509 | } 510 | 511 | static bool multOverflowUnit32( const uint32_t a, 512 | const uint32_t b ) 513 | { 514 | return ( b > 0U ) && ( a > ( UINT32_MAX / b ) ); 515 | } 516 | 517 | static bool addOverflowUint32( const uint32_t a, 518 | const uint32_t b ) 519 | { 520 | return a > ( UINT32_MAX - b ); 521 | } 522 | -------------------------------------------------------------------------------- /source/jobs.c: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | * 5 | * SPDX-License-Identifier: MIT 6 | * 7 | * Permission is hereby granted, free of charge, to any person obtaining a copy of 8 | * this software and associated documentation files (the "Software"), to deal in 9 | * the Software without restriction, including without limitation the rights to 10 | * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 11 | * the Software, and to permit persons to whom the Software is furnished to do so, 12 | * subject to the following conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be included in all 15 | * copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 19 | * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 20 | * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 21 | * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 22 | * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | */ 24 | 25 | /** 26 | * @file jobs.c 27 | * @brief Implementation of the APIs from jobs.h. 28 | */ 29 | 30 | #include 31 | #include 32 | #include 33 | 34 | /* Internal Includes */ 35 | #include "jobs.h" 36 | /* External Dependencies */ 37 | #include "core_json.h" 38 | 39 | /** @cond DO_NOT_DOCUMENT */ 40 | 41 | /** 42 | * @brief Get the length of a string literal. 43 | */ 44 | #define CONST_STRLEN( x ) ( sizeof( ( x ) ) - 1U ) 45 | 46 | /** 47 | * @brief Get the length on an array. 48 | */ 49 | #define ARRAY_LENGTH( x ) ( sizeof( ( x ) ) / sizeof( ( x )[ 0 ] ) ) 50 | 51 | /** 52 | * @brief Table of topic API strings in JobsTopic_t order. 
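 *
 * Each entry is the suffix appended after the common preamble written by
 * writePreamble() (the "$aws/things/<thingName>/jobs/" portion of an AWS IoT
 * Jobs topic) to form a complete topic; for example, the
 * #JobsStartNextSuccess entry corresponds to a topic of the form
 * $aws/things/<thingName>/jobs/start-next/accepted.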
53 | */ 54 | static const char * const apiTopic[] = 55 | { 56 | JOBS_API_JOBSCHANGED, 57 | JOBS_API_NEXTJOBCHANGED, 58 | JOBS_API_GETPENDING JOBS_API_SUCCESS, 59 | JOBS_API_GETPENDING JOBS_API_FAILURE, 60 | JOBS_API_STARTNEXT JOBS_API_SUCCESS, 61 | JOBS_API_STARTNEXT JOBS_API_FAILURE, 62 | JOBS_API_DESCRIBE JOBS_API_SUCCESS, 63 | JOBS_API_DESCRIBE JOBS_API_FAILURE, 64 | JOBS_API_UPDATE JOBS_API_SUCCESS, 65 | JOBS_API_UPDATE JOBS_API_FAILURE, 66 | }; 67 | 68 | /** 69 | * @brief Table of topic API string lengths in JobsTopic_t order. 70 | */ 71 | static const size_t apiTopicLength[] = 72 | { 73 | JOBS_API_JOBSCHANGED_LENGTH, 74 | JOBS_API_NEXTJOBCHANGED_LENGTH, 75 | JOBS_API_GETPENDING_LENGTH + JOBS_API_SUCCESS_LENGTH, 76 | JOBS_API_GETPENDING_LENGTH + JOBS_API_FAILURE_LENGTH, 77 | JOBS_API_STARTNEXT_LENGTH + JOBS_API_SUCCESS_LENGTH, 78 | JOBS_API_STARTNEXT_LENGTH + JOBS_API_FAILURE_LENGTH, 79 | JOBS_API_DESCRIBE_LENGTH + JOBS_API_SUCCESS_LENGTH, 80 | JOBS_API_DESCRIBE_LENGTH + JOBS_API_FAILURE_LENGTH, 81 | JOBS_API_UPDATE_LENGTH + JOBS_API_SUCCESS_LENGTH, 82 | JOBS_API_UPDATE_LENGTH + JOBS_API_FAILURE_LENGTH, 83 | }; 84 | 85 | static const char * const jobStatusString[] = 86 | { 87 | "QUEUED", 88 | "IN_PROGRESS", 89 | "FAILED", 90 | "SUCCEEDED", 91 | "REJECTED" 92 | }; 93 | 94 | /** 95 | * @brief Predicate returns true for a valid thing name or job ID character. 96 | * 97 | * @param[in] a character to check 98 | * @param[in] allowColon set to true for thing names 99 | * 100 | * @return true if the character is valid; 101 | * false otherwise 102 | */ 103 | static bool isValidChar( char a, 104 | bool allowColon ) 105 | { 106 | bool ret; 107 | 108 | if( ( a == '-' ) || ( a == '_' ) ) 109 | { 110 | ret = true; 111 | } 112 | else if( ( a >= '0' ) && ( a <= '9' ) ) 113 | { 114 | ret = true; 115 | } 116 | else if( ( a >= 'A' ) && ( a <= 'Z' ) ) 117 | { 118 | ret = true; 119 | } 120 | else if( ( a >= 'a' ) && ( a <= 'z' ) ) 121 | { 122 | ret = true; 123 | } 124 | else if( a == ':' ) 125 | { 126 | ret = allowColon; 127 | } 128 | else 129 | { 130 | ret = false; 131 | } 132 | 133 | return ret; 134 | } 135 | 136 | /** 137 | * @brief Predicate returns true for a valid identifier. 138 | * 139 | * The identifier may be a thing name or a job ID. 140 | * 141 | * @param[in] id character sequence to check 142 | * @param[in] length length of the character sequence 143 | * @param[in] max maximum length of a valid identifier 144 | * @param[in] allowColon set to true for thing names 145 | * 146 | * @return true if the identifier is valid; 147 | * false otherwise 148 | */ 149 | static bool isValidID( const char * id, 150 | uint16_t length, 151 | uint16_t max, 152 | bool allowColon ) 153 | { 154 | bool ret = false; 155 | 156 | if( ( id != NULL ) && ( length > 0U ) && 157 | ( length <= max ) ) 158 | { 159 | size_t i; 160 | 161 | for( i = 0; i < length; i++ ) 162 | { 163 | if( isValidChar( id[ i ], allowColon ) == false ) 164 | { 165 | break; 166 | } 167 | } 168 | 169 | ret = ( i == length ) ? true : false; 170 | } 171 | 172 | return ret; 173 | } 174 | 175 | 176 | /** 177 | * @brief Predicate returns true for a valid thing name string. 
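 *
 * Thing names accepted here consist of alphanumerics plus '-', '_' and ':'
 * (see isValidChar() with allowColon set), up to THINGNAME_MAX_LENGTH
 * characters.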
178 | * 179 | * @param[in] thingName character sequence to check 180 | * @param[in] thingNameLength length of the character sequence 181 | * 182 | * @return true if the thing name is valid; 183 | * false otherwise 184 | */ 185 | static bool isValidThingName( const char * thingName, 186 | uint16_t thingNameLength ) 187 | { 188 | return isValidID( thingName, thingNameLength, 189 | THINGNAME_MAX_LENGTH, true ); 190 | } 191 | 192 | /** 193 | * @brief Predicate returns true for a valid job ID string. 194 | * 195 | * @param[in] jobId character sequence to check 196 | * @param[in] jobIdLength length of the character sequence 197 | * 198 | * @return true if the job ID is valid; 199 | * false otherwise 200 | */ 201 | static bool isValidJobId( const char * jobId, 202 | uint16_t jobIdLength ) 203 | { 204 | return isValidID( jobId, jobIdLength, 205 | JOBID_MAX_LENGTH, false ); 206 | } 207 | 208 | /** 209 | * @brief A strncpy replacement based on lengths only. 210 | * 211 | * @param[in] buffer The buffer to be written. 212 | * @param[in,out] start The index at which to begin. 213 | * @param[in] max The size of the buffer. 214 | * @param[in] value The characters to copy. 215 | * @param[in] valueLength How many characters to copy. 216 | * 217 | * @return #JobsSuccess if the value was written to the buffer; 218 | * #JobsBufferTooSmall if the buffer cannot hold the entire value. 219 | * 220 | * @note There is no harm from calling this function when 221 | * start >= max. This allows for sequential calls to 222 | * strnAppend() where only the final call's return value 223 | * is needed. 224 | */ 225 | static JobsStatus_t strnAppend( char * buffer, 226 | size_t * start, 227 | size_t max, 228 | const char * value, 229 | size_t valueLength ) 230 | { 231 | size_t i, j = 0; 232 | 233 | assert( ( buffer != NULL ) && ( start != NULL ) && ( value != NULL ) ); 234 | 235 | i = *start; 236 | 237 | while( ( i < max ) && ( j < valueLength ) ) 238 | { 239 | buffer[ i ] = value[ j ]; 240 | i++; 241 | j++; 242 | } 243 | 244 | *start = i; 245 | 246 | return ( i < max ) ? JobsSuccess : JobsBufferTooSmall; 247 | } 248 | 249 | /** 250 | * @brief Populate the common leading portion of a topic string. 251 | * 252 | * @param[in] buffer The buffer to contain the topic string. 253 | * @param[in,out] start The index at which to begin. 254 | * @param[in] length The size of the buffer. 255 | * @param[in] thingName The device's thingName as registered with AWS IoT. 256 | * @param[in] thingNameLength The length of the thingName. 257 | */ 258 | static void writePreamble( char * buffer, 259 | size_t * start, 260 | size_t length, 261 | const char * thingName, 262 | uint16_t thingNameLength ) 263 | { 264 | ( void ) strnAppend( buffer, start, length, 265 | JOBS_API_PREFIX, JOBS_API_PREFIX_LENGTH ); 266 | ( void ) strnAppend( buffer, start, length, 267 | thingName, thingNameLength ); 268 | ( void ) strnAppend( buffer, start, length, 269 | JOBS_API_BRIDGE, JOBS_API_BRIDGE_LENGTH ); 270 | } 271 | 272 | #define checkThingParams() \ 273 | ( isValidThingName( thingName, thingNameLength ) == true ) 274 | 275 | #define checkCommonParams() \ 276 | ( ( buffer != NULL ) && ( length > 0UL ) && checkThingParams() ) 277 | 278 | /** @endcond */ 279 | 280 | /** 281 | * See jobs.h for docs. 282 | * 283 | * @brief Populate a topic string for a subscription request. 
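 *
 * A minimal usage sketch (illustrative only; the thing name, lengths and
 * buffer size are placeholder values):
 *
 *   char topic[ 64 ];
 *   size_t topicLength = 0U;
 *   JobsStatus_t status = Jobs_GetTopic( topic, sizeof( topic ),
 *                                        "MyThing", 7U,
 *                                        JobsNextJobChanged, &topicLength );
 *
 * On #JobsSuccess, topic holds the NUL terminated subscription topic and
 * topicLength holds its length.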
284 | */ 285 | JobsStatus_t Jobs_GetTopic( char * buffer, 286 | size_t length, 287 | const char * thingName, 288 | uint16_t thingNameLength, 289 | JobsTopic_t api, 290 | size_t * outLength ) 291 | { 292 | JobsStatus_t ret = JobsBadParameter; 293 | size_t start = 0U; 294 | 295 | if( checkCommonParams() && 296 | ( api > JobsInvalidTopic ) && ( api < JobsMaxTopic ) ) 297 | { 298 | writePreamble( buffer, &start, length, thingName, thingNameLength ); 299 | 300 | if( api >= JobsDescribeSuccess ) 301 | { 302 | ( void ) strnAppend( buffer, &start, length, 303 | "+/", ( CONST_STRLEN( "+/" ) ) ); 304 | } 305 | 306 | ret = strnAppend( buffer, &start, length, 307 | apiTopic[ api ], apiTopicLength[ api ] ); 308 | 309 | if( start == length ) 310 | { 311 | start--; 312 | } 313 | 314 | buffer[ start ] = '\0'; 315 | 316 | if( outLength != NULL ) 317 | { 318 | *outLength = start; 319 | } 320 | } 321 | 322 | return ret; 323 | } 324 | 325 | /** @cond DO_NOT_DOCUMENT */ 326 | 327 | /** 328 | * @brief Compare the leading n bytes of two character sequences. 329 | * 330 | * @param[in] a first character sequence 331 | * @param[in] b second character sequence 332 | * @param[in] n number of bytes 333 | * 334 | * @return JobsSuccess if the sequences are the same; 335 | * JobsNoMatch otherwise 336 | */ 337 | static JobsStatus_t strnEquals( const char * a, 338 | const char * b, 339 | size_t n ) 340 | { 341 | size_t i; 342 | 343 | assert( ( a != NULL ) && ( b != NULL ) ); 344 | 345 | for( i = 0U; i < n; i++ ) 346 | { 347 | if( a[ i ] != b[ i ] ) 348 | { 349 | break; 350 | } 351 | } 352 | 353 | return ( i == n ) ? JobsSuccess : JobsNoMatch; 354 | } 355 | 356 | /** 357 | * @brief Wrap strnEquals() with a check to compare two lengths. 358 | * 359 | * @param[in] a first character sequence 360 | * @param[in] aLength Length of a 361 | * @param[in] b second character sequence 362 | * @param[in] bLength Length of b 363 | * 364 | * @return JobsSuccess if the sequences are the same; 365 | * JobsNoMatch otherwise 366 | */ 367 | static JobsStatus_t strnnEq( const char * a, 368 | size_t aLength, 369 | const char * b, 370 | size_t bLength ) 371 | { 372 | JobsStatus_t ret = JobsNoMatch; 373 | 374 | if( aLength == bLength ) 375 | { 376 | ret = strnEquals( a, b, aLength ); 377 | } 378 | 379 | return ret; 380 | } 381 | 382 | /** 383 | * @brief Predicate returns true for a match to JOBS_API_JOBID_NEXT 384 | * 385 | * @param[in] jobId character sequence to check 386 | * @param[in] jobIdLength length of the character sequence 387 | * 388 | * @return true if the job ID matches; 389 | * false otherwise 390 | */ 391 | static bool isNextJobId( const char * jobId, 392 | uint16_t jobIdLength ) 393 | { 394 | bool ret = false; 395 | 396 | if( ( jobId != NULL ) && 397 | ( strnnEq( JOBS_API_JOBID_NEXT, JOBS_API_JOBID_NEXT_LENGTH, jobId, jobIdLength ) == JobsSuccess ) ) 398 | { 399 | ret = true; 400 | } 401 | 402 | return ret; 403 | } 404 | 405 | 406 | /** 407 | * @brief Parse a job ID and search for the API portion of a topic string in a table. 408 | * 409 | * @param[in] topic The topic string to check. 410 | * @param[in] topicLength The length of the topic string. 411 | * @param[out] outApi The jobs topic API value if present, e.g., #JobsUpdateSuccess. 412 | * @param[out] outJobId The beginning of the jobID in the topic string. 413 | * @param[out] outJobIdLength The length of the jobID in the topic string. 
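 *
 * For example, given the standard topic layout, a tail of the form
 * "<jobId>/update/accepted" yields #JobsUpdateSuccess, with outJobId pointing
 * at the leading job ID and outJobIdLength set to its length.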
414 | * 415 | * @return #JobsSuccess if a matching topic was found; 416 | * #JobsNoMatch if a matching topic was NOT found 417 | * (parameter outApi gets #JobsInvalidTopic ). 418 | */ 419 | static JobsStatus_t matchIdApi( char * topic, 420 | size_t topicLength, 421 | JobsTopic_t * outApi, 422 | char ** outJobId, 423 | uint16_t * outJobIdLength ) 424 | { 425 | JobsStatus_t ret = JobsNoMatch; 426 | size_t i; 427 | char * p = topic; 428 | size_t length = topicLength; 429 | char * jobId = NULL; 430 | uint16_t jobIdLength = 0U; 431 | 432 | assert( ( topic != NULL ) && ( outApi != NULL ) && 433 | ( outJobId != NULL ) && ( outJobIdLength != NULL ) ); 434 | 435 | for( i = 0U; i < length; i++ ) 436 | { 437 | if( ( i > 0U ) && ( p[ i ] == '/' ) ) 438 | { 439 | /* Save the leading job ID and its length. */ 440 | jobId = p; 441 | jobIdLength = ( uint16_t ) i; 442 | break; 443 | } 444 | } 445 | 446 | /* Advance p to after the '/' and reduce buffer length 447 | * for the remaining API search. */ 448 | p = &p[ jobIdLength + 1U ]; 449 | length = length - jobIdLength - 1U; 450 | 451 | if( ( isNextJobId( jobId, jobIdLength ) == true ) || 452 | ( isValidJobId( jobId, jobIdLength ) == true ) ) 453 | { 454 | if( JobsSuccess == strnnEq( p, length, apiTopic[ JobsDescribeSuccess ], apiTopicLength[ JobsDescribeSuccess ] ) ) 455 | { 456 | ret = JobsSuccess; 457 | *outApi = JobsDescribeSuccess; 458 | } 459 | else if( JobsSuccess == strnnEq( p, length, apiTopic[ JobsDescribeFailed ], apiTopicLength[ JobsDescribeFailed ] ) ) 460 | { 461 | ret = JobsSuccess; 462 | *outApi = JobsDescribeFailed; 463 | } 464 | else if( JobsSuccess == strnnEq( p, length, apiTopic[ JobsUpdateSuccess ], apiTopicLength[ JobsUpdateSuccess ] ) ) 465 | { 466 | ret = JobsSuccess; 467 | *outApi = JobsUpdateSuccess; 468 | } 469 | else if( JobsSuccess == strnnEq( p, length, apiTopic[ JobsUpdateFailed ], apiTopicLength[ JobsUpdateFailed ] ) ) 470 | { 471 | ret = JobsSuccess; 472 | *outApi = JobsUpdateFailed; 473 | } 474 | else 475 | { 476 | /* MISRA Empty Body */ 477 | } 478 | 479 | if( ret == JobsSuccess ) 480 | { 481 | *outJobId = jobId; 482 | *outJobIdLength = jobIdLength; 483 | } 484 | } 485 | 486 | return ret; 487 | } 488 | 489 | /** 490 | * @brief Search for the API portion of a topic string in a table. 491 | * 492 | * @param[in] topic The topic string to check. 493 | * @param[in] topicLength The length of the topic string. 494 | * @param[out] outApi The jobs topic API value if present, e.g., #JobsUpdateSuccess. 495 | * @param[out] outJobId The beginning of the jobID in the topic string. 496 | * @param[out] outJobIdLength The length of the jobID in the topic string. 497 | * 498 | * @return #JobsSuccess if a matching topic was found; 499 | * #JobsNoMatch if a matching topic was NOT found 500 | * (parameter outApi gets #JobsInvalidTopic ). 501 | */ 502 | static JobsStatus_t matchApi( char * topic, 503 | size_t topicLength, 504 | JobsTopic_t * outApi, 505 | char ** outJobId, 506 | uint16_t * outJobIdLength ) 507 | { 508 | JobsStatus_t ret = JobsNoMatch; 509 | 510 | assert( ( topic != NULL ) && ( outApi != NULL ) && 511 | ( outJobId != NULL ) && ( outJobIdLength != NULL ) ); 512 | 513 | /* The first set of APIs do not have job IDs. 
*/ 514 | if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsJobsChanged ], apiTopicLength[ JobsJobsChanged ] ) ) 515 | { 516 | ret = JobsSuccess; 517 | *outApi = JobsJobsChanged; 518 | } 519 | else if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsNextJobChanged ], apiTopicLength[ JobsNextJobChanged ] ) ) 520 | { 521 | ret = JobsSuccess; 522 | *outApi = JobsNextJobChanged; 523 | } 524 | else if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsGetPendingSuccess ], apiTopicLength[ JobsGetPendingSuccess ] ) ) 525 | { 526 | ret = JobsSuccess; 527 | *outApi = JobsGetPendingSuccess; 528 | } 529 | else if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsGetPendingFailed ], apiTopicLength[ JobsGetPendingFailed ] ) ) 530 | { 531 | ret = JobsSuccess; 532 | *outApi = JobsGetPendingFailed; 533 | } 534 | else if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsStartNextSuccess ], apiTopicLength[ JobsStartNextSuccess ] ) ) 535 | { 536 | ret = JobsSuccess; 537 | *outApi = JobsStartNextSuccess; 538 | } 539 | else if( JobsSuccess == strnnEq( topic, topicLength, apiTopic[ JobsStartNextFailed ], apiTopicLength[ JobsStartNextFailed ] ) ) 540 | { 541 | ret = JobsSuccess; 542 | *outApi = JobsStartNextFailed; 543 | } 544 | else 545 | { 546 | /* MISRA Empty Body */ 547 | } 548 | 549 | /* The remaining APIs must have a job ID. */ 550 | if( ret == JobsNoMatch ) 551 | { 552 | ret = matchIdApi( topic, topicLength, outApi, outJobId, outJobIdLength ); 553 | } 554 | 555 | return ret; 556 | } 557 | 558 | static bool isThingnameTopicMatch( const char * topic, 559 | const size_t topicLength, 560 | const char * topicSuffix, 561 | const size_t topicSuffixLength, 562 | const char * thingName, 563 | const size_t thingNameLength ) 564 | { 565 | char expectedTopicBuffer[ TOPIC_BUFFER_SIZE + 1 ] = { '\0' }; 566 | bool isMatch = true; 567 | size_t start = 0U; 568 | 569 | if( ( topic == NULL ) || ( topicLength == 0U ) ) 570 | { 571 | isMatch = false; 572 | } 573 | else if( ( thingName == NULL ) || ( thingNameLength == 0U ) ) 574 | { 575 | isMatch = false; 576 | } 577 | else 578 | { 579 | /* Empty MISRA body */ 580 | } 581 | 582 | if( isMatch ) 583 | { 584 | writePreamble( expectedTopicBuffer, &start, TOPIC_BUFFER_SIZE, thingName, ( uint16_t ) thingNameLength ); 585 | ( void ) strnAppend( expectedTopicBuffer, &start, TOPIC_BUFFER_SIZE, topicSuffix, topicSuffixLength ); 586 | 587 | isMatch = ( size_t ) strnlen( expectedTopicBuffer, TOPIC_BUFFER_SIZE ) == 588 | topicLength; 589 | isMatch = isMatch && ( strncmp( expectedTopicBuffer, topic, topicLength ) == 0 ); 590 | } 591 | else 592 | { 593 | /* Empty MISRA body */ 594 | } 595 | 596 | return isMatch; 597 | } 598 | 599 | /** @endcond */ 600 | 601 | /** 602 | * See jobs.h for docs. 603 | * 604 | * @brief Output a topic value if a Jobs API topic string is present. 
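 *
 * A minimal usage sketch (illustrative only; incomingTopic and its length
 * would come from the MQTT client, and the thing name is a placeholder):
 *
 *   JobsTopic_t api = JobsInvalidTopic;
 *   char * jobId = NULL;
 *   uint16_t jobIdLength = 0U;
 *   JobsStatus_t status = Jobs_MatchTopic( incomingTopic, incomingTopicLength,
 *                                          "MyThing", 7U,
 *                                          &api, &jobId, &jobIdLength );
 *
 * On #JobsSuccess, api identifies the Jobs API and, for job-specific topics,
 * jobId points into incomingTopic.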
605 | */ 606 | JobsStatus_t Jobs_MatchTopic( char * topic, 607 | size_t length, 608 | const char * thingName, 609 | uint16_t thingNameLength, 610 | JobsTopic_t * outApi, 611 | char ** outJobId, 612 | uint16_t * outJobIdLength ) 613 | { 614 | JobsStatus_t ret = JobsBadParameter; 615 | JobsTopic_t api = JobsInvalidTopic; 616 | char * jobId = NULL; 617 | uint16_t jobIdLength = 0U; 618 | 619 | if( ( topic != NULL ) && ( outApi != NULL ) && checkThingParams() && ( length > 0U ) ) 620 | { 621 | ret = JobsNoMatch; 622 | 623 | if( ( length > JOBS_API_COMMON_LENGTH( thingNameLength ) ) && 624 | ( length < JOBS_API_MAX_LENGTH( thingNameLength ) ) ) 625 | { 626 | char * prefix = topic; 627 | char * name = &prefix[ JOBS_API_PREFIX_LENGTH ]; 628 | char * bridge = &name[ thingNameLength ]; 629 | 630 | /* check the shortest match first */ 631 | if( ( strnEquals( bridge, JOBS_API_BRIDGE, JOBS_API_BRIDGE_LENGTH ) == JobsSuccess ) && 632 | ( strnEquals( prefix, JOBS_API_PREFIX, JOBS_API_PREFIX_LENGTH ) == JobsSuccess ) && 633 | ( strnEquals( name, thingName, thingNameLength ) == JobsSuccess ) ) 634 | { 635 | char * tail = &bridge[ JOBS_API_BRIDGE_LENGTH ]; 636 | size_t tailLength = length - JOBS_API_COMMON_LENGTH( thingNameLength ); 637 | 638 | ret = matchApi( tail, tailLength, &api, &jobId, &jobIdLength ); 639 | } 640 | } 641 | } 642 | 643 | if( outApi != NULL ) 644 | { 645 | *outApi = api; 646 | } 647 | 648 | if( outJobId != NULL ) 649 | { 650 | *outJobId = jobId; 651 | } 652 | 653 | if( outJobIdLength != NULL ) 654 | { 655 | *outJobIdLength = jobIdLength; 656 | } 657 | 658 | return ret; 659 | } 660 | 661 | /** 662 | * See jobs.h for docs. 663 | * 664 | * @brief Populate a topic string for a GetPendingJobExecutions request. 665 | */ 666 | JobsStatus_t Jobs_GetPending( char * buffer, 667 | size_t length, 668 | const char * thingName, 669 | uint16_t thingNameLength, 670 | size_t * outLength ) 671 | { 672 | JobsStatus_t ret = JobsBadParameter; 673 | size_t start = 0U; 674 | 675 | if( checkCommonParams() ) 676 | { 677 | writePreamble( buffer, &start, length, thingName, thingNameLength ); 678 | 679 | ret = strnAppend( buffer, &start, length, 680 | JOBS_API_GETPENDING, JOBS_API_GETPENDING_LENGTH ); 681 | 682 | start = ( start >= length ) ? ( length - 1U ) : start; 683 | buffer[ start ] = '\0'; 684 | 685 | if( outLength != NULL ) 686 | { 687 | *outLength = start; 688 | } 689 | } 690 | 691 | return ret; 692 | } 693 | 694 | /** 695 | * See jobs.h for docs. 696 | * 697 | * @brief Populate a topic string for a StartNextPendingJobExecution request. 698 | */ 699 | JobsStatus_t Jobs_StartNext( char * buffer, 700 | size_t length, 701 | const char * thingName, 702 | uint16_t thingNameLength, 703 | size_t * outLength ) 704 | { 705 | JobsStatus_t ret = JobsBadParameter; 706 | size_t start = 0U; 707 | 708 | if( checkCommonParams() ) 709 | { 710 | writePreamble( buffer, &start, length, thingName, thingNameLength ); 711 | 712 | ret = strnAppend( buffer, &start, length, 713 | JOBS_API_STARTNEXT, JOBS_API_STARTNEXT_LENGTH ); 714 | 715 | start = ( start >= length ) ? 
( length - 1U ) : start; 716 | buffer[ start ] = '\0'; 717 | 718 | if( outLength != NULL ) 719 | { 720 | *outLength = start; 721 | } 722 | } 723 | 724 | return ret; 725 | } 726 | 727 | size_t Jobs_StartNextMsg( const char * clientToken, 728 | size_t clientTokenLength, 729 | char * buffer, 730 | size_t bufferSize ) 731 | { 732 | size_t start = 0U; 733 | 734 | if( ( clientToken != NULL ) && ( clientTokenLength > 0U ) && ( bufferSize >= ( 18U + clientTokenLength ) ) ) 735 | { 736 | ( void ) strnAppend( buffer, &start, bufferSize, JOBS_API_CLIENTTOKEN, JOBS_API_CLIENTTOKEN_LENGTH ); 737 | ( void ) strnAppend( buffer, &start, bufferSize, clientToken, clientTokenLength ); 738 | ( void ) strnAppend( buffer, &start, bufferSize, "\"}", ( CONST_STRLEN( "\"}" ) ) ); 739 | } 740 | 741 | return start; 742 | } 743 | 744 | /** 745 | * See jobs.h for docs. 746 | * 747 | * @brief Populate a topic string for a DescribeJobExecution request. 748 | */ 749 | JobsStatus_t Jobs_Describe( char * buffer, 750 | size_t length, 751 | const char * thingName, 752 | uint16_t thingNameLength, 753 | const char * jobId, 754 | uint16_t jobIdLength, 755 | size_t * outLength ) 756 | { 757 | JobsStatus_t ret = JobsBadParameter; 758 | size_t start = 0U; 759 | 760 | if( checkCommonParams() && 761 | ( ( isNextJobId( jobId, jobIdLength ) == true ) || 762 | ( isValidJobId( jobId, jobIdLength ) == true ) ) ) 763 | { 764 | writePreamble( buffer, &start, length, thingName, thingNameLength ); 765 | 766 | ( void ) strnAppend( buffer, &start, length, 767 | jobId, jobIdLength ); 768 | ( void ) strnAppend( buffer, &start, length, 769 | "/", ( CONST_STRLEN( "/" ) ) ); 770 | ret = strnAppend( buffer, &start, length, 771 | JOBS_API_DESCRIBE, JOBS_API_DESCRIBE_LENGTH ); 772 | 773 | start = ( start >= length ) ? ( length - 1U ) : start; 774 | buffer[ start ] = '\0'; 775 | 776 | if( outLength != NULL ) 777 | { 778 | *outLength = start; 779 | } 780 | } 781 | 782 | return ret; 783 | } 784 | 785 | /** 786 | * See jobs.h for docs. 787 | * 788 | * @brief Populate a topic string for an UpdateJobExecution request. 789 | */ 790 | JobsStatus_t Jobs_Update( char * buffer, 791 | size_t length, 792 | const char * thingName, 793 | uint16_t thingNameLength, 794 | const char * jobId, 795 | uint16_t jobIdLength, 796 | size_t * outLength ) 797 | { 798 | JobsStatus_t ret = JobsBadParameter; 799 | size_t start = 0U; 800 | 801 | if( checkCommonParams() && 802 | ( isValidJobId( jobId, jobIdLength ) == true ) ) 803 | { 804 | writePreamble( buffer, &start, length, thingName, thingNameLength ); 805 | 806 | ( void ) strnAppend( buffer, &start, length, 807 | jobId, jobIdLength ); 808 | ( void ) strnAppend( buffer, &start, length, 809 | "/", ( CONST_STRLEN( "/" ) ) ); 810 | ret = strnAppend( buffer, &start, length, 811 | JOBS_API_UPDATE, JOBS_API_UPDATE_LENGTH ); 812 | 813 | start = ( start >= length ) ? ( length - 1U ) : start; 814 | buffer[ start ] = '\0'; 815 | 816 | if( outLength != NULL ) 817 | { 818 | *outLength = start; 819 | } 820 | } 821 | 822 | return ret; 823 | } 824 | 825 | /** 826 | * @brief Get the total length of optional fields provided for 827 | * the Jobs_UpdateMsg. These optional fields, if provided, require 828 | * additional buffer space. 829 | * 830 | * @param request A JobsUpdateRequest_t containing the optional fields. 831 | * @return size_t The buffer space required for the optional fields. 
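 *
 * For example, a request carrying an expectedVersion of length N and no
 * statusDetails requires JOBS_API_EXPECTED_VERSION_LENGTH + N bytes in
 * addition to the length reported by getRequiredFieldsLength().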
832 | */ 833 | static size_t getOptionalFieldsLength( JobsUpdateRequest_t request ) 834 | { 835 | size_t minimumOptionalFieldsBufferSize = 0U; 836 | 837 | if( ( request.expectedVersion != NULL ) && ( request.expectedVersionLength > 0U ) ) 838 | { 839 | minimumOptionalFieldsBufferSize += JOBS_API_EXPECTED_VERSION_LENGTH + request.expectedVersionLength; 840 | } 841 | 842 | if( ( request.statusDetails != NULL ) && ( request.statusDetailsLength > 0U ) ) 843 | { 844 | minimumOptionalFieldsBufferSize += JOBS_API_STATUS_DETAILS_LENGTH + request.statusDetailsLength; 845 | } 846 | 847 | return minimumOptionalFieldsBufferSize; 848 | } 849 | 850 | /** 851 | * @brief Get the total length of the required fields in the 852 | * Jobs_UpdateMsg request. 853 | * 854 | * @param request A JobsUpdateRequest_t containing the optional fields. 855 | * @return size_t The buffer space required for the optional fields. 856 | */ 857 | static size_t getRequiredFieldsLength( JobsUpdateRequest_t request ) 858 | { 859 | return JOBS_API_STATUS_LENGTH + strlen( jobStatusString[ request.status ] ) + CONST_STRLEN( "\"}" ); 860 | } 861 | 862 | /** 863 | * @brief Check non-null optional fields in the Jobs_UpdateMsg request 864 | * for validity. 865 | * 866 | * @param request A JobsUpdateRequest_t containing the optional fields. 867 | * @return true Optional fields appear valid. 868 | * @return false Optional fields are invalid. 869 | */ 870 | static bool areOptionalFieldsValid( JobsUpdateRequest_t request ) 871 | { 872 | bool optionalFieldsValid = true; 873 | 874 | if( ( request.statusDetails != NULL ) && ( request.statusDetailsLength > 0U ) ) 875 | { 876 | optionalFieldsValid = ( JSONSuccess == JSON_Validate( request.statusDetails, request.statusDetailsLength ) ); 877 | } 878 | 879 | return optionalFieldsValid; 880 | } 881 | 882 | size_t Jobs_UpdateMsg( JobsUpdateRequest_t request, 883 | char * buffer, 884 | size_t bufferSize ) 885 | { 886 | assert( ( ( size_t ) request.status ) < ARRAY_LENGTH( jobStatusString ) ); 887 | 888 | size_t start = 0U; 889 | size_t minimumBufferSize = getRequiredFieldsLength( request ) + getOptionalFieldsLength( request ); 890 | bool writeFailed = ( bufferSize < minimumBufferSize ) || !areOptionalFieldsValid( request ); 891 | 892 | if( !writeFailed ) 893 | { 894 | ( void ) strnAppend( buffer, &start, bufferSize, JOBS_API_STATUS, JOBS_API_STATUS_LENGTH ); 895 | ( void ) strnAppend( buffer, &start, bufferSize, jobStatusString[ request.status ], strlen( jobStatusString[ request.status ] ) ); 896 | 897 | /* This is an optional field so do not fail if expected version is missing.*/ 898 | if( ( request.expectedVersion != NULL ) && ( request.expectedVersionLength > 0U ) ) 899 | { 900 | ( void ) strnAppend( buffer, &start, bufferSize, JOBS_API_EXPECTED_VERSION, JOBS_API_EXPECTED_VERSION_LENGTH ); 901 | ( void ) strnAppend( buffer, &start, bufferSize, request.expectedVersion, request.expectedVersionLength ); 902 | } 903 | 904 | /* This is an optional field so do not fail if status details is missing.*/ 905 | if( ( request.statusDetails != NULL ) && ( request.statusDetailsLength > 0U ) ) 906 | { 907 | ( void ) strnAppend( buffer, &start, bufferSize, JOBS_API_STATUS_DETAILS, JOBS_API_STATUS_DETAILS_LENGTH ); 908 | ( void ) strnAppend( buffer, &start, bufferSize, request.statusDetails, request.statusDetailsLength ); 909 | 910 | ( void ) strnAppend( buffer, &start, bufferSize, "}", ( CONST_STRLEN( "}" ) ) ); 911 | } 912 | else 913 | { 914 | ( void ) strnAppend( buffer, &start, bufferSize, "\"}", ( CONST_STRLEN( 
"\"}" ) ) ); 915 | } 916 | } 917 | 918 | return start; 919 | } 920 | 921 | bool Jobs_IsStartNextAccepted( const char * topic, 922 | const size_t topicLength, 923 | const char * thingName, 924 | const size_t thingNameLength ) 925 | { 926 | return isThingnameTopicMatch( topic, topicLength, "start-next/accepted", strlen( "start-next/accepted" ), thingName, thingNameLength ); 927 | } 928 | 929 | bool Jobs_IsJobUpdateStatus( const char * topic, 930 | const size_t topicLength, 931 | const char * jobId, 932 | const size_t jobIdLength, 933 | const char * thingName, 934 | const size_t thingNameLength, 935 | JobUpdateStatus_t expectedStatus ) 936 | { 937 | static const char * const jobUpdateStatusString[] = 938 | { 939 | "accepted", 940 | "rejected" 941 | }; 942 | 943 | static const size_t jobUpdateStatusStringLengths[] = 944 | { 945 | CONST_STRLEN( "accepted" ), 946 | CONST_STRLEN( "rejected" ) 947 | }; 948 | 949 | assert( ( ( size_t ) expectedStatus ) < ARRAY_LENGTH( jobUpdateStatusString ) ); 950 | 951 | /* Max suffix size = max topic size - "$aws/" prefix */ 952 | size_t suffixBufferLength = ( TOPIC_BUFFER_SIZE - CONST_STRLEN( "$aws/" ) ); 953 | char suffixBuffer[ TOPIC_BUFFER_SIZE - CONST_STRLEN( "$aws/" ) ] = { '\0' }; 954 | size_t start = 0U; 955 | 956 | ( void ) strnAppend( suffixBuffer, &start, suffixBufferLength, jobId, jobIdLength ); 957 | ( void ) strnAppend( suffixBuffer, &start, suffixBufferLength, "/update/", ( CONST_STRLEN( "/update/" ) ) ); 958 | ( void ) strnAppend( suffixBuffer, &start, suffixBufferLength, jobUpdateStatusString[ expectedStatus ], jobUpdateStatusStringLengths[ expectedStatus ] ); 959 | 960 | return isThingnameTopicMatch( topic, topicLength, suffixBuffer, strnlen( suffixBuffer, suffixBufferLength ), thingName, thingNameLength ); 961 | } 962 | 963 | size_t Jobs_GetJobId( const char * message, 964 | size_t messageLength, 965 | const char ** jobId ) 966 | { 967 | size_t jobIdLength = 0U; 968 | JSONStatus_t jsonResult = JSONNotFound; 969 | 970 | jsonResult = JSON_Validate( message, messageLength ); 971 | 972 | if( jsonResult == JSONSuccess ) 973 | { 974 | jsonResult = JSON_SearchConst( message, 975 | messageLength, 976 | "execution.jobId", 977 | CONST_STRLEN( "execution.jobId" ), 978 | jobId, 979 | &jobIdLength, 980 | NULL ); 981 | } 982 | 983 | return jobIdLength; 984 | } 985 | 986 | size_t Jobs_GetJobDocument( const char * message, 987 | size_t messageLength, 988 | const char ** jobDoc ) 989 | { 990 | size_t jobDocLength = 0U; 991 | JSONStatus_t jsonResult = JSONNotFound; 992 | 993 | jsonResult = JSON_Validate( message, messageLength ); 994 | 995 | if( jsonResult == JSONSuccess ) 996 | { 997 | jsonResult = JSON_SearchConst( message, 998 | messageLength, 999 | "execution.jobDocument", 1000 | CONST_STRLEN( "execution.jobDocument" ), 1001 | jobDoc, 1002 | &jobDocLength, 1003 | NULL ); 1004 | } 1005 | 1006 | return jobDocLength; 1007 | } 1008 | -------------------------------------------------------------------------------- /test/unit-test/job_parser_utest.c: -------------------------------------------------------------------------------- 1 | /* 2 | * AWS IoT Jobs v2.0.0 3 | * Copyright (C) 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 4 | * SPDX-License-Identifier: MIT 5 | * 6 | * Licensed under the MIT License. See the LICENSE accompanying this file 7 | * for the specific language governing permissions and limitations under 8 | * the License. 
9 | */ 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | #include "unity.h" 16 | 17 | #include "job_parser.h" 18 | #include "ota_job_processor.h" 19 | 20 | static bool result; 21 | static uint32_t convertedUint; 22 | static AfrOtaJobDocumentFields_t documentFields; 23 | 24 | static void resetDocumentFields( void ); 25 | 26 | /* =========================== UNITY FIXTURES ============================ */ 27 | 28 | static void resetDocumentFields( void ) 29 | { 30 | documentFields.signature = NULL; 31 | documentFields.signatureLen = UINT32_MAX; 32 | documentFields.filepath = NULL; 33 | documentFields.filepathLen = UINT32_MAX; 34 | documentFields.certfile = NULL; 35 | documentFields.certfileLen = UINT32_MAX; 36 | documentFields.authScheme = NULL; 37 | documentFields.authSchemeLen = UINT32_MAX; 38 | documentFields.imageRef = NULL; 39 | documentFields.imageRefLen = UINT32_MAX; 40 | documentFields.fileId = UINT32_MAX; 41 | documentFields.fileSize = UINT32_MAX; 42 | documentFields.fileType = UINT32_MAX; 43 | } 44 | 45 | /* Called before each test method. */ 46 | void setUp() 47 | { 48 | resetDocumentFields(); 49 | result = true; 50 | convertedUint = 0U; 51 | } 52 | 53 | /* Called after each test method. */ 54 | void tearDown() 55 | { 56 | } 57 | 58 | /* Called at the beginning of the whole suite. */ 59 | void suiteSetUp() 60 | { 61 | } 62 | 63 | /* Called at the end of the whole suite. */ 64 | int suiteTearDown( int numFailures ) 65 | { 66 | return numFailures; 67 | } 68 | 69 | /* =============================== TESTS =============================== */ 70 | 71 | void test_populateJobDocFields_returnsTrue_givenValidMqttDocument( void ) 72 | { 73 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 74 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 75 | "\"filepath\":\"/device\",\"filesize\": " 76 | "123456789,\"fileid\":0,\"certfile\":\"certfile." 
77 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 78 | "239871\"}]}}"; 79 | 80 | result = false; 81 | result = populateJobDocFields( document, 82 | strlen( document ), 83 | 0, 84 | &documentFields ); 85 | 86 | TEST_ASSERT_TRUE( result ); 87 | TEST_ASSERT_EQUAL( 123456789U, documentFields.fileSize ); 88 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 89 | TEST_ASSERT_EQUAL_STRING_LEN( "certfile.cert", 90 | documentFields.certfile, 91 | strlen( "certfile.cert" ) ); 92 | TEST_ASSERT_EQUAL( strlen( "certfile.cert" ), documentFields.certfileLen ); 93 | TEST_ASSERT_EQUAL_STRING_LEN( "signature_hash_239871", 94 | documentFields.signature, 95 | strlen( "signature_hash_239871" ) ); 96 | TEST_ASSERT_EQUAL( strlen( "signature_hash_239871" ), 97 | documentFields.signatureLen ); 98 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 99 | documentFields.filepath, 100 | strlen( "/device" ) ); 101 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 102 | TEST_ASSERT_EQUAL_STRING_LEN( "AFR_OTA-streamname", 103 | documentFields.imageRef, 104 | strlen( "AFR_OTA-streamname" ) ); 105 | TEST_ASSERT_EQUAL( strlen( "AFR_OTA-streamname" ), 106 | documentFields.imageRefLen ); 107 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.fileType ); 108 | TEST_ASSERT_NULL( documentFields.authScheme ); 109 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.authSchemeLen ); 110 | } 111 | 112 | void test_populateJobDocFields_returnsTrue_givenValidMqttDocumentWithOptionalFields( void ) 113 | { 114 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 115 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 116 | "\"filepath\":\"/device\",\"filesize\": " 117 | "123456789,\"fileid\":0,\"fileType\":1234,\"certfile\":\"certfile." 118 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 119 | "239871\"}]}}"; 120 | 121 | result = false; 122 | result = populateJobDocFields( document, 123 | strlen( document ), 124 | 0, 125 | &documentFields ); 126 | 127 | TEST_ASSERT_TRUE( result ); 128 | TEST_ASSERT_EQUAL( 123456789U, documentFields.fileSize ); 129 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 130 | TEST_ASSERT_EQUAL_STRING_LEN( "certfile.cert", 131 | documentFields.certfile, 132 | strlen( "certfile.cert" ) ); 133 | TEST_ASSERT_EQUAL( strlen( "certfile.cert" ), documentFields.certfileLen ); 134 | TEST_ASSERT_EQUAL_STRING_LEN( "signature_hash_239871", 135 | documentFields.signature, 136 | strlen( "signature_hash_239871" ) ); 137 | TEST_ASSERT_EQUAL( strlen( "signature_hash_239871" ), 138 | documentFields.signatureLen ); 139 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 140 | documentFields.filepath, 141 | strlen( "/device" ) ); 142 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 143 | TEST_ASSERT_EQUAL_STRING_LEN( "AFR_OTA-streamname", 144 | documentFields.imageRef, 145 | strlen( "AFR_OTA-streamname" ) ); 146 | TEST_ASSERT_EQUAL( strlen( "AFR_OTA-streamname" ), 147 | documentFields.imageRefLen ); 148 | TEST_ASSERT_EQUAL( 1234U, documentFields.fileType ); 149 | TEST_ASSERT_NULL( documentFields.authScheme ); 150 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.authSchemeLen ); 151 | } 152 | 153 | void test_populateJobDocFields_returnsTrue_givenValidMultiFileMqttDocument( void ) 154 | { 155 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 156 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 157 | "\"filepath\":\"/path1\",\"filesize\": " 158 | "123456789,\"fileid\":0,\"certfile\":\"certfile." 
159 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 160 | "239871\"},{\"filepath\":\"/path2\",\"filesize\": " 161 | "101010,\"fileid\":13,\"certfile\":\"certfile2." 162 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 163 | "101010\"}]}}"; 164 | 165 | result = false; 166 | result = populateJobDocFields( document, 167 | strlen( document ), 168 | 0, 169 | &documentFields ); 170 | 171 | TEST_ASSERT_TRUE( result ); 172 | TEST_ASSERT_EQUAL( 123456789U, documentFields.fileSize ); 173 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 174 | TEST_ASSERT_EQUAL_STRING_LEN( "certfile.cert", 175 | documentFields.certfile, 176 | strlen( "certfile.cert" ) ); 177 | TEST_ASSERT_EQUAL( strlen( "certfile.cert" ), documentFields.certfileLen ); 178 | TEST_ASSERT_EQUAL_STRING_LEN( "signature_hash_239871", 179 | documentFields.signature, 180 | strlen( "signature_hash_239871" ) ); 181 | TEST_ASSERT_EQUAL( strlen( "signature_hash_239871" ), 182 | documentFields.signatureLen ); 183 | TEST_ASSERT_EQUAL_STRING_LEN( "/path1", 184 | documentFields.filepath, 185 | strlen( "/path1" ) ); 186 | TEST_ASSERT_EQUAL( strlen( "/path1" ), documentFields.filepathLen ); 187 | TEST_ASSERT_EQUAL_STRING_LEN( "AFR_OTA-streamname", 188 | documentFields.imageRef, 189 | strlen( "AFR_OTA-streamname" ) ); 190 | TEST_ASSERT_EQUAL( strlen( "AFR_OTA-streamname" ), 191 | documentFields.imageRefLen ); 192 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.fileType ); 193 | TEST_ASSERT_NULL( documentFields.authScheme ); 194 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.authSchemeLen ); 195 | 196 | result = false; 197 | result = populateJobDocFields( document, 198 | strlen( document ), 199 | 1, 200 | &documentFields ); 201 | 202 | TEST_ASSERT_TRUE( result ); 203 | TEST_ASSERT_EQUAL( 101010, documentFields.fileSize ); 204 | TEST_ASSERT_EQUAL( 13U, documentFields.fileId ); 205 | TEST_ASSERT_EQUAL_STRING_LEN( "certfile2.cert", 206 | documentFields.certfile, 207 | strlen( "certfile2.cert" ) ); 208 | TEST_ASSERT_EQUAL( strlen( "certfile2.cert" ), documentFields.certfileLen ); 209 | TEST_ASSERT_EQUAL_STRING_LEN( "signature_hash_101010", 210 | documentFields.signature, 211 | strlen( "signature_hash_101010" ) ); 212 | TEST_ASSERT_EQUAL( strlen( "signature_hash_101010" ), 213 | documentFields.signatureLen ); 214 | TEST_ASSERT_EQUAL_STRING_LEN( "/path2", 215 | documentFields.filepath, 216 | strlen( "/path2" ) ); 217 | TEST_ASSERT_EQUAL( strlen( "/path2" ), documentFields.filepathLen ); 218 | TEST_ASSERT_EQUAL_STRING_LEN( "AFR_OTA-streamname", 219 | documentFields.imageRef, 220 | strlen( "AFR_OTA-streamname" ) ); 221 | TEST_ASSERT_EQUAL( strlen( "AFR_OTA-streamname" ), 222 | documentFields.imageRefLen ); 223 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.fileType ); 224 | TEST_ASSERT_NULL( documentFields.authScheme ); 225 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.authSchemeLen ); 226 | } 227 | 228 | void test_populateJobDocFields_returnsTrue_givenValidHttpDocument( void ) 229 | { 230 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 231 | "{\"filepath\":\"/" 232 | "device\",\"filesize\":343135,\"fileid\":0," 233 | "\"certfile\":\"/strangepath/" 234 | "certificate.cert\",\"update_data_url\":\"presignedS3Url.s3.amazon." 
235 | "com\",\"auth_scheme\":\"aws.s3.presigned\",\"sig-" 236 | "sha256-ecdsa\":\"SIGNATUREHASH+ASDFLKJ123===\"}]}" 237 | "}"; 238 | 239 | result = false; 240 | result = populateJobDocFields( document, 241 | strlen( document ), 242 | 0, 243 | &documentFields ); 244 | 245 | TEST_ASSERT_TRUE( result ); 246 | TEST_ASSERT_EQUAL( 343135U, documentFields.fileSize ); 247 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 248 | TEST_ASSERT_EQUAL_STRING_LEN( "/strangepath/certificate.cert", 249 | documentFields.certfile, 250 | strlen( "/strangepath/certificate.cert" ) ); 251 | TEST_ASSERT_EQUAL( strlen( "/strangepath/certificate.cert" ), 252 | documentFields.certfileLen ); 253 | TEST_ASSERT_EQUAL_STRING_LEN( "SIGNATUREHASH+ASDFLKJ123===", 254 | documentFields.signature, 255 | strlen( "SIGNATUREHASH+ASDFLKJ123===" ) ); 256 | TEST_ASSERT_EQUAL( strlen( "SIGNATUREHASH+ASDFLKJ123===" ), 257 | documentFields.signatureLen ); 258 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 259 | documentFields.filepath, 260 | strlen( "/device" ) ); 261 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 262 | TEST_ASSERT_EQUAL_STRING_LEN( "presignedS3Url.s3.amazon.com", 263 | documentFields.imageRef, 264 | strlen( "presignedS3Url.s3.amazon.com" ) ); 265 | TEST_ASSERT_EQUAL( strlen( "presignedS3Url.s3.amazon.com" ), 266 | documentFields.imageRefLen ); 267 | TEST_ASSERT_EQUAL_STRING_LEN( "aws.s3.presigned", 268 | documentFields.authScheme, 269 | strlen( "aws.s3.presigned" ) ); 270 | TEST_ASSERT_EQUAL( strlen( "aws.s3.presigned" ), 271 | documentFields.authSchemeLen ); 272 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.fileType ); 273 | } 274 | 275 | void test_populateJobDocFields_returnsTrue_givenValidHttpDocumentWithOptionalFields( void ) 276 | { 277 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 278 | "{\"filepath\":\"/" 279 | "device\",\"filesize\":343135,\"fileid\":0," 280 | "\"certfile\":\"/strangepath/" 281 | "certificate.cert\",\"fileType\": " 282 | "1234,\"update_data_url\":\"presignedS3Url.s3.amazon." 
283 | "com\",\"auth_scheme\":\"aws.s3.presigned\",\"sig-" 284 | "sha256-ecdsa\":\"SIGNATUREHASH+ASDFLKJ123===\"}]}" 285 | "}"; 286 | 287 | result = false; 288 | result = populateJobDocFields( document, 289 | strlen( document ), 290 | 0, 291 | &documentFields ); 292 | 293 | TEST_ASSERT_TRUE( result ); 294 | TEST_ASSERT_EQUAL( 343135U, documentFields.fileSize ); 295 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 296 | TEST_ASSERT_EQUAL( 1234U, documentFields.fileType ); 297 | TEST_ASSERT_EQUAL_STRING_LEN( "/strangepath/certificate.cert", 298 | documentFields.certfile, 299 | strlen( "/strangepath/certificate.cert" ) ); 300 | TEST_ASSERT_EQUAL( strlen( "/strangepath/certificate.cert" ), 301 | documentFields.certfileLen ); 302 | TEST_ASSERT_EQUAL_STRING_LEN( "SIGNATUREHASH+ASDFLKJ123===", 303 | documentFields.signature, 304 | strlen( "SIGNATUREHASH+ASDFLKJ123===" ) ); 305 | TEST_ASSERT_EQUAL( strlen( "SIGNATUREHASH+ASDFLKJ123===" ), 306 | documentFields.signatureLen ); 307 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 308 | documentFields.filepath, 309 | strlen( "/device" ) ); 310 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 311 | TEST_ASSERT_EQUAL_STRING_LEN( "presignedS3Url.s3.amazon.com", 312 | documentFields.imageRef, 313 | strlen( "presignedS3Url.s3.amazon.com" ) ); 314 | TEST_ASSERT_EQUAL( strlen( "presignedS3Url.s3.amazon.com" ), 315 | documentFields.imageRefLen ); 316 | TEST_ASSERT_EQUAL_STRING_LEN( "aws.s3.presigned", 317 | documentFields.authScheme, 318 | strlen( "aws.s3.presigned" ) ); 319 | TEST_ASSERT_EQUAL( strlen( "aws.s3.presigned" ), 320 | documentFields.authSchemeLen ); 321 | } 322 | 323 | void test_populateJobDocFields_returnsTrue_givenValidMultiFileHttpDocument( void ) 324 | { 325 | const char * 326 | document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[{" 327 | "\"filepath\":\"/" 328 | "device\",\"filesize\":343135,\"fileid\":0,\"certfile\":\"/" 329 | "strangepath/certificate.cert\",\"fileType\": " 330 | "2,\"update_data_url\":\"presignedS3Url.s3.amazon.com\"," 331 | "\"auth_scheme\":\"aws.s3.presigned\",\"sig-sha256-ecdsa\":" 332 | "\"SIGNATUREHASH+ASDFLKJ123===\"},{\"filepath\":\"/" 333 | "path2\",\"filesize\":43210,\"fileid\":99,\"certfile\":\"/" 334 | "strangepath/file.cert\",\"fileType\": " 335 | "333,\"update_data_url\":\"presignedS3Url.s3.amazon.com\"," 336 | "\"auth_scheme\":\"aws.s3.presigned\",\"sig-sha256-ecdsa\":" 337 | "\"SIGNATUREHASH+ASDFLKJ123===\"}]}}"; 338 | 339 | result = false; 340 | result = populateJobDocFields( document, 341 | strlen( document ), 342 | 0, 343 | &documentFields ); 344 | 345 | TEST_ASSERT_TRUE( result ); 346 | TEST_ASSERT_EQUAL( 343135U, documentFields.fileSize ); 347 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 348 | TEST_ASSERT_EQUAL( 2U, documentFields.fileType ); 349 | TEST_ASSERT_EQUAL_STRING_LEN( "/strangepath/certificate.cert", 350 | documentFields.certfile, 351 | strlen( "/strangepath/certificate.cert" ) ); 352 | TEST_ASSERT_EQUAL( strlen( "/strangepath/certificate.cert" ), 353 | documentFields.certfileLen ); 354 | TEST_ASSERT_EQUAL_STRING_LEN( "SIGNATUREHASH+ASDFLKJ123===", 355 | documentFields.signature, 356 | strlen( "SIGNATUREHASH+ASDFLKJ123===" ) ); 357 | TEST_ASSERT_EQUAL( strlen( "SIGNATUREHASH+ASDFLKJ123===" ), 358 | documentFields.signatureLen ); 359 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 360 | documentFields.filepath, 361 | strlen( "/device" ) ); 362 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 363 | TEST_ASSERT_EQUAL_STRING_LEN( 
"presignedS3Url.s3.amazon.com", 364 | documentFields.imageRef, 365 | strlen( "presignedS3Url.s3.amazon.com" ) ); 366 | TEST_ASSERT_EQUAL( strlen( "presignedS3Url.s3.amazon.com" ), 367 | documentFields.imageRefLen ); 368 | TEST_ASSERT_EQUAL_STRING_LEN( "aws.s3.presigned", 369 | documentFields.authScheme, 370 | strlen( "aws.s3.presigned" ) ); 371 | TEST_ASSERT_EQUAL( strlen( "aws.s3.presigned" ), 372 | documentFields.authSchemeLen ); 373 | 374 | result = false; 375 | result = populateJobDocFields( document, 376 | strlen( document ), 377 | 1, 378 | &documentFields ); 379 | 380 | TEST_ASSERT_TRUE( result ); 381 | TEST_ASSERT_EQUAL( 43210U, documentFields.fileSize ); 382 | TEST_ASSERT_EQUAL( 99U, documentFields.fileId ); 383 | TEST_ASSERT_EQUAL( 333U, documentFields.fileType ); 384 | TEST_ASSERT_EQUAL_STRING_LEN( "/strangepath/file.cert", 385 | documentFields.certfile, 386 | strlen( "/strangepath/file.cert" ) ); 387 | TEST_ASSERT_EQUAL( strlen( "/strangepath/file.cert" ), 388 | documentFields.certfileLen ); 389 | TEST_ASSERT_EQUAL_STRING_LEN( "SIGNATUREHASH+ASDFLKJ123===", 390 | documentFields.signature, 391 | strlen( "SIGNATUREHASH+ASDFLKJ123===" ) ); 392 | TEST_ASSERT_EQUAL( strlen( "SIGNATUREHASH+ASDFLKJ123===" ), 393 | documentFields.signatureLen ); 394 | TEST_ASSERT_EQUAL_STRING_LEN( "/path2", 395 | documentFields.filepath, 396 | strlen( "/path2" ) ); 397 | TEST_ASSERT_EQUAL( strlen( "/path2" ), documentFields.filepathLen ); 398 | TEST_ASSERT_EQUAL_STRING_LEN( "presignedS3Url.s3.amazon.com", 399 | documentFields.imageRef, 400 | strlen( "presignedS3Url.s3.amazon.com" ) ); 401 | TEST_ASSERT_EQUAL( strlen( "presignedS3Url.s3.amazon.com" ), 402 | documentFields.imageRefLen ); 403 | TEST_ASSERT_EQUAL_STRING_LEN( "aws.s3.presigned", 404 | documentFields.authScheme, 405 | strlen( "aws.s3.presigned" ) ); 406 | TEST_ASSERT_EQUAL( strlen( "aws.s3.presigned" ), 407 | documentFields.authSchemeLen ); 408 | } 409 | 410 | void test_populateJobDocFields_returnsTrue_givenValidMultiProtocolDocument( void ) 411 | { 412 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\",\"HTTP\"]," 413 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 414 | "\"filepath\":\"/device\",\"filesize\": " 415 | "123456789,\"fileid\":0,\"certfile\":\"certfile." 416 | "cert\",\"fileType\": " 417 | "66,\"update_data_url\":\"unusedurl.s3.amazon." 
418 | "com\",\"auth_scheme\":\"aws.s3.presigned\",\"sig-" 419 | "sha256-ecdsa\":\"signature_hash_239871\"}]}}"; 420 | 421 | result = false; 422 | result = populateJobDocFields( document, 423 | strlen( document ), 424 | 0, 425 | &documentFields ); 426 | 427 | TEST_ASSERT_TRUE( result ); 428 | TEST_ASSERT_EQUAL( 123456789U, documentFields.fileSize ); 429 | TEST_ASSERT_EQUAL( 0U, documentFields.fileId ); 430 | TEST_ASSERT_EQUAL( 66U, documentFields.fileType ); 431 | TEST_ASSERT_EQUAL_STRING_LEN( "certfile.cert", 432 | documentFields.certfile, 433 | strlen( "certfile.cert" ) ); 434 | TEST_ASSERT_EQUAL( strlen( "certfile.cert" ), documentFields.certfileLen ); 435 | TEST_ASSERT_EQUAL_STRING_LEN( "signature_hash_239871", 436 | documentFields.signature, 437 | strlen( "signature_hash_239871" ) ); 438 | TEST_ASSERT_EQUAL( strlen( "signature_hash_239871" ), 439 | documentFields.signatureLen ); 440 | TEST_ASSERT_EQUAL_STRING_LEN( "/device", 441 | documentFields.filepath, 442 | strlen( "/device" ) ); 443 | TEST_ASSERT_EQUAL( strlen( "/device" ), documentFields.filepathLen ); 444 | TEST_ASSERT_EQUAL_STRING_LEN( "AFR_OTA-streamname", 445 | documentFields.imageRef, 446 | strlen( "AFR_OTA-streamname" ) ); 447 | TEST_ASSERT_EQUAL( strlen( "AFR_OTA-streamname" ), 448 | documentFields.imageRefLen ); 449 | TEST_ASSERT_NULL( documentFields.authScheme ); 450 | TEST_ASSERT_EQUAL( UINT32_MAX, documentFields.authSchemeLen ); 451 | } 452 | 453 | void test_populateJobDocFields_returnsFalse_givenValidInvalidOptionalFields( void ) 454 | { 455 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 456 | "{\"filepath\":\"/" 457 | "device\",\"filesize\":343135,\"fileid\":0," 458 | "\"certfile\":\"/strangepath/" 459 | "certificate.cert\",\"fileType\": \"badParameter\"," 460 | "\"update_data_url\":\"presignedS3Url.s3.amazon." 
461 | "com\",\"auth_scheme\":\"aws.s3.presigned\",\"sig-" 462 | "sha256-ecdsa\":\"SIGNATUREHASH+ASDFLKJ123===\"}]}" 463 | "}"; 464 | 465 | result = false; 466 | result = populateJobDocFields( document, 467 | strlen( document ), 468 | 0, 469 | &documentFields ); 470 | 471 | TEST_ASSERT_FALSE( result ); 472 | } 473 | 474 | void test_populateJobDocFields_returnsFalse_whenEmptyProtocol( void ) 475 | { 476 | const char * 477 | document = "{\"afr_ota\":{\"protocols\":[],\"streamname\":\"AFR_OTA-" 478 | "d66ffc3b-4325-4496-9358-509f8f3504d1\",\"files\":[{" 479 | "\"filepath\": \"/device\",\"filesize\": 20417,\"fileid\": " 480 | "0,\"certfile\": " 481 | "\"/strangepath/certificate.cert\",\"sig-sha256-ecdsa\": " 482 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7AgIhAIdTm" 483 | "0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"}]}}"; 484 | 485 | result = populateJobDocFields( document, 486 | strlen( document ), 487 | 0, 488 | &documentFields ); 489 | 490 | TEST_ASSERT_FALSE( result ); 491 | } 492 | 493 | void test_populateJobDocFields_returnsFalse_whenMissingProtocol( void ) 494 | { 495 | const char * document = "{\"afr_ota\":{\"streamname\":\"AFR_OTA-d66ffc3b-" 496 | "4325-4496-9358-509f8f3504d1\",\"files\":[{" 497 | "\"filepath\": \"/device\",\"filesize\": " 498 | "20417,\"fileid\": 0,\"certfile\": " 499 | "\"/strangepath/" 500 | "certificate.cert\",\"sig-sha256-ecdsa\": " 501 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7" 502 | "AgIhAIdTm0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"" 503 | "}]}}"; 504 | 505 | result = populateJobDocFields( document, 506 | strlen( document ), 507 | 0, 508 | &documentFields ); 509 | 510 | TEST_ASSERT_FALSE( result ); 511 | } 512 | 513 | void test_populateJobDocFields_returnsFalse_whenMissingFilesize( void ) 514 | { 515 | const char * 516 | document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"],\"streamname\":" 517 | "\"AFR_OTA-d66ffc3b-4325-4496-9358-509f8f3504d1\",\"files\":" 518 | "[{\"filepath\": \"/device\",\"fileid\": 0,\"certfile\": " 519 | "\"/strangepath/certificate.cert\",\"sig-sha256-ecdsa\": " 520 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7AgIhAIdTm" 521 | "0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"}]}}"; 522 | 523 | result = populateJobDocFields( document, 524 | strlen( document ), 525 | 0, 526 | &documentFields ); 527 | 528 | TEST_ASSERT_FALSE( result ); 529 | } 530 | 531 | void test_populateJobDocFields_returnsFalse_whenMissingFileId( void ) 532 | { 533 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 534 | "\"streamname\":\"AFR_OTA-d66ffc3b-4325-4496-9358-" 535 | "509f8f3504d1\",\"files\":[{\"filepath\": " 536 | "\"/device\",\"filesize\": 20417,\"certfile\": " 537 | "\"/strangepath/" 538 | "certificate.cert\",\"sig-sha256-ecdsa\": " 539 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7" 540 | "AgIhAIdTm0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"" 541 | "}]}}"; 542 | 543 | result = populateJobDocFields( document, 544 | strlen( document ), 545 | 0, 546 | &documentFields ); 547 | 548 | TEST_ASSERT_FALSE( result ); 549 | } 550 | 551 | void test_populateJobDocFields_returnsFalse_whenMissingFilePath( void ) 552 | { 553 | const char * 554 | document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"],\"streamname\":" 555 | "\"AFR_OTA-d66ffc3b-4325-4496-9358-509f8f3504d1\",\"files\":" 556 | "[{\"filesize\": 20417,\"fileid\": 0,\"certfile\": " 557 | "\"/strangepath/certificate.cert\",\"sig-sha256-ecdsa\": " 558 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7AgIhAIdTm" 559 | "0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"}]}}"; 560 | 561 | 
result = populateJobDocFields( document, 562 | strlen( document ), 563 | 0, 564 | &documentFields ); 565 | 566 | TEST_ASSERT_FALSE( result ); 567 | } 568 | 569 | void test_populateJobDocFields_returnsFalse_whenMissingCertfile( void ) 570 | { 571 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 572 | "\"streamname\":\"AFR_OTA-d66ffc3b-4325-4496-9358-" 573 | "509f8f3504d1\",\"files\":[{\"filepath\": " 574 | "\"/device\",\"filesize\": 20417,\"fileid\": " 575 | "0,\"sig-sha256-ecdsa\": " 576 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7" 577 | "AgIhAIdTm0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"" 578 | "}]}}"; 579 | 580 | result = populateJobDocFields( document, 581 | strlen( document ), 582 | 0, 583 | &documentFields ); 584 | 585 | TEST_ASSERT_FALSE( result ); 586 | } 587 | 588 | void test_populateJobDocFields_returnsFalse_whenMissingSignature( void ) 589 | { 590 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 591 | "\"streamname\":\"AFR_OTA-d66ffc3b-4325-4496-9358-" 592 | "509f8f3504d1\",\"files\":[{\"filepath\": " 593 | "\"/device\",\"filesize\": 20417,\"fileid\": " 594 | "0,\"certfile\": " 595 | "\"/strangepath/certificate.cert\"}]}}"; 596 | 597 | result = populateJobDocFields( document, 598 | strlen( document ), 599 | 0, 600 | &documentFields ); 601 | 602 | TEST_ASSERT_FALSE( result ); 603 | } 604 | 605 | void test_populateJobDocFields_returnsFalse_whenMqttDocEmptyStreamName( void ) 606 | { 607 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 608 | "\"streamname\":\"\",\"files\":[{\"filepath\":\"/" 609 | "device\",\"filesize\": " 610 | "123456789,\"fileid\":0,\"certfile\":\"certfile." 611 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 612 | "239871\"}]}}"; 613 | 614 | result = populateJobDocFields( document, 615 | strlen( document ), 616 | 0, 617 | &documentFields ); 618 | 619 | TEST_ASSERT_FALSE( result ); 620 | } 621 | 622 | void test_populateJobDocFields_returnsFalse_whenMqttDocMissingStreamName( void ) 623 | { 624 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"],\"files\":[" 625 | "{\"filepath\": \"/device\",\"filesize\": " 626 | "20417,\"fileid\": 0,\"certfile\": " 627 | "\"/strangepath/" 628 | "certificate.cert\",\"sig-sha256-ecdsa\": " 629 | "\"MEYCIQCVWi3ki1d9fqa1vrRS3dyDE7qJv4Dl1guB9qBzvTz7" 630 | "AgIhAIdTm0MeZa2aHpHZnKQERLFpI69ii3GUhycQBOVqqP3N\"" 631 | "}]}}"; 632 | 633 | result = populateJobDocFields( document, 634 | strlen( document ), 635 | 0, 636 | &documentFields ); 637 | 638 | TEST_ASSERT_FALSE( result ); 639 | } 640 | 641 | void test_populateJobDocFields_returnsFalse_whenHttpDocEmptyUrl( void ) 642 | { 643 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 644 | "{\"filepath\":\"/" 645 | "device\",\"filesize\":343135,\"fileid\":0," 646 | "\"certfile\":\"/strangepath/" 647 | "certificate.cert\",\"fileType\": " 648 | "2,\"update_data_url\":\"\",\"auth_scheme\":\"aws." 
649 | "s3.presigned\",\"sig-sha256-ecdsa\":" 650 | "\"SIGNATUREHASH+ASDFLKJ123===\"}]}}"; 651 | 652 | result = populateJobDocFields( document, 653 | strlen( document ), 654 | 0, 655 | &documentFields ); 656 | 657 | TEST_ASSERT_FALSE( result ); 658 | } 659 | 660 | void test_populateJobDocFields_returnsFalse_whenHttpDocMissingUrl( void ) 661 | { 662 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 663 | "{\"filepath\":\"/" 664 | "device\",\"filesize\":343135,\"fileid\":0," 665 | "\"certfile\":\"/strangepath/" 666 | "certificate.cert\",\"fileType\": " 667 | "2,\"auth_scheme\":\"aws.s3.presigned\",\"sig-" 668 | "sha256-ecdsa\":\"MEQCIGOTD5owb5s3R3+" 669 | "OlxH5UZcy52Vuz6QrJhg83F8t8tBfAiBTNiX0e8RR5JOzHfSqW" 670 | "Kq4WJC/xUwMrcdHEWgSAKfGQA==\"}]}}"; 671 | 672 | result = populateJobDocFields( document, 673 | strlen( document ), 674 | 0, 675 | &documentFields ); 676 | 677 | TEST_ASSERT_FALSE( result ); 678 | } 679 | 680 | void test_populateJobDocFields_returnsFalse_whenHttpDocMissingAuthScheme( void ) 681 | { 682 | const char * document = "{\"afr_ota\":{\"protocols\":[\"HTTP\"],\"files\":[" 683 | "{\"filepath\":\"/" 684 | "device\",\"filesize\":343135,\"fileid\":0," 685 | "\"certfile\":\"/strangepath/" 686 | "certificate.cert\",\"fileType\": " 687 | "2,\"update_data_url\":\"${aws:iot:s3-presigned-" 688 | "url:https://s3.region.amazonaws.com/joe-ota/" 689 | "SignedImages/" 690 | "ffdac2a7-0f70-4f47-8940-886ad260445c}\",\"sig-" 691 | "sha256-ecdsa\":\"MEQCIGOTD5owb5s3R3+" 692 | "OlxH5UZcy52Vuz6QrJhg83F8t8tBfAiBTNiX0e8RR5JOzHfSqW" 693 | "Kq4WJC/xUwMrcdHEWgSAKfGQA==\"}]}}"; 694 | 695 | result = populateJobDocFields( document, 696 | strlen( document ), 697 | 0, 698 | &documentFields ); 699 | 700 | TEST_ASSERT_FALSE( result ); 701 | } 702 | 703 | void test_populateJobDocFields_returnsFalse_whenFileSizeIsNegativeInteger() 704 | { 705 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 706 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 707 | "\"filepath\":\"/device\",\"filesize\": " 708 | "-123456789,\"fileid\":0,\"certfile\":\"certfile." 
709 | "cert\",\"sig-sha256-ecdsa\":\"signature_hash_" 710 | "239871\"}]}}"; 711 | 712 | result = true; 713 | result = populateJobDocFields( document, 714 | strlen( document ), 715 | 0, 716 | &documentFields ); 717 | 718 | TEST_ASSERT_FALSE( result ); 719 | } 720 | 721 | void test_populateJobDocFields_returnsFalse_whenFileSizeZeroLength() 722 | { 723 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 724 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 725 | "\"filepath\":\"/device\",\"filesize\": " 726 | "\"\",\"fileid\":0,\"certfile\":\"certfile.cert\"," 727 | "\"sig-sha256-ecdsa\":\"signature_hash_239871\"}]}" 728 | "}"; 729 | 730 | result = true; 731 | result = populateJobDocFields( document, 732 | strlen( document ), 733 | 0, 734 | &documentFields ); 735 | 736 | TEST_ASSERT_FALSE( result ); 737 | } 738 | 739 | void test_populateJobDocFields_returnsFalse_whenFileSizeTooLarge() 740 | { 741 | const char * document = "{\"afr_ota\":{\"protocols\":[\"MQTT\"]," 742 | "\"streamname\":\"AFR_OTA-streamname\",\"files\":[{" 743 | "\"filepath\":\"/device\",\"filesize\": " 744 | "99999999999999,\"fileid\":0,\"certfile\":" 745 | "\"certfile.cert\",\"sig-sha256-ecdsa\":" 746 | "\"signature_hash_239871\"}]}}"; 747 | 748 | result = true; 749 | result = populateJobDocFields( document, 750 | strlen( document ), 751 | 0, 752 | &documentFields ); 753 | 754 | TEST_ASSERT_FALSE( result ); 755 | } 756 | --------------------------------------------------------------------------------