├── .clang-format ├── .devcontainer ├── devcontainer.json └── install.R ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── feature_request.yml │ ├── new_issue_template.yml │ ├── refactor_request.yml │ └── ss3-release-template.md ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── add-exe-build-artifacts-to-PR.yml │ ├── build-admb-and-ss3-from-source.yml │ ├── build-ss3-warnings.yml │ ├── build-ss3.yml │ ├── email_when_close_pr.yml │ ├── reference_files │ └── warnings_ss_ref.txt │ ├── run-ss3-bootstrap.yml │ ├── run-ss3-mcmc.yml │ ├── run-ss3-no-est.yml │ ├── run-ss3-with-est.yml │ └── test-r4ss-with-ss3.yml ├── .gitignore ├── .vscode └── settings.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Change_log_for_SS_3.30.xlsx ├── Compile ├── Make_SS_fast.bat ├── Make_SS_safe.bat └── Make_SS_warn.bat ├── GNUmakefile ├── LICENSE ├── Make_SS_330_new.sh ├── README.md ├── SS_ALK.tpl ├── SS_benchfore.tpl ├── SS_biofxn.tpl ├── SS_expval.tpl ├── SS_global.tpl ├── SS_miscfxn.tpl ├── SS_objfunc.tpl ├── SS_param.tpl ├── SS_popdyn.tpl ├── SS_prelim.tpl ├── SS_proced.tpl ├── SS_readcontrol_330.tpl ├── SS_readdata_330.tpl ├── SS_readstarter.tpl ├── SS_recruit.tpl ├── SS_selex.tpl ├── SS_tagrecap.tpl ├── SS_timevaryparm.tpl ├── SS_versioninfo_330opt.tpl ├── SS_versioninfo_330safe.tpl ├── SS_write.tpl ├── SS_write_report.tpl ├── SS_write_ssnew.tpl ├── StockSynthesis.code-workspace ├── coding_style.md ├── pretty_tpl.bat └── tpl-format.exe /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | Language: Cpp 3 | 4 | AccessModifierOffset: -4 5 | AlignAfterOpenBracket: false 6 | AlignConsecutiveAssignments: false 7 | AlignEscapedNewlinesLeft: false 8 | AlignOperands: false 9 | AlignTrailingComments: false 10 | AllowAllParametersOfDeclarationOnNextLine: true 11 | AllowShortBlocksOnASingleLine: false 12 | AllowShortCaseLabelsOnASingleLine: false 13 | AllowShortFunctionsOnASingleLine: Empty 14 | 
AllowShortIfStatementsOnASingleLine: false 15 | AllowShortLoopsOnASingleLine: false 16 | AlwaysBreakAfterDefinitionReturnType: None 17 | AlwaysBreakBeforeMultilineStrings: false 18 | AlwaysBreakTemplateDeclarations: false 19 | BinPackArguments: true 20 | BinPackParameters: true 21 | BreakBeforeBinaryOperators: None 22 | BreakBeforeBraces: Stroustrup 23 | BreakBeforeTernaryOperators: false 24 | BreakConstructorInitializersBeforeComma: true 25 | ColumnLimit: 0 26 | CommentPragmas: 'SS_Label' 27 | ConstructorInitializerAllOnOneLineOrOnePerLine: true 28 | ConstructorInitializerIndentWidth: 4 29 | ContinuationIndentWidth: 4 30 | Cpp11BracedListStyle: true 31 | DerivePointerAlignment: false 32 | DisableFormat: false 33 | ExperimentalAutoDetectBinPacking: false 34 | ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] 35 | IndentCaseLabels: true 36 | IndentWidth: 2 37 | IndentWrappedFunctionNames: false 38 | KeepEmptyLinesAtTheStartOfBlocks: true 39 | MacroBlockBegin: '' 40 | MacroBlockEnd: '' 41 | MaxEmptyLinesToKeep: 1 42 | NamespaceIndentation: All 43 | ObjCBlockIndentWidth: 4 44 | ObjCSpaceAfterProperty: false 45 | ObjCSpaceBeforeProtocolList: false 46 | PenaltyBreakBeforeFirstCallParameter: 19 47 | PenaltyBreakComment: 3000 48 | PenaltyBreakFirstLessLess: 120 49 | PenaltyBreakString: 10000 50 | PenaltyExcessCharacter: 1000000 51 | PenaltyReturnTypeOnItsOwnLine: 80 52 | PointerAlignment: Left 53 | SpaceAfterCStyleCast: false 54 | SpaceBeforeAssignmentOperators: true 55 | SpaceBeforeParens: ControlStatements 56 | SpaceInEmptyParentheses: false 57 | SpacesBeforeTrailingComments: 1 58 | SpacesInAngles: false 59 | SpacesInContainerLiterals: false 60 | SpacesInCStyleCastParentheses: false 61 | SpacesInParentheses: false 62 | SpacesInSquareBrackets: false 63 | Standard: Cpp11 64 | TabWidth: 4 65 | UseTab: Never 66 | ... 
67 | 68 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // SS3 Developer Codespace 2 | // This codespace contains R and a port for an Rstudio server in case 3 | // that is the preferred IDE over VSCode. 4 | // R packages commonly used with SS3 are also installed (see install.R). 5 | { 6 | "name": "R (rocker/tidyverse)", 7 | "image": "ghcr.io/rocker-org/devcontainer/tidyverse:4.3", 8 | 9 | "features": { 10 | "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}, 11 | "ghcr.io/rocker-org/devcontainer-features/quarto-cli:1": {}, 12 | "ghcr.io/rocker-org/devcontainer-features/apt-packages:1": { 13 | "packages": "make,gcc,g++,cmake,clang-tidy,clang-format" 14 | }, 15 | "ghcr.io/rocker-org/devcontainer-features/r-packages:1": { 16 | "packages": "cli,rlang,scales,covr,devtools,ggplot2,reshape2,dplyr,tidyr,Rcpp,rlist,viridis,plyr,flextable,gridExtra,data.table,adnuts,gt,gtExtras,stringr,purrr,furrr", 17 | "installSystemRequirements": true 18 | } 19 | }, 20 | "customizations": { 21 | "vscode": { 22 | "settings": { 23 | "r.rterm.linux": "/usr/local/bin/radian", 24 | "r.bracketedPaste": true, 25 | "r.plot.useHttpgd": true, 26 | "[r]": { 27 | "editor.wordSeparators": "`~!@#%$^&*()-=+[{]}\\|;:'\",<>/?" 28 | } 29 | }, 30 | "extensions":[ 31 | "GitHub.codespaces", 32 | "hbenl.vscode-test-explorer", 33 | "reditorsupport.r", 34 | "rdebugger.r-debugger", 35 | "ms-vsliveshare.vsliveshare", 36 | "mathematic.vscode-pdf" 37 | ] 38 | } 39 | }, 40 | "forwardPorts": [8787], 41 | "portsAttributes": { 42 | "8787": { 43 | "label": "Rstudio", 44 | "requireLocalPort": true, 45 | "onAutoForward": "ignore" 46 | } 47 | }, 48 | // Use 'postCreateCommand' to run commands after the container is created. 
49 | "postCreateCommand": "Rscript .devcontainer/install.R", 50 | "postAttachCommand": "sudo rstudio-server start", 51 | "remoteUser": "rstudio" 52 | 53 | } 54 | -------------------------------------------------------------------------------- /.devcontainer/install.R: -------------------------------------------------------------------------------- 1 | # Install R packages 2 | # Make sure the following packages are using the most recent versions: 3 | remotes::install_github("r4ss/r4ss") 4 | remotes::install_github("ss3sim/ss3sim") 5 | remotes::install_github("nwfsc-assess/nwfscDiag") 6 | remotes::install_github("PIFSCstockassessments/ss3diags") -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Reclassify .tpl files as c++ to get better syntax highlighting on github: 2 | *.tpl linguist-language=C++ 3 | 4 | # Add settings for .tpl line endings 5 | *.tpl text=auto 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Report a potential bug 3 | title: "[Bug]: " 4 | labels: ["kind: bug", "New_Request"] 5 | assignees: 6 | - Rick-Methot-NOAA 7 | body: 8 | - type: markdown 9 | attributes: 10 | value: | 11 | Thanks for taking the time to fill out this bug report! 12 | - type: textarea 13 | id: describe 14 | attributes: 15 | label: Describe the bug 16 | description: A clear and concise description of what the bug is. 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: reproduce 21 | attributes: 22 | label: To Reproduce 23 | description: Steps to reproduce the behavior 24 | placeholder: | 25 | 1. Go to '...' 26 | 2. Click on '....' 27 | 3. Scroll down to '....' 28 | 4. 
See error 29 | validations: 30 | required: true 31 | - type: textarea 32 | id: expected 33 | attributes: 34 | label: Expected behavior 35 | description: A clear and concise description of what you expected to happen. 36 | placeholder: I expected ... 37 | validations: 38 | required: true 39 | - type: textarea 40 | id: screenshots 41 | attributes: 42 | label: Screenshots 43 | description: If applicable, add screenshots to help explain your problem. 44 | validations: 45 | required: false 46 | - type: dropdown 47 | id: os 48 | attributes: 49 | label: Which OS are you seeing the problem on? 50 | multiple: true 51 | options: 52 | - Windows 53 | - Mac 54 | - Linux 55 | - Other 56 | validations: 57 | required: false 58 | - type: input 59 | id: version 60 | attributes: 61 | label: Which version of SS3 are you seeing the problem on? 62 | validations: 63 | required: false 64 | - type: textarea 65 | id: context 66 | attributes: 67 | label: Additional Context 68 | description: Add any other context about the problem here. 69 | validations: 70 | required: false 71 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Request new features or changes to features 3 | title: "[Feature]: " 4 | labels: ["New_Request"] 5 | assignees: 6 | - Rick-Methot-NOAA 7 | body: 8 | - type: markdown 9 | attributes: 10 | value: | 11 | Thanks for taking the time to fill out this feature request! 12 | - type: textarea 13 | id: solution 14 | attributes: 15 | label: Describe the solution you would like. 16 | description: A clear and concise description of what you want to happen. 
17 | validations: 18 | required: true 19 | - type: textarea 20 | id: alternatives 21 | attributes: 22 | label: Describe alternatives you have considered 23 | description: A clear and concise description of any alternative solutions or features you have considered. 24 | placeholder: Other solutions include ... 25 | validations: 26 | required: true 27 | - type: textarea 28 | id: stats 29 | attributes: 30 | label: Statistical validity, if applicable 31 | description: Please link to a paper or reference that describes how this approach improves accuracy or statistical power. 32 | validations: 33 | required: false 34 | - type: textarea 35 | id: management 36 | attributes: 37 | label: Describe if this is needed for a management application 38 | description: If this feature is necessary to satisfy the Terms of Reference for your fishery management body, please explain why here and describe which management body. 39 | validations: 40 | required: false 41 | - type: textarea 42 | id: context 43 | attributes: 44 | label: Additional context 45 | description: Add any other context or screenshots about the feature request here. 46 | validations: 47 | required: false 48 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new_issue_template.yml: -------------------------------------------------------------------------------- 1 | name: SS3 Team Issues - New Template 2 | description: Issue template for SS3 team members ONLY 3 | title: "[Prefix for Issue Here]: Title of Issue Here" 4 | assignees: 5 | - Rick-Methot-NOAA 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | # **Main Issue Information** 11 | **Instructions** 12 | - Add a label prefix in the brackets "[ ]". This should be 1 - 2 words that helps categorize the issue (e.g. [Bug], [Feature Request], [Refactor]). 13 | - Add a title after the colon ":". This should be a short phrase that describes the issue. It should be suitable for display in the change log. 
14 | - In the sidebar, add labels that apply to this issue. 15 | - Fill out information below and update the summary with pertinent information from the comments/discussion. 16 | - type: textarea 17 | id: describe 18 | attributes: 19 | label: Issue description 20 | description: Please provide a description of the issue. 21 | validations: 22 | required: true 23 | - type: textarea 24 | id: summary 25 | attributes: 26 | label: Summary of issue history, discussion, and major aspects of code development 27 | description: A summary of pertinent information on this issue, including major aspects of code development, that occurred in the discussion or via email. This section can be added on to/edited at any time and should be completely summarized prior to closing by the person that created the issue. 28 | placeholder: | 29 | - This issue is related to issue #X. 30 | - Will need to create an example vignette for this feature. 31 | - Below is example input change. 32 | ``` 33 | #_MG_type method st_year end_year 34 | 1 1 2002 2003 # (M) NatMort 35 | 4 1 2016 2018 # RecrDist 36 | -9999 -1 -1 -1 37 | ``` 38 | validations: 39 | required: true 40 | - type: textarea 41 | id: tasks 42 | attributes: 43 | label: Items to do 44 | description: Please create a list of tasks to be completed for this issue with the checkboxes. 45 | value: "- [ ] First Item" 46 | validations: 47 | required: true 48 | - type: markdown 49 | attributes: 50 | value: | 51 | ___ 52 | 53 | # **Other Issue Considerations** 54 | - type: dropdown 55 | id: documentation 56 | attributes: 57 | label: Does documentation already exist in the SS3 User Manual? 58 | description: Please review the [SS3 User Manual](https://nmfs-ost.github.io/ss3-doc/SS330_User_Manual.html) and check the box within the text area that applies. 59 | options: 60 | - Yes, the link to the section in the SS3 User Manual is provided below. 61 | - Yes, but further documentation needs to be added and is provided below. 
62 | - No, the documentation that should be added to the SS3 User Manual pertaining to this issue is provided below. 63 | validations: 64 | required: true 65 | - type: textarea 66 | id: add-doc 67 | attributes: 68 | label: Documentation to add to the SS3 User Manual OR link to existing documentation 69 | description: Please add the text that needs to be added to the SS3 User Manual along with which section it should be added to. 70 | placeholder: | 71 | - Documentation can be found in section 8.4.2 of the SS3 User Manual [here](link) 72 | - The following text should be added at the end of the second paragraph in that section: "add text" 73 | validations: 74 | required: true 75 | - type: dropdown 76 | id: r4ss 77 | attributes: 78 | label: Are r4ss changes needed? 79 | description: Please check one of the boxes to indicate if r4ss changes are needed. 80 | options: 81 | - No, this issue doesn't require changes to r4ss 82 | - Yes, this issue requires changes to r4ss (if selecting this, please add iantaylor-NOAA as an Assignee in the panel to the right). 83 | - I don't know and would like an r4ss consult (if selecting this, please add iantaylor-NOAA as an Assignee in the panel to the right). 84 | validations: 85 | required: true 86 | - type: dropdown 87 | id: SSI 88 | attributes: 89 | label: Are SSI changes needed? 90 | description: Please check one of the boxes to indicate if SSI changes are needed. 91 | options: 92 | - No, this issue doesn't require changes to SSI 93 | - Yes, this issue requires changes to SSI (if selecting this, please add nschindler-noaa as an Assignee in the panel to the right). 94 | - I don't know and would like an SSI consult (if selecting this, please add nschindler-noaa as an Assignee in the panel to the right). 
95 | validations: 96 | required: true 97 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/refactor_request.yml: -------------------------------------------------------------------------------- 1 | name: Refactor 2 | description: Change the current structure of the code or workflows 3 | title: "[Refactor]: " 4 | labels: ["New_Request"] 5 | assignees: 6 | - Rick-Methot-NOAA 7 | body: 8 | - type: markdown 9 | attributes: 10 | value: | 11 | Thanks for taking the time to fill out this refactor request! 12 | - type: textarea 13 | id: describe 14 | attributes: 15 | label: Refactor request 16 | description: Describe what needs to be refactored and why 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: reproduce 21 | attributes: 22 | label: Expected behavior 23 | description: A clear and concise description of what you expected to happen. 24 | placeholder: I expect ... 25 | validations: 26 | required: true 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/ss3-release-template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: SS3 release checklist 3 | about: SS3 dev team only - Checklist for steps needed to release a version of SS3 4 | title: Release v3.30.[xx] checklist 5 | labels: request 6 | assignees: 7 | - Rick-Methot-NOAA 8 | - chantelwetzel-noaa 9 | - iantaylor-NOAA 10 | - kellijohnson-NOAA 11 | - shcaba 12 | - e-perl-NOAA 13 | --- 14 | 15 | # Release checklist 16 | 17 | 18 | 19 | ## General checklist before pre-release and release 20 | - [ ] SS3 testing and debugging completed (RM/IT) 21 | - [ ] Check artifact from the `call-build-ss3-warnings` GitHub action for useful warnings (RM/IT/EP) 22 | - [ ] r4ss updated (IT/EP) 23 | - [ ] [Change log project board](https://github.com/orgs/nmfs-ost/projects/11) updated with any issues labelled "change log" and check for any issues that 
should be labelled "change log" (RM) 24 | 25 | 30 | 31 | ## Checklist for before release 32 | - [ ] Make changes to SS3 if any bugs caught in prerelease (RM) 33 | - [ ] Code committed and tagged in repo as `v3.30.xx`, which will trigger a GHA to build the release executables (EP) (Instructions on [how to push a local tag to a remote](https://github.com/nmfs-ost/ss3-source-code/wiki/Stock-Synthesis:-practices-for-maintainers#how-to-push-a-local-tag-up-to-github)) 34 | - [ ] All exes added to GitHub releases as `v3.30.xx` (EP) (get exes in the artifacts of the GHA that built the release exes) 35 | - [ ] Run [release workflow](https://github.com/nmfs-ost/ss3-doc/actions/workflows/release.yml) (or [bug fix release workflow](https://github.com/nmfs-ost/ss3-doc/actions/workflows/release_bug_fix.yml) if the release is a bug fix). **Note that the branch protection rules must be briefly turned off to allow this workflow to run.** (EP) 36 | - [ ] Exe and .tpl archived on [Google drive](https://drive.google.com/drive/folders/1Gh_dXi8v3rqawpwn2N6yaaEXZPq6G2io) (EP) 37 | - [ ] Send out release announcement message to the [SS3 forum](https://groups.google.com/g/ss3-forum) (RM) 38 | - [ ] Add to release discussion on GitHub repository (EP) 39 | 40 | ## Checklist for after release 41 | - [ ] Update user-example models using [this github action](https://github.com/nmfs-ost/ss3-user-examples/blob/main/R/update_examples.R) and tag with new release number after updating (EP) 42 | - [ ] Update test models using [this github action](https://github.com/nmfs-ost/ss3-test-models/actions/workflows/update-ss3-models.yml) and tag with new release number after updating (EP) 43 | - [ ] Update executables in the [SAC tool](https://github.com/shcaba/SS-DL-tool) (also suggest updating the input files to the .ss_new files) (EP/JC) 44 | - [ ] Remove "resolved" tag and close all issues worked in the milestone for this release (RM) 45 | - [ ] Move unworked issues for the release milestone to the
next milestone (RM) 46 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Uses dependabot to check things 2 | version: 2 3 | updates: 4 | # Enable version updates for GitHub Actions 5 | - package-ecosystem: "github-actions" 6 | # The default location of the bot is `.github/workflows` 7 | directory: "/" 8 | schedule: 9 | interval: "weekly" 10 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 10 | 11 | ## Concisely describe what has been changed/addressed in the pull request. 12 | 13 | 14 | 15 | 16 | 17 | 18 | * Resolves issue # 19 | 20 | ## What tests have been done? 21 | ### Where are the relevant files? 22 | 23 | 24 | 25 | 26 | 27 | 28 | ### What tests/review still need to be done? 29 | 30 | 31 | 32 | ## Is there an input change for users to Stock Synthesis? 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 45 | 46 | ## Additional information (optional). 
47 | 48 | -------------------------------------------------------------------------------- /.github/workflows/add-exe-build-artifacts-to-PR.yml: -------------------------------------------------------------------------------- 1 | name: add-exe-build-artifacts-to-PR 2 | on: 3 | workflow_run: 4 | workflows: [build-ss3] 5 | types: [completed] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | artifacts-url-comments: 10 | name: add artifact links to pull request and related issues job 11 | runs-on: ubuntu-latest 12 | if: ${{ github.event.workflow_run.conclusion == 'success'}} 13 | steps: 14 | - name: add artifact links to PR and issues 15 | uses: tonyhallett/artifacts-url-comments@v1.1.0 16 | env: 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | with: 19 | prefix: 'Here are the successful executable builds from your PR:' 20 | format: name 21 | addTo: pullandissues 22 | -------------------------------------------------------------------------------- /.github/workflows/build-admb-and-ss3-from-source.yml: -------------------------------------------------------------------------------- 1 | # Builds admb from source and then builds the stock synthesis executables from source for testing and distribution. 2 | # for macOS 13 (couldn't get this to run on macOS 12), macOS latest (arm64 architecture), linux, Windows 3 | # Runs on a scheduled basis weekly to ensure that this workflow will work IF the build-ss3.yml workflow stops 4 | # working due to issues with the admb docker image. 
5 | name: build-admb-and-ss3-from-source 6 | 7 | on: 8 | workflow_dispatch: 9 | schedule: 10 | - cron: '0 15 * * 2' # every Tuesday at 3pm UTC (= 10am EST or 11am EDT Eastern time) 11 | jobs: 12 | build-admb-and-ss3-from-source: 13 | runs-on: ${{ matrix.config.os }} 14 | name: ${{ matrix.config.os }} 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | config: 19 | - {os: windows-latest} 20 | - {os: macos-latest} 21 | - {os: macos-13} 22 | - {os: ubuntu-latest} 23 | # Limit run time to 90 min to avoid wasting action minutes. 24 | # Compiling admb and ss3 on all operating systems takes ~30 min 25 | timeout-minutes: 90 26 | # Steps represent a sequence of tasks that will be executed as part of the job 27 | steps: 28 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 29 | - uses: actions/checkout@v4 30 | 31 | # Set up R 32 | - name: Set up R, specify rtools version and path for Windows 33 | uses: r-lib/actions/setup-r@v2 34 | with: 35 | r-version: 'release' 36 | rtools-version: '42' 37 | windows-path-include-rtools: TRUE 38 | update-rtools: TRUE 39 | 40 | # Checkout ADMB repository 41 | - name: Checkout admb 42 | uses: actions/checkout@v4 43 | with: 44 | repository: admb-project/admb 45 | path: admb 46 | ref: admb-13.2 47 | 48 | # Build ADMB for Windows 49 | - name: Build admb for Windows using rtools 42 50 | if: matrix.config.os == 'windows-latest' 51 | run: | 52 | cd admb 53 | make -j 4 54 | shell: cmd 55 | 56 | - name: Put admb in path, Windows 57 | if: matrix.config.os == 'windows-latest' 58 | run: | 59 | echo "D:\a\ss3-source-code\ss3-source-code\admb\build\admb\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append 60 | echo $env:GITHUB_PATH 61 | 62 | # Build ADMB for macOS 63 | - name: Clean, macOS 64 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 65 | run: cd admb && make clean 66 | 67 | - name: See where admb is, macOS 68 | if: matrix.config.os == 'macos-latest' || matrix.config.os ==
'macos-13' 69 | run: | 70 | ls 71 | cd admb && ls 72 | 73 | - name: Compile admb, macOS 74 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 75 | run: | 76 | cd admb && make -j 4 77 | 78 | # - name: See where admb is, mac 79 | # if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 80 | # run: | 81 | # cd admb && ls -l 82 | 83 | - name: Change permissions of admb and put in path, macOS 84 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 85 | run: | 86 | sudo mv admb /usr/local/bin 87 | sudo chmod 755 /usr/local/bin/admb 88 | echo "/usr/local/bin/admb" >> $GITHUB_PATH 89 | 90 | # Build ADMB for Linux 91 | - name: Update Ubuntu packages 92 | if: matrix.config.os == 'ubuntu-latest' 93 | run: sudo apt-get update 94 | 95 | - name: Change permissions of admb and put in path, linux 96 | if: matrix.config.os == 'ubuntu-latest' 97 | run: | 98 | cd admb && make clean 99 | num_cpus=`cat /proc/cpuinfo | grep processor | wc -l` 100 | num_cpus_minus1=$((num_cpus-1)) 101 | make -j $num_cpus_minus1 102 | ls -l 103 | chmod a+x admb 104 | sudo mv admb /usr/local/bin 105 | echo "/usr/local/bin/admb" >> $GITHUB_PATH 106 | 107 | # Fetch Tags 108 | - name: Fetch git tags 109 | run: | 110 | git fetch --tags 111 | git fetch --prune --unshallow || true 112 | 113 | - name: Get the last tag on Windows 114 | id: get-latest-tag-win 115 | if: matrix.config.os == 'windows-latest' 116 | run: | 117 | git tag 118 | $latest_tag = (git describe --abbrev=0 --tags) 119 | $latest_tag_commit = ( git rev-list -n 1 $latest_tag) 120 | $latest_commit = (git rev-list HEAD -n 1) 121 | echo "tag=${latest_tag}" >> $env:GITHUB_OUTPUT 122 | echo "tag_commit=${latest_tag_commit}" >> $env:GITHUB_OUTPUT 123 | echo "commit=${latest_commit}" >> $env:GITHUB_OUTPUT 124 | 125 | - name: Pull the last tag value to use in the Rscript on Windows 126 | id: get-version-win 127 | if: matrix.config.os == 'windows-latest' 128 | run: | 129 | Out-File -InputObject 
${{ steps.get-latest-tag-win.outputs.tag }} -FilePath .github/last_tag.txt 130 | Out-File -InputObject ${{ steps.get-latest-tag-win.outputs.tag_commit }} -FilePath .github/last_tag_commit.txt 131 | Out-File -InputObject ${{ steps.get-latest-tag-win.outputs.commit}} -FilePath .github/last_commit.txt 132 | 133 | - name: Get the last tag on unix (macOS and linux) 134 | id: get-latest-tag-unix 135 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' || matrix.config.os == 'ubuntu-latest' 136 | run: | 137 | git tag 138 | latest_tag=$(git describe --abbrev=0 --tags) 139 | latest_tag_commit=$(git rev-list -n 1 $latest_tag) 140 | latest_commit=$(git rev-list HEAD -n 1) 141 | echo "tag=${latest_tag}" >> $GITHUB_OUTPUT 142 | echo "tag_commit=${latest_tag_commit}" >> $GITHUB_OUTPUT 143 | echo "commit=${latest_commit}" >> $GITHUB_OUTPUT 144 | 145 | - name: Pull the last tag value to use in the Rscript on unix (macOS and linux) 146 | id: get-version-unix 147 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' || matrix.config.os == 'ubuntu-latest' 148 | run: | 149 | echo "${{ steps.get-latest-tag-unix.outputs.tag }}" > .github/last_tag.txt 150 | echo "${{ steps.get-latest-tag-unix.outputs.tag_commit }}" > .github/last_tag_commit.txt 151 | echo "${{ steps.get-latest-tag-unix.outputs.commit }}" > .github/last_commit.txt 152 | 153 | # R code to edit version info 154 | - name: Edit the version info for safe version using R code 155 | run: | 156 | # Get the version 157 | # get the last tag from the repository 158 | tag_label <- readLines(".github/last_tag.txt") 159 | # get commits from from the repository 160 | tag_commit <- readLines(".github/last_tag_commit.txt") 161 | last_commit <- readLines(".github/last_commit.txt") 162 | message("The tag_label is ", tag_label) 163 | if (substr(tag_label, start = 1, stop = 6) == "v3.30.") { 164 | ver_num_full <- strsplit(tag_label, split = "v3.30", fixed = TRUE)[[1]][2] 165 | ver_num <- 
strsplit(ver_num_full, split = ".", fixed = TRUE)[[1]][2] 166 | if(length(grep("-", ver_num)) > 0) { 167 | ver_num <- strsplit(ver_num, split = "-", fixed = TRUE)[[1]][1] 168 | } 169 | } else { 170 | ver_num <- "unknown" 171 | } 172 | message("tag commit: ", tag_commit) 173 | message("last commit: ", last_commit) 174 | if(tag_commit == last_commit) { 175 | # figure out the version using the tag 176 | if(ver_num == "unknown") { 177 | fix_ver_num <- "unknown" 178 | } else { 179 | ver_num_full_split <- strsplit(ver_num_full, split = ".", fixed = TRUE)[[1]] 180 | if(length(ver_num_full_split) == 3) { 181 | fix_ver_num <- ver_num_full_split[3] 182 | } else if(length(ver_num_full_split) == 2) { 183 | if(length(grep("-", ver_num_full_split, fixed = TRUE)) > 0) { 184 | fix_ver_num <- strsplit(ver_num_full_split[2], split = "-", fixed = TRUE)[[1]][2] 185 | fix_ver_num <- paste0("00-", fix_ver_num) 186 | } else { 187 | fix_ver_num <- "00" 188 | } 189 | } else { 190 | fix_ver_num <- "unknown" 191 | } 192 | } 193 | } else { 194 | fix_ver_num <- "beta: not an official version of SS" 195 | } 196 | message("The minor version label is ", ver_num) 197 | message("The patch version label is ", fix_ver_num) 198 | 199 | # add version numbers to files 200 | # safe file 201 | ver_info <- readLines("SS_versioninfo_330safe.tpl") 202 | ver_info_start <- grep('Create string with version info', ver_info, fixed = TRUE) 203 | ver_info[ver_info_start + 1] <- 204 | gsub('\\.xx', paste0('\\.', ver_num), ver_info[ver_info_start + 1]) 205 | ver_info[ver_info_start + 1] <- 206 | gsub('\\.yy', paste0('\\.', fix_ver_num), ver_info[ver_info_start+1]) 207 | writeLines(ver_info, "SS_versioninfo_330safe.tpl") 208 | #opt file 209 | ver_info <- readLines("SS_versioninfo_330opt.tpl") 210 | ver_info_start <- grep('Create string with version info', ver_info, fixed = TRUE) 211 | ver_info[ver_info_start + 1] <- 212 | gsub('\\.xx', paste0('\\.', ver_num), ver_info[ver_info_start + 1]) 213 | ver_info[ver_info_start 
+ 1] <- 214 | gsub('\\.yy', paste0('\\.', fix_ver_num), ver_info[ver_info_start+1]) 215 | writeLines(ver_info, "SS_versioninfo_330opt.tpl") 216 | shell: Rscript {0} 217 | 218 | # Build SS3 for Windows 219 | - name: Build stock synthesis for Windows 220 | if: matrix.config.os == 'windows-latest' 221 | run: | 222 | cd Compile 223 | ./Make_SS_fast.bat || true 224 | ./Make_SS_safe.bat || true 225 | cd .. 226 | 227 | - name: Move exes to a new folder on windows 228 | if: matrix.config.os == 'windows-latest' 229 | run: | 230 | mkdir SS330 231 | chmod 777 SS330 232 | mv Compile/ss3.exe SS330/ 233 | mv Compile/ss3_opt.exe SS330/ 234 | mv SS330/ss3.exe SS330/ss3_win.exe 235 | mv SS330/ss3_opt.exe SS330/ss3_opt_win.exe 236 | 237 | # Build SS3 for macOS 238 | - name: Build stock synthesis for macOS 239 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 240 | run: | 241 | rm -rf SS330 242 | rm -rf ss3_osx.tar 243 | mkdir SS330 244 | chmod 777 SS330 245 | /bin/bash ./Make_SS_330_new.sh -b SS330 246 | /bin/bash ./Make_SS_330_new.sh -b SS330 -o 247 | 248 | - name: Verify binary on macOS 249 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 250 | run: | 251 | shasum -a 256 SS330/ss3 252 | shasum -a 256 SS330/ss3_opt 253 | 254 | - name: Delete unnecessary files and change exe names on macOS 255 | if: matrix.config.os == 'macos-latest' || matrix.config.os == 'macos-13' 256 | run: | 257 | cd SS330 258 | rm *.obj *.htp *.cpp ss3_opt.tpl 259 | mv ss3 ss3_osx 260 | mv ss3_opt ss3_opt_osx 261 | 262 | # Build SS3 for Linux 263 | - name: Build stock synthesis for linux with p flag 264 | if: matrix.config.os == 'ubuntu-latest' 265 | run: | 266 | rm -rf SS330 267 | rm -rf ss3_osx.tar 268 | mkdir SS330 269 | chmod 777 SS330 270 | /bin/bash ./Make_SS_330_new.sh -b SS330 -p 271 | /bin/bash ./Make_SS_330_new.sh -b SS330 -o -p 272 | 273 | - name: Verify binary on linux 274 | if: matrix.config.os == 'ubuntu-latest' 275 | run: | 276 | sha256sum 
SS330/ss3 277 | sha256sum SS330/ss3_opt 278 | 279 | - name: Delete unneeded files and change exe names on linux 280 | if: matrix.config.os == 'ubuntu-latest' 281 | run: | 282 | cd SS330 283 | rm *.obj *.htp *.cpp ss3_opt.tpl ss3.tpl 284 | mv ss3 ss3_linux 285 | mv ss3_opt ss3_opt_linux 286 | 287 | # Archive Binaries 288 | - name: Archive binaries 289 | if: success() 290 | uses: actions/upload-artifact@main 291 | with: 292 | name: ss3-${{ matrix.config.os }} 293 | path: SS330/ 294 | -------------------------------------------------------------------------------- /.github/workflows/build-ss3-warnings.yml: -------------------------------------------------------------------------------- 1 | # Build stock synthesis on linux with all c++ compiler warnings, and fail job if the 2 | # number of warnings has increased using the warnings_ss_ref.txt file under 3 | # workflows > reference_files. 4 | # Only runs on a pull request and a push to main if the tpl files have changed 5 | name: build-ss3-warnings 6 | 7 | # Controls when the action will run. 
8 | on: 9 | workflow_dispatch: 10 | push: 11 | paths: 12 | - '**.tpl' 13 | branches: 14 | - main 15 | pull_request: 16 | types: ['opened', 'edited', 'reopened', 'synchronize', 'ready_for_review'] 17 | paths: 18 | - '**.tpl' 19 | branches: 20 | - main 21 | 22 | jobs: 23 | build-warnings: 24 | if: github.event.pull_request.draft == false 25 | runs-on: ubuntu-latest 26 | env: 27 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 28 | RSPM: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest" 29 | 30 | steps: 31 | - name: Checkout ss3 repo 32 | uses: actions/checkout@v4 33 | 34 | - name: Update Ubuntu packages 35 | run: sudo apt-get update 36 | 37 | - name: Setup R 38 | uses: r-lib/actions/setup-r@v2 39 | 40 | # - name: Get admb and put in path, linux 41 | # run: | 42 | # wget https://github.com/admb-project/admb/releases/download/admb-13.1/admb-13.1-linux.zip 43 | # sudo unzip admb-13.1-linux.zip -d /usr/local/bin 44 | # sudo chmod 755 /usr/local/bin/admb-13.1/bin/admb 45 | # echo "/usr/local/bin/admb-13.1/bin" >> $GITHUB_PATH 46 | 47 | - name: Build stock synthesis with warnings using admb docker image and display in console 48 | run: | 49 | rm -rf SS330 50 | rm -rf ss3_osx.tar 51 | mkdir SS330 52 | chmod 777 SS330 53 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 -w 54 | 55 | - name: Build stock synthesis with warnings using admb docker image again to save to file 56 | run: | 57 | rm -rf SS330 58 | rm -rf ss3_osx.tar 59 | mkdir SS330 60 | chmod 777 SS330 61 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 -w &> warnings.txt 62 | 63 | # Runs a set of commands using the runners shell 64 | - name: Use R to parse warnings output 65 | run: | 66 | txt <- readLines("warnings.txt", encoding = "UTF-8") 67 | warn_line <- grep(pattern = "g++ -c -std=c++17 -O3 -Wall -Wextra -D_USE_MATH_DEFINES -DUSE_ADMB_CONTRIBS", x = txt, fixed = TRUE) 68 | end_warn_line <- grep(pattern = "*** Linking: ss3.obj ", x = txt, fixed = TRUE) 69 | if (length(warn_line) == 1 & 
length(end_warn_line) == 1) { 70 |             txt <- txt[(warn_line+3):(end_warn_line-1)] 71 |             rm_warn_start_lines <- grep(pattern = "/usr/local/admb/include/admodel.h: ", x = txt, fixed = TRUE, invert = TRUE) 72 |             txt <- txt[rm_warn_start_lines] 73 |             n_errors <- length(grep(pattern = "^ [0-9]", x = txt)) 74 |             message("There are ", n_errors, " warning messages related to SS3.") 75 |             write.table(n_errors, "n_warn.txt") 76 |           } 77 |           writeLines(txt, "warnings_ss.txt") 78 |           # warn_line <- grep(pattern = "compiling a second time to get warnings", x = txt, fixed = TRUE) 79 |           # txt <- txt[(warn_line+1):length(txt)] 80 |         shell: Rscript {0} 81 | 82 |       - name: Print warnings 83 |         run: cat warnings_ss.txt 84 | 85 |       - name: Determine if fails/passes based on the number of warnings 86 |         run: | 87 |           ref <- readLines(".github/workflows/reference_files/warnings_ss_ref.txt") 88 |           n_warn <- read.table("n_warn.txt") 89 |           n_warn <- as.integer(n_warn[1,1]) 90 |           if (n_warn > length(ref)) { 91 |             stop("Increased number of warnings compared to reference warning file") 92 |           } else { 93 |             # n_warn doesn't take into account the "In file included from ss.cpp:7:" 94 |             # so add 2 to add those lines back in 95 |             if (n_warn+2 < length(ref)){ 96 |               stop("Number of warnings decreased compared to reference warning file") 97 |             } else { 98 |               message("Acceptable number of warnings") 99 |             } 100 |           } 101 |         shell: Rscript {0} 102 | 103 |       - name: Archive warnings text file 104 |         if: always() 105 |         uses: actions/upload-artifact@main 106 |         with: 107 |           name: 'warnings_ss.txt' 108 |           path: warnings_ss.txt -------------------------------------------------------------------------------- /.github/workflows/build-ss3.yml: -------------------------------------------------------------------------------- 1 | # Builds the stock synthesis executables for testing and distribution using the admb docker image. 
2 | # for macOS 12, and macos-14 (arm64 architecture), linux, Windows 3 | # Runs on every push and PR (even draft PRs) 4 | name: build-ss3 5 | 6 | on: 7 | push: 8 | paths: 9 | - '**.tpl' 10 | - '**.sh' 11 | workflow_dispatch: 12 | 13 | 14 | jobs: 15 | build-ss3: 16 | runs-on: ${{ matrix.config.os }} 17 | name: ${{ matrix.config.os }} 18 | strategy: 19 | fail-fast: false 20 | matrix: 21 | config: 22 | - {os: windows-latest} 23 | - {os: macos-13} 24 | - {os: macos-latest} 25 | - {os: ubuntu-latest} 26 | # Limit run time to 90 min to avoid wasting action minutes. 27 | # was set to 15 and then 30 minutes previously, but compiling admb 28 | # on mac took too long 29 | timeout-minutes: 90 30 | # Steps represent a sequence of tasks that will be executed as part of the job 31 | steps: 32 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 33 | - name: checkout ss3 repo 34 | uses: actions/checkout@v4 35 | with: 36 | repository: 'nmfs-ost/ss3-source-code' 37 | 38 | # Checkout ADMB repository 39 | - name: Checkout admb for macos-13 build 40 | uses: actions/checkout@v4 41 | with: 42 | repository: admb-project/admb 43 | path: admb 44 | ref: admb-13.2 45 | 46 | - name: Update Ubuntu packages 47 | if: matrix.config.os == 'ubuntu-latest' 48 | run: sudo apt-get update 49 | 50 | - name: Setup R 51 | uses: r-lib/actions/setup-r@v2 52 | 53 | - name: Fetch git tags 54 | run: | 55 | git fetch --tags 56 | git fetch --prune --unshallow || true 57 | 58 | # Build ADMB for macos-13 59 | - name: Build ADMB for macos-13 and put in path 60 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 61 | run: | 62 | cd admb && make clean 63 | - name: See where admb is 64 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 65 | run: | 66 | cd admb && ls 67 | - name: Compile admb, macOS 68 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 69 | run: | 70 | cd admb && make -j 4 71 | - name: Change 
permissions of admb and put in path, macOS 72 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 73 | run: | 74 | sudo mv admb /usr/local/bin 75 | sudo chmod 755 /usr/local/bin/admb 76 | echo "/usr/local/bin/admb" >> $GITHUB_PATH 77 | 78 | - name: Get the last tag on Windows 79 | id: get-latest-tag-win 80 | if: matrix.config.os == 'windows-latest' 81 | run: | 82 | git tag 83 | $latest_tag = (git describe --abbrev=0 --tags) 84 | $latest_tag_commit = ( git rev-list -n 1 $latest_tag) 85 | $latest_commit = (git rev-list HEAD -n 1) 86 | echo "tag=${latest_tag}" >> $env:GITHUB_OUTPUT 87 | echo "tag_commit=${latest_tag_commit}" >> $env:GITHUB_OUTPUT 88 | echo "commit=${latest_commit}" >> $env:GITHUB_OUTPUT 89 | 90 | - name: Pull the last tag value to use in the Rscript on Windows 91 | id: get-version-win 92 | if: matrix.config.os == 'windows-latest' 93 | run: | 94 | Out-File -InputObject ${{ steps.get-latest-tag-win.outputs.tag }} -FilePath .github/last_tag.txt 95 | Out-File -InputObject ${{ steps.get-latest-tag-win.outputs.tag_commit }} -FilePath .github/last_tag_commit.txt 96 | Out-File -InputObject ${{ steps.get-latest-tag-win.outputs.commit}} -FilePath .github/last_commit.txt 97 | 98 | - name: Get the last tag on unix (macOS and linux) 99 | id: get-latest-tag-unix 100 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' || matrix.config.os == 'ubuntu-latest' 101 | run: | 102 | git tag 103 | latest_tag=$(git describe --abbrev=0 --tags) 104 | latest_tag_commit=$(git rev-list -n 1 $latest_tag) 105 | latest_commit=$(git rev-list HEAD -n 1) 106 | echo "tag=${latest_tag}" >> $GITHUB_OUTPUT 107 | echo "tag_commit=${latest_tag_commit}" >> $GITHUB_OUTPUT 108 | echo "commit=${latest_commit}" >> $GITHUB_OUTPUT 109 | 110 | - name: Pull the last tag value to use in the Rscript on unix (macOS and linux) 111 | id: get-version-unix 112 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' || matrix.config.os == 
'ubuntu-latest' 113 | run: | 114 | echo "${{ steps.get-latest-tag-unix.outputs.tag }}" > .github/last_tag.txt 115 | echo "${{ steps.get-latest-tag-unix.outputs.tag_commit }}" > .github/last_tag_commit.txt 116 | echo "${{ steps.get-latest-tag-unix.outputs.commit }}" > .github/last_commit.txt 117 | 118 | - name: Edit the version info for safe version using R code 119 | run: | 120 | # Get the version 121 | # get the last tag from the repository 122 | tag_label <- readLines(".github/last_tag.txt") 123 | # get commits from from the repository 124 | tag_commit <- readLines(".github/last_tag_commit.txt") 125 | last_commit <- readLines(".github/last_commit.txt") 126 | message("The tag_label is ", tag_label) 127 | if (substr(tag_label, start = 1, stop = 6) == "v3.30.") { 128 | ver_num_full <- strsplit(tag_label, split = "v3.30", fixed = TRUE)[[1]][2] 129 | ver_num <- strsplit(ver_num_full, split = ".", fixed = TRUE)[[1]][2] 130 | if(length(grep("-", ver_num)) > 0) { 131 | ver_num <- strsplit(ver_num, split = "-", fixed = TRUE)[[1]][1] 132 | } 133 | } else { 134 | ver_num <- "unknown" 135 | } 136 | message("tag commit: ", tag_commit) 137 | message("last commit: ", last_commit) 138 | if(tag_commit == last_commit) { 139 | # figure out the version using the tag 140 | if(ver_num == "unknown") { 141 | fix_ver_num <- "unknown" 142 | } else { 143 | ver_num_full_split <- strsplit(ver_num_full, split = ".", fixed = TRUE)[[1]] 144 | if(length(ver_num_full_split) == 3) { 145 | fix_ver_num <- ver_num_full_split[3] 146 | } else if(length(ver_num_full_split) == 2) { 147 | if(length(grep("-", ver_num_full_split, fixed = TRUE)) > 0) { 148 | fix_ver_num <- strsplit(ver_num_full_split[2], split = "-", fixed = TRUE)[[1]][2] 149 | fix_ver_num <- paste0("00-", fix_ver_num) 150 | } else { 151 | fix_ver_num <- "00" 152 | } 153 | } else { 154 | fix_ver_num <- "unknown" 155 | } 156 | } 157 | } else { 158 | fix_ver_num <- "beta: not an official version of SS" 159 | } 160 | message("The minor version 
label is ", ver_num) 161 | message("The patch version label is ", fix_ver_num) 162 | 163 | # add version numbers to files 164 | # safe file 165 | ver_info <- readLines("SS_versioninfo_330safe.tpl") 166 | ver_info_start <- grep('Create string with version info', ver_info, fixed = TRUE) 167 | ver_info[ver_info_start + 1] <- 168 | gsub('\\.xx', paste0('\\.', ver_num), ver_info[ver_info_start + 1]) 169 | ver_info[ver_info_start + 1] <- 170 | gsub('\\.yy', paste0('\\.', fix_ver_num), ver_info[ver_info_start+1]) 171 | writeLines(ver_info, "SS_versioninfo_330safe.tpl") 172 | #opt file 173 | ver_info <- readLines("SS_versioninfo_330opt.tpl") 174 | ver_info_start <- grep('Create string with version info', ver_info, fixed = TRUE) 175 | ver_info[ver_info_start + 1] <- 176 | gsub('\\.xx', paste0('\\.', ver_num), ver_info[ver_info_start + 1]) 177 | ver_info[ver_info_start + 1] <- 178 | gsub('\\.yy', paste0('\\.', fix_ver_num), ver_info[ver_info_start+1]) 179 | writeLines(ver_info, "SS_versioninfo_330opt.tpl") 180 | shell: Rscript {0} 181 | 182 | - name: Build stock synthesis for windows with admb docker image 183 | if: matrix.config.os == 'windows-latest' 184 | run: | 185 | cd Compile 186 | ./Make_SS_fast.bat || true 187 | ./Make_SS_safe.bat || true 188 | cd .. 
189 | 190 | - name: Move exes to a new folder on windows 191 | if: matrix.config.os == 'windows-latest' 192 | run: | 193 | mkdir SS330 194 | chmod 777 SS330 195 | mv Compile/ss3.exe SS330/ 196 | mv Compile/ss3_opt.exe SS330/ 197 | mv SS330/ss3.exe SS330/ss3_win.exe 198 | mv SS330/ss3_opt.exe SS330/ss3_opt_win.exe 199 | 200 | - name: Build stock synthesis for mac with admb from source 201 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 202 | run: | 203 | rm -rf SS330 204 | rm -rf ss3_osx.tar 205 | mkdir SS330 206 | chmod 777 SS330 207 | /bin/bash ./Make_SS_330_new.sh -b SS330 208 | /bin/bash ./Make_SS_330_new.sh -b SS330 -o 209 | 210 | # - name: Build stock synthesis for mac m2 with admb docker image 211 | # if: matrix.config.os == 'macos-latest' 212 | # run: | 213 | # brew update 214 | # brew install docker 215 | # brew install --head colima 216 | # colima start --arch x86_64 217 | # docker pull johnoel/admb-13.2:linux 218 | 219 | # rm -rf SS330 220 | # rm -rf ss3_osx.tar 221 | # mkdir SS330 222 | # chmod 777 SS330 223 | # /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 224 | # /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 -o 225 | 226 | - name: Verify binary on mac 227 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 228 | run: | 229 | shasum -a 256 SS330/ss3 230 | shasum -a 256 SS330/ss3_opt 231 | 232 | - name: Delete unneeded files and change exe names on mac 233 | if: matrix.config.os == 'macos-13' || matrix.config.os == 'macos-latest' 234 | run: | 235 | cd SS330 236 | rm *.obj *.htp *.cpp ss3_opt.tpl ss3.tpl 237 | mv ss3 ss3_osx 238 | mv ss3_opt ss3_opt_osx 239 | 240 | - name: Build stock synthesis for linux with p flag and admb docker image 241 | if: matrix.config.os == 'ubuntu-latest' 242 | run: | 243 | rm -rf SS330 244 | rm -rf ss3_osx.tar 245 | mkdir SS330 246 | chmod 777 SS330 247 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 -p 248 | /bin/bash ./Make_SS_330_new.sh --admb docker 
-b SS330 -o -p 249 | 250 | - name: Verify binary on linux 251 | if: matrix.config.os == 'ubuntu-latest' 252 | run: | 253 | sha256sum SS330/ss3 254 | sha256sum SS330/ss3_opt 255 | 256 | - name: Delete unneeded files and change exe names on linux 257 | if: matrix.config.os == 'ubuntu-latest' 258 | run: | 259 | cd SS330 260 | rm *.obj *.htp *.cpp ss3_opt.tpl ss3.tpl 261 | mv ss3 ss3_linux 262 | mv ss3_opt ss3_opt_linux 263 | 264 | - name: Archive binaries 265 | if: success() 266 | uses: actions/upload-artifact@main 267 | with: 268 | name: ss3-${{ matrix.config.os }} 269 | path: SS330/ 270 | -------------------------------------------------------------------------------- /.github/workflows/email_when_close_pr.yml: -------------------------------------------------------------------------------- 1 | name: email_when_close_pr 2 | 3 | on: 4 | pull_request: 5 | types: [ closed ] 6 | 7 | jobs: 8 | email_when_close_pr: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Get Pull Request Number 12 | run: | 13 | echo github.event.pull_request.number = ${{ github.event.pull_request.number }} 14 | echo github.event.pull_request.html_url = ${{ github.event.pull_request.html_url }} 15 | env: 16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 17 | 18 | - name: Get issues 19 | id: get-issues 20 | uses: mondeja/pr-linked-issues-action@v2 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | 24 | - name: Print linked issue numbers 25 | run: echo ${{ steps.get-issues.outputs.issues }} 26 | 27 | - name: get email 28 | id: test 29 | uses: evvanErb/get-github-email-by-username-action@v2.0 30 | with: 31 | github-username: ${{ github.triggering_actor }} 32 | 33 | # send an email; cannot figure out how to get correct url to PR even just pasting in the link 34 | - name: send email 35 | uses: cinotify/github-action@main 36 | if: ${{ steps.get-issues.outputs.issues != null }} 37 | with: 38 | to: ${{ steps.test.outputs.email }} 39 | subject: 'PR in stock synthesis repository is closed; 
reminder to fill out info in associated issue' 40 | body: "PR #${{ github.event.pull_request.number }} in the nmfs-ost/ss3-source-code repository has been closed, please remember to move any pertinent information to the following linked issue(s): #${{ steps.get-issues.outputs.issues }}" 41 | -------------------------------------------------------------------------------- /.github/workflows/reference_files/warnings_ss_ref.txt: -------------------------------------------------------------------------------- 1 | /usr/local/admb/include/admodel.h:1915:38: warning: unused parameter 'gradients' [-Wunused-parameter] 2 | 1915 | virtual void report(const dvector& gradients){;}; 3 | | ~~~~~~~~~~~~~~~^~~~~~~~~ 4 | /usr/local/admb/include/admodel.h:2501:43: warning: unused parameter 'ofs' [-Wunused-parameter] 5 | 2501 | virtual void save_value(const ofstream& ofs, int prec,const dvector&, 6 | | ~~~~~~~~~~~~~~~~^~~ 7 | /usr/local/admb/include/admodel.h:2501:52: warning: unused parameter 'prec' [-Wunused-parameter] 8 | 2501 | virtual void save_value(const ofstream& ofs, int prec,const dvector&, 9 | | ~~~~^~~~ 10 | /usr/local/admb/include/admodel.h:2502:10: warning: unused parameter 'offset' [-Wunused-parameter] 11 | 2502 | int& offset){} 12 | | ~~~~~^~~~~~ 13 | -------------------------------------------------------------------------------- /.github/workflows/run-ss3-bootstrap.yml: -------------------------------------------------------------------------------- 1 | # Build SS3 and run Simple_with_DM_sizefreq model from test models 2 | # repo to make sure that ss3 makes bootstrap files and that all 3 | # .ss_new files were produced. 4 | # Only runs on a pull request and a push to main if the tpl files have changed. 
5 | name: run-ss3-bootstrap-files 6 | on: 7 | workflow_dispatch: 8 | push: 9 | paths: 10 | - '**.tpl' 11 | branches: 12 | - main 13 | pull_request: 14 | types: ['opened', 'edited', 'reopened', 'synchronize', 'ready_for_review'] 15 | paths: 16 | - '**.tpl' 17 | branches: 18 | - main 19 | 20 | jobs: 21 | run-ss3-bootstrap-files: 22 | if: github.event.pull_request.draft == false 23 | runs-on: ubuntu-latest 24 | env: 25 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 26 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 27 | R_KEEP_PKG_SOURCE: yes 28 | 29 | steps: 30 | - name: Checkout ss3 repo 31 | uses: actions/checkout@v4 32 | 33 | - name: Checkout test models repo 34 | uses: actions/checkout@v4 35 | with: 36 | repository: 'nmfs-ost/ss3-test-models' 37 | path: test-models-repo 38 | 39 | - name: Install libcurl & other necessary packages 40 | run: | 41 | sudo add-apt-repository ppa:ubuntu-toolchain-r/test 42 | sudo apt-get update 43 | sudo apt-get install --only-upgrade libstdc++6 44 | sudo apt-get install -y libcurl4-openssl-dev 45 | sudo apt-get install -y libfontconfig1-dev 46 | sudo apt-get install -y libharfbuzz-dev libfribidi-dev 47 | 48 | - name: Setup R 49 | uses: r-lib/actions/setup-r@v2 50 | 51 | # - name: Get admb and put in path, linux 52 | # run: | 53 | # wget https://github.com/admb-project/admb/releases/download/admb-13.1/admb-13.1-linux.zip 54 | # sudo unzip admb-13.1-linux.zip -d /usr/local/bin 55 | # sudo chmod 755 /usr/local/bin/admb-13.1/bin/admb 56 | # echo "/usr/local/bin/admb-13.1/bin" >> $GITHUB_PATH 57 | 58 | # - name: Build stock synthesis 59 | # run: | 60 | # rm -rf SS330 61 | # mkdir SS330 62 | # /bin/bash ./Make_SS_330_new.sh -b SS330 63 | 64 | - name: Build stock synthesis with admb docker image 65 | run: | 66 | rm -rf SS330 67 | rm -rf ss3_osx.tar 68 | mkdir SS330 69 | chmod 777 SS330 70 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 71 | 72 | - name: Move exes, scripts to needed locations 73 | run: | 74 | mv test-models-repo/models 
test-models-repo/model_runs 75 | mv SS330/ss3 test-models-repo/model_runs/Simple_with_DM_sizefreq/ss3 76 | 77 | - name: Change permissions on ss3 exes 78 | run: sudo chmod a+x test-models-repo/model_runs/Simple_with_DM_sizefreq/ss3 79 | 80 | - name: Download R packages 81 | run: Rscript -e 'install.packages(c("remotes", "purrr", "arsenal", "utils"))' 82 | 83 | - name: Install r4ss 84 | run: Rscript -e 'remotes::install_github("r4ss/r4ss")' 85 | 86 | - name: Ensure that ss3 produces bootstrap files 87 | run: | 88 | # Use Simple_with_DM_sizefreq model to test bootstrap files 89 | boot_mod_dir <- grep("Simple_with_DM_sizefreq", list.dirs(file.path("test-models-repo", "model_runs"), full.names = TRUE, recursive = FALSE), value = TRUE) 90 | 91 | # Change to have model output 8 bootstrap files and run model 92 | boot_files <- r4ss::SS_read(dir = boot_mod_dir) 93 | boot_files$start$N_bootstraps <- 10 94 | r4ss::SS_write(boot_files, dir = boot_mod_dir, overwrite = TRUE) 95 | r4ss::run(dir = boot_mod_dir, exe = file.path(getwd(), "test-models-repo", "model_runs", "Simple_with_DM_sizefreq", "ss3")) 96 | 97 | bootstrap_length <- length(list.files(boot_mod_dir, pattern = "data_boot")) 98 | 99 | if(bootstrap_length != (boot_files$start$N_bootstraps - 2)){ 100 | stop("Did not produce the amount of bootstrap files specified.") 101 | } else { 102 | message("All bootstrap files produced.") 103 | } 104 | 105 | # Make sure values in echo and bootstrap files are different 106 | dat <- r4ss::SS_readdat(file = file.path(boot_mod_dir,"data_echo.ss_new")) 107 | boot1 <- r4ss::SS_readdat(file = file.path(boot_mod_dir,"data_boot_001.ss")) 108 | boot2 <- r4ss::SS_readdat(file = file.path(boot_mod_dir,"data_boot_002.ss")) 109 | 110 | # Unlist sizefreq data 111 | dat_sizefreq <- purrr::map_df(dat$sizefreq_data_list, ~.x) 112 | boot1_sizefreq <- purrr::map_df(boot1$sizefreq_data_list, ~.x) 113 | boot2_sizefreq <- purrr::map_df(boot2$sizefreq_data_list, ~.x) 114 | 115 | # Compare data and 
bootstrap files 116 |           df <- data.frame( 117 |             var = c("lencomp", "lencomp","catch","catch","CPUE","CPUE", "agecomp","agecomp","sizefreq","sizefreq"), 118 |             data_compare = rep(c("dat_boot1", "boot1_boot2"), 5), 119 |             n_diff = c( 120 |               sum(summary(arsenal::comparedf(dat$lencomp, boot1$lencomp))$diffs.byvar.table$n), 121 |               sum(summary(arsenal::comparedf(boot1$lencomp, boot2$lencomp))$diffs.byvar.table$n), 122 |               length(summary(arsenal::comparedf(dat$catch, boot1$catch, int.as.num = TRUE))$diffs.table$row.x), 123 |               length(summary(arsenal::comparedf(boot1$catch, boot2$catch, int.as.num = TRUE))$diffs.table$row.x), 124 |               length(summary(arsenal::comparedf(dat$CPUE, boot1$CPUE, int.as.num = TRUE))$diffs.table$row.x), 125 |               length(summary(arsenal::comparedf(boot1$CPUE, boot2$CPUE, int.as.num = TRUE))$diffs.table$row.x), 126 |               sum(summary(arsenal::comparedf(dat$agecomp, boot1$agecomp))$diffs.byvar.table$n), 127 |               sum(summary(arsenal::comparedf(boot1$agecomp, boot2$agecomp))$diffs.byvar.table$n), 128 |               sum(summary(arsenal::comparedf(dat_sizefreq, boot1_sizefreq))$diffs.byvar.table$n), 129 |               sum(summary(arsenal::comparedf(boot1_sizefreq, boot2_sizefreq))$diffs.byvar.table$n) 130 |             ) 131 |           ) 132 | 133 |           df_error <- df |> 134 |             dplyr::filter(dplyr::if_any(dplyr::where(is.numeric), ~ .x < 20)) 135 | 136 |           print_and_capture <- function(x){ 137 |             paste(utils::capture.output(print(x)), collapse = "\n") 138 |           } 139 | 140 |           # Error if not enough changes made in bootstrap files 141 |           if(nrow(df_error) > 0){ 142 |             stop("Bootstrap data produced less than 20 changes in a data type, see printed table below.\n", print_and_capture(df_error)) 143 |           } else { 144 |             message("Bootstrap data types all have > 20 changes") 145 |           } 146 | 147 |           # Check that the model produced all .ss_new files and that they have correct content in them 148 |           ss_new_files <- list.files(boot_mod_dir, pattern = ".ss_new") 149 |           ss_new_validate <- c("control.ss_new", "data_echo.ss_new", "forecast.ss_new", "starter.ss_new", 
"wtatage.ss_new") 150 | 151 |           new_dif <- setdiff(ss_new_validate, ss_new_files) 152 |           if(length(new_dif) > 0){ 153 |             stop("Not all .ss_new files were produced") 154 |           } else { 155 |             message("All .ss_new files produced") 156 |           } 157 | 158 |           ss_new_length <- purrr::map(ss_new_files, ~length(readLines(file.path(boot_mod_dir, .x)))) 159 |           ss_new_too_short <- any(ss_new_length |> purrr::map_lgl(~.x < 10)) 160 |           if(ss_new_too_short == TRUE){ 161 |             stop("Error producing all lines in one or more .ss_new files") 162 |           } else { 163 |             message("All .ss_new files seem to be appropriate length") 164 |           } 165 |         shell: Rscript {0} 166 | -------------------------------------------------------------------------------- /.github/workflows/run-ss3-mcmc.yml: -------------------------------------------------------------------------------- 1 | # Check that MCMC using the NUTS algorithm in ADMB is reproducible with SS3 using the simple model. 2 | # Developed with Cole Monnahan. 3 | # Only runs on a pull request and a push to main if the tpl files have changed. 
4 | name: run-ss3-mcmc 5 | on: 6 | workflow_dispatch: 7 | push: 8 | paths: 9 | - '**.tpl' 10 | branches: 11 | - main 12 | pull_request: 13 | types: ['opened', 'edited', 'reopened', 'synchronize', 'ready_for_review'] 14 | paths: 15 | - '**.tpl' 16 | branches: 17 | - main 18 | 19 | # Check MCMC works 20 | jobs: 21 | run-ss3-no-est: 22 | if: github.event.pull_request.draft == false 23 | runs-on: ubuntu-latest 24 | env: 25 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 26 | RSPM: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest" 27 | 28 | steps: 29 | - name: Checkout ss3 repo 30 | uses: actions/checkout@v4 31 | 32 | - name: Checkout models repo 33 | uses: actions/checkout@v4 34 | with: 35 | repository: 'nmfs-ost/ss3-test-models' 36 | path: test-models-repo 37 | 38 | - name: Update Ubuntu packages 39 | run: | 40 | sudo add-apt-repository ppa:ubuntu-toolchain-r/test 41 | sudo apt-get update 42 | sudo apt-get install --only-upgrade libstdc++6 43 | 44 | - name: Setup R 45 | uses: r-lib/actions/setup-r@v2 46 | 47 | # - name: Get admb and put in path, linux 48 | # run: | 49 | # wget https://github.com/admb-project/admb/releases/download/admb-13.1/admb-13.1-linux.zip 50 | # sudo unzip admb-13.1-linux.zip -d /usr/local/bin 51 | # sudo chmod 755 /usr/local/bin/admb-13.1/bin/admb 52 | # echo "/usr/local/bin/admb-13.1/bin" >> $GITHUB_PATH 53 | 54 | # - name: Build stock synthesis 55 | # run: | 56 | # rm -rf SS330 57 | # mkdir SS330 58 | # /bin/bash ./Make_SS_330_new.sh -b SS330 59 | 60 | - name: Build stock synthesis with admb docker image 61 | run: | 62 | rm -rf SS330 63 | rm -rf ss3_osx.tar 64 | mkdir SS330 65 | chmod 777 SS330 66 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 67 | 68 | - name: Move exes, scripts to needed locations 69 | run: | 70 | mv test-models-repo/models/Simple/starter.ss starter.ss 71 | mv test-models-repo/models/Simple/forecast.ss forecast.ss 72 | mv test-models-repo/models/Simple/control.ss control.ss 73 | mv 
test-models-repo/models/Simple/data.ss data.ss 74 | mv SS330/ss3 ss3 75 | ls 76 | 77 | - name: Change permissions on ss3 exe 78 | run: sudo chmod a+x ss3 79 | 80 | - name: Run models without estimation 81 | run: | 82 | # Run NUTS algorithim in ADMB to make sure still works with stock synthesis 83 | # run the simple model with -hbf option to get the necessary files 84 | system("./ss3 -hbf") 85 | # run the simple model 30 times using the NUTS algorithim 86 | niters <- 100 87 | for (i in 1:niters) { 88 | message("Running iteration ", i) 89 | # run simple model 90 | system("./ss3 -maxfn 0 -phase 40 -nohess -mcmc 10 -nuts -mcseed 1 -max_treedepth 3") 91 | # read in adaption.csv 92 | adapt_df <- read.csv("adaptation.csv") 93 | if(i == 1) { 94 | compare_val <- adapt_df[nrow(adapt_df), "stepsize__"] 95 | message("Getting ref value from iteration ", i, ". Ref value is " , 96 | compare_val) 97 | } else { 98 | message("Checking iteration ", i) 99 | val_to_check <- adapt_df[nrow(adapt_df), "stepsize__"] 100 | if(!identical(val_to_check, compare_val)) { 101 | stop("First step size for iteration ", i ," (step size =", 102 | val_to_check, 103 | ")", " was not equal to values in the first iteration (", 104 | compare_val, ").") 105 | } 106 | } 107 | } 108 | message("No differences in step size among stock synthesis runs.") 109 | shell: Rscript {0} -------------------------------------------------------------------------------- /.github/workflows/run-ss3-no-est.yml: -------------------------------------------------------------------------------- 1 | # Build SS3 and run test models without estimation (from .par) and no hessian 2 | # Runs on every push and PR (even draft PRs) ONLY if tpl files have changed. 
3 | name: run-ss3-no-est 4 | on: 5 | workflow_dispatch: 6 | push: 7 | paths: 8 | - '**.tpl' 9 | # branches: 10 | # - main 11 | pull_request: 12 | paths: 13 | - '**.tpl' 14 | branches: 15 | - main 16 | 17 | # Run models without estimation, then try to run again with .ss_new 18 | jobs: 19 | run-ss3-no-est: 20 | runs-on: ubuntu-latest 21 | env: 22 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 23 | RSPM: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest" 24 | 25 | steps: 26 | - name: Checkout ss3 repo 27 | uses: actions/checkout@v4 28 | 29 | # - name: Get last compiled version of SS; alternative to compiling in same wkflow 30 | # uses: dawidd6/action-download-artifact@v2 31 | # with: 32 | # workflow: build-centos.yml 33 | # workflow_conclusion: success 34 | # name: ss_linux 35 | # path: ss_linux 36 | 37 | - name: Checkout models repo 38 | uses: actions/checkout@v4 39 | with: 40 | repository: 'nmfs-ost/ss3-test-models' 41 | path: test-models-repo 42 | 43 | - name: Update Ubuntu packages 44 | run: | 45 | sudo add-apt-repository ppa:ubuntu-toolchain-r/test 46 | sudo apt-get update 47 | sudo apt-get install --only-upgrade libstdc++6 48 | 49 | - name: Setup R 50 | uses: r-lib/actions/setup-r@v2 51 | 52 | # - name: Get admb and put in path, linux 53 | # run: | 54 | # wget https://github.com/admb-project/admb/releases/download/admb-13.1/admb-13.1-linux.zip 55 | # sudo unzip admb-13.1-linux.zip -d /usr/local/bin 56 | # sudo chmod 755 /usr/local/bin/admb-13.1/bin/admb 57 | # echo "/usr/local/bin/admb-13.1/bin" >> $GITHUB_PATH 58 | 59 | # - name: Build ss3 60 | # run: | 61 | # rm -rf SS330 62 | # mkdir SS330 63 | # /bin/bash ./Make_SS_330_new.sh -b SS330 64 | 65 | - name: Build stock synthesis with admb docker image 66 | run: | 67 | rm -rf SS330 68 | rm -rf ss3_osx.tar 69 | mkdir SS330 70 | chmod 777 SS330 71 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 72 | 73 | - name: Move exes and R scripts to needed locations 74 | run: | 75 | mv test-models-repo/models 
test-models-repo/model_runs 76 |           mv SS330/ss3 test-models-repo/model_runs/ss3 77 |           mv test-models-repo/.github/r_scripts/run_from_par.R test-models-repo/run_from_par.R 78 |           mv test-models-repo/.github/r_scripts/run_compare_noest.R test-models-repo/run_compare_noest.R 79 | 80 |       - name: Change permissions on ss3 exes 81 |         run: sudo chmod a+x test-models-repo/model_runs/ss3 82 | 83 |       - name: Run test models without estimation 84 |         run: | 85 |           cd test-models-repo && Rscript run_from_par.R 86 | 87 |       - name: Download R packages to run models in parallel 88 |         run: Rscript -e 'install.packages(c("parallelly", "furrr", "future"))' 89 | 90 |       - name: Run comparison using run_compare_noest.R file in test models repo 91 |         run: | 92 |           mkdir test-models-repo/run_R 93 |           cd test-models-repo && Rscript run_compare_noest.R 94 | 95 |       - name: Determine results of test using check_failed.R file in test models repo 96 |         run: cd test-models-repo && Rscript .github/r_scripts/check_failed.R 97 | 98 |       - name: Run models using the .ss_new files 99 |         run: | 100 |           # Get model names and file paths 101 |           mod_names <- list.dirs("test-models-repo/model_runs", 102 |             full.names = FALSE, recursive = FALSE) 103 |           mod_paths <- list.dirs("test-models-repo/model_runs", 104 |             full.names = TRUE, recursive = FALSE) 105 |           print(mod_names) 106 | 107 |           # Create function to run models using .ss_new files 108 |           run_ssnew <- function(dir) { 109 |             wd <- getwd() 110 |             print(wd) 111 |             on.exit(system(paste0("cd ", wd))) 112 |             # delete old starter files, rename forecast.ss_new and starter.ss_new files 113 |             file.remove(file.path(dir, "starter.ss")) 114 |             file.remove(file.path(dir, "forecast.ss")) 115 |             file.rename(file.path(dir, "starter.ss_new"), file.path(dir,"starter.ss")) 116 |             file.rename(file.path(dir, "forecast.ss_new"), file.path(dir,"forecast.ss")) 117 |             # rename control and data files to standardized names (from the starter files) 118 |             start <- readLines(file.path(dir, "starter.ss")) 119 |             first_val_line <- 
grep("0=use init values in control file", start, fixed = TRUE) 120 | datname <- start[first_val_line-2] 121 | ctlname <- start[first_val_line-1] 122 | print(datname) 123 | print(ctlname) 124 | file.remove(file.path(dir, datname)) 125 | file.remove(file.path(dir, ctlname)) 126 | file.rename(file.path(dir,"data_echo.ss_new"), file.path(dir, datname)) 127 | file.rename(file.path(dir, "control.ss_new"), file.path(dir, ctlname)) 128 | # run the models without estimation 129 | file.remove(file.path(dir, "Report.sso")) 130 | # see if model finishes without error 131 | system(paste0("cd ", dir, " && ../ss3 -stopph 0 -nohess")) 132 | model_ran <- file.exists(file.path(dir, "control.ss_new")) 133 | return(model_ran) 134 | } 135 | 136 | # Setup parallel 137 | ncores <- parallelly::availableCores(omit = 1) 138 | future::plan(future::multisession, workers = ncores) 139 | 140 | # Run models in parallel using .ss_new files and print out error messages 141 | mod_ran <- furrr::future_map(mod_paths, function(x) {tryCatch(run_ssnew(x), 142 | error = function(e) print(e) 143 | )} 144 | ) 145 | mod_errors <- mod_names[unlist(lapply(mod_ran, function(x) "simpleError" %in% class(x)))] 146 | success <- TRUE 147 | 148 | # Print list of models with errors if there are any 149 | if(length(mod_errors) > 0) { 150 | message("Model code with errors were: ", paste0(mod_errors, collapse = ", "), 151 | ". 
See error list above for more details.") 152 | success <- FALSE 153 | } else { 154 | message("All code ran without error, but model runs may still have failed.") 155 | } 156 | 157 | # Print list of models that didn't run if there are any 158 | mod_no_run <- mod_names[unlist(lapply(mod_ran, function(x) isFALSE(x)))] # false means model didn't run 159 | if(length(mod_no_run) > 0) { 160 | message("Models that didn't run are ", paste0(mod_no_run, collapse = ", ")) 161 | success <- FALSE 162 | } else { 163 | message("All models ran without error.") 164 | } 165 | 166 | # Determine if job fails or passes 167 | if(success == FALSE) { 168 | stop("Job failed due to code with errors or models that didn't run.") 169 | } else { 170 | message("Job passed! All models successfully ran.") 171 | } 172 | shell: Rscript {0} 173 | 174 | - name: Archive results 175 | uses: actions/upload-artifact@main 176 | if: always() 177 | with: 178 | name: 'result_textfiles' 179 | path: test-models-repo/run_R/ 180 | -------------------------------------------------------------------------------- /.github/workflows/run-ss3-with-est.yml: -------------------------------------------------------------------------------- 1 | # Build SS3 and run test models with estimation and hessian 2 | # Only runs manual run or if run-ss3-no-est.yml runs successfully 3 | name: run-ss3-with-est 4 | on: 5 | workflow_dispatch: 6 | # push: 7 | # paths: 8 | # - '**.tpl' 9 | # branches: 10 | # - main 11 | workflow_run: 12 | workflows: ["run-ss3-no-est"] 13 | types: 14 | - completed 15 | # pull_request: 16 | # types: ['opened', 'edited', 'reopened', 'synchronize', 'ready_for_review'] 17 | # paths: 18 | # - '**.tpl' 19 | # branches: 20 | # - main 21 | 22 | # Run fast running SS3 test models with estimation 23 | jobs: 24 | run-ss3-with-est: 25 | if: ${{ github.event_name == 'workflow_dispatch' || (github.event.pull_request.draft == 'false' && github.event.workflow_run.conclusion == 'success') }} 26 | runs-on: ubuntu-latest 27 | 
env: 28 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 29 | RSPM: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest" 30 | 31 | steps: 32 | - name: Checkout ss3 repo 33 | uses: actions/checkout@v4 34 | 35 | - name: Checkout models repo 36 | uses: actions/checkout@v4 37 | with: 38 | repository: 'nmfs-ost/ss3-test-models' 39 | path: test-models-repo 40 | 41 | - name: Update Ubuntu packages 42 | run: | 43 | sudo add-apt-repository ppa:ubuntu-toolchain-r/test 44 | sudo apt-get update 45 | sudo apt-get install --only-upgrade libstdc++6 46 | 47 | - name: Setup R 48 | uses: r-lib/actions/setup-r@v2 49 | 50 | # - name: Get admb and put in path (not using docker), linux 51 | # run: | 52 | # wget https://github.com/admb-project/admb/releases/download/admb-13.2/admb-13.2-linux.zip 53 | # sudo unzip admb-13.2-linux.zip -d /usr/local/bin 54 | # sudo chmod 755 /usr/local/bin/admb-13.2/bin/admb 55 | # echo "/usr/local/bin/admb-13.2/bin" >> $GITHUB_PATH 56 | 57 | # - name: Build stock synthesis 58 | # run: | 59 | # rm -rf SS330 60 | # mkdir SS330 61 | # /bin/bash ./Make_SS_330_new.sh -b SS330 62 | 63 | - name: Build stock synthesis with admb docker image 64 | run: | 65 | rm -rf SS330 66 | rm -rf ss3_osx.tar 67 | mkdir SS330 68 | chmod 777 SS330 69 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 70 | 71 | - name: Move exes and R scripts to needed locations 72 | run: | 73 | mv test-models-repo/models test-models-repo/model_runs 74 | mv SS330/ss3 test-models-repo/model_runs/ss3 75 | 76 | - name: Change permissions on ss3 exes 77 | run: sudo chmod a+x test-models-repo/model_runs/ss3 78 | 79 | - name: Download R packages to run test models in parallel 80 | run: Rscript -e 'install.packages(c("parallelly", "furrr", "future"))' 81 | 82 | - name: Run test models 83 | run: | 84 | # Setup parallel 85 | ncores <- parallelly::availableCores(omit = 1) 86 | future::plan(future::multisession, workers = ncores) 87 | 88 | # Get model names and file paths 89 | mod_names <- 
list.dirs(file.path("test-models-repo", "model_runs"), full.names = FALSE, recursive = FALSE) 90 | mod_paths <- list.dirs(file.path("test-models-repo", "model_runs"), full.names = TRUE, recursive = FALSE) 91 | print(mod_names) 92 | 93 | # Create function to run models with estimation 94 | run_ss <- function(dir) { 95 | wd <- getwd() 96 | print(wd) 97 | on.exit(system(paste0("cd ", wd))) 98 | 99 | # Rename the reference files 100 | file.rename(file.path(dir, "ss_summary.sso"), 101 | file.path(dir, "ss_summary_ref.sso")) 102 | file.rename(file.path(dir, "warning.sso"), 103 | file.path(dir, "warning_ref.sso")) 104 | file.copy(file.path(dir, "ss3.par"), file.path(dir, "ss3_ref.par")) 105 | 106 | # Run the models with estimation and see if model finishes without error 107 | message("running ss3 on ", basename(dir)) 108 | system(paste0("cd ", dir, " && ../ss3 -nox")) 109 | model_ran <- file.exists(file.path(dir, "control.ss_new")) 110 | return(model_ran) 111 | } 112 | 113 | # Run test models in parallel and print out error messages 114 | mod_ran <- furrr::future_map(mod_paths, function(x){tryCatch(run_ss(x), 115 | error = function(e) print(e))}) 116 | 117 | mod_errors <- mod_names[unlist(lapply(mod_ran, function(x) "simpleError" %in% class(x)))] 118 | success <- TRUE 119 | 120 | # Print list of models with errors if there are any 121 | if(length(mod_errors) > 0) { 122 | message("Model code with errors were: ", paste0(mod_errors, collapse = ", "), 123 | ". 
See error list above for more details.") 124 | success <- FALSE 125 | } else { 126 | message("All code ran without error, but model runs may still have failed.") 127 | } 128 | 129 | # Print list of models that didn't run if there are any 130 | mod_no_run <- mod_names[unlist(lapply(mod_ran, function(x) isFALSE(x)))] # false means model didn't run 131 | if(length(mod_no_run) > 0) { 132 | message("Models that didn't run are ", paste0(mod_no_run, collapse = ", ")) 133 | success <- FALSE 134 | } else { 135 | message("All models ran without error.") 136 | } 137 | 138 | # Determine if job fails or passes 139 | if(success == FALSE) { 140 | stop("Job failed due to code with errors or models that didn't run.") 141 | } else { 142 | message("All models successfully ran.") 143 | } 144 | shell: Rscript {0} 145 | 146 | - name: Run comparison using compare.R file in test models repo 147 | run: | 148 | source("test-models-repo/.github/r_scripts/compare.R") 149 | orig_wd <- getwd() 150 | setwd("test-models-repo") 151 | on.exit(orig_wd) 152 | dir.create("run_R") 153 | 154 | # Get model folder names 155 | mod_fold <- file.path("model_runs") 156 | mod_names <- list.dirs(mod_fold, full.names = FALSE, recursive = FALSE) 157 | message("Will compare ref runs to new results for these models:") 158 | print(mod_names) 159 | message("Notable changes in total likelihood, max gradients, ", 160 | " and number of warnings:") 161 | compare_list <- vector(mode = "list", length = length(mod_names)) 162 | for(i in mod_names) { 163 | pos <- which(mod_names == i) 164 | sum_file <- file.path(mod_fold, i, "ss_summary.sso") 165 | if (i == "Simple") { 166 | file.copy(sum_file, file.path("run_R", paste0(i, "_ss_summary.sso"))) 167 | } 168 | ref_sum_file <- file.path(mod_fold, i, "ss_summary_ref.sso") 169 | 170 | par_file <- file.path(mod_fold, i, "ss3.par") 171 | ref_par_file <- file.path(mod_fold, i, "ss3_ref.par") 172 | 173 | warn_file <- file.path(mod_fold, i, "warning.sso") 174 | ref_warn_file <- 
file.path(mod_fold, i, "warning_ref.sso") 175 | 176 | fail_file <- file.path("run_R", "test_failed.csv") 177 | 178 | compare_list[[pos]] <- compare_ss_runs(mod_name = i, 179 | sum_file = sum_file, ref_sum_file = ref_sum_file, 180 | par_file = par_file, ref_par_file = ref_par_file, 181 | warn_file = warn_file, ref_warn_file = ref_warn_file, 182 | hessian = TRUE, 183 | new_file = NULL, fail_file = fail_file) 184 | } 185 | 186 | # Write out all model results 187 | compare_df <- do.call("rbind", compare_list) 188 | compare_df_print <- format(compare_df, digits = 6, nsmall = 3, 189 | justify = "left") 190 | message("See saved artifact all_results.csv for all compared values and their differences.") 191 | 192 | # Write all model comparison results to csv 193 | write.csv(compare_df_print, "run_R/all_results.csv", row.names = FALSE) 194 | 195 | # Write all model changes to csv 196 | message("See saved artifact all_changes.csv for only changed values (even if the threshold was too low to fail the job).") 197 | filtered_df <- compare_df[compare_df$diff != 0, ] 198 | filtered_df <- format(filtered_df, digits = 6, nsmall = 3, 199 | justify = "left") 200 | write.csv(filtered_df, "run_R/all_changes.csv", row.names = FALSE) 201 | shell: Rscript {0} 202 | 203 | - name: Determine results of test 204 | run: cd test-models-repo && Rscript .github/r_scripts/check_failed.R 205 | 206 | - name: Archive results 207 | uses: actions/upload-artifact@main 208 | if: always() 209 | with: 210 | name: 'result_textfiles' 211 | path: test-models-repo/run_R/ 212 | -------------------------------------------------------------------------------- /.github/workflows/test-r4ss-with-ss3.yml: -------------------------------------------------------------------------------- 1 | # Check that r4ss functions SS_output() and SS_plots() run with changes in ss3 2 | # using the test models. 3 | # Only runs on a pull request and a push to main if tpl files have changed. 
4 | name: test-r4ss-with-ss3 5 | on: 6 | workflow_dispatch: 7 | push: 8 | paths: 9 | - '**.tpl' 10 | branches: 11 | - main 12 | pull_request: 13 | types: ['opened', 'edited', 'reopened', 'synchronize', 'ready_for_review'] 14 | paths: 15 | - '**.tpl' 16 | branches: 17 | - main 18 | 19 | # Run models without estimation, then run r4ss SS_plots() and SS_output() 20 | jobs: 21 | test-r4ss-with-ss3: 22 | if: github.event.pull_request.draft == false 23 | runs-on: ubuntu-latest 24 | env: 25 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 26 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 27 | R_KEEP_PKG_SOURCE: yes 28 | 29 | steps: 30 | - name: Checkout ss3 repo 31 | uses: actions/checkout@v4 32 | 33 | - name: Checkout models repo 34 | uses: actions/checkout@v4 35 | with: 36 | repository: 'nmfs-ost/ss3-test-models' 37 | path: test-models-repo 38 | 39 | - name: Install libcurl and other necessary linux packages 40 | run: | 41 | sudo add-apt-repository ppa:ubuntu-toolchain-r/test 42 | sudo apt-get update 43 | sudo apt-get install --only-upgrade libstdc++6 44 | sudo apt-get install -y libcurl4-openssl-dev 45 | sudo apt-get install -y libfontconfig1-dev 46 | sudo apt-get install -y libharfbuzz-dev libfribidi-dev 47 | 48 | - name: Setup R 49 | uses: r-lib/actions/setup-r@v2 50 | 51 | - name: Install R packages to run in parallel 52 | run: Rscript -e 'install.packages(c("remotes","parallelly", "furrr", "future"))' 53 | 54 | - name: Install r4ss 55 | run: Rscript -e 'remotes::install_github("r4ss/r4ss")' 56 | 57 | # - name: Get admb and put in path, linux 58 | # run: | 59 | # wget https://github.com/admb-project/admb/releases/download/admb-13.2/admb-13.2-linux.zip 60 | # sudo unzip admb-13.2-linux.zip -d /usr/local/bin 61 | # sudo chmod 755 /usr/local/bin/admb-13.2/bin/admb 62 | # echo "/usr/local/bin/admb-13.2/bin" >> $GITHUB_PATH 63 | 64 | # - name: Build stock synthesis 65 | # run: | 66 | # rm -rf SS330 67 | # mkdir SS330 68 | # /bin/bash ./Make_SS_330_new.sh -b SS330 69 | 70 | - name: 
Build stock synthesis with admb docker image 71 | run: | 72 | rm -rf SS330 73 | rm -rf ss3_osx.tar 74 | mkdir SS330 75 | chmod 777 SS330 76 | /bin/bash ./Make_SS_330_new.sh --admb docker -b SS330 77 | 78 | - name: Move exes and R scripts to needed locations 79 | run: | 80 | mv test-models-repo/models test-models-repo/model_runs 81 | mv SS330/ss3 test-models-repo/model_runs/ss3 82 | mv test-models-repo/.github/r_scripts/run_from_par.R test-models-repo/run_from_par.R 83 | mv test-models-repo/.github/r_scripts/run_compare_noest.R test-models-repo/run_compare_noest.R 84 | 85 | - name: Change permissions on ss3 exes 86 | run: sudo chmod a+x test-models-repo/model_runs/ss3 87 | 88 | - name: Run models without estimation using the run_from_par.R script in the test models repo 89 | run: | 90 | cd test-models-repo && Rscript run_from_par.R 91 | 92 | - name: Run SS_output() and SS_plots() on test models 93 | run: | 94 | # Setup parallel 95 | ncores <- parallelly::availableCores(omit = 1) 96 | future::plan(future::multisession, workers = ncores) 97 | 98 | # Get model names 99 | mod_names <- list.dirs("test-models-repo/model_runs", full.names = FALSE, recursive = FALSE) 100 | print(mod_names) 101 | if(length(mod_names) == 0) { 102 | stop("Did not r4ss on any models; perhaps path to models is not correct?") 103 | } 104 | 105 | # Run SS_output() on test models in parallel and return errors 106 | out <- furrr::future_map(mod_names, function(i) { 107 | tryCatch(r4ss::SS_output(file.path("test-models-repo", "model_runs", i), 108 | verbose = FALSE, hidewarn = TRUE, printstats = FALSE), 109 | error = function(e) { 110 | print(e) 111 | } 112 | ) 113 | }) 114 | 115 | # Run SS_plots() on test models in parallel and return errors 116 | plots <- furrr::future_map(out, function(x) { 117 | tryCatch(r4ss::SS_plots(x, verbose = FALSE, openfile = FALSE), 118 | error = function(e) { 119 | print(e) 120 | }) 121 | }) 122 | 123 | # Determine if job fails and return model names with errors from 
either function if there are any 124 | out_issues <- mod_names[unlist(lapply(out, function(x) "error" %in% class(x)))] 125 | plotting_issues<- mod_names[unlist(lapply(plots, function(x) "error" %in% class(x)))] 126 | 127 | if(length(out_issues) == 0 & length(plotting_issues) == 0) { 128 | message("All r4ss functions completed successfully!") 129 | } else { 130 | message("There were some errors. SS_output() failed to run for model_runs ", 131 | paste0(out_issues, collapse = ", "), "; SS_plots() failed to run for ", 132 | "models ", paste0(plotting_issues, collapse = " ,"), ". More info ", 133 | "below.") 134 | message("Problems with SS_output():") 135 | for(i in out_issues) { 136 | message(i) 137 | tmp_loc <- which(mod_names == i) 138 | print(out[[tmp_loc]]) 139 | } 140 | message("Problems with SS_plot()") 141 | for(p in plotting_issues) { 142 | message(p) 143 | tmp_loc <- which(mod_names == p) 144 | print(plots[[tmp_loc]]) 145 | } 146 | q(status = 1) 147 | } 148 | shell: Rscript {0} 149 | 150 | 151 | # could archive files, but the archive will be large, so may not be worth it. 
152 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.bak 2 | *.temp 3 | *.tmp 4 | *.pdf 5 | *.cpp 6 | *.exe 7 | *.htp 8 | *.obj 9 | *.log 10 | ss.tpl 11 | ss3.tpl 12 | ss_opt.tpl 13 | ss_trans.tpl 14 | ~$*.* 15 | Compile/ss.log 16 | Compile/ss3.log 17 | .Rproj.user 18 | *.Rproj 19 | .Rhistory 20 | Compile/ssFileLabels.txt 21 | StockSynthesis.code-workspace 22 | Compile/Make_SS_warn.bat 23 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "*.tpl": "c", 4 | "*.htp": "c" 5 | } 6 | } -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | We follow the [NFMS Fisheries Toolbox Code of Conduct](https://github.com/nmfs-fish-tools/Resources/blob/master/CODE_OF_CONDUCT.md). 2 | 3 | The NFMS Fisheries Toolbox Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 4 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 5 | 6 | [homepage]: https://www.contributor-covenant.org 7 | 8 | For answers to common questions about this code of conduct, see 9 | https://www.contributor-covenant.org/faq 10 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Stock Synthesis 2 | 3 | ## General Information 4 | 5 | Thank you for your interest in contributing to Stock Synthesis! We strive to follow the [NMFS Fisheries Toolbox Contribution Guide](https://github.com/nmfs-fish-tools/Resources/blob/master/CONTRIBUTING.md). 
Note that these are guidelines, not rules, and we are open to collaborations in other ways that may work better for you. Please feel free to reach out to us by opening an issue in this repository or by emailing the developers at nmfs.stock.synthesis@noaa.gov. 6 | 7 | This project and everyone participating in it is governed by the [NMFS Fisheries Toolbox Code of Conduct](https://github.com/nmfs-fish-tools/Resources/blob/master/CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to [fisheries.toolbox@noaa.gov](mailto:fisheries.toolbox@noaa.gov). Note that the maintainers of SS do not have access to this email account, so unacceptable behavior of the maintainers can also be reported here. 8 | 9 | ## How can I contribute? 10 | 11 | We welcome all contributions! For example, report bugs or request features through an [issue](https://github.com/nmfs-ost/ss3-source-code/issues), suggest code changes through a pull request, or suggest changes to the [Stock Synthesis manual](https://github.com/nmfs-ost/ss3-doc). 12 | 13 | ## How to submit a pull request to Stock Synthesis 14 | 15 | All code contributions should be submitted as pull requests to be reviewed by an SS3 team member. 16 | 17 | 1. [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the stock synthesis repository (or, if you are a member of the ss3-team, clone the repository). 18 | 2. Create a new branch, make the changes, and test out by [building stock synthesis](#how-to-build-stock-synthesis-from-source) locally. 19 | 3. Commit the changes and push up to the github fork. 20 | 4. [Submit a pull request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) to the main branch of Stock Synthesis Repository. 21 | 5. An SS3 team member will work with you to accept or make corrections to the pull request. 
22 | 23 | ## How to build Stock Synthesis from source 24 | 25 | 1. Have a local version of the stock synthesis repository (though forking or cloning). 26 | 2. Download [ADMB](http://www.admb-project.org/). The current version of ADMB used to compile Stock Synthesis is listed in [SS_versioninfo_330safe.tpl](https://github.com/nmfs-ost/ss3-source-code/blob/main/SS_versioninfo_330safe.tpl). 27 | 3. Add ADMB to the PATH or use the ADMB command window for the following commands. 28 | 4. On Linux or Mac: 29 | - Change directory to the cloned Stock Synthesis repository and use the script [Make_SS_330_new.sh](https://github.com/nmfs-ost/ss3-source-code/blob/main/Make_SS_330_new.sh) by calling `./Make_SS_330_new.sh`. To see all options for the function, use `./Make_SS_330_new.sh --help`. 30 | - Follow the instructions found in the [GNUmakefile](https://github.com/nmfs-ost/ss3-source-code/blob/main/GNUmakefile) which will allow you to compile Stock Synthesis using the command `~/ss3-source-code$ make` within a command line opened in the cloned Stock Synthesis repository folder. 31 | 6. On Windows: Change directory to the cloned Stock Synthesis repository's [Compile](https://github.com/nmfs-ost/ss3-source-code/tree/main/Compile) subfolder and call the [Make_SS_safe batch script](https://github.com/nmfs-ost/ss3-source-code/blob/main/Compile/Make_SS_safe.bat) to build the "safe" version of Stock Synthesis. (to build the fast (aka optimized) version of Stock Synthesis, call the Make_SS_fast.bat batch scripts instead). Upon calling the batch script, SS3 will be built in the Compile subfolder. 32 | 33 | # Still have a question on the contributing workflow? 34 | 35 | Please email nmfs.stock.synthesis@noaa.gov for assistance. 
36 | -------------------------------------------------------------------------------- /Change_log_for_SS_3.30.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nmfs-ost/ss3-source-code/5b7bfe34fc3ee955cbd172b316a3b5f65831d0e9/Change_log_for_SS_3.30.xlsx -------------------------------------------------------------------------------- /Compile/Make_SS_fast.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | @REM compiling ss.exe (safe executable) with generic path 4 | @REM requires "Compile" directory in the same directory where 5 | @REM the .tpl files and this .bat file sit. 6 | 7 | cd .. 8 | 9 | @REM deleted temporary file 10 | del SS_functions.temp 11 | 12 | @REM create SS_functions.temp file combining various functions 13 | copy/b SS_biofxn.tpl+SS_miscfxn.tpl+SS_selex.tpl+SS_popdyn.tpl+SS_recruit.tpl+SS_benchfore.tpl+SS_expval.tpl+SS_objfunc.tpl+SS_write.tpl+SS_write_ssnew.tpl+SS_write_report.tpl+SS_ALK.tpl+SS_timevaryparm.tpl+SS_tagrecap.tpl SS_functions.temp 14 | 15 | @REM combine remaining files to create ss3_opt.tpl 16 | copy/b SS_versioninfo_330opt.tpl+SS_readstarter.tpl+SS_readdata_330.tpl+SS_readcontrol_330.tpl+SS_param.tpl+SS_prelim.tpl+SS_global.tpl+SS_proced.tpl+SS_functions.temp "Compile\ss3_opt.tpl" 17 | 18 | cd Compile 19 | 20 | if defined ADMB_HOME ( 21 | if exist "%ADMB_HOME%\\admb.cmd" ( 22 | @echo "-- Building ss_opt.exe with %ADMB_HOME%\admb.cmd in '%CD%' --" 23 | set CXX=g++ 24 | %ADMB_HOME%\\admb.cmd -f ss3_opt 25 | goto CHECK 26 | ) 27 | ) 28 | 29 | @REM check if admb.cmd is in path 30 | for /f "tokens=*" %%i in ('where admb.cmd 2^>^&1 ^| findstr "admb.cmd"') do ( 31 | @echo "-- Building ss_opt.exe with admb.cmd in '%CD%' --" 32 | set CXX=g++ 33 | admb -f ss3_opt.tpl 34 | goto CHECK 35 | ) 36 | 37 | @REM compile executable 38 | for /f "tokens=*" %%i in ('where docker.exe 2^>^&1 ^| findstr "docker.exe"') do ( 39 | 
@echo "-- Building ss_opt.exe with docker in '%CD%' --" 40 | for /f "tokens=*" %%j in ('ver ^| findstr "10.0.1"') do ( 41 | set "ISWINDOWS10=found" 42 | ) 43 | if defined ISWINDOWS10 ( 44 | docker run --rm --mount source=%CD%,destination=C:\compile,type=bind --workdir C:\\compile johnoel/admb-13.2:windows10 -f ss3_opt.tpl 45 | ) else ( 46 | docker run --rm --mount source=%CD%,destination=C:\compile,type=bind --workdir C:\\compile johnoel/admb-13.2:windows -f ss3_opt.tpl 47 | ) 48 | goto CHECK 49 | ) 50 | 51 | :CHECK 52 | if not exist ss3_opt.exe ( 53 | @echo "Error: Unable to build ss3_opt.exe" 54 | exit /b 1 55 | ) 56 | -------------------------------------------------------------------------------- /Compile/Make_SS_safe.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | ::compiling ss.exe (safe executable) with generic path 4 | ::requires "Compile" directory in the same directory as the .tpl files and this .bat file 5 | 6 | cd .. 
7 | 8 | ::deleted temporary file 9 | del SS_functions.temp 10 | 11 | ::create SS_functions.temp file combining various functions 12 | copy/b SS_biofxn.tpl+SS_miscfxn.tpl+SS_selex.tpl+SS_popdyn.tpl+SS_recruit.tpl+SS_benchfore.tpl+SS_expval.tpl+SS_objfunc.tpl+SS_write.tpl+SS_write_ssnew.tpl+SS_write_report.tpl+SS_ALK.tpl+SS_timevaryparm.tpl+SS_tagrecap.tpl SS_functions.temp 13 | 14 | ::combine remaining files to create ss3.tpl 15 | copy/b SS_versioninfo_330safe.tpl+SS_readstarter.tpl+SS_readdata_330.tpl+SS_readcontrol_330.tpl+SS_param.tpl+SS_prelim.tpl+SS_global.tpl+SS_proced.tpl+SS_functions.temp "Compile\ss3.tpl" 16 | ::combine remaining files to create ss3.tpl 17 | copy/b SS_versioninfo_330safe.tpl+SS_readstarter.tpl+SS_readdata_330.tpl+SS_readcontrol_330.tpl+SS_param.tpl+SS_prelim.tpl+SS_global.tpl+SS_proced.tpl+SS_functions.temp "Compile\ss3.tpl" 18 | 19 | ::path=c:\admb;C:\rtools40\mingw64\bin;%path% 20 | 21 | cd Compile 22 | 23 | if defined ADMB_HOME ( 24 | if exist "%ADMB_HOME%\\admb.cmd" ( 25 | @echo "-- Building ss3.exe with %ADMB_HOME%\admb.cmd in '%CD%' --" 26 | set CXX=g++ 27 | %ADMB_HOME%\\admb.cmd ss3 28 | goto CHECK 29 | ) 30 | ) 31 | 32 | @REM check if admb.cmd is in path 33 | for /f "tokens=*" %%i in ('where admb.cmd 2^>^&1 ^| findstr "admb.cmd"') do ( 34 | @echo "-- Building ss3.exe with admb.cmd in '%CD%' --" 35 | set CXX=g++ 36 | admb ss3.tpl 37 | goto CHECK 38 | ) 39 | 40 | @REM compile executable 41 | for /f "tokens=*" %%i in ('where docker.exe 2^>^&1 ^| findstr "docker.exe"') do ( 42 | @echo "-- Building ss3.exe with docker in '%CD%' --" 43 | for /f "tokens=*" %%j in ('ver ^| findstr "10.0.1"') do ( 44 | set "ISWINDOWS10=found" 45 | ) 46 | if defined ISWINDOWS10 ( 47 | docker run --rm --mount source=%CD%,destination=C:\compile,type=bind --workdir C:\\compile johnoel/admb-13.2:windows10 ss3.tpl 48 | ) else ( 49 | docker run --rm --mount source=%CD%,destination=C:\compile,type=bind --workdir C:\\compile johnoel/admb-13.2:windows ss3.tpl 50 | ) 
51 | goto CHECK 52 | ) 53 | 54 | :CHECK 55 | if not exist ss3.exe ( 56 | @echo "Error: Unable to build ss3.exe" 57 | exit /b 1 58 | ) 59 | -------------------------------------------------------------------------------- /Compile/Make_SS_warn.bat: -------------------------------------------------------------------------------- 1 | 2 | ::compiling ss.exe (safe executable) with generic path 3 | ::requires "Compile" directory in the same directory as the .tpl files and this .bat file 4 | 5 | cd .. 6 | 7 | ::deleted temporary file 8 | del SS_functions.temp 9 | 10 | ::create SS_functions.temp file combining various functions 11 | copy/b SS_biofxn.tpl+SS_miscfxn.tpl+SS_selex.tpl+SS_popdyn.tpl+SS_recruit.tpl+SS_benchfore.tpl+SS_expval.tpl+SS_objfunc.tpl+SS_write.tpl+SS_write_ssnew.tpl+SS_write_report.tpl+SS_ALK.tpl+SS_timevaryparm.tpl+SS_tagrecap.tpl SS_functions.temp 12 | 13 | ::combine remaining files to create ss3.tpl 14 | copy/b SS_versioninfo_330safe.tpl+SS_readstarter.tpl+SS_readdata_330.tpl+SS_readcontrol_330.tpl+SS_param.tpl+SS_prelim.tpl+SS_global.tpl+SS_proced.tpl+SS_functions.temp "Compile\ss3.tpl" 15 | 16 | ::path=c:\admb;C:\rtools40\mingw64\bin;%path% 17 | 18 | cd Compile 19 | 20 | if exist ss3.exe ( 21 | if exist ss3_old.exe ( 22 | del ss3_old.exe 23 | ) 24 | ren ss3.exe ss3_old.exe 25 | ) 26 | 27 | tpl2cpp ss3 28 | 29 | g++ -c -std=c++17 -O2 -D_FILE_OFFSET_BITS=64 -DUSE_ADMB_CONTRIBS -D_USE_MATH_DEFINES -I. -I"C:\ADMB-13.2\include" -I"C:\ADMB-13.2\include\contrib" -Wall -Wextra -o ss3.obj ss3.cpp 30 | 31 | g++ -static -o ss3.exe ss3.obj "C:\ADMB-13.2\lib\libadmb-contrib-mingw64-g++12.a" 32 | 33 | dir *.exe 34 | -------------------------------------------------------------------------------- /GNUmakefile: -------------------------------------------------------------------------------- 1 | # The admb script should be included in the system PATH. If not, the 2 | # path to the script can be manually set (See MY_ADMB_HOME below). 
3 | # 4 | # Usage: 5 | # ./stock-synthesis/$ make 6 | 7 | # Uncomment MY_ADMB_HOME to manually set path to admb script 8 | # and ignore system enviroment PATH. 9 | # Note: Need to add directory character '/' at the end. 10 | # MY_ADMB_HOME=~/admb-main/ 11 | 12 | # Uncomment the variables below for static and/or debugging builds. 13 | # STATIC_BUILD= -p 14 | # DEBUG= -g 15 | 16 | export CXXFLAGS=-Wall -Wextra 17 | 18 | all: clean 19 | $(MAKE) ss3 20 | $(MAKE) ss3_opt 21 | 22 | docker: 23 | chmod -R 777 $(CURDIR) 24 | $(MAKE) USE_DOCKER=yes all 25 | 26 | ss3: ss3.tpl 27 | ifdef USE_DOCKER 28 | ifeq ($(OS),Windows_NT) 29 | docker run --rm --volume $(CURDIR):C:\\workdir\\ss --workdir C:\\workdir\\ss johnoel/admb-13.2:windows-ltsc2022-winlibs ss3.tpl 30 | else 31 | docker run --rm --volume $(CURDIR):/workdir/ss:rw --workdir /workdir/ss johnoel/admb-13.2:linux ss3.tpl 32 | endif 33 | else 34 | $(MY_ADMB_HOME)admb $(DEBUG)$(STATIC_BUILD) ss3.tpl 35 | endif 36 | 37 | ss3_opt: ss3_opt.tpl 38 | ifdef USE_DOCKER 39 | ifeq ($(OS),Windows_NT) 40 | docker run --rm --volume $(CURDIR):C:\\workdir\\ss_opt --workdir C:\\workdir\\ss_opt johnoel/admb-13.2:windows ss3_opt.tpl 41 | else 42 | docker run --rm --volume $(CURDIR):/workdir/ss_opt:rw --workdir /workdir/ss_opt johnoel/admb-13.2:linux ss3_opt.tpl 43 | endif 44 | else 45 | $(MY_ADMB_HOME)admb -f $(DEBUG)$(STATIC_BUILD) ss3_opt.tpl 46 | endif 47 | 48 | ss3.tpl: SS_functions.temp 49 | cat SS_versioninfo_330safe.tpl SS_readstarter.tpl SS_readdata_330.tpl SS_readcontrol_330.tpl SS_param.tpl SS_prelim.tpl SS_global.tpl SS_proced.tpl SS_functions.temp > ss3.tpl 50 | 51 | ss3_opt.tpl: SS_functions.temp 52 | cat SS_versioninfo_330opt.tpl SS_readstarter.tpl SS_readdata_330.tpl SS_readcontrol_330.tpl SS_param.tpl SS_prelim.tpl SS_global.tpl SS_proced.tpl SS_functions.temp > ss3_opt.tpl 53 | 54 | SS_functions.temp: 55 | cat SS_biofxn.tpl SS_miscfxn.tpl SS_selex.tpl SS_popdyn.tpl SS_recruit.tpl SS_benchfore.tpl SS_expval.tpl SS_objfunc.tpl 
SS_write.tpl SS_write_ssnew.tpl SS_write_report.tpl SS_ALK.tpl SS_timevaryparm.tpl SS_tagrecap.tpl > SS_functions.temp 56 | 57 | clean: 58 | @rm -vf ss3 59 | @rm -vf ss3_opt 60 | @rm -vf ss3.tpl 61 | @rm -vf ss3_opt.tpl 62 | @rm -vf SS_functions.temp 63 | @rm -vf ss3.cpp 64 | @rm -vf ss3.htp 65 | @rm -vf ss3.obj 66 | @rm -vf ss3_opt.cpp 67 | @rm -vf ss3_opt.htp 68 | @rm -vf ss3_opt.obj 69 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Creative Commons Legal Code 2 | 3 | CC0 1.0 Universal 4 | 5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE 6 | LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN 7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS 8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES 9 | REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS 10 | PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM 11 | THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED 12 | HEREUNDER. 13 | 14 | Statement of Purpose 15 | 16 | The laws of most jurisdictions throughout the world automatically confer 17 | exclusive Copyright and Related Rights (defined below) upon the creator 18 | and subsequent owner(s) (each and all, an "owner") of an original work of 19 | authorship and/or a database (each, a "Work"). 20 | 21 | Certain owners wish to permanently relinquish those rights to a Work for 22 | the purpose of contributing to a commons of creative, cultural and 23 | scientific works ("Commons") that the public can reliably and without fear 24 | of later claims of infringement build upon, modify, incorporate in other 25 | works, reuse and redistribute as freely as possible in any form whatsoever 26 | and for any purposes, including without limitation commercial purposes. 
27 | These owners may contribute to the Commons to promote the ideal of a free 28 | culture and the further production of creative, cultural and scientific 29 | works, or to gain reputation or greater distribution for their Work in 30 | part through the use and efforts of others. 31 | 32 | For these and/or other purposes and motivations, and without any 33 | expectation of additional consideration or compensation, the person 34 | associating CC0 with a Work (the "Affirmer"), to the extent that he or she 35 | is an owner of Copyright and Related Rights in the Work, voluntarily 36 | elects to apply CC0 to the Work and publicly distribute the Work under its 37 | terms, with knowledge of his or her Copyright and Related Rights in the 38 | Work and the meaning and intended legal effect of CC0 on those rights. 39 | 40 | 1. Copyright and Related Rights. A Work made available under CC0 may be 41 | protected by copyright and related or neighboring rights ("Copyright and 42 | Related Rights"). Copyright and Related Rights include, but are not 43 | limited to, the following: 44 | 45 | i. the right to reproduce, adapt, distribute, perform, display, 46 | communicate, and translate a Work; 47 | ii. moral rights retained by the original author(s) and/or performer(s); 48 | iii. publicity and privacy rights pertaining to a person's image or 49 | likeness depicted in a Work; 50 | iv. rights protecting against unfair competition in regards to a Work, 51 | subject to the limitations in paragraph 4(a), below; 52 | v. rights protecting the extraction, dissemination, use and reuse of data 53 | in a Work; 54 | vi. database rights (such as those arising under Directive 96/9/EC of the 55 | European Parliament and of the Council of 11 March 1996 on the legal 56 | protection of databases, and under any national implementation 57 | thereof, including any amended or successor version of such 58 | directive); and 59 | vii. 
other similar, equivalent or corresponding rights throughout the 60 | world based on applicable law or treaty, and any national 61 | implementations thereof. 62 | 63 | 2. Waiver. To the greatest extent permitted by, but not in contravention 64 | of, applicable law, Affirmer hereby overtly, fully, permanently, 65 | irrevocably and unconditionally waives, abandons, and surrenders all of 66 | Affirmer's Copyright and Related Rights and associated claims and causes 67 | of action, whether now known or unknown (including existing as well as 68 | future claims and causes of action), in the Work (i) in all territories 69 | worldwide, (ii) for the maximum duration provided by applicable law or 70 | treaty (including future time extensions), (iii) in any current or future 71 | medium and for any number of copies, and (iv) for any purpose whatsoever, 72 | including without limitation commercial, advertising or promotional 73 | purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each 74 | member of the public at large and to the detriment of Affirmer's heirs and 75 | successors, fully intending that such Waiver shall not be subject to 76 | revocation, rescission, cancellation, termination, or any other legal or 77 | equitable action to disrupt the quiet enjoyment of the Work by the public 78 | as contemplated by Affirmer's express Statement of Purpose. 79 | 80 | 3. Public License Fallback. Should any part of the Waiver for any reason 81 | be judged legally invalid or ineffective under applicable law, then the 82 | Waiver shall be preserved to the maximum extent permitted taking into 83 | account Affirmer's express Statement of Purpose. 
In addition, to the 84 | extent the Waiver is so judged Affirmer hereby grants to each affected 85 | person a royalty-free, non transferable, non sublicensable, non exclusive, 86 | irrevocable and unconditional license to exercise Affirmer's Copyright and 87 | Related Rights in the Work (i) in all territories worldwide, (ii) for the 88 | maximum duration provided by applicable law or treaty (including future 89 | time extensions), (iii) in any current or future medium and for any number 90 | of copies, and (iv) for any purpose whatsoever, including without 91 | limitation commercial, advertising or promotional purposes (the 92 | "License"). The License shall be deemed effective as of the date CC0 was 93 | applied by Affirmer to the Work. Should any part of the License for any 94 | reason be judged legally invalid or ineffective under applicable law, such 95 | partial invalidity or ineffectiveness shall not invalidate the remainder 96 | of the License, and in such case Affirmer hereby affirms that he or she 97 | will not (i) exercise any of his or her remaining Copyright and Related 98 | Rights in the Work or (ii) assert any associated claims and causes of 99 | action with respect to the Work, in either case contrary to Affirmer's 100 | express Statement of Purpose. 101 | 102 | 4. Limitations and Disclaimers. 103 | 104 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 105 | surrendered, licensed or otherwise affected by this document. 106 | b. Affirmer offers the Work as-is and makes no representations or 107 | warranties of any kind concerning the Work, express, implied, 108 | statutory or otherwise, including without limitation warranties of 109 | title, merchantability, fitness for a particular purpose, non 110 | infringement, or the absence of latent or other defects, accuracy, or 111 | the present or absence of errors, whether or not discoverable, all to 112 | the greatest extent permissible under applicable law. 113 | c. 
Affirmer disclaims responsibility for clearing rights of other persons 114 | that may apply to the Work or any use thereof, including without 115 | limitation any person's Copyright and Related Rights in the Work. 116 | Further, Affirmer disclaims responsibility for obtaining any necessary 117 | consents, permissions or other rights required for any use of the 118 | Work. 119 | d. Affirmer understands and acknowledges that Creative Commons is not a 120 | party to this document and has no duty or obligation with respect to 121 | this CC0 or use of the Work. 122 | -------------------------------------------------------------------------------- /Make_SS_330_new.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # script modified by N. Schindler 05-17-2022 4 | 5 | # output settings 6 | function display_settings() 7 | { 8 | echo "-- $Type Settings --" 9 | echo "ADMB_HOME = $ADMB_HOME" 10 | echo "Source Dir = $SRC_DIR" 11 | echo "Build Dir = $BUILD_DIR" 12 | echo "Build exe = $BUILD_TYPE" 13 | echo "Warnings = $WARNINGS" 14 | } 15 | 16 | # want help? 17 | function usage() 18 | { 19 | echo "" 20 | echo "Call this script as follows:" 21 | echo " ./Make_SS_330_new.sh [(-s | --source) source_dir] [(-b | --build) build_dir]" 22 | echo " [(-a | --admb) admb_dir] [[-o | --opt] | [-f | --safe] [-p] " 23 | echo " [-w | --warn] [-d | --debug] [-h | --help]" 24 | echo "Notes:" 25 | echo " -p is an ADMB flag to build statically and will be passed. " 26 | echo " -w re-compiles with common warnings enabled. " 27 | echo " -d will display the settings used to build SS. " 28 | echo "" 29 | echo "The default is the 'safe' build to directory SS330." 
30 | echo "" 31 | display_settings 32 | } 33 | 34 | # create safe source tpl 35 | function cat_safe_files() 36 | { 37 | # concatenate all tpl files to a single file 38 | cat SS_biofxn.tpl SS_miscfxn.tpl SS_selex.tpl SS_popdyn.tpl SS_recruit.tpl SS_benchfore.tpl SS_expval.tpl SS_objfunc.tpl SS_write.tpl SS_write_ssnew.tpl SS_write_report.tpl SS_ALK.tpl SS_timevaryparm.tpl SS_tagrecap.tpl > SS_functions.temp 39 | cat SS_versioninfo_330safe.tpl SS_readstarter.tpl SS_readdata_330.tpl SS_readcontrol_330.tpl SS_param.tpl SS_prelim.tpl SS_global.tpl SS_proced.tpl SS_functions.temp > $BUILD_DIR/ss3.tpl 40 | } 41 | 42 | # create opt source tpl 43 | function cat_opt_files() 44 | { 45 | # concatenate all tpl files to a single file 46 | cat SS_biofxn.tpl SS_miscfxn.tpl SS_selex.tpl SS_popdyn.tpl SS_recruit.tpl SS_benchfore.tpl SS_expval.tpl SS_objfunc.tpl SS_write.tpl SS_write_ssnew.tpl SS_write_report.tpl SS_ALK.tpl SS_timevaryparm.tpl SS_tagrecap.tpl > SS_functions.temp 47 | cat SS_versioninfo_330opt.tpl SS_readstarter.tpl SS_readdata_330.tpl SS_readcontrol_330.tpl SS_param.tpl SS_prelim.tpl SS_global.tpl SS_proced.tpl SS_functions.temp > $BUILD_DIR/ss3_opt.tpl 48 | } 49 | 50 | # default directories 51 | SRC_DIR=. 
52 | BUILD_DIR=SS330 53 | # other defaults (safe build is the default) 54 | BUILD_TYPE=ss3 55 | WARNINGS=off 56 | DEBUG=off 57 | GREP= 58 | Type=Current 59 | STATICFLAG= 60 | OPTFLAG= 61 | 62 | if [ "$1" == "" ] ; then 63 | Type=Default 64 | display_settings 65 | usage 66 | exit 1 67 | fi 68 | 69 | while [ "$1" != "" ]; do 70 | case $1 in 71 | # debug 72 | -d | --debug ) DEBUG=on 73 | ;; 74 | # show standard warnings 75 | -w | --warn ) WARNINGS=on 76 | ;; 77 | # check for new source directory 78 | -s | --source ) shift 79 | SRC_DIR=$1 80 | ;; 81 | # check for new build directory 82 | -b | --build ) shift 83 | BUILD_DIR=$1 84 | ;; 85 | # check for ADMB directory and set 86 | -a | --admb ) shift 87 | if [[ "$1" == "docker" ]] ; then 88 | ADMB_HOME=docker 89 | else 90 | ADMB_HOME=$1 91 | export ADMB_HOME 92 | PATH=$ADMB_HOME:$PATH 93 | fi 94 | ;; 95 | # output help - usage 96 | -h | --help ) Type=Default 97 | usage 98 | exit 99 | ;; 100 | # build statically? (admb flag passed through - not documented) 101 | -p ) STATICFLAG=-p 102 | ;; 103 | # build safe version 104 | -f | --safe ) BUILD_TYPE=ss3 105 | ;; 106 | # build fast version 107 | -o | --opt ) BUILD_TYPE=ss3_opt 108 | OPTFLAG=-f 109 | ;; 110 | esac 111 | shift 112 | done 113 | 114 | # change to the source dir 115 | cd $SRC_DIR 116 | 117 | # delete the temp file if it exists 118 | if [ -f SS_functions.temp ]; then 119 | rm SS_functions.temp 120 | fi 121 | 122 | # create source files in build dir 123 | if [[ ! -d "$BUILD_DIR" ]]; then 124 | mkdir -p $BUILD_DIR 125 | fi 126 | case $BUILD_TYPE in 127 | ss3_opt ) grep "opt" SS_versioninfo_330opt.tpl 128 | cat_opt_files 129 | ;; 130 | ss3 ) grep "safe" SS_versioninfo_330safe.tpl 131 | cat_safe_files 132 | ;; 133 | esac 134 | if [ ! -f $BUILD_DIR/$BUILD_TYPE.tpl ]; then 135 | echo "Error: Unable to find $BUILD_DIR/$BUILD_TYPE.tpl" 136 | exit 137 | fi 138 | 139 | # if admb is not in path, use docker to build. 
140 | if [[ "$ADMB_HOME" != "docker" ]] ; then 141 | if [[ "$OS" == "Windows_NT" ]] ; then 142 | if [[ -x "$(command -v admb.sh)" ]] ; then 143 | ADMB_HOME="$(dirname $(command -v admb.sh))" 144 | else 145 | unset ADMB_HOME 146 | fi 147 | else 148 | if [[ -x "$(command -v admb)" ]] ; then 149 | ADMB_HOME="$(dirname $(command -v admb))" 150 | else 151 | unset ADMB_HOME 152 | fi 153 | fi 154 | if [[ -z "$ADMB_HOME" ]] ; then 155 | ADMB_HOME=docker 156 | fi 157 | fi 158 | 159 | # debug info 160 | if [[ "$DEBUG" == "on" ]] ; then 161 | display_settings 162 | else 163 | if [[ "$ADMB_HOME" == "docker" ]] ; then 164 | echo "-- Building $BUILD_TYPE with docker in '$BUILD_DIR' --" 165 | else 166 | echo "-- Building $BUILD_TYPE in '$BUILD_DIR' --" 167 | fi 168 | fi 169 | 170 | # change to build dir and build 171 | if [[ "$ADMB_HOME" == "docker" ]] ; then 172 | if [[ "$OS" == "Windows_NT" ]] ; then 173 | if [[ "`ver`" =~ "Version 10.0.1" ]]; then 174 | WINDOWS10=true 175 | fi 176 | if [[ "$WARNINGS" == "on" ]] ; then 177 | if [[ "$WINDOWS10" == "true" ]] ; then 178 | docker run --env CXXFLAGS="-Wall -Wextra" --rm --mount source=`cygpath -w $PWD`\\$BUILD_DIR,destination=C:\\$BUILD_TYPE,mount=bind --workdir C:\\$BUILD_TYPE johnoel/admb-13.2:windows10 $BUILD_TYPE.tpl 179 | else 180 | docker run --env CXXFLAGS="-Wall -Wextra" --rm --mount source=`cygpath -w $PWD`\\$BUILD_DIR,destination=C:\\$BUILD_TYPE,mount=bind --workdir C:\\$BUILD_TYPE johnoel/admb-13.2:windows $BUILD_TYPE.tpl 181 | fi 182 | else 183 | if [[ "$WINDOWS10" == "true" ]] ; then 184 | docker run --rm --mount source=`cygpath -w $PWD`\\$BUILD_DIR,destination=C:\\$BUILD_TYPE,mount=bind --workdir C:\\$BUILD_TYPE johnoel/admb-13.2:windows10 $BUILD_TYPE.tpl 185 | else 186 | docker run --rm --mount source=`cygpath -w $PWD`\\$BUILD_DIR,destination=C:\\$BUILD_TYPE,mount=bind --workdir C:\\$BUILD_TYPE johnoel/admb-13.2:windows $BUILD_TYPE.tpl 187 | fi 188 | fi 189 | else 190 | if [[ "$WARNINGS" == "on" ]] ; then 191 | docker 
run --env CXXFLAGS="-Wall -Wextra" --rm --mount source=$PWD/$BUILD_DIR,destination=/$BUILD_TYPE,type=bind --workdir /$BUILD_TYPE johnoel/admb-13.2:linux $BUILD_TYPE.tpl 192 | else 193 | docker run --rm --mount source=$PWD/$BUILD_DIR,destination=/$BUILD_TYPE,type=bind --workdir /$BUILD_TYPE johnoel/admb-13.2:linux $BUILD_TYPE.tpl 194 | fi 195 | fi 196 | else 197 | command pushd $BUILD_DIR > /dev/null 198 | if [[ "$WARNINGS" == "on" ]] ; then 199 | export CXXFLAGS="-Wall -Wextra" 200 | fi 201 | if [[ "$OS" == "Windows_NT" ]] ; then 202 | admb.sh $OPTFLAG $STATICFLAG $BUILD_TYPE 203 | chmod a+x $BUILD_TYPE 204 | else 205 | admb $OPTFLAG $STATICFLAG $BUILD_TYPE 206 | fi 207 | command popd > /dev/null 208 | fi 209 | 210 | # output warnings 211 | #if [[ "$WARNINGS" == "on" ]] ; then 212 | # echo "... compiling a second time to get warnings ..." 213 | # g++ -c -std=c++0x -O3 -I. -I"$ADMB_HOME/include" -I"/$ADMB_HOME/include/contrib" -o$BUILD_TYPE.obj $BUILD_TYPE.cpp -Wall -Wextra 214 | #fi 215 | 216 | exit 217 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Stock Synthesis 3.30 2 | ![GitHub release (latest by date)](https://img.shields.io/github/v/release/nmfs-ost/ss3-source-code) 3 | ![GitHub Release Date](https://img.shields.io/github/release-date/nmfs-ost/ss3-source-code) 4 | [![GitHub last commit](https://img.shields.io/github/last-commit/nmfs-ost/ss3-source-code)](https://github.com/nmfs-ost/ss3-source-code/commits/main) 5 | ![GitHub release (latest by date)](https://img.shields.io/github/downloads/nmfs-ost/ss3-source-code/latest/total) 6 | 7 | Stock Synthesis (SS3) is a generalized age-structured population dynamics model implemented in [ADMB](http://www.admb-project.org/). It is used to assess the effect of fisheries on fish and shellfish stocks while taking into account the influence of environmental factors. 
8 | 9 | # Table of contents 10 | - [Citing Stock Synthesis](#citing-stock-synthesis) 11 | - [Installation](#installation) 12 | - [How can I learn how to use Stock Synthesis?](#how-can-i-learn-how-to-use-stock-synthesis) 13 | - [How do I ask questions about Stock Synthesis?](#how-do-i-ask-questions-about-stock-synthesis) 14 | - [How can I contribute to Stock Synthesis?](#how-can-i-contribute-to-stock-synthesis) 15 | - [Tools for working with Stock Synthesis](#tools-for-working-with-stock-synthesis) 16 | - [Disclaimer](#disclaimer) 17 | 18 | 19 | ## Citing Stock Synthesis 20 | 21 | Please cite Stock Synthesis as: 22 | 23 | ``` 24 | Methot, R.D. and Wetzel, C.R. (2013). Stock Synthesis: A biological and statistical 25 | framework for fish stock assessment and fishery management. Fisheries Research, 26 | 142: 86-99. https://doi.org/10.1016/j.fishres.2012.10.012 27 | ``` 28 | 29 | ## Installation 30 | 31 | Download the latest compiled versions from [Github Releases](https://github.com/nmfs-ost/ss3-source-code/releases). For information on specific changes with each release, please refer to the [change log on GitHub](https://github.com/orgs/nmfs-ost/projects/11) for changes from v.3.30.19 onward and the [Excel spreadsheet version of the change log](https://github.com/nmfs-ost/ss3-source-code/blob/v3.30.19/Change_log_for_SS_3.30.xlsx?raw=true) for changes prior to v.3.30.19. 32 | 33 | ## How can I learn how to use Stock Synthesis? 34 | 35 | To learn more about how to use Stock Synthesis, see the [SS3 website](https://nmfs-ost.github.io/ss3-website/) for tutorials to [get started](https://nmfs-ost.github.io/ss3-website/qmds/getting_started_ss3.html) and [build your own models](https://nmfs-ost.github.io/ss3-website/qmds/ss3_model_tips.html) as well as topic-focused vignettes. 36 | 37 | The [Stock Synthesis user manual](https://nmfs-ost.github.io/ss3-doc/SS330_User_Manual_release.html) provides the complete documentation of Stock Synthesis. 
38 | 39 | ## How do I ask questions about Stock Synthesis? 40 | 41 | Please look for answers or submit questions to the [Stock Synthesis Google Group](https://groups.google.com/g/ss3-forum) (must request to join first). Questions can also be asked by opening an [issue](https://github.com/nmfs-ost/ss3-source-code/issues) in this repository or by emailing nmfs.stock.synthesis@noaa.gov. 42 | 43 | ## How can I contribute to Stock Synthesis? 44 | 45 | Have feature requests or bug reports? Want to contribute code? Please open an [issue](https://github.com/nmfs-ost/ss3-source-code/issues) or submit a pull request. For complete details, please see [CONTRIBUTING.md](CONTRIBUTING.md) 46 | 47 | This project and everyone participating in it is governed by the [NMFS Fisheries Toolbox Code of Conduct](https://github.com/nmfs-fish-tools/Resources/blob/master/CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. 48 | 49 | ## Tools for working with Stock Synthesis 50 | 51 | As Stock Synthesis usage has grown, so has the number of tools to work with it. These include [repositories on github with the stock-synthesis topic](https://github.com/topics/stock-synthesis) as well as: 52 | 53 | - [r4ss](https://github.com/r4ss/r4ss): Create plots of Stock Synthesis output and functions to work with Stock Synthesis in R. 54 | - [ss3diags](https://github.com/PIFSCstockassessments/ss3diags): Run advanced diagnostics for Stock Synthesis models. 55 | - [ss3sim](https://github.com/ss3sim/ss3sim): Conduct simulation studies using Stock Synthesis. 56 | - [SSI](https://vlab.noaa.gov/web/stock-synthesis/document-library/-/document_library/0LmuycloZeIt/view/5042951): Stock Synthesis Interface, a GUI for developing models and running Stock Synthesis. Links to r4ss. 57 | - [SS3 Shiny helper app](https://connect.fisheries.noaa.gov/ss3-helper/): Visualize common selectivity pattern options available within Stock Synthesis. 
58 | - [SSMSE](https://github.com/nmfs-fish-tools/SSMSE): Use Stock Synthesis operating models in Management Strategy Evaluation. 59 | - [sa4ss](https://github.com/nwfsc-assess/sa4ss): Create accessible R markdown stock assessment documents with results from Stock Synthesis models. Note this tool is intended for use by analysts within the Northwest and Southwest Fisheries Science Centers currently. 60 | - Data limited tools - Options included Simple Stock Synthesis ([SSS](https://github.com/shcaba/SSS)) and Extended Simple Stock Synthesis ([XSSS](https://github.com/chantelwetzel-noaa/XSSS)), as well as [SS-DL-tool](https://github.com/shcaba/SS-DL-tool), a shiny app that includes XSSS and SSS in its functionality. 61 | 62 | Have a tool to work with Stock Synthesis that should be mentioned here? Open an issue or pull request to let us know! 63 | 64 | ## Disclaimer 65 | 66 | This repository is a scientific product and is not official communication of the National Oceanic and 67 | Atmospheric Administration, or the United States Department of Commerce. All NOAA GitHub project 68 | code is provided on an ‘as is’ basis and the user assumes responsibility for its use. Any claims against the 69 | Department of Commerce or Department of Commerce bureaus stemming from the use of this GitHub 70 | project will be governed by all applicable Federal law. Any reference to specific commercial products, 71 | processes, or services by service mark, trademark, manufacturer, or otherwise, does not constitute or 72 | imply their endorsement, recommendation or favoring by the Department of Commerce. The Department 73 | of Commerce seal and logo, or the seal and logo of a DOC bureau, shall not be used in any manner to 74 | imply endorsement of any commercial product or activity by DOC or the United States Government. 
75 | -------------------------------------------------------------------------------- /SS_ALK.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #20. **SS_ALK.tpl** 2 | // SS_Label_file # * Make_AgeLength_Key() // calculates age-length key for a particular season and subseason; uses calc_ALK or calc_ALK_log 3 | // SS_Label_file # * calc_ALK_range() // allows for condensing range of lengths for each age, but no longer used 4 | // SS_Label_file # * calc_ALK() // calculates normal distribution of length-at-age 5 | // SS_Label_file # * calc_ALK_log() // for lognormal distribution of length-at-age 6 | // SS_Label_file # 7 | 8 | FUNCTION void Make_AgeLength_Key(const int s, const int subseas) 9 | { 10 | //******************************************************************** 11 | /* SS_Label_FUNCTION 31 Make_AgeLength_Key */ 12 | // this is called for each subseason of each year 13 | // checks to see if a re-calc of the ALK is needed for that time step 14 | // if it is, then it loops through all possible biological entities "g" (sex, growth pattern, settlement event, platoon) 15 | // then it retrieves the previously calculated and stored mean size-at-age from Ave_Size(t,subseas,gstart) 16 | // moves these mean sizes into a _W working vector 17 | // then it calls calc_ALK to make and store the age-length key for that subseason for each biological entity 18 | 19 | int gstart = 0; 20 | dvariable dvar_platoon_ratio = platoon_sd_ratio; 21 | dvariable dvar_between_platoon = sd_between_platoon; 22 | dvariable dvar_within_platoon = sd_within_platoon; 23 | dvar_vector use_Ave_Size_W(0, nages); 24 | dvar_vector use_SD_Size(0, nages); 25 | imatrix ALK_range_use(0, nages, 1, 2); 26 | ALK_idx = (s - 1) * N_subseas + subseas; 27 | if (ALK_subseas_update(ALK_idx) == 1) // so need to calculate 28 | { 29 | ALK_subseas_update(ALK_idx) = 0; // reset to 0 to indicate update has been done 30 | gp = 0; 31 | // calculate the between and 
within stdev ratio 32 | // when sd_ratio_rd is > 0, values are constant and calculations are already done. 33 | if (sd_ratio_rd < 0) 34 | { 35 | dvar_platoon_ratio = MGparm(sd_ratio_param_ptr); 36 | dvar_between_platoon = sqrt(1. / (1. + dvar_platoon_ratio * dvar_platoon_ratio)); 37 | dvar_within_platoon = dvar_platoon_ratio * dvar_between_platoon; 38 | platoon_sd_ratio = value(dvar_platoon_ratio); 39 | sd_between_platoon = value(dvar_between_platoon); 40 | sd_within_platoon = value(dvar_within_platoon); 41 | } 42 | 43 | for (int sex = 1; sex <= gender; sex++) 44 | for (GPat = 1; GPat <= N_GP; GPat++) 45 | { 46 | gp = gp + 1; 47 | gstart = g_Start(gp); // base platoon 48 | for (settle = 1; settle <= N_settle_timings; settle++) 49 | { 50 | gstart += N_platoon; 51 | if (recr_dist_pattern(GPat, settle, 0) > 0) 52 | { 53 | 54 | // update the sd_within and sb_between here. Used to be in growth2 function 55 | // SS_Label_Info_16.5.2 #do calculations related to std.dev. of size-at-age 56 | // SS_Label_Info_16.5.3 #if (y=styr), calc CV_G(gp,s,a) by interpolation on age or LAA 57 | // doing this just at y=styr prevents the CV from changing as time-vary growth updates over time 58 | g = gstart; 59 | if (CV_const(gp) > 0 && y == styr) 60 | { 61 | for (a = 0; a <= nages; a++) 62 | { 63 | if (real_age(g, ALK_idx, a) < AFIX) 64 | { 65 | CV_G(gp, ALK_idx, a) = CVLmin(gp); 66 | } 67 | else if (real_age(g, ALK_idx, a) >= AFIX2_forCV) 68 | { 69 | CV_G(gp, ALK_idx, a) = CVLmax(gp); 70 | } 71 | else if (CV_depvar_a == 0) 72 | { 73 | CV_G(gp, ALK_idx, a) = CVLmin(gp) + (Ave_Size(t, subseas, g, a) - Lmin(gp)) * CV_delta(gp); 74 | } 75 | else 76 | { 77 | CV_G(gp, ALK_idx, a) = CVLmin(gp) + (real_age(g, ALK_idx, a) - AFIX) * CV_delta(gp); 78 | } 79 | } // end age loop 80 | } 81 | else 82 | { 83 | // already set constant to CVLmi 84 | } 85 | // SS_Label_Info_16.5.4 #calc stddev of size-at-age from CV_G(gp,s,a) and Ave_Size(t,g,a) 86 | if (CV_depvar_b == 0) 87 | { 88 | 
Sd_Size_within(ALK_idx, g) = SD_add_to_LAA + elem_prod(CV_G(gp, ALK_idx), Ave_Size(t, subseas, g)); 89 | } 90 | else 91 | { 92 | Sd_Size_within(ALK_idx, g) = SD_add_to_LAA + CV_G(gp, ALK_idx); 93 | } 94 | // SS_Label_Info_16.3.5 #if platoons being used, calc the stddev between platoons 95 | if (N_platoon > 1) 96 | { 97 | Sd_Size_between(ALK_idx, g) = Sd_Size_within(ALK_idx, g) * dvar_between_platoon; 98 | Sd_Size_within(ALK_idx, g) *= dvar_within_platoon; 99 | } 100 | 101 | if (docheckup == 1) 102 | { 103 | echoinput << "with lingrow; subseas: " << subseas << " sex: " << sx(g) << " gp: " << GP4(g) << " g: " << g << endl; 104 | echoinput << "size " << Ave_Size(t, subseas, g)(0, min(6, nages)) << " @nages " << Ave_Size(t, subseas, g, nages) << endl; 105 | if (CV_depvar_b == 0) 106 | echoinput << "CV " << CV_G(gp, ALK_idx)(0, min(6, nages)) << " @nages " << CV_G(gp, ALK_idx, nages) << endl; 107 | echoinput << "sd " << Sd_Size_within(ALK_idx, g)(0, min(6, nages)) << " @nages " << Sd_Size_within(ALK_idx, g, nages) << endl; 108 | } 109 | 110 | // end sd_within updating 111 | 112 | for (gp2 = 1; gp2 <= N_platoon; gp2++) // loop the platoons 113 | { 114 | g = gstart + ishadow(gp2); 115 | 116 | use_Ave_Size_W = Ave_Size(t, subseas, gstart); 117 | use_SD_Size = Sd_Size_within(ALK_idx, gstart); 118 | if (N_platoon > 1) 119 | { 120 | use_Ave_Size_W += shadow(gp2) * Sd_Size_between(ALK_idx, gstart); 121 | Ave_Size(t, subseas, g) = use_Ave_Size_W; // only needed for reporting because use_Ave_Size_W used for calcs 122 | Sd_Size_within(ALK_idx, g) = use_SD_Size; // ditto; also same sd is used for all platoons 123 | } 124 | 125 | if (Grow_logN == 0) 126 | { 127 | ALK(ALK_idx, g) = calc_ALK(len_bins, use_Ave_Size_W, use_SD_Size); 128 | } 129 | else 130 | { 131 | ALK(ALK_idx, g) = calc_ALK_log(log_len_bins, use_Ave_Size_W, use_SD_Size); 132 | } 133 | } // end platoon loop 134 | } 135 | } // end settle loop 136 | } // end growth pattern&gender loop 137 | } 138 | } // end 
Make_AgeLength_Key 139 | 140 | FUNCTION imatrix calc_ALK_range(const dvector& len_bins, const dvar_vector& mean_len_at_age, const dvar_vector& sd_len_at_age, 141 | const double ALK_tolerance) 142 | { 143 | // SS_Label_FUNCTION_31.2 # calc_ALK_range finds the range for the distribution of length for each age 144 | int a, z = 0; // declare indices 145 | int nlength = len_bins.indexmax(); // find number of lengths 146 | int nages = mean_len_at_age.indexmax(); // find number of ages 147 | imatrix ALK_range(0, nages, 1, 2); // stores minimum and maximum 148 | dvariable len_dev; 149 | double ALK_tolerance_2; 150 | ALK_tolerance_2 = 1.0 - ALK_tolerance; 151 | for (a = 0; a <= nages; a++) 152 | { 153 | if (ALK_tolerance == 0.00) 154 | { 155 | ALK_range(a, 1) = 1; 156 | ALK_range(a, 2) = nlength; 157 | } 158 | else 159 | { 160 | z = 1; 161 | temp = 0.0; 162 | while (temp < ALK_tolerance && z < nlength) 163 | { 164 | len_dev = (len_bins(z) - mean_len_at_age(a)) / (sd_len_at_age(a)); 165 | temp = cumd_norm(len_dev); 166 | z++; 167 | } 168 | ALK_range(a, 1) = z; 169 | temp = 0.0; 170 | while (temp < ALK_tolerance_2 && z < nlength) 171 | { 172 | len_dev = (len_bins(z) - mean_len_at_age(a)) / (sd_len_at_age(a)); 173 | temp = cumd_norm(len_dev); 174 | z++; 175 | } // end length loop 176 | ALK_range(a, 2) = min(z, nlength); 177 | } 178 | } // end age loop 179 | return (ALK_range); 180 | } 181 | 182 | // the function calc_ALK is called by Make_AgeLength_Key to calculate the distribution of length for each age 183 | FUNCTION dvar_matrix calc_ALK(const dvector& len_bins, const dvar_vector& mean_len_at_age, const dvar_vector& sd_len_at_age) 184 | { 185 | // the function calc_ALK is called by Make_AgeLength_Key to calculate the distribution of length for each age 186 | RETURN_ARRAYS_INCREMENT(); 187 | // SS_Label_FUNCTION_31.2 #Calculate the ALK 188 | int a, z; // declare indices 189 | int nlength = len_bins.indexmax(); // find number of lengths 190 | int nages = 
mean_len_at_age.indexmax(); // find number of ages 191 | dvar_matrix ALK_w(0, nages, 1, nlength); // create matrix to return with length vectors for each age 192 | dvar_vector AL(1, nlength + 1); // create temporary vector 193 | dvariable len_dev; 194 | // ALK_count++; 195 | ALK_w.initialize(); 196 | for (a = 0; a <= nages; a++) 197 | { 198 | AL.initialize(); 199 | for (z = 1; z <= nlength; z++) 200 | { 201 | len_dev = (len_bins(z) - mean_len_at_age(a)) / (sd_len_at_age(a)); 202 | AL(z) = cumd_norm(len_dev); 203 | } 204 | AL(nlength + 1, nlength + 1) = 1.0; 205 | ALK_w(a) = first_difference(AL); 206 | ALK_w(a, 1) += AL(1); // because first bin is from cumulative calc 207 | } // end age loop 208 | 209 | RETURN_ARRAYS_DECREMENT(); 210 | return (ALK_w); 211 | } 212 | 213 | FUNCTION dvar_matrix calc_ALK_log(const dvector& len_bins, const dvar_vector& mean_len_at_age, const dvar_vector& sd_len_at_age) 214 | { 215 | RETURN_ARRAYS_INCREMENT(); 216 | //SS_Label_FUNCTION_31.3 #Calculate the ALK with lognormal error, called when Grow_logN==1 217 | int a, z; // declare indices 218 | int nlength = len_bins.indexmax(); // find number of lengths 219 | int nages = mean_len_at_age.indexmax(); // find number of ages 220 | dvar_matrix ALK_w(0, nages, 1, nlength); // create matrix to return with length vectors for each age 221 | dvar_vector AL(1, nlength + 1); // create temporary vector 222 | dvariable len_dev; 223 | dvariable temp; 224 | 225 | AL(1) = 0.0; 226 | AL(nlength + 1) = 1.0; // terminal values that are not recalculated 227 | 228 | for (a = 0; a <= nages; a++) 229 | { 230 | temp = log(mean_len_at_age(a)) - 0.5 * sd_len_at_age(a) * sd_len_at_age(a); 231 | for (z = 2; z <= nlength; z++) 232 | { 233 | len_dev = (len_bins(z) - temp) / (sd_len_at_age(a)); 234 | AL(z) = cumd_norm(len_dev); 235 | } // end length loop 236 | ALK_w(a) = first_difference(AL); 237 | } // end age loop 238 | RETURN_ARRAYS_DECREMENT(); 239 | return (ALK_w); 240 | } 241 | 242 | 
-------------------------------------------------------------------------------- /SS_miscfxn.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #10. **SS_miscfxn.tpl** 2 | // SS_Label_file # * Join_Fxn() // joins line segments in differentiable way 3 | // SS_Label_file # * get_age_age() // for ageing error 4 | // SS_Label_file # * get_catch_mult() 5 | // SS_Label_file # 6 | 7 | //******************************************************************** 8 | /* SS_Label_FUNCTION 42 Join_Fxn */ 9 | FUNCTION dvariable Join_Fxn(const prevariable& MinPoss, const prevariable& MaxPoss, const prevariable& Inflec, const prevariable& Xvar, const prevariable& Y1, const prevariable& Y2) 10 | { 11 | RETURN_ARRAYS_INCREMENT(); 12 | dvariable Yresult; 13 | dvariable join; 14 | join = 1.000 / (1.000 + mfexp(1000.0 * (Xvar - Inflec) / (MaxPoss - MinPoss))); // steep joiner at the inflection 15 | Yresult = Y1 * (join) + Y2 * (1.000 - join); 16 | RETURN_ARRAYS_DECREMENT(); 17 | return Yresult; 18 | } 19 | 20 | //******************************************************************** 21 | /* SS_Label_FUNCTION 45 get_age_age */ 22 | FUNCTION void get_age_age(const int Keynum, const int AgeKey_StartAge, const int AgeKey_Linear1, const int AgeKey_Linear2) 23 | { 24 | // FUTURE: calculate adjustment to oldest age based on continued ageing of old fish 25 | age_age(Keynum).initialize(); 26 | dvariable age; 27 | dvar_vector age_err_parm(1, 7); 28 | dvariable temp; 29 | 30 | if (Keynum == Use_AgeKeyZero) 31 | { 32 | // SS_Label_45.1 set age_err_parm to mgp_adj, so can be time-varying according to MGparm options 33 | for (a = 1; a <= 7; a++) 34 | { 35 | age_err_parm(a) = mgp_adj(AgeKeyParm - 1 + a); 36 | } 37 | age_err(Use_AgeKeyZero, 1)(0, AgeKey_StartAge) = r_ages(0, AgeKey_StartAge) + 0.5; 38 | age_err(Use_AgeKeyZero, 2)(0, AgeKey_StartAge) = age_err_parm(5) * (r_ages(0, AgeKey_StartAge) + 0.5) / (age_err_parm(1) + 0.5); 39 | // SS_Label_45.3 
calc ageing bias 40 | if (AgeKey_Linear1 == 0) 41 | { 42 | age_err(Use_AgeKeyZero, 1)(AgeKey_StartAge, nages) = 0.5 + r_ages(AgeKey_StartAge, nages) + age_err_parm(2) + (age_err_parm(3) - age_err_parm(2)) * (1.0 - mfexp(-age_err_parm(4) * (r_ages(AgeKey_StartAge, nages) - age_err_parm(1)))) / (1.0 - mfexp(-age_err_parm(4) * (r_ages(nages) - age_err_parm(1)))); 43 | } 44 | else 45 | { 46 | age_err(Use_AgeKeyZero, 1)(AgeKey_StartAge, nages) = 0.5 + r_ages(AgeKey_StartAge, nages) + age_err_parm(2) + (age_err_parm(3) - age_err_parm(2)) * (r_ages(AgeKey_StartAge, nages) - age_err_parm(1)) / (r_ages(nages) - age_err_parm(1)); 47 | } 48 | // SS_Label_45.4 calc ageing variance 49 | if (AgeKey_Linear2 == 0) 50 | { 51 | age_err(Use_AgeKeyZero, 2)(AgeKey_StartAge, nages) = age_err_parm(5) + (age_err_parm(6) - age_err_parm(5)) * (1.0 - mfexp(-age_err_parm(7) * (r_ages(AgeKey_StartAge, nages) - age_err_parm(1)))) / (1.0 - mfexp(-age_err_parm(7) * (r_ages(nages) - age_err_parm(1)))); 52 | } 53 | else 54 | { 55 | age_err(Use_AgeKeyZero, 2)(AgeKey_StartAge, nages) = age_err_parm(5) + (age_err_parm(6) - age_err_parm(5)) * (r_ages(AgeKey_StartAge, nages) - age_err_parm(1)) / (r_ages(nages) - age_err_parm(1)); 56 | } 57 | } 58 | 59 | // SS_Label_45.5 calc distribution of age' for each age 60 | for (a = 0; a <= nages; a++) 61 | { 62 | if (age_err(Keynum, 1, a) <= -1) 63 | { 64 | age_err(Keynum, 1, a) = r_ages(a) + 0.5; 65 | } 66 | age = age_err(Keynum, 1, a); 67 | 68 | for (b = 2; b <= n_abins; b++) // so the lower tail is accumulated into the first age' bin 69 | age_age(Keynum, b, a) = cumd_norm((age_bins(b) - age) / age_err(Keynum, 2, a)); 70 | 71 | for (b = 1; b <= n_abins - 1; b++) 72 | age_age(Keynum, b, a) = age_age(Keynum, b + 1, a) - age_age(Keynum, b, a); 73 | 74 | age_age(Keynum, n_abins, a) = 1. 
- age_age(Keynum, n_abins, a); // so remainder is accumulated into the last age' bin 75 | } 76 | 77 | if (gender == 2) // copy ageing error matrix into male location also 78 | { 79 | L2 = n_abins; 80 | A2 = nages + 1; 81 | for (b = 1; b <= n_abins; b++) 82 | for (a = 0; a <= nages; a++) 83 | { 84 | age_age(Keynum, b + L2, a + A2) = age_age(Keynum, b, a); 85 | } 86 | } 87 | return; 88 | } // end age_age key 89 | 90 | FUNCTION void get_catch_mult(int y, int catch_mult_pointer) 91 | { 92 | /* SS_Label_FUNCTION 47 catch_multiplier */ 93 | int j; 94 | j = 0; 95 | for (f = 1; f <= Nfleet; f++) 96 | { 97 | if (need_catch_mult(f) == 1) 98 | { 99 | catch_mult(y, f) = mgp_adj(catch_mult_pointer + j); 100 | j++; 101 | } 102 | } 103 | return; 104 | } 105 | 106 | //******************************************************************** 107 | /* SS_Label_FUNCTION 4XX Comp_logL */ 108 | FUNCTION dvariable Comp_logL_multinomial(const double& Nsamp, const dvector& obs_comp, const dvar_vector& exp_comp) 109 | { 110 | dvariable logL; 111 | // logL = - Nsamp * obs_comp(tail_L, tail_H) * log(exp_comp(tail_L, tail_H)); 112 | // the call to this function does the subsetting to tail_L and tail_H, so this function can operate cleanly on the entirety of the passed vector 113 | logL = - Nsamp * obs_comp * log(exp_comp); 114 | return (logL); 115 | } 116 | 117 | FUNCTION dvariable Comp_logL_Dirichlet(const double& Nsamp, const dvariable& dirichlet_Parm, const dvector& obs_comp, const dvar_vector& exp_comp) 118 | { 119 | dvariable logL; 120 | logL = sum(gammln(Nsamp * obs_comp + dirichlet_Parm * exp_comp)) - sum(gammln(dirichlet_Parm * exp_comp)); 121 | return (logL); 122 | } 123 | -------------------------------------------------------------------------------- /SS_proced.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #8. **SS_proced.tpl** 2 | // SS_Label_file # -
PROCEDURE_SECTION
// SS_Label_file #
// SS_Label_file # - do iterations under control of ADMB, in each iteration, call: setup_recdevs(), get_initial_conditions(), get_time_series(), evaluate_the_objective_function()
// SS_Label_file # - writes to *parmtrace.sso*
// SS_Label_file # - calls get_posteriors() // to write to *posteriors.sso*
// SS_Label_file # - upon reaching convergence, or if in mceval, do Dynamic_Bzero by calling those functions again with fishery_on_off=0
// SS_Label_file #
// SS_Label_file # - call setup_Benchmark(), Get_Benchmark(), Get_Forecast()

// ****************************************************************************************************************
// SS_Label_Section_7.0 #PROCEDURE_SECTION
PROCEDURE_SECTION
{
  // Re-zero containers that are accumulated afresh on every function evaluation.
  Mgmt_quant.initialize();
  Extra_Std.initialize();
  CrashPen.initialize();
  Smry_Table.initialize();
  niter++; // count of function evaluations across all phases
  if (mceval_phase())
    mceval_counter++; // increment the counter

  if (initial_params::mc_phase == 1) // in MCMC phase
  {
    if (mcmc_counter == 0)
    {
      // first MCMC call only: nudge SR_parm(1) by MCMC_bump and report it
      SR_parm(1) += MCMC_bump;
      cout << mcmc_counter << " adjusted SR_parm in first mcmc call " << SR_parm(1) << " by " << MCMC_bump << endl;
    }

    mcmc_counter++;
  }

  if (mcmcFlag == 1) // so will do mcmc this run or is in mceval
  {
    // remap ParmTrace modes 1->4 and 2->3 so every iteration is traced,
    // then shut tracing off once past the first 10 mcmc/mceval iterations
    if (Do_ParmTrace == 1)
      Do_ParmTrace = 4; // to get all iterations
    if (Do_ParmTrace == 2)
      Do_ParmTrace = 3; // to get all iterations
    if (mcmc_counter > 10 || mceval_counter > 10)
      Do_ParmTrace = 0;
  }

  // SS_Label_Info_7.3 #get Hrate from the parameter vector F_rate
  // note that in SS_global BETWEEN_PHASES is where F_rate, which is the parameter, gets assigned a starting value Hrate from Hrate calculated by hybrid in previous PH
  // be careful about phases for when this mapping occurs for a whole fleet, versus estimation phase which can be value specific
  if (N_Fparm > 0)
  {
49 | for (g = 1; g <= N_Fparm; g++) 50 | { 51 | f = Fparm_loc[g](1); 52 | t = Fparm_loc[g](2); 53 | if (current_phase() >= F_PH_time(f, t)) 54 | { 55 | Hrate(f, t) = F_rate(g); 56 | } 57 | } 58 | } 59 | 60 | // SS_Label_Info_7.4 #Do the time series calculations 61 | if (mceval_counter == 0 || (mceval_counter > burn_intvl && ((double(mceval_counter) / double(thin_intvl)) - double((mceval_counter / thin_intvl)) == 0))) // check to see if burn in period is over 62 | { 63 | 64 | // create bigsaver to simplfy some condition statements later 65 | if ((save_for_report > 0) || ((sd_phase() || mceval_phase()) && (initial_params::mc_phase == 0))) // (SAVE || ( (SD || EVAL) && (!MCMC) ) ) 66 | { 67 | bigsaver = 1; 68 | } 69 | else 70 | { 71 | bigsaver = 0; 72 | } 73 | setup_recdevs(); 74 | y = styr; 75 | // SS_Label_Info_7.4.1 #Call fxn get_initial_conditions() to get the virgin and initial equilibrium population 76 | get_initial_conditions(); 77 | if (do_once == 1) 78 | echoinput << "Finished initial_conditions" << endl; 79 | // SS_Label_Info_7.4.2 #Call fxn get_time_series() to do population calculations for each year and get expected values for observations 80 | get_time_series(); // in procedure_section 81 | if (do_once == 1) 82 | { 83 | echoinput << "Finished time_series" << endl; 84 | } 85 | 86 | // SS_Label_Info_7.4.3 #Call fxn evaluate_the_objective_function() 87 | evaluate_the_objective_function(); 88 | 89 | // SS_Label_Info_7.6 #If sdphase or mcevalphase, do benchmarks and forecast and derived quantities 90 | if ((sd_phase() || mceval_phase()) && (initial_params::mc_phase == 0)) 91 | { 92 | 93 | // SS_Label_Info_7.6.1 #Call fxn Get_Benchmarks() 94 | if (Do_Benchmark > 0) 95 | { 96 | did_MSY = 0; // so that benchmarks will get calculated here 97 | setup_Benchmark(); 98 | Get_Benchmarks(show_MSY); 99 | } 100 | did_MSY = 1; // set flag to not calculate the benchmarks again in final section 101 | 102 | if (Do_Dyn_Bzero > 0) // do dynamic Bzero 103 | { 104 | 
fishery_on_off = 0; 105 | setup_recdevs(); 106 | y = styr; 107 | get_initial_conditions(); 108 | get_time_series(); 109 | if (Do_Forecast > 0) 110 | { 111 | show_MSY = 0; 112 | Get_Forecast(); 113 | } 114 | k = Do_Dyn_Bzero; 115 | for (j = styr - 2; j <= YrMax; j++) 116 | { 117 | Extra_Std(k) = SSB_yr(j); 118 | k++; 119 | } 120 | if (More_Std_Input(12) == 2) 121 | { 122 | for (j = styr - 2; j <= YrMax; j++) 123 | { 124 | Extra_Std(k) = exp_rec(j, 4); 125 | k++; 126 | } 127 | } 128 | } // end dynamic Bzero calculations, will write after big report 129 | 130 | fishery_on_off = 1; 131 | if (mceval_phase() > 0) 132 | save_for_report = 1; 133 | if (mceval_phase() == 0) 134 | { 135 | show_MSY = 1; 136 | } // so only show details if not in mceval 137 | if (show_MSY == 1) 138 | { 139 | echoinput << "Start benchmark and forecast, if requested" << endl; 140 | } 141 | setup_recdevs(); 142 | y = styr; 143 | get_initial_conditions(); 144 | get_time_series(); // in write_big_report 145 | evaluate_the_objective_function(); 146 | if (Do_Benchmark > 0) 147 | { 148 | setup_Benchmark(); 149 | Get_Benchmarks(show_MSY); 150 | } 151 | 152 | // SS_Label_Info_7.6.2 #Call fxn Get_Forecast() 153 | if (Do_Forecast > 0) 154 | { 155 | if (show_MSY == 1) 156 | report5 << "THIS FORECAST FOR PURPOSES OF STD REPORTING" << endl; // controls writing to forecast-report.sso 157 | Get_Forecast(); 158 | } 159 | 160 | // SS_Label_Info_7.7 #Call fxn Process_STDquant() to move calculated values into sd_containers 161 | Process_STDquant(); 162 | if (mceval_phase() == 0) 163 | { 164 | echoinput << "Finished benchmark, forecast, and sdreporting" << endl; 165 | } 166 | } // end of things to do in std_phase 167 | 168 | // SS_Label_Info_7.9 #Do screen output of procedure results from this iteration 169 | if (current_phase() <= max_phase + 1) 170 | phase_output(current_phase()) = value(obj_fun); 171 | if (rundetail > 1) 172 | { 173 | if (Svy_N > 0) 174 | cout << " CPUE " << surv_like << endl; 175 | if (nobs_disc 
> 0) 176 | cout << " Disc " << disc_like << endl; 177 | if (nobs_mnwt > 0) 178 | cout << " MnWt " << mnwt_like << endl; 179 | if (Nobs_l_tot > 0) 180 | cout << " Length " << length_like_tot << endl; 181 | if (Nobs_a_tot > 0) 182 | cout << " AGE " << age_like_tot << endl; 183 | if (nobs_ms_tot > 0) 184 | cout << " L-at-A " << sizeage_like << endl; 185 | if (SzFreq_Nmeth > 0) 186 | cout << " sizefreq " << SzFreq_like << endl; 187 | if (Do_TG > 0) 188 | cout << " TG-fleetcomp " << TG_like1 << endl 189 | << " TG-negbin " << TG_like2 << endl; 190 | cout << " Recr " << recr_like << " sum_recdev: " << sum_recdev << endl; 191 | cout << " InitEQ_Regime " << regime_like << endl; 192 | cout << " Parm_Priors " << parm_like << endl; 193 | cout << " Parm_devs " << parm_dev_like << endl; 194 | cout << " SoftBound " << SoftBoundPen << endl; 195 | cout << " F_ballpark " << F_ballpark_like << endl; 196 | if (F_Method > 1) 197 | { 198 | cout << "Catch " << sum(catch_like) << endl; 199 | } 200 | cout << " EQUL_catch " << sum(equ_catch_like) << endl; 201 | cout << " crash " << CrashPen << endl; 202 | } 203 | if (rundetail > 0) 204 | { 205 | temp = norm2(recdev(recdev_start, recdev_end)); 206 | temp = sqrt((temp + 0.0000001) / (double(recdev_end - recdev_start + 1))); 207 | if (mcmc_counter == 0 && mceval_counter == 0) 208 | { 209 | cout << current_phase() << " " << niter << " -log(L): " << obj_fun << " Spbio: " << value(SSB_yr(styr)) << " " << value(SSB_yr(endyr)); 210 | } 211 | else if (mcmc_counter > 0) 212 | { 213 | cout << " MCMC: " << mcmc_counter << " -log(L): " << obj_fun << " Spbio: " << value(SSB_yr(styr)) << " " << value(SSB_yr(endyr)); 214 | } 215 | else if (mceval_counter > 0) 216 | { 217 | cout << " MCeval: " << mceval_counter << " -log(L): " << obj_fun << " Spbio: " << value(SSB_yr(styr)) << " " << value(SSB_yr(endyr)); 218 | } 219 | if (F_Method > 1 && sum(catch_like) > 0.01) 220 | { 221 | cout << " cat " << sum(catch_like); 222 | } 223 | else if (CrashPen > 0.01) 224 | 
{ 225 | cout << " crash " << CrashPen; 226 | } 227 | cout << endl; 228 | } 229 | // SS_Label_Info_7.10 #Write parameter values to ParmTrace 230 | if ((Do_ParmTrace == 1 && obj_fun <= last_objfun) || Do_ParmTrace == 4) // only report active parameters 231 | { 232 | ParmTrace << current_phase(); 233 | if (sd_phase()) 234 | { 235 | ParmTrace << "_sd"; 236 | finished_minimize = 3; 237 | } // so flag is no longer==2 238 | if (finished_minimize == 2) 239 | ParmTrace << "_hs"; // each Hessian calculation takes 4 calls, all will get this flag, so output processor needs to create a 1-4 counter 240 | if (finished_minimize == 1) 241 | finished_minimize = 2; // this prevents _hs flag for the one iteration that occurs after minimizer ends and before first tweak of Hessian 242 | if (mceval_phase()) 243 | ParmTrace << "_mc"; 244 | 245 | ParmTrace << " " << niter << " "; 246 | ParmTrace.precision(10); 247 | ParmTrace << obj_fun << " " << obj_fun - last_objfun << " " << value(SSB_yr(styr)) << " " << value(SSB_yr(endyr)); 248 | ParmTrace.precision(2); 249 | ParmTrace << " " << biasadj(styr) << " " << max(biasadj) << " " << biasadj(endyr); 250 | ParmTrace.precision(7); 251 | for (j = 1; j <= MGparm_PH.indexmax(); j++) 252 | { 253 | if (MGparm_PH(j) >= 0) 254 | { 255 | ParmTrace << " " << MGparm(j); 256 | } 257 | } 258 | for (j = 1; j <= SR_parm_PH.indexmax(); j++) 259 | { 260 | if (SR_parm_PH(j) >= 0) 261 | { 262 | ParmTrace << " " << SR_parm(j); 263 | } 264 | } 265 | if (recdev_cycle > 0) 266 | { 267 | for (j = 1; j <= recdev_cycle; j++) 268 | { 269 | if (recdev_cycle_PH(j) >= 0) 270 | { 271 | ParmTrace << " " << recdev_cycle_parm(j); 272 | } 273 | } 274 | } 275 | if (recdev_early_PH > 0) 276 | { 277 | ParmTrace << " " << recdev_early; 278 | } 279 | if (recdev_PH > 0) 280 | { 281 | if (do_recdev == 1) 282 | { 283 | ParmTrace << " " << recdev1; 284 | } 285 | if (do_recdev >= 2) 286 | { 287 | ParmTrace << " " << recdev2; 288 | } 289 | } 290 | if (Fcast_recr_PH2 > 0 && Do_Forecast > 0) 
291 | { 292 | ParmTrace << Fcast_recruitments << " "; 293 | if (Do_Impl_Error > 0) 294 | ParmTrace << Fcast_impl_error << " "; 295 | } 296 | 297 | for (f = 1; f <= N_init_F; f++) 298 | { 299 | if (init_F_PH(f) > 0) 300 | { 301 | ParmTrace << " " << init_F(f); 302 | } 303 | } 304 | if (N_Fparm > 0) // continuous F 305 | { 306 | for (k = 1; k <= N_Fparm; k++) 307 | { 308 | if (Fparm_PH[k] > 0) 309 | { 310 | ParmTrace << " " << F_rate(k); 311 | } 312 | } 313 | } 314 | 315 | for (f = 1; f <= Q_Npar2; f++) 316 | { 317 | if (Q_parm_PH(f) > 0) 318 | { 319 | ParmTrace << " " << Q_parm(f); 320 | } 321 | } 322 | for (k = 1; k <= selparm_PH.indexmax(); k++) 323 | { 324 | if (selparm_PH(k) > 0) 325 | { 326 | ParmTrace << " " << selparm(k); 327 | } 328 | } 329 | for (k = 1; k <= TG_parm_PH.indexmax(); k++) 330 | { 331 | if (TG_parm_PH(k) > 0) 332 | { 333 | ParmTrace << " " << TG_parm(k); 334 | } 335 | } 336 | if (N_parm_dev > 0) 337 | { 338 | for (j = 1; j <= N_parm_dev; j++) 339 | { 340 | if (parm_dev_PH(j) > 0) 341 | ParmTrace << parm_dev(j) << " "; 342 | } 343 | } 344 | ParmTrace.precision(10); 345 | k = min(current_phase(), max_lambda_phase); 346 | if (F_Method > 1) 347 | ParmTrace << " Catch " << catch_like * column(catch_lambda, k); 348 | if (N_init_F > 0) 349 | ParmTrace << " Equil_catch " << equ_catch_like * column(init_equ_lambda, k); 350 | if (Svy_N > 0) 351 | ParmTrace << " Survey " << k << " " << surv_like * column(surv_lambda, k) << " " << elem_prod(surv_like, column(surv_lambda, k)); 352 | if (nobs_disc > 0) 353 | ParmTrace << " Discard " << disc_like * column(disc_lambda, k) << " " << elem_prod(disc_like, column(disc_lambda, k)); 354 | if (nobs_mnwt > 0) 355 | ParmTrace << " Mean_body_wt " << mnwt_like * column(mnwt_lambda, k) << " " << elem_prod(mnwt_like, column(mnwt_lambda, k)); 356 | if (Nobs_l_tot > 0) 357 | ParmTrace << " Length " << length_like_tot * column(length_lambda, k) << " " << elem_prod(length_like_tot, column(length_lambda, k)); 358 | if 
(Nobs_a_tot > 0) 359 | ParmTrace << " Age " << age_like_tot * column(age_lambda, k) << " " << elem_prod(age_like_tot, column(age_lambda, k)); 360 | if (nobs_ms_tot > 0) 361 | ParmTrace << " Size_at_age " << sizeage_like * column(sizeage_lambda, k) << " " << elem_prod(sizeage_like, column(sizeage_lambda, k)); 362 | if (SzFreq_Nmeth > 0) 363 | ParmTrace << " SizeFreq " << SzFreq_like * column(SzFreq_lambda, k) << " " << elem_prod(SzFreq_like, column(SzFreq_lambda, k)); 364 | if (Do_Morphcomp > 0) 365 | ParmTrace << " Morph " << Morphcomp_lambda(k) * Morphcomp_like; 366 | if (Do_TG > 0) 367 | ParmTrace << " Tag_comp " << TG_like1 * column(TG_lambda1, k) << " " << elem_prod(TG_like1, column(TG_lambda1, k)); 368 | if (Do_TG > 0) 369 | ParmTrace << " Tag_negbin " << TG_like2 * column(TG_lambda2, k) << " " << elem_prod(TG_like2, column(TG_lambda2, k)); 370 | ParmTrace << " Recr_dev " << recr_like * recrdev_lambda(k); 371 | ParmTrace << " Regime " << regime_like * regime_lambda(k); 372 | ParmTrace << " Fore_Recdev " << Fcast_recr_like; 373 | ParmTrace << " Parm_priors " << parm_like * parm_prior_lambda(k); 374 | if (SoftBound > 0) 375 | ParmTrace << " Softbounds " << SoftBoundPen; 376 | if (N_parm_dev > 0) 377 | ParmTrace << " Parm_devs " << (sum(parm_dev_like)) * parm_dev_lambda(k); 378 | if (F_ballpark_yr > 0) 379 | ParmTrace << " F_Ballpark " << F_ballpark_lambda(k) * F_ballpark_like; 380 | ParmTrace << endl; 381 | } 382 | else if ((Do_ParmTrace == 2 && obj_fun <= last_objfun) || Do_ParmTrace == 3) // report active and inactive parameters 383 | { 384 | ParmTrace << current_phase() << " " << niter << " " << obj_fun << " " << obj_fun - last_objfun 385 | << " " << value(SSB_yr(styr)) << " " << value(SSB_yr(endyr)) << " " << biasadj(styr) << " " << max(biasadj) << " " << biasadj(endyr); 386 | ParmTrace << " " << MGparm << " "; 387 | ParmTrace << SR_parm << " "; 388 | if (recdev_cycle > 0) 389 | ParmTrace << recdev_cycle_parm; 390 | if (recdev_do_early > 0) 391 | ParmTrace 
<< recdev_early << " "; 392 | if (do_recdev == 1) 393 | { 394 | ParmTrace << recdev1 << " "; 395 | } 396 | if (do_recdev >= 2) 397 | { 398 | ParmTrace << recdev2 << " "; 399 | } 400 | if (Do_Forecast > 0) 401 | ParmTrace << Fcast_recruitments << " "; 402 | if (Do_Impl_Error > 0) 403 | ParmTrace << Fcast_impl_error << " "; 404 | if (N_init_F > 0) 405 | ParmTrace << init_F << " "; 406 | if (N_Fparm > 0) 407 | ParmTrace << F_rate << " "; 408 | if (Q_Npar > 0) 409 | ParmTrace << Q_parm << " "; 410 | ParmTrace << selparm << " "; 411 | if (Do_TG > 0) 412 | ParmTrace << TG_parm << " "; 413 | if (N_parm_dev > 0) 414 | { 415 | for (j = 1; j <= N_parm_dev; j++) 416 | { 417 | ParmTrace << parm_dev(j); 418 | } 419 | } 420 | ParmTrace << endl; 421 | } 422 | if (obj_fun <= last_objfun) 423 | last_objfun = obj_fun; 424 | docheckup = 0; // turn off reporting to checkup.sso 425 | // SS_Label_Info_7.11 #Call fxn get_posteriors if in mceval_phase 426 | if (mceval_phase()) 427 | { 428 | get_posteriors(); 429 | 430 | //SS_Label_Info_7.12 #write report_mce_XXXX.sso and compreport_mce_XXXX.sso for each MCEVAL 431 | // warning<= 2) 433 | { 434 | write_bodywt = 0; 435 | pick_report_use(54) = 0; 436 | pick_report_use(55) = 0; 437 | save_for_report = 1; 438 | write_bigoutput(); 439 | if (Do_Dyn_Bzero > 0) 440 | write_Bzero_output(); 441 | save_for_report = 0; 442 | write_bodywt = 0; 443 | } 444 | } 445 | } // end doing of the calculations 446 | if (mceval_phase() || initial_params::mc_phase == 1) 447 | { 448 | No_Report = 1; // flag to skip output reports after MCMC and McEVAL 449 | } 450 | } 451 | // SS_Label_Info_7.13 #End of PROCEDURE_SECTION 452 | 453 | -------------------------------------------------------------------------------- /SS_recruit.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #13. 
**SS_recruit.tpl** 2 | // SS_Label_file # * Spawn_Recr() // gets expected mean recruits from input spawning biomass 3 | // SS_Label_file # * apply_recdev() // applies recdev to the expected mean recruits 4 | // SS_Label_file # * Equil_Spawn_Recr_Fxn() // gets equilibrium recruitment and spawning biomass from an input SPR 5 | 6 | //******************************************************************** 7 | /* SS_Label_FUNCTION 43 Spawner-recruitment function */ 8 | // SPAWN-RECR: function: to calc R from S 9 | FUNCTION dvariable Spawn_Recr(const prevariable& SSB_virgin_adj, const prevariable& Recr_virgin_adj, const prevariable& SSB_current) 10 | { 11 | RETURN_ARRAYS_INCREMENT(); 12 | dvariable NewRecruits; 13 | dvariable SSB_BH1; 14 | dvariable recdev_offset; 15 | dvariable steepness; 16 | dvariable Shepherd_c; 17 | dvariable Shepherd_c2; 18 | dvariable Hupper; 19 | dvariable steep2; 20 | dvariable SSB_curr_adj; 21 | dvariable join; 22 | dvariable SRZ_0; 23 | dvariable srz_min; 24 | dvariable SRZ_surv; 25 | 26 | // SS_Label_43.1 add 0.1 to input spawning biomass value to make calculation more rebust 27 | SSB_curr_adj = SSB_current + 0.100; // robust 28 | 29 | regime_change = SR_parm_work(N_SRparm2 - 1); // this is a persistent deviation off the S/R curve 30 | 31 | // SS_Label_43.3 calculate expected recruitment from the input spawning biomass and the SR curve 32 | // functions below use Recr_virgin_adj,SSB_virgin_adj which could have been adjusted adjusted above from R0,SSB_virgin 33 | switch (SR_fxn) 34 | { 35 | case 1: // previous placement for B-H constrained 36 | { 37 | warnstream << "B-H constrained curve is now Spawn-Recr option #6"; 38 | write_message (FATAL, 0); // EXIT! 39 | break; 40 | } 41 | // SS_Label_43.3.2 Ricker 42 | case 2: // ricker 43 | { 44 | steepness = SR_parm_work(2); 45 | NewRecruits = Recr_virgin_adj * SSB_curr_adj / SSB_virgin_adj * mfexp(steepness * (1. 
- SSB_curr_adj / SSB_virgin_adj)); 46 | break; 47 | } 48 | // SS_Label_43.3.3 Beverton-Holt 49 | case 3: // Beverton-Holt 50 | { 51 | steepness = SR_parm_work(2); 52 | alpha = 4.0 * steepness * Recr_virgin / (5. * steepness - 1.); 53 | beta = (SSB_virgin_adj * (1. - steepness)) / (5. * steepness - 1.); 54 | NewRecruits = (4. * steepness * Recr_virgin_adj * SSB_curr_adj) / 55 | (SSB_virgin_adj * (1. - steepness) + (5. * steepness - 1.) * SSB_curr_adj); 56 | break; 57 | } 58 | 59 | // Beverton-Holt with alpha beta 60 | /* 61 | case 3: // Beverton-Holt 62 | { 63 | steepness=SR_parm_work(2); 64 | alpha = 4.0 * steepness*Recr_virgin / (5.*steepness-1.); 65 | beta = (SSB_virgin_adj*(1.-steepness)) / (5.*steepness-1.); 66 | NewRecruits = (alpha*SSB_curr_adj) / (beta+SSB_curr_adj); 67 | break; 68 | } 69 | */ 70 | 71 | // SS_Label_43.3.4 constant expected recruitment 72 | case 4: // none 73 | { 74 | NewRecruits = Recr_virgin_adj; 75 | break; 76 | } 77 | // SS_Label_43.3.5 Hockey stick 78 | case 5: // hockey stick where "steepness" is now the fraction of B0 below which recruitment declines linearly 79 | // the 3rd parameter allows for a minimum recruitment level 80 | { 81 | steepness = SR_parm_work(2); 82 | temp = SR_parm_work(3) * Recr_virgin_adj + SSB_curr_adj / (steepness * SSB_virgin_adj) * (Recr_virgin_adj - SR_parm_work(3) * Recr_virgin_adj); // linear decrease below steepness*SSB_virgin_adj 83 | NewRecruits = Join_Fxn(0.0 * SSB_virgin_adj, SSB_virgin_adj, steepness * SSB_virgin_adj, SSB_curr_adj, temp, Recr_virgin_adj); 84 | break; 85 | } 86 | 87 | // SS_Label_43.3.6 Beverton-Holt, with constraint to have constant R about Bzero 88 | case 6: //Beverton-Holt constrained 89 | { 90 | steepness = SR_parm_work(2); 91 | alpha = 4.0 * steepness * Recr_virgin / (5. * steepness - 1.); 92 | beta = (SSB_virgin_adj * (1. - steepness)) / (5. 
* steepness - 1.); 93 | if (SSB_curr_adj > SSB_virgin_adj) 94 | { 95 | SSB_BH1 = SSB_virgin_adj; 96 | } 97 | else 98 | { 99 | SSB_BH1 = SSB_curr_adj; 100 | } 101 | NewRecruits = (4. * steepness * Recr_virgin_adj * SSB_BH1) / (SSB_virgin_adj * (1. - steepness) + (5. * steepness - 1.) * SSB_BH1); 102 | break; 103 | } 104 | 105 | // SS_Label_43.3.7 survival based 106 | case 7: // survival based, so constrained such that recruits cannot exceed fecundity 107 | { 108 | // PPR_0=SSB_virgin_adj/Recr_virgin_adj; // pups per recruit at virgin 109 | // Surv_0=1./PPR_0; // recruits per pup at virgin 110 | // Pups_0=SSB_virgin_adj; // total population fecundity is the number of pups produced 111 | // Sfrac=SR_parm(2); 112 | SRZ_0 = log(1.0 / (SSB_virgin_adj / Recr_virgin_adj)); 113 | steepness = SR_parm_work(2); 114 | srz_min = SRZ_0 * (1.0 - steepness); 115 | SRZ_surv = mfexp((1. - pow((SSB_curr_adj / SSB_virgin_adj), SR_parm_work(3))) * (srz_min - SRZ_0) + SRZ_0); // survival 116 | NewRecruits = SSB_curr_adj * SRZ_surv; 117 | exp_rec(y, 1) = NewRecruits; // expected arithmetic mean recruitment 118 | // SS_Label_43.3.7.1 Do variation in recruitment by adjusting survival 119 | // if(SR_env_target==1) SRZ_surv*=mfexp(SR_parm(N_SRparm2-2)* env_data(y,SR_env_link)); // environ effect on survival 120 | if (recdev_cycle > 0) 121 | { 122 | gg = y - (styr + (int((y - styr) / recdev_cycle)) * recdev_cycle) + 1; 123 | SRZ_surv *= mfexp(recdev_cycle_parm(gg)); 124 | } 125 | exp_rec(y, 2) = SSB_curr_adj * SRZ_surv; 126 | exp_rec(y, 2) *= mfexp(regime_change); // adjust for regime which includes env and block effects; and forecast adjustments 127 | SRZ_surv *= mfexp(-biasadj(y) * half_sigmaRsq); // bias adjustment 128 | exp_rec(y, 3) = SSB_curr_adj * SRZ_surv; 129 | if (y <= recdev_end) 130 | { 131 | if (recdev_doit(y) > 0) 132 | SRZ_surv *= mfexp(recdev(y)); // recruitment deviation 133 | } 134 | else if (Do_Forecast > 0) 135 | { 136 | SRZ_surv *= mfexp(Fcast_recruitments(y)); 137 | } 138 
| join = 1. / (1. + mfexp(100 * (SRZ_surv - 1.))); 139 | SRZ_surv = SRZ_surv * join + (1. - join) * 1.0; 140 | NewRecruits = SSB_curr_adj * SRZ_surv; 141 | exp_rec(y, 4) = NewRecruits; 142 | break; 143 | } 144 | 145 | // SS_Label_43.3.8 Shepherd 146 | case 8: // Shepherd 3-parameter SRR. per Punt & Cope 2017 147 | { 148 | Shepherd_c = SR_parm_work(3); 149 | Shepherd_c2 = pow(0.2, SR_parm_work(3)); 150 | Hupper = 1.0 / (5.0 * Shepherd_c2); 151 | steepness = 0.2 + (SR_parm_work(2) - 0.2) / (0.8) * (Hupper - 0.2); 152 | temp = (SSB_curr_adj) / (SSB_virgin_adj); 153 | NewRecruits = (5. * steepness * Recr_virgin_adj * (1. - Shepherd_c2) * temp) / 154 | (1.0 - 5.0 * steepness * Shepherd_c2 + (5. * steepness - 1.) * pow(temp, Shepherd_c)); 155 | break; 156 | } 157 | 158 | // SS_Label_43.3.8 Ricker-power 159 | case 9: // Ricker power 3-parameter SRR. per Punt & Cope 2017 160 | { 161 | steepness = SR_parm_work(2); 162 | dvariable RkrPower = SR_parm_work(3); 163 | temp = SSB_curr_adj / SSB_virgin_adj; 164 | temp2 = posfun(1.0 - temp, 0.0000001, temp3); 165 | temp = 1.0 - temp2; // Rick's new line to stabilize recruitment at R0 if B>B0 166 | dvariable RkrTop = log(5.0 * steepness) * pow(temp2, RkrPower) / pow(0.8, RkrPower); 167 | NewRecruits = Recr_virgin_adj * temp * mfexp(RkrTop); 168 | break; 169 | } 170 | } 171 | RETURN_ARRAYS_DECREMENT(); 172 | return NewRecruits; 173 | } // end spawner_recruitment 174 | 175 | FUNCTION void apply_recdev(prevariable& NewRecruits, const prevariable& Recr_virgin_adj) 176 | { 177 | RETURN_ARRAYS_INCREMENT(); 178 | // SS_Label_43.4 For non-survival based SRR, get recruitment deviations by adjusting recruitment itself 179 | exp_rec(y, 1) = NewRecruits; // expected arithmetic mean recruitment 180 | // exp_rec(y,2) is with regime shift or other env effect; 181 | // exp_rec(y,3) is with bias adjustment 182 | // exp_rec(y,4) is with dev 183 | regime_change = SR_parm_work(N_SRparm2 - 1); // this is a persistent deviation off the S/R curve 184 | 
185 | if (recdev_cycle > 0) 186 | { 187 | gg = y - (styr + (int((y - styr) / recdev_cycle)) * recdev_cycle) + 1; 188 | NewRecruits *= mfexp(recdev_cycle_parm(gg)); 189 | } 190 | NewRecruits *= mfexp(regime_change); // adjust for regime which includes env and block effects; and forecast adjustments 191 | exp_rec(y, 2) = NewRecruits; // adjusted for env and special forecast conditions 192 | if (SR_fxn != 4) 193 | NewRecruits *= mfexp(-biasadj(y) * half_sigmaRsq); // bias adjustment 194 | exp_rec(y, 3) = NewRecruits; 195 | 196 | if (y <= recdev_end) 197 | { 198 | if (recdev_doit(y) > 0) 199 | { 200 | if (do_recdev >= 3) 201 | { 202 | NewRecruits = Recr_virgin_adj * mfexp(recdev(y)); // recruitment deviation 203 | } 204 | else if (SR_fxn != 7) 205 | { 206 | NewRecruits *= mfexp(recdev(y)); // recruitment deviation 207 | } 208 | } 209 | } 210 | 211 | else if (Do_Forecast > 0) 212 | { 213 | switch (int(Fcast_Loop_Control(3))) 214 | { 215 | case 0: 216 | { 217 | NewRecruits = exp_rec(y, 2); 218 | if (SR_fxn != 4) 219 | NewRecruits *= mfexp(-biasadj(y) * half_sigmaRsq); // bias adjustment 220 | exp_rec(y, 3) = NewRecruits; 221 | break; 222 | } 223 | case 1: 224 | { 225 | exp_rec(y, 2) *= Fcast_Loop_Control(4); // apply fcast multiplier to the regime-adjusted expected value 226 | NewRecruits = exp_rec(y, 2); 227 | if (SR_fxn != 4) 228 | NewRecruits *= mfexp(-biasadj(y) * half_sigmaRsq); // bias adjustment 229 | exp_rec(y, 3) = NewRecruits; 230 | break; 231 | } 232 | case 2: // use multiplier of R0 233 | { 234 | exp_rec(y, 2) = Recr_virgin_adj * Fcast_Loop_Control(4); // apply fcast multiplier to the virgin recruitment 235 | NewRecruits = exp_rec(y, 2); 236 | if (SR_fxn != 4) 237 | NewRecruits *= mfexp(-biasadj(y) * half_sigmaRsq); // bias adjustment 238 | exp_rec(y, 3) = NewRecruits; 239 | break; 240 | } 241 | case 4: 242 | { 243 | // fall through to case 3 244 | // case 3 also will do averaging of recr_dist in another section of code 245 | } 246 | case 3: // use recent 
mean 247 | { 248 | // values going into the mean have already been bias adjusted and had dev applied, so take straight mean 249 | NewRecruits = 0.0; 250 | for (j = Fcast_Rec_yr1; j <= Fcast_Rec_yr2; j++) 251 | { 252 | NewRecruits += exp_rec(j, 4); 253 | } 254 | NewRecruits /= (Fcast_Rec_yr2 - Fcast_Rec_yr1 + 1); 255 | if(Fcast_Loop_Control(3) == 4) NewRecruits *= Fcast_Loop_Control(4); // apply multiplier 256 | exp_rec(y, 2) = NewRecruits; 257 | exp_rec(y, 3) = NewRecruits; // store in the bias-adjusted field 258 | break; 259 | } 260 | } 261 | // note that if user requests "mean" as base forecast recr, then devs are still applied 262 | // so, phase for forecast recdevs must be <0 to assure that forecast recr do not get added variability 263 | if (do_recdev > 0) 264 | NewRecruits *= mfexp(Fcast_recruitments(y)); // recruitment deviation 265 | } 266 | exp_rec(y, 4) = NewRecruits; 267 | RETURN_ARRAYS_DECREMENT(); 268 | } // end spawner_recruitment 269 | 270 | //******************************************************************** 271 | /* SS_Label_FUNCTION 44 Equil_Spawn_Recr_Fxn */ 272 | // SPAWN-RECR: function Equil_Spawn_Recr_Fxn 273 | FUNCTION dvar_vector Equil_Spawn_Recr_Fxn(const prevariable& SRparm2, const prevariable& SRparm3, 274 | const prevariable& SSB_virgin, const prevariable& Recr_virgin, const prevariable& SPR_temp) 275 | { 276 | RETURN_ARRAYS_INCREMENT(); 277 | dvar_vector Equil_Spawn_Recr_Calc(1, 2); // values to return 1 is B_equil, 2 is R_equil 278 | dvariable B_equil; 279 | dvariable R_equil; 280 | dvariable temp; 281 | dvariable steepness; 282 | dvariable join; 283 | dvariable Shepherd_c; 284 | dvariable Shepherd_c2; 285 | dvariable SRZ_0; 286 | dvariable srz_min; 287 | dvariable SRZ_surv; 288 | 289 | steepness = SRparm2; // common usage but some different 290 | // SS_Label_44.1 calc equilibrium SpawnBio and Recruitment from input SPR_temp, which is spawning biomass per recruit at some given F level 291 | switch (SR_fxn) 292 | { 293 | case 1: // 
previous placement for B-H constrained 294 | { 295 | warnstream << "B-H constrained curve is now Spawn-Recr option #6"; 296 | write_message (FATAL, 0); // EXIT! 297 | break; 298 | } 299 | // SS_Label_44.1.1 Beverton-Holt with flattop beyond Bzero 300 | case 6: //Beverton-Holt 301 | { 302 | alpha = 4.0 * steepness * Recr_virgin / (5. * steepness - 1.); 303 | beta = (SSB_virgin * (1. - steepness)) / (5. * steepness - 1.); 304 | B_equil = alpha * SPR_temp - beta; 305 | B_equil = posfun(B_equil, 0.0001, temp); 306 | R_equil = (4. * steepness * Recr_virgin * B_equil) / (SSB_virgin * (1. - steepness) + (5. * steepness - 1.) * B_equil); 307 | break; 308 | } 309 | // SS_Label_44.1.2 Ricker 310 | case 2: // Ricker 311 | { 312 | B_equil = SSB_virgin * (1. + (log(Recr_virgin / SSB_virgin) + log(SPR_temp)) / steepness); 313 | R_equil = Recr_virgin * B_equil / SSB_virgin * mfexp(steepness * (1. - B_equil / SSB_virgin)); 314 | 315 | break; 316 | } 317 | // SS_Label_44.1.3 Beverton-Holt 318 | case 3: // same as case 6 319 | { 320 | alpha = 4.0 * steepness * Recr_virgin / (5. * steepness - 1.); 321 | beta = (SSB_virgin * (1. - steepness)) / (5. * steepness - 1.); 322 | B_equil = alpha * SPR_temp - beta; 323 | B_equil = posfun(B_equil, 0.0001, temp); 324 | R_equil = (4. * steepness * Recr_virgin * B_equil) / (SSB_virgin * (1. - steepness) + (5. * steepness - 1.) 
* B_equil); //Beverton-Holt 325 | break; 326 | } 327 | 328 | // SS_Label_44.1.4 constant recruitment 329 | case 4: // constant; no bias correction 330 | { 331 | B_equil = SPR_temp * Recr_virgin; 332 | R_equil = Recr_virgin; 333 | break; 334 | } 335 | // SS_Label_44.1.5 Hockey Stick 336 | case 5: // hockey stick 337 | { 338 | alpha = SRparm3 * Recr_virgin; // min recruitment level 339 | // temp=SSB_virgin/R0*steepness; // spawners per recruit at inflection 340 | beta = (Recr_virgin - alpha) / (steepness * SSB_virgin); // slope of recruitment on spawners below the inflection 341 | B_equil = Join_Fxn(0.0 * SSB_virgin / Recr_virgin, SSB_virgin / Recr_virgin, SSB_virgin / Recr_virgin * steepness, SPR_temp, alpha / ((1. / SPR_temp) - beta), SPR_temp * Recr_virgin); 342 | R_equil = Join_Fxn(0.0 * SSB_virgin, SSB_virgin, SSB_virgin * steepness, B_equil, alpha + beta * B_equil, Recr_virgin); 343 | break; 344 | } 345 | // SS_Label_44.1.7 3 parameter survival based 346 | case 7: // survival 347 | { 348 | SRZ_0 = log(1.0 / (SSB_virgin / Recr_virgin)); 349 | srz_min = SRZ_0 * (1.0 - steepness); 350 | B_equil = SSB_virgin * (1. - (log(1. / SPR_temp) - SRZ_0) / pow((srz_min - SRZ_0), (1. / SRparm3))); 351 | B_equil = posfun(B_equil, 0.0001, temp); 352 | SRZ_surv = mfexp((1. 
- pow((B_equil / SSB_virgin), SRparm3)) * (srz_min - SRZ_0) + SRZ_0); // survival 353 | R_equil = B_equil * SRZ_surv; 354 | break; 355 | } 356 | 357 | // SS_Label_44.1.8 3 parameter Shepherd 358 | case 8: // Shepherd 359 | { 360 | dvariable Shep_top; 361 | dvariable Shep_bot; 362 | dvariable Hupper; 363 | dvariable Shep_top2; 364 | // Andre's FORTRAN 365 | // TOP = 5*Steep*(1-0.2**POWER)*SPR/SPRF0-(1-5*Steep*0.2**POWER) 366 | // BOT = (5*Steep-1) 367 | // REC = (TOP/BOT)**(1.0/POWER)*SPRF0/SPR 368 | // Power = exp(logC); 369 | // Hupper = 1.0/(5.0 * pow(0.2,Power)); 370 | Shepherd_c = SRparm3; 371 | Shepherd_c2 = pow(0.2, SRparm3); 372 | Hupper = 1.0 / (5.0 * Shepherd_c2); 373 | steepness = 0.2 + (SRparm2 - 0.2) / (0.8) * (Hupper - 0.2); 374 | Shep_top = 5.0 * steepness * (1.0 - Shepherd_c2) * (SPR_temp * Recr_virgin) / SSB_virgin - (1.0 - 5.0 * steepness * Shepherd_c2); 375 | Shep_bot = 5.0 * steepness - 1.0; 376 | Shep_top2 = posfun(Shep_top, 0.001, temp); 377 | R_equil = (SSB_virgin / SPR_temp) * pow((Shep_top2 / Shep_bot), (1.0 / SRparm3)); 378 | B_equil = R_equil * SPR_temp; 379 | break; 380 | } 381 | 382 | // SS_Label_43.3.8 Ricker-power 383 | case 9: // Ricker power 3-parameter SRR. 
per Punt & Cope 2017 384 | { 385 | steepness = SRparm2; 386 | dvariable RkrPower = SRparm3; 387 | temp = SSB_virgin / (SPR_temp * Recr_virgin); 388 | dvariable RkrTop = pow(0.8, RkrPower) * log(temp) / log(5.0 * steepness); 389 | RkrTop = posfun(RkrTop, 0.000001, CrashPen); 390 | R_equil = temp * Recr_virgin * (1.0 - pow(RkrTop, 1.0 / RkrPower)); 391 | B_equil = R_equil * SPR_temp; 392 | break; 393 | } 394 | 395 | /* 396 | case 19: // re-parameterized Shepherd 397 | { 398 | dvariable Shep_top; 399 | dvariable Shep_bot; 400 | dvariable Hupper; 401 | dvariable Shep_top2; 402 | // Andre's FORTRAN 403 | // TOP = 5*Steep*(1-0.2**POWER)*SPR/SPRF0-(1-5*Steep*0.2**POWER) 404 | // BOT = (5*Steep-1) 405 | // REC = (TOP/BOT)**(1.0/POWER)*SPRF0/SPR 406 | // Power = exp(logC); 407 | // Hupper = 1.0/(5.0 * pow(0.2,Power)); 408 | Shepherd_c=exp(SRparm3); 409 | Shepherd_c2=pow(0.2,Shepherd_c); 410 | Hupper=1.0/(5.0*Shepherd_c2); 411 | steepness=0.20001+((0.8)/(1.0+exp(-SRparm2))-0.2)/(0.8)*(Hupper-0.2); 412 | // steep2=0.20001+(steepness-0.2)/(0.8)*(Hupper-0.2); 413 | Shep_top=5.0*steepness*(1.0-Shepherd_c2)*(SPR_temp*Recr_virgin)/SSB_virgin-(1.0-5.0*steepness*Shepherd_c2); 414 | Shep_bot=5.0*steepness-1.0; 415 | Shep_top2=posfun(Shep_top,0.001,temp); 416 | R_equil=(SSB_virgin/SPR_temp) * pow((Shep_top2/Shep_bot),(1.0/Shepherd_c)); 417 | B_equil=R_equil*SPR_temp; 418 | break; 419 | } 420 | 421 | // SS_Label_43.3.8 Ricker-power 422 | case 20: // Ricker power 3-parameter SRR. 
per Punt & Cope 2017 423 | { 424 | // Hupper = 10.0; 425 | // Steep = 0.2 + (Hupper - 0.2)/(1+exp(-1*Steep2))+1.0e-5; 426 | // Top = pow(0.8,Power)*log(SPRF0/SPR)/log(5.0*Steep); 427 | // Top = posfun(Top,0.000001,Penal); 428 | // Recs = (SPRF0/SPR) * (1.0 - pow(Top,1.0/Power)); 429 | // Recs = posfun(Recs,0.0001,Penal); 430 | // if (Recs < 0) Rec2 = 0; else Rec2 = Recs; 431 | steepness = 0.2 + (10.0 - 0.2)/(1+exp(-SR_parm_work(2))); 432 | dvariable RkrPower=exp(SR_parm_work(3)); 433 | temp=SSB_virgin/(SPR_temp*Recr_virgin); 434 | dvariable RkrTop = pow(0.8,RkrPower)*log(temp)/log(5.0*steepness); 435 | RkrTop = posfun(RkrTop,0.000001,CrashPen); 436 | R_equil = temp *Recr_virgin * (1.0 - pow(RkrTop,1.0/RkrPower)); 437 | B_equil=R_equil*SPR_temp; 438 | break; 439 | } 440 | */ 441 | } 442 | Equil_Spawn_Recr_Calc(1) = B_equil; 443 | Equil_Spawn_Recr_Calc(2) = R_equil; 444 | RETURN_ARRAYS_DECREMENT(); 445 | return Equil_Spawn_Recr_Calc; 446 | } // end Equil_Spawn_Recr_Fxn 447 | -------------------------------------------------------------------------------- /SS_tagrecap.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #22. **SS_tagrecap.tpl** 2 | // SS_Label_file # * Tag_Recapture() // calculates expected values for number of tags returned by each fleet(and area), in each time step, for each tag release group 3 | // SS_Label_file # 4 | 5 | FUNCTION void Tag_Recapture() 6 | { 7 | // SS_Label_Info_24.15 #do tag mortality, movement and recapture revise 7/10/2019 8 | dvariable TG_init_loss; 9 | dvariable TG_chron_loss; 10 | TG_recap_exp.initialize(); 11 | 12 | // get reporting rates by fleet that will be used for all Tag Groups 13 | for (f = 1; f <= Nfleet1; f++) 14 | { 15 | j = 3 * N_TG + f; 16 | if (TG_parm_PH(j) == -1000.) 17 | { 18 | TG_report(f) = TG_report(f - 1); 19 | } // do nothing keep same value 20 | else 21 | { 22 | if (TG_parm_PH(j) > -1000.) 
23 | { 24 | k = j; 25 | } 26 | else 27 | { 28 | k = -1000 - TG_parm_PH(j); 29 | } 30 | TG_report(f) = mfexp(TG_parm(k)) / (1. + mfexp(TG_parm(k))); 31 | } 32 | j += Nfleet1; 33 | if (TG_parm_PH(j) == -1000.) 34 | { 35 | TG_rep_decay(f) = TG_rep_decay(f - 1); 36 | } // do nothing keep same value 37 | else 38 | { 39 | if (TG_parm_PH(j) > -1000.) 40 | { 41 | k = j; 42 | } 43 | else 44 | { 45 | k = -1000 - TG_parm_PH(j); 46 | } 47 | TG_rep_decay(f) = TG_parm(k); 48 | } 49 | } 50 | for (TG = 1; TG <= N_TG; TG++) 51 | { 52 | firstseas = int(TG_release(TG, 4)); // release season 53 | t = int(TG_release(TG, 5)); // release t index calculated in data section from year and season of release 54 | p = int(TG_release(TG, 2)); // release area 55 | gg = int(TG_release(TG, 6)); // gender (1=fem; 2=male; 0=both 56 | a1 = int(TG_release(TG, 7)); // age at release 57 | 58 | TG_alive.initialize(); 59 | if (gg == 0) 60 | { 61 | for (g = 1; g <= gmorph; g++) 62 | { 63 | TG_alive(p, g) = natage(t, p, g, a1); 64 | } // gets both genders 65 | } 66 | else 67 | { 68 | for (g = 1; g <= gmorph; g++) 69 | { 70 | if (sx(g) == gg) 71 | { 72 | TG_alive(p, g) = natage(t, p, g, a1); 73 | } // only does the selected gender 74 | } 75 | } 76 | if (TG_parm_PH(TG) == -1000.) 77 | { 78 | } // do nothing keep same TG_init_loss 79 | else 80 | { 81 | if (TG_parm_PH(TG) > -1000.) 82 | { 83 | k = TG; 84 | } 85 | else 86 | { 87 | k = -1000 - TG_parm_PH(TG); 88 | } 89 | TG_init_loss = mfexp(TG_parm(k)) / (1. + mfexp(TG_parm(k))); 90 | } 91 | 92 | // get chronic loss parameter 93 | j = TG + N_TG; 94 | if (TG_parm_PH(j) == -1000.) 95 | { 96 | } // do nothing keep same value 97 | else 98 | { 99 | if (TG_parm_PH(j) > -1000.) 100 | { 101 | k = j; 102 | } 103 | else 104 | { 105 | k = -1000 - TG_parm_PH(j) + N_TG; 106 | } 107 | TG_chron_loss = mfexp(TG_parm(k)) / (1. 
+ mfexp(TG_parm(k))); 108 | } 109 | TG_alive /= sum(TG_alive); // proportions across morphs at age a1 in release area p at time of release t 110 | TG_alive *= TG_release(TG, 8); // number released as distributed across morphs 111 | TG_alive *= (1. - TG_init_loss); // initial mortality 112 | if (save_for_report > 0) 113 | { 114 | TG_save(TG, 1) = value(TG_init_loss); 115 | TG_save(TG, 2) = value(TG_chron_loss); 116 | } 117 | TG_t = 0; 118 | for (y = TG_release(TG, 3); y <= endyr; y++) 119 | { 120 | for (s = firstseas; s <= nseas; s++) 121 | { 122 | if (save_for_report > 0 && TG_t <= TG_endtime(TG)) 123 | { 124 | TG_save(TG, 3 + TG_t) = value(sum(TG_alive)); 125 | } // OK to do simple sum because only selected morphs are populated 126 | 127 | for (p = 1; p <= pop; p++) 128 | { 129 | for (g = 1; g <= gmorph; g++) 130 | if (TG_use_morph(TG, g) > 0) 131 | { 132 | for (f = 1; f <= Nfleet; f++) 133 | if (fleet_area(f) == p) 134 | { 135 | // calculate recaptures by fleet 136 | // NOTE: Sel_for_tag(t,f,g,a1) = sel_ret_num(s,f,g,a1)*Hrate(f,t) 137 | if (F_Method == 1) 138 | { 139 | TG_recap_exp(TG, TG_t, f) += TG_alive(p, g) // tags recaptured 140 | * mfexp(-(natM(t, p, GP3(g), a1) + TG_chron_loss) * seasdur_half(s)) * Sel_for_tag(t, f, g, a1) * TG_report(f) * mfexp(TG_t * TG_rep_decay(f)); 141 | } 142 | else // use for method 2 and 3 143 | { 144 | TG_recap_exp(TG, TG_t, f) += TG_alive(p, g) * Sel_for_tag(t, f, g, a1) / (Z_rate(t, p, g, a1) + TG_chron_loss) * (1. 
- mfexp(-seasdur(s) * (Z_rate(t, p, g, a1) + TG_chron_loss))) * TG_report(f) * mfexp(TG_t * TG_rep_decay(f)); 145 | } 146 | 147 | if (docheckup == 1) 148 | echoinput << " TG_" << TG << " y_" << y << " s_" << s << " area_" << p << " g_" << g << " GP3_" << GP3(g) << " f_" << f << " a1_" << a1 << " Sel_" << Sel_for_tag(t, f, g, a1) << " TG_alive_" << TG_alive(p, g) << " TG_obs_" << TG_recap_obs(TG, TG_t, f) << " TG_exp_" << TG_recap_exp(TG, TG_t, f) << endl; 149 | } // end fleet loop for recaptures 150 | TG_alive(p, g) *= mfexp(-seasdur(s) * (Z_rate(t, p, g, a1) + TG_chron_loss)); 151 | } // end morph loop 152 | } // end area loop 153 | 154 | if (Hermaphro_Option != 0) 155 | { 156 | if (Hermaphro_seas == -1 || Hermaphro_seas == s) 157 | { 158 | k = gmorph / 2; 159 | for (p = 1; p <= pop; p++) // area 160 | for (g = 1; g <= k; g++) // loop females 161 | if (use_morph(g) > 0) 162 | { 163 | if (Hermaphro_Option == 1) 164 | { 165 | TG_alive(p, g + k) += TG_alive(p, g) * Hermaphro_val(GP4(g), a1); // increment males with females 166 | TG_alive(p, g) *= (1. - Hermaphro_val(GP4(g), a1)); // decrement females 167 | } 168 | else if (Hermaphro_Option == -1) 169 | { 170 | TG_alive(p, g) += TG_alive(p, g + k) * Hermaphro_val(GP4(g + k), a1); // increment females with males 171 | TG_alive(p, g + k) *= (1. 
- Hermaphro_val(GP4(g + k), a1)); // decrement males 172 | } 173 | } 174 | } 175 | } 176 | 177 | if (do_migration > 0) // movement between areas of tags 178 | { 179 | TG_alive_temp = TG_alive; 180 | TG_alive = 0.0; 181 | for (g = 1; g <= gmorph; g++) 182 | if (use_morph(g) > 0) 183 | { 184 | for (p = 1; p <= pop; p++) // source population 185 | for (p2 = 1; p2 <= pop; p2++) // destination population 186 | { 187 | k = move_pattern(s, GP4(g), p, p2); 188 | if (k > 0) 189 | TG_alive(p2, g) += TG_alive_temp(p, g) * migrrate(y, k, a1); 190 | } 191 | } 192 | if (docheckup == 1) 193 | echoinput << " Tag_alive after survival and movement " << endl 194 | << TG_alive << endl; 195 | } 196 | t++; // increment seasonal time counter 197 | if (TG_t < TG_endtime(TG)) 198 | TG_t++; 199 | if (s == nseas && a1 < nages) 200 | a1++; 201 | } // end seasons 202 | firstseas = 1; // so start with season 1 in year following the tag release 203 | } // end years 204 | } // end loop of tag groups 205 | } // end having tag groups 206 | 207 | -------------------------------------------------------------------------------- /SS_timevaryparm.tpl: -------------------------------------------------------------------------------- 1 | // SS_Label_file #21. 
// **SS_timevaryparm.tpl**
// SS_Label_file # * make_timevaryparm() // makes parameters a function of input environmental data time series
// SS_Label_file # * make_densitydependent_parm() // for the current year, changes a parameter value as a function of summary bio or recruitment at beginning of this year
// SS_Label_file #

//*********************************************************************
/* SS_Label_Function_14 #make_timevaryparm(): create trend and block time series */
// Purpose: for every parameter flagged as time-varying (timevary_def), build its
// full by-year time series parm_timevary(tvary, year) from the base parameter plus
// any combination of: block effects, a cumulative-normal trend, environmental
// links, and annual deviation vectors.  timevary_setup elements used here:
//   (1) parameter type (1=MG, 2=SR, 3=Q, 5=selex)   (2) index of the base parameter
//   (3) index of the first associated timevary parameter
//   (4) >0 block pattern, <0 trend type              (5) block method
//   (6) env link type                                 (7) env variable (>0; <0 means density-dependent, done elsewhere)
//   (8) dev vector id   (9) dev method   (10,11) dev year range   (14) continue_last flag
FUNCTION void make_timevaryparm()
  {
  dvariable baseparm;   // current value of the base (non-timevary) parameter
  baseparm_min = -999.; // fill array with default
  baseparm_max = 999; // fill array with default
  dvariable endtrend;   // trend value at endyr
  dvariable infl_year;  // inflection year of the trend
  dvariable slope;      // stddev of the cumulative normal defining the trend
  dvariable norm_styr;  // cumd_norm evaluated at styr (trend anchor)
  // note: need to implement the approach that keeps within bounds of base parameter

  int timevary_parm_cnt_all; // running count over ALL timevary parameters (for echo/copy)
  timevary_parm_cnt_all = 0;
  if (do_once == 1)
    echoinput << endl
              << "**********************" << endl
              << "number of parameters with timevary: " << timevary_cnt << endl;

  for (int tvary = 1; tvary <= timevary_cnt; tvary++)
  {
    ivector timevary_setup(1, 14);
    timevary_setup(1, 14) = timevary_def[tvary](1, 14);
    if (do_once == 1)
      echoinput << "timevary #: " << tvary << endl
                << "setup: " << timevary_setup << endl;
    // what type of parameter is being affected? get the baseparm and its bounds
    switch (timevary_setup(1)) // parameter type
    {
      case 1: // MG (mortality-growth) parameter
      {
        baseparm = MGparm(timevary_setup(2)); // index of base parm
        baseparm_min(tvary) = MGparm_LO(timevary_setup(2));
        baseparm_max(tvary) = MGparm_HI(timevary_setup(2));
        if (do_once == 1)
          echoinput << "base MGparm " << baseparm << endl;
        // copy this parameter's timevary parameters (stored after the base parameters)
        // into the working vector; the range ends where the next tvary's range begins
        for (j = timevary_setup(3); j < timevary_def[tvary + 1](3); j++)
        {
          timevary_parm_cnt_all++;
          timevary_parm(timevary_parm_cnt_all) = MGparm(N_MGparm + j);
          if (do_once == 1)
            echoinput << j << " timevary_parm: " << timevary_parm(timevary_parm_cnt_all) << endl;
        }
        parm_timevary(tvary) = baseparm; // fill timeseries with base parameter, just in case
        break;
      }
      case 2: // SR (spawner-recruitment) parameter
      {
        baseparm = SR_parm(timevary_setup(2)); // index of base parm
        baseparm_min(tvary) = SR_parm_LO(timevary_setup(2));
        baseparm_max(tvary) = SR_parm_HI(timevary_setup(2));
        if (do_once == 1)
          echoinput << "base SR_parm " << baseparm << endl;
        for (j = timevary_setup(3); j < timevary_def[tvary + 1](3); j++)
        {
          timevary_parm_cnt_all++;
          timevary_parm(timevary_parm_cnt_all) = SR_parm(N_SRparm(SR_fxn) + 3 + j - timevary_parm_start_SR + 1);
          if (do_once == 1)
            echoinput << j << " timevary_parm: " << timevary_parm(timevary_parm_cnt_all) << endl;
        }
        parm_timevary(tvary) = baseparm; // fill timeseries with base parameter, just in case
        break;
      }
      case 3: // Q (catchability) parameter
      {
        baseparm = Q_parm(timevary_setup(2)); // index of base parm
        baseparm_min(tvary) = Q_parm_LO(timevary_setup(2));
        baseparm_max(tvary) = Q_parm_HI(timevary_setup(2));
        if (do_once == 1)
          echoinput << "base Qparm " << baseparm << endl;
        for (j = timevary_setup(3); j < timevary_def[tvary + 1](3); j++)
        {
          timevary_parm_cnt_all++;
          timevary_parm(timevary_parm_cnt_all) = Q_parm(Q_Npar + j - timevary_parm_start_Q + 1);
          if (do_once == 1)
            echoinput << j << " timevary_parm: " << timevary_parm(timevary_parm_cnt_all) << endl;
        }
        parm_timevary(tvary) = baseparm; // fill timeseries with base parameter, just in case
        break;
      }
      case 5: // selex (selectivity) parameter
      {
        baseparm = selparm(timevary_setup(2)); // index of base parm
        baseparm_min(tvary) = selparm_LO(timevary_setup(2));
        baseparm_max(tvary) = selparm_HI(timevary_setup(2));
        if (do_once == 1)
          echoinput << "base selparm " << baseparm << endl;
        for (j = timevary_setup(3); j < timevary_def[tvary + 1](3); j++)
        {
          timevary_parm_cnt_all++;
          timevary_parm(timevary_parm_cnt_all) = selparm(N_selparm + j - timevary_parm_start_sel + 1);
          if (do_once == 1)
            echoinput << j << " timevary_parm: " << timevary_parm(timevary_parm_cnt_all) << endl;
        }
        parm_timevary(tvary) = baseparm; // fill timeseries with base parameter, just in case
        break;
      }
    }

    timevary_parm_cnt = timevary_setup(3); // first parameter used to create timevary effect on baseparm
    if (timevary_setup(4) > 0) // block
    {
      if (do_once == 1)
        // NOTE(review): this echoes z before it is assigned on the next line,
        // so the printed "block pattern" is the value left over from the previous
        // iteration — presumably the echo was meant to follow the assignment.
        echoinput << "block pattern " << z << endl;
      z = timevary_setup(4); // specified block pattern
      g = 1; // walks Block_Design in (start-year, end-year) pairs
      temp = baseparm;
      for (a = 1; a <= Nblk(z); a++)
      {
        switch (timevary_setup(5)) // block method
        {
          case 0: // multiplicative offset
          {
            temp = baseparm * mfexp(timevary_parm(timevary_parm_cnt));
            timevary_parm_cnt++;
            break;
          }
          case 1: // additive offset
          {
            temp = baseparm + timevary_parm(timevary_parm_cnt);
            timevary_parm_cnt++;
            break;
          }
          case 2:
          {
            temp = timevary_parm(timevary_parm_cnt); // direct assignment of block value
            timevary_parm_cnt++;
            break;
          }
          case 3:
          {
            temp += timevary_parm(timevary_parm_cnt); // block as offset from previous block
            timevary_parm_cnt++;
            break;
          }
        }

        for (int y1 = Block_Design(z, g); y1 <= Block_Design(z, g + 1); y1++) // loop years for this block
        {
          parm_timevary(tvary, y1) = temp;
        }
        g += 2; // advance to the next (start, end) pair
      }
      // timevary_parm_cnt--; // back out last increment
    } // end uses blocks

    else if (timevary_setup(4) < 0) // trend
    {
      // timevary_parm(timevary_parm_cnt+0) = offset for the trend at endyr; 3 options available below
      // timevary_parm(timevary_parm_cnt+1) = inflection year; 2 options available
      // timevary_parm(timevary_parm_cnt+2) = stddev of normal at inflection year
      // calc endyr value,
      if (do_once == 1)
        echoinput << "logistic trend over time " << endl;
      if (timevary_setup(4) == -1) // use logistic transform to keep with bounds of the base parameter
      {
        endtrend = log((baseparm_max(tvary) - baseparm_min(tvary) + 0.0000002) / (baseparm - baseparm_min(tvary) + 0.0000001) - 1.) / (-2.); // transform the base parameter
        endtrend += timevary_parm(timevary_parm_cnt); // add the offset Note that offset value is in the transform space
        endtrend = baseparm_min(tvary) + (baseparm_max(tvary) - baseparm_min(tvary)) / (1. + mfexp(-2. * endtrend)); // backtransform
        infl_year = log(0.5) / (-2.); // transform the base parameter
        infl_year += timevary_parm(timevary_parm_cnt + 1); // add the offset Note that offset value is in the transform space
        infl_year = r_years(styr) + (r_years(endyr) - r_years(styr)) / (1. + mfexp(-2. * infl_year)); // backtransform
      }
      else if (timevary_setup(4) == -2) // set ending value directly
      {
        endtrend = timevary_parm(timevary_parm_cnt);
        infl_year = timevary_parm(timevary_parm_cnt + 1);
      }
      else if (timevary_setup(4) == -3) // use parm as fraction of way between bounds
      {
        endtrend = baseparm_min(tvary) + (baseparm_max(tvary) - baseparm_min(tvary)) * timevary_parm(timevary_parm_cnt);
        infl_year = r_years(styr) + (r_years(endyr) - r_years(styr)) * timevary_parm(timevary_parm_cnt + 1);
      }
      slope = timevary_parm(timevary_parm_cnt + 2);
      timevary_parm_cnt += 3; // trend always consumes 3 parameters

      // scale a cumulative-normal curve so it passes through baseparm at styr
      // and endtrend at endyr
      norm_styr = cumd_norm((r_years(styr) - infl_year) / slope);
      temp = (endtrend - baseparm) / (cumd_norm((r_years(endyr) - infl_year) / slope) - norm_styr); // delta in cum_norm between styr and endyr

      for (int y1 = styr; y1 <= YrMax; y1++)
      {
        if (y1 <= endyr)
        {
          parm_timevary(tvary, y1) = baseparm + temp * (cumd_norm((r_years(y1) - infl_year) / slope) - norm_styr);
        }
        else
        {
          parm_timevary(tvary, y1) = parm_timevary(tvary, endyr); // hold endyr value through forecast
        }
      }
      parm_timevary(tvary, styr - 1) = baseparm; // pre-styr (equilibrium) year keeps the base value
    }

    if (timevary_setup(7) > 0) // env link (negative value indicates density-dependence which is calculated year-by-year in different function)
    {
      if (do_once == 1)
        echoinput << "env_link to env_variable: " << timevary_setup(7) << " using link_type " << timevary_setup(6) << endl;
      switch (int(timevary_setup(6)))
      {
        case 1: // exponential env link
        {
          for (int y1 = styr - 1; y1 <= YrMax; y1++)
          {
            parm_timevary(tvary, y1) *= mfexp(timevary_parm(timevary_parm_cnt) * (env_data(y1, timevary_setup(7))));
          }
          timevary_parm_cnt++;
          break;
        }
        case 2: // linear env link
        {
          for (int y1 = styr - 1; y1 <= YrMax; y1++)
          {
            parm_timevary(tvary, y1) += timevary_parm(timevary_parm_cnt) * env_data(y1, timevary_setup(7));
          }
          timevary_parm_cnt++;
          break;
        }
        case 3: // result constrained by baseparm_min-max; input values are unit normal
        {
          dvariable temp; // shadows outer temp intentionally within this case
          double p_range = baseparm_max(tvary) - baseparm_min(tvary);

          // only the years for which this env variable has data are adjusted
          for (int y1 = env_data_minyr(timevary_setup(7)); y1 <= env_data_maxyr(timevary_setup(7)); y1++)
          {
            // logit-transform current value, add env effect, back-transform:
            // keeps the result strictly inside (min, max)
            temp = log((parm_timevary(tvary, y1) - baseparm_min(tvary) + 1.0e-7) / (baseparm_max(tvary) - parm_timevary(tvary, y1) + 1.0e-7));
            temp += timevary_parm(timevary_parm_cnt) * env_data(y1, timevary_setup(7));
            parm_timevary(tvary, y1) = baseparm_min(tvary) + p_range / (1.0 + exp(-temp));
          }
          timevary_parm_cnt++;
          break;
        }
        case 4: // logistic env link
        {
          // first parm is offset; second is slope
          for (int y1 = styr - 1; y1 <= YrMax; y1++)
          {
            parm_timevary(tvary, y1) *= 2.00000 / (1.00000 + mfexp(-timevary_parm(timevary_parm_cnt + 1) * (env_data(y1, timevary_setup(7)) - timevary_parm(timevary_parm_cnt))));
          }
          timevary_parm_cnt += 2; // logistic link consumes 2 parameters
          break;
        }
      }
    }
    // SS_Label_Info_14.3 #Create parm dev randwalks if needed
    if (timevary_setup(8) > 0) // devs
    {
      k = timevary_setup(8); // dev used
      if (do_once == 1)
        echoinput << "dev vector #: " << k << endl;
      parm_dev_stddev(k) = timevary_parm(timevary_parm_cnt);
      parm_dev_rho(k) = timevary_parm(timevary_parm_cnt + 1);
      int picker = timevary_setup(9); // selects the method for creating time-vary parameter from dev vector

      switch (picker)
      {
        case 1: // multiplicative annual devs
        {
          for (j = timevary_setup(10); j <= timevary_setup(11); j++)
          {
            parm_timevary(tvary, j) *= mfexp(parm_dev(k, j) * parm_dev_stddev(k));
          }
          break;
        }
        case 2: // additive annual devs
        {
          for (j = timevary_setup(10); j <= timevary_setup(11); j++)
          {
            parm_timevary(tvary, j) += parm_dev(k, j) * parm_dev_stddev(k);
          }
          break;
        }
        case 3: // simple (non-reverting) random walk of devs
        {
          parm_dev_rwalk(k, timevary_setup(10)) = parm_dev(k, timevary_setup(10)) * parm_dev_stddev(k);
          parm_timevary(tvary, timevary_setup(10)) += parm_dev_rwalk(k, timevary_setup(10));
          for (j = timevary_setup(10) + 1; j <= timevary_setup(11); j++)
          {
            parm_dev_rwalk(k, j) = parm_dev_rwalk(k, j - 1) + parm_dev(k, j) * parm_dev_stddev(k);
            parm_timevary(tvary, j) += parm_dev_rwalk(k, j);
          }
          break;
        }
        case 4: // mean reverting random walk
        {
          parm_dev_rwalk(k, timevary_setup(10)) = parm_dev(k, timevary_setup(10)) * parm_dev_stddev(k); // 1st yr dev
          parm_timevary(tvary, timevary_setup(10)) += parm_dev_rwalk(k, timevary_setup(10)); // add dev to current value
          for (j = timevary_setup(10) + 1; j <= timevary_setup(11); j++)
          {
            // =(1-rho)*mean + rho*prevval + dev // where mean = 0.0
            parm_dev_rwalk(k, j) = parm_dev_rho(k) * parm_dev_rwalk(k, j - 1) + parm_dev(k, j) * parm_dev_stddev(k); // update MRRW using annual dev
            parm_timevary(tvary, j) += parm_dev_rwalk(k, j); // add dev to current value of annual parameter, which may previously be adjusted by block or env
          }
          break;
        }
        case 6: // mean reverting random walk with penalty to keep rmse near 1.0
        {
          // NOTE(review): the loop body here is identical to case 4; presumably the
          // rmse penalty itself is applied elsewhere (objective function) — confirm.
          parm_dev_rwalk(k, timevary_setup(10)) = parm_dev(k, timevary_setup(10)) * parm_dev_stddev(k); // 1st yr dev
          parm_timevary(tvary, timevary_setup(10)) += parm_dev_rwalk(k, timevary_setup(10)); // add dev to current value
          for (j = timevary_setup(10) + 1; j <= timevary_setup(11); j++)
          {
            // =(1-rho)*mean + rho*prevval + dev // where mean = 0.0
            parm_dev_rwalk(k, j) = parm_dev_rho(k) * parm_dev_rwalk(k, j - 1) + parm_dev(k, j) * parm_dev_stddev(k); // update MRRW using annual dev
            parm_timevary(tvary, j) += parm_dev_rwalk(k, j); // add dev to current value of annual parameter, which may previously be adjusted by block or env
          }
          break;
        }
        case 5: // mean reverting random walk constrained by base parameter's min-max:
        {
          // NOTE: if the stddev parameter is greater than 1.8, the distribution of adjusted parameters will become U-shaped
          dvariable temp; // local shadow, as in env-link case 3
          double p_range = baseparm_max(tvary) - baseparm_min(tvary);
          int j = timevary_setup(10);
          parm_dev_rwalk(k, j) = parm_dev(k, j) * parm_dev_stddev(k); // 1st yr dev
          // p_base=(parm_timevary(tvary,j)-baseparm_min(tvary))/(baseparm_max(tvary)-baseparm_min(tvary)); // convert parm to (0,1) scale
          // temp=log(p_base/(1.-p_base)) + parm_dev_rwalk(k,j); // convert to logit and add dev; so dev must be in units of the logit
          temp = log((parm_timevary(tvary, j) - baseparm_min(tvary) + 1.0e-7) / (baseparm_max(tvary) - parm_timevary(tvary, j) + 1.0e-7));
          parm_timevary(tvary, j) = baseparm_min(tvary) + p_range / (1.0 + exp(-temp - parm_dev_rwalk(k, j)));
          for (j = timevary_setup(10) + 1; j <= timevary_setup(11); j++)
          {
            // =(1-rho)*mean + rho*prevval + dev // where mean = 0.0
            parm_dev_rwalk(k, j) = parm_dev_rho(k) * parm_dev_rwalk(k, j - 1) + parm_dev(k, j) * parm_dev_stddev(k); // update MRRW using annual dev
            temp = log((parm_timevary(tvary, j) - baseparm_min(tvary) + 1.0e-7) / (baseparm_max(tvary) - parm_timevary(tvary, j) + 1.0e-7));
            parm_timevary(tvary, j) = baseparm_min(tvary) + p_range / (1.0 + exp(-temp - parm_dev_rwalk(k, j)));
          }
          break;
        }
      }
      if (timevary_setup(14) == 1) // continue_last
      {
        // hold the final dev-adjusted value through the remaining/forecast years
        for (j = timevary_setup(11) + 1; j <= YrMax; j++)
          parm_timevary(tvary, j) = parm_timevary(tvary, timevary_setup(11));
      }
    }
    if (do_once == 1)
      echoinput << "result by year: " << parm_timevary(tvary) << endl;
  }
  } // end timevary_parm setup for all years

// Purpose: called once per model year y1 to apply density-dependent links, which
// cannot be precomputed because they depend on state (e.g. summary biomass or
// recruitment) at the start of that year.  A negative timevary_setup(7) flags
// density-dependence; its value is used directly as the (negative) column of
// env_data, which is presumably dimensioned to include those columns — confirm.
FUNCTION void make_densitydependent_parm(int const y1)
  {


  for (int tvary = 1; tvary <= timevary_cnt; tvary++)
  {
    // NOTE(review): only elements 1-13 are copied here, vs 1-14 in
    // make_timevaryparm(); element 14 (continue_last) is unused here, but confirm
    // the shorter copy is intentional.
    ivector timevary_setup(1, 13);
    timevary_setup(1, 13) = timevary_def[tvary](1, 13);
    if (timevary_setup(7) < 0) // density-dependent
    {
      int env_var = timevary_setup(7);
      timevary_parm_cnt = timevary_setup(3); // link parameter index
      if (do_once == 1)
        echoinput << y1 << " density-dependent to env_variable: " << env_var << " using link_type "
                  << timevary_setup(6) << " env: " << env_data(y1, env_var) << " parm: " << timevary_parm(timevary_parm_cnt) << endl;
      switch (int(timevary_setup(6)))
      {
        case 1: // exponential env link
        {
          parm_timevary(tvary, y1) *= mfexp(timevary_parm(timevary_parm_cnt) * env_data(y1, env_var));
          break;
        }
        case 2: // linear env link
        {
          parm_timevary(tvary, y1) += timevary_parm(timevary_parm_cnt) * env_data(y1, env_var);
          break;
        }
        case 3: // result constrained by baseparm_min-max; input values are unit normal
        {
          // logit-transform, add effect, back-transform; mirrors env-link case 3 above
          dvariable temp;
          double p_range = baseparm_max(tvary) - baseparm_min(tvary);
          temp = log((parm_timevary(tvary, y1) - baseparm_min(tvary) + 1.0e-7) / (baseparm_max(tvary) - parm_timevary(tvary, y1) + 1.0e-7));
          temp += timevary_parm(timevary_parm_cnt) * env_data(y1, env_var);
          parm_timevary(tvary, y1) = baseparm_min(tvary) + p_range / (1.0 + exp(-temp));
          break;
        }
        case 4: // logistic env link
        {
          // first parm is offset ; second is slope
          // NOTE(review): this ASSIGNS (=) the logistic factor, whereas the
          // corresponding case 4 in make_timevaryparm() MULTIPLIES (*=) the
          // parameter by it — the base parameter value is discarded here.
          // Confirm whether this difference is intentional.
          parm_timevary(tvary, y1) = 2.00000 / (1.00000 + mfexp(-timevary_parm(timevary_parm_cnt + 1) * (env_data(y1, env_var) - timevary_parm(timevary_parm_cnt))));
          break;
        }
      }
    }
  }
  }

--------------------------------------------------------------------------------
/SS_versioninfo_330opt.tpl:
--------------------------------------------------------------------------------
DATA_SECTION
!! // Stock Synthesis
!!
// Developed by Richard Methot, NOAA Fisheries 4 | 5 | !! // SS_Label_Section_1.0 #DATA_SECTION 6 | 7 | !! // SS_Label_Info_1.1.1 #Create string with version info 8 | !!version_info += "#V3.30.xx.yy;_fast(opt);_compile_date:_"; 9 | !!version_info += __DATE__; 10 | !!version_info += ";_Stock_Synthesis_by_Richard_Methot_(NOAA)_using_ADMB_13.2"; 11 | !!version_info2 += "#_Stock_Synthesis_is_a_work_of_the_U.S._Government_and_is_not_subject_to_copyright_protection_in_the_United_States."; 12 | !!version_info2 += "#_Foreign_copyrights_may_apply._See_copyright.txt_for_more_information."; 13 | !!version_info2 += "#_User_support_available_at:_https://groups.google.com/g/ss3-forum_and_NMFS.Stock.Synthesis@noaa.gov"; 14 | !!version_info2 += "#_User_info_available_at:_https://nmfs-ost.github.io/ss3-website/"; 15 | !!version_info2 += "#_Source_code_at:_https://github.com/nmfs-ost/ss3-source-code"; 16 | 17 | -------------------------------------------------------------------------------- /SS_versioninfo_330safe.tpl: -------------------------------------------------------------------------------- 1 | DATA_SECTION 2 | !! // Stock Synthesis 3 | !! // Developed by Richard Methot, NOAA Fisheries 4 | 5 | !! // SS_Label_Section_1.0 #DATA_SECTION 6 | 7 | !! 
// SS_Label_Info_1.1.1 #Create string with version info 8 | !!version_info += "#V3.30.xx.yy;_safe;_compile_date:_"; 9 | !!version_info += __DATE__; 10 | !!version_info += ";_Stock_Synthesis_by_Richard_Methot_(NOAA)_using_ADMB_13.2"; 11 | !!version_info2 += "#_Stock_Synthesis_is_a_work_of_the_U.S._Government_and_is_not_subject_to_copyright_protection_in_the_United_States."; 12 | !!version_info2 += "#_Foreign_copyrights_may_apply._See_copyright.txt_for_more_information."; 13 | !!version_info2 += "#_User_support_available_at:_https://groups.google.com/g/ss3-forum_and_NMFS.Stock.Synthesis@noaa.gov"; 14 | !!version_info2 += "#_User_info_available_at:_https://nmfs-ost.github.io/ss3-website/"; 15 | !!version_info2 += "#_Source_code_at:_https://github.com/nmfs-ost/ss3-source-code"; 16 | !!version_info2 += ""; 17 | !!#define DO_ONCE 18 | -------------------------------------------------------------------------------- /StockSynthesis.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": "." 5 | } 6 | ], 7 | "settings": { 8 | "files.associations": { 9 | "*.tpl": "c", 10 | "\"*.extension\":": "\"tpl\"", 11 | "*.htp": "c", 12 | "ostream": "c", 13 | "iosfwd": "c" 14 | }, 15 | "explorer.excludeGitIgnore": true 16 | } 17 | } -------------------------------------------------------------------------------- /coding_style.md: -------------------------------------------------------------------------------- 1 | # Draft programming style for SS3 2 | SS3 is coded in C++ and ADMB TPL which have different requirements for compilation. 3 | C++ code is contained in 'LOCAL_CALCS' sections. 4 | The following is to enhance clarity while building using ADMB tools. 5 | 6 | ## LOCAL_CALCS Sections 7 | These sections are between the statements 'LOCAL_CALCS' and 'END_CALCS' 8 | each of which must be one space from the left. 
If using clang-format, place the comment `// clang-format on` at the
beginning of the section and `// clang-format off` at the end:

Example:

    LOCAL_CALCS
     // clang-format on
     . . .
     // clang-format off
    END_CALCS


### Math Expressions
Use spaces on each side of a mathematical operator (+ - * / =), unless
the expression is used as an index for an array, vector, matrix, etc.

Examples:

    n = (maxread - minread) / binwidth2 + 1;

    bins(z) = bins(z-1) + width;

### Control Statements
Use a space before the left parenthesis and after the right
parenthesis in control statements (e.g. for, do while, while, if, else).

Use curly braces for all code blocks. For short blocks, they may be on the same line as the conditional statement, but for large blocks, put them on their
own line (so that the closing curly brace is obvious).

Example:

    if (a > b)
    {
      . . . // lots of code
      b = 2;
      . . . // more code
    }
    else {
      b = 4;
    }

#### Logic Expressions
Use a space on each side of the logical operators (< > <= >= == !=).
Logical expressions used as indices may contain spaces or not
depending on what makes it most clear.

Examples:

    if (this > that)

    for (f = 1; f <= number; f++)


## TPL Sections
TPL sections deserve special consideration.
While the LOCAL_CALCS section is treated as pure C++ code,
the other sections of .tpl files are templates that are
translated to C++ code before compilation, so there are
additional syntax rules that must be taken into account.

### Indices
Items with multiple indices cannot have spaces within the index list.
In C++, spaces can be inserted, e.g. matrix1 (1, j, 5) while in
For the sake of 76 | consistency, this can be done in the C++ sections also. 77 | 78 | Examples: 79 | 80 | vector length(1,nlength); 81 | 82 | ivector parmlist(1,2*number); 83 | 84 | ### Semi-colons 85 | Use semi-colons to end each statement. 86 | Clang-format will join statements together if they are not separated 87 | by semi-colons. 88 | 89 | ## Indenting 90 | Use 2 spaces to indent each level of code to make it obvious what 91 | code belongs together. 92 | -------------------------------------------------------------------------------- /pretty_tpl.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | rem file: pretty_tpl.bat 3 | rem 4 | rem prettify tpl code by running through clang-format 5 | rem and then tpl-format (to correct issues). 6 | rem this assumes presence of clang-format.exe, 7 | rem tpl-format.exe, and the file .clang-format 8 | rem 9 | rem if using LOCAL_CALCS/END_CALCS, clang-format can 10 | rem only work within those sections. Use // clang-format on 11 | rem to turn it on after a LOCAL_CALCS and // clang-format off 12 | rem to turn it off before END_CALCS for normal tpl code. 13 | rem (Also place a // clang-format off at the beginning of the code). 14 | rem 15 | rem usage: pretty_tpl filename 16 | rem 17 | echo formatting file %1 18 | clang-format -i %1 19 | tpl-format %1 20 | rem 21 | rem and that's it! 22 | -------------------------------------------------------------------------------- /tpl-format.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nmfs-ost/ss3-source-code/5b7bfe34fc3ee955cbd172b316a3b5f65831d0e9/tpl-format.exe --------------------------------------------------------------------------------