├── .buildkite
│   ├── hooks
│   │   └── pre-command
│   ├── pipeline.yml
│   └── scripts
│       └── run_models.sh
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug-report.yml
│   │   ├── config.yml
│   │   └── feature-request.yml
│   ├── PULL_REQUEST_TEMPLATE
│   │   └── maintainer_pull_request_template.md
│   ├── pull_request_template.md
│   └── workflows
│       └── auto-release.yml
├── .gitignore
├── .quickstart
│   └── quickstart.yml
├── CHANGELOG.md
├── DECISIONLOG.md
├── LICENSE
├── README.md
├── dbt_project.yml
├── docs
│   ├── catalog.json
│   ├── index.html
│   └── manifest.json
├── integration_tests
│   ├── .gitignore
│   ├── ci
│   │   └── sample.profiles.yml
│   ├── dbt_project.yml
│   ├── macros
│   │   └── snowflake_seed_data.sql
│   ├── packages.yml
│   ├── requirements.txt
│   ├── seeds
│   │   ├── audit_log_data.csv
│   │   ├── brand_data.csv
│   │   ├── brand_data_postgres.csv
│   │   ├── daylight_time_data.csv
│   │   ├── domain_name_data.csv
│   │   ├── group_data.csv
│   │   ├── organization_data.csv
│   │   ├── organization_tag_data.csv
│   │   ├── organization_tag_data_snowflake.csv
│   │   ├── schedule_data.csv
│   │   ├── schedule_holiday_data.csv
│   │   ├── ticket_chat_data.csv
│   │   ├── ticket_chat_event_data.csv
│   │   ├── ticket_comment_data.csv
│   │   ├── ticket_data.csv
│   │   ├── ticket_field_history_data.csv
│   │   ├── ticket_form_history_data.csv
│   │   ├── ticket_schedule_data.csv
│   │   ├── ticket_tag_data.csv
│   │   ├── time_zone_data.csv
│   │   ├── user_data.csv
│   │   ├── user_data_snowflake.csv
│   │   ├── user_tag_data.csv
│   │   └── user_tag_data_snowflake.csv
│   └── tests
│       ├── consistency
│       │   ├── consistency_sla_policies.sql
│       │   ├── consistency_sla_policy_count.sql
│       │   ├── consistency_ticket_backlog.sql
│       │   ├── consistency_ticket_enriched.sql
│       │   ├── consistency_ticket_field_history.sql
│       │   ├── consistency_ticket_metrics.sql
│       │   └── consistency_ticket_summary.sql
│       └── integrity
│           ├── metrics_count_match.sql
│           ├── sla_count_match.sql
│           ├── sla_first_reply_time_match.sql
│           └── sla_metrics_parity.sql
├── macros
│   ├── clean_schedule.sql
│   ├── coalesce_cast.sql
│   ├── count_tokens.sql
│   ├── extract_schedule_day.sql
│   ├── extract_support_role_changes.sql
│   ├── fivetran_week_end.sql
│   └── fivetran_week_start.sql
├── models
│   ├── agent_work_time
│   │   ├── int_zendesk__ticket_work_time_business.sql
│   │   └── int_zendesk__ticket_work_time_calendar.sql
│   ├── intermediate
│   │   ├── int_zendesk__assignee_updates.sql
│   │   ├── int_zendesk__comment_metrics.sql
│   │   ├── int_zendesk__latest_ticket_form.sql
│   │   ├── int_zendesk__organization_aggregates.sql
│   │   ├── int_zendesk__requester_updates.sql
│   │   ├── int_zendesk__schedule_history.sql
│   │   ├── int_zendesk__schedule_holiday.sql
│   │   ├── int_zendesk__schedule_spine.sql
│   │   ├── int_zendesk__schedule_timezones.sql
│   │   ├── int_zendesk__ticket_aggregates.sql
│   │   ├── int_zendesk__ticket_historical_assignee.sql
│   │   ├── int_zendesk__ticket_historical_group.sql
│   │   ├── int_zendesk__ticket_historical_satisfaction.sql
│   │   ├── int_zendesk__ticket_historical_status.sql
│   │   ├── int_zendesk__ticket_schedules.sql
│   │   ├── int_zendesk__updates.sql
│   │   ├── int_zendesk__user_aggregates.sql
│   │   └── int_zendesk__user_role_history.sql
│   ├── reply_times
│   │   ├── int_zendesk__comments_enriched.sql
│   │   ├── int_zendesk__ticket_first_reply_time_business.sql
│   │   ├── int_zendesk__ticket_reply_times.sql
│   │   └── int_zendesk__ticket_reply_times_calendar.sql
│   ├── resolution_times
│   │   ├── int_zendesk__ticket_first_resolution_time_business.sql
│   │   ├── int_zendesk__ticket_full_resolution_time_business.sql
│   │   └── int_zendesk__ticket_resolution_times_calendar.sql
│   ├── sla_policy
│   │   ├── agent_work_time
│   │   │   ├── int_zendesk__agent_work_time_business_hours.sql
│   │   │   ├── int_zendesk__agent_work_time_calendar_hours.sql
│   │   │   └── int_zendesk__agent_work_time_filtered_statuses.sql
│   │   ├── int_zendesk__sla_policy_applied.sql
│   │   ├── reply_time
│   │   │   ├── int_zendesk__commenter_reply_at.sql
│   │   │   ├── int_zendesk__reply_time_business_hours.sql
│   │   │   ├── int_zendesk__reply_time_calendar_hours.sql
│   │   │   └── int_zendesk__reply_time_combined.sql
│   │   └── requester_wait_time
│   │       ├── int_zendesk__requester_wait_time_business_hours.sql
│   │       ├── int_zendesk__requester_wait_time_calendar_hours.sql
│   │       └── int_zendesk__requester_wait_time_filtered_statuses.sql
│   ├── ticket_history
│   │   ├── int_zendesk__field_calendar_spine.sql
│   │   ├── int_zendesk__field_history_enriched.sql
│   │   ├── int_zendesk__field_history_pivot.sql
│   │   ├── int_zendesk__field_history_scd.sql
│   │   └── int_zendesk__updater_information.sql
│   ├── unstructured
│   │   ├── intermediate
│   │   │   ├── int_zendesk__ticket_comment_document.sql
│   │   │   ├── int_zendesk__ticket_comment_documents_grouped.sql
│   │   │   └── int_zendesk__ticket_document.sql
│   │   ├── zendesk__document.sql
│   │   └── zendesk_unstructured.yml
│   ├── utils
│   │   ├── int_zendesk__calendar_spine.sql
│   │   └── int_zendesk__timezone_daylight.sql
│   ├── zendesk.yml
│   ├── zendesk__sla_policies.sql
│   ├── zendesk__ticket_backlog.sql
│   ├── zendesk__ticket_enriched.sql
│   ├── zendesk__ticket_field_history.sql
│   ├── zendesk__ticket_metrics.sql
│   └── zendesk__ticket_summary.sql
└── packages.yml
/.buildkite/hooks/pre-command:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | # Export secrets for Docker containers.
6 | # Restrict exposing secrets only to the steps that need them
7 | export GCLOUD_SERVICE_KEY=$(gcloud secrets versions access latest --secret="GCLOUD_SERVICE_KEY" --project="dbt-package-testing-363917")
8 | export CI_POSTGRES_DBT_HOST=$(gcloud secrets versions access latest --secret="CI_POSTGRES_DBT_HOST" --project="dbt-package-testing-363917")
9 | export CI_POSTGRES_DBT_USER=$(gcloud secrets versions access latest --secret="CI_POSTGRES_DBT_USER" --project="dbt-package-testing-363917")
10 | export CI_POSTGRES_DBT_PASS=$(gcloud secrets versions access latest --secret="CI_POSTGRES_DBT_PASS" --project="dbt-package-testing-363917")
11 | export CI_POSTGRES_DBT_DBNAME=$(gcloud secrets versions access latest --secret="CI_POSTGRES_DBT_DBNAME" --project="dbt-package-testing-363917")
12 | export CI_REDSHIFT_DBT_DBNAME=$(gcloud secrets versions access latest --secret="CI_REDSHIFT_DBT_DBNAME" --project="dbt-package-testing-363917")
13 | export CI_REDSHIFT_DBT_HOST=$(gcloud secrets versions access latest --secret="CI_REDSHIFT_DBT_HOST" --project="dbt-package-testing-363917")
14 | export CI_REDSHIFT_DBT_PASS=$(gcloud secrets versions access latest --secret="CI_REDSHIFT_DBT_PASS" --project="dbt-package-testing-363917")
15 | export CI_REDSHIFT_DBT_USER=$(gcloud secrets versions access latest --secret="CI_REDSHIFT_DBT_USER" --project="dbt-package-testing-363917")
16 | export CI_SNOWFLAKE_DBT_ACCOUNT=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_ACCOUNT" --project="dbt-package-testing-363917")
17 | export CI_SNOWFLAKE_DBT_DATABASE=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_DATABASE" --project="dbt-package-testing-363917")
18 | export CI_SNOWFLAKE_DBT_PASS=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_PASS" --project="dbt-package-testing-363917")
19 | export CI_SNOWFLAKE_DBT_ROLE=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_ROLE" --project="dbt-package-testing-363917")
20 | export CI_SNOWFLAKE_DBT_USER=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_USER" --project="dbt-package-testing-363917")
21 | export CI_SNOWFLAKE_DBT_WAREHOUSE=$(gcloud secrets versions access latest --secret="CI_SNOWFLAKE_DBT_WAREHOUSE" --project="dbt-package-testing-363917")
22 | export CI_DATABRICKS_DBT_HOST=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_HOST" --project="dbt-package-testing-363917")
23 | export CI_DATABRICKS_DBT_HTTP_PATH=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_HTTP_PATH" --project="dbt-package-testing-363917")
24 | export CI_DATABRICKS_DBT_TOKEN=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_TOKEN" --project="dbt-package-testing-363917")
25 | export CI_DATABRICKS_DBT_CATALOG=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_CATALOG" --project="dbt-package-testing-363917")
--------------------------------------------------------------------------------
/.buildkite/pipeline.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - label: ":postgres: Run Tests - Postgres"
3 | key: "run-dbt-postgres"
4 | plugins:
5 | - docker#v3.13.0:
6 | image: "python:3.8"
7 | shell: [ "/bin/bash", "-e", "-c" ]
8 | environment:
9 | - "BASH_ENV=/tmp/.bashrc"
10 | - "CI_POSTGRES_DBT_DBNAME"
11 | - "CI_POSTGRES_DBT_HOST"
12 | - "CI_POSTGRES_DBT_PASS"
13 | - "CI_POSTGRES_DBT_USER"
14 | commands: |
15 | bash .buildkite/scripts/run_models.sh postgres
16 |
17 | - label: ":snowflake-db: Run Tests - Snowflake"
18 | key: "run_dbt_snowflake"
19 | plugins:
20 | - docker#v3.13.0:
21 | image: "python:3.8"
22 | shell: [ "/bin/bash", "-e", "-c" ]
23 | environment:
24 | - "BASH_ENV=/tmp/.bashrc"
25 | - "CI_SNOWFLAKE_DBT_ACCOUNT"
26 | - "CI_SNOWFLAKE_DBT_DATABASE"
27 | - "CI_SNOWFLAKE_DBT_PASS"
28 | - "CI_SNOWFLAKE_DBT_ROLE"
29 | - "CI_SNOWFLAKE_DBT_USER"
30 | - "CI_SNOWFLAKE_DBT_WAREHOUSE"
31 | commands: |
32 | bash .buildkite/scripts/run_models.sh snowflake
33 |
34 | - label: ":gcloud: Run Tests - BigQuery"
35 | key: "run_dbt_bigquery"
36 | plugins:
37 | - docker#v3.13.0:
38 | image: "python:3.8"
39 | shell: [ "/bin/bash", "-e", "-c" ]
40 | environment:
41 | - "BASH_ENV=/tmp/.bashrc"
42 | - "GCLOUD_SERVICE_KEY"
43 | commands: |
44 | bash .buildkite/scripts/run_models.sh bigquery
45 |
46 | - label: ":amazon-redshift: Run Tests - Redshift"
47 | key: "run_dbt_redshift"
48 | plugins:
49 | - docker#v3.13.0:
50 | image: "python:3.8"
51 | shell: [ "/bin/bash", "-e", "-c" ]
52 | environment:
53 | - "BASH_ENV=/tmp/.bashrc"
54 | - "CI_REDSHIFT_DBT_DBNAME"
55 | - "CI_REDSHIFT_DBT_HOST"
56 | - "CI_REDSHIFT_DBT_PASS"
57 | - "CI_REDSHIFT_DBT_USER"
58 | commands: |
59 | bash .buildkite/scripts/run_models.sh redshift
60 |
61 | - label: ":databricks: Run Tests - Databricks"
62 | key: "run_dbt_databricks"
63 | plugins:
64 | - docker#v3.13.0:
65 | image: "python:3.8"
66 | shell: [ "/bin/bash", "-e", "-c" ]
67 | environment:
68 | - "BASH_ENV=/tmp/.bashrc"
69 | - "CI_DATABRICKS_DBT_HOST"
70 | - "CI_DATABRICKS_DBT_HTTP_PATH"
71 | - "CI_DATABRICKS_DBT_TOKEN"
72 | - "CI_DATABRICKS_DBT_CATALOG"
73 | commands: |
74 | bash .buildkite/scripts/run_models.sh databricks
--------------------------------------------------------------------------------
/.buildkite/scripts/run_models.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -euo pipefail
4 |
5 | apt-get update
6 | apt-get install libsasl2-dev
7 |
8 | python3 -m venv venv
9 | . venv/bin/activate
10 | pip install --upgrade pip setuptools
11 | pip install -r integration_tests/requirements.txt
12 | mkdir -p ~/.dbt
13 | cp integration_tests/ci/sample.profiles.yml ~/.dbt/profiles.yml
14 |
15 | db=$1
16 | echo `pwd`
17 | cd integration_tests
18 | dbt deps
19 | dbt seed --target "$db" --full-refresh
20 | dbt run --target "$db" --full-refresh
21 | dbt run --target "$db"
22 | dbt test --target "$db"
23 | dbt run --vars '{using_audit_log: true, zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false, using_organizations: false, using_ticket_chat: true}' --target "$db" --full-refresh
24 | dbt test --target "$db"
25 | dbt run --vars '{using_audit_log: true, internal_user_criteria: "role like '\''%agent%'\''", using_schedule_histories: false, using_holidays: false, using_ticket_chat: true}' --target "$db" --full-refresh
26 | dbt test --target "$db"
27 | dbt run --vars '{using_holidays: true, using_ticket_chat: true}' --target "$db" --full-refresh
28 | dbt test --target "$db"
29 |
30 | dbt run-operation fivetran_utils.drop_schemas_automation --target "$db"
--------------------------------------------------------------------------------
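Note on the run_models.sh variations above: each `dbt run --vars '{...}'` invocation toggles the package's enable/disable switches for a different test pass. As an illustrative sketch only (not a file in this repo), the overrides passed on line 23 are equivalent to configuring a consuming project's dbt_project.yml as follows:

    vars:
      using_audit_log: true
      zendesk__unstructured_enabled: true
      using_schedules: false
      using_domain_names: false
      using_user_tags: false
      using_ticket_form_history: false
      using_organization_tags: false
      using_organizations: false
      using_ticket_chat: true
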
/.github/ISSUE_TEMPLATE/bug-report.yml:
--------------------------------------------------------------------------------
1 | name: 🐞 Bug
2 | description: Report a bug or an issue you've found within the dbt package
3 | title: "[Bug]
"
4 | labels: ["type:bug"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for using the Fivetran dbt package and for taking the time to fill out this bug report. Your contributions help improve this package for the entire community of users!
10 | - type: checkboxes
11 | attributes:
12 | label: Is there an existing issue for this?
13 | description: Please search to see if an issue already exists for the bug you encountered.
14 | options:
15 | - label: I have searched the existing issues
16 | required: true
17 | - type: textarea
18 | attributes:
19 | label: Describe the issue
20 | description: A concise description of the problem you're experiencing. Also, please provide the steps to reproduce the issue if applicable.
21 | validations:
22 | required: true
23 | - type: textarea
24 | id: logs
25 | attributes:
26 | label: Relevant error log or model output
27 | description: |
28 | If applicable, provide the relevant error log or describe the problematic model output.
29 | render: shell
30 | validations:
31 | required: false
32 | - type: textarea
33 | attributes:
34 | label: Expected behavior
35 | description: A concise description of what you expected to happen.
36 | validations:
37 | required: true
38 | - type: textarea
39 | attributes:
40 | label: Possible solution
41 | description: Were you able to investigate and/or discover a potential fix to this bug in your investigation? If so, it would be much appreciated if you could submit code samples to show us how your fix resolved this issue.
42 | validations:
43 | required: false
44 | - type: textarea
45 | attributes:
46 | label: dbt Project configurations
47 | description: Please provide the variables and any other project specific configurations from your `dbt_project.yml`.
48 | validations:
49 | required: true
50 | - type: textarea
51 | attributes:
52 | label: Package versions
53 | description: Please provide the contents of your `packages.yml`.
54 | validations:
55 | required: true
56 | - type: dropdown
57 | id: database
58 | attributes:
59 | label: What database are you using dbt with?
60 | multiple: true
61 | options:
62 | - postgres
63 | - redshift
64 | - snowflake
65 | - bigquery
66 | - databricks
67 | - other (mention it in "Additional Context")
68 | validations:
69 | required: true
70 | - type: dropdown
71 | id: orchestration_type
72 | attributes:
73 | label: How are you running this dbt package?
74 | multiple: true
75 | options:
76 | - Fivetran Quickstart Data Model
77 | - Fivetran Transformations
78 | - dbt Core™
79 | - dbt Cloud™
80 | - other (mention it in "Additional Context")
81 | validations:
82 | required: true
83 | - type: textarea
84 | attributes:
85 | label: dbt Version
86 | description: Run `dbt --version` in your CLI or dbt cloud environment and copy the contents. Additionally, if you are using Fivetran dbt Transformations, provide the contents of the `dbtVersion` configuration in your `deployment.yml`.
87 | validations:
88 | required: true
89 | - type: textarea
90 | attributes:
91 | label: Additional Context
92 | description: |
93 | Links? References? Anything that will give us more context about the issue you are encountering!
94 |
95 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
96 | validations:
97 | required: false
98 | - type: checkboxes
99 | id: pr
100 | attributes:
101 | label: Are you willing to open a PR to help address this issue?
102 | description: Our team will assess this issue and let you know if we will add it to a future sprint. However, if you would like to expedite the solution, we encourage you to contribute to the package via a PR. Our team will then work with you to approve and merge your contributions as soon as possible.
103 | options:
104 | - label: Yes.
105 | - label: Yes, but I will need assistance.
106 | - label: No.
107 | required: false
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | contact_links:
2 | - name: Provide feedback to our dbt package team
3 | url: https://www.surveymonkey.com/r/DQ7K7WW
4 | about: Fill out our survey form to provide valuable feedback to the Fivetran team developing and maintaining the dbt packages.
5 | - name: Fivetran connector question
6 | url: https://support.fivetran.com/hc
7 | about: Have a question about your connector? Check out the Fivetran support portal for more details.
8 | - name: What is dbt
9 | url: https://docs.getdbt.com/docs/introduction
10 | about: Check out the dbt docs for all dbt related information
11 | - name: Hang out in dbt Slack
12 | url: https://www.getdbt.com/community/
13 | about: Have a question or just want to chat with fellow data friends? Join dbt Slack and hang out in the tools-fivetran channel with us!
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature-request.yml:
--------------------------------------------------------------------------------
1 | name: 🎉 Feature
2 | description: Suggest a new feature for the Fivetran dbt package
3 | title: "[Feature] "
4 | labels: ["type:enhancement"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for using the Fivetran dbt package and for taking the time to fill out this feature request. Your contributions help improve this package for the entire community of users!
10 | - type: checkboxes
11 | attributes:
12 | label: Is there an existing feature request for this?
13 | description: Please search to see if an issue already exists for the feature you would like.
14 | options:
15 | - label: I have searched the existing issues
16 | required: true
17 | - type: textarea
18 | attributes:
19 | label: Describe the Feature
20 | description: A clear and concise description of what you want to happen and why you want the new feature.
21 | validations:
22 | required: true
23 | - type: textarea
24 | attributes:
25 | label: How would you implement this feature?
26 | description: |
27 | How would you build out this feature with your existing data? Any code examples you can provide to help accelerate development on this issue?
28 | validations:
29 | required: true
30 | - type: textarea
31 | attributes:
32 | label: Describe alternatives you've considered
33 | description: |
34 | A clear and concise description of any alternative solutions or features you've considered.
35 | validations:
36 | required: false
37 | - type: checkboxes
38 | id: contributing
39 | attributes:
40 | label: Are you interested in contributing this feature?
41 | description: Our team will assess this feature and let you know if we will add it to a future sprint. However, if you would like to expedite the feature, we encourage you to contribute to the package via a PR. Our team will then work with you to approve and merge your contributions as soon as possible.
42 | options:
43 | - label: Yes.
44 | - label: Yes, but I will need assistance.
45 | - label: No.
46 | required: false
47 | - type: textarea
48 | attributes:
49 | label: Anything else?
50 | description: |
51 | Links? References? Anything that will give us more context about the feature you are suggesting!
52 | validations:
53 | required: false
54 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/maintainer_pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## PR Overview
2 | **This PR will address the following Issue/Feature:**
3 |
4 | **This PR will result in the following new package version:**
5 |
6 |
7 | **Please provide the finalized CHANGELOG entry which details the relevant changes included in this PR:**
8 |
9 |
10 | ## PR Checklist
11 | ### Basic Validation
12 | Please acknowledge that you have successfully performed the following commands locally:
13 | - [ ] dbt run --full-refresh && dbt test
14 | - [ ] dbt run (if incremental models are present) && dbt test
15 |
16 | Before marking this PR as "ready for review" the following have been applied:
17 | - [ ] The appropriate issue has been linked, tagged, and properly assigned
18 | - [ ] All necessary documentation and version upgrades have been applied
19 | - [ ] docs were regenerated (unless this PR does not include any code or yml updates)
20 | - [ ] BuildKite integration tests are passing
21 | - [ ] Detailed validation steps have been provided below
22 |
23 | ### Detailed Validation
24 | Please share any and all of your validation steps:
25 |
26 |
27 | ### If you had to summarize this PR in an emoji, which would it be?
28 |
29 | :dancer:
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | **Please provide your name and company**
2 |
3 | **Link the issue/feature request which this PR is meant to address**
4 |
5 |
6 | **Detail what changes this PR introduces and how this addresses the issue/feature request linked above.**
7 |
8 | **How did you validate the changes introduced within this PR?**
9 |
10 | **Which warehouse did you use to develop these changes?**
11 |
12 | **Did you update the CHANGELOG?**
13 |
14 |
15 | - [ ] Yes
16 |
17 | **Did you update the dbt_project.yml files with the version upgrade (please leverage standard semantic versioning)? (In both your main project and integration_tests)**
18 |
19 |
20 | - [ ] Yes
21 |
22 | **Typically there are additional maintenance changes required before this will be ready for an upcoming release. Are you comfortable with the Fivetran team making a few commits directly to your branch?**
23 |
24 |
25 | - [ ] Yes
26 | - [ ] No
27 |
28 | **If you had to summarize this PR in an emoji, which would it be?**
29 |
30 | :dancer:
31 |
32 | **Feedback**
33 |
34 | We are so excited you decided to contribute to the Fivetran community dbt package! We continue to work to improve the packages and would greatly appreciate your [feedback](https://www.surveymonkey.com/r/DQ7K7WW) on our existing dbt packages or what you'd like to see next.
35 |
36 | **PR Template**
37 | - [Community Pull Request Template](?expand=1&template=pull_request_template.md) (default)
38 |
39 | - [Maintainer Pull Request Template](?expand=1&template=maintainer_pull_request_template.md) (to be used by maintainers)
40 |
--------------------------------------------------------------------------------
/.github/workflows/auto-release.yml:
--------------------------------------------------------------------------------
1 | name: 'auto release'
2 | on:
3 | pull_request:
4 | types:
5 | - closed
6 | branches:
7 | - main
8 |
9 | jobs:
10 | call-workflow-passing-data:
11 | if: github.event.pull_request.merged
12 | uses: fivetran/dbt_package_automations/.github/workflows/auto-release.yml@main
13 | secrets: inherit
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | env/
2 | target/
3 | dbt_modules/
4 | logs/
5 | .DS_Store
6 | dbt_packages/
7 | package-lock.yml
--------------------------------------------------------------------------------
/.quickstart/quickstart.yml:
--------------------------------------------------------------------------------
1 | database_key: zendesk_database
2 | schema_key: zendesk_schema
3 |
4 | dbt_versions: ">=1.3.0 <2.0.0"
5 |
6 | table_variables:
7 | using_audit_log:
8 | - audit_log
9 | using_schedules:
10 | - daylight_time
11 | - schedule
12 | - time_zone
13 | using_domain_names:
14 | - domain_name
15 | using_user_tags:
16 | - user_tag
17 | using_ticket_form_history:
18 | - ticket_form_history
19 | using_organization_tags:
20 | - organization_tag
21 | using_schedule_histories:
22 | - audit_log
23 | using_holidays:
24 | - schedule_holiday
25 | using_brands:
26 | - brand
27 | using_organizations:
28 | - organization
29 | using_ticket_chat:
30 | - ticket_chat
31 | - ticket_chat_event
32 |
33 | destination_configurations:
34 | databricks:
35 | dispatch:
36 | - macro_namespace: dbt_utils
37 | search_order: [ 'spark_utils', 'dbt_utils' ]
38 |
39 | public_models: [
40 | "zendesk__ticket_enriched",
41 | "zendesk__ticket_metrics",
42 | "zendesk__ticket_summary",
43 | "zendesk__ticket_field_history",
44 | "zendesk__sla_policies",
45 | "zendesk__ticket_backlog"
46 | ]
47 |
--------------------------------------------------------------------------------
/dbt_project.yml:
--------------------------------------------------------------------------------
1 | name: 'zendesk'
2 | version: '0.23.0'
3 |
4 | config-version: 2
5 | require-dbt-version: [">=1.3.0", "<2.0.0"]
6 | on-run-start: '{{ fivetran_utils.empty_variable_warning("ticket_field_history_columns", "zendesk_ticket_field_history") }}'
7 | models:
8 | zendesk:
9 | +schema: zendesk
10 | +materialized: table
11 | agent_work_time:
12 | +materialized: ephemeral
13 | intermediate:
14 | +schema: zendesk_intermediate
15 | +materialized: table
16 | int_zendesk__schedule_timezones:
17 | +materialized: ephemeral
18 | int_zendesk__schedule_holiday:
19 | +materialized: ephemeral
20 | reply_times:
21 | +materialized: ephemeral
22 | resolution_times:
23 | +materialized: ephemeral
24 | sla_policy:
25 | +schema: zendesk_intermediate
26 | +materialized: table
27 | int_zendesk__commenter_reply_at:
28 | +materialized: ephemeral
29 | ticket_history:
30 | +schema: zendesk_intermediate
31 | +materialized: ephemeral
32 | unstructured:
33 | +schema: zendesk_unstructured
34 | +materialized: table
35 | utils:
36 | +materialized: ephemeral
37 | vars:
38 | zendesk:
39 | ticket_field_history_columns: ['assignee_id', 'status', 'priority']
40 | ticket_field_history_updater_columns: []
41 | audit_log: "{{ ref('stg_zendesk__audit_log') }}"
42 | brand: "{{ ref('stg_zendesk__brand') }}"
43 | daylight_time: "{{ ref('stg_zendesk__daylight_time') }}"
44 | domain_name: "{{ ref('stg_zendesk__domain_name') }}"
45 | field_history: "{{ ref('stg_zendesk__ticket_field_history') }}"
46 | group: "{{ ref('stg_zendesk__group') }}"
47 | organization_tag: "{{ ref('stg_zendesk__organization_tag') }}"
48 | organization: "{{ ref('stg_zendesk__organization') }}"
49 | schedule_holiday: "{{ ref('stg_zendesk__schedule_holiday') }}"
50 | schedule: "{{ ref('stg_zendesk__schedule') }}"
51 | ticket_comment: "{{ ref('stg_zendesk__ticket_comment') }}"
52 | ticket_form_history: "{{ ref('stg_zendesk__ticket_form_history') }}"
53 | ticket_schedule: "{{ ref('stg_zendesk__ticket_schedule') }}"
54 | ticket_tag: "{{ ref('stg_zendesk__ticket_tag') }}"
55 | ticket: "{{ ref('stg_zendesk__ticket') }}"
56 | time_zone: "{{ ref('stg_zendesk__time_zone') }}"
57 | user_tag: "{{ ref('stg_zendesk__user_tag') }}"
58 | user: "{{ ref('stg_zendesk__user') }}"
59 | ticket_chat: "{{ ref('stg_zendesk__ticket_chat') }}"
60 | ticket_chat_event: "{{ ref('stg_zendesk__ticket_chat_event') }}"
61 |
62 | using_schedules: true
63 | using_domain_names: true
64 | using_user_tags: true
65 | using_ticket_form_history: true
66 | using_organization_tags: true
67 |
68 | ticket_field_history_extension_months: 0 # how long to extend a ticket's field history past its closure date
69 | ticket_field_history_timeframe_years: 50 # how far back to pull tickets' field histories. default is everything
70 |
--------------------------------------------------------------------------------
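Note on the vars block above: the `stg_zendesk__*` references map each source table to its staging model, while the `using_*` flags and the `ticket_field_history_*` settings are the package's user-facing switches. A consuming project can override any of them in its own dbt_project.yml; a minimal sketch with illustrative values (variable names taken from the block above, values shown only as an example):

    vars:
      zendesk:
        using_schedules: false
        using_domain_names: false
        ticket_field_history_columns: ['assignee_id', 'status', 'priority']
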
/integration_tests/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | target/
3 | dbt_modules/
4 | logs/
5 | .DS_Store
6 | dbt_packages/
7 | package-lock.yml
--------------------------------------------------------------------------------
/integration_tests/ci/sample.profiles.yml:
--------------------------------------------------------------------------------
1 |
2 | # HEY! This file is used in the dbt package integration tests with Buildkite.
3 | # You should __NEVER__ check credentials into version control. Thanks for reading :)
4 |
5 | config:
6 | send_anonymous_usage_stats: False
7 | use_colors: True
8 |
9 | integration_tests:
10 | target: redshift
11 | outputs:
12 | redshift:
13 | type: redshift
14 | host: "{{ env_var('CI_REDSHIFT_DBT_HOST') }}"
15 | user: "{{ env_var('CI_REDSHIFT_DBT_USER') }}"
16 | pass: "{{ env_var('CI_REDSHIFT_DBT_PASS') }}"
17 | dbname: "{{ env_var('CI_REDSHIFT_DBT_DBNAME') }}"
18 | port: 5439
19 | schema: zendesk_integration_tests_63
20 | threads: 8
21 | bigquery:
22 | type: bigquery
23 | method: service-account-json
24 | project: 'dbt-package-testing'
25 | schema: zendesk_integration_tests_63
26 | threads: 8
27 | keyfile_json: "{{ env_var('GCLOUD_SERVICE_KEY') | as_native }}"
28 | snowflake:
29 | type: snowflake
30 | account: "{{ env_var('CI_SNOWFLAKE_DBT_ACCOUNT') }}"
31 | user: "{{ env_var('CI_SNOWFLAKE_DBT_USER') }}"
32 | password: "{{ env_var('CI_SNOWFLAKE_DBT_PASS') }}"
33 | role: "{{ env_var('CI_SNOWFLAKE_DBT_ROLE') }}"
34 | database: "{{ env_var('CI_SNOWFLAKE_DBT_DATABASE') }}"
35 | warehouse: "{{ env_var('CI_SNOWFLAKE_DBT_WAREHOUSE') }}"
36 | schema: zendesk_integration_tests_63
37 | threads: 8
38 | postgres:
39 | type: postgres
40 | host: "{{ env_var('CI_POSTGRES_DBT_HOST') }}"
41 | user: "{{ env_var('CI_POSTGRES_DBT_USER') }}"
42 | pass: "{{ env_var('CI_POSTGRES_DBT_PASS') }}"
43 | dbname: "{{ env_var('CI_POSTGRES_DBT_DBNAME') }}"
44 | port: 5432
45 | schema: zendesk_integration_tests_63
46 | threads: 8
47 | databricks:
48 | catalog: "{{ env_var('CI_DATABRICKS_DBT_CATALOG') }}"
49 | host: "{{ env_var('CI_DATABRICKS_DBT_HOST') }}"
50 | http_path: "{{ env_var('CI_DATABRICKS_DBT_HTTP_PATH') }}"
51 | schema: zendesk_integration_tests_63
52 | threads: 8
53 | token: "{{ env_var('CI_DATABRICKS_DBT_TOKEN') }}"
54 | type: databricks
--------------------------------------------------------------------------------
/integration_tests/macros/snowflake_seed_data.sql:
--------------------------------------------------------------------------------
1 | {% macro snowflake_seed_data(seed_name) %}
2 |
3 | {% if target.type == 'snowflake' %}
4 | {{ return(ref(seed_name ~ '_snowflake')) }}
5 | {% else %}
6 | {{ return(ref(seed_name)) }}
7 | {% endif %}
8 |
9 | {% endmacro %}
--------------------------------------------------------------------------------
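Note on the macro above: it lets the integration tests point a seed variable at a Snowflake-specific copy of a seed (for example organization_tag_data_snowflake, whose header quotes the TAG column) while every other target reads the base seed. The integration_tests/dbt_project.yml is not included in this dump, so treat the following wiring as an assumed sketch rather than the project's actual configuration:

    vars:
      zendesk:
        organization_tag: "{{ snowflake_seed_data('organization_tag_data') }}"
        user_tag: "{{ snowflake_seed_data('user_tag_data') }}"
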
/integration_tests/packages.yml:
--------------------------------------------------------------------------------
1 | packages:
2 | - local: ../
3 |
--------------------------------------------------------------------------------
/integration_tests/requirements.txt:
--------------------------------------------------------------------------------
1 | dbt-snowflake>=1.3.0,<1.8.0
2 | dbt-bigquery>=1.3.0,<1.8.0
3 | dbt-redshift>=1.3.0,<1.8.0
4 | dbt-postgres>=1.3.0,<1.8.0
5 | dbt-spark>=1.3.0,<1.8.0
6 | dbt-spark[PyHive]>=1.3.0,<1.8.0
7 | dbt-databricks>=1.6.0,<1.8.0
8 | certifi==2025.1.31
--------------------------------------------------------------------------------
/integration_tests/seeds/audit_log_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,action,actor_id,change_description,created_at,source_id,source_label,source_type
2 | 579796,2024-05-28 21:53:06.793000,update,37253,"Workweek changed from {:sun=>{""01:45""=>""02:45""}, :mon=>{""09:00""=>""20:00""}, :tue=>{""09:00""=>""20:00""}, :wed=>{""08:00""=>""20:00""}, :thu=>{""08:00""=>""20:00""}, :fri=>{""08:00""=>""20:00""}} to {:sun=>{""03:00""=>""04:00""}, :mon=>{""08:00""=>""20:00""}, :tue=>{""08:00""=>""20:00""}, :wed=>{""07:15""=>""20:00""}, :thu=>{""07:15""=>""20:00""}, :fri=>{""07:15""=>""20:00""}}",2024-05-28 21:51:37.000000,18542,Workweek: Central US Schedule,zendesk/business_hours/workweek
3 | 2679952,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:thu=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}, :mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :wed=>{""09:00""=>""17:00""}} to {:mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :wed=>{""09:00""=>""17:00""}, :thu=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}}",2024-05-21 11:20:29.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek
4 | 293556,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :wed=>{""09:00""=>""17:00""}, :thu=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}}",2024-05-21 11:20:28.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek
5 | 4441364,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:wed=>{""09:00""=>""17:00""}, :thu=>{""09:00""=>""17:00""}, :mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}} to {:mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :wed=>{""09:00""=>""17:00""}, :thu=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}}",2024-05-21 11:20:10.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek
6 | 70900,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=>{""09:00""=>""17:00""}, :tue=>{""09:00""=>""17:00""}, :wed=>{""09:00""=>""17:00""}, :thu=>{""09:00""=>""17:00""}, :fri=>{""09:00""=>""17:00""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek
7 | 70901,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {""mon"":{""10:00"":""20:00""},""tue"":{""10:00"":""20:00""},""wed"":{""10:00"":""20:00""},""thu"":{""10:00"":""20:00""},""fri"":{""10:00"":""20:00""}} to {""mon"":{""10:00"":""22:00""},""tue"":{""10:00"":""22:00""},""wed"":{""10:00"":""22:00""},""thu"":{""10:00"":""22:00""},""fri"":{""10:00"":""22:00""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek
8 | 70902,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:mon=>{""09:00""=>""10:45"", ""11:45""=>""12:45"", ""13:45""=>""14:45"", ""15:15""=>""16:15"", ""19:00""=>""20:00"", ""17:30""=>""18:30""}, :tue=>{""00:15""=>""13:15"", ""13:30""=>""18:30"", ""18:45""=>""21:45"", ""22:00""=>""24:00""}, :wed=>{""09:00""=>""21:00""}, :thu=>{""17:00""=>""18:00"", ""19:45""=>""20:45"", ""09:00""=>""10:45"", ""12:15""=>""13:15"", ""14:30""=>""15:30""}, :fri=>{""09:00""=>""12:45"", ""19:15""=>""22:30"", ""14:45""=>""15:45"", ""17:30""=>""18:30""}} to {:mon=>{""09:00""=>""10:45"", ""11:45""=>""12:45"", ""13:45""=>""14:45"", ""15:15""=>""16:15"", ""17:30""=>""18:30"", ""19:00""=>""20:00""}, :tue=>{""00:15""=>""13:15"", ""13:30""=>""18:30"", ""18:45""=>""21:45"", ""22:00""=>""24:00""}, :wed=>{""02:30""=>""21:45""}, :thu=>{""09:00""=>""10:45"", ""12:15""=>""13:15"", ""14:30""=>""15:30"", ""17:00""=>""18:00"", ""19:45""=>""20:45""}, :fri=>{""09:00""=>""12:45"", ""14:45""=>""15:45"", ""17:30""=>""18:30"", ""19:15""=>""22:30""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek
9 | 70903,2024-05-28 16:18:59.359000,update,267996,"Guide role changed from not set to Admin\\nExplore role changed from not set to Admin\\nSupport role changed from not set to Admin\\nTalk role changed from not set to Admin",2024-03-06 14:39:16.000000,1111,Team member 1,user
10 | 70904,2024-05-28 16:18:59.359000,update,267996,"Support role changed from Staff to not set\\nGuide role changed from Agent to not set\\nTalk role changed from Agent to not set\\nExplore role changed from Viewer to not set",2025-03-06 14:39:16.000000,2222,Team member 2,user
11 | 70905,2024-05-28 16:18:59.359000,update,267996,Support role changed from Admin to Advisor,2025-03-06 14:39:16.000000,1111,Team member 1,user
--------------------------------------------------------------------------------
/integration_tests/seeds/brand_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_deleted,_fivetran_synced,active,brand_url,default,has_help_center,help_center_state,logo_content_type,logo_content_url,logo_deleted,logo_file_name,logo_height,logo_id,logo_inline,logo_mapped_content_url,logo_size,logo_url,logo_width,name,subdomain,url
2 | 36000,FALSE,2021-02-16 20:08:56,TRUE,5e52582af2ab7c3a6e7fa6856b4cd945,TRUE,TRUE,enabled,image/png,5e52582af2ab7c3a6e7fa6856b4cd945,FALSE,2abdc594c0ad6eb2438448b3cbf7da56,22,3600,FALSE,5e52582af2ab7c3a6e7fa6856b4cd945,1961,5e52582af2ab7c3a6e7fa6856b4cd945,80,2abdc594c0ad6eb2438448b3cbf7da56,2abdc594c0ad6eb2438448b3cbf7da56,5e52582af2ab7c3a6e7fa6856b4cd945
--------------------------------------------------------------------------------
/integration_tests/seeds/brand_data_postgres.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_deleted,_fivetran_synced,active,brand_url,"""default""",has_help_center,help_center_state,logo_content_type,logo_content_url,logo_deleted,logo_file_name,logo_height,logo_id,logo_inline,logo_mapped_content_url,logo_size,logo_url,logo_width,name,subdomain,url
2 | 36000,FALSE,2021-02-16 20:08:56,TRUE,5e52582af2ab7c3a6e7fa6856b4cd945,TRUE,TRUE,enabled,image/png,5e52582af2ab7c3a6e7fa6856b4cd945,FALSE,2abdc594c0ad6eb2438448b3cbf7da56,22,3600,FALSE,5e52582af2ab7c3a6e7fa6856b4cd945,1961,5e52582af2ab7c3a6e7fa6856b4cd945,80,2abdc594c0ad6eb2438448b3cbf7da56,2abdc594c0ad6eb2438448b3cbf7da56,5e52582af2ab7c3a6e7fa6856b4cd945
--------------------------------------------------------------------------------
/integration_tests/seeds/daylight_time_data.csv:
--------------------------------------------------------------------------------
1 | time_zone,year,_fivetran_synced,daylight_end_utc,daylight_offset,daylight_start_utc
2 | Montevideo,1970,2020-03-05T05:04:14.920Z,1970-06-14T00:00:00,1,1970-04-25T00:00:00
3 | Cairo,1970,2020-03-05T05:04:14.920Z,1970-10-01T03:00:00,1,1970-05-01T01:00:00
--------------------------------------------------------------------------------
/integration_tests/seeds/domain_name_data.csv:
--------------------------------------------------------------------------------
1 | index,organization_id,_fivetran_synced,domain_name
2 | 0,370335,2020-08-25 08:08:42,f89dedacd29b4d5b0825d81a0c0fedfa
3 | 0,370355,2020-08-25 08:08:42,266cb5d33605c86846121770b4d312d1
4 | 0,370364,2020-08-25 08:08:42,7878cf470a679110108b3da5ea619a0c
5 | 0,370298,2020-08-04 02:12:24,6fc5425be3db5b4f70eaf61c4323672c
6 | 0,370298,2020-08-04 02:12:24,040fd698880646db8df23291671e4b57
7 | 0,370298,2020-08-04 02:12:24,421319ed5d78b90c3048510efd8df758
8 | 0,370298,2020-08-04 02:12:24,bafeca689fec90acd8878e2fb025e355
9 | 0,370298,2020-08-04 02:12:24,4ba5ff9deb372b42e67838630b39ea3c
10 | 0,370298,2020-08-04 02:12:24,7ae869ebf95fd6538c3af28cbbf29e53
11 | 0,370298,2020-08-04 02:12:24,2c94112ed0107c614f8c5ea6b1944962
--------------------------------------------------------------------------------
/integration_tests/seeds/group_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_deleted,_fivetran_synced,created_at,name,updated_at,url
2 | 360008426314,FALSE,2021-01-19 20:08:24.256,2020-02-04 00:03:11,Non-Support - (Billing Tickets),2020-02-04 00:40:57,https://zendesk.com/api/v2/groups/360008426314.json
3 | 360013670454,FALSE,2021-01-19 20:08:24.256,2020-09-01 10:16:50,Assignment Group 7,2020-09-01 10:16:50,https://zendesk.com/api/v2/groups/360013670454.json
4 | 360014827554,FALSE,2021-01-19 20:08:24.256,2020-10-07 21:53:42,Information Technology,2020-10-07 21:57:54,https://zendesk.com/api/v2/groups/360014827554.json
5 | 360008418973,FALSE,2021-01-19 20:08:24.256,2020-02-04 00:41:49,Non-Support - (Prospect Tickets),2020-02-04 00:41:49,https://zendesk.com/api/v2/groups/360008418973.json
6 | 360013670414,FALSE,2021-01-19 20:08:24.255,2020-09-01 10:15:17,Assignment Group 4,2020-09-01 10:15:17,https://zendesk.com/api/v2/groups/360013670414.json
7 | 360013656353,FALSE,2021-01-19 20:08:24.255,2020-09-01 10:15:55,Assignment Group 5,2020-09-01 10:15:55,https://zendesk.com/api/v2/groups/360013656353.json
8 | 360013656373,FALSE,2021-01-19 20:08:24.255,2020-09-01 10:16:23,Assignment Group 6,2020-09-01 10:16:23,https://zendesk.com/api/v2/groups/360013656373.json
9 | 360013670374,FALSE,2021-01-19 20:08:24.254,2020-09-01 10:14:39,Assignment Group 3,2020-09-01 10:14:39,https://zendesk.com/api/v2/groups/360013670374.json
--------------------------------------------------------------------------------
/integration_tests/seeds/organization_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,created_at,details,external_id,group_id,name,notes,shared_comments,shared_tickets,updated_at,url
2 | 370326104793,2020-04-24 14:13:07.083,2020-02-13 22:45:02,,,,name1,,FALSE,FALSE,2020-02-13 22:45:02,https://zendesk.com/api/v2/organizations/370326104793.json
3 | 370326181493,2020-04-24 14:13:07.091,2020-02-13 22:50:18,,,,name2,,FALSE,FALSE,2020-02-13 22:50:18,https://zendesk.com/api/v2/organizations/370326181493.json
4 | 370325990093,2020-04-24 14:13:02.924,2020-02-13 22:19:31,,,,name3,,FALSE,FALSE,2020-02-13 22:19:31,https://zendesk.com/api/v2/organizations/370325990093.json
5 | 370557178694,2020-11-19 02:08:25.601,2020-11-18 21:11:48,,,,name4,,FALSE,FALSE,2020-11-18 21:59:43,https://zendesk.com/api/v2/organizations/370557178694.json
6 | 370326279253,2020-04-24 14:13:07.111,2020-02-13 23:01:03,,,,name5,,FALSE,FALSE,2020-02-13 23:01:03,https://zendesk.com/api/v2/organizations/370326279253.json
7 | 370324281773,2020-06-12 02:09:09.143,2020-02-11 05:51:37,,,,name6,,TRUE,TRUE,2020-06-11 21:41:32,https://zendesk.com/api/v2/organizations/370324281773.json
8 | 370325960473,2020-04-24 14:13:02.767,2020-02-13 21:25:16,,,,name7,,FALSE,FALSE,2020-02-13 21:25:16,https://zendesk.com/api/v2/organizations/370325960473.json
9 | 370326052813,2020-04-24 14:13:07.056,2020-02-13 22:31:14,,,,name8,,FALSE,FALSE,2020-02-13 22:31:14,https://zendesk.com/api/v2/organizations/370326052813.json
10 | 370326014913,2020-04-24 14:13:07.006,2020-02-13 22:26:05,,,,name9,,FALSE,FALSE,2020-02-13 22:26:05,https://zendesk.com/api/v2/organizations/370326014913.json
11 | 370301409353,2020-04-24 14:12:43.388,2019-12-16 23:28:32,,,,name10,,FALSE,FALSE,2019-12-16 23:28:32,https://zendesk.com/api/v2/organizations/370301409353.json
--------------------------------------------------------------------------------
/integration_tests/seeds/organization_tag_data.csv:
--------------------------------------------------------------------------------
1 | organization_id,tag,_fivetran_synced
2 | 370364,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-25 08:08:42
3 | 370401,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-12 20:16:04
4 | 370401,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-12 20:16:04
5 | 370297,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-04 02:12:16
6 | 370297,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:20:08
7 | 370321,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:29:08
8 | 370354,63ad9d34f3503826e5f649ae6b7ac92c,2020-09-03 02:08:46
9 | 370319,63ad9d34f3503826e5f649ae6b7ac92c,2020-12-18 14:08:28
10 | 370321,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:20:40
11 | 370334,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
12 | 370334,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
13 | 370298,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
--------------------------------------------------------------------------------
/integration_tests/seeds/organization_tag_data_snowflake.csv:
--------------------------------------------------------------------------------
1 | organization_id,"TAG",_fivetran_synced
2 | 370364,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-25 08:08:42
3 | 370401,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-12 20:16:04
4 | 370401,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-12 20:16:04
5 | 370297,63ad9d34f3503826e5f649ae6b7ac92c,2020-08-04 02:12:16
6 | 370297,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:20:08
7 | 370321,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:29:08
8 | 370354,63ad9d34f3503826e5f649ae6b7ac92c,2020-09-03 02:08:46
9 | 370319,63ad9d34f3503826e5f649ae6b7ac92c,2020-12-18 14:08:28
10 | 370321,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:20:40
11 | 370334,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
12 | 370334,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
13 | 370298,63ad9d34f3503826e5f649ae6b7ac92c,2020-04-24 14:13:42
--------------------------------------------------------------------------------
/integration_tests/seeds/schedule_data.csv:
--------------------------------------------------------------------------------
1 | end_time,id,start_time,_fivetran_deleted,_fivetran_synced,end_time_utc,name,start_time_utc,time_zone,created_at
2 | 8400,360000310393,7680,FALSE,2021-01-19 20:08:37.690,8820,SupportCA,8100,Pacific Time (US & Canada),2019-11-18 20:23:34
3 | 5520,360000310393,4800,FALSE,2021-01-19 20:08:37.690,5940,SupportCA,5220,Pacific Time (US & Canada),2019-11-18 20:23:34
4 | 6960,360000310393,6240,FALSE,2021-01-19 20:08:37.690,7380,SupportCA,6660,Pacific Time (US & Canada),2019-11-18 20:23:34
5 | 2640,360000310393,1920,FALSE,2021-01-19 20:08:37.689,3060,SupportCA,2340,Pacific Time (US & Canada),2019-11-18 20:23:34
6 | 4080,360000310393,3360,FALSE,2021-01-19 20:08:37.690,4500,SupportCA,3780,Pacific Time (US & Canada),2019-11-18 20:23:34
--------------------------------------------------------------------------------
/integration_tests/seeds/schedule_holiday_data.csv:
--------------------------------------------------------------------------------
1 | id,schedule_id,_fivetran_deleted,_fivetran_synced,end_date,name,start_date
2 | 1163028,360000,FALSE,2023-01-19 21:53:06.281,2022-12-27,Test Holiday,2022-12-26
3 | 1163027,360000,FALSE,2023-01-19 21:53:06.281,2022-12-29,Test Holiday 2,2022-12-29
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_chat_data.csv:
--------------------------------------------------------------------------------
1 | chat_id,_fivetran_synced,authenticated,backend,channel,conversation_id,initiator,integration_id,tags,ticket_id,user_id,visitor_id,webpath
2 | 4323.953441.TyDt23helloZt4E,2024-03-19 16:51:27.753000,false,chat,chat,,1,,[],45,387066542,21432-1JDmGTcw35VtqY,[]
3 | 4323.953441.TyDs6revFIOI0Y,2024-03-19 16:51:27.748000,false,chat,chat,,1,,[],48,387066542,21432-1JDmGTcw35VtqY,[]
4 | 4323.953441.T6OrgbvMIqPplK,2024-03-19 16:51:11.713000,false,chat,chat,,1,,,6605,387066542,21432-1JDmGTcw35VtqY,[]
5 | 4323.953441.TyDs6revFIOI0Y,2024-03-19 16:51:27.764000,false,chat,chat,,1,,[],50,387066542,21432-1JDmGTcw35VtqY,[]
6 | 4323.953441.TyDfrDh7BV3QA,2024-03-19 16:51:30.989000,false,chat,chat,,1,,,44,21652596,9421471-1JDmGTcw35VtqY,[]
7 | 4323.953441.T6OrgbvMIqPplK,2024-03-19 16:51:27.756000,false,chat,chat,,1,,[],49,387066542,21432-1JDmGTcw35VtqY,[]
8 | 4323.953441.TzkBGrapJoA,2024-03-19 16:51:29.274000,false,chat,chat,,1,,[],47,387066542,21432-1JDmGTcw35VtqY,[]
9 | 4323.953441.TzkArUd3CjKI,2024-03-19 16:51:25.454000,false,chat,chat,,1,,[],52,221276,9421471-1JRmMeNdc68s72a,[]
10 | 4323.953441.TzAcedetjkcKyq,2024-03-19 16:51:21.717000,false,chat,native_messaging,465895dda,1,6427a8897416c83,[],53,223438084,9471-1J4223fqJrp,[]
11 | 4323.953441.TyDt23helloZt4E,2024-03-19 16:51:21.712000,false,chat,native_messaging,423dac0843a,1,658d883,[],54,2253454356,94271-1JXmgdg45wSDKy92,[]
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_chat_event_data.csv:
--------------------------------------------------------------------------------
1 | chat_id,created_at,_fivetran_synced,actor_id,chat_index,external_message_id,filename,is_history_context,message,message_id,message_source,mime_type,original_message,original_message_type,parent_message_id,reason,size,status,status_updated_at,type,url
2 | 4323.953441.TyDt23helloZt4E,2023-12-12 11:20:28.994000,2024-03-19 16:51:27.764000,agent:403969141094,3,,,,,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company"",""type"":""BUSINESS""},""content"":{""form"":{""block_chat_input"":false,""fields"":[{""label"":""Name"",""name"":""dataCapture.systemField.requester.name"",""text"":{""max_size"":256,""min_size"":1,""text"":""Name""}}]}},""id"":""658d83d7dfb6b3d"",""metadata"":{""fields"":{""formId"":{""string_value"":""DataCaptureForm""},""nextNodeId"":{""string_value"":""01HJRCFQ4VB_01HJRCFQ4P04853CC0""},""nodeId"":{""string_value"":""01HJRCFQ4XJVB_01HJRCFQ4PX5KEPQ""}}},""received"":""2023-12-28T14:18:20.084Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
3 | 4323.953441.TyDs6revFIOI0Y,2023-12-12 11:16:58.905000,2024-03-19 16:51:27.757000,agent:403959218893,5,,,,Hi there. Got a question? I'm here to help.,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""text"":{""actions"":[{""reply"":{""payload"":""goto_node=01HJRW4WKGCHGK66_01HJRCSAC5F658SSW""},""text"":""Talk to a human""}],""text"":""Hi there. Got a question? I'm here to help.""}},""id"":""658d847c05d00c4f6fc71512"",""metadata"":{""fields"":{}},""received"":""2023-12-28T14:21:48.518Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatDepartmentTransfer,
4 | 4323.953441.T6OrgbvMIqPplK,2023-12-08 05:07:51.946000,2024-03-19 16:51:30.989000,403969141094,2,,,,TALK TO A HUMAN,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""form"":{""block_chat_input"":false,""fields"":[{""label"":""Name"",""name"":""dataCapture.systemField.requester.name"",""text"":{""max_size"":256,""min_size"":1,""text"":""Name""}},{""email"":{""email"":""email""},""label"":""Email"",""name"":""dataCapture.systemField.requester.email""}]}},""id"":""6580349a2468"",""metadata"":{""fields"":{""formId"":{""string_value"":""DataCaptureForm""},""nextNodeId"":{""string_value"":""01HJRCQ1Y9HSRDAV_01HJRCQ1YAYFHKZ6""},""nodeId"":{""string_value"":""01HJRCQ2GDS9HSRDAV_01HJRCE6R1DNZJ77B4""}}},""received"":""2023-12-28T14:21:54.564Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatEmailChanged,
5 | 4323.953441.TyDs6revFIOI0Y,2023-12-12 11:16:29.190000,2024-03-19 16:51:27.756000,403959218893,0,,,,Thanks. Let me connect you with a customer support agent.,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""form"":{""block_chat_input"":false,""fields"":[{""label"":""Name"",""name"":""dataCapture.systemField.requester.name"",""text"":{""max_size"":256,""min_size"":1,""text"":""name""}},{""email"":{""email"":""email""},""label"":""Email"",""name"":""dataCapture.systemField.requester.email""}]}},""id"":""65bce6aa6d8d10f746"",""metadata"":{""fields"":{""formId"":{""string_value"":""DataCaptureForm""},""nextNodeId"":{""string_value"":""01HJRCQ1S9HSRDAV_01HJRCQ1Y5ED3MAYFHKZ6""},""nodeId"":{""string_value"":""01HJRCQ19HSRDAV_01HJRCQ1Y51DNZJ77B4""}}},""received"":""2024-02-02T12:57:14.359Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
6 | 4323.953441.TyDfrDh7BV3QA,2023-12-12 10:27:50.256000,2024-03-19 16:51:27.753000,403969141094,0,,,,,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company"",""type"":""BUSINESS""},""content"":{""form"":{""block_chat_input"":false,""fields"":[{""label"":""Name"",""name"":""dataCapture.systemField.requester.name"",""text"":{""max_size"":256,""min_size"":1,""text"":""Name""}}]}},""id"":""658d82df5de0112a19d6fc0c"",""metadata"":{""fields"":{""formId"":{""string_value"":""DataCaptureForm""},""nextNodeId"":{""string_value"":""01HJRCD7DDWVQD_01Y7W3Z1EWAGQEQA""},""nodeId"":{""string_value"":""01HJRCD505DDDWVQD_01HJRCD91MNZG59""}}},""received"":""2023-12-28T14:14:55.007Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatJoin,
7 | 4323.953441.T6OrgbvMIqPplK,2023-12-08 05:07:28.408000,2024-03-19 16:51:30.989000,403959218893,0,,,,No problem. Just share a few details and I'll connect you with someone from our team.,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""text"":{""actions"":[],""text"":""Thanks. Let me connect you with a customer support agent.""}},""id"":""658d843188000ddefeeba3f2"",""metadata"":{""fields"":{}},""received"":""2023-12-28T14:20:33.333Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
8 | 4323.953441.TzkBGrapJoA,2023-12-28 14:22:12.840000,2024-03-19 16:51:21.702000,403969141094,0,,,,No problem. Just share a few details and I'll connect you with someone from our team.,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""form"":{""block_chat_input"":false,""fields"":[{""label"":""Name"",""name"":""dataCapture.systemField.requester.name"",""text"":{""max_size"":256,""min_size"":1,""text"":""Name""}},{""email"":{""email"":""email""},""label"":""Email"",""name"":""dataCapture.systemField.requester.email""}]}},""id"":""658d841e69c2dcac0bc5a"",""metadata"":{""fields"":{""formId"":{""string_value"":""DataCaptureForm""},""nextNodeId"":{""string_value"":""01HEYB2GDS9HSRDAV_01HJRCQ1Y5EDYFHKZ6""},""nodeId"":{""string_value"":""01HJRCQ1Y4MJEYB2GDS9HSRDAV_01HJRCQ1NZJ77B4""}}},""received"":""2023-12-28T14:20:14.563Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
9 | 4323.953441.TzkArUd3CjKI,2023-12-28 14:20:35.032000,2024-03-19 16:51:23.267000,403959218893,0,,,,Hi there. Got a question? I'm here to help.,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""text"":{""actions"":[],""text"":""No problem. Just share a few details and I'll connect you with someone from our team.""}},""id"":""658d8481d00e97935de863ab"",""metadata"":{""fields"":{}},""received"":""2023-12-28T14:21:53.345Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
10 | 4323.953441.TzAcedetjkcKyq,2023-12-22 10:37:20.489000,2024-03-19 16:51:25.454000,403969141094,0,,,,,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""text"":{""actions"":[],""text"":""No problem. Just share a few details and I'll connect you with someone from our team.""}},""id"":""65bce6a92a2fe21c"",""metadata"":{""fields"":{}},""received"":""2024-02-02T12:57:13.130Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
11 | 4323.953441.TyDt23helloZt4E,2023-12-12 11:20:10.181000,2024-03-19 16:51:27.764000,403959218893,0,,,,TALK TO A HUMAN,,,,"{""author"":{""avatar_url"":""url"",""display_name"":""Company Bot"",""type"":""BUSINESS""},""content"":{""text"":{""actions"":[{""reply"":{""payload"":""goto_node=01HJRCSAC466_01HJRCSAC4FV78SSW""},""text"":""Talk to a human""}],""text"":""Hi there. Got a question? I'm here to help.""}},""id"":""65bce6a06765305e2a2fd629"",""metadata"":{""fields"":{}},""received"":""2024-02-02T12:57:04.171Z"",""source"":{""type"":""zd:answerBot""}}",,,,,,,ChatMessage,
12 |
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_comment_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,body,created,facebook_comment,public,ticket_id,tweet,user_id,voice_comment
2 | 1030820361033,2020-03-05 04:47:57.580,body1,2020-02-11 07:49:14,FALSE,TRUE,1077,FALSE,402935450113,FALSE
3 | 1198167194074,2020-09-07 08:09:18.997,body2,2020-06-26 20:47:27,FALSE,TRUE,7636,FALSE,413068955393,FALSE
4 | 1227221959693,2020-09-02 14:08:34.410,body3,2020-07-21 19:33:46,FALSE,TRUE,7367,FALSE,413068955393,FALSE
5 | 1227005852153,2020-09-02 14:08:34.410,body4,2020-07-21 17:07:54,FALSE,TRUE,7367,FALSE,413068955393,FALSE
6 | 1225658181514,2020-09-02 14:08:34.410,body5,2020-07-20 20:16:47,FALSE,TRUE,7367,FALSE,413068955393,FALSE
7 | 1189989966994,2020-09-07 08:09:18.991,body6,2020-06-19 20:40:41,FALSE,TRUE,7636,FALSE,413068955393,FALSE
8 | 1228317430153,2020-09-02 14:08:34.412,body7,2020-07-22 16:41:28,FALSE,TRUE,7367,FALSE,413068955393,FALSE
9 | 1227234557733,2020-09-02 14:08:34.411,body8,2020-07-21 19:43:27,FALSE,TRUE,7367,FALSE,413068955393,FALSE
10 | 1227270483653,2020-09-02 14:08:34.411,body9,2020-07-21 20:07:17,FALSE,TRUE,7367,FALSE,413068955393,FALSE
11 | 1335841328993,2020-11-04 14:09:01.386,body10,2020-10-19 13:56:05,FALSE,TRUE,13628,FALSE,424142574593,FALSE
12 | 1338387572833,2020-11-04 14:09:01.388,body11,2020-10-21 09:08:41,FALSE,TRUE,13628,FALSE,424142574593,FALSE
13 | 1326976058654,2020-11-06 14:09:43.392,body12,2020-10-11 13:13:55,FALSE,TRUE,13095,FALSE,423362500353,FALSE
14 | 1326333791494,2020-10-26 02:11:04.021,body13,2020-10-10 15:40:36,FALSE,TRUE,13057,FALSE,423362500353,FALSE
15 | 1326246720513,2020-10-26 02:11:04.021,body14,2020-10-10 14:39:46,FALSE,TRUE,13057,FALSE,423362500353,FALSE
16 | 1326861102034,2020-11-06 14:09:43.391,body15,2020-10-11 08:02:56,FALSE,TRUE,13095,FALSE,423362500353,FALSE
17 | 1326864438354,2020-11-06 14:09:43.391,body16,2020-10-11 08:08:54,FALSE,TRUE,13095,FALSE,423362500353,FALSE
18 | 1350940278314,2020-11-15 08:12:04.339,body17,2020-11-01 02:20:20,FALSE,TRUE,14352,FALSE,423362500353,FALSE
19 | 1326296442114,2020-11-06 14:09:43.388,body18,2020-10-10 14:51:51,FALSE,TRUE,13095,FALSE,423362500353,FALSE
20 | 1326862829914,2020-10-26 02:11:04.025,body19,2020-10-11 08:05:48,FALSE,TRUE,13057,FALSE,423362500353,FALSE
21 | 1326808961753,2020-10-26 02:11:04.023,body20,2020-10-11 07:53:43,FALSE,TRUE,13057,FALSE,423362500353,FALSE
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,allow_channelback,assignee_id,brand_id,created_at,description,due_at,external_id,forum_topic_id,group_id,has_incidents,is_public,organization_id,priority,problem_id,recipient,requester_id,status,subject,submitter_id,system_client,ticket_form_id,type,updated_at,url,via_channel,via_source_from_id,via_source_from_title,via_source_rel,via_source_to_address,via_source_to_name,merged_ticket_ids,via_source_from_address,followup_ids,via_followup_source_id
2 | 1595,2020-03-20 02:32:49.426,FALSE,,360003529474,2020-02-19 01:54:52,description1,,,,360006965034,FALSE,TRUE,370295712714,,,email@email.com,396331237134,deleted,subject1,396331237134,,360002048693,incident,2020-02-19 01:55:11,https://zendesk.com/api/v2/tickets/1595.json,web,,,,example@email.com,,[],,,
3 | 16988,2021-01-13 20:09:16.325,FALSE,418284131934,360003529474,2020-12-22 00:19:23,description1,,,,360013366274,FALSE,TRUE,370469077513,,,email@email.com,1500656884401,solved,subject1,1500656884401,,360002048693,,2021-01-13 18:42:39,https://zendesk.com/api/v2/tickets/16988.json,email,,,,example@email.com,Support,[],,[],
4 | 14173,2020-11-11 20:08:45.130,FALSE,396371699653,360003529474,2020-10-28 12:03:02,description1,,,,360006965034,FALSE,TRUE,370321120273,,,email@email.com,424883466453,closed,subject1,424883466453,,360002048693,,2020-11-11 17:01:32,https://zendesk.com/api/v2/tickets/14173.json,email,,,,example@email.com,Support,[],,,
5 | 11071,2020-10-02 14:08:33.216,FALSE,1111,360003529474,2020-08-28 18:06:36,Ticket to test field history changes,,,,360006965034,FALSE,TRUE,,urgent,,email@email.com,2222,closed,subject1,2222,,360002048693,,2020-11-15 11:01:27,https://zendesk.com/api/v2/tickets/11071.json,email,,,,X,Support,[],,,
6 | 1966,2020-03-25 20:32:23.617,FALSE,396315360434,360003529474,2020-02-27 06:05:08,description1,,,,360006965034,FALSE,TRUE,370295721514,,,email@email.com,402813302773,closed,subject1,402813302773,,360002048693,,2020-03-25 16:03:26,https://zendesk.com/api/v2/tickets/1966.json,email,,,,example@email.com,Support,[1967],,,
7 | 11013,2020-10-02 20:08:20.449,FALSE,402851697393,360003529474,2020-08-27 23:09:52,description1,,,,360008376313,FALSE,TRUE,370297881854,,,email@email.com,419688934974,deleted,subject1,419688934974,,360002048693,,2020-09-02 15:53:16,https://zendesk.com/api/v2/tickets/11013.json,email,,,,X,Support,[],,,
8 | 1404,2020-03-05 04:53:46.466,FALSE,396371699653,360003529474,2020-02-13 21:43:58,description1,,,,360006965034,FALSE,TRUE,370295709874,,,email@email.com,403125197514,closed,subject1,403125197514,,360002048693,,2020-02-28 01:01:57,https://zendesk.com/api/v2/tickets/1404.json,email,,,,example@email.com,Support,,,,
9 | 4721,2020-05-14 20:12:36.297,FALSE,396371706773,360003529474,2020-04-20 14:31:46,description1,,,,360006965034,FALSE,TRUE,370295719414,,,email@email.com,402862357193,closed,subject1,402862357193,,360002048693,,2020-05-14 20:04:34,https://zendesk.com/api/v2/tickets/4721.json,email,,,,example@email.com,Support,[],,,
10 | 6171,2020-06-01 02:11:39.760,FALSE,396334400494,360003529474,2020-05-17 17:50:31,description1,,,,360006965034,FALSE,TRUE,370295713034,,,email@email.com,410930434074,closed,subject1,410930434074,,360002048693,,2020-05-31 23:03:46,https://zendesk.com/api/v2/tickets/6171.json,email,,,,example@email.com,Support,[],,,
11 | 6605,2020-06-10 02:10:24.202,FALSE,396315360434,360003529474,2020-05-26 22:29:50,description1,,,,360006965034,FALSE,TRUE,370295719754,,,email@email.com,410416672973,closed,subject1,410416672973,,360002048693,,2020-06-09 23:03:49,https://zendesk.com/api/v2/tickets/6605.json,email,,,,example@email.com,Support,[],,,
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_field_history_data.csv:
--------------------------------------------------------------------------------
1 | field_name,ticket_id,updated,_fivetran_synced,user_id,value
2 | status,11071,2020-08-28 11:01:27,2020-10-02 14:08:33.216,,open
3 | assignee_id,11071,2020-08-28 11:01:27,2020-03-11 14:32:23.872,,1111
4 | priority,11071,2020-08-28 11:01:27,2020-03-11 14:32:23.872,,normal
5 | status,11071,2020-08-29 11:01:27,2020-10-02 14:08:33.216,,pending
6 | status,11071,2020-09-02 11:01:27,2020-10-02 14:08:33.216,,on-hold
7 | status,11071,2020-09-05 11:01:27,2020-10-02 14:08:33.216,,pending
8 | status,11071,2020-09-15 11:01:27,2020-10-02 14:08:33.216,,open
9 | status,11071,2020-09-30 11:01:27,2020-10-02 14:08:33.216,,pending
10 | priority,11071,2020-09-30 11:01:27,2020-03-11 14:32:23.872,,medium
11 | status,11071,2020-10-01 11:01:27,2020-10-02 14:08:33.216,,on-hold
12 | status,11071,2020-10-15 11:01:27,2020-10-02 14:08:33.216,,open
13 | priority,11071,2020-10-15 11:01:27,2020-03-11 14:32:23.872,,high
14 | status,11071,2020-10-17 11:01:27,2020-10-02 14:08:33.216,,on-hold
15 | status,11071,2020-10-20 11:01:27,2020-10-02 14:08:33.216,,pending
16 | status,11071,2020-11-02 11:01:27,2020-10-02 14:08:33.216,,open
17 | priority,11071,2020-11-02 11:01:27,2020-03-11 14:32:23.872,,urgent
18 | status,11071,2020-11-14 11:01:27,2020-10-02 14:08:33.216,,solved
19 | status,11071,2020-11-15 11:01:27,2020-10-02 14:08:33.216,,closed
20 | status,6964,2020-06-01 21:11:59,2020-07-02 02:09:05.984,,solved
21 | status,974,2020-02-10 21:47:41,2020-03-12 02:32:23.808,,solved
22 | priority,980,2020-02-10 22:06:57,2020-03-12 02:32:23.808,,solved
23 | status,8205,2020-07-07 23:01:47,2020-08-07 02:09:08.192,,solved
24 | status,103,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
25 | assignee_id,108,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,1111
26 | status,95,2020-02-10 08:36:39,2020-03-11 14:32:23.872,,solved
27 | status,107,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
28 | status,102,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
29 | status,102,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
30 | status,226,2020-02-10 08:35:52,2020-03-11 14:32:20.096,,solved
31 | status,229,2020-02-10 08:35:52,2020-03-11 14:32:20.096,,solved
32 | status,11016,2020-09-02 15:53:15,2020-10-02 20:08:19.701,,solved
33 | status,102,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
34 | status,982,2020-02-10 22:32:56,2020-03-12 02:32:29.045,,solved
35 | status,141,2020-02-10 08:36:25,2020-03-11 14:32:23.861,,solved
36 | status,1192,2020-02-12 00:22:26,2020-03-13 02:35:56.661,,solved
37 | status,102,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved
38 | status,312,2020-02-10 08:35:18,2020-03-11 14:32:09.781,,solved
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_form_history_data.csv:
--------------------------------------------------------------------------------
1 | id,updated_at,_fivetran_deleted,_fivetran_synced,active,created_at,display_name,end_user_visible,name
2 | 360200,2019-12-18 18:52:02,FALSE,2021-02-16 20:08:55,FALSE,2019-11-07 23:14:54,2be0ef4b34cc85f6e3582b3bef65cd4e,TRUE,2be0ef4b34cc85f6e3582b3bef65cd4e
3 | 360204,2020-01-20 01:27:51,FALSE,2020-02-05 20:16:15,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
4 | 360204,2020-09-14 22:45:49,FALSE,2020-12-28 14:08:18,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
5 | 360204,2021-02-11 22:42:12,FALSE,2021-02-16 20:08:55,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
6 | 360204,2020-02-10 03:34:33,FALSE,2020-02-10 20:19:14,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
7 | 360204,2020-06-02 14:44:52,FALSE,2020-09-14 20:08:14,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
8 | 360204,2021-02-02 17:33:15,FALSE,2021-02-04 14:08:48,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
9 | 360204,2020-02-11 01:43:20,FALSE,2020-02-15 14:20:08,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
10 | 360204,2021-02-04 16:29:37,FALSE,2021-02-11 20:09:11,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
11 | 360204,2020-02-15 17:13:29,FALSE,2020-03-05 02:23:07,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
12 | 360204,2020-03-05 02:29:38,FALSE,2020-06-01 02:11:19,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
13 | 360204,2020-12-28 16:17:21,FALSE,2021-02-02 14:08:59,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
14 | 360204,2020-02-05 20:27:15,FALSE,2020-02-10 02:18:09,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
15 | 360204,2020-06-01 07:57:13,FALSE,2020-06-02 14:11:12,TRUE,2019-11-13 22:40:56,1fa61d55d8504dbd25f92f31a9326b83,TRUE,1fa61d55d8504dbd25f92f31a9326b83
16 | 360205,2020-12-23 17:11:16,FALSE,2020-12-28 14:08:18,TRUE,2019-11-13 22:12:51,41f1a810ab161e4c3ee35e1afec238dd,TRUE,41f1a810ab161e4c3ee35e1afec238dd
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_schedule_data.csv:
--------------------------------------------------------------------------------
1 | created_at,ticket_id,_fivetran_synced,schedule_id
2 | 2020-02-04 15:26:26,123,2020-02-29 09:09:19.575,3600
3 | 2019-05-29 15:32:24,76900,2019-09-29 13:30:54.240,3600
4 | 2019-06-09 09:42:37,77569,2019-09-29 13:31:29.639,3600
5 | 2019-06-20 19:53:39,79336,2019-09-29 13:55:33.914,3600
6 | 2019-07-11 23:03:43,79616,2019-09-29 13:56:16.796,3600
7 | 2019-07-16 20:11:01,79972,2019-09-29 13:56:51.605,3600
8 | 2019-07-27 15:49:32,90640,2019-09-29 14:06:09.724,3600
9 | 2019-09-19 02:16:54,93969,2019-09-29 14:50:29.411,3600
10 | 2019-09-25 21:43:09,94490,2020-04-24 23:13:16.632,3600
11 | 2019-09-03 13:57:05,94490,2020-04-24 23:13:16.634,3600
--------------------------------------------------------------------------------
/integration_tests/seeds/ticket_tag_data.csv:
--------------------------------------------------------------------------------
1 | tag,ticket_id,_fivetran_synced
2 | customer,123,2020-03-05 04:50:07.657
3 | customer,123,2020-03-05 04:50:07.657
4 | customer,123,2020-03-05 04:50:07.657
5 | prospect,123,2020-03-05 04:50:07.657
6 | customer,455,2020-03-05 04:50:30.253
7 | customer,455,2020-03-05 04:50:30.253
8 | prospect,455,2020-03-05 04:50:30.253
9 | connector,789,2020-03-05 04:54:18.030
10 | prospect,1011,2020-03-05 04:54:18.030
11 | customer,1012,2020-03-05 04:54:31.717
--------------------------------------------------------------------------------
/integration_tests/seeds/time_zone_data.csv:
--------------------------------------------------------------------------------
1 | time_zone,_fivetran_synced,standard_offset
2 | London,2022-01-19T03:03:33.969Z,+00:00
3 | Dublin,2022-01-19T03:03:33.969Z,+00:00
--------------------------------------------------------------------------------
/integration_tests/seeds/user_data.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,active,alias,authenticity_token,chat_only,created_at,details,email,external_id,last_login_at,locale,locale_id,moderator,name,notes,only_private_comments,organization_id,phone,remote_photo_url,restricted_agent,role,shared,shared_agent,signature,suspended,ticket_restriction,time_zone,two_factor_auth_enabled,updated_at,url,verified
2 | 1111,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:55:12,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,Arthur Agent,,FALSE,370326203233,,,TRUE,agent,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:55:12,https://fivetran1813.zendesk.com/api/v2/users/403958466973.json,TRUE
3 | 2222,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,Earnest End User,,FALSE,370297696174,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:37,https://fivetran1813.zendesk.com/api/v2/users/403969371634.json,TRUE
4 | 403957746773,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:35:14,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319191913,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:35:15,https://fivetran1813.zendesk.com/api/v2/users/403957746773.json,TRUE
5 | 403970285734,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 23:07:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:07:41,https://fivetran1813.zendesk.com/api/v2/users/403970285734.json,TRUE
6 | 403969943274,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:57:51,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:57:52,https://fivetran1813.zendesk.com/api/v2/users/403969943274.json,TRUE
7 | 403959084893,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 23:14:08,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326207973,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:14:08,https://fivetran1813.zendesk.com/api/v2/users/403959084893.json,TRUE
8 | 403969141074,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:34:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319212353,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:34:41,https://fivetran1813.zendesk.com/api/v2/users/403969141074.json,TRUE
9 | 403957960093,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370331279213,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:38,https://fivetran1813.zendesk.com/api/v2/users/403957960093.json,TRUE
10 | 403969141094,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:34:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319212353,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:34:41,https://fivetran1813.zendesk.com/api/v2/users/403969141094.json,TRUE
11 | 403959218893,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 23:18:06,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326130213,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:18:06,https://fivetran1813.zendesk.com/api/v2/users/403959218893.json,TRUE
--------------------------------------------------------------------------------
/integration_tests/seeds/user_data_snowflake.csv:
--------------------------------------------------------------------------------
1 | id,_fivetran_synced,"""ACTIVE""",alias,authenticity_token,chat_only,created_at,details,email,external_id,last_login_at,locale,locale_id,moderator,name,notes,only_private_comments,organization_id,phone,remote_photo_url,restricted_agent,role,shared,shared_agent,signature,suspended,ticket_restriction,time_zone,two_factor_auth_enabled,updated_at,url,verified
2 | 403958466973,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:55:12,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370297696174,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:55:12,https://fivetran1813.zendesk.com/api/v2/users/403958466973.json,TRUE
3 | 403969371634,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:37,https://fivetran1813.zendesk.com/api/v2/users/403969371634.json,TRUE
4 | 403957746773,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:35:14,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319191913,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:35:15,https://fivetran1813.zendesk.com/api/v2/users/403957746773.json,TRUE
5 | 403970285734,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 23:07:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:07:41,https://fivetran1813.zendesk.com/api/v2/users/403970285734.json,TRUE
6 | 403969943274,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:57:51,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:57:52,https://fivetran1813.zendesk.com/api/v2/users/403969943274.json,TRUE
7 | 403959084893,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 23:14:08,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326207973,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:14:08,https://fivetran1813.zendesk.com/api/v2/users/403959084893.json,TRUE
8 | 403969141074,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:34:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319212353,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:34:41,https://fivetran1813.zendesk.com/api/v2/users/403969141074.json,TRUE
9 | 403957960093,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370331279213,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:38,https://fivetran1813.zendesk.com/api/v2/users/403957960093.json,TRUE
10 | 403969141094,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 22:34:41,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319212353,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:34:41,https://fivetran1813.zendesk.com/api/v2/users/403969141094.json,TRUE
11 | 403959218893,2020-03-05 05:03:34.208,"TRUE",,,FALSE,2020-02-26 23:18:06,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326130213,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:18:06,https://fivetran1813.zendesk.com/api/v2/users/403959218893.json,TRUE
--------------------------------------------------------------------------------
/integration_tests/seeds/user_tag_data.csv:
--------------------------------------------------------------------------------
1 | tag,user_id,_fivetran_synced
2 | 362ebc379b4b1b33b9656a2232a9e673,40092,2020-10-24 08:14:06
3 | 266e0d3d29830abfe7d4ed98b47966f7,40092,2020-10-24 08:14:06
4 | 266e0d3d29830abfe7d4ed98b47966f7,39882,2020-03-05 05:02:30
5 | ef43d72019ff152f0b1209e962a2f4f2,40282,2020-10-22 14:12:10
6 | ef43d72019ff152f0b1209e962a2f4f2,40284,2021-02-13 02:09:18
7 | ef43d72019ff152f0b1209e962a2f4f2,40285,2021-02-13 02:09:18
8 | d041ea3c4cb5f32804365f2f732dd88a,42370,2020-10-13 20:15:06
9 | b545892c9465b8e00d32b20fcd55caf7,41244,2021-02-16 08:09:05
10 | 266e0d3d29830abfe7d4ed98b47966f7,40386,2021-02-16 08:09:05
11 | 362ebc379b4b1b33b9656a2232a9e673,40386,2021-02-16 08:09:05
--------------------------------------------------------------------------------
/integration_tests/seeds/user_tag_data_snowflake.csv:
--------------------------------------------------------------------------------
1 | "TAG",user_id,_fivetran_synced
2 | 362ebc379b4b1b33b9656a2232a9e673,40092,2020-10-24 08:14:06
3 | 266e0d3d29830abfe7d4ed98b47966f7,40092,2020-10-24 08:14:06
4 | 266e0d3d29830abfe7d4ed98b47966f7,39882,2020-03-05 05:02:30
5 | ef43d72019ff152f0b1209e962a2f4f2,40282,2020-10-22 14:12:10
6 | ef43d72019ff152f0b1209e962a2f4f2,40284,2021-02-13 02:09:18
7 | ef43d72019ff152f0b1209e962a2f4f2,40285,2021-02-13 02:09:18
8 | d041ea3c4cb5f32804365f2f732dd88a,42370,2020-10-13 20:15:06
9 | b545892c9465b8e00d32b20fcd55caf7,41244,2021-02-16 08:09:05
10 | 266e0d3d29830abfe7d4ed98b47966f7,40386,2021-02-16 08:09:05
11 | 362ebc379b4b1b33b9656a2232a9e673,40386,2021-02-16 08:09:05
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_sla_policies.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | ticket_id,
10 | sla_policy_name,
11 | metric,
12 | sla_applied_at,
13 | target,
14 | in_business_hours,
15 | sla_breach_at,
16 | sla_elapsed_time,
17 | is_active_sla,
18 | is_sla_breach
19 | from {{ target.schema }}_zendesk_prod.zendesk__sla_policies
20 | ),
21 |
22 | dev as (
23 | select
24 | ticket_id,
25 | sla_policy_name,
26 | metric,
27 | sla_applied_at,
28 | target,
29 | in_business_hours,
30 | sla_breach_at,
31 | sla_elapsed_time,
32 | is_active_sla,
33 | is_sla_breach
34 | from {{ target.schema }}_zendesk_dev.zendesk__sla_policies
35 | ),
36 |
37 | prod_not_in_dev as (
38 | -- rows from prod not found in dev
39 | select * from prod
40 | except distinct
41 | select * from dev
42 | ),
43 |
44 | dev_not_in_prod as (
45 | -- rows from dev not found in prod
46 | select * from dev
47 | except distinct
48 | select * from prod
49 | ),
50 |
51 | combine as (
52 | select
53 | *,
54 | 'from prod' as source
55 | from prod_not_in_dev
56 |
57 | union all -- union since we only care if rows are produced
58 |
59 | select
60 | *,
61 | 'from dev' as source
62 | from dev_not_in_prod
63 | ),
64 |
65 | final as (
66 | select
67 | *,
68 | max(sla_elapsed_time) over (partition by ticket_id, metric, sla_applied_at) as max_sla_elapsed_time,
69 | min(sla_elapsed_time) over (partition by ticket_id, metric, sla_applied_at) as min_sla_elapsed_time
70 |
71 | from combine
72 | {{ "where ticket_id not in " ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ "" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}
73 | )
74 |
75 | select *
76 | from final
77 | where
78 | {# Take differences in runtime into account #}
79 | max_sla_elapsed_time - min_sla_elapsed_time > 5
80 | and date(sla_applied_at) < current_date
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_sla_policy_count.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | ticket_id,
10 | count(*) as total_slas
11 | from {{ target.schema }}_zendesk_prod.zendesk__sla_policies
12 | where date(sla_applied_at) < current_date
13 | {{ "and ticket_id not in " ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ "" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}
14 | group by 1
15 | ),
16 |
17 | dev as (
18 | select
19 | ticket_id,
20 | count(*) as total_slas
21 | from {{ target.schema }}_zendesk_dev.zendesk__sla_policies
22 | where date(sla_applied_at) < current_date
23 | {{ "and ticket_id not in " ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ "" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}
24 | group by 1
25 | ),
26 |
27 | final as (
28 | select
29 | prod.ticket_id as prod_ticket_id,
30 | dev.ticket_id as dev_ticket_id,
31 | prod.total_slas as prod_sla_total,
32 | dev.total_slas as dev_sla_total
33 | from prod
34 | full outer join dev
35 | on dev.ticket_id = prod.ticket_id
36 | )
37 |
38 | select *
39 | from final
40 | where prod_sla_total != dev_sla_total
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_ticket_backlog.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | {{ dbt_utils.star(from=ref('zendesk__ticket_backlog'), except=var('consistency_test_exclude_fields', '[]')) }}
10 | from {{ target.schema }}_zendesk_prod.zendesk__ticket_backlog
11 | ),
12 |
13 | dev as (
14 | select
15 | {{ dbt_utils.star(from=ref('zendesk__ticket_backlog'), except=var('consistency_test_exclude_fields', '[]')) }}
16 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_backlog
17 |
18 |     {# Make sure we're only comparing one schema, since this update (v0.19.0) added multi-schema support. Can remove for future releases #}
19 | {{ "where source_relation = '" ~ (var("zendesk_database", target.database)|lower ~ "." ~ var("zendesk_schema", "zendesk")) ~ "'" if 'source_relation' in var("consistency_test_exclude_fields", '[]') }}
20 |
21 | ),
22 |
23 | prod_not_in_dev as (
24 | -- rows from prod not found in dev
25 | select * from prod
26 | except distinct
27 | select * from dev
28 | ),
29 |
30 | dev_not_in_prod as (
31 | -- rows from dev not found in prod
32 | select * from dev
33 | except distinct
34 | select * from prod
35 | ),
36 |
37 | final as (
38 | select
39 | *,
40 | 'from prod' as source
41 | from prod_not_in_dev
42 |
43 | union all -- union since we only care if rows are produced
44 |
45 | select
46 | *,
47 | 'from dev' as source
48 | from dev_not_in_prod
49 | )
50 |
51 | select *
52 | from final
53 | where date_day < current_date
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_ticket_enriched.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | {{ dbt_utils.star(from=ref('zendesk__ticket_enriched'), except=var('consistency_test_exclude_fields', '[]')) }}
10 | from {{ target.schema }}_zendesk_prod.zendesk__ticket_enriched
11 | where true
12 | and {{ dbt.datediff(dbt.current_timestamp(), "updated_at", "minute") }} >= 60
13 | ),
14 |
15 | dev as (
16 | select
17 | {{ dbt_utils.star(from=ref('zendesk__ticket_enriched'), except=var('consistency_test_exclude_fields', '[]')) }}
18 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_enriched
19 | where true
20 | and {{ dbt.datediff(dbt.current_timestamp(), "updated_at", "minute") }} >= 60
21 |
22 |     {# Make sure we're only comparing one schema, since this update (v0.19.0) added multi-schema support. Can remove for future releases #}
23 | {{ "and source_relation = '" ~ (var("zendesk_database", target.database)|lower ~ "." ~ var("zendesk_schema", "zendesk")) ~ "'" if 'source_relation' in var("consistency_test_exclude_fields", '[]') }}
24 | ),
25 |
26 | prod_not_in_dev as (
27 | -- rows from prod not found in dev
28 | select * from prod
29 | except distinct
30 | select * from dev
31 | ),
32 |
33 | dev_not_in_prod as (
34 | -- rows from dev not found in prod
35 | select * from dev
36 | except distinct
37 | select * from prod
38 | ),
39 |
40 | final as (
41 | select
42 | *,
43 | 'from prod' as source
44 | from prod_not_in_dev
45 |
46 | union all -- union since we only care if rows are produced
47 |
48 | select
49 | *,
50 | 'from dev' as source
51 | from dev_not_in_prod
52 | )
53 |
54 | select *
55 | from final
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_ticket_field_history.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | {{ dbt_utils.star(from=ref('zendesk__ticket_field_history'), except=var('consistency_test_exclude_fields', '[]')) }}
10 | from {{ target.schema }}_zendesk_prod.zendesk__ticket_field_history
11 | ),
12 |
13 | dev as (
14 | select
15 | {{ dbt_utils.star(from=ref('zendesk__ticket_field_history'), except=var('consistency_test_exclude_fields', '[]')) }}
16 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_field_history
17 |
18 |     {# Make sure we're only comparing one schema, since this update (v0.19.0) added multi-schema support. Can remove for future releases #}
19 | {{ "where source_relation = '" ~ (var("zendesk_database", target.database)|lower ~ "." ~ var("zendesk_schema", "zendesk")) ~ "'" if 'source_relation' in var("consistency_test_exclude_fields", '[]') }}
20 | ),
21 |
22 | prod_not_in_dev as (
23 | -- rows from prod not found in dev
24 | select * from prod
25 | except distinct
26 | select * from dev
27 | ),
28 |
29 | dev_not_in_prod as (
30 | -- rows from dev not found in prod
31 | select * from dev
32 | except distinct
33 | select * from prod
34 | ),
35 |
36 | final as (
37 | select
38 | *,
39 | 'from prod' as source
40 | from prod_not_in_dev
41 |
42 | union all -- union since we only care if rows are produced
43 |
44 | select
45 | *,
46 | 'from dev' as source
47 | from dev_not_in_prod
48 | )
49 |
50 | select *
51 | from final
52 | where date_day < current_date
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_ticket_metrics.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | ticket_id,
10 | first_reply_time_business_minutes,
11 | first_reply_time_calendar_minutes
12 | from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics
13 | ),
14 |
15 | dev as (
16 | select
17 | ticket_id,
18 | first_reply_time_business_minutes,
19 | first_reply_time_calendar_minutes
20 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics
21 |
22 |     {# Make sure we're only comparing one schema, since this update (v0.19.0) added multi-schema support. Can remove for future releases #}
23 | {{ "where source_relation = '" ~ (var("zendesk_database", target.database)|lower ~ "." ~ var("zendesk_schema", "zendesk")) ~ "'" if 'source_relation' in var("consistency_test_exclude_fields", '[]') }}
24 | ),
25 |
26 | final as (
27 | select
28 | prod.ticket_id,
29 | prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,
30 | dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,
31 | prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,
32 | dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes
33 | from prod
34 | full outer join dev
35 | on dev.ticket_id = prod.ticket_id
36 | )
37 |
38 | select *
39 | from final
40 | where (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5
41 | or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)
42 | {{ "and ticket_id not in " ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ "" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}
--------------------------------------------------------------------------------
/integration_tests/tests/consistency/consistency_ticket_summary.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with prod as (
8 | select
9 | user_count,
10 | active_agent_count,
11 | deleted_user_count,
12 | end_user_count,
13 | suspended_user_count,
14 | new_ticket_count,
15 | on_hold_ticket_count,
16 | open_ticket_count,
17 | pending_ticket_count,
18 | solved_ticket_count,
19 | problem_ticket_count,
20 | assigned_ticket_count,
21 | reassigned_ticket_count,
22 | reopened_ticket_count,
23 | surveyed_satisfaction_ticket_count,
24 | unassigned_unsolved_ticket_count,
25 | unreplied_ticket_count,
26 | unreplied_unsolved_ticket_count,
27 | unsolved_ticket_count,
28 | recovered_ticket_count,
29 | deleted_ticket_count
30 |
31 | from {{ target.schema }}_zendesk_prod.zendesk__ticket_summary
32 | ),
33 |
34 | dev as (
35 | select
36 | user_count,
37 | active_agent_count,
38 | deleted_user_count,
39 | end_user_count,
40 | suspended_user_count,
41 | new_ticket_count,
42 | on_hold_ticket_count,
43 | open_ticket_count,
44 | pending_ticket_count,
45 | solved_ticket_count,
46 | problem_ticket_count,
47 | assigned_ticket_count,
48 | reassigned_ticket_count,
49 | reopened_ticket_count,
50 | surveyed_satisfaction_ticket_count,
51 | unassigned_unsolved_ticket_count,
52 | unreplied_ticket_count,
53 | unreplied_unsolved_ticket_count,
54 | unsolved_ticket_count,
55 | recovered_ticket_count,
56 | deleted_ticket_count
57 |
58 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_summary
59 |
60 |     {# Make sure we're only comparing one schema, since this update (v0.19.0) added multi-schema support. Can remove for future releases #}
61 | {{ "where source_relation = '" ~ (var("zendesk_database", target.database)|lower ~ "." ~ var("zendesk_schema", "zendesk")) ~ "'" if 'source_relation' in var("consistency_test_exclude_fields", '[]') }}
62 | ),
63 |
64 | joined as (
65 |
66 | select
67 | prod.user_count as prod_user_count,
68 | dev.user_count as dev_user_count,
69 | prod.active_agent_count as prod_active_agent_count,
70 | dev.active_agent_count as dev_active_agent_count,
71 | prod.deleted_user_count as prod_deleted_user_count,
72 | dev.deleted_user_count as dev_deleted_user_count,
73 | prod.end_user_count as prod_end_user_count,
74 | dev.end_user_count as dev_end_user_count,
75 | prod.suspended_user_count as prod_suspended_user_count,
76 | dev.suspended_user_count as dev_suspended_user_count,
77 | prod.new_ticket_count as prod_new_ticket_count,
78 | dev.new_ticket_count as dev_new_ticket_count,
79 | prod.on_hold_ticket_count as prod_on_hold_ticket_count,
80 | dev.on_hold_ticket_count as dev_on_hold_ticket_count,
81 | prod.open_ticket_count as prod_open_ticket_count,
82 | dev.open_ticket_count as dev_open_ticket_count,
83 | prod.pending_ticket_count as prod_pending_ticket_count,
84 | dev.pending_ticket_count as dev_pending_ticket_count,
85 | prod.solved_ticket_count as prod_solved_ticket_count,
86 | dev.solved_ticket_count as dev_solved_ticket_count,
87 | prod.problem_ticket_count as prod_problem_ticket_count,
88 | dev.problem_ticket_count as dev_problem_ticket_count,
89 | prod.assigned_ticket_count as prod_assigned_ticket_count,
90 | dev.assigned_ticket_count as dev_assigned_ticket_count,
91 | prod.reassigned_ticket_count as prod_reassigned_ticket_count,
92 | dev.reassigned_ticket_count as dev_reassigned_ticket_count,
93 | prod.reopened_ticket_count as prod_reopened_ticket_count,
94 | dev.reopened_ticket_count as dev_reopened_ticket_count,
95 | prod.surveyed_satisfaction_ticket_count as prod_surveyed_satisfaction_ticket_count,
96 | dev.surveyed_satisfaction_ticket_count as dev_surveyed_satisfaction_ticket_count,
97 | prod.unassigned_unsolved_ticket_count as prod_unassigned_unsolved_ticket_count,
98 | dev.unassigned_unsolved_ticket_count as dev_unassigned_unsolved_ticket_count,
99 | prod.unreplied_ticket_count as prod_unreplied_ticket_count,
100 | dev.unreplied_ticket_count as dev_unreplied_ticket_count,
101 | prod.unreplied_unsolved_ticket_count as prod_unreplied_unsolved_ticket_count,
102 | dev.unreplied_unsolved_ticket_count as dev_unreplied_unsolved_ticket_count,
103 | prod.unsolved_ticket_count as prod_unsolved_ticket_count,
104 | dev.unsolved_ticket_count as dev_unsolved_ticket_count,
105 | prod.recovered_ticket_count as prod_recovered_ticket_count,
106 | dev.recovered_ticket_count as dev_recovered_ticket_count,
107 | prod.deleted_ticket_count as prod_deleted_ticket_count,
108 | dev.deleted_ticket_count as dev_deleted_ticket_count
109 |
110 | from prod
111 | cross join dev
112 | )
113 |
114 | select *
115 | from joined
116 | where -- sometimes one of the below metrics will be off by 6-8, but let's leave 5 for now
117 | abs(prod_user_count - dev_user_count) > 5
118 | or abs(prod_active_agent_count - dev_active_agent_count) > 5
119 | or abs(prod_deleted_user_count - dev_deleted_user_count) > 5
120 | or abs(prod_end_user_count - dev_end_user_count) > 5
121 | or abs(prod_suspended_user_count - dev_suspended_user_count) > 5
122 | or abs(prod_new_ticket_count - dev_new_ticket_count) > 5
123 | or abs(prod_on_hold_ticket_count - dev_on_hold_ticket_count) > 5
124 | or abs(prod_open_ticket_count - dev_open_ticket_count) > 8
125 | or abs(prod_pending_ticket_count - dev_pending_ticket_count) > 5
126 | or abs(prod_solved_ticket_count - dev_solved_ticket_count) > 5
127 | or abs(prod_problem_ticket_count - dev_problem_ticket_count) > 5
128 | or abs(prod_assigned_ticket_count - dev_assigned_ticket_count) > 5
129 | or abs(prod_reassigned_ticket_count - dev_reassigned_ticket_count) > 5
130 | or abs(prod_reopened_ticket_count - dev_reopened_ticket_count) > 5
131 | or abs(prod_surveyed_satisfaction_ticket_count - dev_surveyed_satisfaction_ticket_count) > 5
132 | or abs(prod_unassigned_unsolved_ticket_count - dev_unassigned_unsolved_ticket_count) > 5
133 | or abs(prod_unreplied_ticket_count - dev_unreplied_ticket_count) > 5
134 | or abs(prod_unreplied_unsolved_ticket_count - dev_unreplied_unsolved_ticket_count) > 5
135 | or abs(prod_unsolved_ticket_count - dev_unsolved_ticket_count) > 5
136 | or abs(prod_recovered_ticket_count - dev_recovered_ticket_count) > 5
137 | or abs(prod_deleted_ticket_count - dev_deleted_ticket_count) > 5
--------------------------------------------------------------------------------
/integration_tests/tests/integrity/metrics_count_match.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | -- check that all the tickets are accounted for in the metrics
8 | with stg_count as (
9 | select
10 | source_relation,
11 | count(*) as stg_ticket_count
12 | from {{ ref('stg_zendesk__ticket') }}
13 | group by 1
14 | ),
15 |
16 | metric_count as (
17 | select
18 | source_relation,
19 | count(*) as metric_ticket_count
20 | from {{ ref('zendesk__ticket_metrics') }}
21 | group by 1
22 | )
23 |
24 | select
25 | stg_count.source_relation as stg_source_relation,
26 | metric_count.source_relation as model_source_relation,
27 | stg_ticket_count,
28 | metric_ticket_count
29 | from stg_count
30 | full join metric_count
31 | using(source_relation)
32 | where coalesce(stg_ticket_count, -1) != coalesce(metric_ticket_count, -2)
--------------------------------------------------------------------------------
/integration_tests/tests/integrity/sla_count_match.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | -- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model
8 | with source as (
9 | select
10 | *,
11 | case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name, source_relation order by valid_starting_at) else 1 end as latest_sla
12 | from {{ ref('stg_zendesk__ticket_field_history') }}
13 | ),
14 |
15 | source_filter as (
16 | select
17 | ticket_id,
18 | source_relation,
19 | count(*) as source_row_count
20 | from source
21 | where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')
22 | and value is not null
23 | and latest_sla = 1
24 | group by 1,2
25 | ),
26 |
27 | sla_policies as (
28 | select
29 | ticket_id,
30 | source_relation,
31 | count(*) as end_model_row_count
32 | from {{ ref('zendesk__sla_policies') }}
33 | group by 1,2
34 | ),
35 |
36 | match_check as (
37 | select
38 | coalesce(sla_policies.source_relation, source_filter.source_relation) as source_relation,
39 | sla_policies.ticket_id,
40 | end_model_row_count,
41 | source_row_count
42 | from sla_policies
43 | full outer join source_filter
44 | on source_filter.ticket_id = sla_policies.ticket_id
45 | and source_filter.source_relation = sla_policies.source_relation
46 | )
47 |
48 | select *
49 | from match_check
50 | where end_model_row_count != source_row_count
51 | {{ "and ticket_id not in " ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ "" if var('fivetran_integrity_sla_count_match_tickets',[]) }}
--------------------------------------------------------------------------------
/integration_tests/tests/integrity/sla_first_reply_time_match.sql:
--------------------------------------------------------------------------------
1 |
2 | {{ config(
3 | tags="fivetran_validations",
4 | enabled=var('fivetran_validation_tests_enabled', false)
5 | ) }}
6 |
7 | with ticket_metrics as (
8 | select
9 | ticket_id,
10 | source_relation,
11 | first_reply_time_business_minutes
12 | from {{ ref('zendesk__ticket_metrics') }}
13 | ),
14 |
15 | sla_policies as (
16 | select
17 | ticket_id,
18 | source_relation,
19 | sla_elapsed_time
20 | from {{ ref('zendesk__sla_policies') }}
21 | where metric = 'first_reply_time'
22 | and in_business_hours
23 | ),
24 |
25 | match_check as (
26 | select
27 | ticket_metrics.source_relation,
28 | ticket_metrics.ticket_id,
29 | ticket_metrics.first_reply_time_business_minutes,
30 | sla_policies.sla_elapsed_time
31 | from ticket_metrics
32 | full outer join sla_policies
33 | on ticket_metrics.ticket_id = sla_policies.ticket_id
34 | and ticket_metrics.source_relation = sla_policies.source_relation
35 | )
36 |
37 | select *
38 | from match_check
39 | where abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2
40 | {{ "and ticket_id not in " ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ "" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}
--------------------------------------------------------------------------------
/integration_tests/tests/integrity/sla_metrics_parity.sql:
--------------------------------------------------------------------------------
1 | {{ config(
2 | tags="fivetran_validations",
3 | enabled=var('fivetran_validation_tests_enabled', false)
4 | ) }}
5 |
6 | /*
7 | This test is to ensure the sla_elapsed_time from zendesk__sla_policies matches the corresponding time in zendesk__ticket_metrics.
8 | */
9 |
10 | with dev_slas as (
11 | select *
12 | from {{ target.schema }}_zendesk_dev.zendesk__sla_policies
13 | where in_business_hours
14 |
15 | ), dev_metrics as (
16 | select *
17 | from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics
18 |
19 | ), dev_compare as (
20 | select
21 | dev_slas.source_relation,
22 | dev_slas.ticket_id,
23 | dev_slas.metric,
24 | cast(dev_slas.sla_elapsed_time as {{ dbt.type_int() }}) as time_from_slas,
25 | case when metric = 'agent_work_time' then dev_metrics.agent_work_time_in_business_minutes
26 | when metric = 'requester_wait_time' then dev_metrics.requester_wait_time_in_business_minutes
27 | when metric = 'first_reply_time' then dev_metrics.first_reply_time_business_minutes
28 | end as time_from_metrics
29 | from dev_slas
30 | left join dev_metrics
31 | on dev_metrics.ticket_id = dev_slas.ticket_id
32 | and dev_metrics.source_relation = dev_slas.source_relation
33 | )
34 |
35 | select *
36 | from dev_compare
37 | where abs(time_from_slas - time_from_metrics) >= 5
38 | {{ "and ticket_id not in " ~ var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) ~ "" if var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) }}
--------------------------------------------------------------------------------
/macros/clean_schedule.sql:
--------------------------------------------------------------------------------
1 | {% macro clean_schedule(column_name) -%}
2 | {{ return(adapter.dispatch('clean_schedule', 'zendesk')(column_name)) }}
3 | {%- endmacro %}
4 |
5 | {% macro default__clean_schedule(column_name) -%}
6 | replace(replace(replace(replace(cast({{ column_name }} as {{ dbt.type_string() }}), '{', ''), '}', ''), '"', ''), ' ', '')
7 | {%- endmacro %}
--------------------------------------------------------------------------------
/macros/coalesce_cast.sql:
--------------------------------------------------------------------------------
1 | {% macro coalesce_cast(column_list, datatype) -%}
2 | {{ return(adapter.dispatch('coalesce_cast', 'zendesk')(column_list, datatype)) }}
3 | {%- endmacro %}
4 |
5 | {% macro default__coalesce_cast(column_list, datatype) %}
6 | coalesce(
7 | {%- for column in column_list %}
8 | cast({{ column }} as {{ datatype }})
9 | {%- if not loop.last -%},{%- endif -%}
10 | {% endfor %}
11 | )
12 | {% endmacro %}
--------------------------------------------------------------------------------
/macros/count_tokens.sql:
--------------------------------------------------------------------------------
1 | {% macro count_tokens(column_name) -%}
2 | {{ return(adapter.dispatch('count_tokens', 'zendesk')(column_name)) }}
3 | {%- endmacro %}
4 |
5 | {% macro default__count_tokens(column_name) %}
6 | {{ dbt.length(column_name) }} / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.
7 | {% endmacro %}
--------------------------------------------------------------------------------
/macros/extract_schedule_day.sql:
--------------------------------------------------------------------------------
1 | {% macro extract_schedule_day(string, day) -%}
2 |
3 | {{ return(adapter.dispatch('extract_schedule_day', 'zendesk') (string, day)) }}
4 |
5 | {%- endmacro %}
6 |
7 | {% macro default__extract_schedule_day(string, day) %}
8 | {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}
9 | regexp_extract({{ string }}, {{ regex }} )
10 |
11 | {%- endmacro %}
12 |
13 | {% macro bigquery__extract_schedule_day(string, day) %}
14 | {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}
15 | regexp_extract({{ string }}, {{ regex }} )
16 |
17 | {%- endmacro %}
18 |
19 | {% macro snowflake__extract_schedule_day(string, day) %}
20 | {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}
21 |
22 | REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 )
23 |
24 | {%- endmacro %}
25 |
26 | {% macro postgres__extract_schedule_day(string, day) %}
27 | {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}
28 |
29 | (regexp_matches({{ string }}, {{ regex }}))[1]
30 |
31 | {%- endmacro %}
32 |
33 | {% macro redshift__extract_schedule_day(string, day) %}
34 |
35 | {% set regex = '"' ~ day ~ '"' ~ ':\\\{([^\\\}]*)\\\}' -%}
36 |
37 | '{' || REGEXP_SUBSTR({{ string }}, '{{ regex }}', 1, 1, 'e') || '}'
38 |
39 | {%- endmacro %}
40 |
41 | {% macro spark__extract_schedule_day(string, day) %}
42 | {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" | replace("{", "\\\{") | replace("}", "\\\}") %}
43 | regexp_extract({{ string }}, {{ regex }}, 1)
44 |
45 | {%- endmacro %}
--------------------------------------------------------------------------------
/macros/extract_support_role_changes.sql:
--------------------------------------------------------------------------------
1 | {% macro extract_support_role_changes(field) -%}
2 | {{ return(adapter.dispatch('extract_support_role_changes', 'zendesk') (field)) }}
3 | {%- endmacro %}
4 |
5 | {% macro default__extract_support_role_changes(field) %}
6 | {{ dbt.split_part(
7 | dbt.split_part(field, "'support role changed from '", 2),
8 | "'\\n'", 1)
9 | }}
10 | {%- endmacro %}
11 |
12 | {% macro postgres__extract_support_role_changes(field) %}
13 | {{ dbt.split_part(
14 | dbt.split_part(field, "'support role changed from '", 2),
15 | "'\n'", 1)
16 | }}
17 | {%- endmacro %}
18 |
19 | {% macro spark__extract_support_role_changes(field) %}
20 | regexp_extract({{ field }}, 'support role changed from (.*)', 1)
21 | {%- endmacro %}
--------------------------------------------------------------------------------
/macros/fivetran_week_end.sql:
--------------------------------------------------------------------------------
1 | {%- macro fivetran_week_end(dt) -%}
2 | {{ return(adapter.dispatch('fivetran_week_end', 'zendesk') (dt)) }}
3 | {%- endmacro -%}
4 |
5 | {%- macro default__fivetran_week_end(dt) -%}
6 | {{ dbt.last_day(dt, 'week') }}
7 | {%- endmacro %}
8 |
9 | {%- macro snowflake__fivetran_week_end(dt) -%}
10 | cast({{ dbt.dateadd('day', 6, zendesk.fivetran_week_start(dt)) }} as date)
11 | {%- endmacro %}
12 |
13 | {%- macro postgres__fivetran_week_end(dt) -%}
14 | cast({{ dbt.dateadd('day', 6, zendesk.fivetran_week_start(dt)) }} as date)
15 | {%- endmacro %}
16 |
17 | {%- macro duckdb__fivetran_week_end(dt) -%}
18 | {{ return(zendesk.postgres__fivetran_week_end(dt)) }}
19 | {%- endmacro %}
20 |
--------------------------------------------------------------------------------
/macros/fivetran_week_start.sql:
--------------------------------------------------------------------------------
1 | {%- macro fivetran_week_start(dt) -%}
2 | {{ return(adapter.dispatch('fivetran_week_start', 'zendesk') (dt)) }}
3 | {%- endmacro -%}
4 |
5 | {%- macro default__fivetran_week_start(dt) -%}
6 | cast({{ dbt.date_trunc('week', dt) }} as date)
7 | {%- endmacro %}
8 |
9 | {%- macro snowflake__fivetran_week_start(dt) -%}
10 | -- Adjust week start to Sunday
11 | cast(
12 | case
13 | when dayofweekiso({{ dt }}) = 7 then {{ dt }} -- dayofweekiso returns 7 for Sunday
14 | else {{ dbt.dateadd("day", "-1 * dayofweekiso(" ~ dt ~ ")", dt) }}
15 | end
16 | as date)
17 | {%- endmacro %}
18 |
19 | {%- macro postgres__fivetran_week_start(dt) -%}
20 | -- Sunday as week start date
21 | cast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, dt))) }} as date)
22 | {%- endmacro %}
23 |
24 | {%- macro duckdb__fivetran_week_start(dt) -%}
25 | {{ return(zendesk.postgres__fivetran_week_start(dt)) }}
26 | {%- endmacro %}
27 |
--------------------------------------------------------------------------------
/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql:
--------------------------------------------------------------------------------
1 | with ticket_historical_status as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__ticket_historical_status') }}
5 |
6 | ), calendar_minutes as (
7 |
8 | select
9 | source_relation,
10 | ticket_id,
11 | status,
12 | case when status in ('pending') then status_duration_calendar_minutes
13 | else 0 end as agent_wait_time_in_minutes,
14 | case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes
15 | else 0 end as requester_wait_time_in_minutes,
16 | case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes
17 | else 0 end as solve_time_in_minutes,
18 | case when status in ('new', 'open') then status_duration_calendar_minutes
19 | else 0 end as agent_work_time_in_minutes,
20 | case when status in ('hold') then status_duration_calendar_minutes
21 | else 0 end as on_hold_time_in_minutes,
22 | case when status = 'new' then status_duration_calendar_minutes
23 | else 0 end as new_status_duration_minutes,
24 | case when status = 'open' then status_duration_calendar_minutes
25 | else 0 end as open_status_duration_minutes,
26 | case when status = 'deleted' then 1
27 | else 0 end as ticket_deleted,
28 | first_value(valid_starting_at) over (partition by ticket_id, source_relation order by valid_starting_at desc, ticket_id, source_relation rows unbounded preceding) as last_status_assignment_date,
29 | case when lag(status) over (partition by ticket_id, source_relation order by valid_starting_at) = 'deleted' and status != 'deleted'
30 | then 1
31 | else 0
32 | end as ticket_recoveries
33 |
34 | from ticket_historical_status
35 |
36 | )
37 |
38 | select
39 | source_relation,
40 | ticket_id,
41 | last_status_assignment_date,
42 | sum(ticket_deleted) as ticket_deleted_count,
43 | sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,
44 | sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,
45 | sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,
46 | sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,
47 | sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,
48 | sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,
49 | sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,
50 | sum(ticket_recoveries) as total_ticket_recoveries
51 | from calendar_minutes
52 | group by 1, 2, 3
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__assignee_updates.sql:
--------------------------------------------------------------------------------
1 | with ticket_updates as (
2 | select *
3 | from {{ ref('int_zendesk__updates') }}
4 |
5 | ), ticket as (
6 | select *
7 | from {{ ref('stg_zendesk__ticket') }}
8 |
9 | ), ticket_requester as (
10 | select
11 | ticket.source_relation,
12 | ticket.ticket_id,
13 | ticket.assignee_id,
14 | ticket_updates.valid_starting_at
15 |
16 | from ticket
17 |
18 | left join ticket_updates
19 | on ticket_updates.ticket_id = ticket.ticket_id
20 | and ticket_updates.user_id = ticket.assignee_id
21 | and ticket_updates.source_relation = ticket.source_relation
22 |
23 | ), final as (
24 | select
25 | source_relation,
26 | ticket_id,
27 | assignee_id,
28 | max(valid_starting_at) as last_updated,
29 | count(*) as total_updates
30 | from ticket_requester
31 |
32 | group by 1, 2, 3
33 | )
34 |
35 | select *
36 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__comment_metrics.sql:
--------------------------------------------------------------------------------
1 | with ticket_comments as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__comments_enriched') }}
5 | ),
6 |
7 | comment_counts as (
8 | select
9 | source_relation,
10 | ticket_id,
11 | last_comment_added_at,
12 | sum(case when commenter_role = 'internal_comment' and is_public = true
13 | then 1
14 | else 0
15 | end) as count_public_agent_comments,
16 | sum(case when commenter_role = 'internal_comment'
17 | then 1
18 | else 0
19 | end) as count_agent_comments,
20 | sum(case when commenter_role = 'external_comment'
21 | then 1
22 | else 0
23 | end) as count_end_user_comments,
24 | sum(case when is_public = true
25 | then 1
26 | else 0
27 | end) as count_public_comments,
28 | sum(case when is_public = false
29 | then 1
30 | else 0
31 | end) as count_internal_comments,
32 | count(*) as total_comments,
33 | count(distinct case when commenter_role = 'internal_comment'
34 | then user_id
35 | end) as count_ticket_handoffs,
36 | sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'
37 | then 1
38 | else 0
39 | end) as count_agent_replies
40 | from ticket_comments
41 |
42 | group by 1, 2, 3
43 | ),
44 |
45 | final as (
46 | select
47 | *,
48 | count_public_agent_comments = 1 as is_one_touch_resolution,
49 | count_public_agent_comments = 2 as is_two_touch_resolution
50 | from comment_counts
51 | )
52 |
53 | select *
54 | from final
55 |
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__latest_ticket_form.sql:
--------------------------------------------------------------------------------
1 | --To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.
2 | {{ config(enabled=var('using_ticket_form_history', True)) }}
3 |
4 | with ticket_form_history as (
5 | select *
6 | from {{ ref('stg_zendesk__ticket_form_history') }}
7 | ),
8 |
9 | latest_ticket_form as (
10 | select
11 | *,
12 | row_number() over(partition by ticket_form_id, source_relation order by updated_at desc) as latest_form_index
13 | from ticket_form_history
14 | ),
15 |
16 | final as (
17 | select
18 | source_relation,
19 | ticket_form_id,
20 | created_at,
21 | updated_at,
22 | display_name,
23 | is_active,
24 | name,
25 | latest_form_index
26 | from latest_ticket_form
27 |
28 | where latest_form_index = 1
29 | )
30 |
31 | select *
32 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__organization_aggregates.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_organizations', True)) }}
2 |
3 | with organizations as (
4 | select *
5 | from {{ ref('stg_zendesk__organization') }}
6 |
7 |     -- If you use organization tags, this will be included; if not, it will be ignored.
8 | {% if var('using_organization_tags', True) %}
9 | ), organization_tags as (
10 | select *
11 | from {{ ref('stg_zendesk__organization_tag') }}
12 |
13 | ), tag_aggregates as (
14 | select
15 | organizations.organization_id,
16 | organizations.source_relation,
17 | {{ fivetran_utils.string_agg('organization_tags.tags', "', '" ) }} as organization_tags
18 | from organizations
19 |
20 | left join organization_tags
21 | on organizations.organization_id = organization_tags.organization_id
22 | and organizations.source_relation = organization_tags.source_relation
23 |
24 |
25 | group by 1, 2
26 | {% endif %}
27 |
28 |     -- If you use domain names, this will be included; if not, it will be ignored.
29 | {% if var('using_domain_names', True) %}
30 | ), domain_names as (
31 |
32 | select *
33 | from {{ ref('stg_zendesk__domain_name') }}
34 |
35 | ), domain_aggregates as (
36 | select
37 | organizations.organization_id,
38 | organizations.source_relation,
39 | {{ fivetran_utils.string_agg('domain_names.domain_name', "', '" ) }} as domain_names
40 | from organizations
41 |
42 | left join domain_names
43 | on organizations.organization_id = domain_names.organization_id
44 | and organizations.source_relation = domain_names.source_relation
45 |
46 | group by 1, 2
47 | {% endif %}
48 |
49 |
50 | ), final as (
51 | select
52 | organizations.*
53 |
54 |         -- If you use organization tags, this will be included; if not, it will be ignored.
55 | {% if var('using_organization_tags', True) %}
56 | ,tag_aggregates.organization_tags
57 | {% endif %}
58 |
59 |         -- If you use domain names, this will be included; if not, it will be ignored.
60 | {% if var('using_domain_names', True) %}
61 | ,domain_aggregates.domain_names
62 | {% endif %}
63 |
64 | from organizations
65 |
66 |     -- If you use domain names, this will be included; if not, it will be ignored.
67 | {% if var('using_domain_names', True) %}
68 | left join domain_aggregates
69 | on organizations.organization_id = domain_aggregates.organization_id
70 | and organizations.source_relation = domain_aggregates.source_relation
71 | {% endif %}
72 |
73 |     -- If you use organization tags, this will be included; if not, it will be ignored.
74 | {% if var('using_organization_tags', True) %}
75 | left join tag_aggregates
76 | on organizations.organization_id = tag_aggregates.organization_id
77 | and organizations.source_relation = tag_aggregates.source_relation
78 | {% endif %}
79 | )
80 |
81 | select *
82 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__requester_updates.sql:
--------------------------------------------------------------------------------
1 | with ticket_updates as (
2 | select *
3 | from {{ ref('int_zendesk__updates') }}
4 |
5 | ), ticket as (
6 | select *
7 | from {{ ref('stg_zendesk__ticket') }}
8 |
9 | ), ticket_requester as (
10 | select
11 | ticket.source_relation,
12 | ticket.ticket_id,
13 | ticket.requester_id,
14 | ticket_updates.valid_starting_at
15 |
16 | from ticket
17 |
18 | left join ticket_updates
19 | on ticket_updates.ticket_id = ticket.ticket_id
20 | and ticket_updates.user_id = ticket.requester_id
21 | and ticket_updates.source_relation = ticket.source_relation
22 |
23 | ), final as (
24 | select
25 | source_relation,
26 | ticket_id,
27 | requester_id,
28 | max(valid_starting_at) as last_updated,
29 | count(*) as total_updates
30 | from ticket_requester
31 |
32 | group by 1, 2, 3
33 | )
34 |
35 | select *
36 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__schedule_holiday.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }}
2 |
3 | with schedule as (
4 | select *
5 | from {{ var('schedule') }}
6 |
7 | ), schedule_holiday as (
8 | select *
9 | from {{ var('schedule_holiday') }}
10 |
11 | -- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.
12 | ), schedule_holiday_ranges as (
13 | select
14 | source_relation,
15 | holiday_name,
16 | schedule_id,
17 | cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from,
18 | cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until,
19 | cast({{ zendesk.fivetran_week_start('holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday,
20 | cast({{ zendesk.fivetran_week_start(dbt.dateadd('week', 1, 'holiday_end_date_at')) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday,
21 |         -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. The first step is to find those holidays.
22 | {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned
23 | from schedule_holiday
24 |
25 | -- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.
26 | ), expanded_holidays as (
27 | select
28 | schedule_holiday_ranges.*,
29 | cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number
30 | from schedule_holiday_ranges
31 |     -- Generate a sequence of numbers from 1 up to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks
32 | cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers
33 | where schedule_holiday_ranges.holiday_weeks_spanned > 1
34 | and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned
35 |
36 | -- Define start and end times for each segment of a multi-week holiday.
37 | ), split_multiweek_holidays as (
38 |
39 | -- Business as usual for holidays that fall within a single week.
40 | select
41 | source_relation,
42 | holiday_name,
43 | schedule_id,
44 | holiday_valid_from,
45 | holiday_valid_until,
46 | holiday_starting_sunday,
47 | holiday_ending_sunday,
48 | holiday_weeks_spanned
49 | from schedule_holiday_ranges
50 | where holiday_weeks_spanned = 1
51 |
52 | union all
53 |
54 | -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.
55 | select
56 | source_relation,
57 | holiday_name,
58 | schedule_id,
59 | case
60 | when holiday_week_number = 1 -- first week in multiweek holiday
61 | then holiday_valid_from
62 |                 -- We have to use days in case the warehouse does not truncate to Sunday.
63 | else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})
64 | end as holiday_valid_from,
65 | case
66 | when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday
67 | then holiday_valid_until
68 |                 -- We have to use days in case the warehouse does not truncate to Sunday.
69 | else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday
70 | end as holiday_valid_until,
71 | case
72 | when holiday_week_number = 1 -- first week in multiweek holiday
73 | then holiday_starting_sunday
74 |                 -- We have to use days in case the warehouse does not truncate to Sunday.
75 | else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})
76 | end as holiday_starting_sunday,
77 | case
78 | when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday
79 | then holiday_ending_sunday
80 |                 -- We have to use days in case the warehouse does not truncate to Sunday.
81 | else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})
82 | end as holiday_ending_sunday,
83 | holiday_weeks_spanned
84 | from expanded_holidays
85 | where holiday_weeks_spanned > 1
86 |
87 |     -- Create a record for each holiday start and each holiday end per week, for use downstream.
88 | ), split_holidays as (
89 | -- Creates a record that will be used for the time before a holiday
90 | select
91 | split_multiweek_holidays.*,
92 | holiday_valid_from as holiday_date,
93 | '0_gap' as holiday_start_or_end
94 | from split_multiweek_holidays
95 |
96 | union all
97 |
98 | -- Creates another record that will be used for the holiday itself
99 | select
100 | split_multiweek_holidays.*,
101 | holiday_valid_until as holiday_date,
102 | '1_holiday' as holiday_start_or_end
103 | from split_multiweek_holidays
104 | )
105 |
106 | select *
107 | from split_holidays
108 |
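To make the week-splitting above concrete, here is a minimal illustrative sketch (hypothetical dates, plain SQL literals rather than model output, and assuming Sunday-based weeks): a holiday running Sunday 2024-01-07 through Saturday 2024-01-20 is broken into two weekly segments, roughly:

    -- hypothetical illustration only; not part of the model above
    select 'winter break' as holiday_name, date '2024-01-07' as holiday_valid_from, date '2024-01-13' as holiday_valid_until, date '2024-01-07' as holiday_starting_sunday
    union all
    select 'winter break', date '2024-01-14', date '2024-01-20', date '2024-01-14'

The split_holidays CTE then doubles each segment into a '0_gap' record (keyed on holiday_valid_from) and a '1_holiday' record (keyed on holiday_valid_until) for downstream use.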
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_aggregates.sql:
--------------------------------------------------------------------------------
1 | with tickets as (
2 | select *
3 | from {{ ref('stg_zendesk__ticket') }}
4 |
5 | ), ticket_tags as (
6 |
7 | select *
8 | from {{ ref('stg_zendesk__ticket_tag') }}
9 |
10 |     -- If you use brands, this will be included; if not, it will be ignored.
11 | {% if var('using_brands', True) %}
12 | ), brands as (
13 |
14 | select *
15 | from {{ ref('stg_zendesk__brand') }}
16 | {% endif %}
17 |
18 | ), ticket_tag_aggregate as (
19 | select
20 | source_relation,
21 | ticket_tags.ticket_id,
22 | {{ fivetran_utils.string_agg( 'ticket_tags.tags', "', '" )}} as ticket_tags
23 | from ticket_tags
24 | group by 1, 2
25 |
26 | ), final as (
27 | select
28 | tickets.*,
29 | case when lower(tickets.type) = 'incident'
30 | then true
31 | else false
32 | end as is_incident,
33 | {% if var('using_brands', True) %}
34 | brands.name as ticket_brand_name,
35 | {% endif %}
36 | ticket_tag_aggregate.ticket_tags
37 | from tickets
38 |
39 | left join ticket_tag_aggregate
40 | on tickets.ticket_id = ticket_tag_aggregate.ticket_id
41 | and tickets.source_relation = ticket_tag_aggregate.source_relation
42 |
43 | {% if var('using_brands', True) %}
44 | left join brands
45 | on brands.brand_id = tickets.brand_id
46 | and brands.source_relation = tickets.source_relation
47 | {% endif %}
48 | )
49 |
50 | select *
51 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_historical_assignee.sql:
--------------------------------------------------------------------------------
1 | with assignee_updates as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__updates') }}
5 | where field_name = 'assignee_id'
6 |
7 | ), calculate_metrics as (
8 | select
9 | source_relation,
10 | ticket_id,
11 | field_name as assignee_id,
12 | value,
13 | ticket_created_date,
14 | valid_starting_at,
15 | lag(valid_starting_at) over (partition by source_relation, ticket_id order by valid_starting_at) as previous_update,
16 | lag(value) over (partition by source_relation, ticket_id order by valid_starting_at) as previous_assignee,
17 | first_value(valid_starting_at) over (partition by source_relation, ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,
18 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,
19 | first_value(valid_starting_at) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,
20 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,
21 | count(value) over (partition by source_relation, ticket_id) as assignee_stations_count
22 | from assignee_updates
23 |
24 | ), unassigned_time as (
25 | select
26 | source_relation,
27 | ticket_id,
28 | sum(case when assignee_id is not null and previous_assignee is null
29 | then {{ dbt.datediff("coalesce(previous_update, ticket_created_date)", "valid_starting_at", 'second') }} / 60
30 | else 0
31 | end) as ticket_unassigned_duration_calendar_minutes,
32 | count(distinct value) as unique_assignee_count
33 | from calculate_metrics
34 |
35 | group by 1, 2
36 |
37 | ), window_group as (
38 | select
39 | calculate_metrics.source_relation,
40 | calculate_metrics.ticket_id,
41 | calculate_metrics.first_agent_assignment_date,
42 | calculate_metrics.first_assignee_id,
43 | calculate_metrics.last_agent_assignment_date,
44 | calculate_metrics.last_assignee_id,
45 | calculate_metrics.assignee_stations_count
46 | from calculate_metrics
47 |
48 | {{ dbt_utils.group_by(n=7) }}
49 |
50 | ), final as (
51 | select
52 | window_group.*,
53 | unassigned_time.unique_assignee_count,
54 | unassigned_time.ticket_unassigned_duration_calendar_minutes
55 | from window_group
56 |
57 | left join unassigned_time
58 | on window_group.ticket_id = unassigned_time.ticket_id
59 | and window_group.source_relation = unassigned_time.source_relation
60 | )
61 |
62 | select *
63 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_historical_group.sql:
--------------------------------------------------------------------------------
1 | with ticket_group_history as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__updates') }}
5 | where field_name = 'group_id'
6 |
7 | ), group_breakdown as (
8 | select
9 | source_relation,
10 | ticket_id,
11 | valid_starting_at,
12 | valid_ending_at,
13 | value as group_id
14 | from ticket_group_history
15 |
16 | ), final as (
17 | select
18 | source_relation,
19 | ticket_id,
20 | count(group_id) as group_stations_count
21 | from group_breakdown
22 |
23 | group by 1, 2
24 | )
25 |
26 | select *
27 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql:
--------------------------------------------------------------------------------
1 | with satisfaction_updates as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__updates') }}
5 | where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code')
6 |
7 | ), latest_reason as (
8 | select
9 | source_relation,
10 | ticket_id,
11 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason
12 | from satisfaction_updates
13 |
14 | where field_name = 'satisfaction_reason_code'
15 |
16 | ), latest_comment as (
17 | select
18 | source_relation,
19 | ticket_id,
20 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment
21 | from satisfaction_updates
22 |
23 | where field_name = 'satisfaction_comment'
24 |
25 | ), first_and_latest_score as (
26 | select
27 | source_relation,
28 | ticket_id,
29 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,
30 | first_value(value) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score
31 | from satisfaction_updates
32 |
33 | where field_name = 'satisfaction_score' and value != 'offered'
34 |
35 | ), satisfaction_scores as (
36 | select
37 | source_relation,
38 | ticket_id,
39 | count(value) over (partition by source_relation, ticket_id) as count_satisfaction_scores,
40 | case when lag(value) over (partition by source_relation, ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'
41 | then 1
42 | else 0
43 | end as good_to_bad_score,
44 | case when lag(value) over (partition by source_relation, ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'
45 | then 1
46 | else 0
47 | end as bad_to_good_score
48 | from satisfaction_updates
49 | where field_name = 'satisfaction_score'
50 |
51 | ), score_group as (
52 | select
53 | source_relation,
54 | ticket_id,
55 | count_satisfaction_scores,
56 | sum(good_to_bad_score) as total_good_to_bad_score,
57 | sum(bad_to_good_score) as total_bad_to_good_score
58 | from satisfaction_scores
59 |
60 | group by 1, 2, 3
61 |
62 | ), window_group as (
63 | select
64 | satisfaction_updates.source_relation,
65 | satisfaction_updates.ticket_id,
66 | latest_reason.latest_satisfaction_reason,
67 | latest_comment.latest_satisfaction_comment,
68 | first_and_latest_score.first_satisfaction_score,
69 | first_and_latest_score.latest_satisfaction_score,
70 | score_group.count_satisfaction_scores,
71 | score_group.total_good_to_bad_score,
72 | score_group.total_bad_to_good_score
73 |
74 | from satisfaction_updates
75 |
76 | left join latest_reason
77 | on satisfaction_updates.ticket_id = latest_reason.ticket_id
78 | and satisfaction_updates.source_relation = latest_reason.source_relation
79 |
80 | left join latest_comment
81 | on satisfaction_updates.ticket_id = latest_comment.ticket_id
82 | and satisfaction_updates.source_relation = latest_comment.source_relation
83 |
84 | left join first_and_latest_score
85 | on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id
86 | and satisfaction_updates.source_relation = first_and_latest_score.source_relation
87 |
88 | left join score_group
89 | on satisfaction_updates.ticket_id = score_group.ticket_id
90 | and satisfaction_updates.source_relation = score_group.source_relation
91 |
92 | {{ dbt_utils.group_by(n=9) }}
93 |
94 | ), final as (
95 | select
96 | source_relation,
97 | ticket_id,
98 | latest_satisfaction_reason,
99 | latest_satisfaction_comment,
100 | first_satisfaction_score,
101 | latest_satisfaction_score,
102 | case when count_satisfaction_scores > 0
103 | then (count_satisfaction_scores - 1) --Subtracting one as the first score is always "offered".
104 | else count_satisfaction_scores
105 | end as count_satisfaction_scores,
106 | case when total_good_to_bad_score > 0
107 | then true
108 | else false
109 | end as is_good_to_bad_satisfaction_score,
110 | case when total_bad_to_good_score > 0
111 | then true
112 | else false
113 | end as is_bad_to_good_satisfaction_score
114 | from window_group
115 | )
116 |
117 | select *
118 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_historical_status.sql:
--------------------------------------------------------------------------------
1 | -- TODO: can we delete ticket_status_counter and unique_status_counter?
2 |
3 | with ticket_status_history as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__updates') }}
7 | where field_name = 'status'
8 |
9 | )
10 |
11 | select
12 | source_relation,
13 | ticket_id,
14 | valid_starting_at,
15 | valid_ending_at,
16 | {{ dbt.datediff(
17 | 'valid_starting_at',
18 | "coalesce(valid_ending_at, " ~ dbt.current_timestamp() ~ ")",
19 | 'minute') }} as status_duration_calendar_minutes,
20 | value as status,
21 | -- MIGHT BE ABLE TO DELETE ROWS BELOW
22 | row_number() over (partition by source_relation, ticket_id order by valid_starting_at) as ticket_status_counter,
23 | row_number() over (partition by source_relation, ticket_id, value order by valid_starting_at) as unique_status_counter
24 |
25 | from ticket_status_history
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__ticket_schedules.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True)) }}
2 |
3 | with ticket as (
4 |
5 | select *
6 | from {{ ref('stg_zendesk__ticket') }}
7 |
8 | ), ticket_schedule as (
9 |
10 | select *
11 | from {{ ref('stg_zendesk__ticket_schedule') }}
12 |
13 | ), schedule as (
14 |
15 | select *
16 | from {{ ref('stg_zendesk__schedule') }}
17 |
18 | ), default_schedules as (
19 | -- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.
20 | -- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.
21 |
22 | -- This portion of the query creates ticket_schedules for these "default" schedules, as the ticket_schedule table only includes
23 | -- trigger schedules
24 | select
25 | schedule_id,
26 | source_relation
27 | from (
28 |
29 | select
30 | schedule_id,
31 | source_relation,
32 | row_number() over (partition by source_relation order by created_at) = 1 as is_default_schedule
33 | from schedule
34 |
35 | ) as order_schedules
36 | where is_default_schedule
37 |
38 | ), default_schedule_events as (
39 |
40 | select
41 | ticket.ticket_id,
42 | ticket.source_relation,
43 | ticket.created_at as schedule_created_at,
44 | default_schedules.schedule_id
45 | from ticket
46 | join default_schedules
47 | on ticket.source_relation = default_schedules.source_relation
48 | left join ticket_schedule as first_schedule
49 | on first_schedule.ticket_id = ticket.ticket_id
50 | and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at
51 | and first_schedule.created_at >= ticket.created_at
52 | and first_schedule.source_relation = ticket.source_relation
53 | where first_schedule.ticket_id is null
54 |
55 | ), schedule_events as (
56 |
57 | select
58 | *
59 | from default_schedule_events
60 |
61 | union all
62 |
63 | select
64 | ticket_id,
65 | source_relation,
66 | created_at as schedule_created_at,
67 | schedule_id
68 | from ticket_schedule
69 |
70 | ), ticket_schedules as (
71 |
72 | select
73 | ticket_id,
74 | source_relation,
75 | schedule_id,
76 | schedule_created_at,
77 | coalesce(lead(schedule_created_at) over (partition by source_relation, ticket_id order by schedule_created_at)
78 | , {{ fivetran_utils.timestamp_add("hour", 1000, "" ~ dbt.current_timestamp() ~ "") }} ) as schedule_invalidated_at
79 | from schedule_events
80 |
81 | )
82 | select
83 | *
84 | from ticket_schedules
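A minimal sketch of the is_default_schedule flag above, using hypothetical literal rows (not part of the package): the earliest-created schedule per source_relation is treated as the default and gets attached to tickets that have no trigger schedule at creation time.

    with schedule as (
        select 'src_a' as source_relation, 100 as schedule_id, timestamp '2020-01-01 00:00:00' as created_at
        union all
        select 'src_a', 200, timestamp '2021-06-01 00:00:00'
    )
    select
        schedule_id,
        source_relation,
        row_number() over (partition by source_relation order by created_at) = 1 as is_default_schedule
    from schedule
    -- schedule_id 100 (earliest created_at) is flagged true; 200 is flagged false.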
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__updates.sql:
--------------------------------------------------------------------------------
1 | with ticket_history as (
2 | select *
3 | from {{ ref('stg_zendesk__ticket_field_history') }}
4 |
5 | ), ticket_comment as (
6 | select *
7 | from {{ ref('stg_zendesk__ticket_comment') }}
8 |
9 | ), tickets as (
10 | select *
11 | from {{ ref('stg_zendesk__ticket') }}
12 |
13 | {% if var('using_ticket_chat', False) %}
14 | ), ticket_chat as (
15 |
16 | select *
17 | from {{ ref('stg_zendesk__ticket_chat') }}
18 |
19 | ), ticket_chat_event as (
20 |
21 | select *
22 | from {{ ref('stg_zendesk__ticket_chat_event') }}
23 | where lower(type) = 'chatmessage'
24 |
25 | ), ticket_chat_join as (
26 |
27 | select
28 | ticket_chat_event.*,
29 | ticket_chat.ticket_id
30 |
31 | from ticket_chat_event
32 | inner join ticket_chat
33 | on ticket_chat_event.chat_id = ticket_chat.chat_id
34 | and ticket_chat_event.source_relation = ticket_chat.source_relation
35 | {% endif %}
36 |
37 | ), updates_union as (
38 | select
39 | source_relation,
40 | ticket_id,
41 | field_name,
42 | value,
43 | null as is_public,
44 | user_id,
45 | valid_starting_at,
46 | valid_ending_at
47 | from ticket_history
48 |
49 | union all
50 |
51 | select
52 | source_relation,
53 | ticket_id,
54 | {#
55 | We want to be able to differentiate between ticket_comment and ticket_chat comments in the next CTE
56 | This is necessary because ticket_comment will batch together individual chat messages to the conversation level (in 1 record).
57 | We want to remove these aggregate conversations in favor of the individual messages
58 | #}
59 | cast('comment - not chat' as {{ dbt.type_string() }}) as field_name,
60 | body as value,
61 | is_public,
62 | user_id,
63 | created_at as valid_starting_at,
64 | lead(created_at) over (partition by source_relation, ticket_id order by created_at) as valid_ending_at
65 | from ticket_comment
66 |
67 | {% if var('using_ticket_chat', False) %}
68 | union all
69 |
70 | select
71 | source_relation,
72 | ticket_id,
73 | {#
74 | We want to be able to differentiate between ticket_comment and ticket_chat comments in the next CTE
75 | This is necessary because ticket_comment will batch together individual chat messages to the conversation level (in 1 record).
76 | We want to remove these aggregate conversations in favor of the individual messages
77 | #}
78 | cast('comment - chat' as {{ dbt.type_string() }}) as field_name,
79 | message as value,
80 | true as is_public,
81 | actor_id as user_id,
82 | created_at as valid_starting_at,
83 | lead(created_at) over (partition by source_relation, ticket_id order by created_at) as valid_ending_at
84 | from ticket_chat_join
85 | {% endif %}
86 |
87 | ), final as (
88 | select
89 | updates_union.source_relation,
90 | updates_union.ticket_id,
91 | {# Now group comments back together since the conversation batches are filtered out in the where clause #}
92 | case
93 | when updates_union.field_name in ('comment - chat', 'comment - not chat') then 'comment'
94 | else updates_union.field_name end as field_name,
95 | updates_union.value,
96 | updates_union.is_public,
97 | updates_union.user_id,
98 | updates_union.valid_starting_at,
99 | updates_union.valid_ending_at,
100 | tickets.created_at as ticket_created_date
101 | from updates_union
102 |
103 | left join tickets
104 | on tickets.ticket_id = updates_union.ticket_id
105 | and tickets.source_relation = updates_union.source_relation
106 |
107 | {#
108 | What's excluded: The chat conversation batches from ticket_comment. These are marked as `comment - not chat` and are associated with tickets from `chat` or `native_messaging` channels
109 | What's included:
110 | - Individual chat messages from ticket_chat_event. These are marked as `comment - chat`
111 | - True comments from ticket_comment. We know a record is a true ticket_comment if the ticket is NOT from `chat` or `native_messaging` channels
112 | #}
113 | where not (updates_union.field_name = 'comment - not chat' and lower(coalesce(tickets.created_channel, '')) in ('chat', 'native_messaging'))
114 |
115 | )
116 |
117 | select *
118 | from final
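As a small illustration of the final where clause above (hypothetical rows, not model output): batched chat conversations from ticket_comment are dropped, while true comments and individual chat messages are kept.

    with example_updates as (
        select 'comment - not chat' as field_name, 'chat' as created_channel            -- batched chat conversation: excluded
        union all
        select 'comment - not chat', 'web'                                              -- true ticket comment: kept
        union all
        select 'comment - chat', 'native_messaging'                                     -- individual chat message: kept
    )
    select *
    from example_updates
    where not (field_name = 'comment - not chat'
        and lower(coalesce(created_channel, '')) in ('chat', 'native_messaging'))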
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__user_aggregates.sql:
--------------------------------------------------------------------------------
1 | with users as (
2 | select *
3 | from {{ ref('stg_zendesk__user') }}
4 |
5 |     -- If you use user tags, this will be included; if not, it will be ignored.
6 | {% if var('using_user_tags', True) %}
7 | ), user_tags as (
8 |
9 | select *
10 | from {{ ref('stg_zendesk__user_tag') }}
11 |
12 | ), user_tag_aggregate as (
13 | select
14 | user_tags.user_id,
15 | source_relation,
16 | {{ fivetran_utils.string_agg( 'user_tags.tags', "', '" )}} as user_tags
17 | from user_tags
18 | group by 1, 2
19 |
20 | {% endif %}
21 |
22 | ), final as (
23 | select
24 | users.*,
25 | users.role in ('agent','admin') as is_internal_role
26 |
27 |         -- If you use user tags, this will be included; if not, it will be ignored.
28 | {% if var('using_user_tags', True) %}
29 | , user_tag_aggregate.user_tags
30 | {% endif %}
31 | from users
32 |
33 |     -- If you use user tags, this will be included; if not, it will be ignored.
34 | {% if var('using_user_tags', True) %}
35 | left join user_tag_aggregate
36 | on users.user_id = user_tag_aggregate.user_id
37 | and users.source_relation = user_tag_aggregate.source_relation
38 | {% endif %}
39 | )
40 |
41 | select *
42 | from final
--------------------------------------------------------------------------------
/models/intermediate/int_zendesk__user_role_history.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_user_role_histories', True) and var('using_audit_log', False)) }}
2 |
3 | with audit_logs as (
4 | select
5 | source_relation,
6 | source_id as user_id,
7 | source_label as user_name,
8 | created_at,
9 | lower(change_description) as change_description
10 | from {{ var('audit_log') }}
11 | where
12 | lower(change_description) like '%support role changed from%'
13 | and source_type = 'user'
14 |
15 | ), users as (
16 | select *
17 | from {{ var('user') }}
18 |
19 | -- Split the change_description into "from" and "to" strings
20 | ), split_to_from as (
21 | select
22 | source_relation,
23 | user_id,
24 | user_name,
25 | created_at,
26 | change_description,
27 | -- extract and split change description for the support role
28 | trim({{ dbt.split_part(zendesk.extract_support_role_changes('change_description'), "' to '", 1) }}) as from_role,
29 | trim({{ dbt.split_part(zendesk.extract_support_role_changes('change_description'), "' to '", 2) }}) as to_role,
30 |
31 |         -- Identify the first change record so we know the user's beginning role
32 | min(created_at) over (partition by source_relation, user_id) as min_created_at_per_user
33 | from audit_logs
34 |
35 | -- Isolates the first "from" role as the base
36 | ), first_roles as (
37 | select
38 | source_relation,
39 | user_id,
40 | user_name,
41 | change_description,
42 | cast(null as {{ dbt.type_timestamp() }}) as valid_starting_at, --fill in with created_at of user later
43 |         created_at as valid_ending_at, -- this is the created_at of the audit log entry
44 | from_role as role
45 | from split_to_from
46 | where created_at = min_created_at_per_user
47 |
48 | -- Captures all subsequent "to" roles
49 | ), role_changes as (
50 | select
51 | source_relation,
52 | user_id,
53 | user_name,
54 | change_description,
55 | created_at as valid_starting_at,
56 | lead(created_at) over (partition by source_relation, user_id order by created_at asc) as valid_ending_at,
57 | to_role as role
58 | from split_to_from
59 |
60 | ), unioned as (
61 | select *
62 | from first_roles
63 |
64 | union all
65 |
66 | select *
67 | from role_changes
68 |
69 | ), users_joined as (
70 | -- create history records for users with no changes
71 | select
72 | users.user_id,
73 | users.source_relation,
74 | lower(coalesce(unioned.role, users.role)) as role,
75 | coalesce(unioned.valid_starting_at, users.created_at, cast('1970-01-01' as {{ dbt.type_timestamp() }})) as valid_starting_at,
76 | coalesce(unioned.valid_ending_at, {{ dbt.current_timestamp() }}) as valid_ending_at,
77 | unioned.change_description,
78 | -- include these in case they're needed for the internal_user_criteria
79 | users.external_id,
80 | users.email,
81 | users.last_login_at,
82 | users.created_at,
83 | users.updated_at,
84 | users.name,
85 | users.organization_id,
86 | users.phone,
87 | users.ticket_restriction,
88 | users.time_zone,
89 | users.locale,
90 | users.is_active,
91 | users.is_suspended
92 | from users
93 | left join unioned
94 | on users.user_id = unioned.user_id
95 | and users.source_relation = unioned.source_relation
96 |
97 | ), final as (
98 | select
99 | user_id,
100 | source_relation,
101 | role,
102 | valid_starting_at,
103 | valid_ending_at,
104 | change_description,
105 |
106 | {% if var('internal_user_criteria', false) -%} -- apply the filter to historical roles if provided
107 | role in ('admin', 'agent') or {{ var('internal_user_criteria', false) }} as is_internal_role
108 | {% else -%}
109 | role not in ('not set', 'end-user') as is_internal_role
110 | {% endif -%}
111 | from users_joined
112 | )
113 |
114 | select *
115 | from final
--------------------------------------------------------------------------------
/models/reply_times/int_zendesk__comments_enriched.sql:
--------------------------------------------------------------------------------
1 | {% set using_user_role_histories = var('using_user_role_histories', True) and var('using_audit_log', False) %}
2 |
3 | with ticket_comment as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__updates') }}
7 | where field_name = 'comment'
8 |
9 | ), users as (
10 |
11 | select *
12 | from {{ ref('int_zendesk__user_role_history' if using_user_role_histories else 'int_zendesk__user_aggregates') }}
13 |
14 | ), joined as (
15 |
16 | select
17 |
18 | ticket_comment.*,
19 | case when commenter.role in ('not set', 'end-user') then 'external_comment'
20 | when commenter.is_internal_role then 'internal_comment'
21 | else 'unknown'
22 | end as commenter_role
23 |
24 | from ticket_comment
25 | join users as commenter
26 | on commenter.user_id = ticket_comment.user_id
27 | and commenter.source_relation = ticket_comment.source_relation
28 |
29 | {% if using_user_role_histories %}
30 | and ticket_comment.valid_starting_at >= commenter.valid_starting_at
31 | and ticket_comment.valid_starting_at < commenter.valid_ending_at
32 | {% endif %}
33 |
34 | ), add_previous_commenter_role as (
35 | /*
36 |     In int_zendesk__ticket_reply_times we only focus on reply times between public comments.
37 |     The union below explicitly identifies the previous commenter role of public and non-public comments.
38 | */
39 | select
40 | *,
41 | coalesce(
42 | lag(commenter_role) over (partition by source_relation, ticket_id order by valid_starting_at, commenter_role)
43 | , 'first_comment')
44 | as previous_commenter_role
45 | from joined
46 | where is_public
47 |
48 | union all
49 |
50 | select
51 | *,
52 | 'non_public_comment' as previous_commenter_role
53 | from joined
54 | where not is_public
55 | )
56 |
57 | select
58 | *,
59 | first_value(valid_starting_at) over (partition by source_relation, ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,
60 | sum(case when not is_public then 1 else 0 end) over (partition by source_relation, ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count
61 | from add_previous_commenter_role
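A minimal sketch of the previous_commenter_role logic above, with a hypothetical public-comment sequence for one ticket (literal rows, not model output):

    with public_comments as (
        select 1 as comment_order, 'external_comment' as commenter_role
        union all select 2, 'internal_comment'
        union all select 3, 'external_comment'
    )
    select
        comment_order,
        commenter_role,
        coalesce(lag(commenter_role) over (order by comment_order), 'first_comment') as previous_commenter_role
    from public_comments
    -- yields (1, external_comment, first_comment), (2, internal_comment, external_comment), (3, external_comment, internal_comment)

Downstream, int_zendesk__ticket_reply_times uses the 'first_comment' marker and these role transitions to decide which comments start a reply-time clock.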
--------------------------------------------------------------------------------
/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True)) }}
2 |
3 | with ticket_reply_times as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__ticket_reply_times') }}
7 |
8 | ), ticket_schedules as (
9 |
10 | select
11 | *
12 | from {{ ref('int_zendesk__ticket_schedules') }}
13 |
14 | ), schedule as (
15 |
16 | select *
17 | from {{ ref('int_zendesk__schedule_spine') }}
18 |
19 | ), first_reply_time as (
20 |
21 | select
22 | source_relation,
23 | ticket_id,
24 | end_user_comment_created_at,
25 | agent_responded_at
26 |
27 | from ticket_reply_times
28 | where is_first_comment
29 |
30 | ), ticket_first_reply_time as (
31 |
32 | select
33 | first_reply_time.source_relation,
34 | first_reply_time.ticket_id,
35 | ticket_schedules.schedule_created_at,
36 | ticket_schedules.schedule_invalidated_at,
37 | ticket_schedules.schedule_id,
38 |
39 |         -- bringing this in to determine which schedule (Daylight Savings vs Standard Time) to use
40 | min(first_reply_time.agent_responded_at) as agent_responded_at,
41 |
42 | ({{ dbt.datediff(
43 | "cast(" ~ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') ~ "as " ~ dbt.type_timestamp() ~ ")",
44 | "cast(ticket_schedules.schedule_created_at as " ~ dbt.type_timestamp() ~ ")",
45 | 'second') }} /60
46 | ) as start_time_in_minutes_from_week,
47 | greatest(0,
48 | (
49 | {{ dbt.datediff(
50 | 'ticket_schedules.schedule_created_at',
51 | 'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',
52 | 'second') }}/60
53 | )) as raw_delta_in_minutes,
54 | {{ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') }} as start_week_date
55 |
56 | from first_reply_time
57 | join ticket_schedules
58 | on first_reply_time.ticket_id = ticket_schedules.ticket_id
59 | and first_reply_time.source_relation = ticket_schedules.source_relation
60 | {{ dbt_utils.group_by(n=5) }}
61 |
62 | ), weeks as (
63 |
64 | {{ dbt_utils.generate_series(52) }}
65 |
66 | ), weeks_cross_ticket_first_reply as (
67 | -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks
68 | select
69 |
70 | ticket_first_reply_time.*,
71 | cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number
72 |
73 | from ticket_first_reply_time
74 | cross join weeks
75 | where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1
76 |
77 | ), weekly_periods as (
78 |
79 | select
80 | weeks_cross_ticket_first_reply.*,
81 | -- for each week, at what minute do we start counting?
82 | cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,
83 | -- for each week, at what minute do we stop counting?
84 | cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time
85 | from weeks_cross_ticket_first_reply
86 |
87 | ), intercepted_periods as (
88 |
89 | select
90 | weekly_periods.source_relation,
91 | ticket_id,
92 | week_number,
93 | weekly_periods.schedule_id,
94 | ticket_week_start_time,
95 | ticket_week_end_time,
96 | schedule.start_time_utc as schedule_start_time,
97 | schedule.end_time_utc as schedule_end_time,
98 | least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes
99 | from weekly_periods
100 | join schedule on ticket_week_start_time <= schedule.end_time_utc
101 | and ticket_week_end_time >= schedule.start_time_utc
102 | and weekly_periods.schedule_id = schedule.schedule_id
103 | and weekly_periods.source_relation = schedule.source_relation
104 | -- this chooses the Daylight Savings Time or Standard Time version of the schedule
105 | -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week
106 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)
107 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)
108 |
109 | )
110 |
111 | select
112 | ticket_id,
113 | source_relation,
114 | sum(scheduled_minutes) as first_reply_time_business_minutes
115 | from intercepted_periods
116 | group by 1, 2
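To make the week arithmetic above concrete, here is a worked sketch with hypothetical numbers (not package output): a schedule applied on a Wednesday at 09:00 sits 3*1440 + 540 = 4,860 minutes into its week, and a first reply 12,000 minutes later spills into a second week, so the interval is split into one counting window per week before being intersected with the schedule's start_time_utc/end_time_utc blocks.

    select
        (3 * 1440) + 540 as start_time_in_minutes_from_week,            -- hypothetical: schedule applied Wednesday 09:00 => 4,860
        12000 as raw_delta_in_minutes,                                   -- hypothetical: minutes until the first agent reply
        greatest(0, 4860 - (0 * 10080)) as week_0_start_minute,          -- 4,860
        least(4860 + 12000 - (0 * 10080), 10080) as week_0_end_minute,   -- capped at 10,080 (end of week 0)
        greatest(0, 4860 - (1 * 10080)) as week_1_start_minute,          -- 0
        least(4860 + 12000 - (1 * 10080), 10080) as week_1_end_minute    -- 6,780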
--------------------------------------------------------------------------------
/models/reply_times/int_zendesk__ticket_reply_times.sql:
--------------------------------------------------------------------------------
1 | with ticket_public_comments as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__comments_enriched') }}
5 | where is_public
6 |
7 | ), end_user_comments as (
8 |
9 | select
10 | source_relation,
11 | ticket_id,
12 | valid_starting_at as end_user_comment_created_at,
13 | ticket_created_date,
14 | commenter_role,
15 | previous_internal_comment_count,
16 | previous_commenter_role = 'first_comment' as is_first_comment
17 | from ticket_public_comments
18 | where (commenter_role = 'external_comment'
19 | and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments
20 | or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies
21 |
22 | ), reply_timestamps as (
23 |
24 | select
25 | end_user_comments.source_relation,
26 | end_user_comments.ticket_id,
27 |         -- If the commenter was internal, made the first comment, and had previous non-public internal comments, then we want the ticket created date to serve as the end user comment created date
28 |         -- Otherwise we want to use the end user comment created date
29 | case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,
30 | end_user_comments.is_first_comment,
31 | min(case when is_first_comment
32 | and end_user_comments.commenter_role != 'external_comment'
33 | and (end_user_comments.previous_internal_comment_count > 0)
34 | then end_user_comments.end_user_comment_created_at
35 | else agent_comments.valid_starting_at end) as agent_responded_at
36 | from end_user_comments
37 | left join ticket_public_comments as agent_comments
38 | on agent_comments.ticket_id = end_user_comments.ticket_id
39 | and agent_comments.commenter_role = 'internal_comment'
40 | and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at
41 | and end_user_comments.source_relation = agent_comments.source_relation
42 | {{ dbt_utils.group_by(n=4) }}
43 |
44 | )
45 |
46 | select
47 | *,
48 | ({{ dbt.datediff(
49 | 'end_user_comment_created_at',
50 | 'agent_responded_at',
51 | 'second') }} / 60) as reply_time_calendar_minutes
52 | from reply_timestamps
53 | order by 1,2
--------------------------------------------------------------------------------
/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql:
--------------------------------------------------------------------------------
1 | with ticket as (
2 |
3 | select *
4 | from {{ ref('stg_zendesk__ticket') }}
5 |
6 | ), ticket_reply_times as (
7 |
8 | select *
9 | from {{ ref('int_zendesk__ticket_reply_times') }}
10 |
11 | )
12 |
13 | select
14 | ticket.source_relation,
15 | ticket.ticket_id,
16 | sum(case when is_first_comment then reply_time_calendar_minutes
17 | else null end) as first_reply_time_calendar_minutes,
18 |     sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes -- total combined time the customer waits for internal responses
19 |
20 | from ticket
21 | left join ticket_reply_times
22 | on ticket.ticket_id = ticket_reply_times.ticket_id
23 | and ticket.source_relation = ticket_reply_times.source_relation
24 |
25 | group by 1, 2
--------------------------------------------------------------------------------
/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True)) }}
2 |
3 | with ticket_resolution_times_calendar as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}
7 |
8 | ), ticket_schedules as (
9 |
10 | select *
11 | from {{ ref('int_zendesk__ticket_schedules') }}
12 |
13 | ), schedule as (
14 |
15 | select *
16 | from {{ ref('int_zendesk__schedule_spine') }}
17 |
18 | ), ticket_first_resolution_time as (
19 |
20 | select
21 | ticket_resolution_times_calendar.source_relation,
22 | ticket_resolution_times_calendar.ticket_id,
23 | ticket_schedules.schedule_created_at,
24 | ticket_schedules.schedule_invalidated_at,
25 | ticket_schedules.schedule_id,
26 |
27 |         -- bringing this in to determine which schedule (Daylight Savings vs Standard Time) to use
28 | min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,
29 |
30 | ({{ dbt.datediff(
31 | "cast(" ~ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') ~ "as " ~ dbt.type_timestamp() ~ ")",
32 | "cast(ticket_schedules.schedule_created_at as " ~ dbt.type_timestamp() ~ ")",
33 | 'second') }} /60
34 | ) as start_time_in_minutes_from_week,
35 | greatest(0,
36 | (
37 | {{ dbt.datediff(
38 | 'ticket_schedules.schedule_created_at',
39 | 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',
40 | 'second') }}/60
41 | )) as raw_delta_in_minutes,
42 | {{ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') }} as start_week_date
43 |
44 | from ticket_resolution_times_calendar
45 | join ticket_schedules
46 | on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id
47 | and ticket_resolution_times_calendar.source_relation = ticket_schedules.source_relation
48 | {{ dbt_utils.group_by(n=5) }}
49 |
50 | ), weeks as (
51 |
52 | {{ dbt_utils.generate_series(52) }}
53 |
54 | ), weeks_cross_ticket_first_resolution_time as (
55 | -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks
56 | select
57 |
58 | ticket_first_resolution_time.*,
59 | cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number
60 |
61 | from ticket_first_resolution_time
62 | cross join weeks
63 | where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1
64 |
65 |
66 | ), weekly_periods as (
67 |
68 | select
69 |
70 | weeks_cross_ticket_first_resolution_time.*,
71 | cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,
72 | cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time
73 |
74 | from weeks_cross_ticket_first_resolution_time
75 |
76 | ), intercepted_periods as (
77 |
78 | select
79 | weekly_periods.source_relation,
80 | ticket_id,
81 | week_number,
82 | weekly_periods.schedule_id,
83 | ticket_week_start_time,
84 | ticket_week_end_time,
85 | schedule.start_time_utc as schedule_start_time,
86 | schedule.end_time_utc as schedule_end_time,
87 | least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes
88 | from weekly_periods
89 | join schedule
90 | on ticket_week_start_time <= schedule.end_time_utc
91 | and ticket_week_end_time >= schedule.start_time_utc
92 | and weekly_periods.schedule_id = schedule.schedule_id
93 | and weekly_periods.source_relation = schedule.source_relation
94 | -- this chooses the Daylight Savings Time or Standard Time version of the schedule
95 | -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week
96 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)
97 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)
98 |
99 | )
100 |
101 | select
102 | source_relation,
103 | ticket_id,
104 | sum(scheduled_minutes) as first_resolution_business_minutes
105 | from intercepted_periods
106 | group by 1, 2
--------------------------------------------------------------------------------
/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True)) }}
2 |
3 | with ticket_resolution_times_calendar as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}
7 |
8 | ), ticket_schedules as (
9 |
10 | select *
11 | from {{ ref('int_zendesk__ticket_schedules') }}
12 |
13 | ), schedule as (
14 |
15 | select *
16 | from {{ ref('int_zendesk__schedule_spine') }}
17 |
18 | ), ticket_full_resolution_time as (
19 |
20 | select
21 | ticket_resolution_times_calendar.source_relation,
22 | ticket_resolution_times_calendar.ticket_id,
23 | ticket_schedules.schedule_created_at,
24 | ticket_schedules.schedule_invalidated_at,
25 | ticket_schedules.schedule_id,
26 |
27 |         -- bringing this in to determine which schedule (Daylight Savings vs Standard Time) to use
28 | min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,
29 | ({{ dbt.datediff(
30 | "cast(" ~ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') ~ "as " ~ dbt.type_timestamp() ~ ")",
31 | "cast(ticket_schedules.schedule_created_at as " ~ dbt.type_timestamp() ~ ")",
32 | 'second') }} /60
33 | ) as start_time_in_minutes_from_week,
34 | greatest(0,
35 | (
36 | {{ dbt.datediff(
37 | 'ticket_schedules.schedule_created_at',
38 | 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',
39 | 'second') }}/60
40 | )) as raw_delta_in_minutes,
41 | {{ zendesk.fivetran_week_start('ticket_schedules.schedule_created_at') }} as start_week_date
42 |
43 | from ticket_resolution_times_calendar
44 | join ticket_schedules
45 | on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id
46 | and ticket_resolution_times_calendar.source_relation = ticket_schedules.source_relation
47 | {{ dbt_utils.group_by(n=5) }}
48 |
49 | ), weeks as (
50 |
51 | {{ dbt_utils.generate_series(52) }}
52 |
53 | ), weeks_cross_ticket_full_resolution_time as (
54 | -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks
55 | select
56 |
57 | ticket_full_resolution_time.*,
58 | cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number
59 |
60 | from ticket_full_resolution_time
61 | cross join weeks
62 | where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1
63 |
64 | ), weekly_periods as (
65 |
66 | select
67 |
68 | weeks_cross_ticket_full_resolution_time.*,
69 | cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,
70 | cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time
71 |
72 | from weeks_cross_ticket_full_resolution_time
73 |
74 | ), intercepted_periods as (
75 |
76 | select
77 | weekly_periods.source_relation,
78 | ticket_id,
79 | week_number,
80 | weekly_periods.schedule_id,
81 | ticket_week_start_time,
82 | ticket_week_end_time,
83 | schedule.start_time_utc as schedule_start_time,
84 | schedule.end_time_utc as schedule_end_time,
85 | least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes
86 | from weekly_periods
87 | join schedule on ticket_week_start_time <= schedule.end_time_utc
88 | and ticket_week_end_time >= schedule.start_time_utc
89 | and weekly_periods.schedule_id = schedule.schedule_id
90 | and weekly_periods.source_relation = schedule.source_relation
91 | -- this chooses the Daylight Savings Time or Standard Time version of the schedule
92 | -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week
93 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)
94 | and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)
95 |
96 | )
97 |
98 | select
99 | source_relation,
100 | ticket_id,
101 | sum(scheduled_minutes) as full_resolution_business_minutes
102 | from intercepted_periods
103 | group by 1, 2
--------------------------------------------------------------------------------
/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql:
--------------------------------------------------------------------------------
1 | with historical_solved_status as (
2 |
3 | select
4 | *,
5 | row_number() over (partition by source_relation, ticket_id order by valid_starting_at asc) as row_num
6 | from {{ ref('int_zendesk__ticket_historical_status') }}
7 | where status in ('solved', 'closed') -- Ideally we are looking for solved timestamps, but Zendesk sometimes (very infrequently) closes tickets without marking them as solved
8 |
9 | ), ticket as (
10 |
11 | select *
12 | from {{ ref('stg_zendesk__ticket') }}
13 |
14 | ), ticket_historical_assignee as (
15 |
16 | select *
17 | from {{ ref('int_zendesk__ticket_historical_assignee') }}
18 |
19 | ), ticket_historical_group as (
20 |
21 | select *
22 | from {{ ref('int_zendesk__ticket_historical_group') }}
23 |
24 | ), solved_times as (
25 |
26 | select
27 | source_relation,
28 | ticket_id,
29 | coalesce(min(case when status = 'solved' then valid_starting_at end), min(case when status = 'closed' then valid_starting_at end)) as first_solved_at,
30 | coalesce(max(case when status = 'solved' then valid_starting_at end), max(case when status = 'closed' then valid_starting_at end)) as last_solved_at,
31 | coalesce(sum(case when status = 'solved' then 1 else 0 end), sum(case when status = 'closed' then 1 else 0 end)) as solved_count
32 |
33 | from historical_solved_status
34 | group by 1, 2
35 |
36 | )
37 |
38 | select
39 | ticket.source_relation,
40 | ticket.ticket_id,
41 | ticket.created_at,
42 | solved_times.first_solved_at,
43 | solved_times.last_solved_at,
44 | ticket_historical_assignee.unique_assignee_count,
45 | ticket_historical_assignee.assignee_stations_count,
46 | ticket_historical_group.group_stations_count,
47 | ticket_historical_assignee.first_assignee_id,
48 | ticket_historical_assignee.last_assignee_id,
49 | ticket_historical_assignee.first_agent_assignment_date,
50 | ticket_historical_assignee.last_agent_assignment_date,
51 | ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,
52 | solved_times.solved_count as total_resolutions,
53 | case when solved_times.solved_count <= 1
54 | then 0
55 | else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.
56 | end as count_reopens,
57 |
58 | {{ dbt.datediff(
59 | 'ticket_historical_assignee.first_agent_assignment_date',
60 | 'solved_times.last_solved_at',
61 | 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,
62 | {{ dbt.datediff(
63 | 'ticket_historical_assignee.last_agent_assignment_date',
64 | 'solved_times.last_solved_at',
65 | 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,
66 | {{ dbt.datediff(
67 | 'ticket.created_at',
68 | 'solved_times.first_solved_at',
69 | 'minute' ) }} as first_resolution_calendar_minutes,
70 | {{ dbt.datediff(
71 | 'ticket.created_at',
72 | 'solved_times.last_solved_at',
73 | 'minute') }} as final_resolution_calendar_minutes
74 |
75 | from ticket
76 |
77 | left join ticket_historical_assignee
78 | on ticket.ticket_id = ticket_historical_assignee.ticket_id
79 | and ticket.source_relation = ticket_historical_assignee.source_relation
80 |
81 | left join ticket_historical_group
82 | on ticket.ticket_id = ticket_historical_group.ticket_id
83 | and ticket.source_relation = ticket_historical_group.source_relation
84 |
85 | left join solved_times
86 | on ticket.ticket_id = solved_times.ticket_id
87 | and ticket.source_relation = solved_times.source_relation
88 |
89 |
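A small worked example of the solved_times logic above, with a hypothetical status history (literal rows, not model output): a ticket solved twice yields two resolutions and one reopen.

    with historical_solved_status as (
        select 1 as ticket_id, 'solved' as status, timestamp '2024-03-01 10:00:00' as valid_starting_at
        union all
        select 1, 'solved', timestamp '2024-03-03 16:00:00'
    )
    select
        ticket_id,
        min(valid_starting_at) as first_solved_at,  -- 2024-03-01 10:00
        max(valid_starting_at) as last_solved_at,   -- 2024-03-03 16:00
        count(*) as total_resolutions,              -- 2
        count(*) - 1 as count_reopens               -- 1: the first solve is not a reopen
    from historical_solved_status
    group by 1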
--------------------------------------------------------------------------------
/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql:
--------------------------------------------------------------------------------
1 | -- Calculate breach time for agent work time, calendar hours
2 | with agent_work_time_filtered_statuses as (
3 |
4 | select *
5 | from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}
6 | where not in_business_hours
7 |
8 | ), agent_work_time_calendar_minutes as (
9 |
10 | select
11 | *,
12 | {{ dbt.datediff(
13 | 'valid_starting_at',
14 | 'valid_ending_at',
15 | 'minute' )}} as calendar_minutes,
16 | sum({{ dbt.datediff(
17 | 'valid_starting_at',
18 | 'valid_ending_at',
19 | 'minute') }} )
20 | over (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes
21 | from agent_work_time_filtered_statuses
22 |
23 | ), agent_work_time_calendar_minutes_flagged as (
24 |
25 | select
26 | agent_work_time_calendar_minutes.*,
27 | target - running_total_calendar_minutes as remaining_target_minutes,
28 | case when (target - running_total_calendar_minutes) < 0
29 | and
30 | (lag(target - running_total_calendar_minutes) over
31 | (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at) >= 0
32 | or
33 | lag(target - running_total_calendar_minutes) over
34 | (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at) is null)
35 | then true else false end as is_breached_during_schedule
36 |
37 | from agent_work_time_calendar_minutes
38 |
39 | ), final as (
40 | select
41 | *,
42 | (remaining_target_minutes + calendar_minutes) as breach_minutes,
43 | {{ fivetran_utils.timestamp_add(
44 | 'minute',
45 | '(remaining_target_minutes + calendar_minutes)',
46 | 'valid_starting_at',
47 | ) }} as sla_breach_at
48 | from agent_work_time_calendar_minutes_flagged
49 |
50 | )
51 |
52 | select *
53 | from final
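-- Illustrative walk-through of the breach math above (hypothetical values, not part of the model):
-- suppose an agent work time SLA has target = 120 minutes and the ticket sits in 'open' from
-- 09:00 to 12:20 (calendar_minutes = 200, running_total_calendar_minutes = 200). Then
--   remaining_target_minutes = 120 - 200 = -80
--   is_breached_during_schedule = true (the prior running total, if any, was still >= 0 or null)
--   breach_minutes = -80 + 200 = 120
--   sla_breach_at  = 09:00 + 120 minutes = 11:00, i.e. the moment the target was exhausted.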
--------------------------------------------------------------------------------
/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql:
--------------------------------------------------------------------------------
1 | with agent_work_time_sla as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__sla_policy_applied') }}
5 | where metric = 'agent_work_time'
6 |
7 | ), ticket_historical_status as (
8 |
9 | select *
10 | from {{ ref('int_zendesk__ticket_historical_status') }}
11 |
12 | --This captures the statuses of the ticket while the agent work time sla was active for the ticket.
13 | ), agent_work_time_filtered_statuses as (
14 |
15 | select
16 | ticket_historical_status.source_relation,
17 | ticket_historical_status.ticket_id,
18 | greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,
19 | coalesce(
20 | ticket_historical_status.valid_ending_at,
21 | {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).
22 | ticket_historical_status.status as ticket_status,
23 | agent_work_time_sla.sla_applied_at,
24 | agent_work_time_sla.target,
25 | agent_work_time_sla.sla_policy_name,
26 | agent_work_time_sla.ticket_created_at,
27 | agent_work_time_sla.in_business_hours
28 | from ticket_historical_status
29 | join agent_work_time_sla
30 | on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id
31 | and ticket_historical_status.source_relation = agent_work_time_sla.source_relation
32 | where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as "agent work time"
33 | and sla_applied_at < valid_ending_at
34 |
35 | )
36 | select *
37 | from agent_work_time_filtered_statuses
--------------------------------------------------------------------------------
/models/sla_policy/int_zendesk__sla_policy_applied.sql:
--------------------------------------------------------------------------------
1 | -- step 1, figure out when sla was applied to tickets
2 |
3 | -- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-
4 | -- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If your company uses other SLA metrics and would like this
5 | -- package to support them, please reach out to the Fivetran team on Slack.
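-- For reference, ticket_field_history.value for these SLA metrics is a JSON payload that is assumed here
-- to look roughly like {"minutes": 30, "in_business_hours": true} (the exact shape depends on your Zendesk data);
-- the json_parse calls below pull out the target in minutes and the business-hours flag from that payload.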
6 |
7 | with ticket_field_history as (
8 |
9 | select *
10 | from {{ ref('int_zendesk__updates') }}
11 |
12 | ), sla_policy_name as (
13 |
14 | select
15 | *
16 | from {{ ref('int_zendesk__updates') }}
17 | where field_name = ('sla_policy')
18 |
19 | ), ticket as (
20 |
21 | select *
22 | from {{ ref('int_zendesk__ticket_aggregates') }}
23 |
24 | ), sla_policy_applied as (
25 |
26 | select
27 | ticket_field_history.source_relation,
28 | ticket_field_history.ticket_id,
29 | ticket.created_at as ticket_created_at,
30 | ticket_field_history.valid_starting_at,
31 | ticket.status as ticket_current_status,
32 | ticket_field_history.field_name as metric,
33 | case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.source_relation, ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,
34 | case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,
35 | cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,
36 | {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours
37 | from ticket_field_history
38 | join ticket
39 | on ticket.ticket_id = ticket_field_history.ticket_id
40 | and ticket.source_relation = ticket_field_history.source_relation
41 | where ticket_field_history.value is not null
42 | and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')
43 |
44 | ), final as (
45 | select
46 | sla_policy_applied.*,
47 | sla_policy_name.value as sla_policy_name
48 | from sla_policy_applied
49 | left join sla_policy_name
50 | on sla_policy_name.ticket_id = sla_policy_applied.ticket_id
51 | and sla_policy_name.source_relation = sla_policy_applied.source_relation
52 | and {{ dbt.date_trunc("second", "sla_policy_applied.valid_starting_at") }} >= {{ dbt.date_trunc("second", "sla_policy_name.valid_starting_at") }}
53 | and {{ dbt.date_trunc("second", "sla_policy_applied.valid_starting_at") }} < coalesce({{ dbt.date_trunc("second", "sla_policy_name.valid_ending_at") }}, {{ dbt.current_timestamp() }})
54 | where sla_policy_applied.latest_sla = 1
55 | )
56 |
57 | select *
58 | from final
--------------------------------------------------------------------------------
/models/sla_policy/reply_time/int_zendesk__commenter_reply_at.sql:
--------------------------------------------------------------------------------
1 | {% set using_user_role_histories = var('using_user_role_histories', True) and var('using_audit_log', False) %}
2 | with users as (
3 | select *
4 | from {{ ref('int_zendesk__user_role_history' if using_user_role_histories else 'int_zendesk__user_aggregates') }}
5 |
6 | ), ticket_updates as (
7 | select *
8 | from {{ ref('int_zendesk__updates') }}
9 |
10 | ), final as (
11 | select
12 | ticket_comment.source_relation,
13 | ticket_comment.ticket_id,
14 | ticket_comment.valid_starting_at as reply_at,
15 | commenter.role
16 | from ticket_updates as ticket_comment
17 |
18 | join users as commenter
19 | on ticket_comment.user_id = commenter.user_id
20 | and ticket_comment.source_relation = commenter.source_relation
21 | {% if using_user_role_histories %}
22 | and ticket_comment.valid_starting_at >= commenter.valid_starting_at
23 | and ticket_comment.valid_starting_at < commenter.valid_ending_at
24 | {% endif %}
25 |
26 | where field_name = 'comment'
27 | and ticket_comment.is_public
28 | and commenter.is_internal_role
29 | )
30 |
31 | select *
32 | from final
--------------------------------------------------------------------------------
/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql:
--------------------------------------------------------------------------------
1 | --REPLY TIME SLA
2 | -- step 2, figure out when the SLA will breach for SLAs in calendar hours. The calculation is relatively straightforward.
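-- Illustrative example (hypothetical values): a first_reply_time SLA with target = 60 applied at
-- 2024-01-01 10:00 simply breaches at 2024-01-01 11:00, i.e. sla_applied_at plus target minutes.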
3 |
4 | with sla_policy_applied as (
5 |
6 | select *
7 | from {{ ref('int_zendesk__sla_policy_applied') }}
8 |
9 | ), final as (
10 | select
11 | *,
12 | {{ fivetran_utils.timestamp_add(
13 | "minute",
14 | "cast(target as " ~ dbt.type_int() ~ " )",
15 | "sla_applied_at" ) }} as sla_breach_at
16 | from sla_policy_applied
17 | where not in_business_hours
18 | and metric in ('next_reply_time', 'first_reply_time')
19 |
20 | )
21 |
22 | select *
23 | from final
24 |
--------------------------------------------------------------------------------
/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql:
--------------------------------------------------------------------------------
1 | -- Calculate breach time for requester wait time, calendar hours
2 | with requester_wait_time_filtered_statuses as (
3 |
4 | select *
5 | from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}
6 | where not in_business_hours
7 |
8 | ), requester_wait_time_calendar_minutes as (
9 |
10 | select
11 | *,
12 | {{ dbt.datediff(
13 | 'valid_starting_at',
14 | 'valid_ending_at',
15 | 'minute' )}} as calendar_minutes,
16 | sum({{ dbt.datediff(
17 | 'valid_starting_at',
18 | 'valid_ending_at',
19 | 'minute') }} )
20 | over (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes
21 | from requester_wait_time_filtered_statuses
22 |
23 | ), requester_wait_time_calendar_minutes_flagged as (
24 |
25 | select
26 | requester_wait_time_calendar_minutes.*,
27 | target - running_total_calendar_minutes as remaining_target_minutes,
28 | case when (target - running_total_calendar_minutes) < 0
29 | and
30 | (lag(target - running_total_calendar_minutes) over
31 | (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at) >= 0
32 | or
33 | lag(target - running_total_calendar_minutes) over
34 | (partition by source_relation, ticket_id, sla_applied_at order by valid_starting_at) is null)
35 | then true else false end as is_breached_during_schedule
36 |
37 | from requester_wait_time_calendar_minutes
38 |
39 | ), final as (
40 | select
41 | *,
42 | (remaining_target_minutes + calendar_minutes) as breach_minutes,
43 | {{ fivetran_utils.timestamp_add(
44 | 'minute',
45 | '(remaining_target_minutes + calendar_minutes)',
46 | 'valid_starting_at',
47 | ) }} as sla_breach_at
48 | from requester_wait_time_calendar_minutes_flagged
49 |
50 | )
51 |
52 | select *
53 | from final
--------------------------------------------------------------------------------
/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql:
--------------------------------------------------------------------------------
1 | with requester_wait_time_sla as (
2 |
3 | select *
4 | from {{ ref('int_zendesk__sla_policy_applied') }}
5 | where metric = 'requester_wait_time'
6 |
7 | ), ticket_historical_status as (
8 |
9 | select *
10 | from {{ ref('int_zendesk__ticket_historical_status') }}
11 |
12 | --This captures the statuses of the ticket while the requester wait time sla was active for the ticket.
13 | ), requester_wait_time_filtered_statuses as (
14 |
15 | select
16 | ticket_historical_status.ticket_id,
17 | ticket_historical_status.source_relation,
18 | greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,
19 | coalesce(
20 | ticket_historical_status.valid_ending_at,
21 | {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).
22 | ticket_historical_status.status as ticket_status,
23 | requester_wait_time_sla.sla_applied_at,
24 | requester_wait_time_sla.target,
25 | requester_wait_time_sla.sla_policy_name,
26 | requester_wait_time_sla.ticket_created_at,
27 | requester_wait_time_sla.in_business_hours
28 | from ticket_historical_status
29 | join requester_wait_time_sla
30 | on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id
31 | and ticket_historical_status.source_relation = requester_wait_time_sla.source_relation
32 | where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as "requester wait time"
33 | and sla_applied_at < valid_ending_at
34 |
35 | )
36 | select *
37 | from requester_wait_time_filtered_statuses
--------------------------------------------------------------------------------
/models/ticket_history/int_zendesk__field_calendar_spine.sql:
--------------------------------------------------------------------------------
1 | {{
2 | config(
3 | materialized='incremental',
4 | partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],
5 | unique_key='ticket_day_id',
6 | incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',
7 | file_format='delta'
8 | )
9 | }}
10 |
11 | with calendar as (
12 |
13 | select *
14 | from {{ ref('int_zendesk__calendar_spine') }}
15 | {% if is_incremental() %}
16 | where date_day >= (select max(date_day) from {{ this }})
17 | {% endif %}
18 |
19 | ), ticket as (
20 |
21 | select
22 | *,
23 | -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings
24 | cast( {{ dbt.date_trunc('day', "case when status != 'closed' then " ~ dbt.current_timestamp() ~ " else updated_at end") }} as date) as open_until
25 | from {{ var('ticket') }}
26 |
27 | ), joined as (
28 |
29 | select
30 | ticket.source_relation,
31 | calendar.date_day,
32 | ticket.ticket_id
33 | from calendar
34 | inner join ticket
35 | on calendar.date_day >= cast(ticket.created_at as date)
36 | -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)
37 | and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day
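        -- Illustrative example (hypothetical values): with ticket_field_history_extension_months = 1,
        -- a ticket created 2024-01-10 and closed on 2024-01-15 still receives daily rows through 2024-02-15.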
38 |
39 | ), surrogate_key as (
40 |
41 | select
42 | *,
43 | {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id', 'source_relation']) }} as ticket_day_id
44 | from joined
45 |
46 | )
47 |
48 | select *
49 | from surrogate_key
--------------------------------------------------------------------------------
/models/ticket_history/int_zendesk__field_history_enriched.sql:
--------------------------------------------------------------------------------
1 | with ticket_field_history as (
2 |
3 | select *
4 | from {{ ref('stg_zendesk__ticket_field_history') }}
5 |
6 | ), updater_info as (
7 | select *
8 | from {{ ref('int_zendesk__updater_information') }}
9 |
10 | ), final as (
11 | select
12 | ticket_field_history.*
13 |
14 | {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.
15 | {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.
16 |
17 | --The below statements are needed to populate fields that Zendesk changes automatically based on user-defined triggers.
18 | {% if col in ['updater_is_active'] %}
19 | ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}
20 |
21 | {% elif col in ['updater_user_id','updater_organization_id'] %}
22 | ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}
23 |
24 | {% elif col in ['updater_last_login_at'] %}
25 | ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}
26 |
27 | {% else %}
28 | ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}
29 |
30 | {% endif %}
31 | {% endfor %}
32 | {% endif %}
33 |
34 | from ticket_field_history
35 |
36 | left join updater_info
37 | on ticket_field_history.user_id = updater_info.updater_user_id
38 | and ticket_field_history.source_relation = updater_info.source_relation
39 | )
40 | select *
41 | from final
42 |
--------------------------------------------------------------------------------
/models/ticket_history/int_zendesk__field_history_pivot.sql:
--------------------------------------------------------------------------------
1 | -- depends_on: {{ ref('stg_zendesk__ticket_field_history') }}
2 |
3 | {{
4 | config(
5 | materialized='incremental',
6 | partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],
7 | unique_key='ticket_day_id',
8 | incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',
9 | file_format='delta'
10 | )
11 | }}
12 |
13 | {% if execute and flags.WHICH in ('run', 'build') -%}
14 | {% set results = run_query('select distinct field_name from ' ~ var('field_history') ) %}
15 | {% set results_list = results.columns[0].values() %}
16 | {% endif -%}
17 |
18 | with field_history as (
19 |
20 | select
21 | source_relation,
22 | ticket_id,
23 | field_name,
24 | valid_ending_at,
25 | valid_starting_at
26 |
27 | --Only runs if the user passes updater fields through the final ticket field history model
28 | {% if var('ticket_field_history_updater_columns') %}
29 | ,
30 | {{ var('ticket_field_history_updater_columns') | join (", ")}}
31 |
32 | {% endif %}
33 |
34 | -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history
35 | ,case when value is null then 'is_null' else value end as value
36 |
37 | from {{ ref('int_zendesk__field_history_enriched') }}
38 | {% if is_incremental() %}
39 | where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})
40 | {% endif %}
41 |
42 | ), event_order as (
43 |
44 | select
45 | *,
46 | row_number() over (
47 | partition by source_relation, cast(valid_starting_at as date), ticket_id, field_name
48 | order by valid_starting_at desc
49 | ) as row_num
50 | from field_history
51 |
52 | ), filtered as (
53 |
54 | -- Find the last event that occurs on each day for each ticket
55 |
56 | select *
57 | from event_order
58 | where row_num = 1
59 |
60 | ), pivots as (
61 |
62 | -- For each column that is in both the ticket_field_history_columns variable and the field_history table,
63 | -- pivot out the value into its own column. This will feed the daily slowly changing dimension model.
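    -- Illustrative example (hypothetical values): with ticket_field_history_columns = ['status', 'priority'],
    -- rows like (ticket 1, 'status', 'open') and (ticket 1, 'priority', 'high') on the same day collapse into
    -- a single row per ticket and day: ticket_id = 1, date_day = ..., status = 'open', priority = 'high'.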
64 |
65 | select
66 | source_relation,
67 | ticket_id,
68 | cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day
69 |
70 | {% for col in results_list if col in var('ticket_field_history_columns') %}
71 | {% set col_xf = col|lower %}
72 | ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}
73 |
74 | --Only runs if the user passes updater fields through the final ticket field history model
75 | {% if var('ticket_field_history_updater_columns') %}
76 |
77 | {% for upd in var('ticket_field_history_updater_columns') %}
78 |
79 | {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.
80 |
81 | {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}
82 |
83 | ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}
84 |
85 | {% else %}
86 |
87 | ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}
88 |
89 | {% endif %}
90 | {% endfor %}
91 | {% endif %}
92 | {% endfor %}
93 |
94 | from filtered
95 | group by 1,2,3
96 |
97 | ), surrogate_key as (
98 |
99 | select
100 | *,
101 | {{ dbt_utils.generate_surrogate_key(['source_relation','ticket_id','date_day'])}} as ticket_day_id
102 | from pivots
103 |
104 | )
105 |
106 | select *
107 | from surrogate_key
108 |
--------------------------------------------------------------------------------
/models/ticket_history/int_zendesk__field_history_scd.sql:
--------------------------------------------------------------------------------
1 | -- model needs to materialize as a table to avoid erroneous null values
2 | {{ config( materialized='table') }}
3 |
4 | {% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}
5 |
6 | with change_data as (
7 |
8 | select *
9 | from {{ ref('int_zendesk__field_history_pivot') }}
10 |
11 | ), set_values as (
12 |
13 | -- each row of the pivoted table includes field values if that field was updated on that day
14 | -- we need to backfill to persist values that have been previously updated and are still valid
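    -- Illustrative example (hypothetical values) of the fill technique below: if status is 'open' on day 1,
    -- null on days 2-3, and 'solved' on day 4, the running sum() produces field partitions 1, 1, 1, 2;
    -- first_value() within each partition then backfills days 2-3 with 'open' while day 4 keeps 'solved'.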
15 | select
16 | source_relation,
17 | date_day as valid_from,
18 | ticket_id,
19 | ticket_day_id
20 |
21 | {% for col in ticket_columns if col.name|lower not in ['source_relation','date_day','ending_day','ticket_id','ticket_day_id'] %}
22 |
23 | ,{{ col.name }}
24 | ,sum(case when {{ col.name }} is null
25 | then 0
26 | else 1
27 | end) over (partition by source_relation order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition
28 | {% endfor %}
29 |
30 | from change_data
31 |
32 | ), fill_values as (
33 | select
34 | source_relation,
35 | valid_from,
36 | ticket_id,
37 | ticket_day_id
38 |
39 | {% for col in ticket_columns if col.name|lower not in ['source_relation','date_day','ending_day','ticket_id','ticket_day_id'] %}
40 |
41 | ,first_value( {{ col.name }} ) over (partition by source_relation, {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}
42 |
43 | {% endfor %}
44 | from set_values
45 | )
46 |
47 | select *
48 | from fill_values
--------------------------------------------------------------------------------
/models/ticket_history/int_zendesk__updater_information.sql:
--------------------------------------------------------------------------------
1 | with users as (
2 | select *
3 | from {{ ref('int_zendesk__user_aggregates') }}
4 |
5 | --If using organizations, this will be included; if not, it will be ignored.
6 | {% if var('using_organizations', True) %}
7 | ), organizations as (
8 | select *
9 | from {{ ref('int_zendesk__organization_aggregates') }}
10 | {% endif %}
11 |
12 | ), final as (
13 | select
14 | users.source_relation,
15 | users.user_id as updater_user_id
16 | ,users.name as updater_name
17 | ,users.role as updater_role
18 | ,users.email as updater_email
19 | ,users.external_id as updater_external_id
20 | ,users.locale as updater_locale
21 | ,users.is_active as updater_is_active
22 |
23 | --If you use user tags, this will be included; if not, it will be ignored.
24 | {% if var('using_user_tags', True) %}
25 | ,users.user_tags as updater_user_tags
26 | {% endif %}
27 |
28 | ,users.last_login_at as updater_last_login_at
29 | ,users.time_zone as updater_time_zone
30 | {% if var('using_organizations', True) %}
31 | ,organizations.organization_id as updater_organization_id
32 | {% endif %}
33 |
34 | --If you use domain names, this will be included; if not, it will be ignored.
35 | {% if var('using_domain_names', True) and var('using_organizations', True) %}
36 | ,organizations.domain_names as updater_organization_domain_names
37 | {% endif %}
38 |
39 | --If you use organization tags, this will be included; if not, it will be ignored.
40 | {% if var('using_organization_tags', True) and var('using_organizations', True) %}
41 | ,organizations.organization_tags as updater_organization_organization_tags
42 | {% endif %}
43 | from users
44 |
45 | {% if var('using_organizations', True) %}
46 | left join organizations
47 | on users.source_relation = organizations.source_relation
48 | and users.organization_id = organizations.organization_id
49 | {% endif %}
50 | )
51 |
52 | select *
53 | from final
--------------------------------------------------------------------------------
/models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('zendesk__unstructured_enabled', False)) }}
2 |
3 | with ticket_comments as (
4 | select *
5 | from {{ var('ticket_comment') }}
6 |
7 | ), users as (
8 | select *
9 | from {{ var('user') }}
10 |
11 | ), comment_details as (
12 | select
13 | ticket_comments.source_relation,
14 | ticket_comments.ticket_comment_id,
15 | ticket_comments.ticket_id,
16 | {{ zendesk.coalesce_cast(["users.email", "'UNKNOWN'"], dbt.type_string()) }} as commenter_email,
17 | {{ zendesk.coalesce_cast(["users.name", "'UNKNOWN'"], dbt.type_string()) }} as commenter_name,
18 | ticket_comments.created_at as comment_time,
19 | ticket_comments.body as comment_body
20 | from ticket_comments
21 | left join users
22 | on ticket_comments.user_id = users.user_id
23 | and ticket_comments.source_relation = users.source_relation
24 | where not coalesce(ticket_comments._fivetran_deleted, False)
25 | and not coalesce(users._fivetran_deleted, False)
26 |
27 | ), comment_markdowns as (
28 | select
29 | source_relation,
30 | ticket_comment_id,
31 | ticket_id,
32 | comment_time,
33 | cast(
34 | {{ dbt.concat([
35 | "'### message from '", "commenter_name", "' ('", "commenter_email", "')\\n'",
36 | "'##### sent @ '", "comment_time", "'\\n'",
37 | "comment_body"
38 | ]) }} as {{ dbt.type_string() }})
39 | as comment_markdown
40 | from comment_details
41 |
42 | ), comments_tokens as (
43 | select
44 | *,
45 | {{ zendesk.count_tokens("comment_markdown") }} as comment_tokens
46 | from comment_markdowns
47 |
48 | ), truncated_comments as (
49 | select
50 | source_relation,
51 | ticket_comment_id,
52 | ticket_id,
53 | comment_time,
54 | case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then left(comment_markdown, {{ var('zendesk_max_tokens', 5000) }} * 4) -- approximate 4 characters per token
55 | else comment_markdown
56 | end as comment_markdown,
57 | case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then {{ var('zendesk_max_tokens', 5000) }}
58 | else comment_tokens
59 | end as comment_tokens
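    -- Illustrative example (hypothetical values): with zendesk_max_tokens left at its default of 5000,
    -- a comment estimated at 6000 tokens is truncated to left(comment_markdown, 5000 * 4) = 20000 characters
    -- and its comment_tokens is capped at 5000; shorter comments pass through unchanged.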
60 | from comments_tokens
61 | )
62 |
63 | select *
64 | from truncated_comments
--------------------------------------------------------------------------------
/models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('zendesk__unstructured_enabled', False)) }}
2 |
3 | with filtered_comment_documents as (
4 | select *
5 | from {{ ref('int_zendesk__ticket_comment_document') }}
6 | ),
7 |
8 | grouped_comment_documents as (
9 | select
10 | source_relation,
11 | ticket_id,
12 | comment_markdown,
13 | comment_tokens,
14 | comment_time,
15 | sum(comment_tokens) over (
16 | partition by source_relation, ticket_id
17 | order by comment_time
18 | rows between unbounded preceding and current row
19 | ) as cumulative_length
20 | from filtered_comment_documents
21 | )
22 |
23 | select
24 | source_relation,
25 | ticket_id,
26 | cast({{ dbt_utils.safe_divide('floor(cumulative_length - 1)', var('zendesk_max_tokens', 5000)) }} as {{ dbt.type_int() }}) as chunk_index,
27 | {{ dbt.listagg(
28 | measure="comment_markdown",
29 | delimiter_text="'\\n\\n---\\n\\n'",
30 | order_by_clause="order by comment_time"
31 | ) }} as comments_group_markdown,
32 | sum(comment_tokens) as chunk_tokens
33 | from grouped_comment_documents
34 | group by 1,2,3
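-- Illustrative example (hypothetical values): with the default zendesk_max_tokens of 5000, comments whose
-- cumulative_length runs 1200, 6000, and 11000 land in chunk_index 0, 1, and 2 respectively, so each chunk's
-- concatenated markdown stays at roughly 5000 tokens or fewer.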
--------------------------------------------------------------------------------
/models/unstructured/intermediate/int_zendesk__ticket_document.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('zendesk__unstructured_enabled', False)) }}
2 |
3 | with tickets as (
4 | select *
5 | from {{ var('ticket') }}
6 |
7 | ), users as (
8 | select *
9 | from {{ var('user') }}
10 |
11 | ), ticket_details as (
12 | select
13 | tickets.source_relation,
14 | tickets.ticket_id,
15 | tickets.subject AS ticket_name,
16 | {{ zendesk.coalesce_cast(["users.name", "'UNKNOWN'"], dbt.type_string()) }} as user_name,
17 | {{ zendesk.coalesce_cast(["users.email", "'UNKNOWN'"], dbt.type_string()) }} as created_by,
18 | tickets.created_at AS created_on,
19 | {{ zendesk.coalesce_cast(["tickets.status", "'UNKNOWN'"], dbt.type_string()) }} as status,
20 | {{ zendesk.coalesce_cast(["tickets.priority", "'UNKNOWN'"], dbt.type_string()) }} as priority
21 | from tickets
22 | left join users
23 | on tickets.requester_id = users.user_id
24 | and tickets.source_relation = users.source_relation
25 | where not coalesce(tickets._fivetran_deleted, False)
26 | and not coalesce(users._fivetran_deleted, False)
27 |
28 | ), final as (
29 | select
30 | source_relation,
31 | ticket_id,
32 | {{ dbt.concat([
33 | "'# Ticket : '", "ticket_name", "'\\n\\n'",
34 | "'Created By : '", "user_name", "' ('", "created_by", "')\\n'",
35 | "'Created On : '", "created_on", "'\\n'",
36 | "'Status : '", "status", "'\\n'",
37 | "'Priority : '", "priority"
38 | ]) }} as ticket_markdown
39 | from ticket_details
40 | )
41 |
42 | select
43 | *,
44 | {{ zendesk.count_tokens("ticket_markdown") }} as ticket_tokens
45 | from final
--------------------------------------------------------------------------------
/models/unstructured/zendesk__document.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('zendesk__unstructured_enabled', False)) }}
2 |
3 | with ticket_document as (
4 | select *
5 | from {{ ref('int_zendesk__ticket_document') }}
6 |
7 | ), grouped as (
8 | select *
9 | from {{ ref('int_zendesk__ticket_comment_documents_grouped') }}
10 |
11 | ), final as (
12 | select
13 | ticket_document.source_relation,
14 | cast(ticket_document.ticket_id as {{ dbt.type_string() }}) as document_id,
15 | grouped.chunk_index,
16 | grouped.chunk_tokens as chunk_tokens_approximate,
17 | {{ dbt.concat([
18 | "ticket_document.ticket_markdown",
19 | "'\\n\\n## COMMENTS\\n\\n'",
20 | "grouped.comments_group_markdown"]) }}
21 | as chunk
22 | from ticket_document
23 | join grouped
24 | on grouped.ticket_id = ticket_document.ticket_id
25 | and grouped.source_relation = ticket_document.source_relation
26 | )
27 |
28 | select *
29 | from final
--------------------------------------------------------------------------------
/models/unstructured/zendesk_unstructured.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | models:
4 | - name: zendesk__document
5 | description: Each record represents a Zendesk ticket, enriched with data about its tags, assignees, requester, submitter, organization, and group.
6 | columns:
7 | - name: document_id
8 | description: Equivalent to `ticket_id`.
9 | - name: chunk_index
10 | description: The index of the chunk associated with the `document_id`.
11 | - name: chunk_tokens_approximate
12 | description: Approximate number of tokens for the chunk, assuming 4 characters per token.
13 | - name: chunk
14 | description: The text of the chunk.
15 |
--------------------------------------------------------------------------------
/models/utils/int_zendesk__calendar_spine.sql:
--------------------------------------------------------------------------------
1 | -- depends_on: {{ ref('stg_zendesk__ticket') }}
2 | with spine as (
3 |
4 | {% if execute and flags.WHICH in ('run', 'build') %}
5 |
6 | {%- set first_date_query %}
7 | select
8 | coalesce(
9 | min(cast(created_at as date)),
10 | cast({{ dbt.dateadd("month", -1, "current_date") }} as date)
11 | ) as min_date
12 | from {{ var('ticket') }}
13 | -- by default take all the data
14 | where cast(created_at as date) >= {{ dbt.dateadd('year',
15 | - var('ticket_field_history_timeframe_years', 50), "current_date") }}
16 | {% endset -%}
17 |
18 | {%- set first_date = dbt_utils.get_single_value(first_date_query) %}
19 |
20 | {% else %}
21 | {%- set first_date = '2016-01-01' %}
22 |
23 | {% endif %}
24 |
25 | {{
26 | dbt_utils.date_spine(
27 | datepart = "day",
28 | start_date = "cast('" ~ first_date ~ "' as date)",
29 | end_date = dbt.dateadd("week", 1, "current_date")
30 | )
31 | }}
32 |
33 | ), recast as (
34 | select
35 | cast(date_day as date) as date_day
36 | from spine
37 | )
38 |
39 | select *
40 | from recast
--------------------------------------------------------------------------------
/models/utils/int_zendesk__timezone_daylight.sql:
--------------------------------------------------------------------------------
1 | {{ config(enabled=var('using_schedules', True)) }}
2 |
3 | with timezone as (
4 |
5 | select *
6 | from {{ var('time_zone') }}
7 |
8 | ), daylight_time as (
9 |
10 | select *
11 | from {{ var('daylight_time') }}
12 |
13 | ), timezone_with_dt as (
14 |
15 | select
16 | timezone.*,
17 | daylight_time.daylight_start_utc,
18 | daylight_time.daylight_end_utc,
19 | daylight_time.daylight_offset_minutes
20 |
21 | from timezone
22 | left join daylight_time
23 | on timezone.time_zone = daylight_time.time_zone
24 | and timezone.source_relation = daylight_time.source_relation
25 |
26 | ), order_timezone_dt as (
27 |
28 | select
29 | *,
30 | -- will be null for timezones without any daylight savings records (and the first entry)
31 | -- we will coalesce the first entry's date with 1970-01-01 in the standard-time records below
32 | lag(daylight_end_utc, 1) over (partition by source_relation, time_zone order by daylight_end_utc asc) as last_daylight_end_utc,
33 | -- will be null for timezones without any daylight savings records (and the last entry)
34 | -- we will coalesce the last entry date with the current date
35 | lead(daylight_start_utc, 1) over (partition by source_relation, time_zone order by daylight_start_utc asc) as next_daylight_start_utc
36 |
37 | from timezone_with_dt
38 |
39 | ), split_timezones as (
40 |
41 | -- standard (includes timezones without DT)
42 | -- starts: when the last Daylight Savings ended
43 | -- ends: when the next Daylight Savings starts
44 | select
45 | source_relation,
46 | time_zone,
47 | standard_offset_minutes as offset_minutes,
48 |
49 | -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT
50 | coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,
51 |
52 | -- daylight_start_utc is null for timezones that don't use DT
53 | coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date)) as valid_until
54 |
55 | from order_timezone_dt
56 |
57 | union all
58 |
59 | -- DT (excludes timezones without it)
60 | -- starts: when this Daylight Savings started
61 | -- ends: when this Daylight Savings ends
62 | select
63 | source_relation,
64 | time_zone,
65 | -- Pacific Time is -8h during standard time and -7h during DT
66 | standard_offset_minutes + daylight_offset_minutes as offset_minutes,
67 | daylight_start_utc as valid_from,
68 | daylight_end_utc as valid_until
69 |
70 | from order_timezone_dt
71 | where daylight_offset_minutes is not null
72 |
73 | union all
74 |
75 | select
76 | source_relation,
77 | time_zone,
78 | standard_offset_minutes as offset_minutes,
79 |
80 | -- Get the latest daylight_end_utc time and set that as the valid_from
81 | max(daylight_end_utc) as valid_from,
82 |
83 | -- If the latest daylight_end_utc is earlier than today's timestamp, that means DST has ended for this time zone. Therefore, we set the valid_until in the future.
84 | cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until
85 |
86 | from order_timezone_dt
87 | group by 1, 2, 3
88 | -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.
89 | having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp() }} as date)
90 |
91 | ), final as (
92 | select
93 | source_relation,
94 | lower(time_zone) as time_zone,
95 | offset_minutes,
96 | cast(valid_from as {{ dbt.type_timestamp() }}) as valid_from,
97 | cast(valid_until as {{ dbt.type_timestamp() }}) as valid_until
98 | from split_timezones
99 | )
100 |
101 | select *
102 | from final
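-- Illustrative example (hypothetical values): for America/Los_Angeles this model would emit a standard-time
-- row with offset_minutes = -480 valid from the prior DST end until the next DST start, plus a DST row with
-- offset_minutes = -480 + 60 = -420 valid between daylight_start_utc and daylight_end_utc; zones that ended
-- DST permanently (e.g. Hong Kong in 1979) get one final standard row valid a year into the future.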
--------------------------------------------------------------------------------
/models/zendesk__sla_policies.sql:
--------------------------------------------------------------------------------
1 | --final step where we union together all of the reply time, agent work time, and requester wait time SLAs
2 |
3 | with reply_time_sla as (
4 |
5 | select *
6 | from {{ ref('int_zendesk__reply_time_combined') }}
7 |
8 | ), agent_work_calendar_sla as (
9 |
10 | select *
11 | from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}
12 |
13 | ), requester_wait_calendar_sla as (
14 |
15 | select *
16 | from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}
17 |
18 | {% if var('using_schedules', True) %}
19 |
20 | ), agent_work_business_sla as (
21 |
22 | select *
23 | from {{ ref('int_zendesk__agent_work_time_business_hours') }}
24 |
25 | ), requester_wait_business_sla as (
26 | select *
27 | from {{ ref('int_zendesk__requester_wait_time_business_hours') }}
28 |
29 | {% endif %}
30 |
31 | ), all_slas_unioned as (
32 | select
33 | source_relation,
34 | ticket_id,
35 | sla_policy_name,
36 | metric,
37 | sla_applied_at,
38 | target,
39 | in_business_hours,
40 | sla_update_at as sla_breach_at,
41 | sla_elapsed_time,
42 | is_sla_breached
43 | from reply_time_sla
44 |
45 | union all
46 |
47 | select
48 | source_relation,
49 | ticket_id,
50 | sla_policy_name,
51 | 'agent_work_time' as metric,
52 | sla_applied_at,
53 | target,
54 | false as in_business_hours,
55 | max(sla_breach_at) as sla_breach_at,
56 | max(running_total_calendar_minutes) as sla_elapsed_time,
57 | {{ fivetran_utils.max_bool("is_breached_during_schedule") }} as is_sla_breached
58 | from agent_work_calendar_sla
59 |
60 | {{ dbt_utils.group_by(n=7) }}
61 |
62 | union all
63 |
64 | select
65 | source_relation,
66 | ticket_id,
67 | sla_policy_name,
68 | 'requester_wait_time' as metric,
69 | sla_applied_at,
70 | target,
71 | false as in_business_hours,
72 | max(sla_breach_at) as sla_breach_at,
73 | max(running_total_calendar_minutes) as sla_elapsed_time,
74 | {{ fivetran_utils.max_bool("is_breached_during_schedule") }} as is_sla_breached
75 | from requester_wait_calendar_sla
76 |
77 | {{ dbt_utils.group_by(n=7) }}
78 |
79 |
80 | {% if var('using_schedules', True) %}
81 |
82 | union all
83 |
84 | select
85 | source_relation,
86 | ticket_id,
87 | sla_policy_name,
88 | 'agent_work_time' as metric,
89 | sla_applied_at,
90 | target,
91 | true as in_business_hours,
92 | max(sla_breach_at) as sla_breach_at,
93 | max(running_total_scheduled_minutes) as sla_elapsed_time,
94 | {{ fivetran_utils.max_bool("is_breached_during_schedule") }} as is_sla_breached
95 | from agent_work_business_sla
96 |
97 | {{ dbt_utils.group_by(n=7) }}
98 |
99 | union all
100 |
101 | select
102 | source_relation,
103 | ticket_id,
104 | sla_policy_name,
105 | 'requester_wait_time' as metric,
106 | sla_applied_at,
107 | target,
108 | true as in_business_hours,
109 | max(sla_breach_at) as sla_breach_at,
110 | max(running_total_scheduled_minutes) as sla_elapsed_time,
111 | {{ fivetran_utils.max_bool("is_breached_during_schedule") }} as is_sla_breached
112 |
113 | from requester_wait_business_sla
114 |
115 | {{ dbt_utils.group_by(n=7) }}
116 |
117 | {% endif %}
118 |
119 | )
120 |
121 | select
122 | {{ dbt_utils.generate_surrogate_key(['source_relation', 'ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,
123 | source_relation,
124 | ticket_id,
125 | sla_policy_name,
126 | metric,
127 | sla_applied_at,
128 | target,
129 | in_business_hours,
130 | sla_breach_at,
131 | case when sla_elapsed_time is null
132 | then ({{ dbt.datediff("sla_applied_at", dbt.current_timestamp(), 'second') }} / 60) --This will create an entry for active SLAs
133 | else sla_elapsed_time
134 | end as sla_elapsed_time,
135 | sla_breach_at > current_timestamp as is_active_sla,
136 | case when (sla_breach_at > {{ dbt.current_timestamp() }})
137 | then null
138 | else is_sla_breached
139 | end as is_sla_breach
140 | from all_slas_unioned
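-- Illustrative example (hypothetical values): an SLA applied at 09:00 with no recorded elapsed time and a
-- breach time of 13:00, evaluated at 11:00, gets sla_elapsed_time = datediff(09:00, now) / 60 = 120 minutes,
-- is_active_sla = true (13:00 > now), and is_sla_breach = null until the breach time has passed.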
--------------------------------------------------------------------------------
/models/zendesk__ticket_backlog.sql:
--------------------------------------------------------------------------------
1 | --This model will only run if 'status' is included within the `ticket_field_history_columns` variable.
2 | {{ config(enabled = 'status' in var('ticket_field_history_columns')) }}
3 |
4 | with ticket_field_history as (
5 | select *
6 | from {{ ref('zendesk__ticket_field_history') }}
7 |
8 | ), tickets as (
9 | select *
10 | from {{ ref('stg_zendesk__ticket') }}
11 |
12 | ), group_names as (
13 | select *
14 | from {{ ref('stg_zendesk__group') }}
15 |
16 | ), users as (
17 | select *
18 | from {{ ref('stg_zendesk__user') }}
19 |
20 | {% if var('using_brands', True) %}
21 | ), brands as (
22 | select *
23 | from {{ ref('stg_zendesk__brand') }}
24 | {% endif %}
25 |
26 | --The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.
27 | {% if 'ticket_form_id' in var('ticket_field_history_columns') %}
28 | ), ticket_forms as (
29 | select *
30 | from {{ ref('int_zendesk__latest_ticket_form') }}
31 | {% endif %}
32 |
33 | --If using organizations, this will be included, if not it will be ignored.
34 | {% if var('using_organizations', True) %}
35 | ), organizations as (
36 | select *
37 | from {{ ref('stg_zendesk__organization') }}
38 | {% endif %}
39 |
40 | ), backlog as (
41 | select
42 | ticket_field_history.source_relation,
43 | ticket_field_history.date_day
44 | ,ticket_field_history.ticket_id
45 | ,ticket_field_history.status
46 | ,tickets.created_channel
47 | {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file
48 | {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.
49 | ,assignee.name as assignee_name
50 |
51 | {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.
52 | ,requester.name as requester_name
53 |
54 | {% elif col in ['ticket_form_id'] %} --Standard ID field where the name can easily be joined from stg model.
55 | ,ticket_forms.name as ticket_form_name
56 |
57 | {% elif var('using_organizations', True) and col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.
58 | ,organizations.name as organization_name
59 |
60 | {% elif var('using_brands', True) and col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.
61 | ,brands.name as brand_name
62 |
63 | {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.
64 | ,group_names.name as group_name
65 |
66 | {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.
67 | ,assignee.locale as local_name
68 |
69 | {% else %} --All other fields are not ID's and can simply be included in the query.
70 | ,ticket_field_history.{{ col }}
71 | {% endif %}
72 | {% endfor %}
73 |
74 | from ticket_field_history
75 |
76 | left join tickets
77 | on tickets.ticket_id = ticket_field_history.ticket_id
78 | and tickets.source_relation = ticket_field_history.source_relation
79 |
80 | {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.
81 | left join ticket_forms
82 | on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})
83 | and ticket_forms.source_relation = ticket_field_history.source_relation
84 | {% endif %}
85 |
86 | {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.
87 | left join group_names
88 | on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})
89 | and group_names.source_relation = ticket_field_history.source_relation
90 | {% endif %}
91 |
92 | {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns') %} --Join not needed if these fields are not located in the variable, otherwise it is included.
93 | left join users as assignee
94 | on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})
95 | and assignee.source_relation = ticket_field_history.source_relation
96 | {% endif %}
97 |
98 | {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.
99 | left join users as requester
100 | on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})
101 | and requester.source_relation = ticket_field_history.source_relation
102 | {% endif %}
103 |
104 | {% if var('using_brands', True) and 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.
105 | left join brands
106 | on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})
107 | and brands.source_relation = ticket_field_history.source_relation
108 | {% endif %}
109 |
110 | {% if var('using_organizations', True) and 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.
111 | left join organizations
112 | on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})
113 | and organizations.source_relation = ticket_field_history.source_relation
114 | {% endif %}
115 |
116 | where ticket_field_history.status not in ('closed', 'solved', 'deleted')
117 | )
118 |
119 | select *
120 | from backlog
--------------------------------------------------------------------------------
/models/zendesk__ticket_field_history.sql:
--------------------------------------------------------------------------------
1 | {{
2 | config(
3 | materialized='incremental',
4 | partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],
5 | unique_key='ticket_day_id',
6 | incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',
7 | file_format='delta'
8 | )
9 | }}
10 |
11 | {%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}
12 |
13 | with change_data as (
14 |
15 | select *
16 | from {{ ref('int_zendesk__field_history_scd') }}
17 |
18 | {% if is_incremental() %}
19 | where valid_from >= (select max(date_day) from {{ this }})
20 |
21 | -- If no ticket fields have been updated since the last incremental run, the change_data CTE above will return no rows.
22 | -- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist
23 | -- those values into the future.
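    -- Illustrative example (hypothetical values): if the last run's max date_day is 2024-06-01 and no ticket
    -- fields have changed since, most_recent_data supplies the 2024-06-01 values, which the joined CTE below
    -- coalesces onto each new calendar day so the history keeps rolling forward.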
24 |
25 | ), most_recent_data as (
26 |
27 | select
28 | *
29 | from {{ this }}
30 | where date_day = (select max(date_day) from {{ this }} )
31 |
32 | {% endif %}
33 |
34 | ), calendar as (
35 |
36 | select *
37 | from {{ ref('int_zendesk__field_calendar_spine') }}
38 | where date_day <= current_date
39 | {% if is_incremental() %}
40 | and date_day >= (select max(date_day) from {{ this }})
41 | {% endif %}
42 |
43 | ), joined as (
44 |
45 | select
46 | calendar.source_relation,
47 | calendar.date_day,
48 | calendar.ticket_id
49 | {% if is_incremental() %}
50 | {% for col in change_data_columns if col.name|lower not in ['source_relation','ticket_id','valid_from','valid_to','ticket_day_id'] %}
51 | , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}
52 | {% endfor %}
53 |
54 | {% else %}
55 | {% for col in change_data_columns if col.name|lower not in ['source_relation','ticket_id','valid_from','valid_to','ticket_day_id'] %}
56 | , {{ col.name }}
57 | {% endfor %}
58 | {% endif %}
59 |
60 | from calendar
61 | left join change_data
62 | on calendar.ticket_id = change_data.ticket_id
63 | and calendar.date_day = change_data.valid_from
64 | and calendar.source_relation = change_data.source_relation
65 |
66 | {% if is_incremental() %}
67 | left join most_recent_data
68 | on calendar.ticket_id = most_recent_data.ticket_id
69 | and calendar.date_day = most_recent_data.date_day
70 | and calendar.source_relation = most_recent_data.source_relation
71 | {% endif %}
72 |
73 | ), set_values as (
74 |
75 | select
76 | source_relation,
77 | date_day,
78 | ticket_id
79 |
80 | {% for col in change_data_columns if col.name|lower not in ['source_relation','ticket_id','valid_from','valid_to','ticket_day_id'] %}
81 | , {{ col.name }}
82 | -- create a batch/partition once a new value is provided
83 | , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by source_relation, ticket_id
84 | order by date_day rows unbounded preceding) as {{ col.name }}_field_partition
85 |
86 | {% endfor %}
87 |
88 | from joined
89 | ),
90 |
91 | fill_values as (
92 |
93 | select
94 | source_relation,
95 | date_day,
96 | ticket_id
97 |
98 | {% for col in change_data_columns if col.name|lower not in ['source_relation','ticket_id','valid_from','valid_to','ticket_day_id'] %}
99 | -- grab the value that started this batch/partition
100 | , first_value( {{ col.name }} ) over (
101 | partition by source_relation, ticket_id, {{ col.name }}_field_partition
102 | order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}
103 | {% endfor %}
104 |
105 | from set_values
106 |
107 | ), fix_null_values as (
108 |
109 | select
110 | source_relation,
111 | date_day,
112 | ticket_id
113 | {% for col in change_data_columns if col.name|lower not in ['source_relation','ticket_id','valid_from','valid_to','ticket_day_id'] %}
114 |
115 | -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled
116 | , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}
117 | {% endfor %}
118 |
119 | from fill_values
120 |
121 | ), surrogate_key as (
122 |
123 | select
124 | {{ dbt_utils.generate_surrogate_key(['source_relation','date_day','ticket_id']) }} as ticket_day_id,
125 | *
126 |
127 | from fix_null_values
128 | )
129 |
130 | select *
131 | from surrogate_key
132 |
--------------------------------------------------------------------------------
/models/zendesk__ticket_summary.sql:
--------------------------------------------------------------------------------
1 | with ticket_metrics as (
2 | select *
3 | from {{ ref('zendesk__ticket_metrics') }}
4 |
5 | ), user_table as (
6 | select *
7 | from {{ ref('stg_zendesk__user') }}
8 |
9 | ), user_sum as (
10 | select
11 | source_relation,
12 | cast(1 as {{ dbt.type_int() }}) as summary_helper,
13 | sum(case when is_active = true
14 | then 1
15 | else 0
16 | end) as user_count,
17 | sum(case when lower(role) != 'end-user' and is_active = true
18 | then 1
19 | else 0
20 | end) as active_agent_count,
21 | sum(case when is_active = false
22 | then 1
23 | else 0
24 | end) as deleted_user_count,
25 | sum(case when lower(role) = 'end-user' and is_active = true
26 | then 1
27 | else 0
28 | end) as end_user_count,
29 | sum(case when is_suspended = true
30 | then 1
31 | else 0
32 | end) as suspended_user_count
33 | from user_table
34 |
35 | group by 1,2
36 |
37 | ), ticket_metric_sum as (
38 | select
39 | source_relation,
40 | cast(1 as {{ dbt.type_int() }}) as summary_helper,
41 | sum(case when lower(status) = 'new'
42 | then 1
43 | else 0
44 | end) as new_ticket_count,
45 | sum(case when lower(status) = 'hold'
46 | then 1
47 | else 0
48 | end) as on_hold_ticket_count,
49 | sum(case when lower(status) = 'open'
50 | then 1
51 | else 0
52 | end) as open_ticket_count,
53 | sum(case when lower(status) = 'pending'
54 | then 1
55 | else 0
56 | end) as pending_ticket_count,
57 | sum(case when lower(type) = 'problem'
58 | then 1
59 | else 0
60 | end) as problem_ticket_count,
61 | sum(case when first_assignee_id != last_assignee_id
62 | then 1
63 | else 0
64 | end) as reassigned_ticket_count,
65 | sum(case when count_reopens > 0
66 | then 1
67 | else 0
68 | end) as reopened_ticket_count,
69 |
70 | sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')
71 | then 1
72 | else 0
73 | end) as surveyed_satisfaction_ticket_count,
74 |
75 | sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')
76 | then 1
77 | else 0
78 | end) as unassigned_unsolved_ticket_count,
79 | sum(case when total_agent_replies < 0
80 | then 1
81 | else 0
82 | end) as unreplied_ticket_count,
83 | sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')
84 | then 1
85 | else 0
86 | end) as unreplied_unsolved_ticket_count,
87 | sum(case when lower(status) not in ('solved', 'closed')
88 | then 1
89 | else 0
90 | end) as unsolved_ticket_count,
91 | sum(case when lower(status) in ('solved', 'closed')
92 | then 1
93 | else 0
94 | end) as solved_ticket_count,
95 | sum(case when lower(status) in ('deleted')
96 | then 1
97 | else 0
98 | end) as deleted_ticket_count,
99 | sum(case when total_ticket_recoveries > 0
100 | then 1
101 | else 0
102 | end) as recovered_ticket_count,
103 | sum(case when assignee_stations_count > 0
104 | then 1
105 | else 0
106 | end) as assigned_ticket_count,
107 | count(count_internal_comments) as total_internal_comments,
108 | count(count_public_comments) as total_public_comments,
109 | count(total_comments) as total_comments
110 | from ticket_metrics
111 |
112 | group by 1,2
113 |
114 |
115 | ), final as (
116 | select
117 | user_sum.source_relation,
118 | user_sum.user_count,
119 | user_sum.active_agent_count,
120 | user_sum.deleted_user_count,
121 | user_sum.end_user_count,
122 | user_sum.suspended_user_count,
123 | ticket_metric_sum.new_ticket_count,
124 | ticket_metric_sum.on_hold_ticket_count,
125 | ticket_metric_sum.open_ticket_count,
126 | ticket_metric_sum.pending_ticket_count,
127 | ticket_metric_sum.solved_ticket_count,
128 | ticket_metric_sum.problem_ticket_count,
129 | ticket_metric_sum.assigned_ticket_count,
130 | ticket_metric_sum.reassigned_ticket_count,
131 | ticket_metric_sum.reopened_ticket_count,
132 | ticket_metric_sum.surveyed_satisfaction_ticket_count,
133 | ticket_metric_sum.unassigned_unsolved_ticket_count,
134 | ticket_metric_sum.unreplied_ticket_count,
135 | ticket_metric_sum.unreplied_unsolved_ticket_count,
136 | ticket_metric_sum.unsolved_ticket_count,
137 | ticket_metric_sum.recovered_ticket_count,
138 | ticket_metric_sum.deleted_ticket_count
139 | from user_sum
140 |
141 | left join ticket_metric_sum
142 | on user_sum.summary_helper = ticket_metric_sum.summary_helper
143 | and user_sum.source_relation = ticket_metric_sum.source_relation
144 | )
145 |
146 | select *
147 | from final
--------------------------------------------------------------------------------
/packages.yml:
--------------------------------------------------------------------------------
1 | packages:
2 | - package: fivetran/zendesk_source
3 | version: [">=0.17.0", "<0.18.0"]
--------------------------------------------------------------------------------