├── .gitignore ├── DirStructure.md ├── README.md ├── convert_to_md.sh ├── devOps ├── README.md ├── SplunkEnterprise │ ├── DevOps.md │ ├── TemplateTransformer.py │ ├── archive │ │ └── manageSplunk.sh │ ├── configureSplunkMain.sh │ ├── deployApp.sh │ ├── firewalld_sample.txt │ ├── getopts.sample │ ├── houseKeepFiles.sh │ ├── iptables_main.md │ ├── splunkEnterpriseInstall.sh │ └── splunk_multi_instance.sh ├── SplunkUF │ └── SPF_Install │ │ ├── README.md │ │ ├── SplunkFwdWindows.bat │ │ ├── SplunkUF_WinCoreCmdsl.bat │ │ ├── encrypt_decrypt_batch.bat │ │ ├── getHash.py │ │ ├── getSplunk.sh │ │ ├── initd_splunkforwarder.file │ │ ├── linkLatest.sh │ │ ├── splunkUF_install.sh │ │ ├── splunkUF_uninstall.sh │ │ └── systemd.unit ├── api_integration │ ├── ansible │ │ ├── apps │ │ │ └── mysearch │ │ │ │ └── local │ │ │ │ ├── props.conf │ │ │ │ └── savedsearches.conf │ │ ├── create_api.yml │ │ └── lib │ │ │ └── splunkConfParser.py │ └── savedsearch_create_update.txt ├── git │ ├── README.md │ └── git_update.sh └── python_scripts │ ├── common │ ├── README.md │ ├── splunk_common.py │ └── splunk_config_interface.py │ ├── macros │ ├── README.md │ ├── files │ │ └── macros.conf │ ├── update.py │ └── update_macros.py │ └── savedsearches │ ├── README.md │ ├── curl_equilvalent.md │ ├── disable_versioning.md │ ├── files │ ├── config.json │ └── savedsearches.conf │ ├── update.py │ └── upsert.py ├── docs ├── README.md ├── SPL_searches │ ├── ES_basicSearches.md │ ├── TrickySearches.md │ ├── cimSearches.md │ ├── compare_hosts_missing.md │ ├── compare_two_events.md │ ├── cpd_time.md │ ├── cpu_memory.md │ ├── datamodels.searches.md │ ├── dbinspect.md │ ├── distributed_system_tricks.md │ ├── exfiltration_searches.md │ ├── file_explorer_rest_api.md │ ├── for_each.md │ ├── forwarder_related.md │ ├── incident_impact.md │ ├── indexes_vs_roles_mapping.md │ ├── json_examples.md │ ├── key-value.searches.md │ ├── kvstore.md │ ├── ldapsearches.md │ ├── macros_savedsearches_api.md │ ├── makeresults.md │ 
├── metadata_searches.md │ ├── timeWrapSearches.md │ └── tstats_searches.md ├── _config.yml ├── collections │ └── Content_update_collection.md ├── da │ ├── DA_dropdown │ │ ├── bin │ │ │ └── README │ │ ├── default │ │ │ ├── app.conf │ │ │ └── data │ │ │ │ └── ui │ │ │ │ ├── nav │ │ │ │ └── default.xml │ │ │ │ └── views │ │ │ │ └── README │ │ ├── local │ │ │ ├── app.conf │ │ │ └── data │ │ │ │ └── ui │ │ │ │ ├── nav │ │ │ │ └── default.xml │ │ │ │ └── views │ │ │ │ ├── asset_db.xml │ │ │ │ └── studio_dropdown.xml │ │ ├── lookups │ │ │ ├── horseshoe_data.csv │ │ │ └── sales_data.csv │ │ └── metadata │ │ │ ├── default.meta │ │ │ └── local.meta │ ├── README.md │ └── dashboard_samples │ │ ├── d3Samples.xml │ │ ├── hostOnboarding.xml │ │ └── simpleXMLD3.xml ├── data_ops.txt ├── design │ ├── detections_repo1.md │ └── large_scale_design.md ├── gist.txt ├── logging_standards.md ├── rest │ ├── alerts_actions_savedsearches.md │ ├── rest_client_insert.md │ └── rest_client_searches.md ├── security │ ├── datasets.md │ └── sample_detections_ESCU.csv ├── splunk_tips │ ├── answers │ │ └── goodAnswers.txt │ ├── api │ │ ├── _internal.txt │ │ ├── cmd_rest.txt │ │ └── key_api_commands.txt │ ├── apps │ │ └── A_prod_ldap_auth │ │ │ └── local │ │ │ ├── authentication.conf │ │ │ └── authorize.conf │ ├── configs │ │ ├── authentication.conf │ │ ├── clm.server.conf │ │ ├── genericTimeLineBreak.configs │ │ ├── indexes.conf │ │ ├── sample_app │ │ │ └── local │ │ │ │ ├── deploymentclient.conf │ │ │ │ ├── inputs.conf │ │ │ │ ├── outputs.conf │ │ │ │ └── serverclass.conf │ │ └── serverclass.conf │ ├── dashboards │ │ ├── AutoUpdateDashboard.txt │ │ └── PopUp.txt │ ├── others │ │ ├── errors │ │ │ ├── authErrors.txt │ │ │ ├── debug_steps.txt │ │ │ └── memoryLeak │ │ │ │ ├── collect_pstacks.sh │ │ │ │ └── jmalloc.sh │ │ └── notes │ │ │ ├── a_toDo.list │ │ │ ├── btool.txt │ │ │ ├── cluster.txt │ │ │ ├── configure_online.txt │ │ │ ├── creditCard.txt │ │ │ ├── datamodel.txt │ │ │ ├── diag.txt │ │ │ ├── 
download_curl_wget.txt │ │ │ ├── enterprise_security.txt │ │ │ ├── estreamer_vs_syslog.md │ │ │ ├── forwarder.txt │ │ │ ├── general.txt │ │ │ ├── index.txt │ │ │ ├── nullqueue.txt │ │ │ ├── process.md │ │ │ ├── rest.txt │ │ │ ├── search.md │ │ │ ├── searchHead_captain.txt │ │ │ ├── search_order.txt │ │ │ ├── security.txt │ │ │ ├── server.conf.txt │ │ │ ├── shcluster_adding.txt │ │ │ ├── splunk_release.csv │ │ │ ├── thingstoDo.md │ │ │ ├── transforming.txt │ │ │ ├── transforming2.txt │ │ │ ├── tstats_examples.txt │ │ │ └── xml.props.example.txt │ └── stanza │ │ └── single_stanza_extract.md ├── ta │ └── vmware │ │ ├── props.conf │ │ └── transforms.conf └── ui │ └── dropdown.xml ├── sampleData ├── Connectors_vendor_list.pdf ├── dataResources.txt ├── data_process │ └── dob_generic.xlsx └── raw │ ├── application_logs │ ├── cart_checkout.log │ └── vendor_sales.log │ ├── citrix │ └── adc.txt │ ├── csv │ ├── Names_Dataset.csv │ ├── basic_dataset.csv │ └── prices.csv │ ├── json │ ├── http_uri.txt │ ├── sample2.json │ ├── sample_array.json │ ├── simple_1message.json │ └── url_based.md │ ├── msft │ └── mde │ │ └── mde-log-samples.md │ ├── os │ ├── linux │ │ ├── auditd.log │ │ └── syslog_linux_sshd.log │ └── windows │ │ ├── ADImporter.md │ │ └── win-linux-os.md │ ├── vmware │ └── sample_logs.txt │ └── yaml │ └── employee_sample1.yaml └── thirdparty ├── .conf_materials.txt ├── Visual_Studio_code └── shell_integration.md ├── android ├── adb.md └── tips.txt ├── ansible ├── README.md ├── ansible_approaches.md ├── basic │ ├── 01_basic_ansible_facts.yml │ ├── 01_verybasic.yml │ ├── 02_assert_version_comparison.yml │ ├── 02_create_json.yml │ ├── 02_csv_dict_inject copy.yml │ ├── 02_dict_list.yml │ ├── 02_populate_template_json.yml │ ├── 03_map_filters.yml │ ├── 04_jinja_group_sum_template.yml │ ├── 04_template_and_file_lookup.yml │ ├── 04_template_within_jinja.yml │ ├── 04_template_within_template.yml │ ├── 05_lookup_from_file.yml │ ├── 06_read_from_csv.yml │ ├── 
07_gitlab_playbook.yml │ ├── 08_list_to_dict.yml │ ├── 09_vars_reuse_good_eg.yml │ ├── 10_role_based_example.yml │ ├── 11_role_call_another_role.yml │ ├── 12_map_select.yml │ ├── 13_filter_dictionary.yaml │ ├── 13_filter_group_regex.yaml │ ├── 14_with_nested_dict_list.yaml │ ├── 15_fileglob.yml │ ├── 16_line_in_file.conf │ ├── 17_template_selectattr_macro.yml │ ├── 18_batch_groupby.yml │ ├── 18_groupby_list.yml │ ├── 19_list_without_loop.yml │ ├── 20_uri_tester.yml │ ├── 21_nested_dict.yml │ ├── 22_jinja_indent.yml │ ├── 24_dynamic_variables_setfact.yml │ ├── 25_dynamic_variables_kv.yml │ ├── 26_jsonArray_to_ndjson.yml │ ├── 31_block_when_tempfile.yml │ ├── 41_crypto_keys.yml │ ├── 42_keystore_truststore.yml │ ├── 50_nested_loop.yml │ ├── check_for_duplicates.txt │ ├── configs │ │ ├── 02_dict_iteration.j2 │ │ ├── 02_list.json │ │ ├── 02_list_template.j2 │ │ ├── 02_names_department.csv │ │ ├── 02_template.j2 │ │ ├── 05_lookup_file.config │ │ ├── 06_country_host_invalid.csv │ │ ├── 06_country_host_valid.csv │ │ ├── 13_fruits.csv │ │ ├── 17_template.j2 │ │ ├── 22_jinja_template.j2 │ │ ├── dummy.remove.csv │ │ ├── fruits │ │ │ ├── apples.csv │ │ │ ├── grapes.csv │ │ │ └── pears.csv │ │ └── nested_list.json │ ├── filter_plugins │ │ └── mapattributes.py │ ├── group_vars │ │ └── 07_github_vars.yml │ ├── roles │ │ ├── distributor_role │ │ │ └── tasks │ │ │ │ ├── main.yml │ │ │ │ ├── simple_debug_test.yml │ │ │ │ └── trigger_git_role.yml │ │ ├── git │ │ │ └── tasks │ │ │ │ ├── git_clone_a_repo.yml │ │ │ │ └── main.yml │ │ └── github │ │ │ └── tasks │ │ │ ├── github_repo_activities.yml │ │ │ └── main.yml │ ├── templates │ │ ├── 04_contact_details.j2 │ │ ├── 04_jinja_in_jinja.j2 │ │ ├── 04_main_callout.md.j2 │ │ ├── 04_vars-email.j2 │ │ ├── 04_vars-name.j2 │ │ └── 04_vars-phone.j2 │ └── vault │ │ └── vault_basic_steps.txt ├── cpt │ ├── 100_filter.yml │ ├── 100_filter_jsonquery.yml │ ├── 100_filter_list_json.yml │ ├── 100_filter_list_yaml.yml │ └── configs │ │ ├── 
fruits_veg.dict.json │ │ ├── fruits_veg.dict.yml │ │ ├── fruits_veg.list.json │ │ └── fruits_veg.list.yml ├── hortonew │ ├── ansible.cfg │ ├── filestructure.txt │ ├── group_vars │ │ └── windows.yml │ ├── hosts.cfg │ ├── kerberos_authentication.txt │ ├── krb5.conf │ ├── playbooks │ │ ├── SplunkUniversalForwarderInstallLinux.yml │ │ └── SplunkUniversalForwarderInstallWindows.yml │ └── roles │ │ ├── universal_forwarder_linux │ │ └── tasks │ │ │ ├── forwarder.yml │ │ │ └── main.yml │ │ └── universal_forwarder_windows │ │ ├── files │ │ └── install-splunk.ps1 │ │ └── tasks │ │ ├── forwarder.yml │ │ └── main.yml └── splunk_apps │ ├── README.txt │ ├── savedsearch_using_curl.yml │ └── splunk_ansible_uri.yml ├── api ├── json_server │ ├── db.json │ └── json_server.md └── public_api_list.md ├── approach_concepts ├── automation_values.md ├── devops_toolchain_apps.md ├── os_logging.txt └── products_decision.md ├── arcsight └── json_to_cef_f5.txt ├── aws ├── AWS_products.tex ├── README.md ├── active_directory_aws_login.md ├── aws_cis.csv ├── aws_cli_query.md ├── aws_cli_setup.md ├── aws_config_file_sample ├── aws_resources.txt ├── aws_sqs_commands.md ├── iam │ ├── aws_sts_assume_role.md │ └── reference_identifiers.md ├── iam_examples.md ├── lambda │ ├── ebs-snapshot-delete.py │ └── sqs_logset.py ├── multi_cloud_email.drawio.svg ├── network_concepts.md └── setup_aws_env.sh ├── azure ├── az_commands.md ├── az_vault.md ├── azure_cis.csv └── azure_entraid_integration.md ├── blogger └── code_highlight.md ├── certs ├── README.md ├── becomeCA.sh ├── ca.conf ├── cert.conf ├── openssl.md ├── pem_ppk.md └── reference.md ├── confluence └── api_updates.md ├── curl ├── artifactory.md ├── ftp_curl.txt ├── proxy_curl.txt └── splunk_search_macro.txt ├── cyberark └── cyberark_settings.txt ├── dataCapture ├── WindowsEventGenerator.ps1 ├── f5DataCapture.txt ├── getSNMP.sh ├── get_snmp.config ├── networkProtocol.txt ├── wireshark.txt └── wireshark_kubernetes.md ├── disk ├── disk_performance.md ├── 
dmesg_disk.md └── formatting.txt ├── docker ├── Makefile ├── ansible │ ├── README.md │ └── ansible2.Dockerfile ├── docker-compose.yml ├── dockerfile_samples │ └── README.md ├── npm_samples │ ├── README.md │ └── reactgrid.Dockerfile ├── python_fastapi │ ├── Dockerfile │ ├── README.md │ ├── app │ │ ├── __init__.py │ │ └── main.py │ └── requirements.txt └── tips.txt ├── encode_decode ├── decode.py └── encode.py ├── esxi └── basic_esxi_provision.script ├── eventgen └── executable.md ├── fluentd └── fluent-bit.md ├── gcp ├── components_command_groups.txt ├── gcloud_commands.md ├── gcp_cis.csv ├── gcp_links.md └── getting_shell_access2.md ├── git ├── clone_methods.md ├── conflicts.md ├── git.txt ├── git_advanced_lfs.md ├── git_config_levels.md ├── git_credential_save.txt ├── git_https_proxy.txt ├── git_initial_setup.md ├── git_lifecycle.txt ├── git_resolve.sh ├── git_useful_commands.md ├── merge_strategy_branch_to_master.md ├── prune_history.md └── revert_reset_ideas.md ├── github └── .github │ ├── CODEOWNERS │ ├── ISSUE_TEMPLATE │ └── deploy-detection.yml │ ├── README.md │ └── workflows │ ├── deploy-to-splunk.yml │ └── list-detection-ids.yml ├── gitlab ├── gitlab-ci_scheduled.yml ├── gitlab-runner.md └── tokens.txt ├── go ├── timeSubtract.go └── timeformats.md ├── html ├── crud │ └── singlePage.html ├── selectors.md └── table_style.html ├── iam └── sailpoint_iiq_interview.md ├── images └── gif_links.txt ├── interview ├── behavioural.md ├── set1.txt ├── set2.md └── set3.md ├── javascript └── timeout_fix_gcp.md ├── jenkins ├── README.md ├── git_checkout_jenkins.groovy ├── groovy_snippets.md └── jenkins_api.md ├── jira ├── check_existing_incident.py ├── jira_insertion_basic.py ├── jira_ticketing │ ├── README.md │ ├── config.py │ ├── jira_api.py │ ├── main.py │ └── ticket_handler.py ├── logs_missing_sample.json └── sme_contact_details.yml ├── jq ├── jq_argument_pass1.sh └── jq_find_replace_delete.md ├── jupyter_books ├── Kerberoasting-T1558.003.ipynb └── README.md ├── 
k8s_kubernetes ├── README.md └── k8s_security.md ├── kafka ├── check_a_topic.txt ├── kafka_quick_setup.md ├── kakfa_within_k8s.txt └── sample_kafka_jumpstation.yml ├── keytool ├── keytool.md └── keytool_java.md ├── ldap_activeDirectory ├── README.md ├── adConnectivity.txt └── sampleusers.ldif ├── linux ├── bonnie_plusplus ├── commands_to_learn.txt ├── disk_commands.md ├── filesystem_admin.txt ├── journalctl_tips.md ├── kernel_uprade.txt └── process_commands.md ├── logrotate ├── logrotate_60days_rsyslog.conf └── sample.logrotate.d ├── machine_learning └── good_sites.md ├── macos ├── applescript │ └── mouse │ │ ├── 145_scrolllock.plist │ │ ├── 145_scrolllock.scpt │ │ └── launch.md ├── initial_setup.md └── snippets.md ├── mouse ├── README.md ├── activeMouse.ps1 └── mouse2.vbs ├── music ├── README.md ├── sample.ly └── sample.lytex ├── mustache ├── empReports.mustache ├── empReports2.json ├── empSalary.json └── empSalary1.mustache ├── n8n ├── README.md └── workflow_samples │ ├── html_css_selector1.json │ ├── html_extract_merge1.json │ ├── mock_data_function1.json │ └── webhook1.json ├── network ├── firewall │ └── ufw.md ├── performance_test_network.md ├── portTest_connectivity.txt ├── session_complications.txt ├── subnet_allocation.md ├── tcpdump_details.md ├── tcpdump_examples.md ├── tcpdump_tcpreplay.md └── test_connection.sh ├── nginx ├── digitalocean.md └── ssl_config1.conf ├── observability ├── apm │ └── elastic_java.md ├── interview │ └── questions.md ├── metrics │ └── Questions.md ├── opentelemetry │ ├── key_Elements.md │ ├── ot_logs_sample.json │ ├── ot_metrics_dataTypes.md │ ├── ot_metrics_sample.json │ ├── ot_traces_sample.json │ └── standards │ │ ├── otel_logback.xml │ │ └── useful.md └── traces │ ├── traceparent.md │ ├── tracing2.md │ └── transaction_sampling.md ├── openldap ├── domainData.ldif ├── insert_data.md ├── mydomain.ldif ├── newusers.ldif ├── slapd.conf ├── ubuntu_ldap.conf └── ubuntu_setup.md ├── opensearch ├── os_single.compose.yml └── 
simple_searches.md ├── openssl ├── openssl_commands.md └── openssl_grab_pem.txt ├── pandoc └── tips.txt ├── perl ├── compareServerClass.pl └── listPerlInstalledModules.pl ├── php └── shellScriptTrigger │ ├── shellScriptTrigger.js │ └── shellScriptTrigger.php ├── postman └── add_dynamic_value.md ├── powershell ├── Test-Cred.ps1 ├── Test-Port.ps1 ├── admin_commands.md ├── convert_csv_with_double_quotes.ps1 ├── get_driver_version.md ├── get_user_accounts.ps1 ├── integrate_splunk.ps1 ├── key.ps1 ├── sleep.ps1 ├── stitchDocswithJS.ps1 ├── stitchDocuments.ps1 ├── user_bulk │ ├── insert_bulk_users.ps1 │ └── newUser.ps1 └── vital_info.ps1 ├── preChecks ├── commands.csv ├── portChecks.sh ├── ports.csv ├── runCommands.sh └── syslog_logger.sh ├── python ├── autoAppGenerator.py ├── decryptSalt.py ├── echo.py ├── partial_requests_core.py ├── pip.README.md ├── portTest.py ├── python_playgrounds.md ├── regex_validate.py ├── sample_sessions.csv ├── scapy │ ├── import_example1.py │ └── sniff_example.py ├── secureCRT_sessions.py ├── simpleConfigParser.py ├── syslogSimulator.py ├── virtual_environment.md └── web_server │ └── webserver.py ├── quest └── resync.txt ├── recording_videos_graphics ├── gource.md ├── terminal_recording.md └── visio_images.md ├── redpanda ├── example_commands.md └── settings.txt ├── regexes ├── character_group.txt ├── ipAddressValidation.txt ├── regexes.sample └── stacktrace_java_grok.md ├── rfi_process └── README.md ├── rsyslog ├── 514_sample.conf ├── macos.txt ├── old_rsyslog_format.conf ├── rfc5424_sample2.conf ├── rsyslog.d ├── rsyslog_port.conf ├── rsyslog_snmp.conf ├── rsyslog_templates.md ├── sample1.conf ├── sd.sample ├── sd_logging.py ├── tls_rsyslog_omfwd.conf └── v2_port_dynafile.conf ├── ruby └── date_parsing_formats.txt ├── scapy ├── pcap_scapy.py ├── sampleCode.py ├── send_localhost_lo.txt └── syslog_virtualbox.txt ├── security_posture ├── IT_environment_summary.md └── recommed_suggest_advise.md ├── servicenow └── servicenow.txt ├── shell ├── 
.bash_aliases ├── 7zip.md ├── addint_timestamp.sh ├── arguments_grep.sh ├── array_looping.sh ├── awk_one_liners.md ├── bash_k8s_settings.txt ├── capture_video.sh ├── counter.sh ├── cut_commands.md ├── diff.md ├── dig.md ├── directoryPath.sh ├── file_transfer.sh ├── filebeat_templator │ ├── check_port.ps1 │ ├── check_port.sh │ ├── filebeat.yml.template │ ├── test1.sh │ └── your_csv_file.csv ├── find_exec.md ├── find_replace.sh ├── find_replace_sed.txt ├── fingerprint_file.txt ├── helloWorld.sh ├── if_else_multiple.sh ├── log4sh ├── log4sh.properties.ex4 ├── mtime_scripted_inputs.sh ├── myscriptSample.sh ├── rsync.md ├── screen_capture.md ├── shell.txt ├── shopt.txt ├── simpleBackupScript.sh ├── slogger.sh ├── ssh.txt └── zsh.md ├── shuffle └── setup.md ├── snmp ├── snmp_settings.txt ├── snmptrap.txt ├── snmptrapd.conf.txt └── snmptrapd.txt ├── soc_usecases ├── links_ideas.txt ├── rules_link.txt └── splunk_uc_library.md ├── squid └── squid_kubectl.md ├── ssh └── tunnel.md ├── standards ├── ISO │ └── iso27001 │ │ ├── 1.27001.csv │ │ ├── 2.27001.csv │ │ ├── 3.27001.csv │ │ └── search.txt ├── configItems.txt └── pci │ ├── pci_1.csv │ ├── pci_2.csv │ └── pci_3.csv ├── stencils ├── generate_svg_image.sh ├── good_sites.md ├── image_demo.md ├── my_input_sample.dot └── svg_stencil.md ├── templating ├── Liquid_expressions_Workflow.pdf ├── business_reporting │ └── Atlassian_templates.md └── liquid.md ├── terraform ├── aws-assume-role.txt ├── common_tf_commands.md ├── for_each_sample.md ├── gcp_terraform │ ├── generic_mods │ │ └── sql_postgress │ │ │ ├── main.tf │ │ │ └── variables.tf │ ├── main.tf │ ├── my_proj.tfvars │ └── variables.tf ├── state_file_removal.txt ├── terraform_url.txt └── terraformer │ ├── execution.md │ └── setup.md ├── ticketing └── Decision_Record.md ├── ui └── vega │ ├── es_request.json │ ├── response_data_sankey.json │ └── sankey_chart_ES.json ├── vagrant ├── README.md ├── fixedNodes.Vagrantfile ├── freeBSD.Vagrantfile ├── multivms.resources.Vagrantfile 
├── multivms.vagrantfile ├── simpleRocky.Vagrantfile └── splunk_standalone.Vagrantfile ├── vault ├── curl_vault.txt ├── sign_using_vault.sh ├── token_extract.md └── vault_login_env.md ├── video └── extraction.md ├── virtualbox └── commonCommands.txt └── websites └── good_websites.md /README.md: -------------------------------------------------------------------------------- 1 | # SplunkScriplets and Other technology snippets 2 | ### Various Splunk related configs, support items, tricks and notes .. All in ONE Place 3 | The repository is NOT just for Splunk itself, but mostly a collection of third party surroundings (like OS, firewalls) and snippets which are useful for an enterprise grade Splunk deployment 4 | 5 | # Directory Structure 6 | - [Tree Diagram](DirStructure.md) 7 | 8 | ## What does each Directory include 9 | - devOps => Splunk and Universal Forwarder installation Scripts 10 | - thirdparty => ThirdParty Scripts like python, shell scripts, connectivity tests etc. 11 | - splunk_tips => UI development snippets, Tricky searches which can be re-used, Any rare errors which people have noted and workarounds, Some key configs like rsyslogd, serverclass.conf, authentication.conf etc. 
12 | 13 | ## TO DO 14 | - Create a good repository to put notes related to Splunk and its automation 15 | -------------------------------------------------------------------------------- /convert_to_md.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Navigate to the directory; abort if it is missing so the rm loop below can never run in the wrong place 4 | cd docs/SPL_searches || exit 1 5 | 6 | # Process each .txt file 7 | for file in *.txt; do 8 | if [ -f "$file" ]; then 9 | # Get the base name without extension 10 | base_name="${file%.txt}" 11 | 12 | # Create new .md file 13 | echo "### ${base_name//_/ } Examples" > "${base_name}.md" 14 | echo "" >> "${base_name}.md" 15 | echo '```' >> "${base_name}.md" 16 | cat "$file" >> "${base_name}.md" 17 | echo '```' >> "${base_name}.md" 18 | 19 | # Remove the original .txt file 20 | rm "$file" 21 | 22 | echo "Converted $file to ${base_name}.md" 23 | fi 24 | done 25 | 26 | echo "Conversion complete!" -------------------------------------------------------------------------------- /devOps/README.md: -------------------------------------------------------------------------------- 1 | The scripts in this directory are more than 8 years old and needs to change with the new era of operators and devops tools 2 | -------------------------------------------------------------------------------- /devOps/SplunkEnterprise/firewalld_sample.txt: -------------------------------------------------------------------------------- 1 | # stackoverflow 24729024 : Alternative for iptables 2 | firewall-cmd --get-active-zones 3 | firewall-cmd --list-all 4 | 5 | firewall-cmd --zone=public --add-port=9009/tcp --permanent 6 | firewall-cmd --reload 7 | 8 | # this can be add-port, add-service 9 | -------------------------------------------------------------------------------- /devOps/SplunkUF/SPF_Install/README.md: -------------------------------------------------------------------------------- 1 | ## Various Splunk download automation 2 | - Download Splunk and Universal 
forwarder 3 | ``` 4 | # for Ubuntu Linux VMs on MacOS 5 | bash ./getSplunk.sh -p uf -v 9.4.2 -k deb -h e9664af3d956 -a arm64 -o linux 6 | 7 | # for Ubuntu Linux VMs on Traditional OS 8 | bash ./getSplunk.sh -p uf -v 9.4.2 -k deb -h e9664af3d956 -a amd64 -o linux 9 | ``` -------------------------------------------------------------------------------- /devOps/SplunkUF/SPF_Install/SplunkUF_WinCoreCmdsl.bat: -------------------------------------------------------------------------------- 1 | ::Stop all splunk services 2 | net stop splunkd 3 | net stop splunkweb 4 | ::Remove all splunk versions 5 | start /wait MsiExec.exe /uninstall {60ad9785-709f-4b4d-ac19-91cbe0ab7614} /passive 6 | start /wait MsiExec.exe /uninstall {a7579aaa-db6b-46ce-90ca-d8f553481bcc} /passive 7 | start /wait MsiExec.exe /uninstall {2c0fae08-7c9c-40f9-ba21-82a2aad07f0d} /passive 8 | 9 | ::Map drive to splunk install path 10 | net use /delete S: 11 | net use S: 12 | 13 | ::Execute installation string, minimal configuration 14 | start /wait msiexec.exe /i S:\splunk-4.0.9-74233-x86-release.msi INSTALLDIR="%ProgramFiles%\Splunk" RBG_LOGON_INFO_USER_CONTEXT=2 IS_NET_API_LOGON_USERNAME="" IS_NET_API_LOGON_PASSWORD="" LAUNCHSPLUNK=0 AUTOSTARTSERVICE_SPLUNKD=1 AUTOSTARTSERVICE_SPLUNKWEB=0 /passive 15 | -------------------------------------------------------------------------------- /devOps/SplunkUF/SPF_Install/systemd.unit: -------------------------------------------------------------------------------- 1 | #https://answers.splunk.com/answers/59662/is-there-a-systemd-unit-file-for-splunk.html 2 | -------------------------------------------------------------------------------- /devOps/api_integration/ansible/apps/mysearch/local/props.conf: -------------------------------------------------------------------------------- 1 | [host::foo] 2 | TRANSFORMS-foo=foobar 3 | 4 | # The following stanza extracts an ip address from _raw 5 | [my_sourcetype] 6 | EXTRACT-extract_ip = (?\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) 7 
| 8 | # The following example shows how to configure lookup tables 9 | [my_lookuptype] 10 | LOOKUP-foo = mylookuptable userid AS myuserid OUTPUT username AS myusername 11 | 12 | # The following shows how to specify field aliases 13 | FIELDALIAS-foo = user AS myuser id AS myid 14 | -------------------------------------------------------------------------------- /devOps/api_integration/savedsearch_create_update.txt: -------------------------------------------------------------------------------- 1 | ss_name="firstApiTest" 2 | hostname="localhost" 3 | creds="admin:changeme" 4 | 5 | curl -k -u ${creds} https://${hostname}:8089/services/saved/searches \ 6 | -d name=${ss_name} \ 7 | -d disabled=1 \ 8 | -d description=descriptionText \ 9 | -d search="index=_internal" \ 10 | -d dispatch.index_earliest=-1h \ 11 | -d dispatch.index_latest=now 12 | 13 | 14 | # Method2 15 | curl -k -u ${creds} https://${hostname}:8089/servicesNS/nobody/search/configs/conf-savedsearches/ -d name=myblog 16 | curl -k -u ${creds} https://${hostname}:8089/servicesNS/nobody/search/configs/conf-savedsearches/myblog -d "search=index=_internal|stats count" 17 | -------------------------------------------------------------------------------- /devOps/git/README.md: -------------------------------------------------------------------------------- 1 | # Git Update Script 2 | 3 | A simple shell script to automate git workflow with safety checks and error handling. 4 | 5 | ## Usage 6 | 7 | ```bash 8 | ./git_update.sh "your commit message" 9 | ``` 10 | 11 | ## What it does 12 | 13 | 1. Stashes any local changes 14 | 2. Pulls latest changes with rebase 15 | 3. Reapplies stashed changes 16 | 4. Adds all changes 17 | 5. Commits with provided message 18 | 6. 
Pushes to origin 19 | 20 | ## Requirements 21 | 22 | - Git installed 23 | - Repository initialized 24 | - Remote origin configured 25 | 26 | ## Error Handling 27 | 28 | - Validates commit message 29 | - Handles pull conflicts 30 | - Manages stashed changes 31 | - Provides feedback for each step -------------------------------------------------------------------------------- /devOps/python_scripts/common/README.md: -------------------------------------------------------------------------------- 1 | ## Usage 2 | 3 | - Macros 4 | ``` 5 | python splunk_config_interface.py macro --file-path /where/you/kept/macro.conf --app myapp --dry-run --log-level DEBUG 6 | python splunk_config_interface.py macro --file-path /where/you/kept/macro.conf --app myapp --action-flag update --log-level DEBUG 7 | python splunk_config_interface.py macro --file-path /where/you/kept/macro.conf --app myapp --action-flag delete --log-level DEBUG 8 | ``` 9 | 10 | - SavedSearches 11 | ``` 12 | python splunk_config_interface.py savedsearch --file-path /where/you/kept/savedsearch.conf --app myapp --dry-run --log-level DEBUG 13 | python splunk_config_interface.py savedsearch --file-path /where/you/kept/savedsearch.conf --app myapp --action-flag update --log-level DEBUG 14 | python splunk_config_interface.py savedsearch --file-path /where/you/kept/savedsearch.conf --app myapp --action-flag delete --log-level DEBUG 15 | ``` 16 | -------------------------------------------------------------------------------- /devOps/python_scripts/savedsearches/README.md: -------------------------------------------------------------------------------- 1 | ## Splunk insert/update via API 2 | 3 | - Step1: Ensure token is setup and updated into code 4 | - Step2: Ensure the Splunk:8089 certificate chain is updated into files location 5 | - Then run like below 6 | 7 | ### Default (dry-run) 8 | ``` 9 | python savedsearches/update.py 10 | ``` 11 | 12 | ### Actually perform POST requests 13 | ``` 14 | python 
savedsearches/update.py --updateOnly 15 | python savedsearches/update.py --deleteOnly 16 | 17 | ``` 18 | -------------------------------------------------------------------------------- /devOps/python_scripts/savedsearches/disable_versioning.md: -------------------------------------------------------------------------------- 1 | ## Once ESS version is enabled, disabling requires curl 2 | 3 | ``` 4 | curl -k -u \ 5 | https://:8089/servicesNS/nobody/SA-ContentVersioning/properties/feature_flags/general \ 6 | -X POST \ 7 | -d versioning_init="0" \ 8 | -d versioning_activated="0" 9 | ``` 10 | -------------------------------------------------------------------------------- /devOps/python_scripts/savedsearches/files/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "my_app", 3 | "owner": "nobody", 4 | "baseUrl": "https://mysplunk:8089" 5 | } -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | ## GitHub Pages for SplunkScriplets 2 | 3 | Documentation for Each module is written near to the code 4 | -------------------------------------------------------------------------------- /docs/SPL_searches/ES_basicSearches.md: -------------------------------------------------------------------------------- 1 | ### Enterprise Security Basic Searches 2 | 3 | ``` 4 | |inputlookup append=T access_tracker | search user=xyz | `get_asset(dest)` 5 | |inputlookup append=T access_tracker | search user=xyz | `get_identity4events(user)` 6 | |`inactive_account_usage("30","600")`| `settags("access")`| `ctime(lastTime)`| fields + user,inactiveDays 7 | ``` -------------------------------------------------------------------------------- /docs/SPL_searches/TrickySearches.md: -------------------------------------------------------------------------------- 1 | ### Tricky Splunk Searches 2 | 3 | #### Dynamic 
Variables in Searches 4 | ``` 5 | | gentimes start=-1 | eval myVar=stuff | map search="| dbquery source \"select * from tble where timestamp >= $myVar$ \" " 6 | ``` 7 | 8 | #### Dynamic Time Values 9 | ``` 10 | index=someData [noop|stats count|fields|eval earliest=relative_time(now(),"@d+10h")|eval latest=relative_time(now(),"@d+21h")| convert timeformat="%m/%d/%Y:%T" ctime(*)| format "" "" "" "" "" ""] 11 | ``` -------------------------------------------------------------------------------- /docs/SPL_searches/cimSearches.md: -------------------------------------------------------------------------------- 1 | ### cimSearches Examples 2 | 3 | ``` 4 | | tstats `summariesonly` values(All_Email.recipient) as R values (All_Email.src_user) as src from datamodel=Email where nodename=All_Email (All_Email.src_user= "*@mycompany.com" or All_Email.src_user= "*@2nd.com") by All_Email.internal_message_id 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/SPL_searches/compare_two_events.md: -------------------------------------------------------------------------------- 1 | ### compare two events Examples 2 | 3 | ``` 4 | #Compare two raw events side by side 5 | 6 | (index=xyz) sourcetype=WinEventLog:Security ((Logon_Type=2 OR Logon_Type=10 OR Logon_Type=11 OR Logon_Type=3) ((EventCode=4624) OR EventCode=528)) user=someuser (host=*) 7 | | head 1 8 | | table _raw 9 | | rename _raw as client_event 10 | | eval item="aa" 11 | | join item [ search (index=xyz) sourcetype=WinEventLog:Security ((Logon_Type=2 OR Logon_Type=10 OR Logon_Type=11 OR Logon_Type=3) ((EventCode=4624) OR EventCode=528)) user=someuser (host=dc*) 12 | | head 1 13 | | table _raw 14 | | rename _raw as dc_event 15 | | eval item="aa"] 16 | ``` 17 | -------------------------------------------------------------------------------- /docs/SPL_searches/dbinspect.md: -------------------------------------------------------------------------------- 1 | ### dbinspect Examples 2 | 3 
| ``` 4 | #Mainly related to indexes 5 | 6 | # https://answers.splunk.com/answers/43417/search-to-provide-days-in-hot-warmdb.html 7 | # |dbinspect index=name_of_your_index state=warm 8 | |dbinspect index=main|convert timeformat=""%m/%d/%Y:%H:%M:%S"" mktime(earliestTime) as earliestTime|convert timeformat=""%m/%d/%Y:%H:%M:%S"" mktime(latestTime) as latestTime|stats min(earliestTime) as earliestTime max(latestTime) as latestTime sum(sizeOnDiskMB) as sizeOnDiskMB dc(path) as NumberOfBuckets by state|eval diff_seconds=(latestTime-earliestTime)/3600|eval earliestTime=strftime(earliestTime,"%m/%d/%Y:%H:%M:%S")|eval latestTime=strftime(latestTime,"%m/%d/%Y:%H:%M:%S") 9 | ``` 10 | -------------------------------------------------------------------------------- /docs/SPL_searches/distributed_system_tricks.md: -------------------------------------------------------------------------------- 1 | ### distributed system tricks Examples 2 | 3 | ``` 4 | # Query a lookup from a joined up search head to another SH using rest 5 | # https://www.splunk.com/blog/2017/06/08/syncing-lookups-using-pure-spl.html 6 | 7 | | rest splunk_server=sh1 /services/search/jobs/export search="| inputlookup demo_assets.csv" output_mode=csv | fields value 8 | ``` 9 | -------------------------------------------------------------------------------- /docs/SPL_searches/file_explorer_rest_api.md: -------------------------------------------------------------------------------- 1 | ### file explorer rest api Examples 2 | 3 | ``` 4 | # To use Splunk REST endpoint for exploring filesystem 5 | 6 | | REST splunk_server=local /services/apps/local 7 | | fields title 8 | | map maxsearches=1000 search="| REST splunk_server=local /services/admin/file-explorer/%252Fopt%252Fsplunk%252Fetc%252Fapps%252F$myapp$%252Flocal" 9 | | fields title 10 | 11 | ``` 12 | -------------------------------------------------------------------------------- /docs/SPL_searches/for_each.md: 
-------------------------------------------------------------------------------- 1 | ### for each Examples 2 | 3 | ``` 4 | # For_each tricky searches 5 | 6 | # https://answers.splunk.com/answers/519459/only-show-null-values-from-timechart-valuessource.html 7 | ... | foreach * [eval shouldAlert=shouldAlert+if(isnull('<>') OR '<>'="",1,0)] | where shouldAlert>0 8 | ``` 9 | -------------------------------------------------------------------------------- /docs/SPL_searches/forwarder_related.md: -------------------------------------------------------------------------------- 1 | ### forwarder related Examples 2 | 3 | ``` 4 | # To check forwarder sending data 5 | index=_internal host=*fwd* source=*metrics.log group=syslog* | timechart span=1m sum(tcp_KBps) by host 6 | index=_internal host=*fwd* source=*metrics.log group=thruput | timechart span=1m sum(kb) by host 7 | ``` 8 | -------------------------------------------------------------------------------- /docs/SPL_searches/incident_impact.md: -------------------------------------------------------------------------------- 1 | ### incident impact Examples 2 | 3 | ``` 4 | #Data loss based on license usage 5 | index=_internal source="*license_usage.log" type="Usage" earliest=-2h@h 6 | | bucket span=1h _time 7 | | strcat h ";" st key 8 | | stats sum(b) as totalBytes by _time,key 9 | | convert timeformat="%Y-%m-%d:%H:00" ctime(_time) as Date 10 | | xyseries key Date totalBytes 11 | ``` 12 | -------------------------------------------------------------------------------- /docs/SPL_searches/indexes_vs_roles_mapping.md: -------------------------------------------------------------------------------- 1 | ### indexes vs roles mapping Examples 2 | 3 | ``` 4 | # To show any missing role vs indexes 5 | | rest /services/data/indexes 6 | | stats count by title 7 | | rename title as index 8 | | fields index 9 | | join type=left index [ | rest /services/authorization/roles 10 | | table title, srchIndexesAllowed 11 | | mvexpand srchIndexesAllowed
12 | | rename srchIndexesAllowed as index 13 | ] 14 | 15 | 16 | # To find AD/LDAP to roles 17 | | rest /services/admin/LDAP-groups splunk_server=local 18 | | table title, roles 19 | ``` 20 | -------------------------------------------------------------------------------- /docs/SPL_searches/json_examples.md: -------------------------------------------------------------------------------- 1 | ## Creating JSON object from fields 2 | 3 | ``` 4 | | eval user_object = json_object("user", user_field, "hosts", host) 5 | .. 6 | | stats values(user_object) as user_object_valueslist 7 | ``` 8 | -------------------------------------------------------------------------------- /docs/SPL_searches/key-value.searches.md: -------------------------------------------------------------------------------- 1 | ### key-value.searches Examples 2 | 3 | ``` 4 | #https://answers.splunk.com/answers/523059/how-extract-fields-and-values-on-a-multivalue-fiel.html 5 | Your Base Search Here 6 | | rex max_match=0 "\"(?<kvp>[^=\"]+=[^=\"]+)\"" 7 | | table _time host kvp* 8 | | streamstats count AS serial 9 | | mvexpand kvp 10 | | rex field=kvp "^(?<kvp_key>[^=\"]+)=(?<kvp_value>[^=\"]+)$" 11 | | eval {kvp_key} = kvp_value 12 | | rename COMMENT AS "If you need to reconstitute original events, then add in the next line" 13 | | rename COMMENT AS "| fields - kvp* | stats values(_time) AS _time values(*) AS * BY serial" 14 | | table Name p_name Type status 15 | ``` 16 | -------------------------------------------------------------------------------- /docs/SPL_searches/kvstore.md: -------------------------------------------------------------------------------- 1 | ### KVstore objects & Sizes 2 | ``` 3 | | rest /services/server/introspection/kvstore/collectionstats 4 | | mvexpand data 5 | | spath input=data 6 | | rex field=ns "(?<App>.*)\.(?<Collection>.*)" 7 | | eval dbsize=round(size/1024/1024, 2) 8 | | eval indexsize=round(totalIndexSize/1024/1024, 2) 9 | | stats first(count) AS "Objects" first(nindexes) AS Accelerations first(indexsize) AS
"Acceleration_Size_MB" first(dbsize) AS "MongoDB_Collection_Size_MB" by App, Collection 10 | | sort - Objects 11 | | head 20 12 | ``` -------------------------------------------------------------------------------- /docs/SPL_searches/macros_savedsearches_api.md: -------------------------------------------------------------------------------- 1 | ### macros savedsearches api Examples 2 | 3 | ``` 4 | #Actual search 5 | earliest=-10d index=mycompany_support sourcetype=some_sourcetype report_date=$report_date$ 6 | |table host,sourcetype 7 | 8 | # to call as a macro 9 | `mycompany_macro_some_os_aggregated_24h("2020-06-24")` 10 | curl --silent -u ${user}:${pass} -k "https://${splunk_host}:9001/services/search/jobs/export" --data-urlencode search="search \`mycompany_macro_some_os_aggregated_24h(2020-06-24)\`" -d output_mode=${output_mode} 11 | 12 | 13 | # To call as a savedsearch 14 | # mycompany_ss_some_os_aggregated_24h 15 | curl --silent -u ${user}:${pass} -k "https://${splunk_host}:9001/services/search/jobs/export" --data-urlencode search=" savedsearch mycompany_ss_some_os_aggregated_24h" -d output_mode=${output_mode} 16 | ``` 17 | -------------------------------------------------------------------------------- /docs/SPL_searches/makeresults.md: -------------------------------------------------------------------------------- 1 | ### makeresults Examples 2 | 3 | ``` 4 | # to split data to multiple events 5 | 6 | | makeresults | eval _raw="08.02.2017 08:20:36.618 | 1752 | INFO | 10098094 | GW: session(90200371) status = INITIALIZING.; 7 | 08.02.2017 08:20:36.618 | 1752 | INFO | 10098094 | GW: session(90200371) status = pending_app_gw.; 8 | 08.02.2017 08:20:36.706 | 5344 | INFO | 10098094 | GW: session(90200371) status = ACTIVE.;"| 9 | rex max_match=0 field=_raw "(?<perLine>[^;]+)" | mvexpand perLine | table perLine|rex field=perLine "^[^(\n]*((?P\d+)" 10 | ``` 11 | --------------------------------------------------------------------------------
/docs/SPL_searches/metadata_searches.md: -------------------------------------------------------------------------------- 1 | ### metadata searches Examples 2 | 3 | ``` 4 | # source after uploading a csv 5 | | inputlookup myfile.csv | join type=left [| metadata type=sources index=my_index | rex field=source "\/(?<ip>[0-9\.]+)\/[a-z\.]\.log*" | eventstats max(lastTime) as latestSource | where lastTime=latestSource| fields source,ip, lastTime] | eval lastTime=strftime(lastTime,"%F-%T") 6 | ``` 7 | -------------------------------------------------------------------------------- /docs/SPL_searches/timeWrapSearches.md: -------------------------------------------------------------------------------- 1 | ### timeWrapSearches Examples 2 | 3 | ``` 4 | # To compare two values continuously 5 | index=_internal earliest=-8d@d latest=-7d@d| eval ReportKey="1weekBefore"| timechart span=10m count by ReportKey | append [index=_internal earliest=-2d@d latest=-1d@d| eval ReportKey="yesterday"| timechart span=10m count by ReportKey] 6 | 7 | 8 | # Faster 9 | |tstats count where index=foo earliest=-2d@d latest=-1d@d by _time span=5m 10 | | eval ReportKey="Baseline" | eval _time=_time+60*60*24 11 | | append [|tstats count where index=foo earliest=-1d@d latest=0d@d by _time span=5m 12 | | eval ReportKey="NewValue" ] 13 | | timechart span=5m sum(count) by ReportKey 14 | ``` 15 | -------------------------------------------------------------------------------- /docs/SPL_searches/tstats_searches.md: -------------------------------------------------------------------------------- 1 | ### tstats searches Examples 2 | 3 | ``` 4 | # Find hosts which were coming before but stopped logging (sliding scale) 5 | | tstats count WHERE index=* earliest=-3d latest=-2d NOT [| tstats count WHERE index=* earliest=-2d latest=-1d BY host | fields host,source] BY host | lookup dnslookup clientip as host OUTPUT clienthost as DST_RESOLVED 6 | ``` 7 |
-------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /docs/collections/Content_update_collection.md: -------------------------------------------------------------------------------- 1 | ## Find out DetectionID vs hash 2 | 3 | - SA-ContentVersioning, cms_metadata collection 4 | ``` 5 | | inputlookup cms_metadata_lookup 6 | ``` 7 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/bin/README: -------------------------------------------------------------------------------- 1 | This is where you put any scripts you want to add to this app. 2 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/default/app.conf: -------------------------------------------------------------------------------- 1 | # 2 | # Splunk app configuration file 3 | # 4 | 5 | [install] 6 | is_configured = 0 7 | 8 | [ui] 9 | is_visible = 1 10 | label = DA_dropdown 11 | 12 | [launcher] 13 | author = 14 | description = 15 | version = 1.0.0 16 | 17 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/default/data/ui/nav/default.xml: -------------------------------------------------------------------------------- 1 | 9 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/default/data/ui/views/README: -------------------------------------------------------------------------------- 1 | Add all the views that your app needs in this directory 2 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/local/app.conf: -------------------------------------------------------------------------------- 1 | [ui] 2 | 3 | [launcher] 4 
| version = 5 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/local/data/ui/nav/default.xml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/lookups/horseshoe_data.csv: -------------------------------------------------------------------------------- 1 | value 2 | 30 3 | 34 4 | 2 5 | 3 6 | 6 7 | 700 8 | 15 9 | 300 10 | 34 11 | 2 12 | 3 13 | 6 14 | 700 15 | 15 16 | 30034 17 | 2 18 | 3 19 | 6 20 | 700 21 | 15 22 | 30034 23 | 2 24 | 3 25 | 6 26 | 700 27 | 15 28 | 30034 29 | 2 30 | 3 31 | 6 32 | 700 33 | 15 34 | 30034 35 | 2 36 | 3 37 | 6 38 | 700 39 | 15 40 | 30034 41 | 2 42 | 3 43 | 6 44 | 700 45 | 15 46 | 30034 47 | 2 48 | 3 49 | 6 50 | 700 51 | 15 52 | 30034 53 | 2 54 | 3 55 | 6 56 | 700 57 | 15 58 | 300 59 | 30034 60 | 2 61 | 3 62 | 6 63 | 700 64 | 15 65 | 30034 66 | 2 67 | 3 68 | 6 69 | 700 70 | 15 71 | 30034 72 | 2 73 | 3 74 | 6 75 | 700 76 | 15 77 | 30034 78 | 2 79 | 3 80 | 6 81 | 700 82 | 15 83 | 30034 84 | 2 85 | 3 86 | 6 87 | 700 88 | 15 89 | 30034 90 | 2 91 | 3 92 | 6 93 | 700 94 | 15 95 | 30034 96 | 2 97 | 3 98 | 6 99 | 700 100 | 15 101 | 300 -------------------------------------------------------------------------------- /docs/da/DA_dropdown/lookups/sales_data.csv: -------------------------------------------------------------------------------- 1 | metric,value 2 | Revenue,1000 3 | Distinct Users,100 4 | Revenue,700 5 | Distinct Users,200 6 | Distinct Users,600 7 | Distinct Sessions,2000 8 | Revenue,300 9 | Distinct Sessions,3500 10 | Distinct Users,100 -------------------------------------------------------------------------------- /docs/da/DA_dropdown/metadata/default.meta: -------------------------------------------------------------------------------- 1 | 2 | # Application-level permissions 3 | 4 | [] 5 | access = read : [ * ], write : [ admin, power ] 6 | 7 | 
### EVENT TYPES 8 | 9 | [eventtypes] 10 | export = system 11 | 12 | 13 | ### PROPS 14 | 15 | [props] 16 | export = system 17 | 18 | 19 | ### TRANSFORMS 20 | 21 | [transforms] 22 | export = system 23 | 24 | 25 | ### LOOKUPS 26 | 27 | [lookups] 28 | export = system 29 | 30 | 31 | ### VIEWSTATES: even normal users should be able to create shared viewstates 32 | 33 | [viewstates] 34 | access = read : [ * ], write : [ * ] 35 | export = system 36 | -------------------------------------------------------------------------------- /docs/da/DA_dropdown/metadata/local.meta: -------------------------------------------------------------------------------- 1 | [app/ui] 2 | version = 9.0.4.1 3 | modtime = 1683723447.815621000 4 | 5 | [app/launcher] 6 | version = 9.0.4.1 7 | modtime = 1683723447.816407000 8 | 9 | [views/asset_db] 10 | owner = admin 11 | version = 9.0.4.1 12 | modtime = 1683730089.718280000 13 | 14 | [lookups/horseshoe_data.csv] 15 | access = read : [ * ] 16 | export = system 17 | owner = admin 18 | version = 9.0.4.1 19 | modtime = 1683723611.765718000 20 | 21 | [lookups/sales_data.csv] 22 | access = read : [ * ] 23 | export = system 24 | owner = admin 25 | version = 9.0.4.1 26 | modtime = 1683723617.381000000 27 | 28 | [nav/default] 29 | version = 9.0.4.1 30 | modtime = 1683730733.488204000 31 | 32 | [views/studio_dropdown] 33 | owner = admin 34 | version = 9.0.4.1 35 | modtime = 1683731386.095989000 36 | -------------------------------------------------------------------------------- /docs/da/README.md: -------------------------------------------------------------------------------- 1 | ## Steps 2 | 3 | - Server 4 | ``` 5 | appName="DA_dropdown" 6 | cp -pr /opt/splunk/etc/apps/$appName /tmp/ && tar -czf /tmp/${appName}.tgz -C /tmp/ ${appName} && chmod 777 /tmp/${appName}.tgz 7 | ``` 8 | 9 | 10 | - Client 11 | ``` 12 | appName="DA_dropdown" 13 | destDir="docs/da" 14 | fname="${destDir}/${appName}.tgz" 15 | 16 | scp myuser@Server:/tmp/${appName}.tgz $fname 17 | tar 
-xzf $fname -C $destDir && rm $fname 18 | ``` 19 | 20 | 21 | ## Dashboard Studio link within Splunk 22 | ``` 23 | /app/splunk-dashboard-studio 24 | ``` -------------------------------------------------------------------------------- /docs/gist.txt: -------------------------------------------------------------------------------- 1 | https://gist.github.com/getkub/9b69d1ab776425763859974e0a88142a 2 | -------------------------------------------------------------------------------- /docs/logging_standards.md: -------------------------------------------------------------------------------- 1 | ## Minimal logging 2 | - who logged in (account or User ID); 3 | - what they did (type of event/activity); 4 | - when they logged in (date/time); 5 | - where the login occurred (resource/source of the event such as location, IP address, terminal ID or other means of identification); and 6 | - why the login occurred (a link to the specific ticket that necessitated the login). 7 | 8 | Security code of practice 9 | -------------------------------------------------------------------------------- /docs/rest/alerts_actions_savedsearches.md: -------------------------------------------------------------------------------- 1 | ## Find all alert actions 2 | 3 | ``` 4 | | rest /services/alerts/alert_actions 5 | | table title, command, eai:acl.app, description 6 | 7 | ``` 8 | 9 | -------------------------------------------------------------------------------- /docs/splunk_tips/answers/goodAnswers.txt: -------------------------------------------------------------------------------- 1 | https://answers.splunk.com/answers/386488/regex-in-lookuptable.html#answer-387536 2 | -------------------------------------------------------------------------------- /docs/splunk_tips/api/_internal.txt: -------------------------------------------------------------------------------- 1 | # Shows the current status of file being read 2 | /opt/splunk/bin/splunk _internal call 
/services/admin/inputstatus/TailingProcessor:FileStatus 3 | -------------------------------------------------------------------------------- /docs/splunk_tips/api/key_api_commands.txt: -------------------------------------------------------------------------------- 1 | user="admin" 2 | pass="changeme" 3 | 4 | # Destructive resync via REST 5 | curl -k -u ${user}:${pass} https://127.0.0.1:8089/services/replication/configuration/commits 6 | -------------------------------------------------------------------------------- /docs/splunk_tips/apps/A_prod_ldap_auth/local/authorize.conf: -------------------------------------------------------------------------------- 1 | # role mapping and custom privileges for the role 2 | [role_myCompany_devOps] 3 | rtsearch = enabled 4 | schedule_search = enabled 5 | schedule_rtsearch = enabled 6 | srchJobsQuota = 50 7 | srchMaxTime = 8640000 8 | srchFilter = * 9 | srchFilterSelecting = true 10 | search = enabled 11 | srchIndexesAllowed = devops;os 12 | srchIndexesDefault = devops 13 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/clm.server.conf: -------------------------------------------------------------------------------- 1 | # to make into a cluster master 2 | [clustering] 3 | mode=master 4 | pass4SymmKey = mypass 5 | multisite = true 6 | replication_factor =2 7 | search_factor = 2 8 | site_replication_factor = origin:1, total:2 9 | site_search_factor = origin:1, total:2 10 | available_sites = site1, site2 11 | cluster_label = mycompany_cluster1 12 | 13 | [shclustering] 14 | pass4SymmKey = mypass_shclustering 15 | shcluster_label = mycompany_shcluster 16 | 17 | [indexer_discovery] 18 | pass4SymmKey = somepassagain 19 | polling_rate = 300 20 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/genericTimeLineBreak.configs: -------------------------------------------------------------------------------- 1 | #Write this 
for all types of generic datasets 2 | [mydatatype] 3 | SHOULD_LINEMERGE = false 4 | TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%6N%:z 5 | TIME_PREFIX = ^ 6 | LINE_BREAKER = ([\r\n]+) 7 | MAX_TIMESTAMP_LOOKAHEAD = 30 8 | TRANSFORMS-host = xxxxxxxx 9 | TRUNCATE = 500000 10 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/indexes.conf: -------------------------------------------------------------------------------- 1 | # Put this into separate app 2 | [default] 3 | 4 | #VOLUME settings 5 | [volume:home] 6 | path = /mnt/hot 7 | maxVolumeDataSizeMB = 512000 8 | 9 | [volume:cold] 10 | path = /mnt/cold 11 | # 7TB 12 | maxVolumeDataSizeMB = 7200000 13 | 14 | # Indexes 15 | [main] 16 | homePath = volume:home/defaultdb/db 17 | coldPath = volume:cold/defaultdb/db 18 | thawedPath = $SPLUNK_DB/defaultdb/thaweddb 19 | tstatsHomePath = volume:home/defaultdb/datamodel_summary 20 | # Retention period 18 months 21 | frozenTimePeriodInSecs = 46656000 22 | 23 | #### 24 | # Similarly write for every indexes 25 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/sample_app/local/deploymentclient.conf: -------------------------------------------------------------------------------- 1 | [deployment-client] 2 | 3 | [target-broker:deploymentServer] 4 | targetUri= 10.10.26.5:8089 5 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/sample_app/local/inputs.conf: -------------------------------------------------------------------------------- 1 | [monitor://c:\] 2 | disabled=false 3 | recursive = true 4 | sourcetype=mysourcetype 5 | whitelist = \.log$ 6 | -------------------------------------------------------------------------------- /docs/splunk_tips/configs/sample_app/local/outputs.conf: -------------------------------------------------------------------------------- 1 | [tcpout:group1] 2 | server=10.1.1.197:9997 3 |
-------------------------------------------------------------------------------- /docs/splunk_tips/configs/sample_app/local/serverclass.conf: -------------------------------------------------------------------------------- 1 | [serverClass:winforwarders] 2 | machineTypesFilter = windows* 3 | whitelist.0 = * 4 | [serverClass:winforwarders:app:sample_app] 5 | restartSplunkd = true 6 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/errors/authErrors.txt: -------------------------------------------------------------------------------- 1 | #### SSL errors. Mostly happen when admin forget to copy passwd and secret 2 | 3 | ERROR SSLCommon - Can't read key file /opt/splunk/etc/apps/app_name/bin/auth/prod_splunk-input.pem errno=101077092 error:06065064:digital envelope routines:EVP_DecryptFinal_ex:bad decrypt. 4 | ERROR TcpInputConfig - SSL server certificate not found, or password is wrong - SSL ports will not be opened 5 | ERROR KVStoreConfigurationProvider - Could not get pint from mongod. 6 | 7 | 8 | Fix is to 9 | 1. Remove password from "inputs.conf" of tls_client 10 | 2. Remove sslKeysfilePassword and pass4SymmKey from system/local/server.conf 11 | 3. Remove password from "outputs.conf" of tls_client 12 | ####################### 13 | 14 | # Denied Session token for user: splunk-system-user 15 | Normally happens after Search Head rebuild. 
Has to re-authenticate via Cluster Master with admin user and password 16 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/errors/debug_steps.txt: -------------------------------------------------------------------------------- 1 | # Use pstack linux utility to dump the pid details 2 | DATA_DIR=$HOME/splunk_data 3 | PID=$(head -n1 $SPLUNK_HOME/var/run/splunk/splunkd.pid) 4 | nohup bash -c "while ps -p $PID > /dev/null; do date --rfc-3339=ns >> $DATA_DIR/pstack.pid; sleep 5; done " & 5 | 6 | $SPLUNK_HOME/bin/splunk diag 7 | 8 | ============= 9 | XZ_OPT=-3 tar cJF jemmalloc-$(hostname)-$(date +%Y%m%d%H%M).tar.gz $DATA_DIR 10 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/a_toDo.list: -------------------------------------------------------------------------------- 1 | para virtualisation - two instances, 2 | dmc configuration 3 | visualisation - css in another app 4 | kvstore - app 5 | api - java app, js app 6 | cim - datamodels 7 | hunk - 8 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/btool.txt: -------------------------------------------------------------------------------- 1 | # to get information of filesystem contents 2 | | REST splunk_server=local /services/apps/local/ | fields title| rename title as myapp | map maxsearches=1000 search="| rest splunk_server=local /services/admin/file-explorer/%252Fopt%252Fsplunk%252Fetc%252Fapps%252F$myapp$%252Flocal" | fields title 3 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/configure_online.txt: -------------------------------------------------------------------------------- 1 | # Configure Splunk online 2 | # sizing, configurator 3 | 4 | https://splunk-sizing.appspot.com/ 5 | -------------------------------------------------------------------------------- 
/docs/splunk_tips/others/notes/creditCard.txt: -------------------------------------------------------------------------------- 1 | [pan] 2 | DEST_KEY = _MetaData:Index 3 | REGEX = (?:4[0-9]{12}(?:[0-9]{3})? 4 | | 5[1-5][0-9]{14} # MasterCard 5 | | 3[47][0-9]{13} # American Express 6 | | 3(?:0[0-5]|[68][0-9])[0-9]{11} # Diners Club 7 | | 6(?:011|5[0-9]{2})[0-9]{12} # Discover 8 | | (?:2131|1800|35\d{3})\d{11} # JCB 9 | ) 10 | FORMAT = CC_pan 11 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/datamodel.txt: -------------------------------------------------------------------------------- 1 | # Datamodel basic command 2 | |datamodel "Email" "All_Email" search | search ... 3 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/diag.txt: -------------------------------------------------------------------------------- 1 | /opt/splunk/bin/splunk diag --exclude "*/passwd" --exclude "*/*.log.*" --log-age=1 2 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/index.txt: -------------------------------------------------------------------------------- 1 | An index is a collection of databases, which are subdirectories located in $SPLUNK_HOME/var/lib/splunk. 2 | Indexes consist of two types of files: rawdata files and index files.
3 | 4 | $SPLUNK_HOME/etc/splunk-launch.conf # $SPLUNK_DB=/foo/bar 5 | 6 | splunk add index foo -homePath /your/path/foo/db -coldPath /your/path/foo/colddb -thawedPath /your/path/foo/thawedDb 7 | # or modify indexes.conf 8 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/nullqueue.txt: -------------------------------------------------------------------------------- 1 | # To make data null and not to index 2 | 3 | # props.conf 4 | [nullqueue] 5 | TRANSFORMS-t1=send_to_nullqueue 6 | 7 | # transforms.conf 8 | [send_to_nullqueue] 9 | DEST_KEY= queue 10 | FORMAT = nullQueue 11 | REGEX = . 12 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/rest.txt: -------------------------------------------------------------------------------- 1 | # On Cluster Master: to check any buckets are primary within a server 2 | | REST splunk_server=local /services/cluster/master/buckets/ 3 | 4 | # Check any buckets being replicated to/from after enabling maintenance mode 5 | index=_internal sourcetype=splunkd (component=S2S* OR component=BucketReplicator) 6 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/search.md: -------------------------------------------------------------------------------- 1 | #### stats,eventstats,streamstats 2 | Like eventstats, streamstats aggregates the statistics to the original data, so all of the original data is accessible for further calculations.
If time is involved better to use streamstats 3 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/server.conf.txt: -------------------------------------------------------------------------------- 1 | # --------------------------------------------------- 2 | # For SH clustering to manually enable replication files 3 | [shclustering] 4 | conf_replication_include.identities = true 5 | 6 | # identities.conf should be a file in the "local" directory 7 | # --------------------------------------------------- 8 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/splunk_release.csv: -------------------------------------------------------------------------------- 1 | version,date 2 | 6.5.1,November 21: 2016 3 | 6.5.0,September 27: 2016 4 | 6.4.5,December 15: 2016 5 | 6.4.4,October 3: 2016 6 | 6.4.3,August 22: 2016 7 | 6.4.2,July 11: 2016 8 | 6.4.1,May 18: 2016 9 | 6.3.8,October 27: 2016 10 | 6.3.7,September 7: 2016 11 | 6.3.6,July 28: 2016 12 | 6.3.5,June 6: 2016 13 | 6.3.4,April 20: 2016 14 | 6.3.0,September 22: 2015 15 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/thingstoDo.md: -------------------------------------------------------------------------------- 1 | # Enterprise Deploy - Things to Remember 2 | 3 | * Index locations 4 | * Rules - Install or Uninstall rules 5 | * Deployment Server - serverclass.conf 6 | * Secure Splunk - Certificates, SSL, Fowarder SSL etc.. 7 | * Search Head Clustering reqd? 
8 | * Base_SI 9 | * outputs.conf - needs to send to other locations 10 | * inputs.conf - main forwarder input 11 | * server.conf - [license] master_uri with port 12 | * Base_SH 13 | * web.conf - Front page notifications, ports 14 | * authentication.conf - LDAP/AD integration 15 | * Base_SHARE (100% sharable KIs) 16 | * transforms.conf - Sharable lookups 17 | * 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/transforming2.txt: -------------------------------------------------------------------------------- 1 | # ============= Extract, EVAL, FIELDALIAS,REPORT =============# 2 | EXTRACT-myProductType=(?[\w]+)\s(?[\w]+)\s 3 | 4 | # From above extraction 5 | EXTRACT-userName=(?(?[^\\\\]+)\\\\)?(?[\w]+)$ in domain_user 6 | 7 | EVAL-action = case(vendor_action="modified", "mod", vendor_action="created", "crc") 8 | EVAL-catego = if(vendor_action="File*", "file", "unknown") 9 | EVAL-vendor = "newVendor" 10 | 11 | FIELDALIAS-dvc = host as dvc 12 | FIELDALIAS-file_path = object as file_path 13 | 14 | LOOKUP-vendor_info = vendor_info_lookup sourcetype OUTPUT vendor,product 15 | 16 | REPORT-auto_kv_abc = auto_kv_abc # in props.conf 17 | [auto_kv_abc] # transforms.conf 18 | DELIMS = " " 19 | 20 | # ====================================================== 21 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/tstats_examples.txt: -------------------------------------------------------------------------------- 1 | | tstats count where index=* by host,sourcetype 2 | -------------------------------------------------------------------------------- /docs/splunk_tips/others/notes/xml.props.example.txt: -------------------------------------------------------------------------------- 1 | #https://answers.splunk.com/answers/187195/how-to-add-and-parse-xml-data-in-splunk.html 2 | Sample XML 3 | 4 | 5 | Jason 6 | 7 | 
Good pic! 8 | Happy birthday 9 | 10 | 11 | 12 | 13 | #props.conf 14 | [yoursourcetype] 15 | DATETIME_CONFIG = CURRENT 16 | KV_MODE = xml 17 | LINE_BREAKER = () 18 | MUST_BREAK_AFTER = \ 19 | NO_BINARY_CHECK = 1 20 | SHOULD_LINEMERGE = false 21 | TRUNCATE = 0 22 | pulldown_type = 1 23 | FIELDALIAS-rootfields = photo.owner as Owner photo{@id} as PhotoID photo{@title} as PhotoTitle photo.owner{@id} as PhotoOwnerID photo{@format} as PhotoFormat photo.comments.comment{@ownerid} as CommentOwnerID photo.comments.comment as Comment 24 | -------------------------------------------------------------------------------- /docs/splunk_tips/stanza/single_stanza_extract.md: -------------------------------------------------------------------------------- 1 | ## Extracting Splunk stanza using btool 2 | eg of extracting one stanza (approximately) 3 | 4 | ``` 5 | /opt/splunk/bin/splunk cmd btool savedsearches list | sed -n -e '/^\[DMC Alert - Near Critical Disk Usage/,/^\[/p' | sed '$ d' 6 | ``` 7 | -------------------------------------------------------------------------------- /sampleData/Connectors_vendor_list.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getkub/SplunkScriplets/89fb6c5f3552c3480f957baefc63705857f4f619/sampleData/Connectors_vendor_list.pdf -------------------------------------------------------------------------------- /sampleData/dataResources.txt: -------------------------------------------------------------------------------- 1 | http://ossec-docs.readthedocs.io/en/latest/log_samples/# 2 | 3 | # Various parsing formats 4 | https://docs.microsoft.com/en-us/azure/sentinel/data-connectors-reference 5 | https://techcommunity.microsoft.com/t5/microsoft-sentinel-blog/azure-sentinel-the-connectors-grand-cef-syslog-direct-agent/ba-p/803891 6 | 7 | ## Quality Data 8 | https://github.com/elastic/integrations/blob/main/packages/cisco_asa/data_stream/log/sample_event.json 9 | 
https://github.com/elastic/integrations/tree/main/packages/apache/data_stream/access/_dev/test/pipeline 10 | -------------------------------------------------------------------------------- /sampleData/data_process/dob_generic.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getkub/SplunkScriplets/89fb6c5f3552c3480f957baefc63705857f4f619/sampleData/data_process/dob_generic.xlsx -------------------------------------------------------------------------------- /sampleData/raw/citrix/adc.txt: -------------------------------------------------------------------------------- 1 | https://docs.netscaler.com/en-us/citrix-adc/current-release/application-firewall/logs.html 2 | -------------------------------------------------------------------------------- /sampleData/raw/csv/basic_dataset.csv: -------------------------------------------------------------------------------- 1 | empid,department,salary,organisation 2 | bob,IT,50000,org1 3 | joe,IT,52000,org1 4 | foo,HR,45000,org1 5 | bar,FIN,51000,org1 6 | bob2,IT,50000,org2 7 | joe2,IT,52000,org2 8 | foo2,HR,45000,org2 9 | bar2,FIN,51000,org2 10 | -------------------------------------------------------------------------------- /sampleData/raw/csv/prices.csv: -------------------------------------------------------------------------------- 1 | productId,product_name,price,sale_price,Code 2 | DB-SG-G01,Mediocre Kingdoms,24.99,19.99,A 3 | DC-SG-G02,Dream Crusher,39.99,24.99,B 4 | FS-SG-G03,Final Sequel,24.99,16.99,C 5 | WC-SH-G04,World of Cheese,24.99,19.99,D 6 | WC-SH-T02,World of Cheese Tee,9.99,6.99,E 7 | PZ-SG-G05,Puppies vs. Zombies,4.99,1.99,F 8 | CU-PG-G06,Curling 2014,19.99,16.99,G 9 | MB-AG-G07,Manganiello Bros.,39.99,24.99,H 10 | MB-AG-T01,Manganiello Bros. 
Tee,9.99,6.99,I 11 | FI-AG-G08,Orvil the Wolverine,39.99,24.99,J 12 | BS-AG-G09,Benign Space Debris,24.99,19.99,K 13 | SC-MG-G10,SIM Cubicle,19.99,16.99,L 14 | WC-SH-A01,Holy Blade of Gouda,5.99,2.99,M 15 | WC-SH-A02,Fire Resistance Suit of Provolone,3.99,1.99,N 16 | SF-BVS-G01,Grand Theft Scooter,26.99,21.99,O 17 | SF-BVS-01,Pony Run,49.99,41.99,P 18 | 19 | -------------------------------------------------------------------------------- /sampleData/raw/json/http_uri.txt: -------------------------------------------------------------------------------- 1 | reqres.in/api/users 2 | -------------------------------------------------------------------------------- /sampleData/raw/json/sample2.json: -------------------------------------------------------------------------------- 1 | { 2 | "timestamp": 1234567890, 3 | "report": "Age Report", 4 | "results": [ 5 | { "name": "John", "age": 43, "city": "TownA" }, 6 | { "name": "Joe", "age": 10, "city": "TownB" } 7 | ] 8 | } -------------------------------------------------------------------------------- /sampleData/raw/json/sample_array.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "host": "${MY_HOST_AMERICAS}", 4 | "port": 443, 5 | "details": { 6 | "country": "US", 7 | "clientAuthEnabled": "false", 8 | "securityMode": "tls", 9 | "enabled": "true" 10 | } 11 | }, 12 | { 13 | "host": "${MY_HOST_EUROPE}", 14 | "port": 22, 15 | "details": { 16 | "country": "UK", 17 | "clientAuthEnabled": "false", 18 | "securityMode": "none", 19 | "enabled": "true" 20 | } 21 | }, 22 | { 23 | "host": "${MY_HOST_EUROPE}", 24 | "port": 8000, 25 | "details": { 26 | "country": "US", 27 | "clientAuthEnabled": "false", 28 | "securityMode": "https", 29 | "enabled": "true" 30 | } 31 | } 32 | ] 33 | -------------------------------------------------------------------------------- /sampleData/raw/json/simple_1message.json: -------------------------------------------------------------------------------- 1 
| { 2 | "book": { 3 | "name": "Harry Potter and the Goblet of Fire", 4 | "author": "J. K. Rowling", 5 | "year": 2000, 6 | "characters": ["Harry Potter", "Hermione Granger", "Ron Weasley"], 7 | "genre": "Fantasy Fiction", 8 | "price": { 9 | "paperback": "$10.40", "hardcover": "$20.32", "kindle": "$4.11" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /sampleData/raw/json/url_based.md: -------------------------------------------------------------------------------- 1 | - https://pkgstore.datahub.io/core/world-cities/world-cities_json/data/5b3dd46ad10990bca47b04b4739a02ba/world-cities_json.json 2 | - 3 | -------------------------------------------------------------------------------- /sampleData/raw/os/linux/auditd.log: -------------------------------------------------------------------------------- 1 | type=SYSCALL msg=audit(1522927552.749:917): arch=c000003e syscall=2 success=yes exit=3 a0=7ffe2ce05793 a1=0 a2=1fffffffffff0000 a3=7ffe2ce043a0 items=1 ppid=2906 pid=4668 auid=1000 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=pts4 ses=1 comm="cat" exe="/bin/cat" key="passwd" 2 | type=CWD msg=audit(1522927552.749:917): cwd="/root" 3 | type=PATH msg=audit(1522927552.749:917): item=0 name="/etc/passwd" inode=3147443 dev=08:01 mode=0100644 ouid=0 ogid=0 rdev=00:00 nametype=NORMAL 4 | type=UNKNOWN[1327] msg=audit(1522927552.749:917): proctitle=636174002F6574632F706173737764 5 | -------------------------------------------------------------------------------- /sampleData/raw/os/windows/ADImporter.md: -------------------------------------------------------------------------------- 1 | - [AD importer](https://github.com/curi0usJack/ADImporter) 2 | -------------------------------------------------------------------------------- /sampleData/raw/yaml/employee_sample1.yaml: -------------------------------------------------------------------------------- 1 | department: IT 2 | employees: 3 | - empName: emp1 4 
| position: manager 5 | reportees: 6 | - emp11 7 | - emp12 8 | - empName: emp2 9 | position: manager 10 | reportees: 11 | - emp22 12 | - emp24 13 | 14 | -------------------------------------------------------------------------------- /thirdparty/.conf_materials.txt: -------------------------------------------------------------------------------- 1 | # How to download all pdf's in one shot 2 | curl --silent http://conf.splunk.com/sessions/2017-sessions.html 2>&1 | egrep -i speaker-file | egrep pdf| wget -B http://conf.splunk.com -F -i - --continue 3 | -------------------------------------------------------------------------------- /thirdparty/android/adb.md: -------------------------------------------------------------------------------- 1 | ## MacOS 2 | 3 | ``` 4 | brew install android-platform-tools 5 | ``` 6 | 7 | ## List/View 8 | ``` 9 | fName="/storage/self/primary/Audiobooks/AB*.mp3" 10 | adb shell ls -l $fName 11 | ``` 12 | 13 | ## Push & Pull (copy) 14 | ``` 15 | src="~/Documents/data/music.mp3" 16 | dest="/storage/self/primary/Audiobooks/" 17 | adb push $src $dest 18 | ``` 19 | 20 | ## Delete 21 | ``` 22 | fName="/storage/self/primary/Audiobooks/AB*.mp3" 23 | adb shell rm $fName 24 | ``` 25 | -------------------------------------------------------------------------------- /thirdparty/android/tips.txt: -------------------------------------------------------------------------------- 1 | https://github.com/0x192/universal-android-debloater 2 | -------------------------------------------------------------------------------- /thirdparty/ansible/README.md: -------------------------------------------------------------------------------- 1 | ## Set of Ansible Samples 2 | - Configs/CSV files close to the example 3 | - Map/read_csv/iterate examples 4 | -------------------------------------------------------------------------------- /thirdparty/ansible/ansible_approaches.md: -------------------------------------------------------------------------------- 1 | ## Block 
concept 2 | - To do multiple tasks with same `when` condition or similar 3 | 4 | ``` 5 | - name: Main Task 6 | block: 7 | - name: Get subtask1 8 | set_fact: 9 | task1: "subtask1" 10 | - name: Get subtask2 11 | set_fact: 12 | task2: "subtask2" 13 | - name: Get subtask3 14 | include_tasks: "my-common-task.yaml" 15 | loop: "{{my_list | list }}" 16 | loop_control: 17 | loop_var: fruit 18 | when: env_input is defined 19 | ``` 20 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/01_basic_ansible_facts.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # ansible-playbook -i localhost, --connection=local 01_basic_ansible_facts.yml 3 | - name: Playbook 4 | hosts: all 5 | gather_facts: yes 6 | tasks: 7 | - name: Test message 8 | debug: 9 | var: ansible_facts 10 | verbosity: 0 11 | - name: Test message 12 | debug: 13 | var: hostvars[inventory_hostname] 14 | - name: Test message 15 | debug: 16 | msg: 17 | - "ansible_user_id": "{{ansible_user_id}}" 18 | - "ansible_env.USER": "{{ansible_env.USER}}" 19 | - "ansible_date_time.iso8601": "{{ansible_date_time.iso8601}}" 20 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/01_verybasic.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Playbook 3 | hosts: localhost 4 | gather_facts: no 5 | tasks: 6 | - name: Test message 7 | debug: 8 | msg: "test message" 9 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/02_assert_version_comparison.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Compare versions using builtin" 3 | hosts: localhost 4 | gather_facts: no 5 | vars: 6 | mylist: 7 | installed_version: "10.52.00" 8 | latest_version: "10.52.00.01" 9 | tasks: 10 | - name: "Comparing semver versions" 11 | 
assert: 12 | that: 13 | - " '10.52.0' is version( '10.52.1', 'lt', version_type='semver')" 14 | # - name: "Comparing semver versions" 15 | # assert: "Convert vars to json" 16 | # that: 17 | # - "mylist.installed_version is version(mylist.installed_version, 'lt', version_type='semver')" 18 | - name: "Regex version numbers out" 19 | debug: 20 | msg: 21 | - "{{ mylist.latest_version |regex_replace('^(\\d+\\..\\d+)\\..+', '\\1') }}" -------------------------------------------------------------------------------- /thirdparty/ansible/basic/02_create_json.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Create JSON from vars" 3 | hosts: localhost 4 | gather_facts: no 5 | vars: 6 | mylist: 7 | type: fruit 8 | ttl: 60m 9 | country: US 10 | tasks: 11 | - name: "Convert vars to json" 12 | debug: 13 | var: mylist 14 | - name: "Convert vars to json" 15 | debug: 16 | var: mylist| to_json 17 | 18 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/04_template_and_file_lookup.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Read a template as file and put into another template 3 | hosts: localhost 4 | gather_facts: no 5 | vars: 6 | name: "bob" 7 | email: "bob@bob.com" 8 | phone: "12345" 9 | 10 | tasks: 11 | - name: "Render contact_details template to temporary file" 12 | template: 13 | src: "./templates/04_contact_details.j2" 14 | dest: "/tmp/contact_details_tmp.md.j2" 15 | 16 | - name: "Update final template" 17 | lineinfile: 18 | path: "/tmp/main_callout.md" 19 | line: "{{ lookup('template', '/tmp/contact_details_tmp.md.j2') }}" 20 | create: true 21 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/04_template_within_jinja.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Read a template and Put 
into another template 3 | hosts: localhost 4 | gather_facts: no 5 | tasks: 6 | - name: "Get Values into Individual Vars" 7 | set_fact: 8 | "{{ item | basename | regex_replace('.j2') | regex_replace('04_vars-') }}": "{{ lookup('file', item) }}" 9 | with_fileglob: 10 | - "./templates/04_vars-*.j2" 11 | 12 | - debug: 13 | var: email 14 | - debug: 15 | var: name 16 | - debug: 17 | var: phone 18 | - name: "show templating results" 19 | set_fact: 20 | four_contact_details: "{{ lookup('template', './templates/04_contact_details.j2') }}" 21 | 22 | - name: "First pass" 23 | template: 24 | src: "./templates/04_jinja_in_jinja.j2" 25 | dest: "/tmp/04_jinja_in_jinja.md" 26 | mode: preserve -------------------------------------------------------------------------------- /thirdparty/ansible/basic/04_template_within_template.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Read a template and Put into another template 3 | hosts: localhost 4 | gather_facts: no 5 | tasks: 6 | - name: "show templating results" 7 | set_fact: 8 | contact_details: "{{ lookup('template', './templates/04_contact_details.j2') }}" 9 | vars: 10 | name: "bob" 11 | email: "bob@bob.com" 12 | phone: "12345" 13 | 14 | - name: "Update final template" 15 | template: 16 | src: "./templates/04_main_callout.md.j2" 17 | dest: "/tmp/main_callout.md" 18 | mode: preserve 19 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/07_gitlab_playbook.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: URI playbook 3 | hosts: localhost 4 | gather_facts: no 5 | vars_files: 6 | - group_vars/07_github_vars.yml 7 | roles: 8 | - github 9 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/08_list_to_dict.yml: -------------------------------------------------------------------------------- 1 | - hosts: localhost 2 
| gather_facts: no 3 | vars: 4 | l1: 5 | - World|North_America|USA|Texas 6 | - World|Europe|UK|England|London 7 | - World|Australia 8 | tasks: 9 | - debug: 10 | var: _out|from_yaml 11 | loop: "{{ l1 }}" 12 | vars: 13 | _list: "{{ item.split('|') }}" 14 | _len: "{{ _list|length }}" 15 | _out: | 16 | {% for i in range(_len|int) %} 17 | {{ 'obj_'|indent(width=(i*4),first=true) }}{{ i+1 }}: 18 | {{ _list[i]| indent(width=(i*4), indentfirst=True) }}{{ ":" if not loop.last else "" }} 19 | {% endfor %} -------------------------------------------------------------------------------- /thirdparty/ansible/basic/10_role_based_example.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Trigger play with roles" 3 | hosts: localhost 4 | gather_facts: no 5 | roles: 6 | - role: git 7 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/11_role_call_another_role.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "my_main_play" 3 | hosts: localhost 4 | gather_facts: no 5 | roles: 6 | # - { role: 'roles/distributor_role' } 7 | - distributor_role 8 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/14_with_nested_dict_list.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Network Getting Started First Playbook 4 | gather_facts: false 5 | hosts: localhost 6 | tasks: 7 | - name: "Updatetemplate with values" 8 | debug: 9 | msg: "{{item.0}}, {{item.1}}, {{item.2}}" 10 | with_nested: 11 | - "{{ os }}" 12 | - "{{ locations | dictsort }}" 13 | vars: 14 | locations: 15 | 01_inputs: "01_inputs-principal" 16 | 02_filter: "02_filter-principal" 17 | 03_outputs: "03_outputs-principal" 18 | os: 19 | - windows 20 | - linux 21 | -------------------------------------------------------------------------------- 
/thirdparty/ansible/basic/15_fileglob.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Fileglob and filters" 3 | hosts: localhost 4 | gather_facts: no 5 | tasks: 6 | - name: "Find list of files by pattern" 7 | find: 8 | paths: ./configs 9 | patterns: "*.csv" 10 | file_type: file 11 | register: files_list 12 | - name: "Print list" 13 | debug: 14 | msg: "{{ filebase_noext }}" 15 | loop: "{{files_list.files|map(attribute='path')| list }}" 16 | vars: 17 | - filebase: "{{item|basename}}" 18 | - filebase_noext: "{{ filebase.split('.')[:-1].0 }}" 19 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/16_line_in_file.conf: -------------------------------------------------------------------------------- 1 | - name: disable SSH PermitRootLogin FALSE 2 | become: true 3 | lineinfile: 4 | path: "/etc/ssh/sshd_config" 5 | regexp: '^PermitRootLogin\s' 6 | line: 'PermitRootLogin no' 7 | backup: yes 8 | when: 9 | - ansible_distribution_major_version == "8" 10 | 11 | - name: revert SSHD KeepAlive to default SSH connection closures 12 | become: true 13 | lineinfile: 14 | path: "/etc/ssh/sshd_config" 15 | regexp: '^ClientAliveInterval\s' 16 | line: 'ClientAliveInterval 5m' 17 | notify: 18 | - reload sshd 19 | 20 | - name: reload sshd 21 | become: true 22 | service: 23 | name: sshd 24 | state: reloaded 25 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/17_template_selectattr_macro.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Get Variables from YML" 3 | hosts: localhost 4 | gather_facts: no 5 | tasks: 6 | - name: "Read as a list" 7 | read_csv: 8 | path: ./configs/13_fruits.csv 9 | register: fruits_list 10 | - name: "Read as a dict" 11 | read_csv: 12 | path: ./configs/13_fruits.csv 13 | key: id 14 | register: fruits_dict 15 | # - debug: 16 | # msg: '{{ 
fruits_list }}' 17 | 18 | - debug: 19 | msg: '{{ clubbed }}' 20 | vars: 21 | clubbed: | 22 | " 23 | {% for result in fruits_list.list %} 24 | '{{ result.type }}-{{ result.name }}-{{ result.id }}', 25 | {% endfor %} 26 | " 27 | 28 | - name: "show templating results" 29 | template: 30 | src: ./configs/17_template.j2 31 | dest: /tmp/17_template.txt 32 | mode: '0644' 33 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/18_groupby_list.yml: -------------------------------------------------------------------------------- 1 | - hosts: localhost 2 | gather_facts: no 3 | vars: 4 | by_type: "{{ fruits.list|groupby('type') }}" 5 | group_first_list: "{{ by_type |map('first')|list }}" 6 | group_last_list: "{{ by_type |map('last')|map('map', attribute='name') | list }}" 7 | net_dict: "{{ dict(group_first_list|zip(group_last_list)) }}" 8 | net_list: "{{ net_dict|dict2items(key_name='name', value_name='net') }}" 9 | tasks: 10 | - read_csv: 11 | path: ./configs/13_fruits.csv 12 | register: fruits 13 | - debug: 14 | var: by_type 15 | - debug: 16 | var: group_first_list 17 | - debug: 18 | var: group_last_list 19 | - debug: 20 | var: net_dict 21 | - debug: 22 | var: net_list -------------------------------------------------------------------------------- /thirdparty/ansible/basic/19_list_without_loop.yml: -------------------------------------------------------------------------------- 1 | # - hosts: localhost 2 | # gather_facts: no 3 | # vars: 4 | # by_type: "{{ fruits.dict.values()|groupby('type') }}" 5 | # my_list2: | 6 | # {% for batch in my_dict2[type]|batch(input_range|int) %} 7 | # {% for i in batch %} 8 | # - id: {{ i.id }} 9 | # name: {{ i.name }} 10 | # {% endfor %} 11 | # {% endfor %} 12 | # fruits_dict: "{{ fruits.dict }}" 13 | 14 | # tasks: 15 | # - read_csv: 16 | # path: ./configs/13_fruits.csv 17 | # key: id 18 | # register: fruits 19 | # - debug: 20 | # var: fruits_dict 21 | 22 | 
-------------------------------------------------------------------------------- /thirdparty/ansible/basic/22_jinja_indent.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Nested vs sub-elements" 3 | hosts: localhost 4 | gather_facts: no 5 | vars: 6 | - multiline_var: | 7 | line1="line1" 8 | somerandom2 9 | line3 10 | tasks: 11 | - name: "Updating Templates" 12 | template: 13 | src: ./configs/22_jinja_template.j2 14 | dest: /tmp/22_jinja_template.txt 15 | mode: '0644' 16 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/24_dynamic_variables_setfact.yml: -------------------------------------------------------------------------------- 1 | - name: "Read All files" 2 | read_csv: 3 | path: "{{item}}" 4 | with_fileglob: 5 | - "/tmp/patterns.*.csv" 6 | register: "register_patterns" 7 | 8 | - name: "Dynamic list" 9 | set_fact: "pat_{{pat_var}}={{item.list}}" 10 | with_items: 11 | - "{{register_patterns.results}}" 12 | vars: 13 | pat_var: "{{item.item | basename | regex_replace('.csv')| regex_replace('patterns.')}}" 14 | no_log: true 15 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/25_dynamic_variables_kv.yml: -------------------------------------------------------------------------------- 1 | - hosts: localhost 2 | vars: 3 | fruits: "{{ dict(f_keys|zip(f_vals)) }}" 4 | f_vals: "{{ fruit.results|map(attribute='list')|list }}" 5 | f_keys: "{{ fruit.results|map(attribute='item')| 6 | map('basename')| 7 | map('splitext')| 8 | map('first')|list }}" 9 | tasks: 10 | - read_csv: 11 | fieldnames: color,size,price 12 | path: "{{ item }}" 13 | with_fileglob: "configs/fruits/*.csv" 14 | register: fruit 15 | - debug: 16 | var: fruits 17 | - debug: 18 | var: fruits.apples 19 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/26_jsonArray_to_ndjson.yml: 
-------------------------------------------------------------------------------- 1 | # Converts a JSON array into newline separate json events 2 | # Quite useful to process an output of 'curl' command 3 | - hosts: localhost 4 | vars: 5 | json_file: /tmp/orig.json 6 | dest_file: /tmp/final.ndjson 7 | tasks: 8 | - name: Read JSON file 9 | set_fact: 10 | json_data: "{{ lookup('file', json_file) | from_json }}" 11 | - name: Convert JSON array/list into ndjson 12 | set_fact: 13 | ndjson_content: "{% for item in json_data %}{{ item | to_json| trim}}\n{% endfor %}" 14 | - name: Save ndjson to file 15 | copy: 16 | content: "{{ ndjson_content }}" 17 | dest: "{{dest_file}}" 18 | 19 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/31_block_when_tempfile.yml: -------------------------------------------------------------------------------- 1 | - hosts: localhost 2 | vars: 3 | fruit: "apple" 4 | tasks: 5 | - name: Main Task 6 | block: 7 | - name: Get subtask1 8 | set_fact: 9 | task1: "subtask1" 10 | - name: print subtask 11 | debug: 12 | msg: task1 13 | when: fruit is defined 14 | - name: Create temporary file 15 | ansible.builtin.tempfile: 16 | state: file 17 | suffix: temp 18 | register: tempfile_1 19 | 20 | - name: Print temporary file 21 | ansible.builtin.debug: 22 | msg: "{{tempfile_1}}" 23 | 24 | - name: Use the registered var and the file module to remove the temporary file 25 | ansible.builtin.file: 26 | path: "{{ tempfile_1.path }}" 27 | state: absent 28 | when: tempfile_1.path is defined 29 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/check_for_duplicates.txt: -------------------------------------------------------------------------------- 1 | - name: "Check for duplicates and fail if present" 2 | shell: awk -F',' '{print $1}' {{someInputFile}} | sort | uniq -d 3 | register: my_input_file 4 | failed_when: 5 | my_input_file.stdout_lines | length > 
0 6 | 7 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/02_dict_iteration.j2: -------------------------------------------------------------------------------- 1 | 2 | {% for deptElement in deptValue %} 3 | {{deptElement}} 4 | {{deptElement.firstname}} {{deptElement.secondname}} 5 | {% endfor %} -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/02_list.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "service_bus_namespace": "company-primary-eventhub", 4 | "event_hub_name": "main-tracking-events", 5 | "connection_string": "string1" 6 | }, 7 | { 8 | "service_bus_namespace": "company-secondary-eventhub", 9 | "event_hub_name": "analytics-stream-events", 10 | "connection_string": "string2" 11 | }, 12 | { 13 | "service_bus_namespace": "company-monitoring-eventhub", 14 | "event_hub_name": "system-monitoring-events", 15 | "connection_string": "string3" 16 | } 17 | ] -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/02_list_template.j2: -------------------------------------------------------------------------------- 1 | 2 | ## Header 3 | 4 | {% for hub in eh_list %} 5 | {{hub}} 6 | {% endfor %} -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/02_names_department.csv: -------------------------------------------------------------------------------- 1 | id,firstname,secondname,department,subDepartment,salary 2 | ID01,first01,surnam1,HR,pension,30000 3 | ID02,first02,surnam2,IT,development,40000 4 | ID03,first03,surnam3,IT,development,42000 5 | ID04,first04,surnam4,IT,operations,46000 6 | ID05,first05,surnam5,IT,operations,42000 7 | ID06,first06,surnam6,IT,development,42000 8 | ID07,first07,surnam7,CA,Accountancy,52000 9 | 
-------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/02_template.j2: -------------------------------------------------------------------------------- 1 | 2 | {% for department in hosts_list.list| groupby ('department') %} 3 | if host = '{{department.0}}' ; do 4 | {% for item2 in department.1 %} 5 | {{item2}} 6 | {% endfor %} 7 | fi 8 | 9 | {% endfor %} -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/05_lookup_file.config: -------------------------------------------------------------------------------- 1 | varA: "varAContent" 2 | ArrayA: ["apple", "mango", "pear"] 3 | country.state.district: [ "dist1", "dist2", "dist3"] 4 | 5 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/06_country_host_invalid.csv: -------------------------------------------------------------------------------- 1 | id,hostname,host_ip,country_code,country_name 2 | ID01,myhost1,10.2.3.4,US,United States 3 | ID02,incorrect host1,10.2.3.4.900,US,United States 4 | ID03,myhost1,10.2.3.4,USA,United States -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/06_country_host_valid.csv: -------------------------------------------------------------------------------- 1 | id,hostname,host_ip,country_code,country_name 2 | ID01,myhost1,10.2.3.2,US,United States 3 | ID02,myhost2,10.2.3.3,US,United States 4 | ID03,myhost3,10.2.3.4,UK,United Kingdom 5 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/13_fruits.csv: -------------------------------------------------------------------------------- 1 | id,name,quantity,type 2 | 1,apple,10,fruit 3 | 2,orange,20,fruit 4 | 3,carrot,5,veg 5 | 4,beetroot,2,veg 6 | 5,onion,3,veg 7 | 6,tomato,4,both 8 | 7,pear,4,fruit 9 | 8,banana,6,fruit 10 | 
9,persimon,4,fruit 11 | 10,guava,4,fruit 12 | 11,pepper,4,veg 13 | 12,potato,5,veg 14 | 13,cherry,5,fruit 15 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/17_template.j2: -------------------------------------------------------------------------------- 1 | {% macro input(name, value='', type='text', size=20) -%} 2 | 4 | {%- endmacro %} 5 | 6 | {% set my_fruit_list = [] %} 7 | {% set start = 0 %} 8 | {% set end = 2 %} 9 | {%- for item in (fruits_dict.dict| dict2items | selectattr("value.type", "match", "^veg$"))[start:end] -%} 10 | {{ my_fruit_list.append(item.value.name) }} 11 | {%- endfor -%} 12 | my_list=["{{ my_fruit_list|join('", "') }}"] 13 | 14 |

{{ input('user_test') }}

15 |

{{ input('pass_test', type='password') }}

16 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/22_jinja_template.j2: -------------------------------------------------------------------------------- 1 | linestarting 2 | with_tab 3 | {{multiline_var|indent (width=4, first=True)}} -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/dummy.remove.csv: -------------------------------------------------------------------------------- 1 | id,name,quantity,type 2 | 1,apple,10,fruit 3 | 2,orange,20,fruit 4 | 3,carrot,5,veg 5 | 4,beetroot,2,veg 6 | 5,onion,3,veg 7 | 6,tomato,4,both 8 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/fruits/apples.csv: -------------------------------------------------------------------------------- 1 | red,big,20 2 | green,small,10 -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/fruits/grapes.csv: -------------------------------------------------------------------------------- 1 | red,big,20 2 | black,small,10 -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/fruits/pears.csv: -------------------------------------------------------------------------------- 1 | green,big,30 2 | yellow,small,20 3 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/configs/nested_list.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "fruit", 4 | "names": [ 5 | "apple", 6 | "mango", 7 | "pear", 8 | "grapes" 9 | ] 10 | }, 11 | { 12 | "type": "both", 13 | "names": [ 14 | "tomato" 15 | ] 16 | }, 17 | { 18 | "type": "veg", 19 | "names": [ 20 | "potato", 21 | "augbe", 22 | "chilli" 23 | ] 24 | } 25 | ] 
-------------------------------------------------------------------------------- /thirdparty/ansible/basic/filter_plugins/mapattributes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | class FilterModule(object): 3 | def filters(self): 4 | return { 'mapattributes': self.mapattributes } 5 | 6 | def mapattributes(self, list_of_dicts, list_of_keys): 7 | l = [] 8 | for di in list_of_dicts: 9 | newdi = { } 10 | for key in list_of_keys: 11 | newdi[key] = di[key] 12 | l.append(newdi) 13 | return l 14 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/group_vars/07_github_vars.yml: -------------------------------------------------------------------------------- 1 | github_base_url: "https://api.github.com" 2 | github_username: "getkub" 3 | github_list_api: "users/{{github_username}}/repos" -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/distributor_role/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Simple debug test" 3 | import_tasks: simple_debug_test.yml 4 | tags: [simple_debug_test] 5 | 6 | - name: "trigger_git_role" 7 | import_tasks: trigger_git_role.yml 8 | tags: [trigger_git_role] -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/distributor_role/tasks/simple_debug_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "simple_debug_test" 4 | debug: 5 | msg: "Inside simple_debug_test" 6 | 7 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/distributor_role/tasks/trigger_git_role.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Dummy role" 4 | debug: 5 | msg: "Inside 
trigger_git_role" 6 | 7 | - name: "Call another role" 8 | include_role: 9 | name: '../roles/git' 10 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/git/tasks/git_clone_a_repo.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "GIT clone a repo to TEMP" 4 | git: 5 | repo: "https://github.com/getkub/SplunkScriplets.git" 6 | dest: "/tmp/my_clone" 7 | force: "yes" 8 | update: "yes" 9 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/git/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "GIT roles" 4 | import_tasks: git_clone_a_repo.yml 5 | tags: [git_clone_a_repo] -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/github/tasks/github_repo_activities.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "GET list for {{github_username}}" 4 | uri: 5 | url: "{{github_base_url}}/{{github_list_api}}" 6 | method: GET 7 | #user: "{{gihub_user}}" 8 | #password: "{{gihub_pass}}" 9 | return_content: yes 10 | validate_certs: no 11 | force_basic_auth: yes 12 | status_code: [200, 202, 204] 13 | headers: 14 | kbn-xsrf: true 15 | Content-Type: application/json 16 | register: github_repo_list_register 17 | 18 | - name: "Debug github_repo_list_register" 19 | debug: 20 | msg: "{{github_repo_list_register}}" 21 | -------------------------------------------------------------------------------- /thirdparty/ansible/basic/roles/github/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "github_repo_activities" 4 | import_tasks: github_repo_activities.yml 5 | tags: [github_repo_activities] 6 | 
---
# Reads a dict-shaped JSON config, filters the entries by type, and loops
# over the filtered result. Runs locally; no facts are needed.
- name: Filter JSON dictionary from external file before loop
  hosts: localhost
  gather_facts: false

  tasks:
    - name: Read JSON file
      set_fact:
        # The configs directory ships fruits_veg.dict.json (dict-shaped,
        # top-level "fruits_veg" key) — there is no plain fruits_veg.json,
        # so the lookup must target the .dict.json file.
        my_json_dict: "{{ lookup('file', './configs/fruits_veg.dict.json') | from_json }}"

    - name: Filter JSON dictionary
      set_fact:
        # Keep only the entries whose "type" attribute equals "fruit".
        filtered_fruits: "{{ my_json_dict.fruits_veg | selectattr('type', 'equalto', 'fruit') | list }}"

    - name: Loop over filtered fruits
      debug:
        msg: "Name: {{ item.name }}, Color: {{ item.color }}"
      loop: "{{ filtered_fruits }}"
-------------------------------------------------------------------------------- /thirdparty/ansible/cpt/100_filter_list_yaml.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Filter YAML dictionary before loop 3 | hosts: localhost 4 | gather_facts: false 5 | 6 | tasks: 7 | - name: Read YAML file 8 | set_fact: 9 | my_yaml_dict: "{{ lookup('file', './configs/fruits_veg.dict.yml') | from_yaml }}" 10 | 11 | - name: Filter YAML dictionary 12 | set_fact: 13 | filtered_fruits: "{{ my_yaml_dict.fruits_veg | selectattr('type', 'equalto', 'fruit') | list }}" 14 | 15 | - name: Loop over filtered fruits 16 | debug: 17 | msg: "Name: {{ item.name }}, Color: {{ item.color }}" 18 | loop: "{{ filtered_fruits }}" 19 | -------------------------------------------------------------------------------- /thirdparty/ansible/cpt/configs/fruits_veg.dict.json: -------------------------------------------------------------------------------- 1 | { 2 | "fruits_veg": [ 3 | { 4 | "name": "apple", 5 | "color": "red", 6 | "type": "fruit" 7 | }, 8 | { 9 | "name": "banana", 10 | "color": "yellow", 11 | "type": "fruit" 12 | }, 13 | { 14 | "name": "carrot", 15 | "color": "orange", 16 | "type": "vegetable" 17 | }, 18 | { 19 | "name": "orange", 20 | "color": "orange", 21 | "type": "fruit" 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /thirdparty/ansible/cpt/configs/fruits_veg.dict.yml: -------------------------------------------------------------------------------- 1 | fruits_veg: 2 | - name: apple 3 | color: red 4 | type: fruit 5 | - name: banana 6 | color: yellow 7 | type: fruit 8 | - name: carrot 9 | color: orange 10 | type: vegetable 11 | - name: orange 12 | color: orange 13 | type: fruit 14 | -------------------------------------------------------------------------------- /thirdparty/ansible/cpt/configs/fruits_veg.list.json: 
-------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "apple", 4 | "color": "red", 5 | "type": "fruit" 6 | }, 7 | { 8 | "name": "banana", 9 | "color": "yellow", 10 | "type": "fruit" 11 | }, 12 | { 13 | "name": "carrot", 14 | "color": "orange", 15 | "type": "vegetable" 16 | }, 17 | { 18 | "name": "orange", 19 | "color": "orange", 20 | "type": "fruit" 21 | } 22 | ] 23 | -------------------------------------------------------------------------------- /thirdparty/ansible/cpt/configs/fruits_veg.list.yml: -------------------------------------------------------------------------------- 1 | - name: apple 2 | color: red 3 | type: fruit 4 | - name: banana 5 | color: yellow 6 | type: fruit 7 | - name: carrot 8 | color: orange 9 | type: vegetable 10 | - name: orange 11 | color: orange 12 | type: fruit 13 | -------------------------------------------------------------------------------- /thirdparty/ansible/hortonew/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = /etc/ansible/hosts 3 | remote_tmp = /tmp/hortonew 4 | forks = 50 5 | poll_interval = 5 6 | sudo_user = root 7 | host_key_checking = False 8 | sudo_exe = sudo 9 | timeout = 10 10 | -------------------------------------------------------------------------------- /thirdparty/ansible/hortonew/filestructure.txt: -------------------------------------------------------------------------------- 1 | # http://blog.hortonew.com/ansible-splunk-forwarder-deployment 2 | /etc/ansible/ 3 | ansible.cfg (ansible specific settings) 4 | hosts (where you can store groups of hosts for ease of management) 5 | group_vars/ 6 | windows.yml 7 | playbooks/ (where you should store .yml playbooks) 8 | SplunkUniversalForwarderInstallWindows.yml 9 | SplunkUniversalForwarderInstallLinux.yml 10 | 11 | splunk_binaries/ (where you should store installers) 12 | splunkforwarder-6.3.0-aa7d4b1ccb80-x64-release.msi 13 | 
---
# Installs the Splunk Universal Forwarder RPM on the install-splunk-linux
# host group via the universal_forwarder_linux role.
- name: Linux Universal Forwarder Install
  hosts: install-splunk-linux
  remote_user: your-user-here
  # 'sudo: yes' was deprecated in Ansible 1.9 and removed in 2.9;
  # 'become: yes' is the supported privilege-escalation directive.
  become: yes
  vars:
    splunk_working_directory: '/tmp/Splunk/'
    splunk_deployment_server: 'your-deployment-server.yourdomain.com'
    splunk_deployment_server_port: '8089'
    splunk_user: 'admin'
    # NOTE(review): default credential kept for sample parity — override via
    # vault/extra-vars in any real deployment.
    splunk_password: 'changeme'
    splunk_uf_binary_linux: 'splunkforwarder-6.3.0-aa7d4b1ccb80-linux-2.6-x86_64.rpm'
  roles:
    - universal_forwarder_linux
"10.10.10.12:8089"} 13 | "SITE3" {$splunk_deployment_server = "10.10.10.13:8089"} 14 | default {"-site parameter missing. Site list: corporate, site1, site2, site3"; exit} 15 | } 16 | 17 | & msiexec.exe /qn /Liwem! $log /i $splunk_install_file AGREETOLICENSE=Yes DEPLOYMENT_SERVER=`"$splunk_deployment_server`" INSTALL_SHORTCUT=0 /quiet 18 | -------------------------------------------------------------------------------- /thirdparty/ansible/hortonew/roles/universal_forwarder_windows/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include: forwarder.yml 3 | -------------------------------------------------------------------------------- /thirdparty/ansible/splunk_apps/README.txt: -------------------------------------------------------------------------------- 1 | Please check ansible project 2 | -------------------------------------------------------------------------------- /thirdparty/ansible/splunk_apps/splunk_ansible_uri.yml: -------------------------------------------------------------------------------- 1 | - name: "SEARCH: Run Search and register Sid" 2 | uri: 3 | url: "{{splunk_ssl_uri}}:{{splunk_mgmt_port}}/services/search/jobs" 4 | method: POST 5 | user: xxxx 6 | password: xxxxx 7 | body: 8 | - [ search, "{{splunk_search}}"] 9 | status_code: [200, 201] 10 | force_basic_auth: yes 11 | validate_certs: no 12 | body_format: form-urlencoded 13 | return_content: true 14 | vars: 15 | - splunk_search: "search index=_internal | stats count" 16 | - splunk_ssl_uri: "https://localhost" 17 | - splunk_mgmt_port: "8089" 18 | register: splunk_search_register 19 | -------------------------------------------------------------------------------- /thirdparty/api/json_server/db.json: -------------------------------------------------------------------------------- 1 | { 2 | "posts": [ 3 | { "id": "1", "title": "apple title", "views": 100 }, 4 | { "id": "2", "title": "banana title", "views": 200 } 5 | ], 6 | 
"comments": [ 7 | { "id": "1", "text": "apple comment", "postId": "1" }, 8 | { "id": "2", "text": "banana comment", "postId": "2" } 9 | ], 10 | "profile": { 11 | "name": "typicode" 12 | } 13 | } -------------------------------------------------------------------------------- /thirdparty/api/json_server/json_server.md: -------------------------------------------------------------------------------- 1 | ## JSON server based on npm 2 | https://github.com/typicode/json-server 3 | 4 | ## steps 5 | ``` 6 | npx json-server db.json 7 | ``` 8 | 9 | 10 | ## Queries 11 | ``` 12 | http://localhost:3000/posts?id=2 13 | ``` -------------------------------------------------------------------------------- /thirdparty/api/public_api_list.md: -------------------------------------------------------------------------------- 1 | ## Public API 2 | 3 | #### Without API key 4 | 5 | - https://api.sunrise-sunset.org/ 6 | - https://earthquake.usgs.gov/fdsnws/event/1/query?format=geojson&starttime=2020-01-01&endtime=2020-01-02 7 | - https://api.coindesk.com/v1/bpi/currentprice.json 8 | - https://dog.ceo/api/breeds/image/random 9 | - https://api.ipify.org?format=json 10 | - https://randomuser.me/api/ 11 | - http://www.7timer.info/bin/api.pl?lon=113.17&lat=23.09&product=civil&output=json 12 | - https://jsonplaceholder.typicode.com/posts 13 | 14 | 15 | #### With API key 16 | 17 | - https://api.openweathermap.org/data/2.5/weather?lat=44.34&lon=10.99&appid={API_KEY} 18 | 19 | -------------------------------------------------------------------------------- /thirdparty/approach_concepts/automation_values.md: -------------------------------------------------------------------------------- 1 | simplify 2 | standardize 3 | automate 4 | repeat -------------------------------------------------------------------------------- /thirdparty/approach_concepts/devops_toolchain_apps.md: -------------------------------------------------------------------------------- 1 | | Stage | Sample Products | Description | 
2 | |------| ------- | ------- | 3 | |Source Control Management| Github, GitLab | | 4 | | Static Code Analysis | Sonarqube | | 5 | | Software Composition Analysis | snyk | | 6 | | Unit Test | Junit, PostMan, SoapUI, Wiremock | | 7 | | Static Application Security (SAST) | Fortify | Vulnerability analysis| 8 | | Build Packaging | maven, gradle, nuget, npm | software to package| 9 | | Binary Management | Jfrog artefactory | | 10 | | Software composition Analysis | Jfrog Xray | Analyse open source binaries for reisk, security & license compliance | -------------------------------------------------------------------------------- /thirdparty/approach_concepts/products_decision.md: -------------------------------------------------------------------------------- 1 | ### Things to be aware of 2 | - Cost 3 | - Scale 4 | - Flexibility 5 | - Vendor Lock 6 | - Capability 7 | - Cloud Native 8 | - At scale 9 | 10 | Common denominator products should be chosen 11 | -------------------------------------------------------------------------------- /thirdparty/aws/README.md: -------------------------------------------------------------------------------- 1 | AWS high level scriptlets 2 | 3 | Various configs for AWS 4 | - setting up AWS profile 5 | - Profile Switcher 6 | -------------------------------------------------------------------------------- /thirdparty/aws/aws_cli_query.md: -------------------------------------------------------------------------------- 1 | ## Query data 2 | ``` 3 | aws --region=eu-west-2 ec2 describe-instances --filters "Name=instance-state-name,Values=running" \ 4 | --query 'Reservations[].Instances[].[Tags[?Key==`environment`] | [].Value]' \ 5 | --output text 6 | ``` 7 | -------------------------------------------------------------------------------- /thirdparty/aws/aws_cli_setup.md: -------------------------------------------------------------------------------- 1 | - Install Assume-role 2 | ``` 3 | brew install remind101/formulae/assume-role 4 | ``` 5 | 
6 | - append the profiles content 7 | ``` 8 | cat append_contents_to_existing_aws_config_file >> ~/.aws/config file. 9 | ``` 10 | 11 | - Setup environment 12 | ``` 13 | cp setup_aws_env.sh /usr/local/bin/setup_aws_env 14 | 15 | chmod 755 /usr/local/bin/setup_aws_env 16 | 17 | ``` 18 | 19 | - Add alias alias 20 | 21 | ``` 22 | cp ~/.zshrc ~/zshrc-backup 23 | echo ‘alias setup_aws_env="source setup_aws_env"’ >> ~/.zshrc 24 | 25 | setup_aws_env 26 | ``` 27 | -------------------------------------------------------------------------------- /thirdparty/aws/aws_config_file_sample: -------------------------------------------------------------------------------- 1 | # ***************************************************************************** 2 | # AWS Extend Switch Roles 3 | # Firefox Extension 4 | # https://github.com/tilfinltd/aws-extend-switch-roles 5 | # https://addons.mozilla.org/en-GB/firefox/addon/aws-extend-switch-roles3/ 6 | # ***************************************************************************** 7 | 8 | # aws_config_file=~/.aws/config 9 | [default] 10 | region=eu-west-1 11 | azure_tenant_id=xxxxxxxx 12 | azure_app_id_uri=https://signin.aws.amazon.com/saml 13 | azure_default_username=my_email_or_id 14 | azure_default_role_arn= 15 | azure_default_duration_hours=12 16 | azure_default_remember_me=true 17 | 18 | # sample 19 | [profile my-dev-role-readonly] 20 | region=eu-west-1 21 | source_profile=default 22 | role_arn=arn:aws:iam::1234556789:role/my-role-ro 23 | color=ffd600 24 | -------------------------------------------------------------------------------- /thirdparty/aws/aws_resources.txt: -------------------------------------------------------------------------------- 1 | ## Get all resources 2 | 3 | aws resourcegroupstaggingapi get-resources --query 'ResourceTagMappingList[].[ResourceARN]' -------------------------------------------------------------------------------- /thirdparty/aws/aws_sqs_commands.md: 
-------------------------------------------------------------------------------- 1 | ## Pull Events from SQS using AWS Cli 2 | ``` 3 | aws sqs receive-message --queue-url https://sqs.eu-west-2.amazonaws.com/123457890/my-queue-13455.fifo --attribute-names All --message-attribute-name All --max-number-of-messages 1 --profile my_profile_with-role_assume --region eu-west-2 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/aws/iam/aws_sts_assume_role.md: -------------------------------------------------------------------------------- 1 | ### AWS sts assume role in one command 2 | - https://stackoverflow.com/questions/63241009/aws-sts-assume-role-in-one-command 3 | - 4 | ``` 5 | export $(printf "AWS_ACCESS_KEY_ID=%s AWS_SECRET_ACCESS_KEY=%s AWS_SESSION_TOKEN=%s" \ 6 | $(aws sts assume-role \ 7 | --role-arn arn:aws:iam::123456789012:role/MyAssumedRole \ 8 | --role-session-name MySessionName \ 9 | --query "Credentials.[AccessKeyId,SecretAccessKey,SessionToken]" \ 10 | --output text)) 11 | ``` 12 | -------------------------------------------------------------------------------- /thirdparty/aws/iam/reference_identifiers.md: -------------------------------------------------------------------------------- 1 | 2 | - https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html 3 | 4 | ``` 5 | arn:aws:iam::account:root 6 | arn:aws:iam::account:user/user-name-with-path 7 | arn:aws:iam::account:group/group-name-with-path 8 | arn:aws:iam::account:role/role-name-with-path 9 | arn:aws:iam::account:policy/policy-name-with-path 10 | arn:aws:iam::account:instance-profile/instance-profile-name-with-path 11 | arn:aws:sts::account:federated-user/user-name 12 | arn:aws:sts::account:assumed-role/role-name/role-session-name 13 | arn:aws:iam::account:mfa/virtual-device-name-with-path 14 | arn:aws:iam::account:u2f/u2f-token-id 15 | arn:aws:iam::account:server-certificate/certificate-name-with-path 16 | 
import boto3
from datetime import timezone, timedelta, datetime

# How many days a self-owned snapshot may live before it is deleted.
# Single source of truth for the retention period (was hard-coded twice).
RETENTION_DAYS = 10


def lambda_handler(event, context):
    """Delete self-owned EBS snapshots older than RETENTION_DAYS days.

    Args:
        event: Lambda trigger payload (unused).
        context: Lambda runtime context (unused).
    """
    client = boto3.client('ec2', region_name='eu-west-2')
    # NOTE(review): describe_snapshots returns at most one page here;
    # add a paginator if the account can hold >1000 snapshots — confirm.
    snapshots = client.describe_snapshots(OwnerIds=['self'])
    print(f"Deleting snapshots older than {RETENTION_DAYS} days")
    print(snapshots)

    # Cutoff is loop-invariant: compute it once instead of per snapshot.
    delete_time = datetime.now(timezone.utc) - timedelta(days=RETENTION_DAYS)

    for snapshot in snapshots['Snapshots']:
        start_time = snapshot['StartTime']
        snapshot_id = snapshot['SnapshotId']
        if delete_time > start_time:
            print(f"Start time {start_time} Delete time {delete_time}")
            client.delete_snapshot(SnapshotId=snapshot_id)
            print(f"Snapshot with id {snapshot_id} is deleted!")
/thirdparty/certs/ca.conf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 2048 3 | distinguished_name = req_distinguished_name 4 | x509_extensions = v3_req 5 | prompt=no 6 | 7 | [ req_distinguished_name ] 8 | countryName = UK 9 | stateOrProvinceName = Watford 10 | localityName = London 11 | organizationName = mycompany 12 | organizationalUnitName = itops 13 | commonName = * 14 | 15 | [ v3_req ] 16 | keyUsage = nonRepudiation, digitalSignature, keyEncipherment, dataEncipherment 17 | extendedKeyUsage = serverAuth 18 | subjectAltName = @alt_names 19 | [alt_names] 20 | DNS.1 = mydev.test 21 | 22 | -------------------------------------------------------------------------------- /thirdparty/certs/cert.conf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 2048 3 | distinguished_name = req_distinguished_name 4 | x509_extensions = v3_req 5 | prompt=no 6 | 7 | [ req_distinguished_name ] 8 | countryName = UK 9 | stateOrProvinceName = Watford 10 | localityName = London 11 | organizationName = mycompany 12 | organizationalUnitName = itops 13 | commonName = * 14 | 15 | [ v3_req ] 16 | keyUsage = nonRepudiation, digitalSignature, keyEncipherment, dataEncipherment 17 | extendedKeyUsage = serverAuth 18 | subjectAltName = @alt_names 19 | [alt_names] 20 | DNS.1 = mydev.test 21 | DNS.2 = svc1.mydev.test 22 | DNS.3 = svc2.mydev.test 23 | DNS.4 = svc3.mydev.test 24 | DNS.5 = svc4.mydev.test 25 | DNS.6 = svc5.mydev.test 26 | DNS.7 = n8n.mydev.test 27 | IP.1 = 192.168.1.18 28 | -------------------------------------------------------------------------------- /thirdparty/certs/pem_ppk.md: -------------------------------------------------------------------------------- 1 | ### PEM key conversions 2 | - https://aws.amazon.com/premiumsupport/knowledge-center/ec2-ppk-pem-conversion/ 3 | 4 | 5 | ``` 6 | brew install putty 7 | # Convert ppk to pem 8 | puttygen 
ppkkey.ppk -O private-openssh -o pemkey.pem 9 | 10 | # .pem file to a .ppk file 11 | puttygen pemKey.pem -o ppkKey.ppk -O private 12 | ``` 13 | -------------------------------------------------------------------------------- /thirdparty/certs/reference.md: -------------------------------------------------------------------------------- 1 | Also refer: https://github.com/getkub/k8s_kubernetes/tree/main/docs/csr 2 | -------------------------------------------------------------------------------- /thirdparty/confluence/api_updates.md: -------------------------------------------------------------------------------- 1 | ## API updates 2 | 3 | - uses Huachao Mao REST Client plugin in Visual studio (Ctrl+Alt_R in Windows or Cmd+Alt+R in MacOS) 4 | 5 | ``` 6 | PUT {{baseUrl}}/wiki/api/v2/pages/3327557500 7 | Content-Type: application/json 8 | Accept: application/json 9 | Authorization: Basic {{token_b64}} 10 | 11 | <@ ./content.json 12 | ``` 13 | 14 | - content.json is below 15 | 16 | 17 | ``` 18 | { 19 | "id": "3327557500", 20 | "title": "sample Update", 21 | "body": { 22 | "representation": "storage", 23 | "value": "

Notes

sample" 24 | }, 25 | "version": { 26 | "number": 2, 27 | "message": "Updated with Markdown" 28 | } 29 | } 30 | ``` 31 | -------------------------------------------------------------------------------- /thirdparty/curl/artifactory.md: -------------------------------------------------------------------------------- 1 | ## JFrog Artifactory 2 | 3 | ``` 4 | # PUT 5 | curl -v -T $fname "${artifactory_url}/${directory}/${fname}" 6 | ``` 7 | 8 | 9 | 10 | ``` 11 | # DELETE 12 | curl -v --request DELETE "${artifactory_url}/${directory}/${fname}" 13 | ``` 14 | -------------------------------------------------------------------------------- /thirdparty/curl/ftp_curl.txt: -------------------------------------------------------------------------------- 1 | # Use curl for ftp tests 2 | curl -v --ftp-ssl ftp://\\:@hostname 3 | curl -v --ftp-ssl ftp://mydomain\\svcftpuser1:ftpuserpass@mysplunkServer.com 4 | 5 | -------------------------------------------------------------------------------- /thirdparty/curl/proxy_curl.txt: -------------------------------------------------------------------------------- 1 | # Proxy user in the curl command 2 | 3 | curl -o test.csv -vk -x https://${proxyFQDN}:${proxyPort} --proxy-user ${proxyUser}:${proxyPass} -L ${destURL} -u ${destUser}:${destPass} 4 | -------------------------------------------------------------------------------- /thirdparty/curl/splunk_search_macro.txt: -------------------------------------------------------------------------------- 1 | curl --silent -u $user:$pass -k "https://$splunk_host:8089/services/search/jobs/export" --data-urlencode search="search \`mymacro(param1,param2)\`" 2 | -------------------------------------------------------------------------------- /thirdparty/cyberark/cyberark_settings.txt: -------------------------------------------------------------------------------- 1 | The Translator file neds to be placed : “%ProgramFiles%\PrivateArk\Server\Syslog” 2 | 3 | #In DBparam.ini file below entry & restart 
CyberArk Vault Server 4 | [Syslog] 5 | SyslogTranslatorFile=Syslog\SplunkCIM.xsl 6 | SyslogServerPort=514 7 | SyslogServerIP= 8 | SyslogServerProtocol=UDP 9 | SyslogMessageCodeFilter=0-999 10 | SyslogSendBOMPrefix=No 11 | 12 | http://docs.splunk.com/Documentation/AddOns/latest/CyberArk/Setup 13 | -------------------------------------------------------------------------------- /thirdparty/dataCapture/networkProtocol.txt: -------------------------------------------------------------------------------- 1 | 25 = SMTP 2 | 21 = FTP 3 | 22 = SSH 4 | 23 = Telnet 5 | 37 = Time 6 | 79 = Finger 7 | 80 = HTTP 8 | 113 = Ident 9 | 110 = POP3 10 | -------------------------------------------------------------------------------- /thirdparty/dataCapture/wireshark_kubernetes.md: -------------------------------------------------------------------------------- 1 | 2 | ``` 3 | https://github.com/eldadru/ksniff 4 | https://github.com/kubernetes-sigs/krew 5 | ``` -------------------------------------------------------------------------------- /thirdparty/disk/disk_performance.md: -------------------------------------------------------------------------------- 1 | # Measure Disk 2 | - IOPS 3 | - Bandwidth (BW) 4 | - slat (Submission Latency) 5 | - clat (Completion Latency) 6 | - lat (Overall latency) 7 | 8 | ## Replicate Sequential write pattern 9 | ``` 10 | sudo apt -y install fio 11 | 12 | ``` 13 | 14 | ## Edit configs 15 | ``` 16 | # vi /tmp/fio-seq-write.job 17 | 18 | [global] 19 | name=fio-seq-write 20 | filename=fio-seq-write 21 | rw=write 22 | bs=16k 23 | direct=1 24 | numjobs=4 25 | group_reporting 26 | time_based 27 | runtime=300 28 | 29 | [file1] 30 | size=10G 31 | ioengine=libaio 32 | iodepth=16 33 | ``` 34 | 35 | ## Run FIO 36 | ``` 37 | sudo fio fio-seq-write.job 38 | ``` 39 | -------------------------------------------------------------------------------- /thirdparty/disk/dmesg_disk.md: -------------------------------------------------------------------------------- 1 | ## 
List all Detected Devices 2 | ``` 3 | dmesg | grep sda 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/disk/formatting.txt: -------------------------------------------------------------------------------- 1 | ## Macos based disks 2 | # https://superuser.com/questions/527657/how-do-you-format-a-2-gb-sd-card-to-fat32-preferably-with-disk-utility 3 | diskutil list 4 | diskExternal="/dev/disk4" 5 | name="somdDiskName" 6 | sudo diskutil eraseDisk FAT32 $name MBRFormat $diskExternal 7 | -------------------------------------------------------------------------------- /thirdparty/docker/ansible/README.md: -------------------------------------------------------------------------------- 1 | ``` 2 | docker pull joniator/ansible:latest 3 | 4 | 5 | docker run -it --name=test_ansible joniator/ansible:latest /bin/sh 6 | docker stop test_ansible && docker rm $(docker ps -qa) 7 | ``` -------------------------------------------------------------------------------- /thirdparty/docker/dockerfile_samples/README.md: -------------------------------------------------------------------------------- 1 | https://github.com/getkub/k8s_kubernetes/tree/main/helm/code-server-on-kubernetes 2 | 3 | -------------------------------------------------------------------------------- /thirdparty/docker/npm_samples/README.md: -------------------------------------------------------------------------------- 1 | - https://blog.ag-grid.com/full-row-editing-ag-grid-committing-changes-button-click/ 2 | -------------------------------------------------------------------------------- /thirdparty/docker/npm_samples/reactgrid.Dockerfile: -------------------------------------------------------------------------------- 1 | # pull official base image 2 | FROM node:14.15.1-alpine 3 | 4 | # https://github.com/silevis/reactgrid/blob/develop/Dockerfile 5 | # set working directory 6 | WORKDIR /app 7 | 8 | # add `/app/node_modules/.bin` to $PATH 9 | ENV PATH 
/app/node_modules/.bin:$PATH 10 | 11 | # install app dependencies 12 | COPY package.json ./ 13 | COPY package-lock.json ./ 14 | RUN npm install 15 | RUN npm install react react-dom --no-save --silent 16 | 17 | # add app 18 | COPY . ./ 19 | 20 | # start app 21 | CMD ["npm", "start"] -------------------------------------------------------------------------------- /thirdparty/docker/python_fastapi/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://hub.docker.com/layers/python/library/python/3.9/images/sha256-b7e449e11f8c466fbaf021dcc731563cb36a41321420db3cf506ba4d71d33a65?context=explore 2 | FROM python:3.9-bullseye 3 | 4 | WORKDIR /code 5 | 6 | COPY ./requirements.txt /code/requirements.txt 7 | RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt 8 | COPY ./app /code/app 9 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80"] 10 | 11 | # CMD [ "python", "./your-daemon-or-script.py" ] 12 | # CMD [ "python3", "-m" , "flask", "run", "--host=0.0.0.0"] 13 | -------------------------------------------------------------------------------- /thirdparty/docker/python_fastapi/README.md: -------------------------------------------------------------------------------- 1 | ## Build 2 | ``` 3 | myimage="python_fastapi" 4 | myversion="v1.01" 5 | mycontainer="python_fastapi" 6 | docker build -t ${myimage}:${myversion} . 
7 | docker images 8 | ``` 9 | 10 | ## Run 11 | ``` 12 | docker run --name $mycontainer -p 80:80 ${myimage}:${myversion} 13 | 14 | http://localhost 15 | http://localhost/docs 16 | http://localhost/items/1?q=apple 17 | ``` 18 | 19 | ## Clean-up 20 | ``` 21 | docker rm $(docker ps -qa -f status=exited) 22 | docker rmi -f ${myimage}:${myversion} 23 | ``` -------------------------------------------------------------------------------- /thirdparty/docker/python_fastapi/app/__init__.py: -------------------------------------------------------------------------------- 1 | # empty -------------------------------------------------------------------------------- /thirdparty/docker/python_fastapi/app/main.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from fastapi import FastAPI 4 | 5 | app = FastAPI() 6 | 7 | 8 | @app.get("/") 9 | def read_root(): 10 | return {"Hello": "World"} 11 | 12 | 13 | @app.get("/items/{item_id}") 14 | def read_item(item_id: int, q: Union[str, None] = None): 15 | return {"item_id": item_id, "q": q} 16 | -------------------------------------------------------------------------------- /thirdparty/docker/python_fastapi/requirements.txt: -------------------------------------------------------------------------------- 1 | fastapi>=0.68.0,<0.69.0 2 | pydantic>=1.8.0,<2.0.0 3 | uvicorn>=0.15.0,<0.16.0 4 | 5 | -------------------------------------------------------------------------------- /thirdparty/docker/tips.txt: -------------------------------------------------------------------------------- 1 | docker ps --format '{{.Names}}\t{{.Image}}' 2 | docker load < docker.abc.tar 3 | 4 | # ------------------------------------------ # 5 | # For docker errors due to /var/lib/docker 6 | # ------------------------------------------ # 7 | systemctl stop docker 8 | lvdisplay -v vg_root/docker-pool # This displays the volume 9 | 10 | rm -rf /var/lib/docker 11 | rm /etc/sysconfig/docker-storage 12 
| docker-storage-setup 13 | systemctl status docker 14 | 15 | # ------------------------------------------ # 16 | # docker-compose issues 17 | # ------------------------------------------ # 18 | sudo mount /tmp -o remount,exec 19 | 20 | 21 | ### To debug a container which is NOT starting using docker-compose 22 | docker-compose -f run sh 23 | 24 | 25 | ## Exec 26 | docker run -it --entrypoint bash repo.myrepo.co/project/imagename:imagetag 27 | docker push repo.myrepo.co/project/imagename:imagetag 28 | -------------------------------------------------------------------------------- /thirdparty/encode_decode/decode.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import base64 3 | 4 | # Decryption 5 | def decode(key, string): 6 | decoded_chars = [] 7 | string = base64.urlsafe_b64decode(string) 8 | for i in xrange(len(string)): 9 | key_c = key[ i % len(key)] 10 | encoded_c = chr(abs(ord(string[i]) - ord(key_c) % 256)) 11 | decoded_chars.append(encoded_c) 12 | decoded_string = "".join(decoded_chars) 13 | return decoded_string 14 | 15 | # Get command line arguments 16 | try: 17 | key = sys.argv[1] 18 | epasswd = sys.argv[2] 19 | except IndexError, e: 20 | print "Incorrect Arguments: Expects key and encrypted_string" 21 | sys.exit(1) 22 | 23 | decrypted_string = decode(key,epasswd) 24 | 25 | print "encrypted_string=" + epasswd + " ;key=" + key + " ;decrypted_string=" + decrypted_string 26 | -------------------------------------------------------------------------------- /thirdparty/encode_decode/encode.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import base64 3 | 4 | # Decryption 5 | def encode(key, string): 6 | encoded_chars = [] 7 | for i in xrange(len(string)): 8 | key_c = key[ i % len(key)] 9 | encoded_c = chr(ord(string[i]) + ord(key_c) % 256) 10 | encoded_chars.append(encoded_c) 11 | encoded_string = "".join(encoded_chars) 12 | return 
base64.urlsafe_b64encode(encoded_string) 13 | 14 | # Get command line arguments 15 | try: 16 | key = sys.argv[1] 17 | decrypted_string = sys.argv[2] 18 | except IndexError, e: 19 | print "Incorrect Arguments: Expects key and encrypted_string" 20 | sys.exit(1) 21 | 22 | encrypted_string = encode(key,decrypted_string) 23 | 24 | print "decrypted_string=" + decrypted_string + " ;key=" + key + " ;encrypted_string=" + encrypted_string 25 | -------------------------------------------------------------------------------- /thirdparty/esxi/basic_esxi_provision.script: -------------------------------------------------------------------------------- 1 | xxx 2 | -------------------------------------------------------------------------------- /thirdparty/fluentd/fluent-bit.md: -------------------------------------------------------------------------------- 1 | ``` 2 | https://docs.fluentbit.io/manual/concepts/data-pipeline 3 | ``` 4 | -------------------------------------------------------------------------------- /thirdparty/gcp/gcloud_commands.md: -------------------------------------------------------------------------------- 1 | ## Get credentials and run a command 2 | ``` 3 | gcloud container clusters get-credentials my-gke-cluster --region europe-west1 --project myproject 4 | kubectl exec keycloak-56754df4b4-cc7tv --namespace keycloak -c keycloak -- ls 5 | ``` 6 | -------------------------------------------------------------------------------- /thirdparty/gcp/gcp_links.md: -------------------------------------------------------------------------------- 1 | IAP ranges -> https://cloud.google.com/iap/docs/using-tcp-forwarding 2 | -------------------------------------------------------------------------------- /thirdparty/gcp/getting_shell_access2.md: -------------------------------------------------------------------------------- 1 | ### In case if you cannot access via kubectl 2 | ``` 3 | curl https://api.myip.com # To find current IP of GCP console 4 | myns=xyz 5 | 
myproj=some_project 6 | myzone="europe-west1-b" 7 | 8 | kubectl -n ${myns} get pods -o wide 9 | mynode="fill_up_node_from_above" 10 | 11 | # GET access to the node 12 | gcloud beta compute ssh ${mynode} --project ${myproj} --zone ${myzone} 13 | 14 | sudo docker ps | grep your_search 15 | sudo docker exec -it your_pod /bin/bash 16 | ``` 17 | -------------------------------------------------------------------------------- /thirdparty/git/clone_methods.md: -------------------------------------------------------------------------------- 1 | - Clone with project token: 2 | ``` 3 | # https://stackoverflow.com/questions/25409700/using-gitlab-token-to-clone-without-authentication 4 | git clone https://oauth2:ACCESS_TOKEN@somegitlab.com/vendor/package.git 5 | ``` 6 | -------------------------------------------------------------------------------- /thirdparty/git/conflicts.md: -------------------------------------------------------------------------------- 1 | - https://stackoverflow.com/questions/10697463/resolve-git-merge-conflicts-in-favor-of-their-changes-during-a-pull/33569970#33569970 2 | 3 | 4 | ## Resolve Git merge conflicts in favor of their changes 5 | ``` 6 | git pull -X theirs 7 | 8 | OR 9 | git merge --strategy-option theirs 10 | 11 | 12 | ``` 13 | 14 | ## If you're already in conflicted state, and you want to just accept all of theirs: 15 | ``` 16 | git checkout --theirs . 17 | git add . 
18 | 19 | # OR 20 | git merge --abort 21 | git pull -X theirs 22 | ``` 23 | 24 | 25 | ## Cherry pick 26 | ``` 27 | filename="path/to/the/conflicted_file.php" 28 | hashid="1023e24" 29 | git cherry-pick $hashid 30 | git checkout --theirs $filename 31 | git add $filename 32 | ``` 33 | -------------------------------------------------------------------------------- /thirdparty/git/git.txt: -------------------------------------------------------------------------------- 1 | # git logging 2 | git log --stat --date=iso --pretty=format:"%cd hash=%H abbr_hash=%h author_name=\"%an\" commit_msg=\"%s\" " 3 | -------------------------------------------------------------------------------- /thirdparty/git/git_advanced_lfs.md: -------------------------------------------------------------------------------- 1 | ## Clone repo without pulling large files 2 | 3 | https://stackoverflow.com/questions/42019529/how-to-clone-pull-a-git-repository-ignoring-lfs 4 | 5 | ``` 6 | brew install git-lfs 7 | GIT_LFS_SKIP_SMUDGE=1 git clone SERVER-REPOSITORY 8 | 9 | # Then specifically pull the relevant large file 10 | lfname="some_large_file" 11 | git-lfs pull --include ${lfname} 12 | ``` -------------------------------------------------------------------------------- /thirdparty/git/git_config_levels.md: -------------------------------------------------------------------------------- 1 | ### There are 3 levels of git config; project, global and system. 2 | 3 | ----------------------------------------------- 4 | - project: Project configs are only available for the current project and stored in .git/config in the project's directory. 5 | - global: Global configs are available for all projects for the current user and stored in ~/.gitconfig. 6 | - system: System configs are available for all the users/projects and stored in /etc/gitconfig. 7 | ----------------------------------------------- 8 | 9 | ``` 10 | # Create a project specific config, you have to execute this under the project's directory. 
11 | $ git config user.name "John Local_to_project" 12 | 13 | # Create a global config 14 | $ git config --global user.name "John Global_for_user" 15 | 16 | # Create a system config 17 | $ git config --system user.name "MY_HOST_DEFAULT" 18 | ``` 19 | -------------------------------------------------------------------------------- /thirdparty/git/git_credential_save.txt: -------------------------------------------------------------------------------- 1 | # if Automatic saving is not working 2 | url_base="https://10.220.12.12:444" 3 | user="joe" 4 | git config --global credential.${url_base}.username ${user} 5 | git config --global credential.helper cache 6 | -------------------------------------------------------------------------------- /thirdparty/git/git_https_proxy.txt: -------------------------------------------------------------------------------- 1 | # Set Proxy 2 | git config --global http.proxy http://myproxy.mycompany.com:8080 3 | git config --global https.proxy http://myproxy.mycompany.com:8080 4 | 5 | # Unset at global level 6 | git config --global --unset https.proxy 7 | git config --global --unset http.proxy 8 | 9 | # Unset at local level 10 | git config --unset http.proxy 11 | git config --unset https.proxy 12 | -------------------------------------------------------------------------------- /thirdparty/git/git_lifecycle.txt: -------------------------------------------------------------------------------- 1 | Application life-cycle 2 | code -> build -> integrate -> test -> release -> deploy -> Operate 3 | 4 | Code : git 5 | Build: 6 | Integrate: 7 | Test: 8 | release: 9 | deploy: 10 | Operate: 11 | -------------------------------------------------------------------------------- /thirdparty/git/git_useful_commands.md: -------------------------------------------------------------------------------- 1 | 2 | ``` 3 | filename="/my/path/file" 4 | ``` 5 | 6 | ## Follow file 7 | ``` 8 | git log --follow $filename 9 | git log --oneline --follow $filename 10 | 
``` 11 | 12 | ## diff the same file between two different commits 13 | ``` 14 | git diff HEAD~2 HEAD -- $filename 15 | git diff c598d3fe a0647d42 -- $filename 16 | ``` 17 | 18 | ## Git: checkout a single file from a specific commit 19 | ``` 20 | git checkout c598d3fe $filename 21 | ``` 22 | -------------------------------------------------------------------------------- /thirdparty/git/merge_strategy_branch_to_master.md: -------------------------------------------------------------------------------- 1 | If the Branch have conflicts, but needs to ensure Branch overpower the master 2 | 3 | https://stackoverflow.com/questions/2862590/how-to-replace-master-branch-in-git-entirely-from-another-branch 4 | 5 | ``` 6 | mybranch="my_branch" 7 | git checkout $mybranch 8 | git merge -s ours master 9 | git checkout master 10 | git merge $mybranch 11 | ``` 12 | -------------------------------------------------------------------------------- /thirdparty/git/prune_history.md: -------------------------------------------------------------------------------- 1 | ## Remove all history 2 | ``` 3 | git checkout main 4 | git checkout --orphan newBranch 5 | git add -A # Add all files and commit them 6 | git commit 7 | git branch -D main # Deletes the master branch 8 | git branch -m main # Rename the current branch to master 9 | git push -f origin main # Force push master branch to gitlab/github. 
Ensure it is UNPROTECTED 10 | git gc --aggressive --prune=all # remove the old files 11 | ``` 12 | -------------------------------------------------------------------------------- /thirdparty/git/revert_reset_ideas.md: -------------------------------------------------------------------------------- 1 | ## GIT Revert a commit 2 | ``` 3 | git revert -m 1 7cefa25a 4 | ``` 5 | 6 | ## GIT RESET hard to specific commit 7 | ``` 8 | git reset --hard HEAD 9 | git reset --hard 10 | ``` 11 | -------------------------------------------------------------------------------- /thirdparty/github/.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Require approval from the security team for all detection files 2 | /detections/ @org/security-team 3 | # Require approval for a virtual deployment file 4 | /.github/deployment.txt @org/security-team @lead-developer -------------------------------------------------------------------------------- /thirdparty/github/.github/ISSUE_TEMPLATE/deploy-detection.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Detection Rules to Splunk 2 | description: Submit detection IDs to deploy to Splunk 3 | title: "[Deploy] Detection Rules" 4 | labels: ["deploy"] 5 | body: 6 | - type: input 7 | id: detection_ids 8 | attributes: 9 | label: Detection IDs 10 | description: Enter comma-separated detection IDs (e.g., id1,id2). Check pinned issue #1 for available IDs. 
11 | placeholder: id1,id2,id3 12 | validations: 13 | required: true 14 | - type: textarea 15 | id: comments 16 | attributes: 17 | label: Additional Comments 18 | description: Optional notes about this deployment 19 | placeholder: Any comments -------------------------------------------------------------------------------- /thirdparty/gitlab/gitlab-ci_scheduled.yml: -------------------------------------------------------------------------------- 1 | ## To ensure a scheduled run is possible and align to specific schedule 2 | variables: 3 | GIT_STRATEGY: clone 4 | 5 | stages: 6 | - my_stage_1 7 | 8 | my_stage_1: 9 | tags: 10 | - runner1 11 | image: 12 | name: docker/alpine:3.9 13 | stage: my_stage_1 14 | rules: 15 | - if '$SCHEDULE == "my_schedule_at_9am"' 16 | script: 17 | - source /opt/venv/bin/activiate 18 | - export PATH=$PATH:/my/spec/dir 19 | - sh ./mycustomer_script ${PARAMS}_STAGE 20 | 21 | -------------------------------------------------------------------------------- /thirdparty/gitlab/tokens.txt: -------------------------------------------------------------------------------- 1 | # Clone 2 | my_user="auto_user" 3 | git_token="4AsUgS132358k4xW" 4 | git_host="localhost:444" 5 | git clone https://${my_user}:${git_token}@${git_host}/my_user/my_repo.git 6 | -------------------------------------------------------------------------------- /thirdparty/go/timeSubtract.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "time" 6 | ) 7 | 8 | func main() { 9 | fmt.Println("time", time.Now().Format("2006-01-02T15:04:05Z")) 10 | fmt.Println("time-5m", time.Now().Add(-5*time.Minute)) 11 | } 12 | -------------------------------------------------------------------------------- /thirdparty/html/selectors.md: -------------------------------------------------------------------------------- 1 | ## Various Selectors 2 | 3 | ``` 4 | #xpath 5 | /html/body/table/tbody/tr[1]/td[2]/span 6 | 7 | # 
Selector 8 | body > table > tbody > tr:nth-child(1) > td:nth-child(2) > span 9 | 10 | # JS path 11 | document.querySelector("body > table > tbody > tr:nth-child(1) > td:nth-child(2) > span") 12 | ``` 13 | 14 | ### CSS Selector Conditionals 15 | ``` 16 | # If contains style*=ff0000 within span 17 | body > table > tbody > tr:nth-child(2) > td>span[style*=ff0000] 18 | 19 | ``` 20 | -------------------------------------------------------------------------------- /thirdparty/images/gif_links.txt: -------------------------------------------------------------------------------- 1 | https://s160.convertio.me/p/kuenqu7eds7gr3KTfghhEQ/8ad10427097d282ab8f1544afb507f78/1654685744823_linkedin.gif 2 | -------------------------------------------------------------------------------- /thirdparty/interview/set1.txt: -------------------------------------------------------------------------------- 1 | Q) How much is your Understanding of Splunk or Elastic Search Stack? What are the components of Splunk/Elastic Stack? What they used for 2 | 3 | Q) What is an API (especially REST API). What are the 4 common/standard HTTP methods to interact with web-services 4 | 5 | Q) How much is your understanding of SIEM? Concept of use-cases 6 | 7 | Q) Windows & Linux OS - What are the normal type of dataset present in Windows & Linux Operating System? Where is it normally stored 8 | 9 | Q) Give an example of International Time standard/format? What are the main components to represent a precise time? 10 | -------------------------------------------------------------------------------- /thirdparty/interview/set2.md: -------------------------------------------------------------------------------- 1 | - Regex: Give a Sample regular expression to validate an email address. Doesn't have to be perfect, but a near sample would do 2 | - Python or Java: How to handle errors/exceptions. Give an example in one of the flavour. 
Give some samples 3 | - Difference between runtime exception & Compile Time Exception. Give an example 4 | - Give an example of JSON to represent a data for "EmployeeID", "EmployeeName", "EmployeeSalary" 5 | - I've two list [ 'a', 'b', 'd', 'c'] & ['x','z','y'] . Please can you write a simple program (Python or Java preferred) to merge these two list and sort it? 6 | -------------------------------------------------------------------------------- /thirdparty/javascript/timeout_fix_gcp.md: -------------------------------------------------------------------------------- 1 | https://stackoverflow.com/questions/49976573/why-google-cloud-shell-auto-disconnect-after-1-hours 2 | ``` 3 | setInterval(function() {document.elementFromPoint(500, 500).click();}, 30000); 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/jenkins/README.md: -------------------------------------------------------------------------------- 1 | Dedicated Repository with samples 2 | - https://github.com/getkub/jenkins_examples 3 | -------------------------------------------------------------------------------- /thirdparty/jenkins/git_checkout_jenkins.groovy: -------------------------------------------------------------------------------- 1 | // Adhoc git checkout without plugin 2 | 3 | stage('Git checkout - Manual') { 4 | steps { 5 | script { 6 | def config = [ 7 | 'githubUser': 'myuser'?: "defaultUser", 8 | 'repo': 'my-repo-custom', 9 | 'githubToken': 'my_pat' 10 | ] 11 | 12 | withCredentials([string(credentialsId: config.githubToken, variable: 'gitToken')]) { 13 | sh 'git clone https://'+config.githubUser+':'+gitToken+'@github.com/my-org/'+config.repo+'.git' 14 | } 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /thirdparty/jenkins/groovy_snippets.md: -------------------------------------------------------------------------------- 1 | ### Groovy Snippets 2 | 3 | - Cut a portion 4 | ``` 5 | 
"${scm.getUserRemoteConfigs()[0].getUrl().tokenize('/')[2].split('\\.')}" 6 | ``` 7 | 8 | 9 | - Branch checks with regex pattern 10 | ``` 11 | MY_BRANCH="feature/correct-pattern" 12 | def branches = [ "main", "feature/.*"] 13 | if (branches.any{ branch -> MY_BRANCH ==~ branch}) { 14 | println("MY_BRANCH adheres to pattern") 15 | } 16 | else { 17 | println("MY_BRANCH NOT adhere to pattern") 18 | currentBuild.result = 'ABORTED' 19 | error('Quitting due to NON-pattern match') 20 | } 21 | 22 | ``` 23 | 24 | 25 | - Skip a stage based on boolean value 26 | ``` 27 | when { excpression {return params.SKIP_STAGE_3 }} 28 | ``` -------------------------------------------------------------------------------- /thirdparty/jenkins/jenkins_api.md: -------------------------------------------------------------------------------- 1 | ### API URL's 2 | ``` 3 | jdomain="something.com" 4 | jproject="dev" 5 | https://${jdomain}/${jproject}/plugin/job-dsk/api-viewer/index.html 6 | ``` 7 | -------------------------------------------------------------------------------- /thirdparty/jira/jira_ticketing/README.md: -------------------------------------------------------------------------------- 1 | ### Explanation of Each File 2 | - config.py - Contains configuration values like JIRA URL, authentication credentials, etc. 3 | - jira_api.py - Contains functions for interacting with the JIRA API (querying for tickets, creating tickets, and adding comments). 4 | - ticket_handler.py - Contains the logic for handling ticket creation, querying, and appending comments (main logic). 5 | - main.py - Entry point to run the program and invoke the functions. 
6 | 7 | 8 | ### Requirements 9 | ``` 10 | pip install requests 11 | ``` 12 | 13 | ### How to run 14 | ``` 15 | python main.py 16 | ``` -------------------------------------------------------------------------------- /thirdparty/jira/jira_ticketing/config.py: -------------------------------------------------------------------------------- 1 | # config.py 2 | 3 | # JIRA base URL and Authentication details 4 | JIRA_URL = "https://your_jira_instance.atlassian.net/rest/api/2" 5 | JIRA_USERNAME = "your_username" 6 | JIRA_API_TOKEN = "your_api_token" # API token from Jira 7 | 8 | # JIRA Project Key (adjust as per your environment) 9 | PROJECT_KEY = "YOUR_PROJECT_KEY" 10 | 11 | # Issue Type for tickets 12 | ISSUE_TYPE = "Task" # Modify if needed 13 | -------------------------------------------------------------------------------- /thirdparty/jira/jira_ticketing/main.py: -------------------------------------------------------------------------------- 1 | # main.py 2 | from ticket_handler import handle_ticket 3 | 4 | # Example usage 5 | rule_id = "RULE01" # The rule ID (e.g., LOGS_MISSING) 6 | feed_name = "feed_name1" # The feed index (e.g., feed_name1) 7 | description = "The feed 'feed_name1' did not receive expected logs. Severity: 4, Priority: 6. Please investigate." 8 | child_alert_message = "Child alert: Missing logs for device X in feed_name1." 
9 | 10 | # Call the function to handle the ticket 11 | handle_ticket(rule_id, feed_name, description, child_alert_message) 12 | -------------------------------------------------------------------------------- /thirdparty/jira/logs_missing_sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "alert_details": { 3 | "use_case": { 4 | "id": "RULE01", 5 | "name": "Missing logs from Key Devices", 6 | "rule_id": "LOGS_MISSING", 7 | "severity": "4", 8 | "priority": "6" 9 | }, 10 | "mitre_mapping": { 11 | "tactic_id": "TA0040", 12 | "tactic_name": "Impact" 13 | }, 14 | "expected": { 15 | "host": "*", 16 | "index": "feed_name1" 17 | }, 18 | "actual": { 19 | "event_count": 0 20 | }, 21 | "timestamps": { 22 | "scheduled_time": "2024-11-22T05:56:56.905Z", 23 | "alert_generated": "2024-11-22T05:56:57.283Z" 24 | } 25 | } 26 | } 27 | 28 | 29 | -------------------------------------------------------------------------------- /thirdparty/jq/jq_argument_pass1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # https://bbs.archlinux.org/viewtopic.php?id=239827 4 | 5 | in_port=8000 6 | in_File="../../sampleData/raw/json/sample_array.json" 7 | 8 | #jq --arg in_port $in_port --arg out_securityMode "$out_securityMode" 'map(if (.port == $in_port) then .details.securityMode = $out_securityMode else . end )' ${in_File} 9 | jq --arg in_port $in_port 'map(if (.port == 8000) then .details.securityMode = "NA" else . end )' ${in_File} -------------------------------------------------------------------------------- /thirdparty/jq/jq_find_replace_delete.md: -------------------------------------------------------------------------------- 1 | - Data sample: [Link](../../sampleData/raw/json/sample_array.json) 2 | - https://lzone.de/cheat-sheet/jq 3 | 4 | ### Find and Replace based on select condition. 
Use `\=` for assignment 5 | ``` 6 | cat sample_array.json | jq '.[]| select (.host == "${MY_HOST_EUROPE}")| .details.country |= "xxxxxx" ' 7 | ``` 8 | 9 | ### Find and Replace based and retain entire data 10 | ``` 11 | jq 'map(if (.host == "${MY_HOST_EUROPE}") then .details.country = "xxxxxx" else . end )' sample_array.json 12 | ``` 13 | 14 | ### Delete a key-value 15 | ``` 16 | cat sample_array.json | jq '.[]| select (.host == "${MY_HOST_EUROPE}")| del(.details.country)' 17 | ``` 18 | -------------------------------------------------------------------------------- /thirdparty/jupyter_books/README.md: -------------------------------------------------------------------------------- 1 | examples of Jupyter Books 2 | -------------------------------------------------------------------------------- /thirdparty/k8s_kubernetes/README.md: -------------------------------------------------------------------------------- 1 | ## Moved into a new repo 2 | getkub/k8s_kubernetes 3 | 4 | -------------------------------------------------------------------------------- /thirdparty/k8s_kubernetes/k8s_security.md: -------------------------------------------------------------------------------- 1 | ## Guidance 2 | - Harden container hosts 3 | - approved host-os which is hardened to CIS standards (cgroups, SElinux, appArmor rules, seccomp profiles) 4 | - Immutable file-systems for container and writable volumes for application and data 5 | -------------------------------------------------------------------------------- /thirdparty/kafka/check_a_topic.txt: -------------------------------------------------------------------------------- 1 | kafka-console-consumer --zookeeper server:port/kafka --topic topic-name 2 | -------------------------------------------------------------------------------- /thirdparty/kafka/kafka_quick_setup.md: -------------------------------------------------------------------------------- 1 | https://kafka.js.org/docs/running-kafka-in-development 2 | 3 | ``` 4 | 
version: '2' 5 | services: 6 | zookeeper: 7 | image: wurstmeister/zookeeper:latest 8 | ports: 9 | - "2181:2181" 10 | kafka: 11 | image: wurstmeister/kafka:2.11-1.1.1 12 | ports: 13 | - "9092:9092" 14 | links: 15 | - zookeeper 16 | environment: 17 | KAFKA_ADVERTISED_HOST_NAME: ${HOST_IP} 18 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 19 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' 20 | KAFKA_DELETE_TOPIC_ENABLE: 'true' 21 | KAFKA_CREATE_TOPICS: "topic-test:1:1" 22 | volumes: 23 | - /var/run/docker.sock:/var/run/docker.sock 24 | ``` 25 | -------------------------------------------------------------------------------- /thirdparty/keytool/keytool.md: -------------------------------------------------------------------------------- 1 | ## Various Keytool command 2 | #### Convert from jks12 to pkcs12 3 | ``` 4 | jks="myjks_file" 5 | storepass="mystorepass" 6 | 7 | keytool -importkeystore \ 8 | -srckeystore ${jks}.jks -srcstoretype jks -srcstorepass $storepass \ 9 | -destkeystore ${jks}.p12 -deststoretype pkcs12 -deststorepass $storepass \ 10 | 11 | ``` 12 | 13 | ``` 14 | # Alternatively 15 | keytool -list -rfc -keystore $jks -storepass $storepass | sed "s/^\-*BEGIN [A-Z]-*//g;s/^\-*END [A-Z]-*$//g" > ${jks}.pem 16 | 17 | als="ca" 18 | keytool -export -rfc -keystore $jks -storepass $storepass -alias $als -file ${jks}.pem 19 | ``` 20 | 21 | 22 | #### PKCS12 to PEM 23 | ``` 24 | openssl pkcs12 -in ${jks}.p12 -out ${jks}.pem -passin pass:${storepass} -passout pass:${storepass} 25 | ``` 26 | 27 | ### PEM can be used in cacert in CURL 28 | -------------------------------------------------------------------------------- /thirdparty/keytool/keytool_java.md: -------------------------------------------------------------------------------- 1 | 2 | ``` 3 | chain_name="my-chain-1" 4 | keypass="changeit" 5 | ``` 6 | 7 | ## Linux OS 8 | ``` 9 | Linux normally has in /etc/pki/ca-trust/source/anchors/*.pem 10 | ``` 11 | 12 | 13 | ### Java 14 | ``` 15 | keytool -import -trustcacerts -keystore 
$JAVA_HOME/lib/security/cacerts -storepass $keypass -noprompt -alias $chain_name -file $chain_name 16 | 17 | keytool -delete -alias $chain_name -keystore $JAVA_HOME/lib/security/cacerts -storepass $keypass 18 | ``` 19 | -------------------------------------------------------------------------------- /thirdparty/ldap_activeDirectory/README.md: -------------------------------------------------------------------------------- 1 | ## jar file 2 | https://github.com/intoolswetrust/ldap-server 3 | 4 | 5 | ``` 6 | # Import user and start server 7 | java -jar ldap-server.jar users.ldif 8 | 9 | ## to Get information 10 | searchbase="dc=ldap,dc=example" 11 | ldap_server="ldap://127.0.0.1:10389" 12 | bind="uid=admin,ou=system" 13 | bindpw="secret" 14 | 15 | ldapsearch -x -b "${searchbase}" -H "${ldap_server}" -D "${bind}" -W 16 | ldapsearch -x -b "${searchbase}" -H "${ldap_server}" -D "${bind}" -w ${bindpw} "objectclass=*" 17 | 18 | ldapsearch -x -LLL -H "${ldap_server}" -D "${bind}" -w ${bindpw} -b "${searchbase}" -s sub '(objectClass=*)' 'givenName=username*' 19 | 20 | ``` -------------------------------------------------------------------------------- /thirdparty/ldap_activeDirectory/adConnectivity.txt: -------------------------------------------------------------------------------- 1 | # AD connectivity test 2 | echo 1>/dev/null 2>/dev/null < /dev/tcp/2012myLdap.company.com/636; echo $? 
3 | # for timeout use: timeout 1 bash -c 'cat < /dev/null > /dev/tcp/google.com/80' 4 | # Try 636 & 389 5 | # 0 means success, 1 means failure 6 | 7 | # AD test using CURL 8 | curl "ldap://ldap.uninett.no/dc=uninett,dc=no??sub?(&(cn=peop*)(objectClass=room))" 9 | #http://www.idevelopment.info/data/LDAP/LDAP_Resources/SEARCH_Setting_the_SCOPE_Parameter.shtml 10 | -------------------------------------------------------------------------------- /thirdparty/ldap_activeDirectory/sampleusers.ldif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getkub/SplunkScriplets/89fb6c5f3552c3480f957baefc63705857f4f619/thirdparty/ldap_activeDirectory/sampleusers.ldif -------------------------------------------------------------------------------- /thirdparty/linux/bonnie_plusplus: -------------------------------------------------------------------------------- 1 | # Running sample 2 | nohup /usr/sbin/bonnie++ -d /splunk/indexfilesystem -s 115000 -u splunk:splunk -fq -n 50 >> outputfile & 3 | -------------------------------------------------------------------------------- /thirdparty/linux/commands_to_learn.txt: -------------------------------------------------------------------------------- 1 | fstrim 2 | # fstrim /proc/$(docker inspedct --format='{{ .State.Pid }}' )/root 3 | -------------------------------------------------------------------------------- /thirdparty/linux/disk_commands.md: -------------------------------------------------------------------------------- 1 | ``` 2 | lsblk -io KNAME,TYPE,SIZE,MODEL,LABEL 3 | ``` 4 | 5 | ## Will show Start/End and Blocks 6 | ``` 7 | fdisk -l /dev/sda 8 | # https://support.microsoft.com/en-us/kb/929491 9 | ``` 10 | 11 | ## Bad sectors in Hard Disk 12 | ``` 13 | sudo hdparm -Ttv /dev/sda 14 | ``` 15 | 16 | ## dd commands and showing disk write speed 17 | ``` 18 | dd if=/dev/zero of=/pv-1tb-0/dd.test bs=32k count=100000 oflag=direct 19 | ``` 20 | 
-------------------------------------------------------------------------------- /thirdparty/linux/kernel_uprade.txt: -------------------------------------------------------------------------------- 1 | ## Crude steps 2 | yum update kernel-lt 3 | vi /etc/default/grub 4 | grub2-mkconfig -o /boot/grub2/grub.cfg 5 | 6 | ## After reboot 7 | 8 | rpm -qa | grep kernel # get older kernel list 9 | yum remove old_kernel1 10 | 11 | ## Run grub2-mkconfig to clean up boot list 12 | -------------------------------------------------------------------------------- /thirdparty/linux/process_commands.md: -------------------------------------------------------------------------------- 1 | ``` 2 | cat /proc/sys/net/core/somaxconn 3 | ``` 4 | -------------------------------------------------------------------------------- /thirdparty/logrotate/logrotate_60days_rsyslog.conf: -------------------------------------------------------------------------------- 1 | /var/log/rsyslog/*/*/*.log 2 | { 3 | notifempty 4 | compress 5 | create 0664 syslog nuk 6 | daily 7 | dateext 8 | missingok 9 | rotate 60 10 | sharedscripts 11 | delaycompress 12 | postrotate 13 | /usr/lib/rsyslog/rsyslog-rotate 14 | endscript 15 | 16 | } 17 | -------------------------------------------------------------------------------- /thirdparty/machine_learning/good_sites.md: -------------------------------------------------------------------------------- 1 | - https://www.ray.io/integrations 2 | - 3 | -------------------------------------------------------------------------------- /thirdparty/macos/applescript/mouse/145_scrolllock.scpt: -------------------------------------------------------------------------------- 1 | tell application "System Events" 2 | key code 145 using {shift down, command down} -- shift-command-left 3 | end tell 4 | 5 | -------------------------------------------------------------------------------- /thirdparty/macos/applescript/mouse/launch.md: 
-------------------------------------------------------------------------------- 1 | 2 | ``` 3 | cd ~/somelocation/ 4 | launchctl load 145_scrolllock.plist 5 | launchctl start 145_scrolllock 6 | 7 | ## Oneliner 8 | mydir="~/Documents" 9 | cd $mydir && launchctl load 145_scrolllock.plist && launchctl start 145_scrolllock && cd - 10 | cd $mydir && launchctl stop 145_scrolllock && launchctl unload 145_scrolllock.plist && cd - 11 | ``` 12 | 13 | ## check if its loaded correctly 14 | ``` 15 | launchctl list | grep 145_scrolllock 16 | ``` 17 | -------------------------------------------------------------------------------- /thirdparty/macos/initial_setup.md: -------------------------------------------------------------------------------- 1 | - install brew 2 | - install visual studio, draw.io, chrome, sublime 3 | - 4 | ``` 5 | brew install docker 6 | brew install python@3.11 7 | brew install ansible 8 | 9 | ``` 10 | 11 | 12 | - prompts & zsh themes 13 | ``` 14 | ZSH_THEME="philips" 15 | 16 | PROMPT='%{$fg[blue]%}%B%c/%b%{$reset_color%} $(git_prompt_info)%(!.#.$) ' 17 | 18 | ``` 19 | -------------------------------------------------------------------------------- /thirdparty/macos/snippets.md: -------------------------------------------------------------------------------- 1 | ``` 2 | sysctl -n machdep.cpu.brand_string 3 | ``` 4 | -------------------------------------------------------------------------------- /thirdparty/mouse/README.md: -------------------------------------------------------------------------------- 1 | ## To call vbs 2 | ``` 3 | Cscript.exe C:\mydir\myvbscript.vbs 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/mouse/activeMouse.ps1: -------------------------------------------------------------------------------- 1 | param($minutes = 120) 2 | $myshell = New-Object -com "Wscript.Shell" 3 | 4 | for ($i = 0; $i -lt $minutes; $i++) { 5 | Start-Sleep -Seconds 60 6 | $myshell.SendKeys("{SCROLLLOCK 2}")
7 | } 8 | -------------------------------------------------------------------------------- /thirdparty/mouse/mouse2.vbs: -------------------------------------------------------------------------------- 1 | ' cscript "" 2 | 3 | Set fso = CreateObject ("Scripting.FileSystemObject") 4 | Set stdout = fso.GetStandardStream (1) 5 | Set stderr = fso.GetStandardStream (2) 6 | 7 | Dim Counter: Counter = 1 8 | Set WshShell = WScript.CreateObject("WScript.Shell") 9 | 10 | stdout.WriteLine "Starting for counter: 120mins" 11 | 12 | While Counter < 120 13 | Counter = Counter + 1 14 | WScript.Sleep 60000 15 | WshShell.SendKeys "{SCROLLLOCK 2}" 16 | 'stdout.WriteLine "Entry: " & Counter 17 | Wend 18 | 19 | Set WshShell = Nothing 20 | -------------------------------------------------------------------------------- /thirdparty/music/README.md: -------------------------------------------------------------------------------- 1 | ## Generate Sheet music from notations 2 | 3 | ### Pre-Reqs 4 | - MuseScore 5 | - Lilypond (`brew install lilypond`) 6 | - Run lilypond with notation file generated from AI tool 7 | 8 | ``` 9 | D+ Eb+ C+ G / F G 10 | D+ Eb+ C+ G/ Eb F 11 | F Ab C+ D+ / C+ Bb 12 | Bb C+ D+ F+/ D+ Eb+ 13 | ``` -------------------------------------------------------------------------------- /thirdparty/music/sample.ly: -------------------------------------------------------------------------------- 1 | \version "2.24.0" 2 | 3 | \relative c' { 4 | 4^\markup { "D+" } 4^\markup { "Eb+" } 4^\markup { "C+" } g4 | 5 | 4^\markup { "D+" } 4^\markup { "Eb+" } 4^\markup { "C+" } g4 | 6 | 4^\markup { "F" } 4^\markup { "Ab" } 4^\markup { "C+" } 4^\markup { "D+" } | 7 | 4^\markup { "Bb" } 4^\markup { "C+" } 4^\markup { "D+" } 4^\markup { "F+" } | 8 | } 9 | -------------------------------------------------------------------------------- /thirdparty/music/sample.lytex: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | 
\usepackage[utf8]{inputenc} 3 | \usepackage{graphicx} 4 | \begin{document} 5 | 6 | \begin{lilypond} 7 | \version "2.24.3" 8 | \relative c' { 9 | 4^\markup { "D+" } 4^\markup { "Eb+" } 4^\markup { "C+" } g4 | 10 | 4^\markup { "D+" } 4^\markup { "Eb+" } 4^\markup { "C+" } g4 | 11 | 4^\markup { "F" } 4^\markup { "Ab" } 4^\markup { "C+" } 4^\markup { "D+" } | 12 | 4^\markup { "Bb" } 4^\markup { "C+" } 4^\markup { "D+" } 4^\markup { "F+" } | 13 | } 14 | 15 | \end{lilypond} 16 | 17 | \end{document} 18 | -------------------------------------------------------------------------------- /thirdparty/mustache/empReports.mustache: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | {{#data}} 8 | 9 | 10 | 11 | 21 | 22 | {{/data}} 23 |
empnameempSalaryreports
{{empname}}{{empSalary}} 12 | 13 | {{#reports}} 14 | 15 | 16 | 17 | 18 | {{/reports}} 19 |
{{date}}{{schedule}}
20 |
24 | -------------------------------------------------------------------------------- /thirdparty/mustache/empReports2.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": [{ 3 | "empname": "bob", 4 | "empSalary": 12000, 5 | "reports": [{ 6 | "date": "2021-07-20", 7 | "schedule": "daily" 8 | }, { 9 | "date": "2021-07-21", 10 | "schedule": "weekly" 11 | }] 12 | }, { 13 | "empname": "joe", 14 | "empSalary": 13000, 15 | "reports": [{ 16 | "date": "2021-07-20", 17 | "schedule": "daily" 18 | }, { 19 | "date": "2021-07-21", 20 | "schedule": "weekly" 21 | }] 22 | }] 23 | } 24 | -------------------------------------------------------------------------------- /thirdparty/mustache/empSalary.json: -------------------------------------------------------------------------------- 1 | {"data": [{"empname":"bob", "empSalary": 12000},{"empname":"joe", "empSalary": 13000}]} 2 | -------------------------------------------------------------------------------- /thirdparty/mustache/empSalary1.mustache: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {{#data}} 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /thirdparty/n8n/README.md: -------------------------------------------------------------------------------- 1 | ## Various Workflows using n8n 2 | 3 | 4 | ## User management 5 | - https://docs.n8n.io/hosting/user-management/ 6 | 7 | ## Scaling 8 | - https://docs.n8n.io/hosting/scaling/ 9 | -------------------------------------------------------------------------------- /thirdparty/network/performance_test_network.md: -------------------------------------------------------------------------------- 1 | ## Soak Test Network - iPerf 2 | ``` 3 | sudo apt -y install iperf3 4 | iperf3 -s 5 | 6 | iperf3 -c server2 -p 8000 -t 300 7 | ``` 8 | --------------------------------------------------------------------------------
/thirdparty/network/session_complications.txt: -------------------------------------------------------------------------------- 1 | netstat -nat | grep source_host_ip 2 | # Will see the sessions established. If they are NOT getting closed off 3 | 4 | sudo netstat -altpn | grep 8080 | grep LISTEN 5 | # Will show process 6 | 7 | ## ----------------------- ## 8 | # do 9 | lsof -ni : 10 | # get the process id pid) and FileDescriptor 11 | # then do 12 | 13 | stat /proc//fd/ 14 | 15 | ## Get information of proc 16 | # in HEX , so the rem_value should be converted from hex to decimal 17 | cat /proc/net/tcp 18 | lsof -Pi :22 19 | -------------------------------------------------------------------------------- /thirdparty/network/subnet_allocation.md: -------------------------------------------------------------------------------- 1 | ## Subnet Allocation tool 2 | - https://network00.com/NetworkTools/IPv4SubnetCreator/ 3 | 4 | -------------------------------------------------------------------------------- /thirdparty/network/tcpdump_examples.md: -------------------------------------------------------------------------------- 1 | https://hackertarget.com/tcpdump-examples/ 2 | -------------------------------------------------------------------------------- /thirdparty/network/test_connection.sh: -------------------------------------------------------------------------------- 1 | # Test various levels of connectivity 2 | # Can your laptop connect to VPN, then to intranet site, then to final URL etc. 3 | 4 | outputFile="/tmp/connectionTest.log" 5 | a=1 6 | url1="https://google.com" 7 | url2="https://not-reachable-somesite_outthere.com" 8 | url3="https://facebook.com" 9 | sleepSeconds="60" 10 | 11 | while [ $a -lt 200 ]; do 12 | 13 | runID=`date +%s` 14 | urls=( "$url1", "$url2", "$url3" ) 15 | 16 | for url in "${urls[@]}" 17 | do 18 | curl -s --connect-timeout 2 -o /dev/null $url 19 | echo "runID=$runID url=$url rc=$? 
runTime="`date +%FT%T` >> ${outputFile} 20 | done 21 | a=`expr $a + 1` 22 | sleep $sleepSeconds 23 | done 24 | -------------------------------------------------------------------------------- /thirdparty/observability/apm/elastic_java.md: -------------------------------------------------------------------------------- 1 | ## Java APM 2 | 3 | - https://www.elastic.co/guide/en/apm/agent/java/current/supported-technologies-details.html#supported-java-versions 4 | - -------------------------------------------------------------------------------- /thirdparty/observability/opentelemetry/ot_logs_sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "timestamp": { 3 | "seconds": 1649423141, 4 | "nanos": 345000000 5 | }, 6 | "attributes": { 7 | "uid": 100, 8 | "auid": 1233, 9 | "comm": "bash", 10 | "exe": "/bin/bash", 11 | "key": "login", 12 | "syscall": "open", 13 | "return": 1, 14 | "retval": -1, 15 | "msg": "Login attempt from /dev/pts/0" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /thirdparty/observability/opentelemetry/ot_metrics_sample.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "http_server_requests", 4 | "kind": "Counter", 5 | "value": 123, 6 | "attributes": { 7 | "http.status_code": 200 8 | } 9 | }, 10 | { 11 | "name": "db_query_duration", 12 | "kind": "Histogram", 13 | "value": 50.2, 14 | "attributes": { 15 | "db.operation": "SELECT", 16 | "db.table": "users" 17 | } 18 | } 19 | ] 20 | -------------------------------------------------------------------------------- /thirdparty/observability/opentelemetry/standards/otel_logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 7 | 8 | 9 | 10 | 11 | 12 | 13 | 
-------------------------------------------------------------------------------- /thirdparty/observability/traces/traceparent.md: -------------------------------------------------------------------------------- 1 | ## Traceparent 2 | 3 | Distributed tracing relies on injecting a custom traceparent HTTP header into outgoing requests to track the flow of a single request across multiple microservices in a distributed system. 4 | 5 | - The traceparent header follows a specific format defined by the W3C Trace Context specification: 6 | ``` 7 | version-trace_id-parent_id-trace_flags 8 | ``` 9 | 10 | eg : Through Service A, B & C, it has below headers 11 | ``` 12 | traceparent at Service A: 00-trace_id_A-00-01 13 | traceparent at Service B: 00-trace_id_B-trace_id_A-01 14 | traceparent at Service C: 00-trace_id_C-trace_id_A-01 15 | ``` -------------------------------------------------------------------------------- /thirdparty/observability/traces/tracing2.md: -------------------------------------------------------------------------------- 1 | ## Details of Tracing 2 | - Distributed Tracing - https://www.elastic.co/guide/en/observability/current/apm-distributed-tracing.html 3 | - Transaction sampling - https://www.elastic.co/guide/en/observability/current/apm-sampling.html 4 | 5 | -------------------------------------------------------------------------------- /thirdparty/observability/traces/transaction_sampling.md: -------------------------------------------------------------------------------- 1 | ## Elastic APM supports two types of sampling: 2 | 3 | - Head-based sampling 4 | - Tail-based sampling 5 | 6 | ### Head Based Sampling 7 | 8 | - the sampling decision for each trace is made when the trace is initiated 9 | - a sampling value of .2 indicates a transaction sample rate of 20% 10 | - Head-based sampling is quick and easy to set up 11 | - Entirely random — interesting data might be discarded purely due to chance 12 | 13 | ### Tail Based Sampling 14 | 15 | - the 
sampling decision for each trace is made after the trace has completed 16 | - -------------------------------------------------------------------------------- /thirdparty/openldap/domainData.ldif: -------------------------------------------------------------------------------- 1 | dn: dc=df,dc=org 2 | objectClass: top 3 | objectClass: dcObject 4 | objectclass: organization 5 | o: df org 6 | dc: df 7 | 8 | dn: cn=admin,dc=df,dc=org 9 | objectClass: organizationalRole 10 | cn: admin 11 | description: Directory admin 12 | 13 | dn: ou=People,dc=df,dc=org 14 | objectClass: organizationalUnit 15 | ou: People 16 | 17 | dn: ou=Group,dc=df,dc=org 18 | objectClass: organizationalUnit 19 | ou: Group -------------------------------------------------------------------------------- /thirdparty/openldap/insert_data.md: -------------------------------------------------------------------------------- 1 | ### Insert data to ldap 2 | ``` 3 | password="adminpassword" 4 | ldapserver="127.0.0.1:1389" 5 | ldifFile="/tmp/newusers.ldif" 6 | mydn="dc=example,dc=org" 7 | ldapadd -x -D $mydn -w $password -H ldap://${ldapserver} -f ${ldifFile} 8 | ldapmodify -a -x -D $mydn -w $password -H ldap://${ldapserver} -f ${ldifFile} 9 | 10 | ldapsearch -H ldap://${ldapserver} -x -s base -b "" -LLL "+" 11 | #ldapadd -x -W -D "cn=ldapadm,${mydn}" -f /etc/openldap/slapd.d/xxxxx.ldif 12 | 13 | ``` 14 | 15 | 16 | ## Password changes 17 | - https://www.digitalocean.com/community/tutorials/how-to-change-account-passwords-on-an-openldap-server 18 | 19 | ``` 20 | ldappasswd -H ldap://${ldapserver} -x -D $mydn -W -A -S 21 | ldappasswd -H ldap://${ldapserver} -x -D $mydn -w old_passwd -a old_passwd -S 22 | ``` 23 | -------------------------------------------------------------------------------- /thirdparty/openldap/newusers.ldif: -------------------------------------------------------------------------------- 1 | # Add John Smith to the organization 2 | dn: uid=jsmith1,ou=People,dc=df,dc=org 3 | changetype: add 4 
| objectClass: inetOrgPerson 5 | description: John Smith from Accounting. John is the project 6 | manager of the building project, so contact him with any qu 7 | estions. 8 | cn: John Smith 9 | sn: Smith 10 | uid: jsmith1 11 | 12 | # Add Sally Brown to the organization 13 | dn: uid=sbrown20,ou=People,dc=df,dc=org 14 | changetype: add 15 | objectClass: inetOrgPerson 16 | description: Sally Brown from engineering. Sally is responsibl 17 | e for designing the blue prints and testing the structural int 18 | egrity of the design. 19 | cn: Sally Brown 20 | sn: Brown 21 | uid: sbrown20 22 | 23 | -------------------------------------------------------------------------------- /thirdparty/openldap/slapd.conf: -------------------------------------------------------------------------------- 1 | #slapd.conf for CentOS 2 | 3 | #schemas to use 4 | include /etc/openldap/schema/core.schema 5 | include /etc/openldap/schema/cosine.schema 6 | include /etc/openldap/schema/inetorgperson.schema 7 | include /etc/openldap/schema/nis.schema 8 | 9 | #log 10 | pidfile /var/run/openldap/slapd.pid 11 | argsfile /var/run/openldap/slapd.args 12 | 13 | ######################################## 14 | ##### dbm database definitions #### 15 | ######################################## 16 | 17 | database bdb 18 | suffix dc=server,dc=com 19 | rootdn cn=Manager,dc=server,dc=com 20 | rootpw secret 21 | 22 | #database directory 23 | directory /var/lib/ldap 24 | 25 | #end 26 | -------------------------------------------------------------------------------- /thirdparty/openldap/ubuntu_ldap.conf: -------------------------------------------------------------------------------- 1 | #slapd.conf for Ubuntu 22.04 2 | 3 | #schemas to use 4 | include /etc/ldap/schema/core.schema 5 | include /etc/ldap/schema/cosine.schema 6 | include /etc/ldap/schema/inetorgperson.schema 7 | include /etc/ldap/schema/nis.schema 8 | 9 | #log 10 | pidfile /var/run/slapd/slapd.pid 11 | argsfile /var/run/slapd/slapd.args 12 | 13 | 
######################################## 14 | ##### dbm database definitions #### 15 | ######################################## 16 | 17 | database bdb 18 | suffix dc=df,dc=org 19 | rootdn cn=admin,dc=df,dc=org 20 | 21 | #database directory 22 | directory /var/lib/ldap 23 | 24 | #end 25 | -------------------------------------------------------------------------------- /thirdparty/openldap/ubuntu_setup.md: -------------------------------------------------------------------------------- 1 | ## Ubuntu Setup 2 | - https://www.tecmint.com/install-openldap-server-for-centralized-authentication/ 3 | 4 | Commands 5 | 6 | ``` 7 | cp /usr/share/slapd/slapd.init.ldif /var/lib/ldap/ 8 | ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/cosine.ldif 9 | ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/nis.ldif 10 | ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/inetorgperson.ldif 11 | 12 | ldapmodify -Y EXTERNAL -H ldapi:/// -f /tmp/mydomain.ldif 13 | ldapadd -Y EXTERNAL -x -D cn=admin,dc=df,dc=org -W -f /tmp/domainData.ldif 14 | ldapadd -Y EXTERNAL -x -D "cn=admin,dc=df,dc=org" -W -f /tmp/domainData.ldif 15 | 16 | ``` -------------------------------------------------------------------------------- /thirdparty/opensearch/simple_searches.md: -------------------------------------------------------------------------------- 1 | ## Workbench tool 2 | ``` 3 | search source=opensearch_dashboards_sample_data_ecommerce | dedup total_unique_products 4 | 5 | search source=opensearch_dashboards_sample_data_ecommerce | fields customer_gender,category | stats count() as count by customer_gender,category| fields customer_gender,category,count 6 | ``` 7 | -------------------------------------------------------------------------------- /thirdparty/openssl/openssl_commands.md: -------------------------------------------------------------------------------- 1 | ## Verify a CAfile (chain) 2 | 3 | ``` 4 | openssl verify -CAfile myfile.ca.chain my.crt 5 | ``` 6 | 
-------------------------------------------------------------------------------- /thirdparty/openssl/openssl_grab_pem.txt: -------------------------------------------------------------------------------- 1 | 2 | # https://unix.stackexchange.com/questions/368123/how-to-extract-the-root-ca-and-subordinate-ca-from-a-certificate-chain-in-linux 3 | your_site="mydev.site:9200" 4 | openssl s_client -showcerts -verify 5 -connect ${your_site} < /dev/null | awk '/BEGIN/,/END/{ if(/BEGIN/){a++}; out="cert"a".crt"; print >out}' && for cert in *.crt; do newname=$(openssl x509 -noout -subject -in $cert | sed -n 's/^.*CN=\(.*\)$/\1/; s/[ ,.*]/_/g; s/__/_/g; s/^_//g;p').pem; mv $cert $newname; done 5 | -------------------------------------------------------------------------------- /thirdparty/pandoc/tips.txt: -------------------------------------------------------------------------------- 1 | # Using Docker 2 | docker run -v `pwd`:/data pandoc/docker -f markdown -t latex -H header.sty inputFileName.tsv -o outputFileName.pdf 3 | 4 | # contents of header.sty 5 | \usepackage{scrextend} 6 | \usepackage[brazil, brazilian]{babel} 7 | \usepackage[utf8]{inputenc} 8 | \usepackage[a4paper,landscape,top=2.5cm,bottom=2.5cm,left=0.2cm, right=0.2cm] {geometry} 9 | -------------------------------------------------------------------------------- /thirdparty/perl/listPerlInstalledModules.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | # list all of the perl modules installed 3 | use File::Find ; 4 | for (@INC) { find(\&modules,$_) ; } 5 | sub modules 6 | { 7 | if (-d && /^[a-z]/) { $File::Find::prune = 1 ; return } 8 | return unless /\.pm$/ ; 9 | my $fullPath = "$File::Find::dir/$_"; 10 | $fullPath =~ s!\.pm$!!; 11 | $fullPath =~ s#/(\w+)$#::$1# ; 12 | print "$fullPath \n"; 13 | } 14 | -------------------------------------------------------------------------------- /thirdparty/php/shellScriptTrigger/shellScriptTrigger.js: 
-------------------------------------------------------------------------------- 1 | $('#my-span-id').load('./shellScriptTrigger.php'); 2 | -------------------------------------------------------------------------------- /thirdparty/php/shellScriptTrigger/shellScriptTrigger.php: -------------------------------------------------------------------------------- 1 | 6 | -------------------------------------------------------------------------------- /thirdparty/postman/add_dynamic_value.md: -------------------------------------------------------------------------------- 1 | - Adding dynamic value to raw body : https://community.postman.com/t/raw-json-body-how-to-add-variable/3396 2 | -------------------------------------------------------------------------------- /thirdparty/powershell/Test-Cred.ps1: -------------------------------------------------------------------------------- 1 | # Validate a user credentials are correct. 2 | $thisCreds = Get-Credential 3 | $thisUser = $thisCreds.username 4 | $thisPass = $thisCreds.GetNetworkCredential().password 5 | $Root = "LDAP://" + ([ADSI]'').distinguishedName 6 | $Domain = New-Object System.DirectoryServices.DirectoryEntry($Root,$thisUser,$thisPass) 7 | 8 | If ($domain.name -ne $null) 9 | 10 | { 11 | return "Authenticated" 12 | } Else 13 | { 14 | return "Not authenticated" 15 | } 16 | -------------------------------------------------------------------------------- /thirdparty/powershell/admin_commands.md: -------------------------------------------------------------------------------- 1 | ## Get Audit Policy 2 | ``` 3 | auditpol /get /category:* 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/powershell/convert_csv_with_double_quotes.ps1: -------------------------------------------------------------------------------- 1 | Import-Csv ".\SourceFile.csv" | Export-Csv ".\DestinationFile.csv" -NoTypeInformation 2 | 3 | 4 | 5 | # To Find duplicates & non-duplicates 6 | 
myfile="blah.csv" 7 | Import-Csv $myfile | group-Object -Property device_ip| Where{$_.Count -gt 1} | Foreach-Object {$_.Group} | Export-csv -Path yes_duplicates.csv -NoTypeInformation 8 | Import-Csv $myfile | group-Object -Property device_ip| Where{$_.Count -lt 2} | Foreach-Object {$_.Group} | Export-csv -Path non_duplicates.csv -NoTypeInformation 9 | 10 | # Then in notepad++ (for duplicates) 11 | (.+,.+)\r\n.+,.+ => \1 12 | -------------------------------------------------------------------------------- /thirdparty/powershell/get_driver_version.md: -------------------------------------------------------------------------------- 1 | ``` 2 | (Get-WmiObject Win32_PnPSignedDriver| where {$_.DeviceName -eq "Thunderbolt(TM) Controller - 15BF"}).driverversion 3 | ``` 4 | -------------------------------------------------------------------------------- /thirdparty/powershell/get_user_accounts.ps1: -------------------------------------------------------------------------------- 1 | # List user accounts on a Windows system 2 | Get-WmiObject -Class Win32_UserAccount -------------------------------------------------------------------------------- /thirdparty/powershell/integrate_splunk.ps1: -------------------------------------------------------------------------------- 1 | # Powershell script REST method to get information from Splunk 2 | 3 | [System.Net.ServicePointManager]::ServerCertificateValidationCAllback = { $True } 4 | $server = 'mysplunkhost' 5 | $my_user = 'splunk_svc' 6 | 7 | $url_export = "https://${server}:8089/services/search/jobs/export" 8 | $my_search = 'search `mymacro(param1,parm2)`' 9 | 10 | $body = @{ 11 | search = $my_search 12 | output_mode = "csv" 13 | } 14 | 15 | Invoke-RestMethod -Method Post -Uri $url_export -Credential $my_user -Body $body -OutFile output.csv 16 | -------------------------------------------------------------------------------- /thirdparty/powershell/key.ps1: 
-------------------------------------------------------------------------------- 1 | param($minutes = 120) 2 | $myshell = New-Object -com "Wscript.Shell" 3 | 4 | for ($i = 0; $i -lt $minutes; $i++) { 5 | Start-Sleep -Seconds 60 6 | $myshell.SendKeys("{SCROLLLOCK 2}") 7 | } 8 | -------------------------------------------------------------------------------- /thirdparty/powershell/sleep.ps1: -------------------------------------------------------------------------------- 1 | param($minutes = 60) 2 | $myshell = New-Object -com "Wscript.Shell" 3 | 4 | for ($i=0; $i -lt $minutes; $i++) { 5 | Start-Sleep -Seconds 60 6 | $myshell.SendKeys("{SCROLLLOCK 2}") 7 | 8 | } 9 | -------------------------------------------------------------------------------- /thirdparty/powershell/user_bulk/insert_bulk_users.ps1: -------------------------------------------------------------------------------- 1 | #foreach ($newuser in cat ./users.txt) { 2 | #./NewAdminuser.ps1 -objectname $newuser} 3 | 4 | $csv = import-csv "myAdminUserList.csv" 5 | foreach ($item in $csv) 6 | { 7 | "newuser = $($item.user) with password = $($item.password)" 8 | 9 | ./NewAdminUser.ps1 -objectname $item.user -plainPassword $item.password 10 | } 11 | Write-Host 'The script has completed. Please review the output above for any errors or warnings. 
Press any key to dismiss.'; 12 | $null = $Host.UI.RawUI.ReadKey('NoEcho,IncludeKeyDown'); 13 | 14 | -------------------------------------------------------------------------------- /thirdparty/preChecks/commands.csv: -------------------------------------------------------------------------------- 1 | #group,validate,command 2 | version,SplunkES,grep VERSION /opt/splunk/etc/splunk.version 3 | -------------------------------------------------------------------------------- /thirdparty/preChecks/ports.csv: -------------------------------------------------------------------------------- 1 | #Port,Description,FWD,SH,IND,CM,DEP 2 | 8089,Splunk_Management,No,Yes,Yes,Yes,Yes 3 | 9997,Splunk_Receiver,Yes,No,Yes,No,No 4 | 8065,APP_Server,No,Yes,No,No,No 5 | 8066,Cluster_Replication,No,Yes,Yes,No,No 6 | 8000,Splunk_Web,No,Yes,No,Yes,No 7 | 8191,KV_Store,No,Yes,No,No,No 8 | 514,Rsyslog,Yes,No,No,No,No 9 | -------------------------------------------------------------------------------- /thirdparty/preChecks/syslog_logger.sh: -------------------------------------------------------------------------------- 1 | # syslog simulation using logger or netcat (nc) 2 | remoteIP="10.12.13.14" 3 | remotePort="514" 4 | sampleMessage="TEST_MSG" 5 | 6 | # logger -n $remoteIP -P $remotePort $sampleMessage 7 | 8 | echo $sampleMessage | nc $remoteIP $remotePort 9 | -------------------------------------------------------------------------------- /thirdparty/python/partial_requests_core.py: -------------------------------------------------------------------------------- 1 | ... 
2 | try: 3 | 4 | r=requests.get(url=myurl, auth(myuser,mysession), verify=False, proxies=dictProxy, params=payload, timeout=90) 5 | print r.url 6 | -------------------------------------------------------------------------------- /thirdparty/python/pip.README.md: -------------------------------------------------------------------------------- 1 | ### To install per user 2 | ``` 3 | python3 -m pip install --user pyautogui 4 | ``` 5 | 6 | 7 | ## Pip List 8 | ``` 9 | /opt/venv/bin/pip3 list 10 | ``` 11 | -------------------------------------------------------------------------------- /thirdparty/python/portTest.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import argparse 3 | 4 | parser = argparse.ArgumentParser() 5 | parser.add_argument('-d','--dest', help='Destination Host', required=True) 6 | parser.add_argument('-p','--port', help='Destination Port', required=True, type=int) 7 | args = parser.parse_args() 8 | 9 | s = socket.socket() 10 | host = args.dest 11 | port = args.port 12 | 13 | try: 14 | s.connect((host,port)) 15 | print("Connection to host:%s at port:%d is Successful!" % (host,port)) 16 | except Exception as e: 17 | print("Connection to host:%s at port:%d FAILED! 
Exception=%s" % (host,port,e)) 18 | finally: 19 | s.close() 20 | -------------------------------------------------------------------------------- /thirdparty/python/python_playgrounds.md: -------------------------------------------------------------------------------- 1 | ## Various Links 2 | https://hightower.space/ioc-finder/ 3 | -------------------------------------------------------------------------------- /thirdparty/python/sample_sessions.csv: -------------------------------------------------------------------------------- 1 | session_name,folder,hostname,protocol,port,username 2 | server1,dev/my_group1/server,10.1.2.3,ssh2,22,myuser 3 | server2,dev/my_group2/server,10.1.2.4,ssh2,22,myuser 4 | -------------------------------------------------------------------------------- /thirdparty/python/scapy/import_example1.py: -------------------------------------------------------------------------------- 1 | from scapy.layers.inet import IP, ICMP 2 | from scapy.sendrecv import sr 3 | import sys 4 | sr(IP(dst=sys.argv[1])/ICMP()) 5 | -------------------------------------------------------------------------------- /thirdparty/python/scapy/sniff_example.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2.7 2 | 3 | # Script to use Scapy and Sniff out traffic based on filter. Similar to wireshark 4 | 5 | from scapy.all import * 6 | 7 | def pkt_callback(pkt): 8 | del pkt[Ether].src 9 | del pkt[Ether].dst 10 | del pkt[IP].chksum 11 | del pkt[UDP].chksum 12 | pkt[IP].dst = '192.168.100.100' 13 | sendp(pkt) 14 | pkt.show() 15 | 16 | # sniff(iface='lo', filter='port 514', prn=lambda x: x.show(), store=0) 17 | sniff(iface='lo', filter='port 514', prn=pkt_callback, store=0) 18 | 19 | # filter have lot of options. 
eg 'udp port 514' 20 | -------------------------------------------------------------------------------- /thirdparty/python/simpleConfigParser.py: -------------------------------------------------------------------------------- 1 | # Converts Splunk inputs.conf to CSV file 2 | 3 | import ConfigParser as configparser 4 | import csv 5 | 6 | config = configparser.ConfigParser() 7 | config.read('/tmp/inputs.btool') 8 | csv_columns = ['stanza', 'index', 'sourcetype' ] 9 | 10 | f=open('/tmp/myoutput.csv', 'wb') 11 | w=csv.DictWriter(f,fieldnames=csv_columns, extrasaction='ignore') 12 | w.writeheader() 13 | 14 | for each_section in config.sections(): 15 | mydict = dict(config.items(each_section)) 16 | mydict['stanza'] = each_section 17 | w.writerow(mydict) 18 | 19 | f.close() 20 | -------------------------------------------------------------------------------- /thirdparty/python/virtual_environment.md: -------------------------------------------------------------------------------- 1 | ``` 2 | myproj="env" 3 | python3 -m venv ${myproj} 4 | source ${myproj}/bin/activate 5 | ``` 6 | 7 | 8 | ### Ensure gitignore is kept up-to-date 9 | ``` 10 | git_ignore_file=".gitignore" 11 | echo "${myproj}" >> ${git_ignore_file} 12 | echo ".vscode" >> ${git_ignore_file} 13 | echo ".DS_Store" >> ${git_ignore_file} 14 | ``` 15 | 16 | ### Installing software and Freezing Requirements 17 | ``` 18 | pip install sqlalchemy 19 | pip freeze > requirements.txt 20 | python3 21 | import sqlalchemy 22 | sqlalchemy.__version__ 23 | ``` 24 | 25 | -------------------------------------------------------------------------------- /thirdparty/quest/resync.txt: -------------------------------------------------------------------------------- 1 | # VAS_ERR_NOT_FOUND DN:Out of memory error LDAP Error . 
LDAP could not resolve or connect to LDAP server 2 | 3 | /opt/quest/bin/vastool -u svcUserQuest@domain.company.com -k /root/svcUserQuest-ADMIN.keytab join -f -c ou=MYTEAM,ou=another,ou=servers,dc=mydomain,dc=mycompany,dc=com admin.domain.com 4 | 5 | 6 | -------------------------------------------------------------------------------- /thirdparty/recording_videos_graphics/gource.md: -------------------------------------------------------------------------------- 1 | ## Brew install 2 | ``` 3 | brew install gource 4 | ``` 5 | https://github.com/acaudwell/Gource/wiki/Controls 6 | 7 | ## See graphs 8 | ``` 9 | # No usernames, go through day in 0.1 seconds, skip non-worthful days 10 | gource --hide usernames --seconds-per-day 0.1 --auto-skip-seconds 0.25 11 | ``` 12 | -------------------------------------------------------------------------------- /thirdparty/recording_videos_graphics/terminal_recording.md: -------------------------------------------------------------------------------- 1 | https://asciinema.org/docs/installation#installing-on-linux 2 | -------------------------------------------------------------------------------- /thirdparty/recording_videos_graphics/visio_images.md: -------------------------------------------------------------------------------- 1 | github.com/excalidraw/ 2 | -------------------------------------------------------------------------------- /thirdparty/redpanda/example_commands.md: -------------------------------------------------------------------------------- 1 | rpk cluster logdirs describe -H --aggretate-into-topic --sort-by-size -H --aggregate-into topic -------------------------------------------------------------------------------- /thirdparty/redpanda/settings.txt: -------------------------------------------------------------------------------- 1 | redpanda: 2 | developer_mode: false 3 | enable_transactions: false 4 | auto_create_topics_enabled: false 5 | default_topic_replications: 3 6 | append_chunk_size = 32768 7 | rpk: 8 | 
tune_ballast_file: true 9 | ballast_file_path: "/var/lib/redpanda/data/ballast" 10 | ballast_file_size: "1GiB" 11 | 12 | # =========================== # 13 | # startup flags 14 | # =========================== # 15 | 16 | --check=false 17 | --advertise-rpc-addr=same_host.svc:33145 18 | --overprovisioned 19 | --default-log-level=info 20 | --smp=4 21 | --memory=8000M # of Device/POD memory 22 | --reserve-memory=500M 23 | -------------------------------------------------------------------------------- /thirdparty/regexes/character_group.txt: -------------------------------------------------------------------------------- 1 | https://stackoverflow.com/questions/977251/regular-expressions-and-negating-a-whole-character-group 2 | 3 | Use negative lookahead: (This is quite good to say, find lines which does NOT contain a particular group/words) 4 | ^(?!.*ab).*$ 5 | 6 | 7 | https://regex101.com/r/4ZzoXy/1 8 | -------------------------------------------------------------------------------- /thirdparty/regexes/ipAddressValidation.txt: -------------------------------------------------------------------------------- 1 | #IPv4 2 | (?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){0,3}|0x0*[0-9a-f]{1,8}|0+[0-3]?[0-7]{0,10}|429496729[0-5]|42949672[0-8]\d|4294967[01]\d\d|429496[0-6]\d{3}|42949[0-5]\d{4}|4294[0-8]\d{5}|429[0-3]\d{6}|42[0-8]\d{7}|4[01]\d{8}|[1-3]\d{0,9}|[4-9]\d{0,8}) 3 | 4 | 5 | #IPv6 6 | (?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?[^="\\]+)=(?:\\")?(?<_VAL_1>[^="\\]+) 5 | -------------------------------------------------------------------------------- /thirdparty/regexes/stacktrace_java_grok.md: -------------------------------------------------------------------------------- 1 | StackTrace Regex sample 2 | ``` 3 | 2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to 
MEA Call: {} 4 | java.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist 5 | 6 | at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445) 7 | at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396) 8 | ``` 9 | 10 | GROK 11 | ``` 12 | \A%{TIMESTAMP_ISO8601:timestamp}\s+%{LOGLEVEL:loglevel}\s+(?(?:[a-zA-Z0-9-]+\.)*[A-Za-z0-9$]+)\s+(-\s+)?(?=(?[A-Z]+[0-9]{4,5}))*%{DATA:message}({({[^}]+},?\s*)*})?\s*$(?(?m:.*))? 13 | ``` 14 | -------------------------------------------------------------------------------- /thirdparty/rfi_process/README.md: -------------------------------------------------------------------------------- 1 | # Product choosing matrix 2 | 3 | RFI 4 | RFP 5 | 6 | --- 7 | - Weightages 8 | 9 | High Level Grouping 10 | 11 | | Criteria | Measure | Weightage%| 12 | | -------- | ------- |------- | 13 | | Technical & Operational | Technical abilities, Operational easyness | 30| 14 | | Commercial & Cost | Capex cost, operational cost, TCO, alignment | 30| 15 | | Partnership | strategic | 10 | 16 | | Compliancy | Various compliant process | 9 | 17 | | ESG | society | 5 | 18 | 19 | Fully compliant - 100 pts 20 | Partial compliant - 70 pts 21 | In progress - 20 pts 22 | Not available - 0 pts 23 | 24 | 25 | Prioritisation 26 | - MUST, 100% 27 | - SHOULD, 70% 28 | - COULD, 20% 29 | 30 | --- 31 | Technical Score 32 | - Requirements definitions 33 | - Evaluation of MVP/POC and rate the solution 34 | - Non Functional requirement 35 | 36 | --- 37 | 38 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/macos.txt: -------------------------------------------------------------------------------- 1 | # rsyslog launch in mac 2 | To have launchd start rsyslog now and restart at login: 3 | brew services start rsyslog 4 | Or, if you don't want/need a background service you can just run: 5 | rsyslogd -f /usr/local/etc/rsyslog.conf -i /usr/local/var/run/rsyslogd.pid 6 | 7 | 
/usr/local/Cellar/rsyslog/7.4.5: 30 files, 1.5MB 8 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/rfc5424_sample2.conf: -------------------------------------------------------------------------------- 1 | global(DefaultNetStreamDriverCAFile="/etc/ssl/certs/my-ca.pem" 2 | *.* action( 3 | type="omfwd" 4 | Protocol="tcp" 5 | Target="10.20.30.40" 6 | Port=4514 7 | queue.filename="rule1" 8 | queue.maxDiskSpace="1g" 9 | queue.saveonShutdown="on" 10 | queue.type="LinkedList" 11 | action.resumeRetryCount="-1" 12 | StreamDriver="gtls" 13 | StreamDriverMode="1" 14 | StreamDriverAuthMode="anon" 15 | TCP_Framing="octect-counted" 16 | Template="RSYSLOG_SyslogProtocol23Format" 17 | ) 18 | 19 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/rsyslog.d: -------------------------------------------------------------------------------- 1 | #/etc/rsyslog.d/mycustom.conf 2 | $template GenericTemplate, /var/log/syslog/%syslogfacility-text%-515/%FromHost%/messages.log 3 | # $template SNMPTemplate, "/var/log/syslog/snmp/%FromHost%/messages.log" 4 | $FileOwner splunk 5 | $FileGroup splunk 6 | $DirOwner splunk 7 | $DirGroup splunk 8 | # 9 | local2.notice ?GenericTemplate 10 | # 11 | # Custom Port 12 | $template myCustomNetworkDevice, "/var/log/syslog/myCustomNetworkDevice/%FromHost%/messages.log" 13 | $template PlainFormat,"%rawmsg%\n" 14 | $RuleSet myCustomNetworkDeviceRule 15 | 16 | # myCustomNetworkDeviceRule Ruleset 17 | $InputTCPServerBindRuleset myCustomNetworkDevice 18 | $InputTCPServerRun 20054 19 | local1.* ?myCustomNetworkDevice 20 | 21 | 22 | # EOF 23 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/rsyslog_snmp.conf: -------------------------------------------------------------------------------- 1 | $template my_snmp_path, "/var/log/snmp/%msg:R,ERE,1,FIELD: UDP: \[(.*)\]\:--end% %syslogtag:1:32% %msg%\n" 2 | $template 
my_snmp_syslog, "%TIMESTAMP:::date-rfc3339% %msg:R,ERE,1,FIELD: UDP: \[(.*)\]\:--end% %syslogtag:1:32% %msg%\n" 3 | 4 | $RuleSet local 5 | 6 | if $programname == 'snmptrapd' and $msg starts with ' UDP: [' then ?my_snmp_path,my_snmp_syslog 7 | 8 | & ~ 9 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/rsyslog_templates.md: -------------------------------------------------------------------------------- 1 | ## Quality templates 2 | 3 | https://www.rsyslog.com/doc/v8-stable/configuration/templates.html 4 | 5 | ``` 6 | template(name="RSYSLOG_StdJSONFmt" type="string" 7 | string="{\"message\":\"%msg:::json%\",\"fromhost\":\"%HOSTNAME:::json%\",\"facility\": 8 | \"%syslogfacility-text%\",\"priority\":\"%syslogpriority-text%\",\"timereported\": 9 | \"%timereported:::date-rfc3339%\",\"timegenerated\": 10 | \"%timegenerated:::date-rfc3339%\"}") 11 | ``` -------------------------------------------------------------------------------- /thirdparty/rsyslog/sample1.conf: -------------------------------------------------------------------------------- 1 | #/etc/rsyslog.d/mycustom.conf 2 | $template GenericTemplate, /var/log/syslog/%syslogfacility-text%-515/%FromHost%/messages.log 3 | # $template SNMPTemplate, "/var/log/syslog/snmp/%FromHost%/messages.log" 4 | $FileOwner splunk 5 | $FileGroup splunk 6 | $DirOwner splunk 7 | $DirGroup splunk 8 | # 9 | local2.notice ?GenericTemplate 10 | # 11 | # Custom Port 12 | $template myCustomNetworkDevice, "/var/log/syslog/myCustomNetworkDevice/%FromHost%/messages.log" 13 | $template PlainFormat,"%rawmsg%\n" 14 | $RuleSet myCustomNetworkDeviceRule 15 | 16 | # myCustomNetworkDeviceRule Ruleset 17 | $InputTCPServerBindRuleset myCustomNetworkDevice 18 | $InputTCPServerRun 20054 19 | local1.* ?myCustomNetworkDevice 20 | 21 | 22 | # EOF 23 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/sd.sample: 
-------------------------------------------------------------------------------- 1 | # https://www.juniper.net/documentation/en_US/junos/topics/reference/general/syslog-interpreting-msg-generated-structured-data-format.html 2 | # version timestamp hostname process processID TAG [junos@2636.platform variable-value-pairs] message-text 3 | 4 | <165>1 2017-02-15T09:17:15.719Z router1 mgd 3046 UI_DBASE_LOGOUT_EVENT [junos@2636.1.1.1.2.18 username="user"] User 'user' exiting configuration mode 5 | 6 | 7 | # For forwarding to remote, ensure RSYSLOG_SyslogProtocol23Format is used for keeping SD in tact 8 | *.* @remote:port;RSYSLOG_SyslogProtocol23Format 9 | -------------------------------------------------------------------------------- /thirdparty/rsyslog/tls_rsyslog_omfwd.conf: -------------------------------------------------------------------------------- 1 | # Install rsyslog-gnutls 2 | # sudo apt-get install rsyslog-gnutls 3 | 4 | # certificate files - just CA for a client 5 | global(DefaultNetstreamDriverCAFile="/tmp/certs_rsyslog/host.cert") 6 | 7 | # set up the action for all messages 8 | action(type="omfwd" Target="localhost" protocol="tcp" port="6514" StreamDriver="gtls" StreamDriverMode="1" StreamDriverAuthMode="anon") 9 | -------------------------------------------------------------------------------- /thirdparty/ruby/date_parsing_formats.txt: -------------------------------------------------------------------------------- 1 | # Logstash Ruby 2 | code => "require 'date'; d = DateTime.parse('3rd Feb 2001 04:05:06+03:30'); event.set('[event][b_hour]', d.hour)" 3 | 4 | require 'date'; 5 | mytime1 = '2021-02-23T20:00:04.333Z'; 6 | mytime2 = '2021-02-23T21:00:04'; 7 | d = DateTime.parse(mytime1); 8 | #print DateTime.strptime(mytime2,'%FT%T'); 9 | print DateTime.strptime(mytime1,'%FT%T.%LZ'); 10 | print "\n"; 11 | d = DateTime.strptime(mytime1,'%FT%T.%LZ') 12 | print d.strftime('%b %d %Y, %T'); 13 | 14 | 
-------------------------------------------------------------------------------- /thirdparty/scapy/pcap_scapy.py: -------------------------------------------------------------------------------- 1 | from scapy.all import * 2 | from scapy.utils import rdpcap 3 | import sys 4 | import argparse 5 | 6 | 7 | parser = argparse.ArgumentParser() 8 | parser.add_argument("pcapInFile", help="Enter your input PCAP file", type=str) 9 | args = parser.parse_args() 10 | 11 | conf.L3socket 12 | conf.L3socket=L3RawSocket 13 | 14 | if os.path.isfile(args.pcapInFile): 15 | pkts=rdpcap(args.pcapInFile,10) # could be used like this rdpcap("filename",500) fetches first 500 pkts 16 | for pkt in pkts: 17 | #print 'IP_src=' + pkt[IP].src + ' IP_dst=' + pkt[IP].dst + ' sport=' + str(pkt[UDP].sport) + ' dport=' + str(pkt[UDP].dport) 18 | print pkt[IP].show() 19 | # sendp(pkt) #sending packet at layer 2 20 | else: 21 | print ("PCAP file does NOT exist") 22 | -------------------------------------------------------------------------------- /thirdparty/scapy/sampleCode.py: -------------------------------------------------------------------------------- 1 | import sys 2 | sys.path.insert(0, '/configs/scapy') 3 | from scapy.all import * 4 | 5 | conf.L3socket 6 | conf.L3socket=L3RawSocket 7 | packet = IP(dst="127.0.0.1", src="10.121.122.122")/UDP(sport=333, dport=514)/Raw(load='<165>1 2018-06-29T10:14:15.003Z machine.test mylog - ID47 [exampleSDID@32473 iut="3" eventSource="Application" eventID="1011"] MY_AN_MSG application event log entry') 8 | 9 | send(packet) 10 | -------------------------------------------------------------------------------- /thirdparty/scapy/send_localhost_lo.txt: -------------------------------------------------------------------------------- 1 | # Sending to localhost 2 | 3 | conf.L3socket 4 | conf.L3socket=L3RawSocket 5 | 6 | packet = IP(dst="127.0.0.1", src="10.121.12.12")/UDP(sport=333, dport=514)/Raw(load="<158> Apr 15 12:16:13 tag: 158 load Scapy message") 7 | 
send(packet) 8 | -------------------------------------------------------------------------------- /thirdparty/scapy/syslog_virtualbox.txt: -------------------------------------------------------------------------------- 1 | # To send to a VM : from host to Guest using scapy 2 | 3 | # src is vboxnet1 ether, dst is vm-guest ether 4 | packet = Ether(src="0a:00:27:00:00:01", dst="08:00:27:82:a3:dd")/IP(dst="10.10.2.3", src="10.10.2.1")/UDP(sport=333, dport=514)/"Scapy message" 5 | sendp(packet, iface="vboxnet1") 6 | -------------------------------------------------------------------------------- /thirdparty/security_posture/recommed_suggest_advise.md: -------------------------------------------------------------------------------- 1 | ## Terminologies 2 | 3 | - **Recommend** : Implies a strong endorisement based on quality of recommended option 4 | - **Suggest** : neutral proposal withotu necessarily implying an endorsement 5 | - **Advise**: providing guidance, opinions, research based on expertise or auhtority to help to make an informed decision or action 6 | -------------------------------------------------------------------------------- /thirdparty/servicenow/servicenow.txt: -------------------------------------------------------------------------------- 1 | # API Integrations 2 | http://wiki.servicenow.com/index.php?title=Table_API_Curl_Examples 3 | 4 | # In built API 5 | snowIp= 6 | tabName=sys_user 7 | curl --user userid:pass --header "Accept: application/json" https://${snowIp}/api/now/table/${tabName} | grep }| python -mson.tool >/tmp/${tabName}.json 8 | 9 | # Within TA 10 | inputs.conf => [snow://sys_user] 11 | service_now.conf => url 12 | -------------------------------------------------------------------------------- /thirdparty/shell/.bash_aliases: -------------------------------------------------------------------------------- 1 | alias ..="cd .." 2 | alias ...="cd ../.." 3 | alias ....="cd ../../.." 
4 | alias back='cd -' 5 | 6 | alias ll='ls -l' 7 | alias lt='ls -lrt' 8 | alias lf='ls -F' 9 | alias l='ls -al' 10 | 11 | alias lm="ls -al | less" 12 | 13 | alias ff="find / -type f -name" 14 | alias f.="find . -type f -name" 15 | -------------------------------------------------------------------------------- /thirdparty/shell/7zip.md: -------------------------------------------------------------------------------- 1 | ## Encrypt using 7zip 2 | ``` 3 | finalFile="myfilepackage.tgz" 4 | pass="mypass" 5 | src_file="src_file.txt" 6 | 7 | 7z a -p${pass} ${finalFile} ${src_file} 8 | ``` 9 | -------------------------------------------------------------------------------- /thirdparty/shell/addint_timestamp.sh: -------------------------------------------------------------------------------- 1 | # https://serverfault.com/questions/310098/how-to-add-a-timestamp-to-bash-script-log 2 | ./script.sh | while IFS= read -r line; do printf '%s %s\n' "$(date)" "$line"; done 3 | -------------------------------------------------------------------------------- /thirdparty/shell/arguments_grep.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # echo fruit=apple colour=red 4 | input_args=$@ 5 | colourFlag=`echo $input_args | grep colour | awk -F 'colour=' '{print $2}' | awk -F ' ' '{print $1}'` 6 | fruitFlag=`echo $input_args | grep fruit | awk -F 'fruit=' '{print $2}' | awk -F ' ' '{print $1}'` 7 | 8 | echo colourFlag=$colourFlag 9 | echo fruitFlag=$fruitFlag 10 | 11 | 12 | ## Better Way 13 | # Set default values for the flags 14 | colourFlag="" 15 | fruitFlag="" 16 | 17 | # Loop through all the arguments 18 | for arg in "$@"; do 19 | case $arg in 20 | fruit=*) 21 | fruitFlag="${arg#*=}" 22 | ;; 23 | colour=*) 24 | colourFlag="${arg#*=}" 25 | ;; 26 | esac 27 | done 28 | 29 | echo "colourFlag=$colourFlag" 30 | echo "fruitFlag=$fruitFlag" 31 | -------------------------------------------------------------------------------- 
/thirdparty/shell/array_looping.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | all_envs=('apple' 'banana' 'pear') 4 | for (( i=1; i<=${#all_envs[@]}; i++ )) 5 | do 6 | echo $i ${all_envs[$i]} 7 | 8 | done 9 | -------------------------------------------------------------------------------- /thirdparty/shell/awk_one_liners.md: -------------------------------------------------------------------------------- 1 | - https://www.baeldung.com/linux/find-matching-text-replace-next-line 2 | - Data sample: [Link](../../sampleData/raw/json/sample_array.json) 3 | 4 | ### Position based data manipulation 5 | ``` 6 | awk '/MY_HOST_EUROPE/{ rl = NR + 3 } NR == rl { gsub( /US/,"UK")} 3' sample_array.json 7 | ``` 8 | -------------------------------------------------------------------------------- /thirdparty/shell/bash_k8s_settings.txt: -------------------------------------------------------------------------------- 1 | ## ~/.bashrc 2 | 3 | export NS="" 4 | alias k="kubectl -n \${NS}" 5 | PS1="[\W]$" 6 | set -o vi 7 | 8 | alias ctx='kubectl config set-context --current --namespace' 9 | export KUBECONFIG=/home/${USER}/.config/config 10 | -------------------------------------------------------------------------------- /thirdparty/shell/capture_video.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # https://www.youtube.com/watch?v=BBvod49uySQ 4 | printf "Input URL: " 5 | read URL 6 | youtube-dl -f 'bestaudio[ext=m4a]' $URL 7 | 8 | #Capture Videos 9 | -------------------------------------------------------------------------------- /thirdparty/shell/counter.sh: -------------------------------------------------------------------------------- 1 | baseURL="https://someDir/" 2 | count=32 3 | i=1 4 | 5 | while [ $i -le $count ] 6 | do 7 | if [ $i -lt 10 ]; then 8 | pad=0 9 | else 10 | pad="" 11 | fi 12 | 13 | echo curl -o ${pad}${i}.mp3 "${baseURL}/${pad}${i}.mp3" 14 | 
i=$((i+1)) 15 | done 16 | -------------------------------------------------------------------------------- /thirdparty/shell/cut_commands.md: -------------------------------------------------------------------------------- 1 | ### kill a process 2 | ``` 3 | process='kubectl proxy' 4 | kill `ps -ef | grep ${process}| grep -v grep| tr -s " " | cut -d ' ' -f2` 5 | ``` 6 | -------------------------------------------------------------------------------- /thirdparty/shell/diff.md: -------------------------------------------------------------------------------- 1 | ### side by side 2 | ``` 3 | diff -y file1 file2 4 | ``` 5 | 6 | ### Width adjustment of side by side 7 | ``` 8 | diff -W $(( $(tput cols) -2 )) -y file1 file2 9 | ``` 10 | -------------------------------------------------------------------------------- /thirdparty/shell/dig.md: -------------------------------------------------------------------------------- 1 | ## DIG command with xargs to curl 2 | ``` 3 | dig +short host.docker.internal | xargs -I{} curl -s http://{}:8200/v1/sys/seal-status 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/shell/directoryPath.sh: -------------------------------------------------------------------------------- 1 | # https://stackoverflow.com/questions/242538/unix-shell-script-find-out-which-directory-the-script-file-resides 2 | 3 | SCRIPT=$(readlink -f "$0") 4 | SCRIPTPATH=$(dirname "$SCRIPT") 5 | CONFIG_FILE_DIR="$SCRIPTPATH/../../.." 6 | 7 | ls -l $CONFIG_FILE_DIR 8 | -------------------------------------------------------------------------------- /thirdparty/shell/file_transfer.sh: -------------------------------------------------------------------------------- 1 | ## No nc, no wget no curl even. 
Here is a one-liner bash to transfer files 2 | fname="yourfile.txt" 3 | bash -c "exec 3<>/dev/tcp/IP/80; echo -e 'GET /${fname} HTTP/1.1\r\nHost: ip\r\nConnection: close\r\n\r\n' >&3; cat <&3 > ${fname}" 4 | 5 | 6 | ## Also check https://github.com/peass-ng/PEASS-ng/blob/master/linPEAS/README.md 7 | -------------------------------------------------------------------------------- /thirdparty/shell/filebeat_templator/test1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Dummy array for testing 4 | successful_hosts=("127.0.0.1:22" "::1:22" "192.168.1.1:22") 5 | 6 | # Initialize an empty string 7 | logstash_hosts="" 8 | 9 | # Loop through the array and format each element 10 | for host in "${successful_hosts[@]}"; do 11 | logstash_hosts+="\"$host\", " 12 | done 13 | 14 | # Add brackets and remove the trailing comma and space 15 | logstash_hosts="[ ${logstash_hosts%, *} ]" 16 | 17 | # Print the result 18 | echo "logstash_hosts: $logstash_hosts" 19 | -------------------------------------------------------------------------------- /thirdparty/shell/filebeat_templator/your_csv_file.csv: -------------------------------------------------------------------------------- 1 | hostname,ip,port 2 | host1,192.168.1.10,22 3 | host2,127.0.0.1,22 4 | host3,localhost,22 5 | host4,10.0.0.1,22 6 | -------------------------------------------------------------------------------- /thirdparty/shell/find_exec.md: -------------------------------------------------------------------------------- 1 | ## find command with exec 2 | 3 | 4 | - To print filename alongside cksum 5 | ``` 6 | find /path/to/search -name "*.txt" -exec sh -c 'echo "File: {}"; cksum {}' \; 7 | 8 | find /path/to/search -name "*.txt" -exec sh -c 'for file do echo "File: $file"; cksum "$file"; done' sh {} + 9 | 10 | ``` -------------------------------------------------------------------------------- /thirdparty/shell/find_replace.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Replace string 3 | 4 | oldFruit="apple" 5 | newFruit="orange" 6 | oldString="fruit/apple" 7 | 8 | if [[ "$oldString" == *"$oldFruit"* ]] 9 | then 10 | echo $oldString 11 | newString=${oldString//apple/orange} 12 | echo $newString 13 | fi 14 | 15 | 16 | if [[ "$oldString" == *"$oldFruit"* ]] 17 | then 18 | echo Var: $oldString 19 | newString=${oldString//$oldFruit/$newFruit} 20 | echo Var: $newString 21 | fi -------------------------------------------------------------------------------- /thirdparty/shell/find_replace_sed.txt: -------------------------------------------------------------------------------- 1 | # http://unix.stackexchange.com/questions/155805/sed-replace-first-k-instances-of-a-word-in-the-file/155810 2 | # To replace the "k"th occurence , below example k=4 3 | # change 4th occurence to \x00 . Change everything to something new and replace \x00 back to original 4 | sed -E -e 'H;1h;$!d;x; s/\/\x00/g4; s/\/new/g; s/\x00/old/g' 5 | -------------------------------------------------------------------------------- /thirdparty/shell/fingerprint_file.txt: -------------------------------------------------------------------------------- 1 | lsblk -o MOUNTPOINT, UUID -------------------------------------------------------------------------------- /thirdparty/shell/helloWorld.sh: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | 3 | if [ -r log4sh ]; then 4 | LOG4SH_CONFIGURATION='log4sh.properties.ex4' . 
./log4sh 5 | else 6 | echo "ERROR: could not load (log4sh)" >&2 7 | exit 1 8 | fi 9 | 10 | # change the default message level from ERROR to INFO 11 | logger_setLevel INFO 12 | threadid=t`date +%s` 13 | logger_setThreadName "$threadid" 14 | sScript=`logger_getFilename` 15 | sHost=`hostname` 16 | sOs=`uname -s` 17 | sUser=`whoami` 18 | logger_info "sScript=$sScript sHost=$sHost sOs=$sOs sUser=$sUser" 19 | 20 | # say hello to the world 21 | logger_info "Hello, world!" 22 | logger_error "this is an error" 23 | -------------------------------------------------------------------------------- /thirdparty/shell/if_else_multiple.sh: -------------------------------------------------------------------------------- 1 | # regex in if loop 2 | if [[ "$action" =~ ^(apple|mango|pine|straw)$ ]] 3 | then 4 | echo "Fruit=${action}" 5 | else 6 | echo "ERROR: Not in desired list " 7 | exit 100 8 | fi 9 | -------------------------------------------------------------------------------- /thirdparty/shell/mtime_scripted_inputs.sh: -------------------------------------------------------------------------------- 1 | runId=`date +%Y%m%d%H%M%S` 2 | interval_min=60 3 | file_input_dir="/tmp" 4 | file_output_dir="/tmp" 5 | 6 | find $file_input_dir -type f -name '*.out' -mmin ${interval_min} -exec stat --printf="runId=$runId fname=%n fsize=%s atime=%X mtime=%Y ctime=%Z gid=%g gname=%G uid=%u uname=%U acl=%a aclname=%A inode=%i\n" {} \; 7 | -------------------------------------------------------------------------------- /thirdparty/shell/myscriptSample.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | my_dir="$(dirname "$0")" 4 | 5 | . 
"$my_dir/slogger.sh" 6 | 7 | thisLogFile=/tmp/SystemOut1.log 8 | LOGFILE "$thisLogFile" 9 | 10 | echo SCRIPTENTRY 11 | SCRIPTENTRY 12 | updateUserDetails() { 13 | echo ENTRY 14 | ENTRY 15 | echo DEBUG 16 | DEBUG "Username: $1, Key: $2" 17 | echo INFO 18 | INFO "User details updated for $1" 19 | echo RETURN 20 | RETURN 21 | } 22 | 23 | echo INFO 24 | INFO "Updating user details..." 25 | echo updateUserDetails 26 | updateUserDetails "testUser" "12345" 27 | 28 | rc=2 29 | 30 | if [ ! "$rc" = "0" ] 31 | then 32 | ERROR "Failed to update user details. RC=$rc" 33 | fi 34 | SCRIPTEXIT 35 | -------------------------------------------------------------------------------- /thirdparty/shell/rsync.md: -------------------------------------------------------------------------------- 1 | ## Rsync samples 2 | ``` 3 | ### scp alternative 4 | rsync -avz --remove-source-fiels -e ssh /this/dir remoteuser@remotehost:/remote/dir 5 | 6 | ### From remote systems 7 | rsync -vtr --progress --exclude debug/ rsync://mirror.pnl.gov/epel/7/x86_64/ epel 8 | 9 | ### Faster rsync 10 | rsync -r --size-only --progress ${srcDir}/* ${destDir}/ --dry-run 11 | ``` 12 | 13 | ## Ignore Certain files 14 | ``` 15 | rsync -r --size-only --progress ${srcDir}/* ${destDir}/ --exclude '.DS_Store' --dry-run 16 | ``` 17 | 18 | 19 | ## More Accurate run, but takes time 20 | ``` 21 | rsync -rhic --progress ${srcDir}/* ${destDir}/ --ignore-times --dry-run 22 | 23 | ``` 24 | -------------------------------------------------------------------------------- /thirdparty/shell/screen_capture.md: -------------------------------------------------------------------------------- 1 | # Generate images of code and terminal output 2 | https://github.com/charmbracelet/freeze 3 | 4 | -------------------------------------------------------------------------------- /thirdparty/shell/shell.txt: -------------------------------------------------------------------------------- 1 | # Grepping large files 2 | tail -f | egrep 
--line-buffered 3 | 4 | # Linux kernel settings 5 | sysctl vm.zone_reclaim_mode 6 | 7 | 8 | # Some good options with grep 9 | # Below will show key-value with s= 10 | grep -Poi "s=.*?\s" filename.txt | uniq 11 | -------------------------------------------------------------------------------- /thirdparty/shell/shopt.txt: -------------------------------------------------------------------------------- 1 | shopt -s dotglob 2 | 3 | # Various shell options -------------------------------------------------------------------------------- /thirdparty/shell/simpleBackupScript.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ignore_list=`grep -v '#' ignore_list.txt` 4 | for i in $ignore_list 5 | do 6 | exclude_chain=${exclude_chain}" --exclude=$i" 7 | done 8 | 9 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 10 | baseDir=~/Documents 11 | backupDir=~/Documents/backup 12 | cd ${baseDir}/mydir/backupScripts 13 | 14 | echo "tgz ${baseDir}/rough to ${backupDir}/pers.backup.tgz" 15 | tar $exclude_chain -czf ${backupDir}/pers.backup.tgz -C ${baseDir} mydir 16 | cd $DIR 17 | 18 | # ls ${backupDir} 19 | -------------------------------------------------------------------------------- /thirdparty/shell/ssh.txt: -------------------------------------------------------------------------------- 1 | # bash script to ssh multiple servers in a Loop and issue commands 2 | # https://stackoverflow.com/questions/20254906/bash-script-to-ssh-multiple-servers-in-a-loop-and-issue-commands 3 | 4 | #!/bin/bash 5 | user="abc" 6 | while read hostLine 7 | do 8 | ssh ${user}@${hostLine} "hostname; ls;" < /dev/null 9 | done < "$1" 10 | -------------------------------------------------------------------------------- /thirdparty/shell/zsh.md: -------------------------------------------------------------------------------- 1 | ## To add userid & hostname in prompt 2 | ``` 3 | # https://github.com/ohmyzsh/ohmyzsh/issues/5686 4 | 
PROMPT='%(!.%{%F{yellow}%}.)$USER @ %{$fg[white]%}%M %{$fg_bold[red]%}➜ %{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}' 5 | 6 | ``` 7 | -------------------------------------------------------------------------------- /thirdparty/shuffle/setup.md: -------------------------------------------------------------------------------- 1 | 2 | ## Shuffle docker 3 | ``` 4 | git clone https://github.com/frikky/Shuffle 5 | cd Shuffle 6 | mkdir shuffle-database 7 | sudo chown 1000:1000 -R shuffle-database 8 | docker-compose up -d 9 | ``` 10 | 11 | 12 | ## Local firewall enable 13 | ``` 14 | sudo su - 15 | mylan=192.168.30.1 16 | iptables -I INPUT -s ${mylan}/24 -p tcp --dport 3001 -j ACCEPT 17 | iptables -I INPUT -s ${mylan}/24 -p tcp --dport 3443 -j ACCEPT 18 | iptables -I INPUT -s ${mylan}/24 -p tcp --dport 5001 -j ACCEPT 19 | iptables-save >/etc/iptables/rules.v4 20 | ``` 21 | -------------------------------------------------------------------------------- /thirdparty/snmp/snmptrapd.conf.txt: -------------------------------------------------------------------------------- 1 | # /etc/snmp/snmptrapd.conf 2 | authCommunity log public 3 | createUser -e 0x8000000001020304 traptest SHA mycred AES 4 | authuser log traptest 5 | 6 | 7 | # To test SNMPv3 8 | snmptrap -v 3 -c public -n "" -a SHA -A mypassword -x AES -X mycred -l authPriv -u traptest -e 0x8000000001020304 localhost 0 linkUp.1 9 | -------------------------------------------------------------------------------- /thirdparty/snmp/snmptrapd.txt: -------------------------------------------------------------------------------- 1 | # /etc/sysconfig/snmptrapd 2 | OPTIONS="-Ls6 -n -m ALL -M /usr/share/snmp/mibs -p /var/run/snmptrapd.pid -F \"%B Security: %P; EnterpriseOID: %N; TrapType: %W; TrapSubType: %q; Payload : %v\n\"" 3 | -------------------------------------------------------------------------------- /thirdparty/soc_usecases/links_ideas.txt: 
-------------------------------------------------------------------------------- 1 | https://www.splunk.com/en_us/blog/security/cloud-federated-credential-abuse-cobalt-strike-threat-research-feb-2021.html 2 | 3 | -------------------------------------------------------------------------------- /thirdparty/soc_usecases/rules_link.txt: -------------------------------------------------------------------------------- 1 | https://github.com/SigmaHQ/sigma 2 | 3 | 4 | Mitre Cyber Analytics Repository (CAR) 5 | https://car.mitre.org/analytics/ 6 | -------------------------------------------------------------------------------- /thirdparty/soc_usecases/splunk_uc_library.md: -------------------------------------------------------------------------------- 1 | https://docs.splunksecurityessentials.com/features/kill_chain_overview/ 2 | 3 | https://lantern.splunk.com/Security/Use_Cases 4 | -------------------------------------------------------------------------------- /thirdparty/squid/squid_kubectl.md: -------------------------------------------------------------------------------- 1 | ``` 2 | kubectl exec "$(kubectl get pod -n mysquidns -l app=sleep -o jsonpath={.items..metadata.name})" -n squid -- sh -c "HTTPS_PROXY=10.10.10.30:3128 curl https://www.google.com" 3 | kubectl exec "$(kubectl get pod -n mysquidns -l app.kubernetes.io/name=my-squid-proxy -o jsonpath={.items..metadata.name})" -n squid -- tail /var/log/squid/access.log 4 | ``` 5 | -------------------------------------------------------------------------------- /thirdparty/ssh/tunnel.md: -------------------------------------------------------------------------------- 1 | ### SSH Tunnelling 2 | 3 | https://github.com/getkub/k8s_kubernetes/blob/main/basic_setup.md#tunnel-from-remote-laptop 4 | 5 | ``` 6 | ssh -L ${localport}:${remoteIP}:${remotePort} ${remoteHostUser}@${remoteHost} 7 | ``` 8 | 9 | ## SSH & TLS 10 | HTTPS connection can be redirected via SSH port forwarding - however the SSL/TLS certificate validation 
will fail in such cases as the host name does not match 11 | https://superuser.com/questions/347415/is-it-possible-to-tunnel-https-traffic-via-ssh-tunnel-with-standard-ssh-programs 12 | -------------------------------------------------------------------------------- /thirdparty/standards/ISO/iso27001/1.27001.csv: -------------------------------------------------------------------------------- 1 | GroupId_1,GroupName_1 2 | "A.05","Security policy" 3 | "A.06","Organisation of information security" 4 | "A.07","Asset management" 5 | "A.08","Human resources security" 6 | "A.09","Physical and environmental security" 7 | "A.10","Communications and operations management" 8 | "A.11","Access control" 9 | "A.12","Information systems acquisition, development and maintenance" 10 | "A.13","Information security incident management" 11 | "A.14","Business continuity management" 12 | "A.15","Compliance" 13 | -------------------------------------------------------------------------------- /thirdparty/standards/ISO/iso27001/search.txt: -------------------------------------------------------------------------------- 1 | | inputlookup 3.27001.csv| eval GroupId_2=substr(GroupId_3,0,7) | lookup 2.27001.csv GroupId_2| eval GroupId_1=substr(GroupId_2,0,4)| lookup 1.27001.csv GroupId_1 2 | -------------------------------------------------------------------------------- /thirdparty/standards/configItems.txt: -------------------------------------------------------------------------------- 1 | Naming convention Standards 2 | 3 | - All Knowledge item names (eg Ip address, host), we will stick to "Splunk Common Information Model Add-on" standards 4 | - All object names in "small case" (eg: thisisavariable) 5 | - Reports (__rp____) 6 | eg: abc_siem_rp_firewall_packetfiltering_15m_breaches 7 | - Alerts (__al____) 8 | - Dashboards/Views (__db___) 9 | eg: abc_siem_db_os_windows_memoryusage.xml 10 | - SavedSearches (__ss___) 11 | 
-------------------------------------------------------------------------------- /thirdparty/standards/pci/pci_1.csv: -------------------------------------------------------------------------------- 1 | pci_1,description 2 | "P01","Install and maintain a firewall configuration to protect cardholder data" 3 | "P02","Do not use vendor-supplied defaults for system passwords and other security parameters" 4 | "P03","Protect stored cardholder data" 5 | "P04","Encrypt transmission of cardholder data across open, public networks" 6 | "P05","Use and regularly update anti-virus software or programs " 7 | "P06","Develop and maintain secure systems and applications" 8 | "P07","Restrict access to cardholder data by business need to know" 9 | "P08","Assign a unique ID to each person with computer access" 10 | "P09","Restrict physical access to cardholder data" 11 | "P10","Track and monitor all access to network resources and cardholder data" 12 | "P11","Regularly test security systems and processes" 13 | "P12","Maintain a policy that addresses information security for all personnel" 14 | -------------------------------------------------------------------------------- /thirdparty/stencils/generate_svg_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script converts the .dot graphviz into an image 3 | fname="my_input_sample" 4 | dot -Tsvg ${fname}.dot -o ${fname}.svg || \ 5 | echo "This script requires graphviz. 
Install it on a Mac with: " \ 6 | echo " brew install graphviz" 7 | -------------------------------------------------------------------------------- /thirdparty/stencils/good_sites.md: -------------------------------------------------------------------------------- 1 | ## Sites collection 2 | - https://www.producthunt.com/topics?ref=header_nav 3 | 4 | 5 | ## Good sites 2 6 | - https://www.autodraw.com/ 7 | -------------------------------------------------------------------------------- /thirdparty/stencils/image_demo.md: -------------------------------------------------------------------------------- 1 | 2 | ## Image demo 3 | - https://blush.design/collections 4 | 5 | 6 | ## Logos 7 | - https://logo.shapefactory.co/ -------------------------------------------------------------------------------- /thirdparty/stencils/my_input_sample.dot: -------------------------------------------------------------------------------- 1 | graph { 2 | "base" -- "bdocker"; 3 | "base" -- "fluentd"; 4 | "base" -- "splunk-hf"; 5 | "base" -- "splunk-indexer"; 6 | "base" -- "splunk-sh"; 7 | } 8 | -------------------------------------------------------------------------------- /thirdparty/stencils/svg_stencil.md: -------------------------------------------------------------------------------- 1 | ## Stencil Links 2 | 3 | - https://docs.microsoft.com/en-us/azure/architecture/browse/ 4 | - https://aws.amazon.com/architecture/icons/ 5 | 6 | ## SVG Icons 7 | - https://feathericons.com/?ref=producthunt 8 | - https://www.iconshock.com/desktop-icons/ 9 | - https://svgporn.com/ 10 | - https://www.svggobbler.com/ 11 | -------------------------------------------------------------------------------- /thirdparty/templating/Liquid_expressions_Workflow.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getkub/SplunkScriplets/89fb6c5f3552c3480f957baefc63705857f4f619/thirdparty/templating/Liquid_expressions_Workflow.pdf 
-------------------------------------------------------------------------------- /thirdparty/templating/business_reporting/Atlassian_templates.md: -------------------------------------------------------------------------------- 1 | - SWOT (Strength, weakness, Opportunities, Threats) https://www.atlassian.com/software/confluence/templates/swot-analysis 2 | - Risk Assessment https://www.atlassian.com/software/confluence/templates/risk-assessment 3 | -------------------------------------------------------------------------------- /thirdparty/templating/liquid.md: -------------------------------------------------------------------------------- 1 | https://shopify.dev/api/liquid -------------------------------------------------------------------------------- /thirdparty/terraform/aws-assume-role.txt: -------------------------------------------------------------------------------- 1 | https://stackoverflow.com/questions/55128348/execute-terraform-apply-with-aws-assume-role 2 | 3 | aws_credentials=$(aws sts assume-role --role-arn arn:aws:iam::1234567890:role/nameOfMyrole --role-session-name "RoleSession1" --output json) 4 | 5 | export AWS_ACCESS_KEY_ID=$(echo $aws_credentials|jq '.Credentials.AccessKeyId'|tr -d '"') 6 | export AWS_SECRET_ACCESS_KEY=$(echo $aws_credentials|jq '.Credentials.SecretAccessKey'|tr -d '"') 7 | export AWS_SESSION_TOKEN=$(echo $aws_credentials|jq '.Credentials.SessionToken'|tr -d '"') 8 | 9 | 10 | -------------------------------------------------------------------------------- /thirdparty/terraform/common_tf_commands.md: -------------------------------------------------------------------------------- 1 | ## Format properly 2 | ``` 3 | ## To hightlight files which needs changes 4 | terraform fmt -check -recursive 5 | ``` 6 | 7 | ``` 8 | ## To format 9 | terraform fmt -recursive 10 | ``` 11 | -------------------------------------------------------------------------------- /thirdparty/terraform/for_each_sample.md: 
-------------------------------------------------------------------------------- 1 | ## Use for_each instead of index.loop 2 | ``` 3 | dynamic "allow" { 4 | for_each = each.value.action == "allow" ? [{ 5 | ports = each.value.ports 6 | }]: [] 7 | 8 | content { 9 | protocol = allow.value["ports"] 10 | } 11 | } 12 | 13 | my_tags = each.value.my_tags 14 | cidr_ranges_src = each.value.flow == "EGRESS" ? each.value.cidr_ranges_src: null 15 | other_optional_field = try(each.value.flow == "EGRESS" ? each.value.other_optional_field: null , null) 16 | 17 | ``` -------------------------------------------------------------------------------- /thirdparty/terraform/gcp_terraform/generic_mods/sql_postgress/variables.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | description = "project name" 3 | } 4 | 5 | variable "default_region" { 6 | description = "the default region of the sql instance" 7 | } 8 | 9 | variable "network_name" { 10 | description = "name of the network" 11 | } 12 | 13 | variable "db_obj" { 14 | type = map 15 | } 16 | -------------------------------------------------------------------------------- /thirdparty/terraform/gcp_terraform/variables.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | description = "The project ID to host the network in" 3 | } 4 | 5 | variable "env_tag" { 6 | description = "env_tag" 7 | } 8 | 9 | variable "network_name" { 10 | description = "network_name" 11 | } 12 | 13 | variable "default_region" { 14 | description = "default_region" 15 | } 16 | 17 | variable "default_zone" { 18 | description = "default_zone" 19 | } 20 | 21 | variable "gcp_zones" { 22 | description = "gcp_zones" 23 | } 24 | 25 | variable "subnets_obj" { 26 | description = "Subnets in MAP format" 27 | } 28 | 29 | variable "fw_obj" { 30 | description = "Firewalls in MAP format" 31 | } 32 | 33 | variable "db_obj" { 34 | description = 
"Database in MAP format" 35 | } 36 | 37 | variable "app_static_ip_obj" { 38 | description = "static in MAP format" 39 | } 40 | 41 | -------------------------------------------------------------------------------- /thirdparty/terraform/state_file_removal.txt: -------------------------------------------------------------------------------- 1 | # Ensure it has all TF_VARS exported before running. Put this into a bash file and call it as 'remove_tfstate.sh' 2 | TF_VARS=$1 3 | terraform state rm 'name_of_record["specify_here"]' 4 | 5 | ## Then initiate an apply 6 | terraform apply -input=false -lock-timeout=60m $(MY_TF_ARTEFACTS) && \ 7 | AWS_SHARED_CREDENTIALS_FILE=".credentials.aws" \ 8 | ACCOUNT_ID=1234556 \ 9 | $(TF_BASH) ./remove_tfstate.sh "${TF_VARS}" 10 | 11 | -------------------------------------------------------------------------------- /thirdparty/terraform/terraform_url.txt: -------------------------------------------------------------------------------- 1 | https://faultbucket.ca/2020/07/terraform-handling-list-of-maps/ 2 | https://medium.com/datamindedbe/how-to-conditionally-disable-modules-in-terraform-f38fdbe34f1b 3 | https://dilani-alwis.medium.com/terraform-tips-tricks-de8bc46dde13 4 | https://tothecloud.dev/terraform-loop-through-nested-map/ 5 | -------------------------------------------------------------------------------- /thirdparty/terraform/terraformer/setup.md: -------------------------------------------------------------------------------- 1 | ``` 2 | export PROVIDER={all,google,aws,kubernetes} 3 | curl -LO https://github.com/GoogleCloudPlatform/terraformer/releases/download/$(curl -s https://api.github.com/repos/GoogleCloudPlatform/terraformer/releases/latest | grep tag_name | cut -d '"' -f 4)/terraformer-${PROVIDER}-linux-amd64 4 | chmod +x terraformer-${PROVIDER}-linux-amd64 5 | sudo mv terraformer-${PROVIDER}-linux-amd64 /usr/local/bin/terraformer 6 | ``` 7 | -------------------------------------------------------------------------------- 
/thirdparty/vagrant/README.md: -------------------------------------------------------------------------------- 1 | ## Vagrant setup 2 | 3 | - Have a setup DATA directory 4 | ``` 5 | export VAGRANT_DATA_DIR="/tmp/vagrant" 6 | ``` 7 | 8 | - Setup correct box 9 | 10 | ``` 11 | vagrant box remove rockylinux/9 --provider virtualbox 12 | vagrant box add rockylinux/9 --provider virtualbox 13 | ``` 14 | 15 | - Run using a config file 16 | ``` 17 | VAGRANT_VAGRANTFILE=thirdparty/vagrant/splunk_standalone.Vagrantfile vagrant up 18 | ``` 19 | 20 | - Destroy 21 | ``` 22 | VAGRANT_VAGRANTFILE=thirdparty/vagrant/splunk_standalone.Vagrantfile vagrant destroy -f 23 | ``` -------------------------------------------------------------------------------- /thirdparty/vagrant/freeBSD.Vagrantfile: -------------------------------------------------------------------------------- 1 | # FreeBSD Vagrant 2 | Vagrant.configure("2") do |config| 3 | config.vm.box = "freebsd/FreeBSD-11.2-RELEASE" 4 | config.vm.guest = :freebsd 5 | config.ssh.shell = "sh" 6 | config.vm.network "private_network", type: "dhcp" 7 | config.vm.synced_folder ".", "/vagrant", id: "vagrant-root", disabled: true 8 | config.vm.box_version = "2018.06.22" 9 | end 10 | -------------------------------------------------------------------------------- /thirdparty/vagrant/simpleRocky.Vagrantfile: -------------------------------------------------------------------------------- 1 | Vagrant.configure("2") do |config| 2 | # Specify the box 3 | config.vm.box = "rockylinux/9" 4 | 5 | # Optional: Basic provider settings to ensure compatibility 6 | config.vm.provider "virtualbox" do |vb| 7 | vb.memory = 2048 8 | vb.cpus = 2 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /thirdparty/vault/curl_vault.txt: -------------------------------------------------------------------------------- 1 | curl --request POST --header \"Content-Type: application/json\" " + 2 | " --header 
\"X-Vault-Namespace: ${vault_namespace}\" " + 3 | " --data '{\"jwt\": \"${jwtToken}\", \"role\": \"myrole\"}' ${vault_endpoint}/v1/auth/kubernetes/login -k -s 4 | -------------------------------------------------------------------------------- /thirdparty/vault/sign_using_vault.sh: -------------------------------------------------------------------------------- 1 | keyDir="/tmp/keys" 2 | mkdir -p $keyDir 3 | account_name="1234567" 4 | vault_url="vault_url_here" 5 | 6 | yes y | ssh-keygen -t rsa -N "" -f ${keyDir}/id_rsa 7 | public_key=`cat ${keyDir}/id_rsa.pub` 8 | 9 | echo ' 10 | { 11 | "public_key": "'${public_key}'", 12 | "valid_principals": "ec2-user", 13 | "ttl": "60m0s" 14 | } 15 | ' > ${keyDir}/ssh-ca.json 16 | 17 | curl --insecure \ 18 | --header "X-Vault-Token: ${client_token}" \ 19 | --header "X-Vault-Namespace: aws/${account_name}" \ 20 | --request POST \ 21 | --data @${keyDir}/ssh-ca.json \ 22 | ${vault_url}/v1/ssh/sign/${account_name}-ssh-role | jq -r .data.signed_key > ${keyDir}/id_rsa.signed.pub 23 | -------------------------------------------------------------------------------- /thirdparty/vault/vault_login_env.md: -------------------------------------------------------------------------------- 1 | ## Export Variables 2 | ``` 3 | export AWS_REGION="us-west-1" 4 | export VAULT_SKIP_VERIFY=true 5 | export VAULT_ADDR="https://my_vault_url.com" 6 | export VAULT_NAMESPACE="aws/my-dev-space" 7 | vault login -method=aws role=my-aws-iam-ec2-role 8 | ``` 9 | -------------------------------------------------------------------------------- /thirdparty/virtualbox/commonCommands.txt: -------------------------------------------------------------------------------- 1 | # VBoxManage common commands 2 | VBoxManage list vms 3 | VBoxManage list runningvms 4 | 5 | #Variable here fetched from runningvms 6 | myvm=centos_minimal 7 | 8 | # Start and get IP etc. 
9 | VBoxManage startvm $myvm -type headless 10 | VBoxManage guestproperty enumerate $myvm 11 | VBoxManage guestproperty get $myvm "/VirtualBox/GuestInfo/Net/0/V4/IP" 12 | 13 | # to Shutdown softly 14 | VBoxManage controlvm $myvm poweroff soft 15 | 16 | # Restart VBox in case of network Adapter issue in Mac 17 | sudo /Library/Application\ Support/VirtualBox/LaunchDaemons/VirtualBoxStartup.sh restart 18 | -------------------------------------------------------------------------------- /thirdparty/websites/good_websites.md: -------------------------------------------------------------------------------- 1 | # Animation, Graphics, Vectors, Images 2 | https://poly.pizza/ 3 | 4 | --------------------------------------------------------------------------------
<tr><th>empname</th><th>empSalary</th></tr>
<tr><td>{{empname}}</td><td>{{empSalary}}</td></tr>