├── .gitignore ├── docs └── daipe.jpeg ├── requirements.txt ├── .github └── ISSUE_TEMPLATE │ ├── custom.md │ ├── feature_request.md │ └── bug_report.md ├── decentralized_data_storage └── ipfs_utils │ ├── ipfs_client.py │ └── ipfs_server.py ├── cybersecurity_framework └── threat_detection │ ├── anomaly_detector.py │ └── malware_detector.py ├── node_network ├── node_manager.py └── node_communication.py ├── ai_engine └── ai_models │ ├── neural_network.py │ └── decision_tree.py ├── blockchain_integration └── smart_contracts │ ├── security_contract.py │ └── data_contract.py ├── ai_data ├── data_loader.py └── data_preprocessor.py ├── tests └── ai_engine_tests │ ├── test_decision_tree.py │ └── test_neural_network.py ├── node_utils ├── node_monitoring.py └── node_registration.py ├── blockchain_integration_tests ├── test_data_contract.py └── test_security_contract.py ├── incident_response ├── response_generator.py └── notification_system.py ├── cybersecurity_framework_tests ├── test_threat_detection.py └── test_incident_response.py ├── LICENSE └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Google App Engine generated folder 2 | appengine-generated/ 3 | -------------------------------------------------------------------------------- /docs/daipe.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KOSASIH/DAIPE/HEAD/docs/daipe.jpeg -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python==3.9.5 2 | numpy==1.21.2 3 | pandas==1.3.5 4 | websocket-client==1.3.2 5 | pi-sdk==1.2.3 # Pi Network SDK 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 
Custom issue template 3 | about: Describe this issue template's purpose here. 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /decentralized_data_storage/ipfs_utils/ipfs_client.py: -------------------------------------------------------------------------------- 1 | import ipfshttpclient 2 | 3 | class IpfsClient: 4 | def __init__(self, host, port): 5 | self.client = ipfshttpclient.connect(host, port) 6 | 7 | def add_file(self, file_path): 8 | return self.client.add(file_path) 9 | 10 | def get_file(self, file_hash): 11 | return self.client.cat(file_hash) 12 | -------------------------------------------------------------------------------- /cybersecurity_framework/threat_detection/anomaly_detector.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.ensemble import IsolationForest 3 | 4 | class AnomalyDetector: 5 | def __init__(self, data): 6 | self.data = data 7 | self.model = IsolationForest(contamination=0.1) 8 | 9 | def train(self): 10 | self.model.fit(self.data) 11 | 12 | def predict(self, new_data): 13 | return self.model.predict(new_data) 14 | -------------------------------------------------------------------------------- /node_network/node_manager.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | class NodeManager: 4 | def __init__(self, node_id, node_address): 5 | self.node_id = node_id 6 | self.node_address = node_address 7 | self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 8 | 9 | def connect(self): 10 | self.socket.connect((self.node_address, 8080)) 11 | 12 | def send_message(self, message): 13 | self.socket.send(message.encode()) 14 | 15 | def receive_message(self): 16 | return self.socket.recv(1024).decode() 17 | -------------------------------------------------------------------------------- 
/.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /ai_engine/ai_models/neural_network.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | class NeuralNetwork: 4 | def __init__(self, input_shape, output_shape): 5 | self.model = tf.keras.models.Sequential([ 6 | tf.keras.layers.Dense(64, activation='relu', input_shape=input_shape), 7 | tf.keras.layers.Dense(32, activation='relu'), 8 | tf.keras.layers.Dense(output_shape, activation='softmax') 9 | ]) 10 | self.model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) 11 | 12 | def train(self, X, y): 13 | self.model.fit(X, y, epochs=10, batch_size=32, validation_split=0.2) 14 | 15 | def predict(self, X): 16 | return self.model.predict(X) 17 | -------------------------------------------------------------------------------- /blockchain_integration/smart_contracts/security_contract.py: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.8.0; 2 | 3 | contract SecurityContract { 4 | address private owner; 5 | mapping (address => bool) public 
allowedNodes; 6 | 7 | constructor() public { 8 | owner = msg.sender; 9 | } 10 | 11 | function addNode(address node) public { 12 | require(msg.sender == owner, "Only the owner can add nodes"); 13 | allowedNodes[node] = true; 14 | } 15 | 16 | function removeNode(address node) public { 17 | require(msg.sender == owner, "Only the owner can remove nodes"); 18 | allowedNodes[node] = false; 19 | } 20 | 21 | function checkNode(address node) public view returns (bool) { 22 | return allowedNodes[node]; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /ai_data/data_loader.py: -------------------------------------------------------------------------------- 1 | # Data Loader class 2 | class DataLoader: 3 | def __init__(self, data_contract): 4 | self.data_contract = data_contract 5 | 6 | def load(self): 7 | # Load data from source 8 | if self.data_contract.data_type == "csv": 9 | data = pd.read_csv(self.data_contract.data_source) 10 | elif self.data_contract.data_type == "json": 11 | data = pd.read_json(self.data_contract.data_source) 12 | elif self.data_contract.data_type == "parquet": 13 | data = pd.read_parquet(self.data_contract.data_source) 14 | else: 15 | raise ValueError("Invalid data type") 16 | 17 | return data 18 | 19 | def __str__(self): 20 | return f"DataLoader(data_contract={self.data_contract})" 21 | 22 | # Example usage 23 | data_loader = DataLoader(data_contract) 24 | data = data_loader.load() 25 | print(data.head()) 26 | -------------------------------------------------------------------------------- /ai_data/data_preprocessor.py: -------------------------------------------------------------------------------- 1 | # Data Preprocessor class 2 | class DataPreprocessor: 3 | def __init__(self, data): 4 | self.data = data 5 | 6 | def preprocess(self): 7 | # Handle missing values 8 | self.data.fillna(self.data.mean(), inplace=True) 9 | 10 | # Handle outliers 11 | self.data = self.data[(np.abs(self.data) <= 3 * 
self.data.std()).all(axis=1)] 12 | 13 | # Encode categorical variables 14 | categorical_cols = self.data.select_dtypes(include=["object"]).columns 15 | for col in categorical_cols: 16 | self.data[col] = pd.get_dummies(self.data[col], drop_first=True) 17 | 18 | return self.data 19 | 20 | def __str__(self): 21 | return f"DataPreprocessor(data={self.data})" 22 | 23 | # Example usage 24 | data_preprocessor = DataPreprocessor(data) 25 | processed_data = data_preprocessor.preprocess() 26 | print(processed_data.head()) 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 
39 | -------------------------------------------------------------------------------- /tests/ai_engine_tests/test_decision_tree.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | from ai_engine.decision_tree import DecisionTree 4 | 5 | class TestDecisionTree(unittest.TestCase): 6 | def setUp(self): 7 | # Set up decision tree for testing 8 | self.dt = DecisionTree() 9 | 10 | def test_train(self): 11 | # Test training of decision tree 12 | input_data = np.random.rand(100, 10) 13 | output_data = np.random.rand(100, 1) 14 | self.dt.train(input_data, output_data) 15 | self.assertTrue(self.dt.trained) 16 | 17 | def test_predict(self): 18 | # Test prediction using decision tree 19 | input_data = np.random.rand(1, 10) 20 | output = self.dt.predict(input_data) 21 | self.assertEqual(output.shape, (1, 1)) 22 | 23 | def test_evaluate(self): 24 | # Test evaluation of decision tree 25 | input_data = np.random.rand(100, 10) 26 | output_data = np.random.rand(100, 1) 27 | accuracy = self.dt.evaluate(input_data, output_data) 28 | self.assertGreaterEqual(accuracy, 0.0) 29 | self.assertLessEqual(accuracy, 1.0) 30 | 31 | if __name__ == '__main__': 32 | unittest.main() 33 | -------------------------------------------------------------------------------- /tests/ai_engine_tests/test_neural_network.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | import numpy as np 4 | from ai_engine.neural_network import NeuralNetwork 5 | 6 | class TestNeuralNetwork(unittest.TestCase): 7 | def setUp(self): 8 | # Set up neural network for testing 9 | self.nn = NeuralNetwork(input_size=784, hidden_size=256, output_size=10) 10 | 11 | def test_forward_pass(self): 12 | # Test forward pass through neural network 13 | input_data = np.random.rand(1, 784) 14 | output = self.nn.forward_pass(input_data) 15 | 
self.assertEqual(output.shape, (1, 10)) 16 | 17 | def test_backward_pass(self): 18 | # Test backward pass through neural network 19 | input_data = np.random.rand(1, 784) 20 | output = self.nn.forward_pass(input_data) 21 | self.nn.backward_pass(output, input_data) 22 | self.assertTrue(self.nn.weights_updated) 23 | 24 | def test_train(self): 25 | # Test training of neural network 26 | input_data = np.random.rand(100, 784) 27 | output_data = np.random.rand(100, 10) 28 | self.nn.train(input_data, output_data, epochs=10) 29 | self.assertTrue(self.nn.trained) 30 | 31 | if __name__ == '__main__': 32 | unittest.main() 33 | -------------------------------------------------------------------------------- /node_utils/node_monitoring.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import os 3 | import json 4 | import requests 5 | import psutil 6 | 7 | # Node Monitoring class 8 | class NodeMonitoring: 9 | def __init__(self, node_id, monitoring_server_url): 10 | self.node_id = node_id 11 | self.monitoring_server_url = monitoring_server_url 12 | 13 | def monitor_node(self): 14 | # Monitor node resources and send data to monitoring server 15 | node_data = { 16 | "node_id": self.node_id, 17 | "cpu_usage": psutil.cpu_percent(), 18 | "memory_usage": psutil.virtual_memory().percent, 19 | "storage_usage": psutil.disk_usage('/').percent 20 | } 21 | response = requests.post(self.monitoring_server_url, data=json.dumps(node_data)) 22 | if response.status_code == 200: 23 | print(f"Node {self.node_id} monitoring data sent successfully") 24 | else: 25 | print(f"Error sending node {self.node_id} monitoring data: {response.text}") 26 | 27 | def start_monitoring(self): 28 | # Start monitoring node resources at regular intervals 29 | while True: 30 | self.monitor_node() 31 | time.sleep(60) # Monitor every 60 seconds 32 | 33 | # Example usage 34 | node_id = "node1" 35 | monitoring_server_url = 
"https://monitoring-server.com/monitor" 36 | 37 | node_monitoring = NodeMonitoring(node_id, monitoring_server_url) 38 | node_monitoring.start_monitoring() 39 | -------------------------------------------------------------------------------- /blockchain_integration_tests/test_data_contract.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | from web3 import Web3 4 | from blockchain_integration.data_contract import DataContract 5 | 6 | class TestDataContract(unittest.TestCase): 7 | def setUp(self): 8 | # Set up test blockchain network 9 | self.w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/YOUR_PROJECT_ID')) 10 | self.data_contract = DataContract(self.w3, '0x1234567890abcdef') 11 | 12 | def test_deploy(self): 13 | # Test deployment of data contract 14 | tx_hash = self.data_contract.deploy() 15 | self.assertIsNotNone(tx_hash) 16 | 17 | def test_set_data(self): 18 | # Test setting of data 19 | data = 'Hello, World!' 20 | tx_hash = self.data_contract.set_data(data) 21 | self.assertIsNotNone(tx_hash) 22 | 23 | def test_get_data(self): 24 | # Test getting of data 25 | data = self.data_contract.get_data() 26 | self.assertEqual(data, 'Hello, World!') 27 | 28 | def test_update_data(self): 29 | # Test updating of data 30 | new_data = 'Hello, Blockchain!' 
31 | tx_hash = self.data_contract.update_data(new_data) 32 | self.assertIsNotNone(tx_hash) 33 | 34 | def test_delete_data(self): 35 | # Test deletion of data 36 | tx_hash = self.data_contract.delete_data() 37 | self.assertIsNotNone(tx_hash) 38 | 39 | if __name__ == '__main__': 40 | unittest.main() 41 | -------------------------------------------------------------------------------- /blockchain_integration/smart_contracts/data_contract.py: -------------------------------------------------------------------------------- 1 | # Data Contract class 2 | class DataContract: 3 | def __init__(self, data_type, data_schema, data_source, data_destination): 4 | self.data_type = data_type 5 | self.data_schema = data_schema 6 | self.data_source = data_source 7 | self.data_destination = data_destination 8 | 9 | def validate(self): 10 | # Validate data type 11 | if self.data_type not in ["csv", "json", "parquet"]: 12 | raise ValueError("Invalid data type") 13 | 14 | # Validate data schema 15 | if not isinstance(self.data_schema, dict): 16 | raise ValueError("Invalid data schema") 17 | 18 | # Validate data source 19 | if not isinstance(self.data_source, str): 20 | raise ValueError("Invalid data source") 21 | 22 | # Validate data destination 23 | if not isinstance(self.data_destination, str): 24 | raise ValueError("Invalid data destination") 25 | 26 | def to_dict(self): 27 | return { 28 | "data_type": self.data_type, 29 | "data_schema": self.data_schema, 30 | "data_source": self.data_source, 31 | "data_destination": self.data_destination 32 | } 33 | 34 | def __str__(self): 35 | return f"DataContract(data_type={self.data_type}, data_schema={self.data_schema}, data_source={self.data_source}, data_destination={self.data_destination})" 36 | 37 | # Example usage 38 | data_contract = DataContract( 39 | data_type="csv", 40 | data_schema={"column1": "int", "column2": "string"}, 41 | data_source="data.csv", 42 | data_destination="processed_data.csv" 43 | ) 44 | 45 | 
print(data_contract.to_dict()) 46 | -------------------------------------------------------------------------------- /incident_response/response_generator.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import os 3 | import json 4 | import requests 5 | from jinja2 import Template 6 | from datetime import datetime 7 | 8 | # Incident Response class 9 | class IncidentResponse: 10 | def __init__(self, incident_id, incident_type, severity, description, affected_systems): 11 | self.incident_id = incident_id 12 | self.incident_type = incident_type 13 | self.severity = severity 14 | self.description = description 15 | self.affected_systems = affected_systems 16 | 17 | def generate_response(self): 18 | # Load response template 19 | template = Template(filename="response_template.j2") 20 | 21 | # Render response template with incident data 22 | response = template.render( 23 | incident_id=self.incident_id, 24 | incident_type=self.incident_type, 25 | severity=self.severity, 26 | description=self.description, 27 | affected_systems=self.affected_systems, 28 | timestamp=datetime.now().strftime("%Y-%m-%d %H:%M:%S") 29 | ) 30 | 31 | return response 32 | 33 | def send_response(self, notification_system): 34 | # Send response to notification system 35 | notification_system.send_notification(self.generate_response()) 36 | 37 | # Example usage 38 | incident_response = IncidentResponse( 39 | incident_id="INC123", 40 | incident_type="Malware Outbreak", 41 | severity="High", 42 | description="Malware outbreak detected on multiple systems", 43 | affected_systems=["system1", "system2", "system3"] 44 | ) 45 | 46 | notification_system = NotificationSystem() 47 | incident_response.send_response(notification_system) 48 | -------------------------------------------------------------------------------- /incident_response/notification_system.py: 
-------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import os 3 | import json 4 | import requests 5 | from email.mime.text import MIMEText 6 | from email.mime.multipart import MIMEMultipart 7 | from smtplib import SMTP 8 | 9 | # Notification System class 10 | class NotificationSystem: 11 | def __init__(self, notification_config): 12 | self.notification_config = notification_config 13 | 14 | def send_notification(self, response): 15 | # Load notification configuration 16 | notification_config = self.notification_config 17 | 18 | # Send notification via email 19 | msg = MIMEMultipart() 20 | msg["Subject"] = "Incident Response - " + response["incident_id"] 21 | msg["From"] = notification_config["email_from"] 22 | msg["To"] = notification_config["email_to"] 23 | 24 | body = response["response"] 25 | msg.attach(MIMEText(body, "plain")) 26 | 27 | server = SMTP(notification_config["smtp_server"], notification_config["smtp_port"]) 28 | server.starttls() 29 | server.login(notification_config["smtp_username"], notification_config["smtp_password"]) 30 | server.sendmail(notification_config["email_from"], notification_config["email_to"], msg.as_string()) 31 | server.quit() 32 | 33 | # Send notification via API 34 | api_url = notification_config["api_url"] 35 | api_key = notification_config["api_key"] 36 | headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"} 37 | response = requests.post(api_url, headers=headers, json=response) 38 | if response.status_code != 200: 39 | print("Error sending notification via API") 40 | 41 | def load_notification_config(self): 42 | # Load notification configuration from file 43 | with open("notification_config.json", "r") as f: 44 | notification_config = json.load(f) 45 | return notification_config 46 | 47 | # Example usage 48 | notification_system = NotificationSystem(NotificationSystem().load_notification_config()) 49 | 
-------------------------------------------------------------------------------- /blockchain_integration_tests/test_security_contract.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | from web3 import Web3 4 | from blockchain_integration.security_contract import SecurityContract 5 | 6 | class TestSecurityContract(unittest.TestCase): 7 | def setUp(self): 8 | # Set up test blockchain network 9 | self.w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/YOUR_PROJECT_ID')) 10 | self.security_contract = SecurityContract(self.w3, '0x1234567890abcdef') 11 | 12 | def test_deploy(self): 13 | # Test deployment of security contract 14 | tx_hash = self.security_contract.deploy() 15 | self.assertIsNotNone(tx_hash) 16 | 17 | def test_mint(self): 18 | # Test minting of new tokens 19 | token_amount = 100 20 | tx_hash = self.security_contract.mint(token_amount) 21 | self.assertIsNotNone(tx_hash) 22 | 23 | def test_transfer(self): 24 | # Test transfer of tokens 25 | from_account = '0x1234567890abcdef' 26 | to_account = '0xfedcba9876543210' 27 | token_amount = 50 28 | tx_hash = self.security_contract.transfer(from_account, to_account, token_amount) 29 | self.assertIsNotNone(tx_hash) 30 | 31 | def test_balance_of(self): 32 | # Test balance of tokens for an account 33 | account = '0x1234567890abcdef' 34 | balance = self.security_contract.balance_of(account) 35 | self.assertGreaterEqual(balance, 0) 36 | 37 | def test_allowance(self): 38 | # Test allowance of tokens for an account 39 | owner = '0x1234567890abcdef' 40 | spender = '0xfedcba9876543210' 41 | allowance = self.security_contract.allowance(owner, spender) 42 | self.assertGreaterEqual(allowance, 0) 43 | 44 | def test_approve(self): 45 | # Test approval of tokens for an account 46 | owner = '0x1234567890abcdef' 47 | spender = '0xfedcba9876543210' 48 | token_amount = 50 49 | tx_hash = self.security_contract.approve(owner, spender, 
token_amount) 50 | self.assertIsNotNone(tx_hash) 51 | 52 | if __name__ == '__main__': 53 | unittest.main() 54 | -------------------------------------------------------------------------------- /cybersecurity_framework_tests/test_threat_detection.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | from cybersecurity_framework.threat_detection import ThreatDetection 4 | from cybersecurity_framework.threat_intelligence import ThreatIntelligence 5 | 6 | class TestThreatDetection(unittest.TestCase): 7 | def setUp(self): 8 | # Set up threat detection system 9 | self.threat_detection = ThreatDetection() 10 | self.threat_intelligence = ThreatIntelligence() 11 | 12 | def test_analyze_network_traffic(self): 13 | # Test analysis of network traffic 14 | network_traffic = [ 15 | {'src_ip': '192.168.1.100', 'dst_ip': '8.8.8.8', 'protocol': 'TCP'}, 16 | {'src_ip': '192.168.1.100', 'dst_ip': '1.1.1.1', 'protocol': 'UDP'} 17 | ] 18 | threats = self.threat_detection.analyze_network_traffic(network_traffic) 19 | self.assertGreaterEqual(len(threats), 0) 20 | 21 | def test_analyze_system_logs(self): 22 | # Test analysis of system logs 23 | system_logs = [ 24 | {'timestamp': '2022-01-01 12:00:00', 'log_level': 'INFO', 'message': 'Login successful'}, 25 | {'timestamp': '2022-01-01 12:00:01', 'log_level': 'WARNING', 'message': 'Suspicious activity detected'} 26 | ] 27 | threats = self.threat_detection.analyze_system_logs(system_logs) 28 | self.assertGreaterEqual(len(threats), 0) 29 | 30 | def test_integrate_with_threat_intelligence(self): 31 | # Test integration with threat intelligence 32 | threat_intel = self.threat_intelligence.get_threat_intel() 33 | self.threat_detection.integrate_with_threat_intelligence(threat_intel) 34 | self.assertTrue(self.threat_detection.threat_intel_integrated) 35 | 36 | def test_detect_malware(self): 37 | # Test detection of malware 38 | malware_sample = 
b'\x00\x01\x02\x03\x04\x05' 39 | detection_result = self.threat_detection.detect_malware(malware_sample) 40 | self.assertTrue(detection_result) 41 | 42 | def test_detect_anomaly(self): 43 | # Test detection of anomaly 44 | anomaly_data = [1, 2, 3, 4, 5, 6, 7, 8, 9] 45 | detection_result = self.threat_detection.detect_anomaly(anomaly_data) 46 | self.assertTrue(detection_result) 47 | 48 | if __name__ == '__main__': 49 | unittest.main() 50 | -------------------------------------------------------------------------------- /cybersecurity_framework_tests/test_incident_response.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import unittest 3 | from cybersecurity_framework.incident_response import IncidentResponse 4 | from cybersecurity_framework.threat_detection import ThreatDetection 5 | 6 | class TestIncidentResponse(unittest.TestCase): 7 | def setUp(self): 8 | # Set up incident response system 9 | self.incident_response = IncidentResponse() 10 | self.threat_detection = ThreatDetection() 11 | 12 | def test_identify_incident(self): 13 | # Test identification of incident 14 | incident_data = {'timestamp': '2022-01-01 12:00:00', 'log_level': 'CRITICAL', 'message': 'System compromise detected'} 15 | incident_id = self.incident_response.identify_incident(incident_data) 16 | self.assertIsNotNone(incident_id) 17 | 18 | def test_contain_incident(self): 19 | # Test containment of incident 20 | incident_id = 'INC12345' 21 | containment_result = self.incident_response.contain_incident(incident_id) 22 | self.assertTrue(containment_result) 23 | 24 | def test_eradicate_incident(self): 25 | # Test eradication of incident 26 | incident_id = 'INC12345' 27 | eradication_result = self.incident_response.eradicate_incident(incident_id) 28 | self.assertTrue(eradication_result) 29 | 30 | def test_recover_from_incident(self): 31 | # Test recovery from incident 32 | incident_id = 'INC12345' 33 | recovery_result = 
self.incident_response.recover_from_incident(incident_id) 34 | self.assertTrue(recovery_result) 35 | 36 | def test_integrate_with_threat_detection(self): 37 | # Test integration with threat detection 38 | threat_data = self.threat_detection.analyze_network_traffic([]) 39 | self.incident_response.integrate_with_threat_detection(threat_data) 40 | self.assertTrue(self.incident_response.threat_detection_integrated) 41 | 42 | def test_generate_incident_report(self): 43 | # Test generation of incident report 44 | incident_id = 'INC12345' 45 | report = self.incident_response.generate_incident_report(incident_id) 46 | self.assertIsNotNone(report) 47 | 48 | def test_notify_stakeholders(self): 49 | # Test notification of stakeholders 50 | incident_id = 'INC12345' 51 | notification_result = self.incident_response.notify_stakeholders(incident_id) 52 | self.assertTrue(notification_result) 53 | 54 | if __name__ == '__main__': 55 | unittest.main() 56 | -------------------------------------------------------------------------------- /node_utils/node_registration.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import os 3 | import json 4 | import requests 5 | from cryptography.fernet import Fernet 6 | from cryptography.hazmat.primitives import hashes 7 | from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC 8 | 9 | # Node Registration class 10 | class NodeRegistration: 11 | def __init__(self, node_id, node_key, registration_server_url): 12 | self.node_id = node_id 13 | self.node_key = node_key 14 | self.registration_server_url = registration_server_url 15 | 16 | def register_node(self): 17 | # Register node with registration server 18 | registration_data = { 19 | "node_id": self.node_id, 20 | "node_key": self.node_key, 21 | "node_type": "compute_node", 22 | "node_capabilities": ["cpu", "memory", "storage"] 23 | } 24 | encrypted_registration_data = self.encrypt_data(json.dumps(registration_data)) 25 
| response = requests.post(self.registration_server_url, data=encrypted_registration_data) 26 | if response.status_code == 200: 27 | print(f"Node {self.node_id} registered successfully") 28 | else: 29 | print(f"Error registering node {self.node_id}: {response.text}") 30 | 31 | def encrypt_data(self, data): 32 | # Encrypt data using Fernet symmetric encryption 33 | kdf = PBKDF2HMAC( 34 | algorithm=hashes.SHA256(), 35 | length=32, 36 | salt=self.node_key.encode(), 37 | iterations=100000, 38 | ) 39 | key = base64.urlsafe_b64encode(kdf.derive(self.node_key.encode())) 40 | f = Fernet(key) 41 | encrypted_data = f.encrypt(data.encode()) 42 | return encrypted_data 43 | 44 | def decrypt_data(self, encrypted_data): 45 | # Decrypt data using Fernet symmetric encryption 46 | kdf = PBKDF2HMAC( 47 | algorithm=hashes.SHA256(), 48 | length=32, 49 | salt=self.node_key.encode(), 50 | iterations=100000, 51 | ) 52 | key = base64.urlsafe_b64encode(kdf.derive(self.node_key.encode())) 53 | f = Fernet(key) 54 | decrypted_data = f.decrypt(encrypted_data) 55 | return decrypted_data.decode() 56 | 57 | # Example usage 58 | node_id = "node1" 59 | node_key = "my_secret_key" 60 | registration_server_url = "https://registration-server.com/register" 61 | 62 | node_registration = NodeRegistration(node_id, node_key, registration_server_url) 63 | node_registration.register_node() 64 | -------------------------------------------------------------------------------- /decentralized_data_storage/ipfs_utils/ipfs_server.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import os 3 | import json 4 | import ipfshttpclient 5 | from flask import Flask, request, jsonify 6 | from flask_cors import CORS 7 | from ipfsapi import IpfsApi 8 | 9 | # IPFS Server class 10 | class IpfsServer: 11 | def __init__(self, ipfs_api, ipfs_http_client): 12 | self.ipfs_api = ipfs_api 13 | self.ipfs_http_client = ipfs_http_client 14 | 15 | def add_file(self, 
file_path): 16 | # Add file to IPFS 17 | with open(file_path, "rb") as f: 18 | file_hash = self.ipfs_api.add(f.read()) 19 | return file_hash 20 | 21 | def get_file(self, file_hash): 22 | # Get file from IPFS 23 | file_data = self.ipfs_api.cat(file_hash) 24 | return file_data 25 | 26 | def pin_file(self, file_hash): 27 | # Pin file to IPFS 28 | self.ipfs_api.pin_add(file_hash) 29 | 30 | def unpin_file(self, file_hash): 31 | # Unpin file from IPFS 32 | self.ipfs_api.pin_rm(file_hash) 33 | 34 | def get_file_info(self, file_hash): 35 | # Get file info from IPFS 36 | file_info = self.ipfs_api.object_stat(file_hash) 37 | return file_info 38 | 39 | def search_files(self, query): 40 | # Search files in IPFS 41 | search_results = self.ipfs_api.search(query) 42 | return search_results 43 | 44 | # Flask API 45 | app = Flask(__name__) 46 | CORS(app) 47 | 48 | # IPFS API client 49 | ipfs_api = IpfsApi() 50 | 51 | # IPFS HTTP client 52 | ipfs_http_client = ipfshttpclient.connect() 53 | 54 | # IPFS Server instance 55 | ipfs_server = IpfsServer(ipfs_api, ipfs_http_client) 56 | 57 | # API endpoints 58 | @app.route("/add_file", methods=["POST"]) 59 | def add_file(): 60 | file_path = request.form["file_path"] 61 | file_hash = ipfs_server.add_file(file_path) 62 | return jsonify({"file_hash": file_hash}) 63 | 64 | @app.route("/get_file", methods=["GET"]) 65 | def get_file(): 66 | file_hash = request.args.get("file_hash") 67 | file_data = ipfs_server.get_file(file_hash) 68 | return jsonify({"file_data": file_data}) 69 | 70 | @app.route("/pin_file", methods=["POST"]) 71 | def pin_file(): 72 | file_hash = request.form["file_hash"] 73 | ipfs_server.pin_file(file_hash) 74 | return jsonify({"message": "File pinned successfully"}) 75 | 76 | @app.route("/unpin_file", methods=["POST"]) 77 | def unpin_file(): 78 | file_hash = request.form["file_hash"] 79 | ipfs_server.unpin_file(file_hash) 80 | return jsonify({"message": "File unpinned successfully"}) 81 | 82 | @app.route("/get_file_info", 
# Decision-tree classification demo on the UCI balance-scale dataset.

def importdata():
    """Download the balance-scale dataset and print basic facts about it."""
    url = ('https://archive.ics.uci.edu/ml/machine-learning-'
           'databases/balance-scale/balance-scale.data')
    balance_data = pd.read_csv(url, sep=',', header=None)
    print("Dataset Length: ", len(balance_data))
    print("Dataset Shape: ", balance_data.shape)
    print("Dataset: ", balance_data.head())
    return balance_data


def splitdataset(balance_data):
    """Separate features (cols 1-4) from target (col 0) and make a 70/30 split."""
    values = balance_data.values
    X, Y = values[:, 1:5], values[:, 0]
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test


def train_using_gini(X_train, X_test, y_train):
    """Fit a depth-3 Gini-criterion tree (X_test unused; kept for signature parity)."""
    model = DecisionTreeClassifier(criterion="gini", random_state=100,
                                   max_depth=3, min_samples_leaf=5)
    model.fit(X_train, y_train)
    return model


def train_using_entropy(X_train, X_test, y_train):
    """Fit a depth-3 entropy-criterion tree (X_test unused; kept for signature parity)."""
    model = DecisionTreeClassifier(criterion="entropy", random_state=100,
                                   max_depth=3, min_samples_leaf=5)
    model.fit(X_train, y_train)
    return model


def prediction(X_test, clf_object):
    """Predict labels for X_test with *clf_object*, echo and return them."""
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred


def cal_accuracy(y_test, y_pred):
    """Print confusion matrix, accuracy (as a percentage) and the full report."""
    print("Confusion Matrix: ", confusion_matrix(y_test, y_pred))
    print("Accuracy : ", accuracy_score(y_test, y_pred) * 100)
    print("Report : ", classification_report(y_test, y_pred))


def main():
    """End-to-end demo: load, split, train both trees, evaluate, plot the Gini tree."""
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)

    print("Results Using Gini Index:")
    cal_accuracy(y_test, prediction(X_test, clf_gini))
    print("Results Using Entropy:")
    cal_accuracy(y_test, prediction(X_test, clf_entropy))

    # Visualize the Gini-trained tree.
    plt.figure(figsize=(15, 10))
    tree.plot_tree(clf_gini, filled=True,
                   feature_names=['X1', 'X2', 'X3', 'X4'],
                   class_names=['L', 'B', 'R'])
    plt.show()


if __name__ == "__main__":
    main()
# Peer-to-peer node communication with Fernet-encrypted payload helpers.
class NodeCommunication:
    """A TCP node that accepts peer connections and exchanges JSON payloads.

    Fixes over the previous revision:
    - handle_connection referenced an undefined ``addr`` (NameError on the
      first received message); the accept loop now passes the peer address
      through to the handler.
    - ``base64`` was used without being imported anywhere in this module; it
      is imported locally in the key-derivation helper.
    - the outbound socket in send_data is now closed even if connect/send fails.
    """

    def __init__(self, node_id, node_key, node_port, network_nodes):
        self.node_id = node_id
        self.node_key = node_key            # shared secret for key derivation
        self.node_port = node_port
        self.network_nodes = network_nodes  # node_id -> {"port": int}
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.bind(("localhost", self.node_port))
        self.socket.listen(5)
        self.threads = []

    def start_node(self):
        """Accept connections forever, spawning one handler thread per peer."""
        print(f"Node {self.node_id} started and listening on port {self.node_port}")
        while True:
            conn, addr = self.socket.accept()
            print(f"Connected by {addr}")
            t = threading.Thread(target=self.handle_connection, args=(conn, addr))
            t.start()
            self.threads.append(t)

    def handle_connection(self, conn, addr=None):
        """Read from one peer until EOF, dispatching each received payload.

        *addr* defaults to None for backward compatibility with callers that
        pass only the socket.
        """
        while True:
            data = conn.recv(1024)
            if not data:
                break
            # Bug fix: `addr` was previously undefined in this scope.
            print(f"Received data from {addr}: {data.decode()}")
            self.process_data(data.decode())

    def process_data(self, data):
        """Dispatch a received JSON payload by its `type` field."""
        data_json = json.loads(data)
        if data_json["type"] == "message":
            print(f"Received message from node {data_json['node_id']}: {data_json['message']}")
            self.send_response(data_json["node_id"], "ack")
        elif data_json["type"] == "request":
            print(f"Received request from node {data_json['node_id']}: {data_json['request']}")
            self.send_response(data_json["node_id"], "response")

    def send_response(self, node_id, response):
        """Send a `response`-typed JSON payload to *node_id*."""
        response_json = json.dumps({"type": "response", "node_id": self.node_id, "response": response})
        self.send_data(node_id, response_json)

    def send_data(self, node_id, data):
        """Open a short-lived TCP connection to *node_id* and send *data*."""
        node_port = self.network_nodes[node_id]["port"]
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            conn.connect(("localhost", node_port))
            conn.sendall(data.encode())
        finally:
            conn.close()  # bug fix: close the socket even if connect/send raises

    def _derive_fernet(self):
        """Derive a Fernet instance from the node key via PBKDF2-HMAC-SHA256.

        NOTE(review): the salt is the key itself, which weakens the KDF; a
        random stored salt would be preferable. Behavior kept as-is.
        """
        import base64  # bug fix: base64 was used but never imported in this module
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=self.node_key.encode(),
            iterations=100000,
        )
        return Fernet(base64.urlsafe_b64encode(kdf.derive(self.node_key.encode())))

    def encrypt_data(self, data):
        """Encrypt a str payload; returns the Fernet token as bytes."""
        return self._derive_fernet().encrypt(data.encode())

    def decrypt_data(self, encrypted_data):
        """Decrypt bytes produced by encrypt_data; returns the plaintext str."""
        return self._derive_fernet().decrypt(encrypted_data).decode()


if __name__ == "__main__":
    # Example usage — guarded so importing this module no longer binds a port
    # and blocks forever (the original ran this at import time).
    node_id = "node1"
    node_key = "my_secret_key"
    node_port = 8080
    network_nodes = {
        "node1": {"port": 8080},
        "node2": {"port": 8081},
        "node3": {"port": 8082},
    }
    node_communication = NodeCommunication(node_id, node_key, node_port, network_nodes)
    node_communication.start_node()
# Helper routines for static malware analysis of PE files.

def extract_features(file_path):
    """Extract numeric header features from the PE file at *file_path*.

    Returns a flat list of integers from the COFF file header and the
    optional header, in a fixed order (used as the ML feature vector).
    Bug fix: the original ended by appending ``pe.FILE_HEADER.ImageBase``,
    which does not exist in pefile (ImageBase lives in OPTIONAL_HEADER and is
    already collected below) and raised AttributeError on every call.
    """
    pe = pefile.PE(file_path)
    opt = pe.OPTIONAL_HEADER
    hdr = pe.FILE_HEADER
    # NOTE(review): BaseOfData exists only for PE32 (not PE32+) binaries —
    # behavior kept as in the original; confirm inputs are 32-bit PEs.
    return [
        hdr.NumberOfSections,
        hdr.SizeOfOptionalHeader,
        opt.SizeOfCode,
        opt.SizeOfInitializedData,
        opt.SizeOfUninitializedData,
        opt.AddressOfEntryPoint,
        opt.BaseOfCode,
        opt.BaseOfData,
        opt.ImageBase,
        opt.SectionAlignment,
        opt.FileAlignment,
        opt.SizeOfHeaders,
        opt.CheckSum,
        opt.SizeOfImage,
        opt.SizeOfHeaders,         # kept: duplicated in the original feature order
        opt.Subsystem,
        opt.DllCharacteristics,
        opt.SizeOfStackReserve,
        opt.SizeOfStackCommit,
        opt.SizeOfHeapReserve,
        opt.SizeOfHeapCommit,
        opt.LoaderFlags,
        opt.NumberOfRvaAndSizes,
        hdr.Characteristics,
        hdr.Machine,
        hdr.NumberOfSections,      # kept: duplicated in the original feature order
        hdr.TimeDateStamp,
        hdr.PointerToSymbolTable,
        hdr.NumberOfSymbols,
        hdr.SizeOfOptionalHeader,  # kept: duplicated in the original feature order
    ]


def calculate_hash(file_path):
    """Return the SHA-256 hex digest of the file at *file_path*, read in 4 KiB chunks."""
    hash_sha256 = hashlib.sha256()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_sha256.update(chunk)
    return hash_sha256.hexdigest()


def scan_with_yara(file_path):
    """Return True if the file matches any rule in malware_rules.yara."""
    rules = yara.compile(file="malware_rules.yara")
    # A non-empty match list means at least one rule fired.
    return bool(rules.match(file_path))


def classify_file(file_path):
    """Classify the PE file at *file_path* as "Benign" or "Malicious".

    Bug fix: sklearn estimators have no ``.load()`` method (the original
    called it on a freshly constructed RandomForestClassifier); the persisted
    model is loaded with pickle instead. SECURITY NOTE: unpickling is only
    acceptable because malware_model.pkl is a trusted local artifact — never
    unpickle data from an untrusted source.
    """
    import pickle
    features = np.array(extract_features(file_path)).reshape(1, -1)
    with open("malware_model.pkl", "rb") as f:
        model = pickle.load(f)
    prediction = model.predict(features)
    return "Benign" if prediction[0] == 0 else "Malicious"


def submit_to_sandbox(file_path):
    """POST the file to the sandbox API; return parsed JSON on HTTP 200, else None.

    NOTE(review): api_secret is defined but never sent with the request —
    confirm whether the sandbox API requires it.
    """
    api_key = "YOUR_API_KEY"
    api_secret = "YOUR_API_SECRET"
    url = "https://sandbox.example.com/api/submit"
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/octet-stream",
    }
    with open(file_path, "rb") as f:
        response = requests.post(url, headers=headers, data=f.read())
    return response.json() if response.status_code == 200 else None
107 | sandbox_result = submit_to_sandbox(file_path) 108 | print(f"Sandbox result: {sandbox_result}") 109 | 110 | if __name__ == "__main__": 111 | main() 112 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [](https://www.iso.org/iso-9001-quality-management.html) 2 | [](https://www.iso.org/isoiec-27001-information-security.html) 3 | [](https://standards.ieee.org/) 4 | [](https://www.w3.org/) 5 | [](https://gdpr.eu/) 6 | [](https://hitrustalliance.net/) 7 | [](https://cmmiinstitute.com/) 8 | [](https://www.nist.gov/cyberframework) 9 | [](https://whc.unesco.org/) 10 | [](http://www.fao.org/) 11 | [](https://www.who.int/) 12 | [](https://www.iata.org/) 13 | [](https://www.fsc.org/) 14 | [](https://bcorporation.net/) 15 | [](https://www.usgbc.org/leed) 16 | [](https://www.sai-global.com/) 17 | [](https://www.iso.org/iso-14001-environmental-management.html) 18 | [](https://www.iso.org/iso-45001-occupational-health-and-safety.html) 19 | [](https://cmmiinstitute.com/) 20 | [](https://www.pcisecuritystandards.org/) 21 | [](https://www.iso.org/iso-50001-energy-management.html) 22 | [](https://www.iso.org/iso-22301-business-continuity.html) 23 | 
[](https://www.iso.org/iso-13485-medical-devices.html) 24 | [](https://www.nabers.gov.au/) 25 | [](https://www.greenseal.org/) 26 | [](https://www.energystar.gov/) 27 | [](https://www.sasb.org/) 28 | [](https://www.iso.org/iso-37001-anti-bribery.html) 29 | [](https://www.iso.org/iso-20121-sustainable-events.html) 30 | [](https://www.iso.org/iso-26000-social-responsibility.html) 31 | [](https://www.iso.org/iso-31000-risk-management.html) 32 | [](https://www.iso.org/iso-45003-psychological-health.html) 33 | [](https://www.iso.org/iso-37002-whistleblowing-management.html) 34 | [](https://www.iso.org/iso-50002-energy-audits.html) 35 | [](https://www.iso.org/iso-14064-greenhouse-gases.html) 36 | [](https://www.iso.org/iso-14046-water-footprint.html) 37 | [](https://www.iso.org/iso-22316-organizational-resilience.html) 38 | [](https://www.iso.org/iso-28000-supply-chain-security.html) 39 | [](https://www.iso.org/iso-37001-anti-bribery.html) 40 | [](https://www.iso.org/iso-45001-occupational-health-and-safety.html) 41 | [](https://www.iso.org/iso-9001-quality-management.html) 42 | [](https://www.iso.org/iso-14001-environmental-management.html) 43 | [](https://www.iso.org/iso-50001-energy-management.html) 44 | [](https://www.iso.org/iso-22301-business-continuity.html) 45 | [](https://www.iso.org/iso-13485-medical-devices.html) 46 | [](https://www.iso.org/iso-27001-information-security.html) 47 | [](https://www.iso.org/iso-20000-it-service-management.html) 48 | [](https://www.iso.org/iso-45003-psychological-health.html) 49 | [](https://www.iso.org/iso-50001-energy-management.html) 50 | [](https://www.iso.org/iso-14064-greenhouse-gases.html) 51 | [](https://www.iso.org/iso-14064-greenhouse-gases.html) 52 | [](https://www.iso.org/iso-14064-greenhouse-gases.html) 53 | [](https://www.iso.org/iso-22320-emergency-management.html) 54 | [](https://www.iso.org/iso-22313-business-continuity.html) 55 | [](https://www.iso.org/iso-28001-supply-chain-security.html) 56 | 
[](https://www.iso.org/iso-37002-whistleblowing-management.html) 57 | [](https://www.iso.org/iso-45002-occupational-health-and-safety.html) 58 | [](https://www.iso.org/iso-50003-energy-management.html) 59 | [](https://www.iso.org/iso-9000-quality-management.html) 60 | [](https://www.iso.org/iso-10002-customer-satisfaction.html) 61 | [](https://www.iso.org/iso-10001-customer-satisfaction.html) 62 | [](https://www.iso.org/iso-10003-customer-satisfaction.html) 63 | [](https://www.iso.org/iso-10004-customer-satisfaction.html) 64 | [](https://www.iso.org/iso-14015-environmental-assessment.html) 65 | [](https://www.iso.org/iso-14046-water-footprint.html) 66 | [](https://www.iso.org/iso-50006-energy-management.html) 67 | [](https://www.iso.org/iso-50007-energy-management.html) 68 | [](https://www.iso.org/iso-22301-business-continuity.html) 69 | 70 | [](https://aws.amazon.com/certification/certified-solutions-architect-associate/) 71 | [](https://aws.amazon.com/certification/certified-developer-associate/) 72 | [](https://aws.amazon.com/certification/certified-sysops-administrator-associate/) 73 | [](https://learn.microsoft.com/en-us/certifications/azure-fundamentals/) 74 | [](https://learn.microsoft.com/en-us/certifications/azure-solutions-architect/) 75 | [](https://cloud.google.com/certification/cloud-architect) 76 | [](https://cloud.google.com/certification/cloud-engineer) 77 | [](https://www.cisco.com/c/en/us/training-events/training-certifications/certifications/associate/ccna.html) 78 | [](https://www.comptia.org/certifications/security) 79 | [](https://www.isc2.org/Certifications/CISSP) 80 | [](https://www.eccouncil.org/programs/certified-ethical-hacker-ceh/) 81 | [](https://www.pmi.org/certifications/project-management-pmp) 82 | [](https://www.axelos.com/certifications/itil-certification) 83 | [](https://www.scrumstudy.com/certification/scrum-master-certification/) 84 | [](https://aws.amazon.com/certification/certified-solutions-architect-professional/) 85 | 
[](https://aws.amazon.com/certification/certified-devops-engineer-professional/) 86 | [](https://learn.microsoft.com/en-us/certifications/devops-engineer/) 87 | [](https://cloud.google.com/certification/data-engineer) 88 | [](https://cloud.google.com/certification/machine-learning-engineer) 89 | [](https://www.cisco.com/c/en/us/training-events/training-certifications/certifications/cyberops-associate.html) 90 | [](https://www.comptia.org/certifications/network) 91 | [](https://www.comptia.org/certifications/a) 92 | [](https://www.isaca.org/credentialing/cism) 93 | [](https://www.isaca.org/credentialing/cisa) 94 | [](https://www.isc2.org/Certifications/CCSP) 95 | [](https://learn.microsoft.com/en-us/certifications/security-compliance-identity-fundamentals/) 96 | [](https://education.oracle.com/java-se-11-developer/pexam_1Z0-819) 97 | [](https://www.redhat.com/en/services/certification/rhce) 98 | [](https://aws.amazon.com/certification/certified-advanced-networking-specialty/) 99 | [](https://aws.amazon.com/certification/certified-security-specialty/) 100 | [](https://learn.microsoft.com/en-us/certifications/azure-security-engineer/) 101 | [](https://learn.microsoft.com/en-us/certifications/data-scientist/) 102 | [](https://cloud.google.com/certification/cloud-developer) 103 | [](https://www.cisco.com/c/en/us/training-events/training-certifications/certifications/professional/ccnp.html) 104 | [](https://www.comptia.org/certifications/cloud) 105 | [](https://www.cncf.io/certification/cka/) 106 | [](https://www.cncf.io/certification/ckad/) 107 | [](https://www.vmware.com/certification/vcp.html) 108 | [](https://iapp.org/certify/cipp/) 109 | [](https://www.dama.org/certification/cdmp) 110 | [](https://trailhead.salesforce.com/credentials/administrator) 111 | [](https://trailhead.salesforce.com/credentials/platformdeveloper) 112 | [](https://www.eccouncil.org/programs/certified-ethical-hacker-ceh/) 113 | 
[](https://learn.microsoft.com/en-us/certifications/azure-ai-engineer/) 114 | [](https://aws.amazon.com/certification/certified-database-specialty/) 115 | [](https://aws.amazon.com/certification/certified-machine-learning-specialty/) 116 | [](https://learn.microsoft.com/en-us/certifications/azure-solutions-architect/) 117 | [](https://learn.microsoft.com/en-us/certifications/data-engineer/) 118 | [](https://cloud.google.com/certification/collaboration-engineer) 119 | [](https://www.cisco.com/c/en/us/training-events/training-certifications/certifications/associate/ccda.html) 120 | [](https://www.comptia.org/certifications/pentest) 121 | [](https://www.isc2.org/Certifications/CISSP) 122 | [](https://www.isaca.org/credentialing/cism) 123 | [](https://www.isaca.org/credentialing/cisa) 124 | [](https://www.isaca.org/credentialing/crisc) 125 | [](https://learn.microsoft.com/en-us/certifications/azure-fundamentals/) 126 | [](https://education.oracle.com/java-se-11-programmer/pexam_1Z0-815) 127 | [](https://www.redhat.com/en/services/certification/rhcsa) 128 | [](https://www.isaca.org/credentialing/cdpse) 129 | [](https://trailhead.salesforce.com/credentials/advancedadministrator) 130 | [](https://trailhead.salesforce.com/credentials/technicalarchitect) 131 | [](https://aws.amazon.com/certification/certified-solutions-architect-associate/) 132 | [](https://aws.amazon.com/certification/certified-developer-associate/) 133 | [](https://aws.amazon.com/certification/certified-sysops-administrator-associate/) 134 | [](https://learn.microsoft.com/en-us/certifications/azure-database-administrator/) 135 | [](https://learn.microsoft.com/en-us/certifications/data-scientist/) 136 | [](https://cloud.google.com/certification/cloud-architect) 137 | [](https://www.cisco.com/c/en/us/training-events/training-certifications/certifications/cyberops-professional.html) 138 | [](https://www.comptia.org/certifications/security) 139 | [](https://www.isc2.org/Certifications/CCSP) 140 | 
[](https://www.isc2.org/Certifications/CISSP) 141 | [](https://www.isaca.org/credentialing/cism) 142 | [](https://www.isaca.org/credentialing/cisa) 143 | [](https://www.isaca.org/credentialing/crisc) 144 | [](https://learn.microsoft.com/en-us/certifications/power-platform-fundamentals/) 145 | [](https://education.oracle.com/mysql-database-administrator/pexam_1Z0-921) 146 | [](https://www.redhat.com/en/services/certification/rhca) 147 | [](https://trailhead.salesforce.com/credentials/marketingcloudadmin) 148 | [](https://trailhead.salesforce.com/credentials/commerceclouddeveloper) 149 | 150 |
DAIPE by KOSASIH is licensed under Creative Commons Attribution 4.0 International