├── .obsidian ├── app.json ├── appearance.json ├── core-plugins-migration.json ├── core-plugins.json ├── hotkeys.json └── workspace.json ├── API ├── create-an-api.md ├── json-implementation.md └── what_is_an_api.md ├── Apache └── configure-apache2.md ├── Backend ├── connect-to-mongodb-database.md └── create-a-basic-express-server.md ├── C_and_Cpp_Programming ├── cmake-tutorial.md ├── compile-commands.md ├── cpp-program-structure.md ├── cpp-tutorial.md ├── cpp-vscode-extensions.md ├── create-a-makefile.md ├── create-cmakelists.md ├── debugging-c-cpp.md ├── intellisense-configuration.md └── spellchecking-c-code.md ├── Databases ├── choosing-a-database.md ├── configure-mysql.md └── learning-sql.md ├── Docker └── dockerfiles.md ├── GitHub ├── configuring-github.md ├── create-a-workflow.md └── github-special-files.md ├── LICENSE ├── LearningWebDev.odt ├── Node_JavaScript_Programming ├── babel-and-webpack.md ├── babel.md ├── debugging-nodejs.md ├── javascript-programming.md ├── make-a-package-json-file.md ├── make-an-nodemon-json-file.md ├── nodejs-development.md ├── nodejs-resources.md ├── nodejs-server.md ├── using-npm.md └── webpack.md ├── PHP └── php-development.md ├── Python ├── debugging-python.md ├── python-extensions-vscode.md └── python3-development.md ├── README.md ├── React_Development ├── commonpackages.md ├── react-development-notes.md ├── react-server-apache2.md └── using-eslint-with-react.md ├── VSCode ├── create-a-jsconfig-json-file.md ├── create-a-launch.json file.md ├── create-a-settings.json file.md ├── create-a-tasks-json-file.md ├── extension-suggestion.md ├── vscode-workspaces.md └── what-is-devcontainer.md ├── WebDevelopment ├── aria-accessibility.md ├── beginning-webdev.md ├── css.md └── html.md ├── about-manifest-json.md ├── about-serviceworkers.md ├── about-yaml.md ├── configure-eslint.md ├── configure-prettier.md ├── connect-to-psql.md ├── dotnet ├── about_csharp.md ├── dotnet-sdk-stuff.md └── using-dotnet.md ├── json-file-structure.md ├── 
osdev-stuff.md ├── regex.md ├── using-gcc.md ├── using-gdb-debugger.md ├── using-gnu-ld-linker.md └── webdev.epub /.obsidian/app.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /.obsidian/appearance.json: -------------------------------------------------------------------------------- 1 | { 2 | "accentColor": "" 3 | } -------------------------------------------------------------------------------- /.obsidian/core-plugins-migration.json: -------------------------------------------------------------------------------- 1 | { 2 | "file-explorer": true, 3 | "global-search": true, 4 | "switcher": true, 5 | "graph": true, 6 | "backlink": true, 7 | "canvas": true, 8 | "outgoing-link": true, 9 | "tag-pane": true, 10 | "properties": false, 11 | "page-preview": true, 12 | "daily-notes": true, 13 | "templates": true, 14 | "note-composer": true, 15 | "command-palette": true, 16 | "slash-command": false, 17 | "editor-status": true, 18 | "bookmarks": true, 19 | "markdown-importer": false, 20 | "zk-prefixer": false, 21 | "random-note": false, 22 | "outline": true, 23 | "word-count": true, 24 | "slides": false, 25 | "audio-recorder": false, 26 | "workspaces": false, 27 | "file-recovery": true, 28 | "publish": false, 29 | "sync": false 30 | } -------------------------------------------------------------------------------- /.obsidian/core-plugins.json: -------------------------------------------------------------------------------- 1 | [ 2 | "file-explorer", 3 | "global-search", 4 | "switcher", 5 | "graph", 6 | "backlink", 7 | "canvas", 8 | "outgoing-link", 9 | "tag-pane", 10 | "page-preview", 11 | "daily-notes", 12 | "templates", 13 | "note-composer", 14 | "command-palette", 15 | "editor-status", 16 | "bookmarks", 17 | "outline", 18 | "word-count", 19 | "file-recovery" 20 | ] -------------------------------------------------------------------------------- 
/.obsidian/hotkeys.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /.obsidian/workspace.json: -------------------------------------------------------------------------------- 1 | { 2 | "main": { 3 | "id": "61c02ba09ce2d71e", 4 | "type": "split", 5 | "children": [ 6 | { 7 | "id": "81cc7f826e68d61f", 8 | "type": "tabs", 9 | "children": [ 10 | { 11 | "id": "29f0d8ac447ff127", 12 | "type": "leaf", 13 | "state": { 14 | "type": "empty", 15 | "state": {} 16 | } 17 | }, 18 | { 19 | "id": "57e0f902477bdfe5", 20 | "type": "leaf", 21 | "state": { 22 | "type": "markdown", 23 | "state": { 24 | "file": "osdev-stuff.md", 25 | "mode": "source", 26 | "source": false 27 | } 28 | } 29 | }, 30 | { 31 | "id": "93c46dea9ebeadf4", 32 | "type": "leaf", 33 | "state": { 34 | "type": "markdown", 35 | "state": { 36 | "file": "configure-eslint.md", 37 | "mode": "source", 38 | "source": false 39 | } 40 | } 41 | }, 42 | { 43 | "id": "72602a42c3066b1b", 44 | "type": "leaf", 45 | "state": { 46 | "type": "markdown", 47 | "state": { 48 | "file": "VSCode/what-is-devcontainer.md", 49 | "mode": "source", 50 | "source": false 51 | } 52 | } 53 | }, 54 | { 55 | "id": "465f7c367cbe16f9", 56 | "type": "leaf", 57 | "state": { 58 | "type": "markdown", 59 | "state": { 60 | "file": "VSCode/what-is-devcontainer.md", 61 | "mode": "source", 62 | "source": false 63 | } 64 | } 65 | }, 66 | { 67 | "id": "4d8153ef2d067638", 68 | "type": "leaf", 69 | "state": { 70 | "type": "markdown", 71 | "state": { 72 | "file": "GitHub/github-special-files.md", 73 | "mode": "source", 74 | "source": false 75 | } 76 | } 77 | }, 78 | { 79 | "id": "369d0b03c1027976", 80 | "type": "leaf", 81 | "state": { 82 | "type": "markdown", 83 | "state": { 84 | "file": "C_and_Cpp_Programming/create-a-makefile.md", 85 | "mode": "source", 86 | "source": false 87 | } 88 | } 89 | } 90 | ], 91 | "currentTab": 6 92 | } 93 | ], 94 
| "direction": "vertical" 95 | }, 96 | "left": { 97 | "id": "7de9deafefd5b942", 98 | "type": "split", 99 | "children": [ 100 | { 101 | "id": "c72cf4b8e99d8a19", 102 | "type": "tabs", 103 | "children": [ 104 | { 105 | "id": "0fbb2d6ce229c530", 106 | "type": "leaf", 107 | "state": { 108 | "type": "file-explorer", 109 | "state": { 110 | "sortOrder": "alphabetical" 111 | } 112 | } 113 | }, 114 | { 115 | "id": "f75e486a3c3066be", 116 | "type": "leaf", 117 | "state": { 118 | "type": "search", 119 | "state": { 120 | "query": "", 121 | "matchingCase": false, 122 | "explainSearch": false, 123 | "collapseAll": false, 124 | "extraContext": false, 125 | "sortOrder": "alphabetical" 126 | } 127 | } 128 | }, 129 | { 130 | "id": "a6e3e631005d5b19", 131 | "type": "leaf", 132 | "state": { 133 | "type": "bookmarks", 134 | "state": {} 135 | } 136 | } 137 | ] 138 | } 139 | ], 140 | "direction": "horizontal", 141 | "width": 300 142 | }, 143 | "right": { 144 | "id": "7bd973efa9822180", 145 | "type": "split", 146 | "children": [ 147 | { 148 | "id": "f560336255b6fc88", 149 | "type": "tabs", 150 | "children": [ 151 | { 152 | "id": "6dce15b3f604198e", 153 | "type": "leaf", 154 | "state": { 155 | "type": "backlink", 156 | "state": { 157 | "file": "C_and_Cpp_Programming/create-a-makefile.md", 158 | "collapseAll": false, 159 | "extraContext": false, 160 | "sortOrder": "alphabetical", 161 | "showSearch": false, 162 | "searchQuery": "", 163 | "backlinkCollapsed": false, 164 | "unlinkedCollapsed": true 165 | } 166 | } 167 | }, 168 | { 169 | "id": "76ab74bf0430ac54", 170 | "type": "leaf", 171 | "state": { 172 | "type": "outgoing-link", 173 | "state": { 174 | "file": "C_and_Cpp_Programming/create-a-makefile.md", 175 | "linksCollapsed": false, 176 | "unlinkedCollapsed": true 177 | } 178 | } 179 | }, 180 | { 181 | "id": "5ebe76bd8f2aa2d5", 182 | "type": "leaf", 183 | "state": { 184 | "type": "tag", 185 | "state": { 186 | "sortOrder": "frequency", 187 | "useHierarchy": true 188 | } 189 | } 190 | }, 191 
| { 192 | "id": "697874bafd035c3d", 193 | "type": "leaf", 194 | "state": { 195 | "type": "outline", 196 | "state": { 197 | "file": "C_and_Cpp_Programming/create-a-makefile.md" 198 | } 199 | } 200 | } 201 | ] 202 | } 203 | ], 204 | "direction": "horizontal", 205 | "width": 300, 206 | "collapsed": true 207 | }, 208 | "left-ribbon": { 209 | "hiddenItems": { 210 | "switcher:Open quick switcher": false, 211 | "graph:Open graph view": false, 212 | "canvas:Create new canvas": false, 213 | "daily-notes:Open today's daily note": false, 214 | "templates:Insert template": false, 215 | "command-palette:Open command palette": false 216 | } 217 | }, 218 | "active": "369d0b03c1027976", 219 | "lastOpenFiles": [ 220 | "README.md", 221 | "Docker/dockerfiles.md", 222 | "Python/python3-development.md", 223 | "Python/python3-notes.md", 224 | "GitHub/create-a-workflow.md", 225 | "GitHub/github-special-files.md", 226 | "Python", 227 | "Backend/create-a-basic-express-server.md", 228 | "API/create-an-api.md", 229 | "API/json-implementation.md", 230 | "C_and_Cpp_Programming/compile-commands.md", 231 | "C_and_Cpp_Programming/cmake-tutorial.md", 232 | "VSCode/create-a-jsconfig-json-file.md", 233 | "React_Development/react-server-apache2.md", 234 | "React_Development/react-development-notes.md", 235 | "VSCode/what-is-devcontainer.md", 236 | "dotnet/dotnet-sdk-stuff.md", 237 | "Backend/connect-to-mongodb-database.md", 238 | "VSCode/create-a-tasks-json-file.md", 239 | "connect-to-psql.md", 240 | "configure-prettier.md", 241 | "configure-eslint.md", 242 | "osdev-stuff.md", 243 | "bash-shell-stuff.md", 244 | "json-file-structure.md", 245 | "dotnet", 246 | "VSCode/extension-suggestion.md", 247 | "Node_JavaScript_Programming/make-a-package-json-file.md", 248 | "Backend", 249 | "Docker" 250 | ] 251 | } -------------------------------------------------------------------------------- /API/create-an-api.md: -------------------------------------------------------------------------------- 1 | 2 | # Create 
a Node / Express API 3 | 4 | To create a Node.js and Express API, you can follow these general steps: 5 | 6 | 1. Install Node.js and npm on your machine. 7 | 2. Create a new directory for your project and navigate into it. 8 | 3. Initialize a new Node.js project using `npm init`. 9 | 4. Install Express using `npm install express`. 10 | 5. Create a new JavaScript file for your server, and require Express at the top of the file. 11 | 6. Define your routes and middleware functions using Express. 12 | 7. Start your server using `app.listen()`. 13 | 14 | Here's an example of a simple Node.js and Express API that listens for GET requests on the root route and returns a JSON response: 15 | 16 | ```js 17 | const express = require('express'); 18 | const app = express(); 19 | 20 | app.get('/', (req, res) => { 21 | res.json({ message: 'Hello, world!' }); 22 | }); 23 | 24 | app.listen(3000, () => { 25 | console.log('Server listening on port 3000'); 26 | }); 27 | ``` 28 | 29 | This code creates a new Express app, defines a single route for the root URL that returns a JSON response, and starts the server on port 3000. You can test this API by running the server and visiting `http://localhost:3000` in your web browser or using a tool like Postman to send a GET request to the same URL. Note that this example uses the latest version of Node.js (v18) and Express (v4). If you need to use an older version of Node.js or Express, you may need to modify the code accordingly. 30 | 31 | ## API Endpoints 32 | 33 | What are API Endpoints? API endpoints are specific URLs within a web service. They represent the locations where the server can access the resources needed to carry out various operations. Each endpoint provides access to a particular feature or set of data. For example, in a REST API for a blog, you might have endpoints for retrieving all blog posts, retrieving a single blog post, creating a new blog post, updating an existing blog post, and deleting a blog post. 
These endpoints are accessed through HTTP methods such as GET, POST, PUT, and DELETE. In summary, API endpoints are the URLs through which clients can interact with a web service to perform various actions or retrieve specific data 34 | 35 | REST APIs, or Representational State Transfer Application Programming Interfaces, are used to provide a standard way of accessing web resources. They are commonly used to enable communication and data exchange between different software systems over the internet. REST APIs are widely used for various purposes, including: 36 | 37 | - Data Exchange: REST APIs allow for the exchange of data, content, algorithms, media, and other digital resources between different systems 38 | 39 | - Cloud Applications: They are useful in cloud applications due to their stateless nature, which makes calls to the API more flexible and scalable 40 | 41 | - Web Services: RESTful APIs are used by various websites and services such as Amazon, Google, LinkedIn, and Twitter to allow users to connect to, manage, and interact with cloud services in a distributed environment 42 | 43 | - Standardized Communication: REST APIs provide a simple and uniform interface for accessing and manipulating resources, making them a preferred choice for building APIs that need to support high-performing and reliable communication at scale 44 | 45 | REST APIs are used to facilitate the exchange of data and resources between different software systems, and they are particularly well-suited for use in cloud applications and web services. 
46 | 47 | ## Advantages of Using REST APIs 48 | 49 | Flexibility in Data Formats: REST allows a greater variety of data formats, including JSON, XML, plain text, and HTML, while SOAP only allows XML 50 | 51 | - Stateless Calls: REST APIs are stateless, making them suitable for cloud applications and enabling better scalability 52 | 53 | - Simplicity and Ease of Adoption: Many consider REST APIs easier to use and adopt than SOAP APIs, making them ideal for creating public web services 54 | 55 | - Better Performance and Scalability: REST has better performance and scalability compared to SOAP. REST reads can be cached, while SOAP-based reads cannot be cached 56 | 57 | ## Differences Between REST and SOAP APIs 58 | 59 | - Structure: SOAP is a structured protocol, while REST is more flexible and less defined 60 | 61 | - Data Formats: REST allows a greater variety of data formats, while SOAP only allows XML 62 | 63 | - Statelessness: REST APIs are stateless, while SOAP APIs are not necessarily stateless 64 | 65 | ## Common Challenges When Working with REST APIs 66 | 67 | Lack of Built-in Messaging System: REST doesn't have a built-in messaging system, so if a communication fails, the client has to deal with it by retrying. 68 | 69 | - Security Features: REST lacks some built-in security features that SOAP has, although they aren't necessary when working with limited server resources and bandwidth 70 | 71 | - Statelessness Requirement: All calls to a REST API must be stateless, meaning that every interaction is independent, which can pose challenges in certain scenarios 72 | 73 | REST APIs offer advantages such as flexibility, statelessness, and better performance, but they also come with challenges related to messaging, security, and statelessness requirements. 74 | 75 | When it comes to versioning REST API endpoints, there are a few common approaches: 76 | 77 | URL Versioning: This involves including the version number in the URL. 
For example, you might have /v1/users and /v2/users to represent different versions of the user endpoint. 78 | 79 | Custom Request Header: Another approach is to use a custom request header to specify the version. This keeps the URL clean and is often used when the versioning needs to be more dynamic. 80 | 81 | Media Type Versioning: This approach involves using the Accept header to specify the version of the media type that the client expects. For example, Accept: application/vnd.company.v2+json. 82 | 83 | Query Parameter: You can also use a query parameter to specify the version, such as /users?version=2. 84 | 85 | Each approach has its own advantages and disadvantages, and the choice often depends on the specific requirements of the API and the preferences of the development team. 86 | 87 | ## Best Practices for Designing REST APIs 88 | 89 | Some best practices for designing REST APIs include, 90 | 91 | - Using JSON as the format for sending and receiving data. 92 | - Using nouns instead of verbs in endpoint paths. 93 | - Naming collections with plural nouns. 94 | - Handling errors gracefully and returning standard error codes. 95 | - Allowing filtering, sorting, and pagination. 96 | - Maintaining good security practices 97 | 98 | These best practices help ensure that REST APIs are easy to understand, future-proof, and secure, and they ultimately make the lives of API consumers easier. 
99 | -------------------------------------------------------------------------------- /API/json-implementation.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gbowne1/ProgrammerNotes/804c2d92f3029c24dec9cb5a3dce31e91a269445/API/json-implementation.md -------------------------------------------------------------------------------- /API/what_is_an_api.md: -------------------------------------------------------------------------------- 1 | # What is an API 2 | 3 | REST, which stands for Representational State Transfer, is an architectural style for designing networked applications. It was introduced by computer scientist Roy Fielding in 2000. A REST API (also known as RESTful API) is an application programming interface that conforms to the constraints of the REST architectural style. It allows for interaction with RESTful web services and is based on a set of principles and constraints that promote simplicity, scalability, and statelessness in design. Key points about REST APIs include: 4 | 5 | Principles and Constraints: REST is not a protocol or a standard, but an architectural style with guiding principles and constraints that must be satisfied for a service interface to be referred to as RESTful. These principles include a client-server architecture, statelessness, cacheability, uniform interface, layered system, and code on demand. 6 | Interaction: REST APIs communicate via HTTP requests to perform standard database functions like creating, reading, updating, and deleting records (also known as CRUD) within a resource. They make use of standard HTTP methods (GET, POST, PUT, DELETE, etc.) and URIs to identify resources. 7 | Resource Model: A REST API consists of an assembly of interlinked resources, and it uses resource identifiers and hypermedia links to facilitate interactions between client and server components. 
8 | 9 | A REST API is an interface that follows the principles of the REST architectural style, providing a standardized and flexible approach to building web-based APIs. For more detailed information, you can refer to the provided sources. 10 | 11 | If you would like to learn more about this, You should review this . 12 | 13 | You could also use XML for API's and XML was used for this purpose before JSON came along, however, JSON is generally faster and lighter than XML, as it has a smaller size and a simpler structure. It does not have any redundant or unnecessary elements, making it an efficient choice for data interchange in web applications. JSON is also easy to write and understand, as it uses a human-readable format of key-value pairs and arrays. It does not require any special tags, attributes, or schemas, unlike XML. Additionally, JSON supports common data types, such as strings, numbers, booleans, and nulls, and can be used with various libraries and tools that provide functions for parsing, validating, manipulating, and transforming JSON data 14 | 15 | Some benefits of using JSON for building APIs include: 16 | 17 | Simplicity and Readability: JSON is easy to write and understand, as it uses a human-readable format of key-value pairs and arrays. 18 | Efficiency: JSON is generally faster and lighter than XML, as it has a smaller size and a simpler structure. 19 | Compatibility: JSON can be used with various libraries and tools that provide functions for parsing, validating, manipulating, and transforming JSON data 20 | -------------------------------------------------------------------------------- /Apache/configure-apache2.md: -------------------------------------------------------------------------------- 1 | # Configuring Apache2 2 | 3 | ## Virtual Hosts 4 | 5 | An Apache2 virtual host is a powerful feature that allows you to host multiple websites (or applications) on a single server using a single Apache2 instance. 
It essentially tricks your server into thinking it's serving content from multiple machines, even though it's all on the same physical hardware. 6 | 7 | When a user accesses a website, their browser sends a request specifying the domain name (e.g., `www.example.com`). Apache2 uses virtual hosts to determine which website's content to serve based on the requested domain name. 8 | 9 | - /etc/apache2/sites-available/*.conf: These files define configurations for individual websites or applications hosted on your server. The naming convention is typically `*.conf`, and they reside in the sites-available directory. There can be multiple virtual host files depending on how many websites you plan to host. 10 | - /etc/apache2/sites-enabled/*.conf: This directory contains symlinks to the virtual host configuration files from sites-available that are currently active. 11 | 12 | There are some module configuration files. 13 | 14 | - /etc/apache2/mods-available/*.load: These files with the .load extension enable specific Apache2 modules. Debian manages Apache2 modules by keeping track of enabled and available modules in separate directories. 15 | 16 | - /etc/apache2/mods-enabled/*.conf: These files contain configurations for the enabled modules and reside in the mods-enabled directory. 17 | 18 | Additional configuration files may be located in the /etc/apache2/conf-available/ directory. These files can contain snippets for various functionalities and are enabled by creating symlinks in the conf-enabled directory. 19 | 20 | You really shouldn't directly modify the files in the sites-available and mods-available directories unless you know your way around Apache2. 21 | 22 | Instead, use the a2ensite and a2enmod commands to enable them by creating symlinks in the corresponding enabled directories. 23 | 24 | The Apache2 package provides utilities like a2ensite and a2enmod to manage these configuration files easily. 
25 | 26 | For further details on configuring Apache2 virtual hosts and modules, refer to the Apache documentation or search online resources specific to Debian 10 Buster. 27 | 28 | In Apache2, you need to have an apache2.conf in /etc/apache2, This is the core configuration file that holds the overall settings for Apache2. It includes other configuration files and defines the global behavior of the web server. 29 | 30 | Apache2 generally requires the use of systemd, which is quite sad. 31 | 32 | If you use SysVinit, in order to check status, restart, etc. you need to issue 33 | 34 | `sudo /etc/init.d/apache2 restart` 35 | 36 | `sudo /etc/init.d/apache2 status` 37 | 38 | 39 | ## flags and options used 40 | 41 | Usage: apache2 [-D name] [-d directory] [-f file] 42 | [-C "directive"] [-c "directive"] 43 | [-k start|restart|graceful|graceful-stop|stop] 44 | [-v] [-V] [-h] [-l] [-L] [-t] [-T] [-S] [-X] 45 | Options: 46 | -D name : define a name for use in directives 47 | -d directory : specify an alternate initial ServerRoot 48 | -f file : specify an alternate ServerConfigFile 49 | -C "directive" : process directive before reading config files 50 | -c "directive" : process directive after reading config files 51 | -e level : show startup errors of level (see LogLevel) 52 | -E file : log startup errors to file 53 | -v : show version number 54 | -V : show compile settings 55 | -h : list available command line options (this page) 56 | -l : list compiled in modules 57 | -L : list available configuration directives 58 | -t -D DUMP_VHOSTS : show parsed vhost settings 59 | -t -D DUMP_RUN_CFG : show parsed run settings 60 | -S : a synonym for -t -D DUMP_VHOSTS -D DUMP_RUN_CFG 61 | -t -D DUMP_MODULES : show all loaded modules 62 | -M : a synonym for -t -D DUMP_MODULES 63 | -t -D DUMP_INCLUDES: show all included configuration files 64 | -t : run syntax check for config files 65 | -T : start without DocumentRoot(s) check 66 | -X : debug mode (only one worker, do not detach) 67 | 68 
| ## Logging in Apache2 69 | 70 | Apache2 logs go into /var/log/apache2 71 | 72 | The /var/log/apache2 needs to have the right owner and permissions. 73 | 74 | check to see if it is 75 | 76 | `ls -ld /var/log/apache2/` 77 | 78 | drwxrwxr-x 2 www-data www-data 4096 Apr 16 14:27 /var/log/apache2/ 79 | 80 | if it is not, just issue this command 81 | 82 | `sudo chown www-data:www-data /var/log/apache2/` 83 | 84 | also change the perms using 85 | 86 | `sudo chmod g+rw /var/log/apache2/` 87 | 88 | Logs may not work if you tail them with 89 | 90 | `sudo tail /var/log/apache2/error_log` 91 | 92 | if that does not work you can 93 | 94 | `sudo cat /var/log/apache2/error.log` 95 | 96 | The logs it should create are: 97 | 98 | access.log 99 | error.log 100 | other_vhosts_access.log 101 | 102 | ## Specifying Log files 103 | 104 | Specifying Log Files: 105 | 106 | The CustomLog and ErrorLog directives specify the location and format for access and error logs, respectively. You can use them in apache2.conf for global settings or within virtual host configuration files. 107 | 108 | Here's an example of configuring a custom access log for a virtual host: 109 | 110 | 111 | ServerName yourdomain.com 112 | DocumentRoot /var/www/html/yourdomain.com 113 | CustomLog /var/log/apache2/yourdomain_access.log custom_format 114 | ErrorLog /var/log/apache2/yourdomain_error.log 115 | 116 | 117 | ## Customizing the log format 118 | 119 | The LogFormat directive in apache2.conf defines the format for log entries. By default, Apache2 uses the "Common Log Format" (CLF), but you can create custom formats to capture specific data points. 120 | 121 | Here's an example of modifying the LogFormat directive 122 | 123 | LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" custom_format 124 | 125 | This format captures additional information like referrer URL and user agent compared to the default CLF. 
126 | 127 | ## Where the code for your site goes 128 | 129 | Your website or webapp code goes in: 130 | 131 | /var/www/html 132 | /var/www 133 | /public_html 134 | /usr/share (for WebApps) 135 | 136 | navigate your favorite browser to localhost: and you should see the Apache2 template page 137 | -------------------------------------------------------------------------------- /Backend/connect-to-mongodb-database.md: -------------------------------------------------------------------------------- 1 | # MongoDB Database connection 2 | 3 | Here is how to connect to a MongoDB database. 4 | 5 | I use MongoDB on the command line, not MongoDB Atlas or MongoDB Compass. 6 | 7 | The MongoDB localhost port is: 27017 8 | 9 | To get MongoDB the import is `const mongoose = require('mongoose');` 10 | 11 | In NodeJS and ExpressJS: 12 | 13 | ```js 14 | const mongoose = require('mongoose'); 15 | const mongoURI = 'mongodb://localhost:27017/codebooker'; 16 | 17 | mongoose.connect(mongoURI, { 18 | useNewUrlParser: true, 19 | useUnifiedTopology: true, 20 | useCreateIndex: true, 21 | useFindAndModify: false 22 | }).then(() => { 23 | console.log('Connected to MongoDB'); 24 | }).catch((err) => { 25 | console.log('Failed to connect to MongoDB', err); 26 | }); 27 | ``` 28 | 29 | Here are some of the MongoDB show commands: 30 | 31 | - show dbs: This command is used to display a list of all available databases. 32 | - show collections: This command is used to display a list of all collections in the current database. 33 | - show users: This command is used to display a list of all users for the current database. 34 | - show roles: This command is used to display a list of all roles for the current database. 35 | - show profile: This command is used to display system.profile information. 36 | - show logs: This command is used to display recent log messages. 
37 | - show databases: This command is used to display the current database being used2 38 | 39 | Finding information in collections is basically 40 | 41 | db.collectioname.findOne() 42 | -------------------------------------------------------------------------------- /Backend/create-a-basic-express-server.md: -------------------------------------------------------------------------------- 1 | # Express Server 2 | 3 | This is the code that would create a basic ExpressJS server, which connects to a MongoDB database 4 | 5 | ```js 6 | const express = require('express'); 7 | const mongoose = require('mongoose'); 8 | const cors = require('cors'); 9 | 10 | const app = express(); 11 | 12 | // Connect to MongoDB 13 | mongoose.connect('mongodb://localhost/my-database', { 14 | useNewUrlParser: true, 15 | useUnifiedTopology: true, 16 | }); 17 | 18 | // Define routes and middleware 19 | app.use(cors()); 20 | app.use(express.json()); 21 | app.use('/api/users', require('./routes/users')); 22 | 23 | // Start the server 24 | const PORT = process.env.PORT || 5000; 25 | app.listen(PORT, () => console.log(`Server started on port ${PORT}`)); 26 | ``` 27 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/compile-commands.md: -------------------------------------------------------------------------------- 1 | # CMake Compile Commands 2 | 3 | There is a file called `compile_commands.json` 4 | 5 | `compile_commands.json` is a JSON-formatted file that contains structured data about every compilation unit in a project. It specifies how to replay single compilations independently of the build system 6 | 7 | The file consists of a JSON array of "command objects" where each command object specifies one way a translation unit is built. 
8 | 9 | The compile_commands.json file is used by various tools, such as IDEs and static analysis tools, to obtain information about the build process, such as include paths and compilation flags, that is necessary for their operation 10 | 11 | The file is usually generated by build systems, such as CMake, and put at the top of the build directory 12 | 13 | The CMAKE_EXPORT_COMPILE_COMMANDS flag can be used with CMake to generate the compile_commands.json file, just set it to `ON` like 14 | 15 | In your `CMakeLists.txt` file you can generate this file if you include this: 16 | 17 | `set(CMAKE_EXPORT_COMPILE_COMMANDS ON)` 18 | 19 | It is normally a generated file by cmake when you run a cmake build and will normally look like this: 20 | 21 | ```json 22 | [ 23 | { 24 | "directory": "/home/user/project", 25 | "command": "gcc -Iinclude -c src/file1.c -o obj/file1.o", 26 | "file": "src/file1.c" 27 | }, 28 | ] 29 | ``` 30 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/cpp-program-structure.md: -------------------------------------------------------------------------------- 1 | # Structuring a C++ program 2 | 3 | There are indeed many possibilities of creating and structuring the files of a program with C++. In this text `folder` and `directcory` will be interchangeable and used as meaning the same thing, but will use the word `directory` to mean either one. 4 | 5 | First of all, the structure of the program you've created should be maintainable by yourself and anyone involved in the project. 6 | 7 | Why? It makes sense to not make so much work of yourself or anyone else, even if no one else may look at the code or the structure of your program and not just because it will save time. 8 | 9 | There are some traditional C and C++ program structure conventions that will make this a bit easier for you and anyone else involved. 
10 | 11 | Here is a guide to creating a maintainable project in C++: 12 | 13 | Create your project directory. 14 | 15 | In that directory, create a directory called `src`. 16 | 17 | Also in that directory, create a directory called `include`. All of your header files with the extension of `.h` will be placed. You also might choose to put the `include` directory inside the `src`. There is no distinct reasoning behind this, however this decision can be left up to the developer to choose which one. We will talk more about this in a bit. 18 | 19 | In the main directory, create a main.cpp file. This will be the entrypoint to your program, and it will have a int main(); function. This entrypoint should stay in the root directory of the project. 20 | 21 | From here you can and should try and modularize your program and its code. 22 | 23 | This refers to the practice of dividing code into smaller, independent, and cohesive units that perform specific tasks or functions. These units, often called modules, components, or classes, allow for better organization, reusability, and maintainability of the code. Modular programming emphasizes separating the functionality of a program into independent, interchangeable modules, with each module containing everything necessary to execute only one aspect of the desired functionality. 24 | 25 | When it comes to C++, modular programming involves dividing a computer program into separate sub-programs or modules, each containing the necessary code to execute a specific aspect of the program's functionality. This approach allows for better management of large programs, increased maintainability, readability of the code, and ease of making changes in the future or correcting errors 26 | 27 | Modularizing a C++ project's code offers several benefits. It makes the code easier to read, understand, and maintain by separating it into functions or modules, with each dealing with a specific aspect of the overall functionality. 
Dividing the code into smaller, independent modules is a good practice for maintaining code readability and reusability. This can be done by separating functions, classes, and data structures into different files and organizing them logically. 28 | 29 | This approach also reduces the chances of conflicts when multiple developers are working on the same codebase, leading to improved collaboration and productivity. Additionally, modularization can enhance code reusability, which is beneficial for future projects. However, it's important to note that excessive modularization can lead to code that is overly complex and difficult to manage, so at minimum try to be consistent. 30 | 31 | The program should be subdivided into separate sub-programs or modules, with clear limitations and communication among them. 32 | 33 | In C and C++, this can be achieved by dividing the code into smaller modules called functions or procedures, each handling a specific responsibility. 34 | 35 | ## Modularization 36 | 37 | Modularization in a C++ project is important for several reasons: 38 | 39 | Easier Maintenance: By breaking down a large software program into smaller pieces, it becomes easier to develop, maintain, and understand the code. Each module can be developed and tested independently, reducing the complexity of the overall program 40 | 41 | Improved Readability: Modular programming makes your code easier to read as it separates it into functions that each deal with one aspect of the overall functionality. It can make your files a lot smaller and easier to understand, compared to monolithic code 42 | 43 | Increased Reusability: With modular programming, the same code or function can be used in multiple places. 
Instead of copying and pasting the code, you can call it from whatever module or library it's in, reducing code duplication and potential for error 44 | 45 | Faster Debugging and Updates: If there's a bug in the code or you need to update a specific function, you only have to fix it in one module, and everything that uses it gets updated right away. This reduces the risk of problems caused by slightly different implementations of the same functionality in different parts of your code 46 | 47 | Easier Collaboration: Modular programming is essential where multiple teams need to work on different components of a program. When you have multiple developers working on the same piece of code, it usually results in conflicts and issues which can slow down the team. Splitting the code between more functions, files, and/or repositories, means you can decrease the chances of conflicts happening 48 | 49 | Better Project Control: Modularization allows you to better control the program. Program control functions are used to subdivide and control the program. These functions are unique to the program being written 50 | 51 | Task Efficiency: Specific task functions are designed to be used with several programs. These functions perform a specific task and thus are usable in many different programs because the other programs also need to do the specific task. This increases the efficiency of the program 52 | 53 | However, it's important to note that while modularization can greatly improve the manageability and efficiency of a project, it can also add complexity if not handled properly. For example, when a project is growing, it can add difficulty in finding specific implementations 54 | 55 | Therefore, it's crucial to find a balance and structure the modules in a way that best suits the project's needs. 
56 | 57 | To modularize header files versus implementation files in C or C++, the general practice is to declare the functions, classes, and data structures in the header files, while the implementation details are defined in the implementation files (typically with a .cpp extension for 58 | C++). This separation helps in organizing the code and allows for better reusability and maintainability. However, for small, simple functions or when using templates, the implementation might be included in the header file. 59 | 60 | This separation helps in organizing the code and allows for better reusability and maintainability. However, there are cases where the implementation is included in the header file, such as for small, simple functions or when using templates. The decision on how to modularize depends on factors such as code complexity, project size, and performance considerations. 61 | 62 | The decision to use a /src/ and /src/include/ directory versus a /include and a separate /src/ directory can depend on various factors. One advantage of the former is that it reduces the maintenance overhead for clients of the library, as they only need to be aware of one include directory, which reduces the risk of header name collisions 63 | 64 | It also benefits implementors, as they can change the layout of their src directory without worrying about breaking library clients. 65 | 66 | Additionally, it may have a small influence on build times, as having a smaller size folder with includes could potentially make the build process slightly faster. 67 | 68 | On the other hand, the decision may also depend on the size of the project. For smaller projects, keeping the files in one directory tends to be more convenient, while for larger applications with hundreds or thousands of files, separating them into different directories becomes more practical. 
69 | 70 | The decision to use a /src/ and /src/include/ directory versus a /include and a separate /src/ directory can have both advantages and disadvantages. 71 | 72 | Disadvantages of using a /src/ and /src/include/ directory: 73 | 74 | - Complexity: It can add complexity to the project structure, especially for smaller projects 75 | 76 | - Build System Configuration: It may require additional configuration in the build system to specify the include paths 77 | 78 | - Verbose Include Paths: It can lead to verbose include paths, especially when using fine-grained modularization 79 | 80 | How it can improve code organization: 81 | 82 | - Reduces Maintenance Overhead: It reduces the maintenance overhead for clients of the library, as they only need to be aware of one include directory, reducing the risk of header name collisions 83 | 84 | - Easier Reorganization: It makes it easier to reorganize the layout of the src directory without affecting library clients 85 | 86 | - Influence on Build Times: It may have a small influence on build times, as having a smaller size folder with includes could potentially make the build process slightly faster 87 | 88 | In the end, the choice between the two approaches is often a matter of preference and depends on the specific needs of the project. Both methods are valid, and it ultimately depends on the coding style and organizational preferences of the development team. 89 | 90 | If your project will have a bunch of class files, you will want to keep this in a practical and maintainable fashion, because many projects have a lot of files with: 91 | 92 | ```cpp 93 | // other C++ code here 94 | 95 | class MyClass { 96 | 97 | }; 98 | ``` 99 | 100 | The best practices for class files can be considered: 101 | 102 | - Separation of Declaration and Definition: It is a common practice to separate the declaration of a class (interface) in a header file (.h or .hpp) and the definition (implementation) in a corresponding source file (.cpp). 
103 | 104 | - Namespace and Directory Structure: Organize the classes into namespaces and use a clear directory structure to reflect the namespaces. This helps in avoiding naming conflicts and makes the codebase more maintainable. 105 | 106 | - Dependency Management: Use forward declarations to minimize dependencies between header files. This can help reduce compilation times and minimize the impact of changes. 107 | 108 | - Use of Modules: With the introduction of C++20, modules provide a new way to organize code and manage dependencies. Consider using modules for better organization and encapsulation of code. 109 | 110 | - Consistent Coding Style: Adopt a consistent coding style and adhere to best practices such as naming conventions, code formatting, and documentation. clang-format and clang-tidy will help out here immensely and we will cover this in another topic file or section. 111 | 112 | - Unit Testing: Implement unit tests for the classes to ensure their correctness and maintainability over time. 113 | 114 | For class files, it's a common practice to separate the declaration (in a header file) and the definition (in a source file). Organizing classes into namespaces and using a clear directory structure to reflect the namespaces helps in avoiding naming conflicts and makes the codebase more maintainable. Tools like clang-format and clang-tidy can help maintain a consistent coding style. 115 | 116 | This will help you or anyone else working with your project maintain your project multiple class files in a practical and maintainable fashion, it is important to follow best practices such as separating declaration and definition, organizing code into namespaces, managing dependencies, adopting new language features like modules, maintaining a consistent coding style, and implementing unit tests. 117 | 118 | If you are working with someone elses C++ project, do your best to try and follow the code style they have used. 
It is usually reasonably easy to spot how the project is laid out. 119 | 120 | If your C++ program will be compiled to run on different platforms like Windows, Linux, and Mac, it's important to write platform-independent code whenever possible, or use preprocessor directives to handle platform-specific code. 121 | 122 | Yes, there are tools that can help maintain consistency in a C++ program structure. One such tool is ReSharper C++, which provides a configurable code formatter, naming style settings, and sorting of #include directives to help maintain a consistent code style throughout the codebase 123 | 124 | Additionally, using automatic static code analysis tools and following coding standards can also improve consistency and code quality in C++ programs 125 | 126 | It's important to use meaningful names, maintain a consistent coding style, and follow best practices to ensure code consistency 127 | 128 | In C++, the decision to separate the implementation from the declaration of a class is based on several factors. Here are some guidelines: 129 | 130 | Class Size and Complexity: If a class has a small number of non-trivial member functions, it may be preferable to make the member functions inline and place them beneath the class definition in the header file 131 | 132 | Header-Only Approach: In modern C++, classes or libraries are increasingly being distributed as "header-only," meaning all of the code for the class is placed in a header file. 
However, separating the declaration and implementation is a prerequisite to doing so 133 | 134 | Reusability and Compilation Time: Separating the class declaration into a header file and the implementation into a .cpp file facilitates reusability and can minimize recompilation times when an implementation detail changes 135 | 136 | Best Practice: It is a common practice to store class declarations in a separate file, called a header file, and store the member function definitions in a separate .cpp file with the same name as the class 137 | 138 | By following these guidelines, you can make an informed decision on how to structure the implementation and declaration of a C++ class to maintain code organization and reusability. 139 | 140 | Platform-specific code in a C++ program may be needed to handle differences in operating systems, hardware, or external libraries. For example, when creating a game engine that supports multiple operating systems and graphics APIs, the code for creating a window or initializing the renderer will be platform-specific 141 | 142 | One common approach to handling platform-specific code is to use conditional compilation, such as #ifdef WIN32 for Windows-specific code and #ifdef linux for Linux-specific code 143 | 144 | Another approach is to encapsulate platform-specific code in separate modules and use abstraction layers and design patterns to manage the differences 145 | 146 | When writing effective unit tests for C++ classes, several best practices should be followed: 147 | 148 | Keep tests small and focused: Each unit test should focus on testing a single unit of functionality in isolation, making them easier to maintain and identify the source of any failures 149 | 150 | Write tests before code: This practice, known as Test-Driven Development (TDD), helps clarify the desired functionality and makes it easier to write testable code 151 | 152 | Use descriptive test names: Clearly describe what is being tested in the name of the test 
to improve the readability of the tests and make it easier to understand their purpose 153 | 154 | Test one scenario per test: Each unit test should cover a single code path or scenario, making it easier to identify the cause of a test failure 155 | 156 | Automate unit tests: Unit tests should be automated to ensure they are run consistently and frequently as part of the development process 157 | 158 | By following these best practices, developers can create effective unit tests for C++ classes, improving the quality and reliability of their code. 159 | 160 | Additionally, in the context of cross-platform mobile development, Flutter allows the creation of platform-specific code using platform channels, enabling the use of different languages (e.g., Kotlin, Java, Swift, Objective-C, C++) for specific platforms 161 | 162 | ## Pre-processor directives 163 | 164 | Preprocessor directives are instructions provided to the C++ preprocessor before the actual compilation of code begins. They begin with a `#` symbol and are used to make source programs easier to change and compile in different execution environments. The preprocessor directives can include, but are not limited to, the following: 165 | 166 | 1. **#include**: This directive is used to include a file in the source code program. It can be a standard header file (e.g., `#include `) or a user-defined header file (e.g., `#include "myHeaderFile.h"`). The preprocessor replaces the `#include` directive with the entire content of the specified header file [Source 0](https://cplusplus.com/doc/tutorial/preprocessor/), [Source 1](https://www.geeksforgeeks.org/cc-preprocessors/). 167 | 168 | 2. **#define**: This directive is used to define a macro, which is a rule or pattern that specifies how certain input sequences should be mapped to replacements. For example, you could define a macro that stands for a constant value (e.g., `#define PI 3.14159`). 
The preprocessor replaces all instances of the macro in the code with its defined value before the code is compiled [Source 0](https://cplusplus.com/doc/tutorial/preprocessor/), [Source 1](https://www.geeksforgeeks.org/cc-preprocessors/). 169 | 170 | 3. **#undef**: This directive is used to undefine a macro. If you have defined a macro using `#define` and you no longer need it, you can use `#undef` to remove its definition [Source 1](https://www.geeksforgeeks.org/cc-preprocessors/). 171 | 172 | 4. **#ifdef, #ifndef, #if, #else, #elif, #endif**: These conditional directives are used to compile a specific portion of the program or to skip compilation of a specific part based on certain conditions. For example, `#ifdef` checks if a certain macro is defined, and if so, the code within `#ifdef` and `#endif` is compiled. Similarly, `#ifndef` checks if a certain macro is not defined [Source 1](https://www.geeksforgeeks.org/cc-preprocessors/), [Source 4](https://codeforwin.org/c-programming/c-preprocessor-directives-include-define-undef-conditional-directives). 173 | 174 | 5. **#error**: This directive is used to generate a user-defined error message during compilation. If the preprocessor encounters an `#error` directive, it will report the error and stop the compilation process [Source 0](https://cplusplus.com/doc/tutorial/preprocessor/). 175 | 176 | 6. **#pragma**: This directive is used to offer machine- or operating system-specific compiler features, while maintaining overall compatibility with other compilers. They vary from compiler to compiler [Source 1](https://www.geeksforgeeks.org/cc-preprocessors/). 
177 | 178 | Here are some examples of preprocessor directives: 179 | 180 | #include // Include standard header file 181 | 182 | #define PI 3.14159 // Define a macro 183 | 184 | #ifdef PI // Check if PI is defined 185 | std::cout << "PI is defined as " << PI << "\n"; 186 | #else 187 | std::cout << "PI is not defined\n"; 188 | #endif 189 | 190 | #undef PI // Undefine a macro 191 | 192 | #ifndef PI // Check if PI is not defined 193 | std::cout << "PI is not defined\n"; 194 | #else 195 | std::cout << "PI is defined as " << PI << "\n"; 196 | #endif 197 | 198 | In this example, the `#include` directive includes the `iostream` header file. The `#define` directive defines a macro named `PI` with a value of `3.14159`. The `#ifdef` directive checks if `PI` is defined, and if so, it outputs a message. The `#undef` directive undefines `PI`, and the `#ifndef` directive checks if `PI` is not defined, and if so, it outputs a message. 199 | 200 | Preprocessor directives are processed before the actual compilation of code begins. They are used to make source programs easier to change and compile in different execution environments [Source 2](https://learn.microsoft.com/en-us/cpp/preprocessor/preprocessor-directives?view=msvc-170), [Source 3](https://stackoverflow.com/questions/4757107/preprocessor-directives). -------------------------------------------------------------------------------- /C_and_Cpp_Programming/cpp-vscode-extensions.md: -------------------------------------------------------------------------------- 1 | # C and C++ VSCode Extensions 2 | 3 | There are many C and C++ VSCode extensions to install. Not all of them are good or helpful. 
4 | 5 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/create-a-makefile.md: -------------------------------------------------------------------------------- 1 | # Makefile 2 | 3 | A Makefile is a file that contains a set of instructions used by the `make` utility to build and manage a C or C++ project. It specifies the dependencies between the source files and the commands needed to build the executable. The make utility reads the Makefile and determines which files need to be recompiled based on the dependencies and the timestamps of the files. It then executes the necessary commands to build the executable. 4 | 5 | A Makefile typically includes the following: 6 | 7 | - A list of source files 8 | - Compiler flags and options 9 | - Rules for building object files and the executable 10 | - Dependencies between the source files and the object files 11 | 12 | Makefiles are used to simplify the build process and automate the compilation of large projects. They help to avoid the need to manually compile each source file and link them together. Instead, the make utility reads the Makefile and determines which files need to be recompiled based on the dependencies and the timestamps of the files. Makefiles are commonly used in Unix-based systems, but they can also be used in other operating systems. They are especially useful for large projects with many source files and dependencies. 13 | 14 | To use a Makefile, you need to run the make utility from the command line. The make utility reads the Makefile and determines which files need to be recompiled based on the dependencies and the timestamps of the files. It then executes the necessary commands to build the executable. Overall, Makefiles are an important tool for managing and building C or C++ projects, especially for large projects with many source files and dependencies. 
15 | 16 | Here is a sample Makefile for a C++ project 17 | 18 | ```Makefile 19 | # Compiler 20 | CXX = g++ 21 | 22 | # Compiler flags 23 | CXXFLAGS = -std=c++11 -Wall -Wextra -pedantic 24 | 25 | # Source files 26 | SRCS = main.cpp foo.cpp bar.cpp 27 | 28 | # Object files 29 | OBJS = $(SRCS:.cpp=.o) 30 | 31 | # Executable 32 | EXEC = myprogram 33 | 34 | # Default target 35 | all: $(EXEC) 36 | 37 | # Link object files into executable 38 | $(EXEC): $(OBJS) 39 | $(CXX) $(CXXFLAGS) $(OBJS) -o $(EXEC) 40 | 41 | # Compile source files into object files 42 | %.o: %.cpp 43 | $(CXX) $(CXXFLAGS) -c $< -o $@ 44 | 45 | # Clean up object files and executable 46 | clean: 47 | rm -f $(OBJS) $(EXEC) 48 | ``` 49 | 50 | This file will get generated by CMakeLists.txt if you use CMake to compile and build your project. 51 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/create-cmakelists.md: -------------------------------------------------------------------------------- 1 | # Create a CMakeLists.txt file 2 | 3 | You can do this manually if you wish to do so. This usually is in the root directory/folder of your project. 4 | 5 | The first line of the file must be: 6 | 7 | `cmake_minimum_required(VERSION 3.16)` 8 | 9 | The next line must be `project(my_project)` 10 | 11 | ```cmake 12 | cmake_minimum_required(VERSION 3.10) 13 | 14 | project(myproject) 15 | 16 | set(CMAKE_CXX_STANDARD 17) 17 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 18 | 19 | add_executable(myprogram main.cpp foo.cpp bar.cpp) 20 | ``` 21 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/debugging-c-cpp.md: -------------------------------------------------------------------------------- 1 | # Debugging C and C++ code 2 | 3 | Debugging C and C++ code is an essential and some would say critical skill for any C/C++programmer. 
4 | 5 | Here's a breakdown of the key techniques: 6 | 7 | Understanding the Problem: 8 | 9 | Unexpected Output: If the program's output significantly differs from what you expect, there's a bug. 10 | Crashing Program: A program that crashes abruptly indicates a serious issue. 11 | 12 | Using Print Statements: 13 | 14 | Insert printf statements (C) or cout statements (C++) at strategic points in your code. 15 | Print the values of variables to track their changes during execution. 16 | This helps verify if variables hold the intended values at each step. 17 | 18 | Debuggers: 19 | 20 | GDB (GNU Debugger): A powerful command-line debugger for Linux and Unix-based systems. 21 | LLDB: A modern debugger often used on macOS. 22 | IDEs (Integrated Development Environments): Many IDEs like Visual Studio and Code::Blocks have built-in debuggers with graphical interfaces. 23 | 24 | Methods used in debugging 25 | 26 | - Set breakpoints at specific lines to pause program execution and examine the state. 27 | - Execute code one line at a time or step into functions to see how they behave. 28 | - View the values of variables at any point during execution. 29 | - See the sequence of function calls that led to the current execution point. This is the call stack. 30 | 31 | Debugging Tips: 32 | 33 | My suggestion for beginners in C/C++ to begin by isolating the problematic section of code. 34 | Add comments to explain your thought process and debugging steps. 35 | Explain your code to an imaginary listener (like a rubber duck) to identify flaws. This is the rubber duck method 36 | Write unit tests to verify the functionality of smaller code blocks. Unit testing is a more advanced topic. I would not worry about this as a beginner until you have had more experience. 
However, running and testing your program(s) and observing how they behave is still good practice 37 | As a beginner, most of your bugs will be syntactical or missing special symbols or the symbols in the wrong place(s) 38 | 39 | When faced with a bug, a good debugging strategy is to: 40 | 41 | - Reproduce the issue reliably 42 | - Use debugger tools to understand what's happening 43 | - Analyze the root cause of the problem 44 | - Fix the bug and test the solution 45 | 46 | 47 | 48 | By effectively combining these techniques, you'll be well-equipped to tackle debugging challenges in your C and C++ project 49 | -------------------------------------------------------------------------------- /C_and_Cpp_Programming/intellisense-configuration.md: -------------------------------------------------------------------------------- 1 | # IntelliSense Configuration 2 | 3 | Configuration for C/C++ Extension with the extension id of 4 | `ms-vscode.cpptools` is done in Visual Studio Code with the `c_cpp_properties.json` file. 5 | 6 | The `c_cpp_properties.json` file is usually kept in the workspace folder of a C/C++ project in Visual Studio Code. This is used to configure the `IntelliSense` in Visual Studio Code 7 | 8 | Alternatively, you can create the file manually and configure it to your needs. If you want to set a global `c_cpp_properties.json` file for the entire workspace, you can create a single file and place it in the root of the workspace. 9 | 10 | If the file is not present in the workspace folder, you can create it manually by running the `C/C++: Edit Configurations (UI)` command from the Command Palette (Ctrl+Shift+P) and saving the configuration. 11 | 12 | The `c_cpp_properties.json` file can also be kept in the `.vscode` folder of a project. 
13 | 14 | It generally looks like this in Linux: 15 | 16 | ```json 17 | { 18 | "env": "", 19 | "configurations": [ 20 | { 21 | "name": "Linux", 22 | "includePath": [ 23 | "${workspaceFolder}/**", 24 | "/usr/include", 25 | "${workspaceRoot}", 26 | ], 27 | "defines": [], 28 | "compilerPath": "/usr/bin/g++-8", 29 | "cStandard": "c11", 30 | "cppStandard": "c++17", 31 | "compilerArgs": "", 32 | "browse": { 33 | "path": [ 34 | "${workspaceFolder}/**", 35 | "/usr/local/include", 36 | "/usr/include/x86_64-linux-gnu", 37 | "/usr/include" 38 | ] 39 | }, 40 | "compileCommands": "", 41 | "forcedInclude": [], 42 | "limitSymbolsToIncludedHeaders": true, 43 | "intelliSenseMode": "linux-gcc-x86", 44 | "configurationProvider": "ms-vscode.cmake-tools", 45 | "databaseFilename": "${workspaceFolder}/.vscode/browse.vc.db", 46 | "enableConfigurationSquiggles": "", 47 | "mergeConfigurations": "", 48 | }, 49 | ], 50 | "version": 4 51 | } 52 | ``` 53 | 54 | I don't use this in Mac or Windows so this would be the best configuration for using the GNU compilers, debuggers and the linker. 55 | 56 | The default value for the `intelliSenseMode` property in the `c_cpp_properties.json` file is `default`. 57 | 58 | This means that the extension will try to automatically detect the correct IntelliSense engine to use based on your project configuration. If the extension is unable to detect the correct engine, it will fall back to using the Clang C++ compiler for x64 architectures. 59 | 60 | Well, anyhow.. the attributes this configuration file uses are typically under the header "configuration". 61 | 62 | 63 | name: -------------------------------------------------------------------------------- /C_and_Cpp_Programming/spellchecking-c-code.md: -------------------------------------------------------------------------------- 1 | # Spellchecking your code 2 | 3 | Especially for C and C++, you should try using cSpell. 
4 | 5 | Install cSpell by `sudo apt install cspell` 6 | 7 | In the project you are working on, `cspell init` will create the `cspell.json` configuration file. 8 | 9 | For a system/user wide configuration file, use `~/.cspell.json` and/or `~/.cspellignore` 10 | 11 | The cspell.json will look like 12 | 13 | { 14 | "version": 1, 15 | "language": "en-US", 16 | "words": ["customword1", "customword2"], 17 | "ignorePatterns": ["node_modules/", "public/"] 18 | } 19 | 20 | ## VSCode Extension 21 | 22 | Go to extensions and search for `Code Spell Checker` by `streetsidesoftware`. This extension can use cspell.json 23 | 24 | ## using cSpell 25 | 26 | To check spelling, do a `cspell` in your console/terminal/shell/tty/bash or whatever shell you use. 27 | 28 | The github is at 29 | -------------------------------------------------------------------------------- /Databases/choosing-a-database.md: -------------------------------------------------------------------------------- 1 | # Choosing a Database 2 | 3 | By following these steps, you can make a well-informed decision when choosing a database for your project, consider the following approach based on: 4 | 5 | Understand Your Data's Characteristics: Assess your project's unique requirements, such as the nature of your data, expected volume, the kind of transactions you'll be handling, and the types of queries you'll need to perform 6 | 7 | Define Your Product: Clearly define your product and the role data will play in it. 
Every software application is unique, so it's essential to take an individualized approach to choosing the right database 8 | 9 | Consider User Load and Advanced Technologies: Evaluate factors such as the number of people using your application simultaneously, the need for data analysis, and integration with other solutions like business intelligence tools 10 | 11 | Research Different Types of Databases: Understand the various types of databases available, such as relational, graph, document-oriented, SQL, and NoSQL, and their suitability for your specific project 12 | 13 | Compare Database Management Systems: Consider factors like cost, ease of use, and features of each database management system to make an informed decision 14 | -------------------------------------------------------------------------------- /Databases/configure-mysql.md: -------------------------------------------------------------------------------- 1 | # Configuring MySQL or MariaDB 2 | 3 | To configure MySQL or MariaDB in Linux, you can use the option files. The default option 4 | file is called my.cnf (or mariadb.cnf) on Unix-like operating systems and my.ini on Windows 5 | 6 | The default location for the option file is /etc/mysql/my.cnf 7 | 8 | However, it's important to note that there is also a directory for additional MySQL config files 9 | which is /etc/my.cnf.d 10 | 11 | You can use both /etc/my.cnf and files in the /etc/my.cnf.d directory to configure MariaDB. 12 | If settings are set in /etc/my.cnf, they will override other configurations placed inside the /etc/my.cnf.d directory 13 | -------------------------------------------------------------------------------- /Databases/learning-sql.md: -------------------------------------------------------------------------------- 1 | # Learning SQL 2 | 3 | The basic SQL syntax includes various commands and statements used to interact with a database. Some of the most important SQL commands are: 4 | 5 | SELECT: Extracts data from a database. 
6 | UPDATE: Updates data in a database. 7 | DELETE: Deletes data from a database. 8 | INSERT INTO: Inserts new data into a database. 9 | CREATE DATABASE: Creates a new database. 10 | ALTER DATABASE: Modifies a database. 11 | CREATE TABLE: Creates a new table. 12 | ALTER TABLE: Modifies a table. 13 | DROP TABLE: Deletes a table. 14 | CREATE INDEX: Creates an index (search key). 15 | DROP INDEX: Deletes an index 16 | 17 | Structured Query Language (SQL) is a standardized programming language used to manage relational databases 18 | and perform various operations on the data in them. 19 | 20 | It was initially created in the 1970s and is regularly used by database administrators, developers and data analysts. 21 | 22 | SQL is used for modifying database table and index structures, adding, updating and deleting rows of data. It is also used for retrieving subsets of information from within relational database management systems. 23 | 24 | SQL commands are syntax that is designed for accessing, modifying, and extracting information from relational databases. 25 | 26 | SQL is an interactive and interpretive language and it does not demand coding skills like other programming languages. 27 | It is known for its easy usability and the ability to carry out varied functions on vast amounts of structured data. 28 | SQL queries are used to request or retrieve data from a database, and SQL statements are valid instructions that 29 | relational database management systems understand 30 | -------------------------------------------------------------------------------- /Docker/dockerfiles.md: -------------------------------------------------------------------------------- 1 | # About Docker 2 | 3 | ## Configuring Docker 4 | 5 | In order to configure Docker, there are several files that will be needed, depending how you want to use Docker. 
6 | 7 | docker-compose.yml 8 | .dockerignore 9 | Dockerfile 10 | devcontainer.json 11 | 12 | ## Creating a `Dockerfile` 13 | 14 | A `Dockerfile` is a text file that contains a set of instructions for building a Docker image. It is used to automate the process of creating a Docker image by specifying the base image, adding files, installing packages, and configuring the environment 15 | 16 | The `Dockerfile` instruction syntax is defined by the specification reference in the `Dockerfile` reference: 17 | 18 | 19 | 20 | Here are the most common types of instructions: 21 | 22 | - **FROM**: Defines a base for your image. 23 | - **RUN**: Executes any commands in a new layer on top of the current image and commits the result. RUN also has a shell form for running commands. 24 | - **WORKDIR**: Sets the working directory for any RUN, CMD, ENTRYPOINT, COPY, and ADD instructions that follow it in the Dockerfile. 25 | - **COPY**: Copies new files or directories from `` and adds them to the filesystem of the container at the path ``. 26 | - `CMD`: Lets you define the default program that is run once you start the container based on this image. 27 | 28 | In addition to the `Dockerfile`, there are other files used in the Docker ecosystem, such as the `docker-compose.yml file`, which is used to define and run multi-container Docker applications 29 | 30 | The name of the `Dockerfile` is commonly `Dockerfile` but it can be named whatever you like. 31 | 32 | To write an efficient `Dockerfile`, it is important to follow some best practices, such as using a minimal base image, consolidating RUN statements, using multi-stage builds, and using explicit and deterministic Docker base image tags 33 | 34 | `Dockerfile` can be used to package software and automate the deployment of software in lightweight containers. 
Docker builds images automatically by reading the instructions from a `Dockerfile`, which is a text document that contains all the commands a user could call on the command line to assemble an image 35 | 36 | To specify the base image in a `Dockerfile`, you can use the FROM instruction followed by the name of the base image. For example, to use the Ubuntu 22.04 base image, you can use the following instruction: 37 | 38 | ```Dockerfile 39 | FROM ubuntu:22.04 40 | ``` 41 | 42 | When specifying dependencies in a Dockerfile, it is important to avoid some common mistakes, such as using the latest tag for the base image, installing unnecessary packages, and not cleaning up after package installation 43 | 44 | Another common command you will use is: 45 | 46 | `EXPOSE`: Specifies the port that the container listens on. 47 | 48 | The EXPOSE instruction in a Dockerfile informs Docker that the container listens on the specified network ports at runtime. 49 | 50 | It does not actually publish the specified ports to the host machine or make them accessible to the outside world. 
Instead, it is a way for the person who built the image to communicate to the person who will run the container, which port the service inside the container will listen to 51 | 52 | To actually publish the ports and make them accessible to the outside world, you need to use the -p or --publish flag when running the container 53 | 54 | For example, to publish port 80 on the container to port 8080 on the host machine, you can use the following command: 55 | 56 | `docker run -p 8080:80 myimage` 57 | 58 | When specifying dependencies in a Dockerfile, it is important to avoid some common mistakes, such as using the latest tag for the base image, installing unnecessary packages, and not cleaning up after package installations 59 | -------------------------------------------------------------------------------- /GitHub/configuring-github.md: -------------------------------------------------------------------------------- 1 | # Configuring GitHub 2 | 3 | This is not a note about configuring Git. This is for configuring GitHub in a project. 4 | 5 | To do this, Create a folder/directory in your project called `.github`. This will house GitHub special files. 6 | 7 | ## GitHub Actions 8 | 9 | Inside the `.github` folder create a folder called actions 10 | 11 | GitHub actions are primarily written in YAML and have the file extension .yml or .yaml 12 | 13 | ## GitHub Workflows 14 | 15 | Inside the .github folder/directory create a folder called `workflows`. 16 | 17 | GitHub workflows are also primarily written in YAML and have the file extension .yml or .yaml, the same as an action would be. 
18 | 19 | ## Configuring Issue Templates 20 | 21 | ## Configuring PR Templates 22 | 23 | ## Other Special GitHub stuff 24 | 25 | -------------------------------------------------------------------------------- /GitHub/create-a-workflow.md: -------------------------------------------------------------------------------- 1 | # GitHub Workflows 2 | 3 | A GitHub workflow is a configurable automated process that runs one or more jobs. Workflows are defined by a YAML file checked into your repository and will run when triggered by one or more events. The basic components of a workflow include one or more events that will trigger the workflow, one or more jobs that will execute on a runner machine and run a series of one or more steps, and each step can either run a script that you define or run an action, which is a reusable extension that can simplify your workflow. 4 | 5 | Workflows are defined using YAML syntax and are stored as separate YAML files in your code repository, in a directory named .github/workflows. You can create a new GitHub workflow by creating a YAML file in your code repository and defining the workflow's components in the YAML file. There are different types of triggers available in GitHub workflows, including events that occur in your workflow's repository, events that occur outside of GitHub and trigger a repository_dispatch event on GitHub, scheduled times, and manual triggers. You can use the on..types keyword to define the type of activity that will trigger a workflow run. Most GitHub events are triggered by more than one type of activity. 6 | 7 | A workflow in GitHub Actions is a configurable automated process that will run one or more jobs. Workflows are defined by a YAML file checked into your repository and will run when triggered by an event in your repository, or they can be triggered manually, or at a defined schedule. On the other hand, a job is a set of steps that execute on the same runner. 
A workflow can have one or more jobs, and each job can run on a different runner. Jobs run in parallel by default, but you can also configure them to run sequentially. You can debug a failed GitHub workflow by checking the logs and status of each step in the workflow. You can also use the if conditional statement to run a specific step only when a certain condition is met, and use the env keyword to set environment variables that can be used in your workflow. Some best practices for writing efficient GitHub workflows include using pre-built actions from the GitHub Marketplace, using caching to speed up your workflow, and using matrix strategies to run jobs with different configurations. It is also important to limit the number of jobs and steps in your workflow to reduce complexity and improve performance. 8 | 9 | To troubleshoot a GitHub workflow that is not triggering, you can check the following: 10 | 11 | Check if the workflow YAML file is in the correct directory and has the correct name. 12 | Check if the workflow YAML file has any syntax errors. 13 | Check if the event that should trigger the workflow is configured correctly in the YAML file. 14 | Check if the workflow is enabled in the repository settings. 15 | 16 | To rerun a failed GitHub workflow, you can go to the Actions tab in your repository, select the failed workflow run, and click on the "Re-run jobs" button. You can also use the gh run rerun command in the GitHub CLI to rerun a failed workflow run. To set up notifications for failed GitHub workflows, you can enable email or web notifications for GitHub Actions in your account settings. You can also choose to receive a notification only when a workflow run has failed. Additionally, you can set up a webhook that triggers when a workflow run completes, which could then notify you of a failure. 
Some best practices for writing efficient GitHub workflows include using pre-built actions from the GitHub Marketplace, using caching to speed up your workflow, and using matrix strategies to run jobs with different configurations. It is also important to limit the number of jobs and steps in your workflow to reduce complexity and improve performance. 17 | -------------------------------------------------------------------------------- /GitHub/github-special-files.md: -------------------------------------------------------------------------------- 1 | # GitHub files 2 | 3 | These are the GitHub special files, 4 | 5 | `README.md` 6 | 7 | This file is an important file necessary on GitHub. This will tell everyone looking at your project on GitHub what your project is all about. 8 | 9 | `LICENSE` 10 | 11 | This special file determines what type of License you release your project under including terms and conditions. 12 | 13 | `CHANGELOG.md` 14 | 15 | This file is important if you plan on doing released versions of your project following SemVer, or other versioning system. 16 | 17 | `SUPPORT.md` 18 | 19 | This file was meant to house text 20 | 21 | `SECURITY.md` 22 | 23 | `CODE_OF_CONDUCT.md` 24 | 25 | This is a Code of Conduct file. 26 | 27 | `CONTRIBUTING.md` 28 | 29 | The CONTRIBUTING.md file was meant 30 | 31 | `CONTRIBUTORS.md` 32 | 33 | `AUTHORS.md` 34 | 35 | `ACKNOWLEDGMENTS.md` 36 | 37 | `CODEOWNERS` 38 | 39 | This is a special file that sets the GitHub project repository up so t 40 | 41 | `ISSUE_TEMPLATE` 42 | 43 | This can be a file or a folder/directory. 
44 | 45 | `PULL_REQUEST_TEMPLATE` 46 | 47 | `CITATION.cff` 48 | 49 | `FUNDING.yml` 50 | -------------------------------------------------------------------------------- /LearningWebDev.odt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gbowne1/ProgrammerNotes/804c2d92f3029c24dec9cb5a3dce31e91a269445/LearningWebDev.odt -------------------------------------------------------------------------------- /Node_JavaScript_Programming/babel-and-webpack.md: -------------------------------------------------------------------------------- 1 | # Webpack and Babel.js 2 | 3 | There are a few things that you should know about when it comes to Babel.js and Webpack 4 | 5 | ## Babel.js 6 | 7 | What is Babel? 8 | 9 | Babel, also known as Babel.js, is a free and open-source JavaScript transcompiler that is primarily used to convert ECMAScript 2015+ code into a backward-compatible version of JavaScript that can be run by current and older browsers or environments 10 | 11 | It allows developers to take advantage of the newest features of the language and to use new JavaScript language features by converting their source code into versions of JavaScript that a web browser can process 12 | 13 | Babel can also be used to compile TypeScript into JavaScript, but in this case you really should just write TypeScript. 
14 | 15 | The toolchain can transform syntax, polyfill features missing in the target environment, perform source code transformations, and more 16 | 17 | Babel has support for the latest version of JavaScript through syntax transformers, allowing developers to use new syntax without waiting for browser support 18 | 19 | It is widely used in the industry and has a large community of contributors and sponsors 20 | 21 | The Babel toolchain includes various tools such as @babel/cli, @babel/core, and @babel/preset-env, which make it easy to use Babel for transforming and polyfilling JavaScript code 22 | 23 | Babel is particularly useful for ensuring that modern JavaScript code can run on older browsers and environments, making it an essential tool for web developers who need to support a wide range of platforms 24 | 25 | You can find more about Babel.js here: 26 | 27 | ## Webpack 28 | 29 | Webpack is a highly extensible and configurable static module bundler for JavaScript applications. It goes through your application from a root entry point, builds a dependency graph, and produces optimized bundles of the combined modules. 
30 | 31 | Webpack's configuration file is a JavaScript file that exports a webpack configuration, which is then processed by webpack based on its defined properties 32 | 33 | It is used to bundle JavaScript files for usage in a browser and can also transform, bundle, or package any resource or asset 34 | 35 | Some of its use cases include separate builds per page, creating a components library as a container, and dynamic dependency graphs in a multi-page application 36 | 37 | Webpack configuration files are written in JavaScript and can be split into smaller files for loaders, plugins, and other options 38 | 39 | The configuration file is a JavaScript object that configures one of Webpack's options 40 | 41 | It is commonly defined in a top-level webpack.config.js file, but it can also be passed as a parameter to Webpack's Node.js API 42 | 43 | If you need to create a configuration for production in a separate file, you can do so by specifying the config file to use 44 | 45 | You can find more about Webpack here: 46 | 47 | There are files genereated by Webpack. One of these is `dist/bundle.js` is a JavaScript file generated by Webpack, a module bundler used to bundle an app's modules and dependencies into a single browser-compatible JavaScript file. In the context of your project structure, dist/bundle.js is likely the output file created when you run Webpack. Webpack is responsible for transforming, bundling, and optimizing JavaScript files for usage in a browser. It does this by reading the entry points specified in your Webpack configuration file (usually webpack.config.js) and generating an output file in the specified output directory (usually dist). Some key aspects of Webpack and its output management include: 48 | 49 | Entry points: These are the files that Webpack starts with when bundling your application. You can specify multiple entry points in your Webpack configuration file. 
50 | 51 | Output: The output directory, where Webpack generates the bundled JavaScript files. You can specify the output directory in your Webpack configuration file. 52 | 53 | Plugins: Plugins are used to extend the functionality of Webpack and its configuration file. For example, the HtmlWebpackPlugin is used to generate an HTML file that includes the bundled JavaScript. 54 | 55 | Development server: Webpack can be configured to use a development server, such as the Webpack Dev Server, which allows you to serve your application in the browser while still in development. 56 | 57 | To ensure that dist/bundle.js is generated correctly, you need to configure your Webpack configuration file properly. This includes specifying the correct entry points, output directory, and any necessary plugins or configurations. 58 | 59 | A developer might choose to use Webpack for several reasons, including: 60 | 61 | Bundling and performance: Webpack allows you to bundle your JavaScript applications, supporting both ESM and CommonJS, and can be extended to support other assets such as images, fonts, and stylesheets. It cares about performance and load times, offering features like async chunk loading and prefetching to deliver the best possible experience for your project and your users. 62 | 63 | Dependency management: Webpack automatically builds and infers your dependency graph based on what is required, making it easier to manage dependencies and avoid issues like missing or outdated code. This is particularly useful for complex front-end applications with many non-code static assets like CSS, images, and fonts. 64 | 65 | Minification and optimization: Webpack provides minification, which means minimizing the code without changing its functionality, removing whitespace, line breaks, and other unnecessary characters. This results in reduced file size and faster load times. 
66 | Hot page reloading: Webpack allows for hot page reloading, which means that your page doesn't need to reload fully when making small changes, speeding up the development process. 67 | 68 | True CSS management: Webpack offers better management of CSS, ensuring that styles are correctly applied and reducing the risk of errors. 69 | CDN cache busting: Webpack automatically changes file names to hashes of the file contents, which can help with cache busting when using CDNs. 70 | Compatibility with other tools: Webpack is compatible with various other tools and plugins, making it a versatile choice for managing your project's assets and build process. 71 | 72 | Webpack is a popular choice for developers due to its bundling and performance capabilities, dependency management, minification and optimization, hot page reloading, true CSS management, CDN cache busting, and compatibility with other tools. -------------------------------------------------------------------------------- /Node_JavaScript_Programming/babel.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gbowne1/ProgrammerNotes/804c2d92f3029c24dec9cb5a3dce31e91a269445/Node_JavaScript_Programming/babel.md -------------------------------------------------------------------------------- /Node_JavaScript_Programming/debugging-nodejs.md: -------------------------------------------------------------------------------- 1 | # Debugging NodeJS 2 | 3 | For VSCode you can start here for documentation: 4 | 5 | 6 | 7 | We typically use console.log for debugging in Node.JS. 8 | 9 | `node debug script.js` 10 | 11 | The official NodeJS debugging is here 12 | 13 | you can also 14 | 15 | `node --inspect your_script.js` 16 | 17 | This starts your application (your_script.js) with debugging enabled on the default port (usually 9229). 
18 | -------------------------------------------------------------------------------- /Node_JavaScript_Programming/make-a-package-json-file.md: -------------------------------------------------------------------------------- 1 | # package.json file 2 | 3 | You can create your own `package.json` file. Yes, running `npm init -y` or `npm init` will do it for you but it might be missing some fields, keys, properties and other things you might find useful, and unless you are creating a package to be listed on npmjs, I would suggest just making the file manually. 4 | 5 | You can use this template to start with 6 | 7 | Some things were meant to be only used for when you are publishing a package to npm (when you set private to "false") so you can just remove those. 8 | 9 | Here's a step-by-step guide to creating a package.json file: 10 | 11 | ```json 12 | { 13 | "homepage":"", 14 | "name": "name-string-goes-here", 15 | "version": "", 16 | "description": "", 17 | "scripts": {}, 18 | "author": "", 19 | "license": "", 20 | "dependencies": {}, 21 | "bugs": "", 22 | "repository": "", 23 | "contributors": [], 24 | "funding": "uri-goes-here", 25 | "files":[], 26 | "browser": "", 27 | "bin": "", 28 | "man": "", 29 | "config": {}, 30 | "devDependencies": {}, 31 | "peerDependencies": {}, 32 | "optionalDependencies": {}, 33 | "engines": {}, 34 | "private": "true", 35 | "keywords": [], 36 | "main": "", 37 | "publishConfig": {}, 38 | "types": "", 39 | "typings": "", 40 | "exports": "", 41 | "module": "", 42 | "unpkg": "", 43 | "style": "", 44 | "stylelint": {}, 45 | "eslintConfig": {}, 46 | "browserslist": "", 47 | "os": [], 48 | "cpu": [], 49 | "jest": "", 50 | "directories": {}, 51 | "stlyelintIgnore": "", 52 | "eslintIgnore": "", 53 | "prettier":"", 54 | "commitlint": "", 55 | "lintstaged": "", 56 | "husky": "" 57 | } 58 | ``` 59 | 60 | Notes: 61 | 62 | 1. "name" must be a string longer than 1 character 63 | 2. 
"exports" must be a file path pattern that will match the pattern of `"^\./"` 64 | 3. "funding" must be a URI for the fund 65 | 66 | You really won't need most of these for most projects unless you are going to publish to yarn, npm or pnpm. 67 | 68 | The package.json file is essential for managing and installing packages. It lists the packages your project depends on, specifies the versions of a package that your project can use using semantic versioning rules, and makes your build reproducible, and therefore easier to share with other developers 69 | 70 | If you expect to create many package.json files, and intend to use `npm init`, you can customize the questions asked and fields created during the init process so all the package.json files contain a standard set of information. In your home directory, you can create a file called .npm-init.js to customize the npm init questionnaire. 71 | 72 | There are a number of these keys and properties that should be used in all or most package.json files, and I believe some are required. From the research I have done, there is no particular order required. 73 | 74 | "name": This is the name of your package and it must be a string longer that 1 character in the double quotes. 75 | 76 | "keywords": This is recommended, especially if you list your package on npmjs 77 | 78 | "version": The "version" in the package.json file is the version of the package. 79 | The "version" field in the package.json file must be in the form x.x.x and follow the semantic versioning guidelines 80 | 81 | "main": This the entry point filename for your program, library or application, usually app.js, index.js 82 | 83 | "license": This should contain the license you are using. 84 | 85 | "devDependencies": This section should only include any packages and their version numbers which are needed for development. 
86 | 87 | "dependencies": This section should include all of the packages required for your application to run, not including development packages. 88 | 89 | "engines": This should be the nodejs version(s) required for your application to run. 90 | 91 | "description": You should provide a very short description of your package or application here, especially if you are publishing to npmjs, pnpm, yarn. 92 | 93 | "private": You should set this to true, unless you intend on publishing your package to npmjs. 94 | 95 | ## Package Versioning 96 | 97 | In "dependencies" and "devDependencies" the version numbers should be listed in SemVer. 98 | 99 | The difference between using ^ and ~ in package versions is that ^ allows non-breaking updates, while ~ allows only for patch updates 100 | 101 | It's typically listed in the various dependencies sections of as `"package": "~0.0.0"`. 102 | 103 | ## Dependencies 104 | 105 | In a package.json file, the: 106 | 107 | "dependencies": field is used to list the packages that are required for the application to run, while the devDependencies field is used to list the packages that are only required during development. 108 | 109 | "peerDependencies" field is used to specify the packages that the current package relies on in order to work 110 | 111 | "optionalDependencies" field is used to specify the packages that are not necessary for the application to run, but can be used to provide extra functionality. 112 | 113 | ## Some other notes 114 | 115 | "eslintConfig" must be in JSON. 
116 | -------------------------------------------------------------------------------- /Node_JavaScript_Programming/make-an-nodemon-json-file.md: -------------------------------------------------------------------------------- 1 | # Create a nodemon.json file 2 | 3 | Nodemon is a command-line interface (CLI) utility that helps in developing Node.js based applications by automatically restarting the node application when file changes in the directory are detected. 4 | 5 | `nodemon.json` is used a configuration file used to customize the behavior of Nodemon, a utility for Node.js applications that monitors for changes and automatically restarts the server. It allows you to specify options such as which files to watch, which files to ignore, and the delay before restarting the server. The file is typically named nodemon.json and can be located in the current working directory or in your home directory. Command line arguments always override the settings in the nodemon.json file. 6 | 7 | To install nodemon, you can use npm (Node Package Manager) by running the following command: 8 | 9 | ```bash 10 | npm install -g nodemon 11 | ``` 12 | 13 | This will install nodemon globally on your system, allowing you to use it from the command line. 14 | Some alternatives to nodemon include forever, gulp, Grunt, LiveReload, and PM2. These are popular 15 | tools for managing Node.js applications and providing similar functionality to nodemon 16 | 17 | If you are considering alternatives to nodemon, it's important to evaluate your specific requirements 18 | and choose the tool that best fits your needs. Each tool has its own strengths and weaknesses, so it's 19 | worth exploring and experimenting with different options to find the best fit for your project. For more 20 | details, you can refer to the official documentation of nodemon and online resources that compare 21 | different tools for managing Node.js applications. 
22 | 23 | Nodemon is a useful tool for developing Node.js applications as it automates the process of stopping 24 | and starting your application when file changes are detected. It does not require any additional changes 25 | to your code or method of development 26 | 27 | To use nodemon, you need to install it either globally or locally on your project using npm or yarn. 28 | 29 | Here is the `nodemon.json` File Structure: 30 | 31 | ```json 32 | { 33 | "watch": [ 34 | "src/client", 35 | "src/server" 36 | ], 37 | "ext": "js,json", 38 | "exec": "node src/server/server.js", 39 | "restartDelay": "", 40 | "args": "", 41 | "nodeArgs": "", 42 | "verbose": true, 43 | "watchOptions": "", 44 | "ignore": [ 45 | "src/**/*.test.js" 46 | ], 47 | "delay": 1000, 48 | "execMap": { 49 | "js": "node --inspect" 50 | } 51 | } 52 | ``` 53 | 54 | This is all the information I could find out about the file. 55 | 56 | ## Starting Nodemon 57 | 58 | Typically you start nodemon by `nodemon yourApp.js` 59 | -------------------------------------------------------------------------------- /Node_JavaScript_Programming/nodejs-resources.md: -------------------------------------------------------------------------------- 1 | # Node.JS resources for learning 2 | 3 | There are a number of resources to learn NodeJS. My notes on NodeJS were not meant to explicitly be tutorials. 4 | 5 | In my experiences these resources I have reviewed to be good. 6 | 7 | NodeJS on VSCode for Windows tutorial by Microsoft 8 | 9 | freeCodeCamp Node.JS tutorial video 10 | 11 | freeCodeCamp's Node.JS and Express Full course (8 hours but has chapters) 12 | 13 | freeCodeCamp's tutorial website to go along with the above video 14 | 15 | freeCodeCamp's 10 hour NodeJS / Express course with 4 projects. 16 | 17 | freeCodeCamp's 3 hour Full Tutorial for NodeJS. 
18 | 19 | NodeJS's own introduction / tutorial 20 | 21 | NodeJS's own examples from the above webpage/website 22 | 23 | MDN/Mozilla's learning NodeJS and Express 24 | 25 | ExpressJS getting started guide to get express going 26 | 27 | Programming with Mosh's Learn NodeJS Tutorial (1hr) 28 | 29 | NodeJS Playlist (37 videos less than 10 mins each) by NetNinja 30 | 31 | Another NodeJS crash course by NetNinja 32 | 33 | NodeJS Tutorial by Codevolution 34 | 35 | Traversy Media's NodeJS course (updated 2024 version here ) 36 | 37 | There is also a subreddit 38 | -------------------------------------------------------------------------------- /Node_JavaScript_Programming/nodejs-server.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gbowne1/ProgrammerNotes/804c2d92f3029c24dec9cb5a3dce31e91a269445/Node_JavaScript_Programming/nodejs-server.md -------------------------------------------------------------------------------- /Node_JavaScript_Programming/using-npm.md: -------------------------------------------------------------------------------- 1 | # Using npm 2 | 3 | `npm outdated` -- Returns list of the outdated installed packages from your package.json file 4 | `npm list` -- 5 | `npm ls` -- shows a list of installed packages. 6 | `npm update` -- Updates the packages listed in package.json 7 | `npm init` -- Initializes a new node.js project 8 | `npm run ` -- runs whatever script is in 'scripts` in package.json 9 | `npm cache clean --force` forces npm to clear its cache. 10 | `npm ci` -- Installs dependencies directly from package-lock.json or npm-shrinkwrap.json, ignoring package.json 11 | `npm audit` scans your project for known security vulnerabilities in your dependencies but no changes applied. 12 | `npm audit fix` 13 | `npm audit fix --force` 14 | `npm version ` -- updates the semver version listing in your package.json to whatever you specify. 15 | `npm i ` installs the package specified. 
`npm install -g <package>` Installs a package globally, making it available across all your Node.js projects on your system 18 | `npm uninstall <package>` removes a package and its entry from package.json
6 | 7 | 10 | 11 | ## PHP keywords 12 | 13 | ### Control Structures 14 | 15 | abstract 16 | and 17 | array 18 | as 19 | break 20 | callable 21 | case 22 | catch 23 | class 24 | clone 25 | const 26 | continue 27 | declare 28 | default 29 | die 30 | do 31 | echo 32 | else 33 | elseif 34 | empty 35 | endfor 36 | endforeach 37 | endwhile 38 | enum 39 | eval 40 | exit 41 | extends 42 | final 43 | finally 44 | for 45 | foreach 46 | function 47 | global 48 | goto 49 | if 50 | implements 51 | import 52 | instanceof 53 | insteadof 54 | interface 55 | isset 56 | list 57 | namespace 58 | new 59 | or 60 | print 61 | private 62 | protected 63 | public 64 | require 65 | return 66 | static 67 | switch 68 | throw 69 | trait 70 | try 71 | unset 72 | use 73 | var 74 | while 75 | xor 76 | 77 | ### Error Handling 78 | 79 | @ 80 | error_reporting 81 | set_error_handler 82 | restore_error_handler 83 | set_exception_handler 84 | restore_exception_handler 85 | register_shutdown_function 86 | 87 | ### Language Constructs 88 | 89 | __construct 90 | __destruct 91 | __get 92 | __set 93 | __isset 94 | __unset 95 | __call 96 | __toString 97 | __invoke 98 | __set_state 99 | __clone 100 | __sleep 101 | __wakeup 102 | __serialize 103 | __unserialize 104 | __debugInfo 105 | __tostring 106 | __invoke 107 | __isset 108 | __unset 109 | __call 110 | __get 111 | __set 112 | __toString 113 | __set_state 114 | __clone 115 | __sleep 116 | __wakeup 117 | __serialize 118 | __unserialize 119 | __debugInfo 120 | 121 | ### Other Keywords 122 | 123 | Other Keywords 124 | 125 | abstract 126 | and 127 | array 128 | as 129 | break 130 | callable 131 | case 132 | catch 133 | class 134 | clone 135 | const 136 | continue 137 | declare 138 | default 139 | die 140 | do 141 | echo 142 | else 143 | elseif 144 | empty 145 | endwhile 146 | enum 147 | eval 148 | exit 149 | extends 150 | final 151 | finally 152 | for 153 | foreach 154 | function 155 | global 156 | goto 157 | if 158 | implements 159 | import 160 | instanceof 
161 | insteadof 162 | interface 163 | isset 164 | list 165 | namespace 166 | new 167 | or 168 | print 169 | private 170 | protected 171 | public 172 | require 173 | return 174 | static 175 | switch 176 | throw 177 | trait 178 | try 179 | unset 180 | use 181 | var 182 | while 183 | xor 184 | -------------------------------------------------------------------------------- /Python/debugging-python.md: -------------------------------------------------------------------------------- 1 | # Debugging Python 2 | 3 | Here are some tips for debugging Python. 4 | 5 | 6 | ## VSCode debugging Python 7 | 8 | VSCode now uses `debugpy` for debugging, so `pip install debugpy` 9 | Set breakpoints by clicking in the gutter left of the line numbers. 10 | To debug, you need to have a launch.json file. 11 | Open the Run view by clicking on the Run icon in the Activity Bar on the side, or press Ctrl+Shift+D. 12 | -------------------------------------------------------------------------------- /Python/python-extensions-vscode.md: -------------------------------------------------------------------------------- 1 | # Python Extensions for VSCode 2 | 3 | There are a number of extensions for VSCode but there are a few basics you should install first before any others: 4 | 5 | - ms-python.python 6 | - ms-python.debugpy 7 | - kevinrose.vsc-python-indent 8 | - donjayamanne.python-extension-pack 9 | - donjayamanne.python-environment-manager 10 | - njpwerner.autodocstring 11 | - tushortz.python-extended-snippets 12 | - mgesbert.python-path 13 | - frhtylcn.pythonsnippets 14 | - littlefoxteam.vscode-python-test-adapter 15 | - njqdev.vscode-python-typehint 16 | 17 | These extensions should be enough for anyone to get started 18 | -------------------------------------------------------------------------------- /Python/python3-development.md: -------------------------------------------------------------------------------- 1 | # Create a Python program 2 | 3 | In Python, a program usually starts with 
importing python libraries with `import` 4 | 5 | The configuration files used for Black, Flake8, Pylint, and Pytest are as follows: 6 | 7 | - Black: Black only supports the TOML file format for its configuration, which should be placed in the root of your project. Compatible configuration files for other tools can be found in the provided examples 8 | 9 | - Flake8: Flake8 can be configured using various configuration files, such as .flake8, setup.cfg, and tox.ini. Flake8 should be configured to allow lines up to the length limit of 88, Black’s default 10 | 11 | - Pylint: Pylint can be configured using configuration files such as pylintrc and setup.cfg. Pylint should be configured to only complain about lines that surpass 88 characters via max-line-length = 88. If using pylint<2.6.0, also disable C0326 and C0330 as these are incompatible with Black formatting and have since been removed 12 | 13 | - Pytest: Pytest can be configured using a configuration file named pytest.ini. Other configuration files such as setup.cfg and tox.ini can also be used to hold pytest configuration if they have a [tool:pytest] section 14 | 15 | It is important to note that some of these tools may need a bit of tweaking to resolve conflicts, and there are other popular Python linters and formatters besides the ones mentioned here 16 | 17 | ## A python Hello World 18 | 19 | Here is a basic Python 3 Hello World 20 | 21 | ```py 22 | print("Hello, World!") 23 | ``` 24 | 25 | ## Python program structure 26 | 27 | From top to bottom, a typical Python program will have: 28 | 29 | a shebang line #!/usr/bin/env python This is optional. It is only required if you plan to execute the script directly from the command line.
30 | 31 | next if you are documenting the program, using `#` to comment and document the program 32 | 33 | After the shebang line, one imports Python modules with `import` 34 | 35 | Next, global variables are defined as in something like `GLOBAL_VARIABLE = "This is a global variable"` 36 | 37 | After global variables next comes the function definitions using a `def` 38 | 39 | If you are using OOP, next you would do some class definitions with `class` 40 | 41 | last would be a main execution block with `if __name__ == "__main__":` 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ProgrammerNotes 2 | 3 | This is a collection of Markdown notes written in Obsidian for programming, coding, software engineering, software development and it includes notes on C, C++, using and configuring VSCode and many other things programming related. Anyone is welcome to contribute to this Markdown repository and make corrections. 4 | 5 | Keep in mind these notes are Open Source. If I get it wrong, correct it in a PR or a issue or open a discussion. 6 | 7 | Also, keep in mind these things are written just as I understand them, so they might be wrong. They have been copied from either hand written or typed notes I have written to help me in case I forgot how to do something in programming. If you use these things, Please verify that they work. 8 | 9 | You don't have to use Obsidian to read this collection of notes, but it might help. 
10 | -------------------------------------------------------------------------------- /React_Development/commonpackages.md: -------------------------------------------------------------------------------- 1 | # NPM Packages used in React 2 | 3 | This is a list of packages commonly installed in NodeJs and React projects 4 | 5 | "@babel/cli": "^7.21.0", the Babel command line 6 | "@babel/core": "7.0.0", the Babel compiler core 7 | "@babel/core": "^7.20.5", 8 | "@babel/core": "^7.21.4", 9 | "@babel/core": "^7.23.0", 10 | "@babel/eslint-parser": "^7.15.0", ESLint parser that allows for linting of experimental syntax transformed by Babel 11 | "@babel/eslint-parser": "^7.22.5", 12 | "@babel/eslint-parser": "^7.23.3", 13 | "@babel/parser": "^7.20.5", 14 | "@babel/plugin-proposal-private-property-in-object": "^7.21.11", 15 | "@babel/plugin-transform-private-property-in-object": "^7.23.4", 16 | "@babel/preset-env": "^7.21.4", 17 | "@babel/preset-react": "^7.18.6", 18 | "@babel/runtime": "^7.20.6", 19 | "@emotion/react": "^11.10.5", 20 | "@emotion/react": "^11.10.8", 21 | "@emotion/react": "^11.11.0", 22 | "@emotion/react": "^11.11.1", 23 | "@emotion/react": "^11.11.4", 24 | "@emotion/styled": "^11.10.5", 25 | "@emotion/styled": "^11.10.8", 26 | "@emotion/styled": "^11.11.0", 27 | "@emotion/styled": "^11.11.5", 28 | "@fortawesome/free-brands-svg-icons": "^6.2.1", 29 | "@fortawesome/react-fontawesome": "^0.2.0", 30 | "@hapi/boom": "^10.0.1", HTTP-friendly error objects 31 | "@hookform/resolvers": "^3.3.1", 32 | "@mui/base": "^5.0.0-alpha.110", 33 | "@mui/icons-material": "^5.11.0", Material UI icons package 34 | "@mui/icons-material": "^5.11.16", 35 | "@mui/icons-material": "^5.14.3", 36 | "@mui/icons-material": "^5.14.9", 37 | "@mui/icons-material": "^5.15.13", 38 | "@mui/icons-material": "^5.15.15", 39 | "@mui/lab": "^5.0.0-alpha.145", 40 | "@mui/material": "^5.11.0", \ asdfasdfasdfasdfasdfasdf 41 | "@mui/material": "^5.12.3", 42 | "@mui/material": "^5.13.0", 43 | 
"@mui/material": "^5.13.5", 44 | "@mui/material": "^5.14.5", 45 | "@mui/material": "^5.15.11", 46 | "@mui/material": "^5.15.15", 47 | "@mui/styled-engine": "^5.11.0", 48 | "@mui/system": "^5.11.0", 49 | "@mui/system": "^5.15.2", 50 | "@mui/x-date-pickers": "^6.19.0" 51 | "@tanstack/react-query": "^5.22.2", 52 | "@testing-library/dom": "^9.2.0", 53 | "@testing-library/dom": "^9.3.3", 54 | "@testing-library/jest-dom": "^5.16.5", 55 | "@testing-library/jest-dom": "^5.17.0", 56 | "@testing-library/jest-dom": "^6.1.2", 57 | "@testing-library/jest-dom": "^6.4.1", 58 | "@testing-library/react": "^13.4.0", 59 | "@testing-library/react": "^14.0.0", 60 | "@testing-library/react": "^14.1.2", 61 | "@testing-library/user-event": "^13.5.0", 62 | "@testing-library/user-event": "^14.4.3", 63 | "@testing-library/user-event": "^14.5.1", 64 | "axios": "^1.2.1", Promise based HTTP client for the browser and node.js 65 | "axios": "^1.3.5", 66 | "axios": "^1.4.0", 67 | "axios": "^1.5.0", 68 | "babel-eslint": "^10.1.0", ?? babel-eslint is now @babel/eslint-parser and is deprecated 69 | "babel-loader": "^8.3.0", babel module loader for webpack 70 | "babel-loader": "^9.1.2", 71 | "babel-loader": "^9.1.3", 72 | "bcrypt": "^5.1.0", A bcrypt library for NodeJS. 73 | "bcrypt": "^5.1.1", 74 | "bcryptjs": "^2.4.3", Optimized bcrypt in plain JavaScript with zero dependencies. Compatible to 'bcrypt'. 75 | "body-parser": "^1.20.2", Node.js body parsing middleware 76 | "boom": "^7.3.0", Deprecated and renamed to @hapi/boom 77 | "chai": "^4.3.7", BDD/TDD assertion library for node.js and the browser. Test framework agnostic. 
78 | "chalk": "^5.2.0", Terminal string styling done right 79 | "cheerio": "^1.0.0-rc.12", Tiny, fast, and elegant implementation of core jQuery designed specifically for the server 80 | "concurrently": "^6.6.0", Run multiple commands concurrently 81 | "concurrently": "^8.2.2", 82 | "cookie-parser": "^1.4.6", Parse HTTP request cookies 83 | "cookie-parser": "~1.4.4", 84 | "cookie-session": "^2.0.0", cookie session middleware 85 | "cors": "^2.8.5", Node.js CORS middleware 86 | "cypress": "^13.3.0" 87 | "dayjs": "^1.11.10", 88 | "dayjs": "^1.11.7", 89 | "debug": "~4.3.4", Lightweight debugging utility for Node.js and the browser 90 | "discord.js": "^14.10.2", 91 | "dotenv": "^10.0.0", Loads environment variables from .env file 92 | "dotenv": "^16.0.3", 93 | "dotenv": "^16.3.1", 94 | "dotenv": "^16.4.4", 95 | "dotenv": "^16.4.5", 96 | "ejs": "^3.1.9", Embedded JavaScript templates 97 | "eslint": "^8.0.1", An AST-based pattern checker for JavaScript. 98 | "eslint": "^8.39.0", 99 | "eslint": "^8.40.0", 100 | "eslint": "^8.46.0", 101 | "eslint": "^8.49.0", 102 | "eslint": "^8.56.0", 103 | "eslint": "^8.57.0", 104 | "eslint-config-airbnb-base": "^15.0.0", 105 | "eslint-config-prettier": "^8.8.0", 106 | "eslint-config-prettier": "^9.0.0", 107 | "eslint-config-prettier": "^9.1.0", 108 | "eslint-config-react-app": "^7.0.1", 109 | "eslint-plugin-import": "^2.27.5", 110 | "eslint-plugin-jest-dom": "^5.1.0", 111 | "eslint-plugin-prettier": "^4.2.1", 112 | "eslint-plugin-prettier": "^5.0.0", 113 | "eslint-plugin-prettier": "^5.1.3", 114 | "eslint-plugin-react": "^7.28.0", 115 | "eslint-plugin-react": "^7.33.2", 116 | "eslint-plugin-react-hooks": "^4.3.0", 117 | "express": "^4.17.1", Fast, unopinionated, minimalist web framework 118 | "express": "^4.18.2", 119 | "express": "^4.18.3", 120 | "express-session": "^1.17.3", Simple session middleware for Express 121 | "express-session": "^1.18.0", 122 | "express-validator": "^7.0.1", Express middleware for the validator module. 
123 | "formik": "^2.4.3", Build forms in React, without the tears 124 | "formik-mui": "^5.0.0-alpha.0", 125 | "fs": "^0.0.1-security", ?? an old filesystem library which got removed. 126 | "handlebars": "^4.7.7", Handlebars provides the power necessary to let you build semantic templates effectively with no frustration 127 | "helmet": "^6.1.5", help secure Express/Connect apps with various HTTP headers 128 | "helmet": "^7.0.0", 129 | "http-errors": "~1.6.3", Create HTTP error objects 130 | "http-errors": "~2.0.0", 131 | "husky": "^8.0.3", Modern native Git hooks 132 | "isbn-validate": "^1.0.3", 133 | "jade": "^1.11.0", Deprecated, renamed to Pug 134 | "jest": "^29.7.0", 135 | "jest-watch-typeahead": "^2.2.2", 136 | "json-server": "^1.0.0-alpha.16", 137 | "jsonwebtoken": "^9.0.0", SON Web Token implementation (symmetric and asymmetric) 138 | "jsonwebtoken": "^9.0.1", 139 | "jsonwebtoken": "^9.0.2", 140 | "link-module-alias": "^1.2.0", Create permanent links for _moduleAliases 141 | "lodash": "^4.17.21", Lodash modular utilities. 142 | "material-ui-popup-state": "^5.0.9", 143 | "md5": "^2.3.0", js function for hashing messages with MD5 144 | "mocha": "^10.2.0", simple, flexible, fun test framework 145 | "module-alias": "^2.2.3", Create aliases of directories and register custom module paths 146 | "moment": "^2.29.4", Parse, validate, manipulate, and display dates 147 | "mongodb": "^5.3.0, The official MongoDB driver for Node.js 148 | "mongodb": "^5.4.0", 149 | "mongodb": "^5.7.0", 150 | "mongodb": "^6.0.0", 151 | "mongodb": "^6.5.0", 152 | "mongodb-memory-server": "^9.1.6", 153 | "mongoose": "^7.0.3", Mongoose MongoDB ODM 154 | "mongoose": "^7.0.4", 155 | "mongoose": "^7.3.4", 156 | "mongoose": "^7.4.3", 157 | "mongoose": "^8.2.1", 158 | "morgan": "^1.10.0", HTTP request logger middleware for node.js 159 | "multer": "^1.4.5-lts.1", Middleware for handling `multipart/form-data`. 
160 | "node-fetch": "^3.3.1", A light-weight module that brings Fetch API to node.js 161 | "nodemailer": "^6.9.12", Easy as cake e-mail sending from your Node.js applications 162 | "nodemailer": "^6.9.4", 163 | "nodemon": "^2.0.22", Simple monitor script for use during development of a Node.js app. 164 | "nodemon": "^2.0.22", 165 | "nodemon": "^3.0.1" 166 | "nodemon": "^3.0.2", 167 | "passport": "^0.6.0", Simple, unobtrusive authentication for Node.js. 168 | "passport": "^0.7.0", 169 | "passport-jwt": "^4.0.1", Passport authentication strategy using JSON Web Tokens 170 | "passport-local": "^1.0.0", Local username and password authentication strategy for Passport. 171 | "passport-local-mongoose": "^8.0.0", Mongoose plugin that simplifies building username and password login with Passport 172 | "pg": "^8.11.2", PostgreSQL client - pure javascript & libpq with the same API 173 | "prettier": "^2.8.8" Prettier is an opinionated code formatter 174 | "prettier": "^3.0.2" 175 | "prettier": "^3.2.5", 176 | "prettier-eslint": "^15.0.1", Formats your JavaScript using prettier followed by eslint --fix 177 | "prop-types": "^15.8.1", Runtime type checking for React props and similar objects. 178 | "pug": "^3.0.2", A clean, whitespace-sensitive template language for writing HTML 179 | "quick-lint-js": "^3.1.0", Find bugs in your JavaScript code 180 | "react": "^18.2.0", React is a JavaScript library for building user interfaces. 181 | "react-barcode-reader": "^0.0.2", 182 | "react-bootstrap": "^2.7.0", 183 | "react-burger-menu": "^3.0.9", 184 | "react-chartkick": "^0.5.3", 185 | "react-cookie-consent": "^8.0.1", 186 | "react-dom": "^18.2.0", React package for working with the DOM. 
187 | "react-hook-form": "^7.50.1", 188 | "react-hot-toast": "^2.4.1", 189 | "react-jwt": "^1.2.0", 190 | "react-modal": "^3.16.1", 191 | "react-router": "^6.11.1", Declarative routing for React 192 | "react-router": "^6.13.0", 193 | "react-router": "^6.16.0", 194 | "react-router-dom": "^6.5.0", Declarative routing for React web applications 195 | "react-router-dom": "^6.11.1", 196 | "react-router-dom": "^6.13.0", 197 | "react-router-dom": "^6.15.0", 198 | "react-router-dom": "^6.16.0", 199 | "react-router-dom": "^6.22.3", 200 | "react-scripts": "^5.0.1", Configuration and scripts for Create React App. 201 | "react-simple-star-rating": "^5.1.7", 202 | "react-slick": "^0.29.0", 203 | "react-test-renderer": "^18.2.0", 204 | "request": "^2.88.2", ?? Deprecated in 2020 205 | "sequelize": "^6.32.0", Sequelize is a promise-based Node.js ORM tool 206 | "socket.io": "^4.6.1", node.js realtime framework server 207 | "socket.io-client": "^4.6.1", Realtime application framework client 208 | "sqlite3": "^5.1.6", Asynchronous, non-blocking SQLite3 bindings 209 | "styled-components": "^5.3.11", CSS for the `` Age. Style components your way with speed, strong typing, and flexibility. 210 | "styled-components": "^5.3.6", 211 | "styled-components": "^6.0.7", 212 | "supertest": "^6.3.4", SuperAgent driven library for testing HTTP servers 213 | "sweetalert2": "^11.7.28", A beautiful, responsive, customizable and accessible (WAI-ARIA) replacement for .js popup boxes, supported fork of sweetalert 214 | "tmi.js": "^1.8.4", 215 | "typescript": "4.9.4": Self explanatory. 
216 | "uuid": "^9.0.0", RFC4122 (v1, v4, and v5) UUIDs 217 | "web-vitals": "^2.1.4" Easily measure performance metrics in JavaScript 218 | "web-vitals": "^3.3.1" 219 | "web-vitals": "^3.3.2" 220 | "web-vitals": "^3.4.0" 221 | "web-vitals": "^3.5.0", 222 | "webpack": "^4.46.0", Packs ECMAScript/CommonJs/AMD modules for the browser 223 | "webpack": "^5.79.0", 224 | "webpack": "^5.90.1", 225 | "webpack-cli": "^5.0.1", CLI for webpack & friends 226 | "webpack-dev-server": "^4.13.3", Serves a webpack app. Updates the browser on changes. 227 | "winston": "^3.10.0", A logger for just about everything. 228 | "winston": "^3.8.2", 229 | "ws": "^8.13.0", Simple to use, blazing fast and thoroughly tested websocket client and server for Node.js 230 | "yup": "^1.2.0", 231 | "yup": "^1.3.2", 232 | "yup-password": "^0.2.2" 233 | -------------------------------------------------------------------------------- /React_Development/react-development-notes.md: -------------------------------------------------------------------------------- 1 | # Learning React development 2 | 3 | Since I started learning React development in 2017, I've learned a lot and since then the React development landscape has changed a lot. Back then, I was using the last few versions of React 16. 4 | 5 | First of all, let me say I never felt like I grasped React development fully and am still learning React development. 6 | 7 | There's some things I have learned via getting contributions to my React applications. 8 | 9 | - The documentation has moved to react.dev 10 | 11 | - Create React App and Facebook's React core team have basically deprecated the use of Create React App to scaffhold (aka creating an initial set of files for an application) in favor of Vite, NextJS. 12 | 13 | React is typically used in conjunction or with other things called a stack or a tech stack. 14 | 15 | A typical tech stack, whether `full stack` or not, consists of various components that work together to build an application. 
For a web application, a tech stack usually includes both frontend and backend technologies. Some common components of a tech stack are: 16 | 17 | Frontend: 18 | 19 | Programming languages: such as JavaScript, HTML, and CSS 20 | 21 | Frameworks: like React, Angular, or Vue.js 22 | 23 | Libraries: such as Redux for state management in JavaScript applications 24 | 25 | Backend: 26 | 27 | Frameworks: such as Express for Node.js or Spring for Java 28 | 29 | Databases: including SQL databases like MySQL or PostgreSQL, and NoSQL databases like MongoDB 30 | 31 | Servers: for managing client requests, such as Apache or Nginx 32 | 33 | Other components: 34 | 35 | APIs: for integrating with third-party service(s). API's are usually JSON or YAML. 36 | 37 | Development tools: like Git for version control and Docker for containerization 38 | 39 | These components can be combined in different ways to form various tech stacks, such as the MEAN stack (MongoDB, Express, Angular, Node.js) or the LAMP stack (Linux, Apache, MySQL, PHP) 40 | 41 | The choice of tech stack depends on the specific requirements of the project and the skills of the development team. 42 | 43 | In the latest versions of React, and you are using JavaScript with React instead of TypeScript, you should be focused on using at least ES6 or ES2015 so that you have access to `import` and `export`. 
44 | -------------------------------------------------------------------------------- /React_Development/react-server-apache2.md: -------------------------------------------------------------------------------- 1 | # Serving a React app from Apache2 2 | 3 | Here's an example of an Apache2 configuration file that serves a React application: 4 | 5 | ```xml 6 | 7 | ServerName example.com 8 | DocumentRoot /var/www/html 9 | # Serve the static files generated by the React build process 10 | Alias /static /var/www/html/static 11 | 12 | Require all granted 13 | 14 | # Serve the index.html file for all requests that do not match a static file 15 | RewriteEngine On 16 | RewriteCond %{REQUEST_FILENAME} !-f 17 | RewriteRule ^ /index.html [L] 18 | 19 | ``` 20 | -------------------------------------------------------------------------------- /React_Development/using-eslint-with-react.md: -------------------------------------------------------------------------------- 1 | # Configuring ESLint for React 2 | 3 | Common ESLint "rules" for React are: 4 | 5 | - react/react-in-jsx-scope: This rule warns when React is not in scope while using JSX, ensuring that React is properly imported and in scope when using JSX 6 | 7 | - react/prop-types: Enforces the declaration of prop-types for components, helping to ensure that the correct props are passed to components 8 | 9 | - react/no-unused-state: Warns about unused state variables in React components, promoting cleaner and more maintainable code 10 | 11 | - react/jsx-uses-react: This rule warns when React is not in scope while using JSX, similar to react/react-in-jsx-scope but for a different use case 12 | 13 | - react-hooks/rules-of-hooks: Enforces the Rules of Hooks, ensuring that hooks are called in the correct sequence and only from within React functional components 14 | 15 | - react/jsx-props-no-spreading: Prevents the usage of the ... 
spread operator on props, encouraging explicit prop passing to maintain component encapsulation 16 | 17 | - react/no-children-prop: Disallows the use of the children prop, as it can lead to unexpected behavior and make the component API less explicit 18 | 19 | - react/jsx-no-useless-fragment: Warns about unnecessary fragments in JSX, promoting cleaner and more concise JSX syntax 20 | 21 | - react/self-closing-comp: Enforces the use of self-closing components in JSX, improving consistency and readability of the code 22 | -------------------------------------------------------------------------------- /VSCode/create-a-jsconfig-json-file.md: -------------------------------------------------------------------------------- 1 | # Create a jsconfig.json 2 | 3 | The `jsconfig.json` file is used with Visual Studio Code. It is used to configure JavaScript projects in Visual Studio Code. The `jsconfig.json` file specifies the root files and options for the features provided by the JavaScript language service. The presence of a `jsconfig.json` file in a directory indicates that the directory is the root of a JavaScript project. The file itself lists the files belonging to the project as well as compiler options. . It specifies the root files and the options for the language features provided by the JavaScript language service. This file is specific to Visual Studio Code and JavaScript projects. It is primarily used to define a JavaScript project in VS Code and to exclude some files from showing up in IntelliSense. This file essentially configures IntelliSense for JavaScript. 
4 | 5 | ```json 6 | { 7 | "compilerOptions": { 8 | "target": "es6", 9 | "module": "es6", 10 | "allowSyntheticDefaultImports": true, 11 | "baseUrl": ".", 12 | "paths": { 13 | "@/*": ["src/*"] 14 | } 15 | }, 16 | "exclude":["node_modules"] 17 | } 18 | ``` 19 | 20 | If you want to improve your developer experience when working in JavaScript with VS Code, adding a jsconfig.json file to your projects can significantly aid the JavaScript language service and improve the auto-complete and file browsing support in the VS Code IDE. 21 | 22 | There is also a `tsconfig.json` file that configures Typescript in Visual Studio Code and it's structure is similar and does similar things, except for TypeScript 23 | 24 | There is more documentation about this here: 25 | 26 | The tsconfig.json file is used to specify the root files and compiler options required to compile a TypeScript project. It is placed in the root of the project and is used to configure the TypeScript compiler. The presence of this file indicates that the directory is the root of a TypeScript project. The tsconfig.json file can include various options such as "compilerOptions" and "include" to customize the behavior of the TypeScript compiler. The "compilerOptions" property can be omitted, in which case the compiler’s defaults are used. This property contains the rules for the TypeScript compiler to enforce, such as "target" for specifying the ECMAScript version, "module" for specifying the module code generation, and "strictNullChecks" for enabling strict null checks. The "include" property specifies an array of filenames or patterns to include in the program, and these filenames are resolved relative to the directory containing the tsconfig.json file. This property is useful for specifying the files to be included in the program. The tsconfig.json file is useful for both individual work and team projects because it allows everyone to be on the same page about how to write their code. 
By including a tsconfig.json file, you can use the tsc command without any arguments in the terminal, making it easier to compile the TypeScript code. 27 | -------------------------------------------------------------------------------- /VSCode/create-a-launch.json file.md: -------------------------------------------------------------------------------- 1 | # Create a launch.json 2 | 3 | You can create a `launch.json` file for Visual Studio Code. The can be automatically created by the UI, but not too hard to create one of your own. 4 | 5 | The file format is: 6 | 7 | ```json 8 | { 9 | // Use IntelliSense to learn about possible attributes. 10 | // Hover to view descriptions of existing attributes. 11 | // For more information, visit: https://go.microsoft.com/fwlink/linkid=830387 12 | "version": "0.2.0", 13 | "configurations": [ 14 | { 15 | "name": "", 16 | "type": "", 17 | "request": "", 18 | "program": "", 19 | "args": [], 20 | "cwd": "", 21 | "env": [], 22 | "sourceMaps": "", 23 | "MIMode": "", 24 | "setupCommands": "", 25 | "externalConsole": "", 26 | "environment": [], 27 | "miDebuggerPath": "", 28 | "stopAtEntry": "", 29 | "processId": "", 30 | "remotePath": "", 31 | "showLog": "", 32 | "port": "", 33 | "host": "", 34 | "preLaunchTask": "", 35 | "envFile": "", 36 | "mode": "", 37 | "apiVersion": "", 38 | "logOutput": "", 39 | "trace": "", 40 | "logging": "", 41 | "skipFiles": "", 42 | "console": "" 43 | } 44 | ] 45 | } 46 | ``` 47 | 48 | To see the default configuration for a debugger, you can go to the Run and Debug view in Visual Studio Code and select the "create a launch.json file" link. This will create a `launch.json` file with a default configuration for the selected debugger, including a link to the official documentation for the attributes. 49 | 50 | To run a launch.json configuration in Visual Studio Code 1.81 for Linux, you can follow the steps below: 51 | 52 | 1. Open your project in Visual Studio Code. 53 | 2. 
Press Ctrl+Shift+D to open the Run and Debug view. 54 | 3. Select the configuration you want to run from the drop-down menu in the top toolbar. 55 | 4. Click the green "Run" button to start the debugging session. 56 | 57 | Once you have created a `launch.json` file, you can customize the configuration settings to match your debugging scenario. For example, if you want to configure `launch.json` for `cppdbg`, you can set the "type" field to `"cppdbg"` and the "request" field to "launch", and specify the path to the executable you want to debug in the "program" field. 58 | 59 | - "name" needs to be a string and the "name" is purely descriptive and is used to identify the configuration in the VS Code UI. 60 | 61 | - "type" The supported types are `node`,`php`,`go`,`cppvsdbg`,`cppdbg`,`msedge`,`python`,`python`,`clr` and it is used to 62 | specify the type of debugger to use for the launch configuration 63 | 64 | Here are uses for the supported types: 65 | 66 | - node for Node.js debugging. 67 | - php for PHP debugging. 68 | - go for Go debugging. 69 | - cppvsdbg for C++ debugging with Visual Studio Windows debugger. 70 | - cppdbg for C++ debugging with GDB or LLDB. 71 | - msedge for debugging with Microsoft Edge. 72 | - python for Python debugging. 73 | - java for Java debugging. 74 | - coreclr or clr for C# debugging with .NET Core or .NET Framework. 75 | 76 | - "request" values are either "launch" or "attach" 77 | 78 | - "program" attribute specifies the path to the program that the debugger should launch or attach to, and is usually pointed to the entry point 79 | 80 | - "args" this attribute is used to specify command-line arguments that will be passed to the program when it is launched. 
The accepted values for "args" are an array of strings, where each string represents an argument 81 | 82 | - "cwd" this is the current working directory and usually is set to ${workspaceFolder} and in Visual Studio Code refers to the path & directory from which the debugger launches or attaches to the program. 83 | 84 | - "env" are environment variables passed to the debugger and the "env" attribute is used to define environment variables for the launched or attached program 85 | 86 | - "sourceMaps" are a boolean. true or false. For transpiled languages like TypeScript, Babel, Webpack. is used to control whether source maps are used during debugging. 87 | 88 | - "MIMode" accepts "gdb" and "lldb" and is used to specify the debugger that VS Code will connect to for C++ debugging. 89 | 90 | - "setupCommands" is used to specify an array of commands that are executed to set up the debugger environment before the debugging session starts. the array includes the properties "text", "description" and a boolean "ignoreFailures". and the array must start with [ and end with ] and contains curly brace pairs {}. these commands are specific to GDB or LLDB. 91 | 92 | - "externalConsole" is used to specify whether the program being debugged should run in an external console or use the 93 | integrated terminal within VS Code. this value is a boolean, true or false. 94 | 95 | - "environment" is the same as "env", and the "environment" attribute is used to specify environment variables for the program being debugged. 96 | 97 | - "miDebuggerPath" is used to specify the path to the debugger executable (such as GDB or LLDB) when the "MIMode" is set to "gdb" or "lldb". 98 | 99 | - "stopAtEntry" is a boolean value, true or false and used to control whether the debugger should pause execution at the 100 | entry point of the program when a debugging session starts.
101 | 102 | - "processId" is used to specify the process ID to which the debugger should attach and should be an integer 103 | process ID or ${command:pickProcess} which will prompt you for the ID. The "request" field should be "attach". 104 | 105 | - "remotePath" is used to specify the path to the source code on the remote machine when remote debugging. the "host" 106 | and "port" should be set. use `null` if the path is the same on both machines. It should be set to the path of the 107 | source code on the remote machine. 108 | 109 | - "showLog" - is a boolean value true or false and is used to control whether the debug logs should be displayed in the Debug Console 110 | 111 | - "port": mainly used for the remote debugging. 112 | 113 | - "host": mainly used for the remote debugging. 114 | 115 | - "preLaunchTask" is used to specify the name of a task defined in tasks.json that should be executed before the debugger starts. 116 | This should be the name of a task as defined in the tasks.json file. This should be a string that matches the "label" property 117 | of a task in your tasks.json file. It can also be `null` if there is no tasks.json task. This task will run before debugging begins. 118 | 119 | - "envFile" is used to specify the path to a file containing environment variables to be loaded into the debugging session. this is typically set to ${workspaceFolder}/.env and is used with the "node" type. 120 | 121 | - "mode" is either set to "launch" or "attach" and might be used the same as "request", however this is not a standard setting and may be deprecated, use "request" instead. 122 | 123 | In this case: 124 | 125 | - "launch": This mode is used to start a new instance of the application for debugging. It's typically used when you want to debug your application from the start. 126 | 127 | - "attach": This mode is used to attach the debugger to an already running instance of the application. 
It's useful for debugging applications that are already in a running state. 128 | 129 | - "apiVersion" is only used to specify the "Delve" version and only used when debugging Go. 130 | 131 | - "logOutput" particularly when debugging Go code, the "logOutput" attribute is used to specify which components of the debugger should log their output. can be a comma separated list of components. the accepted values are "debugger", "gdbwire", "lldbout", "debuglineerr", "rpc". 132 | 133 | ## Alternative launch.json configuration styles 134 | 135 | launch.json configs can also be specified without the configuration array like: 136 | 137 | ```json 138 | { 139 | "name": "Launch file", 140 | "type": "go", 141 | "trace": "verbose", 142 | "showLog": true, 143 | "logOutput": "debugger,gdbwire" 144 | } 145 | ``` 146 | -------------------------------------------------------------------------------- /VSCode/create-a-settings.json file.md: -------------------------------------------------------------------------------- 1 | # Creating a settings.json file for VSCode 2 | 3 | Ideally one should create a settings.json in a .vscode folder/directory in their project or project workspace 4 | separated from the global settings.json file. 5 | 6 | There are some things to remember here. IntelliSense, if you have it set up, will show you the available settings as you type. 7 | 8 | It is a file of JSON objects, so it looks like: 9 | 10 | ```json 11 | { 12 | "setting.name": "value" 13 | } 14 | ``` 15 | 16 | It is helpful to know that VSCode allows language-specific settings. They are described by: 17 | 18 | "[languagehere]": { 19 | "vscode.specificSetting": "value", 20 | }, 21 | -------------------------------------------------------------------------------- /VSCode/create-a-tasks-json-file.md: -------------------------------------------------------------------------------- 1 | # Create a tasks.json 2 | 3 | There are certain tasks that Visual Studio Code can do. They will be made available in the `Terminal` menu. 
4 | 5 | You can create a file `tasks.json` that will accomplish this. The current "version" of the file is `2.0.0` set in `version`. 6 | 7 | You can use `IntelliSense` to help you create valid `tasks.json` files. 8 | 9 | The official documentation link for this can be seen in the file comment in the example below. 10 | 11 | Here is the file structure: 12 | 13 | ```json 14 | { 15 | // See https://go.microsoft.com/fwlink/?LinkId=733558 16 | // for the documentation about the tasks.json format 17 | "version": "2.0.0", 18 | "tasks": [ 19 | { 20 | "type": "process", 21 | "label": "", 22 | "command": "", 23 | "args": [], 24 | "options": {}, 25 | "problemMatcher": "", 26 | "group": "", 27 | "presentation": {}, 28 | "dependsOn": "", 29 | "isBackground": true, 30 | } 31 | ], 32 | } 33 | ``` 34 | 35 | In `type`: The type of task to be executed. 36 | 37 | These supported `types` are `"shell"`, `"process"`, `"npm"`, `"grunt"`, `"gulp"`, `"jake"`, `"msbuild"`, `"xcodebuild"`, `"ant"`, `"gradle"`, `"lein"`, `"make"`, `"pdflatex"`, `"python"`, `"ruby"`, `"gcc"`, `"clang"`, `"tsc"`, `"dotnet"`, `"docker-build"`, `"docker-run"`, `"docker-push"`, `"docker-compose"`, `"dart"`, `"dart-analyze"`, `"dart-test"`, `"dart-run"`, `"flutter"`, `"flutter-analyze"`, `"flutter-test"`, and `"flutter-run"`. 38 | 39 | In `group`: A string or an object that specifies the group to which the task belongs. 40 | 41 | The supported groups are `"build"`, `"test"`, `"deploy"`, `"clean"`, `"rebuildAll"`, `"buildAll"`, `"testAll"`, `"run"`, `"preview"`, `"install"`, `"uninstall"`, `"configure"`, `"update"`, `"watch"`, `"lint"`, `"format"`, `"extensionDevelopment"`, `"docker"`, `"docker-compose"`, `"remote-ssh"`, `"remote-containers"`, `"remote-wsl"`, `"remote-ssh-edit"`, `"remote-containers-edit"`, and `"remote-wsl-edit"`. 42 | 43 | The order of properties in `tasks.json` does not matter. 
You can define the properties in any order you like, as long as they are valid JSON syntax 44 | -------------------------------------------------------------------------------- /VSCode/extension-suggestion.md: -------------------------------------------------------------------------------- 1 | # Create a extensions.json file 2 | 3 | In order to suggest extensions to other users in a repository project, 4 | you can put a `extensions.json` in your project in the `.vscode` folder. 5 | 6 | The structure of the file is: 7 | 8 | ```json 9 | { 10 | // See https://go.microsoft.com/fwlink/?LinkId=827846 11 | // for the documentation about the extensions.json format 12 | "recommendations": [ 13 | "owner.extension-name" 14 | ] 15 | } 16 | ``` 17 | 18 | I could not find a way to do this automatically from Visual Studio Code. 19 | 20 | From this template file, you can suggest packages by going to: 21 | 22 | 23 | 24 | And searching for the extension(s) you want to recommend to anyone working with your repository. 25 | 26 | Then go to the individual page for the extension you want to recommend and copying and pasting the value listed in `Unique Identifier` on the right side of the page, or you can copy and paste it from the `Installation` field, leaving out the `ext install` . 27 | 28 | The format for the extension name is owner.package and usually is listed in between double quotes. For more than one, use a , after each extension. 29 | 30 | If a user is using GitHub and VSCode, they will get a message from the taskbar saying they have extension recommendations when they open the cloned/forked project in VSCode. From there they can opt out of installing new extensions. 
31 | -------------------------------------------------------------------------------- /VSCode/vscode-workspaces.md: -------------------------------------------------------------------------------- 1 | # VSCode workspaces 2 | 3 | You can open workspaces in several ways 4 | 5 | Double-clicking the .code-workspace file 6 | Using the "File > Open Workspace" command in VS Code and selecting the workspace file 7 | Choosing the workspace from the "File > Open Recent" list (if you've previously opened it) 8 | 9 | Configuration file is typically `yourprojectworkspacenamehere.code-workspace` and is typically also written in JSON format. 10 | 11 | { 12 | // Folders to be included in the workspace 13 | "folders": [ 14 | { 15 | "path": "src" 16 | }, 17 | { 18 | "path": "tests" 19 | } 20 | ], 21 | // Optional: Workspace-specific settings (example: code formatting) 22 | "settings": { 23 | "javascript.format.enable": true, 24 | "javascript.format.insertSpaces": false 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /VSCode/what-is-devcontainer.md: -------------------------------------------------------------------------------- 1 | # Making a devcontainer 2 | 3 | A `.devcontainer` is a configuration file used by the Visual Studio Code Dev Containers extension to create a development environment inside a container. This allows developers to work in a consistent and isolated environment regardless of the host operating system or dependencies installed on the host machine 4 | 5 | To use a `.devcontainer` in Visual Studio Code, you need to have the Dev Containers extension installed. Once installed, you can open a folder in a container by running the `Dev Containers: Open Folder in Container...` command from the Command Palette or quick actions Status bar item. This will create a container based on the configuration specified in the `.devcontainer` file and mount the project folder inside the container. 
The container will then be used as the development environment for the project 6 | 7 | The `.devcontainer` file is a JSON file that specifies the container image to use, the environment variables to set, the extensions to install, and other configuration options for the container. The file is similar to the `launch.json` file used for debugging configurations 8 | 9 | It is worth noting that running the full Visual Studio Code in Windows/Linux containers is not supported, but running with the Visual Studio Code extension is supported. When using the extension, the Visual Studio Code server is running in the container while the Visual Studio Code client is on the desktop. 10 | 11 | The `.devcontainer` configuration file is a JSON file that specifies the container image to use, the environment variables to set, the extensions to install, and other configuration options for the container 12 | 13 | It is typically stored in a `.devcontainer` directory or folder in the root of a project 14 | 15 | The `.devcontainer` configuration file can be placed inside a `.devcontainer` folder or directory in the root of a project and it is not recommended to place it inside the `.vscode` folder. 16 | 17 | Here is the documentation for `.devcontainer` for more reading: 18 | 19 | 20 | This is the extension to install: `ms-vscode-remote.remote-containers` 21 | 22 | Here is an example `devcontainer.json` file: 23 | 24 | ```json 25 | { 26 | "name": "TypeScript & Node.js", 27 | "image": "mcr.microsoft.com/vscode/devcontainers/typescript-node:0-14", 28 | "extensions": [ 29 | "dbaeumer.vscode-eslint", 30 | "esbenp.prettier-vscode" 31 | ], 32 | "settings": { 33 | "terminal.integrated.shell.linux": "/bin/bash" 34 | } 35 | } 36 | ``` 37 | 38 | Looks kind of like a `settings.json` and a `extensions.json` file made into one file. 39 | 40 | There are other properties it will take: 41 | 42 | there are other allowed fields besides name, image, extensions, and settings in the devcontainer.json file. 
Here are some additional fields that can be used: 43 | 44 | - dockerComposeFile: Specifies the Docker Compose file to use to create the container 45 | 46 | - postCreateCommand: Specifies a command to run after the container is created 47 | 48 | - remoteUser: Specifies the user to use when connecting to the container 49 | 50 | - runArgs: Specifies additional arguments to pass to the docker run command when creating the container 51 | 52 | - workspaceFolder: Specifies the path to the workspace folder inside the container 53 | 54 | - appPort: Specifies the port number that the application inside the container is listening on 55 | 56 | - forwardPorts: Specifies a list of ports to forward from the container to the host 57 | 58 | These fields can be used to further customize the development container to fit your needs. For more information on the devcontainer.json file and its fields, refer to the official documentation. 59 | 60 | 61 | -------------------------------------------------------------------------------- /WebDevelopment/aria-accessibility.md: -------------------------------------------------------------------------------- 1 | # About using aria in Web Development projects. 2 | 3 | The most commonly used ARIA attributes are those that help to improve the accessibility of dynamic content, interactive widgets, and complex user interfaces. These attributes are particularly useful for making web applications more accessible to users with disabilities, especially those who rely on assistive technologies like screen readers. There are tons of these attributes. This is not intended to be a complete list of attributes. 4 | 5 | Here's a list of the most commonly used ARIA attributes. 6 | 7 | aria-label: Provides a text description of an element, which is useful for elements that do not have a visible label. It's commonly used for form inputs, buttons, and other interactive elements. 
8 | 9 | aria-labelledby: Points to the ID of another element that labels the current element. This is useful for providing a label for elements that do not have a visible label. 10 | 11 | aria-describedby: Points to the ID of another element that describes the current element. This is useful for providing additional information about an element. 12 | 13 | aria-hidden: Indicates whether an element is exposed to an accessibility API. It's used to hide content from assistive technologies. 14 | 15 | aria-readonly: Indicates whether an element is read-only. This is useful for form elements that cannot be edited by the user. 16 | 17 | aria-required: Indicates that user input is required on the element before submission. This helps assistive technologies inform users about mandatory fields. 18 | 19 | aria-disabled: Indicates whether an element is disabled. This is useful for form elements that are not interactable. 20 | 21 | aria-checked: Indicates the checked state of a checkbox or option element. This is useful for form elements that can be checked or unchecked. 22 | 23 | aria-expanded: Indicates whether a section has expanded content. This is useful for collapsible sections or accordions. 24 | 25 | aria-pressed: Indicates whether a button is pressed. This is useful for toggle buttons or checkboxes. 26 | 27 | aria-selected: Indicates whether an element is selected. This is useful for list items, options, and other elements that can be selected. 28 | 29 | aria-multiselectable: Indicates whether an element that has the role attribute set to listbox, tree, or grid has multiple items that can be selected at once. 30 | 31 | aria-sort: Indicates whether an element contains sort controls. This helps users understand how to sort the content of a table or list. 
32 | 33 | aria-valuemin, aria-valuemax, aria-valuenow, aria-valuetext: These attributes are used with range inputs like sliders and progress bars to provide information about the current value, minimum value, maximum value, and the text equivalent of the current value. 34 | 35 | aria-live: Indicates that an element will be updated, and describes the types of updates the user agents, assistive technologies, and user can expect from the live region. This is crucial for dynamic content that changes over time. 36 | -------------------------------------------------------------------------------- /WebDevelopment/beginning-webdev.md: -------------------------------------------------------------------------------- 1 | # Web Development 2 | 3 | Web Development or WebDev as it is sometimes called, has a wide variety of subjects. 4 | 5 | There is a LOT to learn and cover. I will do my best to cover a great deal of the basics. This is not intended to 6 | be an all encompassing guide or tutorial. 7 | 8 | You should at this point start learning HTML, CSS and JavaScript programming if you haven't already. This is the base 9 | of Web Development. From there you can learn more advanced topics. There are lots of great resources to learn how 10 | to program in these, even some great books. 11 | 12 | You should become familiar with whatever web browsers yourself or your project will be displayed using, especially 13 | any devices you or your project may encounter. 14 | 15 | ## Choosing an editor or IDE for Web Development 16 | 17 | This is a slightly controversial topic. But, just about any editor or IDE will be able to support languages used 18 | in web development. There are a few exceptions. There are a number of them that will work across different platforms 19 | like Linux, Windows and Mac. There are quite a few options, depending on whether you want to use the 20 | command line/shell/terminal/console to use your editor or use the GUI or user interface. 
A number of developers 21 | feel strongly about the editor or IDE they use. You should use the one that gets the job done for you. 22 | 23 | This should go without saying, but you should try a few out before you settle on one and do not rely on what 24 | someone told you their editor or IDE is. You might or might not like the first editor or IDE you choose 25 | 26 | ## About HTML 27 | 28 | Modern HTML is done with HTML5. HTML5 retains most of the tags used by HTML4 & HTML4.01 for backwards compatibility in browsers 29 | Some have been deprecated or no longer used. 30 | 31 | ## Languages used in Web Development 32 | 33 | There are some primary programming languages and scripting languages used in Web Development. These are HTML,CSS and JavaScript/ECMAScript. 34 | 35 | ## Tags used in HTML 36 | 37 | In the HTML language, it uses XML/XHTML/DHTML style of tags that are between `< >`. These are called HTML tags. 38 | 39 | The basic tags are: 40 | 41 | `,
,
,

,

, , , ,