13 |
14 | With custom dimensions in HTML:
15 |
16 |
17 |
18 | With custom dimensions in CSS:
19 |
20 |
21 |
22 | An invalid image file:
23 |
24 |
29 |
--------------------------------------------------------------------------------
/sampleContent/tests/highlight/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | From: Rui Carmo
3 | Date: 2013-07-06 23:33:00
4 | Title: Syntax Highlighting Tests
5 | ---
6 |
7 | ### Git-Flavored Markdown
8 |
9 | The standard triple-backquote form.
10 |
11 | ```clojure
12 | ; parallel consumption of perishable resources
13 | (defn foo [bar drinks]
14 | (pmap (:patrons bar) (lazy-seq drinks)))
15 | ```
16 |
17 | ```python
18 | from bottle import view, request, abort
19 |
20 | @view("rss")
def render_feed():
22 | if not items:
23 | abort("418", "I'm a teapot")
24 | else:
25 | return {"items": items}
26 | ```
27 |
28 | This should be a plain text line.
29 |
--------------------------------------------------------------------------------
/triggerHandler/index.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This is the starter function, which is triggered by a blob event.
3 | */
4 |
5 | const df = require("durable-functions");
6 |
7 | module.exports = async function (context, srcBlob) {
8 | const client = df.getClient(context),
9 | name = context.bindingData.name;
10 |
11 | //length = srcBlob.length;
12 | //context.log("name:", name, " size:", length);
13 |
14 | const instanceId = await client.startNew("renderPipeline", undefined, name);
15 |
16 | //context.log("orchestrator:", instanceId);
17 |
18 | return client.createCheckStatusResponse(name, instanceId)
19 | };
20 |
--------------------------------------------------------------------------------
/sampleContent/_assets/images/functions.svg:
--------------------------------------------------------------------------------
1 |
2 |
8 |
--------------------------------------------------------------------------------
/copyActivity/index.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This function is not intended to be invoked directly. Instead it will be
3 | * triggered by an orchestrator function.
4 | */
5 |
6 | const storage = require("azure-storage"),
7 | blobService = storage.createBlobService(process.env['AzureWebJobsStorage']);
8 |
9 |
// Copy the blob at srcUri into the "$web" container under pathname.
// Adapts the callback-based startCopyBlob API to a Promise; resolves with
// { message: <copy result> } and rejects with the storage SDK error.
// (Removed the redundant `async` keyword: the function already returns a
// Promise, and `async` just wrapped it in a second one.)
const copyBlob = (srcUri, pathname) => {
    return new Promise((resolve, reject) => {
        blobService.startCopyBlob(srcUri, "$web", pathname, (err, data) => {
            if (err) {
                reject(err);
            } else {
                resolve({ message: data });
            }
        });
    });
};
21 |
22 | module.exports = async function (context, name) {
23 | //context.log("copy:", name);
24 | const srcUri = blobService.getUrl('raw-markup', name),
25 | result = await copyBlob(srcUri, name);
26 |
27 | //context.log(result);
28 | return result;
29 | };
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "master" ]
6 | pull_request:
7 | branches: [ "master" ]
8 | schedule:
9 | - cron: "17 10 * * 2"
10 |
11 | jobs:
12 | analyze:
13 | name: Analyze
14 | runs-on: ubuntu-latest
15 | permissions:
16 | actions: read
17 | contents: read
18 | security-events: write
19 |
20 | strategy:
21 | fail-fast: false
22 | matrix:
23 | language: [ javascript ]
24 |
25 | steps:
26 | - name: Checkout
27 | uses: actions/checkout@v3
28 |
29 | - name: Initialize CodeQL
30 | uses: github/codeql-action/init@v2
31 | with:
32 | languages: ${{ matrix.language }}
33 | queries: +security-and-quality
34 |
35 | - name: Autobuild
36 | uses: github/codeql-action/autobuild@v2
37 |
38 | - name: Perform CodeQL Analysis
39 | uses: github/codeql-action/analyze@v2
40 | with:
41 | category: "/language:${{ matrix.language }}"
42 |
--------------------------------------------------------------------------------
/renderTextileActivity/index.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This function is not intended to be invoked directly. Instead it will be
3 | * triggered by an orchestrator function.
4 | */
5 |
6 | const storage = require("azure-storage"),
7 | matter = require("gray-matter"),
8 | textile = require('textile'),
9 | blobService = storage.createBlobService(process.env['AzureWebJobsStorage']);
10 |
11 | const getMarkup = async (blobName) => {
12 | return new Promise((resolve, reject) => {
13 | blobService.getBlobToText("raw-markup", blobName, (err, data) => {
14 | if (err) {
15 | reject(new Error(err)); // fail with runtime error and kill activity
16 | } else {
17 | resolve(data);
18 | }
19 | });
20 | });
21 | };
22 |
23 | module.exports = async function (context, name) {
24 | // context.log(name);
25 |
26 | const response = await getMarkup(name),
27 | page = matter(response);
28 | page.name = name;
29 |
30 | // replace Textile with rendered HTML
31 | page.content = textile(page.content);
32 | return page;
33 | };
--------------------------------------------------------------------------------
/sampleContent/tests/highlight/animate_svg.js:
--------------------------------------------------------------------------------
// Animate SVG paths under .animated: stash each path's fill, "draw" the
// outline via a stroke-dashoffset transition, then fade the fill back in.
const paths = Array.prototype.slice.call(document.querySelectorAll('.animated path'), 0);

// Remember the original fill in a custom "bag" attribute and blank it.
// (forEach instead of map: the callbacks are pure side effects.)
paths.forEach(function (path) {
    const bag = document.createAttribute("bag");
    bag.value = path.style.fill;
    path.setAttributeNode(bag);
    path.style.fill = "white";
});

// Set up and start the outline-drawing animation for each path.
paths.forEach(function (path) {
    const length = path.getTotalLength();
    path.style.stroke = "#000";
    path.style.strokeWidth = "5";
    path.style.transition = path.style.WebkitTransition = 'none';
    path.style.strokeDasharray = length + ' ' + length;
    path.style.strokeDashoffset = length;
    path.getBoundingClientRect(); // force a reflow so the next transition runs
    path.style.transition = path.style.WebkitTransition = 'stroke-dashoffset 2s ease-in-out';
    path.style.strokeDashoffset = '0';
});

// After the outline is drawn, restore the original fill and drop the stroke.
setTimeout(function () {
    paths.forEach(function (path) {
        path.style.transition = path.style.WebkitTransition = 'fill 2s ease, stroke-width 2s ease';
        path.style.fill = path.getAttribute("bag");
        path.style.strokeWidth = "0";
    });
}, 3000);
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Rui Carmo
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "azure-durable-functions-node-blog-engine",
3 | "version": "1.0.0",
4 | "description": "This is a static file generator that demonstrates how to build a blob storage processing pipeline with durable functions",
5 | "main": "index.js",
6 | "dependencies": {
7 | "durable-functions": "^3.0.0",
8 | "cheerio": "^1.0.0-rc.3",
9 | "gray-matter": "^4.0.2",
10 | "remarkable": "^2.0.0",
11 | "moment": "^2.24.0",
12 | "mustache": "^4.0.0",
13 | "textile-js": "^2.0.4",
14 | "highlight.js": "^11.0.1",
15 | "dompurify": "^3.0.1",
16 | "azure-storage": "^2.10.3",
17 | "mime-types": "^2.1.24"
18 | },
19 | "devDependencies": {},
20 | "scripts": {
21 | "test": "echo \"Error: no test specified\" && exit 1"
22 | },
23 | "repository": {
24 | "type": "git",
25 | "url": "git+https://github.com/rcarmo/azure-durable-functions-node-blog-engine.git"
26 | },
27 | "author": "Rui Carmo",
28 | "license": "MIT",
29 | "bugs": {
30 | "url": "https://github.com/rcarmo/azure-durable-functions-node-blog-engine/issues"
31 | },
32 | "homepage": "https://github.com/rcarmo/azure-durable-functions-node-blog-engine#readme"
33 | }
34 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# This grabs the function app name (assuming you set up a remote called "production" correctly)
FUNCTION_APP_NAME?=$(shell git remote get-url production | awk -F[/:] '{print $$4}' | cut -f 1 -d.)
# This assumes the function app lives in a resource group with the same name
RESOURCE_GROUP?=${FUNCTION_APP_NAME}
# Sanity setting for when I run this from a machine with zsh
SHELL=bash

# Safe default
debug:
	@echo $(FUNCTION_APP_NAME)

# Sync the sample content to the incoming blob container
sync:
# Grab the storage connection string and set the environment so we don't have to keep auth keys around
	@$(eval export AZURE_STORAGE_CONNECTION_STRING := $(shell az webapp config appsettings list \
		--name $(FUNCTION_APP_NAME) \
		--resource-group $(RESOURCE_GROUP) \
		| jq -r '.[] | select (.name == "AzureWebJobsStorage").value'))
# Cleanup Mac filesystem metadata
# FIX: the old `find . -print | grep .DS_Store | xargs rm` ran a bare `rm`
# (an error) when nothing matched, and the unescaped dot in the grep pattern
# could match unrelated paths; -delete handles both safely.
	@find . -name '.DS_Store' -delete
# Sync requires azcopy to be installed
	az storage blob sync --source sampleContent --container raw-markup

# Touch everything so the next sync re-uploads all content.
# NOTE(review): `xargs -J` is BSD-specific — confirm this is only run on macOS.
sync-all:
	find sampleContent -print | xargs -J % touch %; make sync

# Squash the last N commits into one (git-rewind-3 squashes the last 3)
git-rewind-%:
	git reset --soft HEAD~$* && git commit
--------------------------------------------------------------------------------
/sampleContent/docs/internals/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | From: Rui Carmo
3 | Date: 2014-11-03 19:09:00
4 | Last-Modified: 2014-12-29 11:15:00
5 | Title: Internals
6 | ---
7 |
8 | ## Azure Durable Functions
9 |
10 | This simple blog engine is written in NodeJS atop Azure Durable Functions:
11 |
12 | 
13 |
14 | The following is a short description of each function:
15 |
16 | ### `triggerHandler`
17 |
18 | This is a simple blob trigger that is invoked whenever a file is modified in Azure storage.
19 |
20 | ### `renderPipeline`
21 |
22 | This is the pipeline orchestrator, which figures out what needs to be done for each filetype.
23 |
24 | ### `renderMarkdownActivity`
25 |
26 | This is the Markdown formatter, which takes the raw markup and issues a page object containing HTML.
27 |
28 | ### `renderTextileActivity`
29 |
30 | This is the Textile formatter, which takes the raw markup and issues a page object containing HTML.
31 |
32 | ### `renderTemplateActivity`
33 |
34 | This is the template formatter, which takes the HTML, rewrites relative URLs, and writes it to blob storage.
35 |
36 | ### `copyActivity`
37 |
38 | This is a straightforward blob-to-blob copy activity for non-markup files (images, etc.). No transformations are currently performed.
39 |
40 | ## Monitoring
41 |
42 | Since Azure Functions is usually deployed with Application Insights enabled, you have real-time monitoring and tracing, as well as per-activity metrics:
43 |
44 | 
45 |
--------------------------------------------------------------------------------
/renderMarkdownActivity/index.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This function is not intended to be invoked directly. Instead it will be
3 | * triggered by an orchestrator function.
4 | */
5 |
6 | const storage = require("azure-storage"),
7 | matter = require("gray-matter"),
8 | remarkable = require('remarkable'),
9 | hljs = require('highlight.js'),
10 | blobService = storage.createBlobService(process.env['AzureWebJobsStorage']);
11 |
// Markdown renderer with syntax highlighting delegated to highlight.js.
const md = new remarkable.Remarkable({
    highlight: function (str, lang) {
        if (lang && hljs.getLanguage(lang)) {
            try {
                // FIX: highlight.js v11 (pinned in package.json) takes the
                // code first and the language in an options object; the old
                // highlight(lang, str) form throws in v11, so this branch
                // always fell through to auto-detection.
                return hljs.highlight(str, { language: lang }).value;
            } catch (err) {} // fall through to auto-detection
        }

        try {
            return hljs.highlightAuto(str).value;
        } catch (err) {} // fall through to default escaping

        return ''; // use external default escaping
    },
    typographer: true,
    html: true
});
29 |
// Fetch the named blob from the raw-markup container as text, wrapping the
// callback-style storage API in a Promise. Rejects with an Error so the
// activity fails loudly instead of returning partial data.
const getMarkup = async (blobName) => {
    return new Promise((resolve, reject) => {
        blobService.getBlobToText("raw-markup", blobName, (err, text) => {
            if (err) {
                reject(new Error(err)); // surface as a runtime error, killing the activity
                return;
            }
            resolve(text);
        });
    });
};
41 |
42 | module.exports = async function (context, name) {
43 | //context.log(name);
44 | const response = await getMarkup(name),
45 | page = matter(response);
46 | page.name = name;
47 | // replace Markdown with rendered HTML
48 | page.content = md.render(page.content);
49 | return page;
50 | };
--------------------------------------------------------------------------------
/renderPipeline/index.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This function is not intended to be invoked directly. Instead it will be
3 | * triggered by a starter function.
4 | */
5 |
// Table driving the orchestrator: each entry lists the source-file
// extensions it applies to and the ordered pipeline of activities to run
// for files with those extensions. Unmatched extensions fall back to a
// plain copy (see the orchestrator below).
const df = require('durable-functions'),
      path = require('path'),
      activityMap = {
        // Markdown sources: render to HTML, then wrap in the page template.
        renderMarkdownActivity: {
          extensions: ['.md', '.mkd', '.markdown'],
          pipeline: ['renderMarkdownActivity', 'renderTemplateActivity']
        },
        // Textile sources: render to HTML, then wrap in the page template.
        renderTextileActivity: {
          extensions: ['.text', '.textile'],
          pipeline: ['renderTextileActivity', 'renderTemplateActivity']
        },
        /* Render HTML snippets directly (disabled to allow custom content pass-through)
        renderTemplateActivity: {
          extensions: ['.htm', '.html'],
          pipeline: ['renderTemplateActivity']
        }
        */
      };
24 |
25 | module.exports = df.orchestrator(function* (context) {
26 | const outputs = [],
27 | name = context.df.getInput(),
28 | extension = path.extname(name);
29 |
30 | var pipeline = ["copyActivity"],
31 | currentItem = name;
32 |
33 | // context.log("pipeline:", pipeline, name, extension);
34 |
35 | Object.keys(activityMap).forEach(key => {
36 | if(activityMap[key].extensions.includes(extension)) {
37 | pipeline = activityMap[key].pipeline;
38 | }
39 | })
40 |
41 | // all activities we build should return a JS object
42 | for(let activity of pipeline) {
43 | // context.log("running:", activity);
44 | currentItem = yield context.df.callActivity(activity, currentItem);
45 | //context.log(currentItem);
46 | if(currentItem == null) { // the activity has failed
47 | context.log("error:", activity)
48 | yield cancel(activity)
49 | break
50 | }
51 | outputs.push(currentItem);
52 | }
53 | return outputs;
54 | });
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | ###############################################################################
7 | # Set default behavior for command prompt diff.
8 | #
# This is needed for earlier builds of msysgit that do not have it on by
# default for csharp files.
11 | # Note: This is only used by command line
12 | ###############################################################################
13 | #*.cs diff=csharp
14 |
15 | ###############################################################################
16 | # Set the merge driver for project and solution files
17 | #
18 | # Merging from the command prompt will add diff markers to the files if there
19 | # are conflicts (Merging from VS is not affected by the settings below, in VS
20 | # the diff markers are never inserted). Diff markers may cause the following
21 | # file extensions to fail to load in VS. An alternative would be to treat
22 | # these files as binary and thus will always conflict and require user
23 | # intervention with every merge. To do so, just uncomment the entries below
24 | ###############################################################################
25 | #*.sln merge=binary
26 | #*.csproj merge=binary
27 | #*.vbproj merge=binary
28 | #*.vcxproj merge=binary
29 | #*.vcproj merge=binary
30 | #*.dbproj merge=binary
31 | #*.fsproj merge=binary
32 | #*.lsproj merge=binary
33 | #*.wixproj merge=binary
34 | #*.modelproj merge=binary
35 | #*.sqlproj merge=binary
36 | #*.wwaproj merge=binary
37 |
38 | ###############################################################################
39 | # behavior for image files
40 | #
41 | # image files are treated as binary by default.
42 | ###############################################################################
43 | #*.jpg binary
44 | #*.png binary
45 | #*.gif binary
46 |
47 | ###############################################################################
48 | # diff behavior for common document formats
49 | #
50 | # Convert binary document formats to text before diffing them. This feature
51 | # is only available from the command line. Turn it on by uncommenting the
52 | # entries below.
53 | ###############################################################################
54 | #*.doc diff=astextplain
55 | #*.DOC diff=astextplain
56 | #*.docx diff=astextplain
57 | #*.DOCX diff=astextplain
58 | #*.dot diff=astextplain
59 | #*.DOT diff=astextplain
60 | #*.pdf diff=astextplain
61 | #*.PDF diff=astextplain
62 | #*.rtf diff=astextplain
63 | #*.RTF diff=astextplain
64 |
--------------------------------------------------------------------------------
/sampleContent/docs/basics/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | From: Rui Carmo
3 | Date: 2014-10-04 09:17:00
4 | Title: Content Basics
5 | ---
6 |
7 | ## Files and Folders
8 |
9 | This simple blog engine expects you to organize your content using _a folder per page_ with an "index" document inside. The folder path determines the URL it's published under (so you get "nice" URLs by default), and this makes it easier to manage media assets on a per-post basis:
10 |
11 | 
12 |
13 | ## Uploading Content
14 |
15 | The easiest way to batch upload content is using `az` (which uses `azcopy sync` under the covers). Just take your Azure Storage connection string and do this:
16 |
17 | ```bash
18 | export AZURE_STORAGE_CONNECTION_STRING="