195 Commits

Author SHA1 Message Date
Tommy Parnell
a74dec639a push filter 2023-10-11 15:38:53 -04:00
Tommy Parnell
f93aea2696 rm old versions 2023-08-10 07:29:16 -04:00
Tommy Parnell
63828b3003 Create an auto-deploy file 2023-08-09 11:09:27 -04:00
Tommy Parnell
1b372b3501 reduce log 2022-11-14 17:08:59 -05:00
Tommy Parnell
5edba65844 rm some script 2022-11-14 16:41:42 -05:00
Tommy Parnell
afcae26bc1 push js 2022-11-14 16:13:23 -05:00
Tommy Parnell
5a253f6ada missed a dockerfile 2022-11-09 11:47:09 -05:00
Tommy Parnell
e8c548ab20 set target 2022-11-09 11:46:17 -05:00
Tommy Parnell
0871e037d8 dotnet 7 2022-11-09 11:43:42 -05:00
Tommy Parnell
a5931f48c6 sitemap 2022-08-26 13:35:50 -04:00
Tommy Parnell
2cff53f5d3 case insensitive url on post 2022-08-26 13:28:41 -04:00
Tommy Parnell
e065871145 revert some things 2022-08-21 22:40:44 -04:00
Tommy Parnell
5cf4086872 Revert "add disable gtm to home"
This reverts commit d3638b10c0.
2022-08-18 19:30:41 -04:00
Tommy Parnell
d3638b10c0 add disable gtm to home 2022-08-18 19:23:06 -04:00
Tommy Parnell
bd790926b3 GTM again 💣 2022-08-11 15:29:54 -04:00
Tommy Parnell
2753099f72 plausable, no amp, no google analytics 2022-08-11 13:59:47 -04:00
Tommy Parnell
20dc7ad932 fix example 2022-07-23 17:29:31 -04:00
Tommy Parnell
3892cb578e commander 2022-07-08 22:07:01 -04:00
Tommy Parnell
c6ee8f8193 no vary by user agent 2022-06-28 17:06:14 -04:00
Tommy Parnell
6147b840f2 Merge branch 'etags' 2022-06-28 16:40:33 -04:00
Tommy Parnell
d032ffcf82 etags 2022-06-28 16:40:28 -04:00
Tommy Parnell
ac28c642f8 Revert "better etag filter"
This reverts commit e8e9a1caa7.
2022-06-25 12:43:27 -04:00
Tommy Parnell
078da9731b etags 2022-06-25 12:43:17 -04:00
Tommy Parnell
e8e9a1caa7 better etag filter 2022-06-25 10:26:41 -04:00
Tommy Parnell
cde154ee3b strong etag 2022-06-19 14:52:52 -04:00
Tommy Parnell
f97bc8d938 make etag a middleware before output cache 2022-06-19 14:29:01 -04:00
Tommy Parnell
72824b70a0 add hit/miss 2022-06-19 13:55:29 -04:00
Tommy Parnell
87f50e1324 Merge branch 'master' of github.com:TerribleDev/blog.terribledev.io.dotnet 2022-06-19 13:48:55 -04:00
Tommy Parnell
b316cc7e8e etag all pages 2022-06-19 13:48:48 -04:00
Tommy Parnell
cc34f198a8 Delete captain-definition 2022-06-15 18:13:15 -04:00
Tommy Parnell
c1687cccf5 Create an auto-deploy file 2022-06-15 18:04:21 -04:00
Tommy Parnell
e6d7240996 Unlink the containerApp tparnellblogcontainerapp from this repo 2022-06-15 18:01:44 -04:00
Tommy Parnell
6b9e0c8fe3 Create an auto-deploy file 2022-06-15 17:55:24 -04:00
Tommy Parnell
910a5fee16 Unlink the containerApp tparnellblogcontainerapp from this repo 2022-06-15 17:54:26 -04:00
Tommy Parnell
6aec3294dc Create an auto-deploy file 2022-06-15 12:23:37 -04:00
Tommy Parnell
2e5161b949 rm some ld stuff 2022-06-01 23:06:49 -04:00
Tommy Parnell
2be516f73b only have prism styles on markdown pages 2022-06-01 22:41:31 -04:00
Tommy Parnell
f2198e2e06 inline critical css 2022-06-01 21:51:39 -04:00
Tommy Parnell
3136961955 fix startup 2022-06-01 21:39:45 -04:00
Tommy Parnell
844edb8fc1 add caching back 2022-06-01 21:36:26 -04:00
Tommy Parnell
96d06da5e4 rename push header 2022-06-01 21:28:56 -04:00
Tommy Parnell
16572e3e6c output cache html 2022-06-01 12:41:24 -04:00
Tommy Parnell
cc0b7c506c del old actions 2022-06-01 12:14:45 -04:00
Tommy Parnell
172a33a27c Add or update the App Service deployment workflow configuration from Azure Portal. 2022-06-01 12:13:28 -04:00
Tommy Parnell
27c2d2eaf4 Merge branch 'master' of github.com:TerribleDev/blog.terribledev.io.dotnet 2022-06-01 12:00:43 -04:00
Tommy Parnell
b79070c855 trick lighthouse 2022-06-01 12:00:36 -04:00
Tommy Parnell
a888216b61 Add or update the Azure App Service build and deployment workflow config 2022-05-05 13:19:16 -04:00
Tommy Parnell
fbab80753f master 2022-04-20 12:00:45 -04:00
Tommy Parnell
1dca63e128 master 2022-04-20 11:59:22 -04:00
Tommy Parnell
14d250ca9b node 2022-04-20 11:42:30 -04:00
Tommy Parnell
603ea65456 try cap 2022-04-20 11:39:50 -04:00
Tommy Parnell
c23158998c sitemap 2022-04-20 10:03:42 -04:00
Tommy Parnell
a146b39eef hosting craft on heroku 2022-04-13 21:00:42 -04:00
Tommy Parnell
bcbe44f86a replit embed 2022-04-13 13:38:13 -04:00
Tommy Parnell
3d62a12bfc squiggles 2022-04-13 12:14:33 -04:00
Tommy Parnell
b3bd3492ee use lazy load html instead of custom JS 2022-03-30 14:24:58 -04:00
Tommy Parnell
83a41395e7 add cli 2022-03-26 09:02:42 -07:00
Tommy Parnell
3b9c978b92 fix some issues 2022-03-18 15:36:57 -04:00
Tommy Parnell
e1919b5983 uhh amp 2022-03-16 15:09:06 -04:00
Tommy Parnell
c51cf330f8 fix layout 2022-03-16 14:59:57 -04:00
Tommy Parnell
84fc8e09e8 amp fixes 2022-03-16 14:51:43 -04:00
Tommy Parnell
fcc975bfbe gtm 2022-03-16 14:42:39 -04:00
Tommy Parnell
e9a222afc2 no layout shift 2022-03-14 18:28:41 -04:00
Tommy Parnell
51afa4389e canonical subdomain 2022-03-14 15:47:43 -04:00
Tommy Parnell
80233f3870 use azure 2022-03-11 01:51:45 -05:00
Tommy Parnell
6303af4640 prismmm 2022-03-10 22:31:39 -05:00
Tommy Parnell
1d49ce2ec5 amp pages with dotnet core 2022-03-10 20:07:08 -05:00
Tommy Parnell
bd7541f69e about more 2022-03-08 14:35:58 -05:00
Tommy Parnell
aae2a1d9e5 landing 2022-03-08 13:48:37 -05:00
Tommy Parnell
7e161d8d33 use pipe and not dash in title 2022-03-04 22:51:53 -05:00
Tommy Parnell
2a32bd1911 prod 2022-03-04 17:23:27 -05:00
Tommy Parnell
b99549c767 cache by amp 2022-03-04 17:19:47 -05:00
Tommy Parnell
3dfb19673a back to old 2022-03-04 15:29:07 -05:00
Tommy Parnell
7def19398b meh 2022-03-04 15:25:55 -05:00
Tommy Parnell
a04652aeb7 nav 2022-03-04 15:22:34 -05:00
Tommy Parnell
039262749a derp 2022-03-04 15:17:37 -05:00
Tommy Parnell
83eb1bc565 amp 2022-03-04 15:06:22 -05:00
Tommy Parnell
9952ef6aa7 more jsonld 2022-03-03 21:22:16 -05:00
Tommy Parnell
4f28b9d10c sitelink searchbox 2022-03-03 17:14:01 -05:00
Tommy Parnell
1c10d8ff6d make string before start 2022-03-02 14:08:24 -05:00
Tommy Parnell
25f0c83300 add true to schema context 2022-03-02 13:56:47 -05:00
Tommy Parnell
3aa14c37e1 adjust titles 2022-03-02 11:28:49 -05:00
Tommy Parnell
392e61fe18 author url 2022-03-02 11:23:28 -05:00
Tommy Parnell
4984f53efe sitelink fix 2022-03-02 11:05:47 -05:00
Tommy Parnell
6a2e91275a summary 2022-03-01 23:02:28 -05:00
Tommy Parnell
a3db1317f9 heroku 2022-03-01 22:58:21 -05:00
Tommy Parnell
c230ff6fec sitelink searchbox 2022-03-01 22:32:20 -05:00
Tommy Parnell
71d8f2455d breadcrumbs 2022-03-01 18:37:28 -05:00
Tommy Parnell
c938c73844 readme 2022-02-26 21:39:59 -05:00
Tommy Parnell
2924655fe2 rm unused file 2022-02-26 21:32:14 -05:00
Tommy Parnell
df69206e71 kill key 2022-02-26 21:23:30 -05:00
Tommy Parnell
0a6aeee439 readme 2022-02-26 21:23:06 -05:00
Tommy Parnell
65a24d00b8 rm things 2022-02-26 21:01:06 -05:00
Tommy Parnell
dc5bbec95a adjust txt 2022-02-26 19:47:02 -05:00
Tommy Parnell
7566baf19f craft on heroku 2022-02-26 19:31:17 -05:00
Tommy Parnell
c72f60c709 rss 2022-02-24 10:00:49 -05:00
Tommy Parnell
97bfd056bd kill some whitespace 2022-02-15 17:35:26 -05:00
Tommy Parnell
70b745a2ef bold 2022-02-15 11:35:43 -05:00
Tommy Parnell
955cc7a4ad rm extra tag 2022-02-15 11:34:59 -05:00
Tommy Parnell
c754c4f8e8 include link to repo 2022-02-15 11:34:37 -05:00
Tommy Parnell
8ce39bf4e5 turborepo 2022-02-15 10:07:40 -05:00
Tommy Parnell
beab60730b try 2 2022-01-25 17:54:44 -05:00
Tommy Parnell
ac4e00b95c cap deploy 2022-01-25 17:53:52 -05:00
Tommy Parnell
778d282640 change sw 2022-01-25 17:43:30 -05:00
Tommy Parnell
1ec0bf9f82 fix schema 2022-01-25 15:55:28 -05:00
Tommy Parnell
386865aad2 root file 2022-01-25 15:54:08 -05:00
Tommy Parnell
323ab5ff1b definition 2022-01-25 15:38:29 -05:00
Tommy Parnell
9473478c2c gtm 2022-01-24 17:07:16 -05:00
Tommy Parnell
b531e0979a gtm 2022-01-24 17:00:06 -05:00
Tommy Parnell
94808c3c6a Merge branch 'master' of github.com:TerribleDev/blog.terribledev.io.dotnet 2022-01-07 22:17:57 -05:00
Tommy Parnell
991f02a469 by gtm 2022-01-07 22:17:28 -05:00
Tommy Parnell
74d7e45e80 mini containers 2022-01-07 22:05:37 -05:00
Tommy Parnell
c8996d3ef5 Revert "stop deploying to heroku again"
This reverts commit 4795c49828.
2022-01-07 19:20:03 -05:00
Tommy Parnell
422cae2ff3 add dot 2022-01-03 11:32:08 -05:00
Tommy Parnell
81e627545f no dot 2022-01-03 11:27:39 -05:00
Tommy Parnell
4fddff2922 dotnet 6 2021-12-23 18:06:49 -05:00
Tommy Parnell
f809cd2fde adjust result 2021-10-24 20:07:10 -04:00
Tommy Parnell
4795c49828 stop deploying to heroku again 2021-10-18 22:22:56 -04:00
Tommy Parnell
e248080246 net 5 2021-10-18 22:09:34 -04:00
Tommy Parnell
629240eff4 use 5.0 runtime 2021-10-18 22:04:29 -04:00
Tommy Parnell
1d5efc2d8d use the same container 2021-10-18 21:56:52 -04:00
Tommy Parnell
449cd6d6d1 use 5.0 image 2021-10-18 21:51:41 -04:00
Tommy Parnell
f300df90c1 lazyLoad (#13)
* lazy load posts
2021-10-18 21:48:08 -04:00
Tommy Parnell
27151adf65 spelling mistake 2021-10-13 18:03:59 -04:00
Tommy Parnell
0f2b013b93 fix copy 2021-10-13 18:03:28 -04:00
Tommy Parnell
c050de459c heroku monorepo 2021-10-12 14:14:51 -04:00
Tommy Parnell
88e0ae9b64 update 2021-08-01 16:35:28 -04:00
Tommy Parnell
bbbac28221 keyz 2021-08-01 16:32:12 -04:00
Tommy Parnell
2b9ed4093a forwarded protocol 2021-08-01 16:27:32 -04:00
Tommy Parnell
99765d4267 link to proc 2021-07-19 19:20:56 -07:00
Tommy Parnell
84356eba1e fix blurb 2021-07-19 19:19:55 -07:00
Tommy Parnell
d46c44dc97 some minor editing 2021-07-19 16:18:15 -07:00
Tommy Parnell
0ddf58d2dc rm file 2021-07-19 16:15:29 -07:00
Tommy Parnell
0983225d76 dotnet core on heroku 2021-07-19 14:19:24 -04:00
Tommy Parnell
5b56e9cd57 if port is defined use that 2021-07-08 23:19:52 -04:00
Tommy Parnell
8a1a71b46d hmm try heroku? 2021-07-08 23:09:55 -04:00
Tommy Parnell
07f4a06c49 build prs (#12)
* Build all PRs
* Add new api
2021-06-17 19:35:39 -04:00
Tommy Parnell
9cdc25ff1c whitespace commit 2021-06-17 19:06:54 -04:00
Tommy Parnell
005ee858b2 Adding workflow file 2021-06-17 19:03:53 -04:00
Tommy Parnell
354c766881 skip to content link 2021-02-15 14:20:03 -05:00
tparnell
c312b49598 fix summary url 2020-10-13 15:13:14 -04:00
Tommy Parnell
c6daaf2f03 fix url 2020-09-01 20:08:11 -04:00
Tommy Parnell
f92df0b0ea resolve merge 2020-09-01 20:06:51 -04:00
Tommy Parnell
f576961974 fix link so it has path 2020-09-01 00:55:33 -04:00
Tommy Parnell
590a16dd77 fix some responsive issues 2020-08-21 23:36:11 -04:00
Tommy Parnell
a99d053f8d grid finished 2020-08-21 22:40:27 -04:00
Tommy Parnell
d3946d61d4 grid 2020-08-09 22:33:33 -04:00
Tommy Parnell
de0fafbc74 fix header 2020-08-09 19:58:43 -04:00
Tommy Parnell
8fd4dbd4ec dark theme 2020-08-09 19:27:47 -04:00
Tommy Parnell
3795a95ed0 make nav sticky 2020-08-09 18:08:05 -04:00
tparnell
0dce62ab99 split out desktop vs mobile 2020-08-09 13:35:30 -04:00
tparnell
dbea3664d6 fix gist lint 2020-08-09 10:25:33 -04:00
Tommy Parnell
e26c47f91f add my face back 2020-08-08 23:25:52 -04:00
Tommy Parnell
58e34d8177 A11y (#11)
* Accessibility Driven Development
2020-08-08 22:19:37 -04:00
Tommy Parnell
0e36180218 a11y done 2020-08-08 22:18:35 -04:00
tparnell
a245a21e02 a11y 2020-08-08 19:45:04 -04:00
tparnell
792919cb70 stop 2020-08-07 18:37:55 -04:00
tparnell
934a762939 preload script 2020-08-07 17:15:11 -04:00
Tommy Parnell
be76863dc2 fix up css better 2020-08-07 02:49:10 -04:00
Tommy Parnell
da8a0d9a4f 3.1 merge 2020-08-01 18:47:09 -04:00
Tommy Parnell
71cf945baf canonical tags, dotnet 3.1 2020-08-01 18:38:12 -04:00
tparnell
abacb42468 switch domains 2019-03-13 09:36:57 -04:00
tparnell
198f99f7f1 redirect plural tags to singular 2019-03-09 08:39:57 -05:00
tparnell
4fe77edfbc its 2019 duh 2019-02-23 14:33:44 -05:00
tparnell
78e73f2a4c Merge branch 'master' of github.com:TerribleDev/blog.terribledev.io.dotnet 2019-02-23 14:09:34 -05:00
tparnell
521b335f8a fix some nav stuff 2019-02-23 14:09:27 -05:00
tparnell
e2ad204571 web performance tips 2019-02-23 14:05:19 -05:00
Tommy Parnell
755c03303b fix media 2019-02-20 05:33:09 -05:00
Tommy Parnell
4c25db4039 prevent navbar reflow on load 2019-02-17 09:10:38 -05:00
Tommy Parnell
c3a583a33b add robots 2019-02-08 00:11:31 -05:00
Tommy Parnell
718b938a76 case insensitive tags 2019-02-07 23:20:44 -05:00
Tommy Parnell
415b62e1e7 get better 404 info 2019-02-07 23:13:13 -05:00
Tommy Parnell
e367072f21 alexa skills fix 2019-02-07 23:01:11 -05:00
tparnell
1327d87b96 site creator 2019-02-07 20:39:38 -05:00
tparnell
39dfcfe70d another twit meta 2019-02-07 20:37:16 -05:00
tparnell
86cb0cec23 more mettah fixes 2019-02-07 19:14:12 -05:00
tparnell
f87bd029f4 include redux gist 2019-02-07 18:47:05 -05:00
tparnell
69b2112e4b precconnect to analytics domain vs 2019-02-07 18:43:51 -05:00
tparnell
0b57031fc1 inject deps, fix links for rss feeds 2019-02-07 18:10:41 -05:00
tparnell
71929bb1ba fix external links 2019-02-07 16:06:52 -05:00
tparnell
b1959082dc I think I got links working 2019-02-06 22:38:30 -05:00
tparnell
be5c4cc806 fix rss 2019-02-06 22:32:14 -05:00
tparnell
ca0344c902 hopefully this fixes rss readers 2019-02-06 20:55:23 -05:00
tparnell
5a844f34f9 add img back in meta 2019-02-06 20:42:25 -05:00
tparnell
57a129cf8d tools for frontend devs 2019-02-06 19:12:18 -05:00
tparnell
a31b9d4fa9 stop 2019-02-06 19:01:46 -05:00
Tommy Parnell
7013e61c2f alt text for pictures 2019-02-06 07:56:24 -05:00
Tommy Parnell
6ce47adb8a webp avatar 2019-02-06 07:53:29 -05:00
tparnell
b9b9e81213 support webp 2019-02-05 23:32:21 -05:00
tparnell
53b8b448da webp 2019-02-05 23:32:21 -05:00
tparnell
aa6ed52d93 picture elem 2019-02-05 23:32:21 -05:00
Tommy Parnell
16c10c9ca1 app insights 2019-02-03 22:37:57 -05:00
Tommy Parnell
c3cb61619b fix some spacing 2019-02-03 21:40:49 -05:00
Tommy Parnell
7ff61450f9 Merge branch 'master' of github.com:TerribleDev/blog.terribledev.io.dotnet 2019-02-03 21:22:06 -05:00
Tommy Parnell
f3faede79e cache home page and posts in cf for 15 minutes 2019-02-03 21:21:57 -05:00
211 changed files with 4054 additions and 566 deletions


@@ -4,6 +4,6 @@
.gitignore
.vs
.vscode
*/bin
*/obj
**/bin
**/obj
**/.toolstarget

.github/workflows/blogcontainergroup-AutoDeployTrigger-ab8fcfc6-eced-47ac-8584-4f5a983b4ee2.yml

@@ -0,0 +1,42 @@
name: Trigger auto deployment for blogcontainergroup

# When this action will be executed
on:
  # Automatically trigger it when detected changes in repo
  push:
    branches:
      [ master ]
    paths:
      - '**'
      - '.github/workflows/blogcontainergroup-AutoDeployTrigger-ab8fcfc6-eced-47ac-8584-4f5a983b4ee2.yml'

  # Allow manual trigger
  workflow_dispatch:

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout to the branch
        uses: actions/checkout@v2

      - name: Azure Login
        uses: azure/login@v1
        with:
          creds: ${{ secrets.BLOGCONTAINERGROUP_AZURE_CREDENTIALS }}

      - name: Build and push container image to registry
        uses: azure/container-apps-deploy-action@v1
        with:
          appSourcePath: ${{ github.workspace }}
          registryUrl: terribledevreg.azurecr.io
          registryUsername: ${{ secrets.BLOGCONTAINERGROUP_REGISTRY_USERNAME }}
          registryPassword: ${{ secrets.BLOGCONTAINERGROUP_REGISTRY_PASSWORD }}
          containerAppName: blogcontainergroup
          resourceGroup: ContainerGroup
          imageToBuild: terribledevreg.azurecr.io/blogcontainergroup:${{ github.sha }}

.github/workflows/buildContainer.yml

@@ -0,0 +1,14 @@
on:
  pull_request:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Github Action'
        uses: actions/checkout@master

      - name: Build image 🔧
        run: |
          docker build "$GITHUB_WORKSPACE/src/TerribleDev.Blog.Web" -f "src/TerribleDev.Blog.Web/Dockerfile"

.vscode/extensions.json

@@ -0,0 +1,13 @@
{
  // See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
  // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
  // List of extensions which should be recommended for users of this workspace.
  "recommendations": [
    "ban.spellright"
  ],
  // List of extensions recommended by VS Code that should not be recommended for users of this workspace.
  "unwantedRecommendations": [
  ]
}

.vscode/launch.json

@@ -9,7 +9,7 @@
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/src/TerribleDev.Blog.Web/bin/Debug/netcoreapp2.2/TerribleDev.Blog.Web.dll",
"program": "${workspaceFolder}/src/TerribleDev.Blog.Web/bin/Debug/net7.0/TerribleDev.Blog.Web.dll",
"args": [],
"cwd": "${workspaceFolder}/src/TerribleDev.Blog.Web",
"stopAtEntry": false,
@@ -42,4 +42,4 @@
"processId": "${command:pickProcess}"
}
]
}
}

.vscode/settings.json

@@ -0,0 +1,19 @@
{
  "files.eol": "\n",
  "spellchecker.language": "en_US",
  "spellchecker.ignoreWordsList": [
    "dotnet",
    "csproj's",
    "VS2017",
    "vs2017",
    "refactor"
  ],
  "spellchecker.documentTypes": [
    "markdown",
    "latex",
    "plaintext"
  ],
  "spellchecker.ignoreRegExp": [],
  "spellchecker.ignoreFileExtensions": [],
  "spellchecker.checkInterval": 5000
}

.vscode/spellright.dict

@@ -0,0 +1,7 @@
intellisense
docker
env
mydocklinting
eslint
webapps
wordpress

Dockerfile

@@ -0,0 +1,26 @@
# https://hub.docker.com/_/microsoft-dotnet
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
WORKDIR /source
# copy csproj and restore as distinct layers
COPY ./src/TerribleDev.Blog.Web/*.csproj .
RUN dotnet restore -r linux-musl-x64 /p:PublishReadyToRunComposite=true
# copy everything else and build app
COPY ./src/TerribleDev.Blog.Web/ .
RUN dotnet publish -c release -o /app -r linux-musl-x64 --self-contained true --no-restore /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
RUN date +%s > /app/buildtime.txt
# final stage/image
FROM mcr.microsoft.com/dotnet/runtime-deps:7.0-alpine-amd64
WORKDIR /app
COPY --from=build /app ./
# See: https://github.com/dotnet/announcements/issues/20
# Uncomment to enable globalization APIs (or delete)
# ENV \
# DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false \
# LC_ALL=en_US.UTF-8 \
# LANG=en_US.UTF-8
# RUN apk add --no-cache icu-libs
ENTRYPOINT ["./TerribleDev.Blog.Web"]

Dockerfile.old

@@ -0,0 +1,12 @@
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
WORKDIR /app
# Copy everything else and build
COPY /src/TerribleDev.Blog.Web .
RUN dotnet publish -c release -o /out -r linux-musl-x64 --self-contained true /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
RUN date +%s > /out/buildtime.txt
# Build runtime image
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-alpine-amd64
WORKDIR /app
COPY --from=build /app/out .
ENTRYPOINT ["./TerribleDev.Blog.Web"]

Readme.md

@@ -0,0 +1,34 @@
The code for [blog.terrible.dev](https://blog.terrible.dev). My blog is powered by a bespoke blog engine; at one time I thought I'd try to make an open-source product for dotnet core, but then got lazy. It's probably one of the fastest blog sites you will ever see. Prior to this my blog was built with Hugo, so authoring was kept backward compatible with Hugo's posts (3+ years ago). I honestly built many versions, including F#, OCaml, and TypeScript ones, until I landed on C#. The implementation here evolved from the F# version, but I switched to C# when I realized there was almost no text editor support for Razor files in an F# project. At the time, dotnet was also really taking off in the TechEmpower benchmarks. This uses an active server, and not a static site build, because HTML can be emitted conditionally based on desktop/mobile headers to minimize HTML size over the wire.
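Roughly, the idea looks like this. A minimal sketch, assuming an ASP.NET Core minimal API with implicit usings; the naive `User-Agent` substring check and the toy routes are illustrative, not the repo's actual code:

var app = WebApplication.CreateBuilder(args).Build();

// Flag each request as mobile or desktop so the rendering layer can emit smaller HTML for phones.
app.Use(async (context, next) =>
{
    var ua = context.Request.Headers["User-Agent"].ToString();
    context.Items["isMobile"] = ua.Contains("Mobile", StringComparison.OrdinalIgnoreCase);
    await next();
});

// A real view would branch on the flag; this toy endpoint returns trimmed markup for mobile.
app.MapGet("/", (HttpContext context) =>
    (bool)context.Items["isMobile"]!
        ? Results.Content("<nav>☰</nav>", "text/html")
        : Results.Content("<nav>Home | Tags | About</nav>", "text/html"));

app.Run();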
## Technology
Almost everything here was built for page speed, and nothing more. Some of the code and design decisions seem weird until you realize it's all about page speed and nothing else.
- dotnet 6
- Markdown
- [Markdig](https://github.com/xoofx/markdig) for parsing markdown
- This also has some extensions that ensure all relative links end in `/`, give external links `rel="noopener"` and `target="_blank"`, and emit a `<picture>` tag for webp images (see the sketch after this list)
- YAML for each post's *front matter*, aka its configuration
- PWA/Serviceworkers
- Used to precache pages which could be read offline, and store a 404 page for offline
- Also used to give an almost SPA like experience (html is swapped from the server, no page reloads occur)
- CSS variables used for theming
- Vanilla JS for handling the hamburger menu.
- No framework for minimum size
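As a rough sketch of how the Markdig wiring looks. The stock pipeline below is runnable on its own with just the Markdig package; the repo's custom extensions named in `BlogFactory.cs` (`AbsoluteLinkConverter`, `ImageRecorder`, `TargetLinkExtension`, `PictureInline`) are layered on via `.Use<T>()` and omitted here:

using Markdig;

class MarkdownDemo
{
    static void Main()
    {
        // Stock Markdig extensions the blog enables; the repo adds its own
        // link/picture extensions on top of this same pipeline.
        var pipeline = new MarkdownPipelineBuilder()
            .UseMediaLinks()
            .UseEmojiAndSmiley()
            .Build();

        var html = Markdown.ToHtml("Check [the repo](https://github.com/TerribleDev) :smile:", pipeline);
        System.Console.WriteLine(html);
    }
}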
## Authoring
Authoring is done in markdown. Configuration for a post must be at the top of the document and placed before `---` (three dashes). Posts include a `<!-- more -->` tag to indicate where the post should be cut off for the summary.
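For example, a minimal post file might look like this (the front-matter keys mirror what the CLI's `PostBuilder` emits; the title, date, and body are invented):

title: My example post
date: 2022-03-01 09:30
tags:
- dotnet
---
The opening paragraph, which becomes the summary on index pages.
<!-- more -->
Everything after the tag only appears on the full post page.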
## Convert images to webp (TODO: make this automatic)
find . -iname '*.png' -exec cwebp -lossless '{}' -o '{}'.webp \;
find . -iname '*.jpg' -exec cwebp '{}' -o '{}'.webp \;
find . -iname '*.gif' -exec gif2webp -mixed '{}' -o '{}'.webp \;
## Resize image to max width (TODO: Also make this automatic 🤣)
find . -iname '*' -exec convert '{}' -resize 750 '{}' \;


@@ -7,6 +7,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{E6C01762-AEB
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TerribleDev.Blog.Web", "src\TerribleDev.Blog.Web\TerribleDev.Blog.Web.csproj", "{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TerribleDev.Blog.CLI", "src\TerribleDev.Blog.CLI\TerribleDev.Blog.CLI.csproj", "{BF5B7187-1B1C-44B3-949A-38B96EEBD625}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -29,12 +31,25 @@ Global
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x64.Build.0 = Release|Any CPU
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x86.ActiveCfg = Release|Any CPU
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x86.Build.0 = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x64.ActiveCfg = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x64.Build.0 = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x86.ActiveCfg = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x86.Build.0 = Debug|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|Any CPU.ActiveCfg = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|Any CPU.Build.0 = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x64.ActiveCfg = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x64.Build.0 = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x86.ActiveCfg = Release|Any CPU
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2} = {E6C01762-AEBF-47C4-8D95-383504D8BC70}
{BF5B7187-1B1C-44B3-949A-38B96EEBD625} = {E6C01762-AEBF-47C4-8D95-383504D8BC70}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {CFA796F1-4389-452F-B224-E64C72E907C4}


@@ -4,4 +4,6 @@ services:
    build: ./src/TerribleDev.Blog.Web
    ports:
      - "80:80"
      - "443:443"
    environment:
      - ASPNETCORE_ENVIRONMENT=development

fly.toml

@@ -0,0 +1,38 @@
# fly.toml file generated for dry-meadow-9911 on 2022-11-09T12:09:05-05:00

app = "dry-meadow-9911"
kill_signal = "SIGINT"
kill_timeout = 5
processes = []

[env]

[experimental]
  allowed_public_ports = []
  auto_rollback = true

[[services]]
  http_checks = []
  internal_port = 80
  processes = ["app"]
  protocol = "tcp"
  script_checks = []

  [services.concurrency]
    hard_limit = 25
    soft_limit = 20
    type = "connections"

  [[services.ports]]
    force_https = true
    handlers = ["http"]
    port = 80

  # [[services.ports]]
  #   handlers = ["tls", "http"]
  #   port = 443

  [[services.tcp_checks]]
    grace_period = "2s"
    interval = "3s"
    restart_limit = 0
    timeout = "2s"

src/TerribleDev.Blog.CLI/.gitignore

@@ -0,0 +1,346 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# ASP.NET Core default setup: bower directory is configured as wwwroot/lib/ and bower restore is true
**/wwwroot/lib/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- Backup*.rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
nupkg


@@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using Pastel;

namespace TerribleDev.Blog.CLI
{
    public class PostBuilder
    {
        public static string Build(string title)
        {
            return $@"title: {title}{Environment.NewLine}date: {DateTime.Now.ToString("yyyy-MM-dd hh:mm")}{Environment.NewLine}tags:{Environment.NewLine}---";
        }
        public static List<string> ListPosts()
        {
            if (!Directory.Exists("Posts"))
            {
                Console.Error.WriteLine($"Cannot find post directory, are you sure you are in the blog directory?");
                Environment.Exit(1);
            }
            var posts = Directory.GetFiles("Posts", "*.md");
            return posts.Select(x => (Path.GetFileNameWithoutExtension(x).Replace('-', ' ').Pastel(Color.LightBlue))).ToList();
        }
    }
}


@@ -0,0 +1,80 @@
using System;
using McMaster.Extensions.CommandLineUtils;
using System.IO;
using Pastel;
using System.Drawing;
using System.Linq;

namespace TerribleDev.Blog.CLI
{
    class Program
    {
        public static int Main(string[] args)
        {
            var app = new CommandLineApplication() {
                Name = "Tempo",
                Description = "A simple blog generator"
            };
            app.MakeSuggestionsInErrorMessage = true;
            app.HelpOption(inherited: true);
            app.OnExecute(() => {
                app.ShowHelp();
                return 1;
            });
            app.Command("post", b =>
            {
                b.MakeSuggestionsInErrorMessage = true;
                b.OnExecute(() => {
                    b.ShowHelp();
                    Environment.Exit(1);
                });
                b.Command("list", a => {
                    a.OnExecute(() => {
                        PostBuilder.ListPosts().ForEach(Console.WriteLine);
                        return 0;
                    });
                });
                b.Command("new", a =>
                {
                    var title = a.Argument("Title", "The title of the post");
                    a.OnExecute(() =>
                    {
                        var titleValue = title.Value;
                        var fileName = $"{titleValue.Replace(" ", "-")}.md";
                        var targetDir = Path.Combine("Posts", fileName);
                        var assetPathName = fileName.Replace(".md", "");
                        var assetPath = Path.Combine("wwwroot", "img", assetPathName);
                        if (!Directory.Exists("Posts"))
                        {
                            Console.Error.WriteLine(($"Cannot find post directory, are you sure you are in the blog directory?").Pastel("#ff3c2e"));
                            return 1;
                        }
                        Console.WriteLine(("Building file 🚀").Pastel("#80ff40"));
                        File.WriteAllText(Path.Combine("Posts", fileName), PostBuilder.Build(titleValue));
                        Console.WriteLine(("Creating wwwroot directory 🛠").Pastel("#80ff40"));
                        Directory.CreateDirectory(assetPath);
                        Console.WriteLine(("Adding keep files 📝").Pastel("#80ff40"));
                        File.Create(Path.Combine(assetPath, ".keep"));
                        Console.WriteLine(("Done! 🎉").Pastel("#80ff40"));
                        return 0;
                    });
                });
            });
            try {
                return app.Execute(args);
            }
            catch (UnrecognizedCommandParsingException e) {
                Console.WriteLine();
                Console.Error.WriteLine(e.Message.Pastel("#ff3c2e"));
                Console.WriteLine();
                Console.Error.WriteLine($"The most similar command is {Environment.NewLine} {e.NearestMatches.FirstOrDefault()}");
                Console.WriteLine();
                return 1;
            }
            catch (CommandParsingException e) {
                Console.Error.WriteLine(e.Message.Pastel("#ff3c2e"));
                return 1;
            }
        }
    }
}

src/TerribleDev.Blog.CLI/TerribleDev.Blog.CLI.csproj

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp7.0</TargetFramework>
    <IsPackable>true</IsPackable>
    <PackAsTool>true</PackAsTool>
    <ToolCommandName>tempo</ToolCommandName>
    <PackageOutputPath>./nupkg</PackageOutputPath>
    <AssemblyName>Tempo</AssemblyName>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="McMaster.Extensions.CommandLineUtils" Version="4.0.1" />
    <PackageReference Include="pastel" Version="3.0.0" />
  </ItemGroup>
</Project>


@@ -0,0 +1,5 @@
#!/bin/bash
dotnet restore
dotnet pack /p:Version=1.2.0 /p:Configuration=Release
dotnet tool uninstall --global tempo
dotnet tool install --global --add-source ./nupkg Tempo


@@ -0,0 +1,2 @@
bin
obj


@@ -0,0 +1,23 @@
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Mvc;
using TerribleDev.Blog.Web.Models;

namespace TerribleDev.Blog.Web.Controllers
{
    [ApiController]
    [Route("[controller]")]
    public class ApiController : ControllerBase
    {
        private readonly PostCache postCache;
        public ApiController(PostCache postCache)
        {
            this.postCache = postCache;
        }
        [Route("all")]
        public ActionResult<IEnumerable<IPost>> PostCount(string key)
        {
            return this.Ok(true);
        }
    }
}


@@ -7,54 +7,49 @@ using Microsoft.AspNetCore.Mvc;
using TerribleDev.Blog.Web.Models;
using System.IO;
using Microsoft.AspNetCore.Html;
using TerribleDev.Blog.Web.Filters;
using Microsoft.Extensions.Logging;
using Microsoft.AspNetCore.OutputCaching;
namespace TerribleDev.Blog.Web.Controllers
{
[Http2PushFilter]
public class HomeController : Controller
{
public static List<IPost> postsAsList = new BlogFactory().GetAllPosts().OrderByDescending(a=>a.PublishDate).ToList();
public static Dictionary<string, List<IPost>> tagToPost = postsAsList.Where(a=>a.tags != null)
.Aggregate(
new Dictionary<string, List<IPost>>(),
(accum, item) => {
foreach(var tag in item.tags)
{
if(accum.TryGetValue(tag, out var list))
{
list.Add(item);
}
else
{
accum[tag] = new List<IPost>() { item };
}
}
return accum;
});
public static IDictionary<string, IPost> posts = postsAsList.ToDictionary(a => a.Url);
public static IDictionary<int, List<IPost>> postsByPage = postsAsList.Aggregate(new Dictionary<int, List<IPost>>() { [1] = new List<IPost>() }, (accum, item) =>
{
var highestPage = accum.Keys.Max();
var current = accum[highestPage].Count;
if (current >= 10)
{
accum[highestPage + 1] = new List<IPost>() { item };
return accum;
}
accum[highestPage].Add(item);
return accum;
});
private readonly ILogger<HomeController> logger;
private readonly PostCache postCache;
public HomeController(PostCache postCache, ILogger<HomeController> logger)
{
this.logger = logger;
this.postCache = postCache;
}
[Route("/index.html", Order = 2)]
[Route("/")]
[Route("/index.html")]
[Route("/page/{pageNumber?}" )]
[OutputCache(Duration = 31536000, VaryByParam = "pageNumber")]
[Route("/page/{pageNumber:required:int:min(1)}")]
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] { "pageNumber" })]
[ResponseCache(Duration = 900)]
public IActionResult Index(int pageNumber = 1)
{
if(!postsByPage.TryGetValue(pageNumber, out var result))
this.logger.LogWarning("Viewing page", pageNumber);
if (!postCache.PostsByPage.TryGetValue(pageNumber, out var result))
{
return Redirect("/404/");
return Redirect($"/404/?from=/page/{pageNumber}/");
}
return View(new HomeViewModel() { Posts = result, Page = pageNumber, HasNext = postsByPage.ContainsKey(pageNumber + 1), HasPrevious = postsByPage.ContainsKey(pageNumber - 1) });
return View(new HomeViewModel()
{
Posts = result,
Page = pageNumber,
HasNext = postCache.PostsByPage.ContainsKey(pageNumber + 1),
HasPrevious = postCache.PostsByPage.ContainsKey(pageNumber - 1),
BlogLD = postCache.BlogLD,
SiteLD = postCache.SiteLD,
BlogLDString = postCache.BlogLDString,
SiteLDString = postCache.SiteLDString
});
}
[Route("/theme/{postName?}")]
public IActionResult Theme(string postName)
@@ -75,17 +70,39 @@ namespace TerribleDev.Blog.Web.Controllers
return View();
}
[Route("{postUrl}")]
[OutputCache(Duration = 31536000, VaryByParam = "postUrl")]
[ResponseCache(Duration = 180)]
public IActionResult Post(string postUrl)
[Route("{postUrl}/{amp?}")]
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] { "postUrl", "amp" })]
[ResponseCache(Duration = 900)]
public IActionResult Post(string postUrl, string amp = "")
{
if(!posts.TryGetValue(postUrl, out var currentPost))
if (!String.IsNullOrEmpty(amp) && amp != "amp")
{
return Redirect("/404/");
return Redirect($"/404/?from=/{postUrl}/{amp}/");
}
return View(model: currentPost);
var isAmp = amp == "amp";
if (isAmp)
{
return this.RedirectPermanent($"/{postUrl}");
}
// case sensitive lookup
if (postCache.UrlToPost.TryGetValue(postUrl, out var currentPost))
{
return View("Post", model: new PostViewModel() { Post = currentPost });
}
// case insensitive lookup on post
if (postCache.CaseInsensitiveUrlToPost.TryGetValue(postUrl, out var caseInsensitivePost))
{
return View("Post", model: new PostViewModel() { Post = caseInsensitivePost });
}
if (postCache.LandingPagesUrl.TryGetValue(postUrl, out var landingPage))
{
return View("Post", model: new PostViewModel() { Post = landingPage });
}
this.StatusCode(404);
return View(nameof(FourOhFour));
}
[Route("/Error")]
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
public IActionResult Error()
@@ -93,13 +110,14 @@ namespace TerribleDev.Blog.Web.Controllers
this.Response.StatusCode = 500;
return View(new ErrorViewModel { RequestId = Activity.Current?.Id ?? HttpContext.TraceIdentifier });
}
[Route("/404")]
[Route("{*url}", Order = 999)]
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
public IActionResult FourOhFour()
public IActionResult FourOhFour(string from = null)
{
this.Response.StatusCode = 404;
return View();
return View(viewName: nameof(FourOhFour));
}
[Route("/404.html")]
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]


@@ -0,0 +1,41 @@
using System;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.AspNetCore.Mvc;
using TerribleDev.Blog.Web.Filters;
using TerribleDev.Blog.Web.Models;
namespace TerribleDev.Blog.Web.Controllers
{
[Http2PushFilter]
public class SearchController : Controller
{
private readonly BlogConfiguration configuration;
private readonly PostCache postCache;
public SearchController(BlogConfiguration configuration, PostCache postCache)
{
this.configuration = configuration;
this.postCache = postCache;
}
[Route("/search")]
public IActionResult Index([Required] [MinLength(1)] [MaxLength(500)] string q)
{
if (string.IsNullOrEmpty(q))
{
return Redirect("/404/?from=/search/");
}
if(!ModelState.IsValid)
{
return Redirect("/404/?from=/search/");
}
var queries = q.Split(" ");
var posts = postCache
.PostsAsLists
.Where(p =>
queries.Any(query => p.Title.Contains(query, System.StringComparison.InvariantCultureIgnoreCase) )
|| queries.Any(query => p.Content.ContentPlain.Contains(query, System.StringComparison.InvariantCultureIgnoreCase))).ToList();
return View(new SearchViewModel { SearchTerm = q, Posts = posts });
}
}
}


@@ -7,6 +7,7 @@ using System.Threading.Tasks;
using System.Xml;
using System.Xml.Serialization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.OutputCaching;
using Microsoft.SyndicationFeed;
using Microsoft.SyndicationFeed.Rss;
using TerribleDev.Blog.Web.Models;
@@ -16,13 +17,14 @@ namespace TerribleDev.Blog.Web.Controllers
public class SeoController : Controller
{
private readonly BlogConfiguration configuration;
public SeoController(BlogConfiguration configuration)
private readonly PostCache postCache;
public SeoController(BlogConfiguration configuration, PostCache postCache)
{
this.configuration = configuration;
this.postCache = postCache;
}
public static DateTimeOffset publishDate = DateTimeOffset.UtcNow; // keep publish date in memory so we just return when the server was kicked
public static IEnumerable<SyndicationItem> postsToSyndication = HomeController.postsAsList.Select(a => a.ToSyndicationItem()).ToList();
public static DateTimeOffset publishDate = DateTimeOffset.UtcNow; // keep publish date in memory so we just return when the server was kicked
[Route("/rss")]
[Route("/rss.xml")]
[ResponseCache(Duration = 7200)]
@@ -38,7 +40,7 @@ namespace TerribleDev.Blog.Web.Controllers
await writer.WriteValue("link", configuration.Link);
await writer.WriteDescription("My name is Tommy Parnell. I usually go by TerribleDev on the internets. These are just some of my writings and rants about the software space.");
foreach (var item in postsToSyndication)
foreach (var item in postCache.PostsAsSyndication)
{
await writer.Write(item);
}
@@ -54,17 +56,18 @@ namespace TerribleDev.Blog.Web.Controllers
{
Response.StatusCode = 200;
Response.ContentType = "text/xml";
var sitewideLinks = new List<SiteMapItem>(HomeController.tagToPost.Keys.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terribledev.io/tag/{a}/" }))
var sitewideLinks = new List<SiteMapItem>(postCache.TagsToPosts.Keys.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terrible.dev/tag/{a}/" }))
{
new SiteMapItem() { LastModified = DateTime.UtcNow, Location="https://blog.terribledev.io/all-tags/" }
new SiteMapItem() { LastModified = DateTime.UtcNow, Location="https://blog.terrible.dev/all-tags/" }
};
var ser = new XmlSerializer(typeof(SiteMapRoot));
var sitemap = new SiteMapRoot()
{
Urls = HomeController.postsAsList.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terribledev.io/{a.Url}/" }).ToList()
Urls = postCache.PostsAsLists.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = a.CanonicalUrl }).ToList()
};
sitemap.Urls.AddRange(postCache.TagsToPosts.Keys.Select(i => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terrible.dev/search?q={i}" }));
sitemap.Urls.AddRange(sitewideLinks);
ser.Serialize(this.Response.Body, sitemap);
}
}
}
}


@@ -3,28 +3,48 @@ using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.OutputCaching;
using TerribleDev.Blog.Web.Filters;
using TerribleDev.Blog.Web.Models;
namespace TerribleDev.Blog.Web.Controllers
{
[Http2PushFilter]
public class TagsController : Controller
{
private readonly PostCache postCache;
public TagsController(PostCache postCache)
{
this.postCache = postCache;
}
[Route("/all-tags")]
[OutputCache(Duration = 31536000)]
public IActionResult AllTags()
{
return View(HomeController.tagToPost);
return View(postCache.TagsToPosts);
}
[Route("/tags/{tagName}")]
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[]{"tagName"})]
public IActionResult TagPluralRedirect(string tagName)
{
if(string.IsNullOrEmpty(tagName))
{
return Redirect($"/404/?from=/tags/emptyString/");
}
return Redirect($"/tag/{tagName}/");
}
[Route("/tag/{tagName}")]
[OutputCache(Duration = 31536000, VaryByParam = "tagName")]
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] {"tagName"})]
public IActionResult GetTag(string tagName)
{
if(!HomeController.tagToPost.TryGetValue(tagName, out var models))
if(!postCache.TagsToPosts.TryGetValue(tagName.ToLower(), out var models))
{
return NotFound();
return Redirect($"/404/?from=/tag/{tagName}/");
}
{
return View(new Models.GetTagViewModel { Tag = tagName, Posts = models });
return View(new Models.GetTagViewModel { Tag = tagName, Posts = models, CanonicalUrl = $"https://blog.terrible.dev/tag/{tagName.ToLower()}/" });
}
}
}
}
}


@@ -1,20 +1,26 @@
FROM microsoft/dotnet:2.2-aspnetcore-runtime AS base
WORKDIR /app
EXPOSE 80
EXPOSE 443
# https://hub.docker.com/_/microsoft-dotnet
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
WORKDIR /source
FROM microsoft/dotnet:2.2-sdk AS build
WORKDIR /src
COPY ["./TerribleDev.Blog.Web.csproj", "."]
RUN dotnet restore "TerribleDev.Blog.Web.csproj"
# copy csproj and restore as distinct layers
COPY *.csproj .
RUN dotnet restore -r linux-musl-x64 /p:PublishReadyToRunComposite=true
# copy everything else and build app
COPY . .
WORKDIR "/src"
RUN dotnet build "TerribleDev.Blog.Web.csproj" -c Release -o /app
RUN dotnet publish -c release -o /app -r linux-musl-x64 --self-contained true --no-restore /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
FROM build AS publish
RUN dotnet publish "TerribleDev.Blog.Web.csproj" -c Release -o /app
FROM base AS final
# final stage/image
FROM mcr.microsoft.com/dotnet/runtime-deps:7.0-alpine-amd64
WORKDIR /app
COPY --from=publish /app .
ENTRYPOINT ["dotnet", "TerribleDev.Blog.Web.dll"]
COPY --from=build /app ./
# See: https://github.com/dotnet/announcements/issues/20
# Uncomment to enable globalization APIs (or delete)
# ENV \
# DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false \
# LC_ALL=en_US.UTF-8 \
# LANG=en_US.UTF-8
# RUN apk add --no-cache icu-libs
ENTRYPOINT ["./TerribleDev.Blog.Web"]


@@ -1,5 +1,5 @@
title: Hosting your blog on the cheap
date: 2018-08-22 04:49:46
date: 2019-08-17 04:49:46
tags:
- cloud
@@ -9,13 +9,12 @@ A load of people have been asking me lately how I host my blog. Incase its not a
<!-- more -->
Since I make no money, on this my strategy is about cutting costs. My grandfather use to say "take care of the pounds, because the pennies will take care of themselves." Now since my grandfather is in England, and their dollar is known as the pound, he was telling me to focus on the bigger picture.
Since I make no money, on this my strategy is about cutting costs. My grandfather use to say "take care of the pounds, let the pennies take care of themselves." Now since my grandfather is in England, and their dollar is known as the pound, he was telling me to focus on the bigger picture.
The first big decision for blogs is what "engine" you are going to use, or if you are going to make your own. These usually fall into 2 categories. Static sites, which are usually when blogs are written in text files, and are compiled into static html, or server rendered blogs such as wordpress. When a request is made to blog that has server rendering, the html is dynamically built in time and delivered to the consumer. Static sites, on the other hand are precomputed and thus are just delivered to the browser.
I won't go into the details on what is better for different scenarios. If you are being cheap, then you will want to use static sites. Static sites are precomputed, which essentially means you just need to serve files to the user. There is no dynamic server to host, you won't need a database, etc. There are a few I like. This blog is ran off [Hexo](https://hexo.io)
I won't go into the details on what is better for different scenarios. If you are being cheap, then you will want to use static sites. Static sites are precomputed, which essentially means you just need to serve files to the user. There is no dynamic server to host, you won't need a database, etc. There are a few I like, but my favorite is [gatsbyjs](https://www.gatsbyjs.org/).
<!-- So I know what you are thinking, static sites are just 'better' for page load time. While this is true, they can lack dynamic features that might be important to you, such as adding new blog posts on a schedule, or limiting ip addresses, or even some kind of login/subscription model. -->
So I know what you are thinking, static sites are just 'better' for page load time. While this is true, they can lack dynamic features that might be important to you, such as adding new blog posts on a schedule, or limiting ip addresses, or even some kind of login/subscription model.


@@ -0,0 +1,12 @@
using System;

namespace TerribleDev.Blog.Web
{
    public static class ArrayExtensions
    {
        public static string ToHexString(this byte[] bytes)
        {
            return Convert.ToHexString(bytes);
        }
    }
}


@@ -9,15 +9,26 @@ namespace TerribleDev.Blog.Web
{
public static class IPostExtensions
{
public static SyndicationItem ToSyndicationItem(this IPost x)
public static SyndicationItem ToSyndicationItem(this Post x)
{
return new SyndicationItem()
Uri.TryCreate(x.CanonicalUrl, UriKind.Absolute, out var url);
var syn = new SyndicationItem()
{
Title = x.Title,
Description = x.Content.ToString(),
Id = $"https://blog.terribledev.io/{x.Url}",
Published = x.PublishDate
Description = x.Content.Content.ToString(),
Id = url.ToString(),
Published = x.PublishDate,
};
syn.AddLink(new SyndicationLink(url));
return syn;
}
public static ISet<string> ToNormalizedTagList(this Post x)
{
if(x.tags == null)
{
return new HashSet<string>();
}
return new HashSet<string>(x.tags.Where(a => !string.IsNullOrWhiteSpace(a)).Select(a => a.ToLower()));
}
}
}


@@ -0,0 +1,132 @@
using System.Collections.Generic;
using TerribleDev.Blog.Web.Models;
using System.Linq;
using System.Collections.Immutable;
using System.Diagnostics;
using System;
using Microsoft.SyndicationFeed;
using Schema.NET;

namespace TerribleDev.Blog.Web.Factories
{
    public static class BlogCacheFactory
    {
        public static int PAGE_LIMIT = 10;
        public static PostCache ProjectPostCache(IEnumerable<IPost> rawPosts)
        {
            var orderedPosts = rawPosts.OrderByDescending(a => a.PublishDate);
            var posts = new List<IPost>(orderedPosts);
            var urlToPosts = new Dictionary<string, IPost>();
            var caseInsensitiveUrlToPost = new Dictionary<string, IPost>(StringComparer.OrdinalIgnoreCase);
            var tagsToPost = new Dictionary<string, IList<Post>>();
            var postsByPage = new Dictionary<int, IList<Post>>();
            var syndicationPosts = new List<SyndicationItem>();
            var landingPagesUrl = new Dictionary<string, LandingPage>();
            var blogPostsLD = new List<Schema.NET.IBlogPosting>();
            foreach (var post in orderedPosts)
            {
                if (post is Post)
                {
                    var castedPost = post as Post;
                    urlToPosts.Add(post.UrlWithoutPath, castedPost);
                    caseInsensitiveUrlToPost.Add(post.UrlWithoutPath.ToLower(), castedPost);
                    syndicationPosts.Add(castedPost.ToSyndicationItem());
                    blogPostsLD.Add(post.Content.JsonLD);
                    foreach (var tag in castedPost.ToNormalizedTagList())
                    {
                        if (tagsToPost.TryGetValue(tag, out var list))
                        {
                            list.Add(castedPost);
                        }
                        else
                        {
                            tagsToPost.Add(tag, new List<Post>() { castedPost });
                        }
                    }
                    if (postsByPage.Keys.Count < 1)
                    {
                        postsByPage.Add(1, new List<Post>() { castedPost });
                    }
                    else
                    {
                        var highestPageKey = postsByPage.Keys.Max();
                        var highestPage = postsByPage[highestPageKey];
                        if (highestPage.Count < BlogCacheFactory.PAGE_LIMIT)
                        {
                            highestPage.Add(castedPost);
                        }
                        else
                        {
                            postsByPage.Add(highestPageKey + 1, new List<Post>() { castedPost });
                        }
                    }
                }
                if (post is LandingPage)
                {
                    var castedPost = post as LandingPage;
                    landingPagesUrl.Add(castedPost.UrlWithoutPath, castedPost);
                }
            }
            var ld = new Schema.NET.Blog()
            {
                Name = "TerribleDev Blog",
                Description = "The blog of Tommy Parnell",
                Author = new Schema.NET.Person() { Name = "TerribleDev", Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")) },
                Image = new Schema.NET.ImageObject() { Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/content/tommyAvatar4.jpg")) },
                Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")),
                SameAs = new Schema.NET.OneOrMany<Uri>(new Uri("https://twitter.com/terribledev")),
            };
            var website = new Schema.NET.WebSite()
            {
                Name = "TerribleDev Blog",
                Description = "The blog of Tommy Parnell",
                Author = new Schema.NET.Person() { Name = "TerribleDev", Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")) },
                Image = new Schema.NET.ImageObject() { Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/content/tommyAvatar4.jpg")) },
                Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/")),
                SameAs = new Schema.NET.OneOrMany<Uri>(new Uri("https://twitter.com/terribledev")),
                PotentialAction = new Schema.NET.OneOrMany<Schema.NET.IAction>(
                    // search action
                    new List<Schema.NET.SearchAction>()
                    {
                        new Schema.NET.SearchAction()
                        {
                            Target = new Schema.NET.EntryPoint()
                            {
                                UrlTemplate = new Schema.NET.OneOrMany<string>(@"https://blog.terrible.dev/search?q={search-term}")
                            },
                            QueryInput = new Schema.NET.Values<string, Schema.NET.PropertyValueSpecification>(
                                new OneOrMany<PropertyValueSpecification>(
                                    new PropertyValueSpecification()
                                    {
                                        ValueName = "search-term",
                                        ValueRequired = true,
                                        ValueMinLength = 1,
                                        ValueMaxLength = 500,
                                    }
                                )
                            )
                        }
                    }
                )
            };
            return new PostCache()
            {
                LandingPagesUrl = landingPagesUrl,
                PostsAsLists = posts,
                TagsToPosts = tagsToPost,
                UrlToPost = urlToPosts,
                PostsByPage = postsByPage,
                PostsAsSyndication = syndicationPosts,
                BlogLD = ld,
                SiteLD = website,
                BlogLDString = ld.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
                SiteLDString = website.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
                CaseInsensitiveUrlToPost = caseInsensitiveUrlToPost
            };
        }
    }
}


@@ -8,17 +8,31 @@ using YamlDotNet.Serialization;
using Microsoft.AspNetCore.Html;
using Markdig;
using TerribleDev.Blog.Web.MarkExtension.TerribleDev.Blog.Web.ExternalLinkParser;
using TerribleDev.Blog.Web.MarkExtension;
using Microsoft.AspNetCore.Hosting;
using System.Diagnostics;
using System.Collections.Concurrent;
using Schema.NET;
using System.Text.RegularExpressions;
using TerribleDev.Blog.Web.Factories;
using System.Text;
using System.Security.Cryptography;
namespace TerribleDev.Blog.Web
{
public class BlogFactory
{
public List<IPost> GetAllPosts()
private CodeFactory _codeFactory = new CodeFactory();
public async Task<IEnumerable<IPost>> GetAllPostsAsync(string domain)
{
// why didn't I use f# I'd have a pipe operator by now
var posts = GetPosts();
var postsAsText = posts.Select(GetFileText);
return Task.WhenAll(postsAsText).Result.AsParallel().Select(b => ParsePost(b.text, b.fileInfo.Name)).ToList();
return await Task.WhenAll(posts.Select(async (post) =>
{
var (text, fileInfo) = await GetFileText(post);
return await ParsePost(text, fileInfo.Name, domain);
}));
}
private static async Task<(string text, FileInfo fileInfo)> GetFileText(string filePath)
@@ -26,7 +40,6 @@ namespace TerribleDev.Blog.Web
var fileInfo = new FileInfo(filePath);
var text = await File.ReadAllTextAsync(fileInfo.FullName);
return (text, fileInfo);
}
public IEnumerable<string> GetPosts() => Directory.EnumerateFiles(Path.Combine(Directory.GetCurrentDirectory(), "Posts"), "*.md", SearchOption.TopDirectoryOnly);
@@ -37,36 +50,142 @@ namespace TerribleDev.Blog.Web
return serializer.Deserialize<PostSettings>(ymlText);
}
public IPost ParsePost(string postText, string fileName)
public async Task<(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, Boolean hasCode)> ResolveContentForPost(string markdownText, string fileName, string resolvedUrl, string domain)
{
List<string> postImages = new List<string>();
var pipeline = new MarkdownPipelineBuilder()
.Use(new AbsoluteLinkConverter(resolvedUrl, domain))
.Use<ImageRecorder>(new ImageRecorder(ref postImages))
.Use<TargetLinkExtension>()
.UseMediaLinks()
.Use<PictureInline>()
.UseEmojiAndSmiley()
.Build();
var (replacedText, hasCode) = await _codeFactory.ReplaceFencedCode(markdownText);
var postContent = Markdown.ToHtml(replacedText, pipeline);
var postContentPlain = String.Join("", Markdown.ToPlainText(replacedText, pipeline).Split("<!-- more -->"));
var summary = postContent.Split("<!-- more -->")[0];
var postSummaryPlain = postContentPlain.Split("<!-- more -->")[0];
return (postContent, postContentPlain, summary, postSummaryPlain, postImages, hasCode);
}
public async Task<IPost> ParsePost(string postText, string fileName, string domain)
{
var splitFile = postText.Split("---");
var ymlRaw = splitFile[0];
var markdownText = string.Join("", splitFile.Skip(1));
var postSettings = ParseYaml(ymlRaw);
var resolvedUrl = !string.IsNullOrWhiteSpace(postSettings.permalink) ? postSettings.permalink : fileName.Split('.')[0].Replace(' ', '-').WithoutSpecialCharacters();
var canonicalUrl = $"https://blog.terrible.dev/{resolvedUrl}/";
return postSettings.isLanding ? await BuildLandingPage(fileName, domain, markdownText, postSettings, resolvedUrl, canonicalUrl) : await BuildPost(fileName, domain, markdownText, postSettings, resolvedUrl, canonicalUrl);
}
private async Task<Post> BuildPost(string fileName, string domain, string markdownText, PostSettings postSettings, string resolvedUrl, string canonicalUrl)
{
(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, bool hasCode) = await ResolveContentForPost(markdownText, fileName, resolvedUrl, domain);
var ld = new Schema.NET.BlogPosting()
{
Headline = postSettings.title,
DatePublished = postSettings.date,
DateModified = postSettings.updated ?? postSettings.date,
WordCount = postContentPlain.Split(' ').Length,
ArticleBody = new Schema.NET.OneOrMany<string>(new HtmlString(postContentPlain).Value),
Author = new Schema.NET.Person() { Name = "Tommy Parnell", AdditionalName = "TerribleDev", Url = new Uri("https://blog.terrible.dev/about") },
Url = new Uri(canonicalUrl)
};
var breadcrumb = new Schema.NET.BreadcrumbList()
{
ItemListElement = new List<IListItem>() // Required
{
new ListItem() // Required
{
Position = 1, // Required
Url = new Uri("https://blog.terrible.dev/") // Required
},
new ListItem()
{
Position = 2,
Name = postSettings.title,
},
},
};
// regex remove picture and source tags but not the child elements
var postContentClean = Regex.Replace(postContent, "<picture.*?>|</picture>|<source.*?>|</source>", "", RegexOptions.Singleline);
var content = new PostContent()
{
Content = new HtmlString(postContent),
ContentPlain = postContentPlain,
Summary = new HtmlString(summary),
SummaryPlain = postSummaryPlain,
SummaryPlainShort = (postContentPlain.Length <= 147 ? postContentPlain : postContentPlain.Substring(0, 146)) + "...",
Images = postImages.Distinct().Select(a => a.StartsWith('/') ? a : $"/{resolvedUrl}/{a}").ToList(),
JsonLD = ld,
JsonLDString = ld.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
JsonLDBreadcrumb = breadcrumb,
JsonLDBreadcrumbString = breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
HasCode = hasCode,
MarkdownMD5 = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(markdownText)).ToHexString()
};
var thumbNailUrl = string.IsNullOrWhiteSpace(postSettings.thumbnailImage) ?
postImages?.FirstOrDefault() ?? "https://www.gravatar.com/avatar/333e3cea32cd17ff2007d131df336061?s=640" :
$"{canonicalUrl}{postSettings.thumbnailImage}"; // canonicalUrl already ends with a slash
return new Post()
{
PublishDate = postSettings.date.ToUniversalTime(),
UpdatedDate = postSettings.updated?.ToUniversalTime() ?? null,
tags = postSettings.tags?.Select(a => a.Replace(' ', '-').WithoutSpecialCharacters().ToLower()).ToList() ?? new List<string>(),
Title = postSettings.title,
RelativeUrl = $"/{resolvedUrl}/",
CanonicalUrl = canonicalUrl,
UrlWithoutPath = resolvedUrl,
isLanding = postSettings.isLanding,
Content = content,
ThumbnailImage = thumbNailUrl,
};
}
private async Task<LandingPage> BuildLandingPage(string fileName, string domain, string markdownText, PostSettings postSettings, string resolvedUrl, string canonicalUrl)
{
(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, bool hasCode) = await ResolveContentForPost(markdownText, fileName, resolvedUrl, domain);
var breadcrumb = new Schema.NET.BreadcrumbList()
{
ItemListElement = new List<IListItem>() // Required
{
new ListItem() // Required
{
Position = 1, // Required
Url = new Uri("https://blog.terrible.dev/") // Required
},
new ListItem()
{
Position = 2,
Name = postSettings.title,
},
},
};
// regex remove picture and source tags but not the child elements
var content = new PostContent()
{
Content = new HtmlString(postContent),
Images = postImages,
ContentPlain = postContentPlain,
Summary = new HtmlString(summary),
SummaryPlain = postSummaryPlain,
SummaryPlainShort = (postContentPlain.Length <= 147 ? postContentPlain : postContentPlain.Substring(0, 146)) + "...",
JsonLDBreadcrumb = breadcrumb,
JsonLDBreadcrumbString = breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
HasCode = hasCode,
MarkdownMD5 = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(markdownText)).ToHexString()
};
return new LandingPage()
{
PublishDate = postSettings.date.ToUniversalTime(),
UpdatedDate = postSettings.updated?.ToUniversalTime() ?? null,
Title = postSettings.title,
RelativeUrl = $"/{resolvedUrl}/",
CanonicalUrl = canonicalUrl,
UrlWithoutPath = resolvedUrl,
isLanding = postSettings.isLanding,
Content = content,
};
}
}

View File

@@ -0,0 +1,41 @@
using System;
using System.Linq;
using System.Net.Http;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace TerribleDev.Blog.Web.Factories
{
public class CodeFactory
{
private HttpClient httpClient = new HttpClient();
private static Boolean IsDisabled = !String.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("DISABLE_PRISMA"));
public async Task<(string result, bool hasCode)> ReplaceFencedCode(string markdown)
{
if(CodeFactory.IsDisabled)
{
return (markdown, false);
}
// regex grab all text between backticks
var regex = new Regex(@"```(.*?)```", RegexOptions.Singleline);
var matches = regex.Matches(markdown);
var result = await Task.WhenAll(matches.Select(async match =>
{
var code = match.Value;
var codeContent = await httpClient.PostAsync("https://prismasaservice.azurewebsites.net/api/HttpTrigger", new StringContent(code));
if(!codeContent.IsSuccessStatusCode)
{
Console.Error.WriteLine("Error posting code to prisma");
}
return (code, await codeContent.Content.ReadAsStringAsync());
}));
foreach(var (match, newValue) in result)
{
markdown = markdown.Replace(match, newValue);
}
return (markdown, matches.Count > 0);
}
}
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
namespace TerribleDev.Blog.Web.Filters
{
public class StaticETag: ActionFilterAttribute
{
static StaticETag()
{
string etagString;
if(File.Exists("buildtime.txt"))
{
Console.WriteLine("buildtime.txt found");
etagString = File.ReadAllText("buildtime.txt");
}
else
{
Console.WriteLine("buildtime.txt not found");
Console.WriteLine("Directory list");
Console.WriteLine(Directory.GetFiles(".", "*", SearchOption.AllDirectories).Aggregate((a, b) => a + "\n" + b));
var unixTime = DateTimeOffset.Now.ToUnixTimeMilliseconds().ToString();
Console.WriteLine("Using Unix Time for Etag: " + unixTime);
etagString = unixTime;
}
StaticETag.staticEtag = "\"" + MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(etagString)).ToHexString().Substring(0,8) + "\"";
}
public static string staticEtag;
public static ConcurrentDictionary<string, string> cache = new ConcurrentDictionary<string, string>();
public override void OnActionExecuted(ActionExecutedContext context)
{
if(context.HttpContext.Response.StatusCode >= 200 && context.HttpContext.Response.StatusCode < 300 && context.HttpContext.Response.Headers.ETag.Count == 0)
{
context.HttpContext.Response.Headers.Add("ETag", staticEtag);
}
}
}
}

View File

@@ -0,0 +1,44 @@
using System;
using System.Text;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Logging;
using TerribleDev.Blog.Web.Taghelpers;
namespace TerribleDev.Blog.Web.Filters
{
public class Http2PushFilter : ActionFilterAttribute
{
private static bool IsHttp2PushDisabled = !String.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("DISABLE_HTTP2_PUSH"));
public override void OnActionExecuted(ActionExecutedContext context)
{
if(IsHttp2PushDisabled)
{
return;
}
var logger = context.HttpContext.RequestServices.GetService(typeof(ILogger<Http2PushFilter>)) as ILogger<Http2PushFilter>;
logger.LogDebug("Http2PushFilter.OnActionExecuted");
if(!context.HttpContext.Items.TryGetValue(HttpPush.Key, out var links))
{
logger.LogDebug("Did not find any links to push");
return;
}
var linkData = links as System.Collections.Generic.List<PushUrl>;
if(linkData == null || linkData.Count == 0) {
logger.LogDebug("Http2PushFilter.OnActionExecuted: No links");
return;
}
var headerBuilder = new StringBuilder();
for(var i = 0; i < linkData.Count; i++) {
var (url, AsProperty) = linkData[i];
var resolvedUrl = url.StartsWith("~") ? context.HttpContext.Request.PathBase.ToString() + url.Substring(1) : url;
headerBuilder.Append($"<{resolvedUrl}>; rel=preload; as={AsProperty}");
if(i < linkData.Count - 1) {
headerBuilder.Append(", ");
}
}
logger.LogDebug("Http2PushFilter.OnActionExecuted: " + headerBuilder.ToString());
context.HttpContext.Response.Headers.Add("Link", headerBuilder.ToString());
base.OnActionExecuted(context);
}
}
}

View File

@@ -0,0 +1,61 @@
using System;
using Markdig;
using Markdig.Renderers;
using Markdig.Renderers.Html.Inlines;
using Markdig.Syntax.Inlines;
namespace TerribleDev.Blog.Web.MarkExtension
{
public class AbsoluteLinkConverter : IMarkdownExtension
{
public string BaseUrl { get; }
public string Domain { get; }
public AbsoluteLinkConverter(string baseUrl, string domain)
{
BaseUrl = baseUrl;
Domain = domain;
}
public void Setup(MarkdownPipelineBuilder pipeline)
{
}
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
{
var htmlRenderer = renderer as HtmlRenderer;
if (htmlRenderer != null)
{
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
inlineRenderer.TryWriters.Add(TryLinkAbsoluteUrlWriter);
}
}
private bool TryLinkAbsoluteUrlWriter(HtmlRenderer renderer, LinkInline linkInline)
{
var prevDynamic = linkInline.GetDynamicUrl;
linkInline.GetDynamicUrl = () => {
var escapeUrl = prevDynamic != null ? prevDynamic() ?? linkInline.Url : linkInline.Url;
if(!System.Uri.TryCreate(escapeUrl, UriKind.RelativeOrAbsolute, out var parsedResult))
{
throw new Exception($"Error making link for {escapeUrl} @ {BaseUrl}");
}
if(parsedResult.IsAbsoluteUri)
{
return escapeUrl;
}
var uriBuilder = new UriBuilder(Domain);
if(!escapeUrl.StartsWith("/"))
{
uriBuilder = uriBuilder.WithPathSegment($"/{BaseUrl}/{escapeUrl}");
}
else
{
uriBuilder = uriBuilder.WithPathSegment(parsedResult.ToString());
}
return uriBuilder.Uri.ToString();
};
return false;
}
}
}

View File

@@ -37,7 +37,6 @@ namespace TerribleDev.Blog.Web.MarkExtension
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
if (inlineRenderer != null)
{
inlineRenderer.TryWriters.Add(TryLinkInlineRenderer);
}
}
@@ -49,7 +48,8 @@ namespace TerribleDev.Blog.Web.MarkExtension
{
return false;
}
var url = linkInline.GetDynamicUrl != null ? linkInline.GetDynamicUrl(): linkInline.Url;
this.images.Add(url);
return false;
}
}

View File

@@ -0,0 +1,72 @@
using System;
using Markdig;
using Markdig.Renderers;
using Markdig.Renderers.Html.Inlines;
using Markdig.Syntax.Inlines;
namespace TerribleDev.Blog.Web.MarkExtension
{
public class PictureInline : IMarkdownExtension
{
public void Setup(MarkdownPipelineBuilder pipeline)
{
}
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
{
var htmlRenderer = renderer as HtmlRenderer;
if (htmlRenderer != null)
{
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
inlineRenderer.TryWriters.Add(TryLinkInlineRenderer);
}
}
private bool TryLinkInlineRenderer(HtmlRenderer renderer, LinkInline linkInline)
{
if (linkInline == null || !linkInline.IsImage)
{
return false;
}
if(linkInline.Url.EndsWith(".gif"))
{
return false;
}
renderer.Write("<picture>");
WriteImageTag(renderer, linkInline, ".webp", "image/webp");
WriteImageTag(renderer, linkInline, string.Empty);
renderer.Write("</picture>");
return true;
}
private void WriteImageTag(HtmlRenderer renderer, LinkInline link, string suffix, string type = null)
{
renderer.Write(string.IsNullOrWhiteSpace(type) ? $"<img loading=\"lazy\" src=\"" : $"<source type=\"{type}\" srcset=\"");
var escapeUrl = link.GetDynamicUrl != null ? link.GetDynamicUrl() ?? link.Url : link.Url;
renderer.WriteEscapeUrl($"{escapeUrl}{suffix}");
renderer.Write("\"");
renderer.WriteAttributes(link);
if (renderer.EnableHtmlForInline)
{
renderer.Write(" alt=\"");
}
var wasEnableHtmlForInline = renderer.EnableHtmlForInline;
renderer.EnableHtmlForInline = false;
renderer.WriteChildren(link);
renderer.EnableHtmlForInline = wasEnableHtmlForInline;
if (renderer.EnableHtmlForInline)
{
renderer.Write("\"");
}
if (renderer.EnableHtmlForInline)
{
renderer.Write(" />");
}
}
}
}

View File

@@ -2,12 +2,36 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Schema.NET;
namespace TerribleDev.Blog.Web.Models
{
public class GetTagViewModel
{
public IEnumerable<IPost> Posts { get; set; }
public string Title { get => $"Tag: {Tag}"; }
public string Tag { get; set; }
public string CanonicalUrl { get; set; }
public string ldJson ()
{
var breadcrumb = new Schema.NET.BreadcrumbList()
{
ItemListElement = new List<IListItem>() // Required
{
new ListItem() // Required
{
Position = 1, // Required
Url = new Uri("https://blog.terrible.dev/") // Required
},
new ListItem()
{
Position = 2,
Name = Tag,
},
},
};
return breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true");
}
}
}

View File

@@ -10,5 +10,10 @@ namespace TerribleDev.Blog.Web.Models
public string PreviousUrl { get; set; }
public bool HasNext { get; set; }
public bool HasPrevious { get; set; }
public Schema.NET.Blog BlogLD { get; set; }
public Schema.NET.WebSite SiteLD { get; set; }
public string BlogLDString { get; set; }
public string SiteLDString { get; set; }
}
}

View File

@@ -9,15 +9,15 @@ namespace TerribleDev.Blog.Web.Models
{
public interface IPost
{
string CanonicalUrl { get; set; }
string UrlWithoutPath { get; set; }
string RelativeUrl { get; set; }
string Title { get; set; }
DateTime PublishDate { get; set; }
IList<string> tags { get; set; }
DateTime? UpdatedDate { get; set; }
IPostContent Content { get; set; }
bool isLanding { get; set; }
string ThumbnailImage { get; }
}
}

View File

@@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Html;
using Schema.NET;
namespace TerribleDev.Blog.Web.Models
{
public interface IPostContent
{
HtmlString Content { get; set; }
HtmlString Summary { get; set; }
string ContentPlain { get; set; }
string SummaryPlain { get; set; }
string SummaryPlainShort { get; set; }
IList<string> Images { get; set; }
BlogPosting JsonLD { get; set; }
bool HasCode { get; set; }
public string JsonLDString { get; set; }
BreadcrumbList JsonLDBreadcrumb { get; set; }
string JsonLDBreadcrumbString { get; set; }
string MarkdownMD5 { get; set; }
}
}

View File

@@ -12,5 +12,6 @@ namespace TerribleDev.Blog.Web.Models
string thumbnailImage { get; set; }
DateTimeOffset date { get; set; }
DateTimeOffset updated { get; set; }
bool isLanding { get; set; }
}
}

View File

@@ -0,0 +1,23 @@
using Microsoft.AspNetCore.Html;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
namespace TerribleDev.Blog.Web.Models
{
[DebuggerDisplay("{Title}")]
public class LandingPage : IPost
{
public string CanonicalUrl { get; set; }
public string UrlWithoutPath { get; set; }
public string RelativeUrl { get; set; }
public string Title { get; set; }
public DateTime PublishDate { get; set; }
public DateTime? UpdatedDate { get; set; }
public IPostContent Content { get; set; }
public bool isLanding { get; set; } = false;
public string ThumbnailImage { get => "https://www.gravatar.com/avatar/333e3cea32cd17ff2007d131df336061?s=640"; }
}
}

View File

@@ -2,21 +2,24 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
namespace TerribleDev.Blog.Web.Models
{
[DebuggerDisplay("{Title}")]
public class Post : IPost
{
public string CanonicalUrl { get; set; }
public string UrlWithoutPath { get; set; }
public string RelativeUrl { get; set; }
public string Title { get; set; }
public DateTime PublishDate { get; set; }
public DateTime? UpdatedDate { get; set; }
public IList<string> tags { get; set; }
public IPostContent Content { get; set; }
public bool isLanding { get; set; } = false;
public string ThumbnailImage { get; set; }
}
}

View File

@@ -0,0 +1,23 @@
using System.Collections.Generic;
using Microsoft.SyndicationFeed;
namespace TerribleDev.Blog.Web.Models
{
public class PostCache
{
public IList<IPost> PostsAsLists { get; set;}
public IDictionary<string, IList<Post>> TagsToPosts { get; set; }
public IDictionary<string, IPost> UrlToPost { get; set; }
public IDictionary<string, IPost> CaseInsensitiveUrlToPost { get; set; }
public IDictionary<int, IList<Post>> PostsByPage { get; set; }
public IList<SyndicationItem> PostsAsSyndication { get; set; }
public Schema.NET.Blog BlogLD { get; set; }
public Schema.NET.WebSite SiteLD { get; set; }
public string BlogLDString { get; set; }
public string SiteLDString { get; set; }
public Dictionary<string, LandingPage> LandingPagesUrl { get; set; }
}
}

View File

@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
namespace TerribleDev.Blog.Web.Models
{
public class PostComparer
{
public static PostComparisonByDateInternal PostComparisonByDate = new PostComparisonByDateInternal();
public class PostComparisonByDateInternal : IComparer<IPost>
{
public int Compare([AllowNull] IPost x, [AllowNull] IPost y)
{
return DateTime.Compare(x.PublishDate, y.PublishDate);
}
}
}
}

View File

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Html;
using Schema.NET;
namespace TerribleDev.Blog.Web.Models
{
public class PostContent : IPostContent
{
public HtmlString Content { get; set; }
public HtmlString Summary { get; set; }
public string ContentPlain { get; set; }
public string SummaryPlain { get; set; }
public string SummaryPlainShort { get; set; }
public IList<string> Images { get; set; }
public BlogPosting JsonLD { get; set; }
public string JsonLDString { get; set; }
public BreadcrumbList JsonLDBreadcrumb { get; set; }
public string JsonLDBreadcrumbString { get; set; }
public bool HasCode { get; set; }
public string MarkdownMD5 { get; set; }
}
}

View File

@@ -9,11 +9,14 @@ namespace TerribleDev.Blog.Web.Models
public string title { get; set; }
public string permalink { get; set; }
public DateTime date { get; set; }
public DateTime? updated { get; set; }
public string id { get; set; }
public string thumbnail_image { get; set; }
public string thumbnailImage { get; set; }
public string thumbnail_image_position { get; set; }
public string layout { get; set; }
public bool isLanding { get; set; } = false;
}
}

View File

@@ -0,0 +1,7 @@
namespace TerribleDev.Blog.Web.Models
{
public class PostViewModel
{
public IPost Post { get; set; }
}
}

View File

@@ -0,0 +1,12 @@
using System.Collections.Generic;
using System.Linq;
namespace TerribleDev.Blog.Web.Models
{
public class SearchViewModel
{
public string SearchTerm { get; set; }
public IList<IPost> Posts { get; set; }
}
}

View File

@@ -0,0 +1,46 @@
title: 5 web perf tips for 2019
date: 2019-02-23 01:32
tags:
- web
- performance
- javascript
- battle of the bulge
---
As more and more of the world is getting online, a larger part of the internet community is using the internet on lower-powered devices. Making websites fast is becoming paramount. Here are 5 tips to improve your web page's performance.
<!-- more -->
## Brotli and gzip
So in case you didn't know, when your browser makes a request to a server it sends along a header called `Accept-Encoding`. This is a comma-separated list of compression encodings the browser accepts, which your server can use to compress the data it sends back. The common ones in the past have been `gzip` and `deflate`. [Brotli](https://en.wikipedia.org/wiki/Brotli) is a compression algorithm invented by Google to be more efficient for the web. In my own testing it is about 35% more effective than gzip, which means your content will be almost a third smaller over the wire. Most browsers [support it already](https://caniuse.com/#feat=brotli). You can use Cloudflare to serve Brotli (`br`) to your users, and most web servers support it today. Make sure your server is serving `br`, and at minimum gzip.
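If your server happens to be ASP.NET Core (like the one behind this blog), both encodings can be served by the framework's response compression middleware. A minimal sketch, assuming the stock `Microsoft.AspNetCore.ResponseCompression` providers:
```csharp
using System.IO.Compression;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.ResponseCompression;
using Microsoft.Extensions.DependencyInjection;

public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddResponseCompression(options =>
        {
            options.EnableForHttps = true; // compress TLS responses too
            options.Providers.Add<BrotliCompressionProvider>(); // picked when the client sends "br"
            options.Providers.Add<GzipCompressionProvider>();   // fallback for "gzip"
        });
        // brotli at max compression is slow; Fastest is a reasonable trade-off for dynamic pages
        services.Configure<BrotliCompressionProviderOptions>(o => o.Level = CompressionLevel.Fastest);
    }

    public void Configure(IApplicationBuilder app)
    {
        app.UseResponseCompression(); // register before anything that writes the response body
    }
}
```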
## Webp, JPEG 2000
Images are among the largest types of files on the internet today, and picking the right file type is as important as getting your data structures right. In the past we told everyone to keep photography in `jpeg`, and logos and screenshots in `png`. However, Google has come out with a new file format that is massively smaller than either `jpeg` or `png`: `webp`. Webp is only supported on [chrome, edge and firefox](https://caniuse.com/#search=webp), but don't worry, for iOS Safari you can use `JPEG 2000`. Sizing images is also a key concern: you can use srcset to size images appropriately, and the picture element to select the right format given browser support.
```html
<picture>
<source type="image/webp" srcset="3.webp" alt="an image showing the tiny png results">
<source type="image/jp2" srcset="3.jp2" alt="an image showing the tiny png results">
<img src="3.png" alt="an image showing the tiny png results">
</picture>
```
## Lighthouse
Ok so this is less of a trick to implement and more of a tool to use. Man, I keep mentioning Google, but they keep making amazing web stuff, so here we are. Google has made an awesome performance tool called [lighthouse](https://developers.google.com/web/tools/lighthouse/). A version of this tool is built into Chrome: open the developer tools and click the `audits` tab. That tool is lighthouse. You can install newer versions with `npm install -g lighthouse` or `yarn global add lighthouse`, then just run `lighthouse --view <url>`; for this blog that would be `lighthouse --view https://blog.terrible.dev`. You will be presented with a pretty in-depth report on how you can fix and improve your web pages. You can also have your CI system run lighthouse on every build, fail PRs if they reduce performance, or just track your accessibility over time.
## HTTP/2
HTTP version 2 is a newer version of the http spec. Supported [by all major browsers](https://caniuse.com/#feat=http2), this protocol offers compression of http headers, a [push feature](https://en.wikipedia.org/wiki/HTTP/2_Server_Push) that lets you push files down to the browser before they are requested, [http pipelining](https://en.wikipedia.org/wiki/HTTP_pipelining), and multiplexing of multiple requests over a single TCP connection. You can easily get http2 working if you let [cloudflare](https://www.cloudflare.com/) front your http traffic, but you will still want to implement http2 in your server eventually.
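If your origin happens to be ASP.NET Core's Kestrel, a rough sketch of enabling HTTP/2 alongside HTTP/1.1 (assuming a TLS certificate is available, since browsers only speak h2 over TLS) looks something like this:
```csharp
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Server.Kestrel.Core;
using Microsoft.Extensions.Hosting;

public static IHostBuilder CreateHostBuilder(string[] args) =>
    Host.CreateDefaultBuilder(args)
        .ConfigureWebHostDefaults(webBuilder =>
        {
            webBuilder.ConfigureKestrel(kestrel =>
            {
                kestrel.ListenAnyIP(5001, listenOptions =>
                {
                    // negotiate h2 via ALPN, falling back to HTTP/1.1
                    listenOptions.Protocols = HttpProtocols.Http1AndHttp2;
                    listenOptions.UseHttps(); // uses the default dev certificate here
                });
            });
            webBuilder.UseStartup<Startup>();
        });
```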
## Service workers
My last and probably favorite feature. [Service Workers](https://developers.google.com/web/fundamentals/primers/service-workers/) are workers that stand between your server and the web page in the browser. They are mostly a proxy that lets you do things like cache your content and support offline capabilities. They are easy to implement: you need a `manifest.json` file, which you can generate from Microsoft's [PWA Builder](https://www.pwabuilder.com/), and you must serve traffic over https only. PWA Builder even has [pre-made service workers](https://www.pwabuilder.com/serviceworker) for most scenarios, so you don't even need to write your own. I use one on this blog to cache static content, preload blog posts, and provide offline support.

View File

@@ -0,0 +1,103 @@
title: Accessibility Driven Development
date: 2020-08-07 05:27:00
tags:
- a11y
- accessibility
---
I've been working at [CarGurus.com](https://www.cargurus.com) for the last 2 years or so. One of the biggest journeys we've been undertaking is taking accessibility far more seriously. However, with an engineering team well into the triple digits, it gets harder and harder to scale accessibility knowledge.
<!-- more -->
Knowledge gaps aside, CarGurus has a multitude of technologies its UIs are built with. The two major ones are [Freemarker](https://freemarker.apache.org/) and [React](https://reactjs.org/). I manage one of our infrastructure teams; we build the tools and technologies the site is created with. This includes our component library, our build systems, linting tools, authentication systems, and core utilities for product development. When we first started really taking accessibility seriously we went to several teams in the business. Many of them did not have anyone with accessibility expertise.
> Our first approach was to teach accessibility. At the same time we worked with our brand marketing team to ensure our color palette would be accessible from the start.
After identifying advocates on every team, we set out to streamline identifying accessibility issues. One approach I decided to take was to show borders around failing elements during development. I first heard of this idea years ago when GitHub released something it called [accessibilityjs](https://github.com/github/accessibilityjs). GitHub included this script in its pages, and it put a giant ugly red border around failing elements. I thought this was a really slick way to point out issues during development.
> I was going to use accessibility JS until I found axe-core
So [axe](https://www.deque.com/axe/) is a technology built by Deque to identify accessibility issues. It is a highly configurable piece of technology that includes libraries for developers, browser extensions, and bots you can scan sites with. Deque has open sourced the core of axe as a JavaScript library called [axe-core](https://github.com/dequelabs/axe-core).
> I first started out by writing a script to use axe-core and to add a 10px red border around elements, but I quickly ran into trouble
First problem: I need to re-run axe every time the page changes. If we click to open a nav-bar, we'll need to rescan the page. Second problem: every time we changed the DOM, the script would crash React apps. And finally, axe-core is quite slow on large HTML documents.
## Mutation Observers
So the first problem was easily solvable. The browser has an API called [Mutation Observer](https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver). This is an API that lets you listen to changes to certain elements and fire a function when those elements change. In our case we wanted to listen to any changes to the `<body>` tag and all of its descendants.
```js
function scanForAccesibilityIssues() { /* scan for issues */}
const observer = new MutationObserver(scanForAccesibilityIssues);
observer.observe(document.querySelector('body'), { childList: true, subtree: true });
```
## Shadow DOM
Several UI frameworks such as React keep an in-memory representation of the HTML document. The reason is that when you want to change the UI, React diffs its current in-memory DOM against the next one and determines the most efficient way to actually apply the changes to the browser. Any application, such as a browser extension or our accessibility detector, that edits the DOM outside of React's in-memory DOM will cause React to freak out and either crash or apply a change in an unexpected way. Luckily, in recent years browsers have added a [Shadow DOM](https://developer.mozilla.org/en-US/docs/Web/Web_Components/Using_shadow_DOM). This is essentially a DOM that is used to apply visual changes for the user but sits outside the light DOM (the regular DOM). However, not all HTML elements support the Shadow DOM. For us to apply the red border we need to use the shadow DOM, and if an element does not support shadow then we have to apply the border to a parent element that does. I wrote a [recursive function](https://en.wikipedia.org/wiki/Recursion_(computer_science)#Tail-recursive_functions) called `resolveClosestShadowRoot` which walks up the DOM and finds the closest parent of a target element that supports shadow. You can tell a node supports shadow because it has an `.attachShadow` method, so we can simply check whether that property is defined.
```js
/**
*
* @param {HTMLElement} node
* @returns
*/
function resolveClosestShadowRoot(node) {
if (!node) {
return null;
}
if (node.attachShadow) {
return node;
}
return resolveClosestShadowRoot(node.parentElement);
}
```
After we identify which element to style, we just have to apply the border. The code below does that by calling the attachShadow function and setting its innerHTML.
```js
const resolvedNode = resolveClosestShadowRoot(node);
const shadowRoot = resolvedNode.attachShadow({ mode: 'open' });
shadowRoot.innerHTML = '<style>:host { outline: red solid 1rem; }</style><slot></slot>';
```
The `<slot></slot>` element renders the content of the light DOM, since we still have to show the existing content, and the `:host` pseudo-class selector selects the host of the shadow DOM.
## Debounce 🎉
In web development we often use what's known as a "debounce" to delay doing something. The simple example: people sometimes click a button multiple times, often by accident, sometimes intentionally. Rather than taking the same action once per click, you might wait a moment for them to stop clicking before doing anything. This is where debounce comes into play.
```js
function debounce(fn, wait) {
let timeout = null;
return function (...args) {
const next = () => fn.apply(this, args);
clearTimeout(timeout);
timeout = setTimeout(next, wait);
};
}
```
A debounce function accepts a function and a "wait time", the delay before your function is actually executed. To debounce a button's onclick function, you would pass its standard onclick function into the debounce function:
```js
const onclick = () => { };
const debouncedClick = debounce(onclick, 500); // 500 milliseconds before the function is actually fired
```
```html
<button onclick="debouncedClick()" ></button>
```
## The result
So the result of all this is a function that listens to changes in the HTML document, waits 1 second for all the changes to finish applying, then scans the page for failing elements and uses The Shadow DOM to apply a red border around those elements. You can see a basic version of the code at [this Github Gist](https://gist.github.com/TerribleDev/51049146e00b36b0d8643f5e09d21ea8).
We log the Deque error object to the console which includes links to the failing elements. The result is whenever anyone develops new UI at CarGurus a giant ugly red border surrounds elements they don't write as accessible. This provides **immediate** feedback during the development process and prevents huge categories of accessibility issues from reaching production.
![An example of a failing element](1.jpg)

View File

@@ -0,0 +1,131 @@
title: "Building a remote cache server for Turborepo"
date: 2022-02-12 09:52
tags:
- Go
- Javascript
- Turborepo
- Devops
- Build
- node.js
---
[Turborepo](https://turborepo.org/) is a tool that came across my virtual desk recently. Monorepo development has been around for a long time. This is a strategy where all of your code remains in one repository, regardless of services; a lot of people use monorepos even for microservices. The huge upside is keeping everything in one place, which allows for development efficiencies such as grepping an entire codebase for specific keywords. A quick example would be a top-level directory with child directories that each contain an npm package; instead of publishing these packages, you access them locally as though they were published.
<!-- more -->
There are many tools in the Javascript ecosystem to manage monorepos. [Yarn](https://classic.yarnpkg.com/lang/en/docs/cli/workspaces/) and [npm](https://docs.npmjs.com/cli/v7/using-npm/workspaces) both have their own workspaces. [Lerna](https://lerna.js.org/) is a tool that people use to run commands across these packages. I've been a huge fan of monorepos for years. One of the big problems with this setup is build times. At [Quala](https://www.quala.io) we have around 38 packages, and some of my previous employers have had over 100. When you have these large repos, sometimes you make a change in a single package, but when you run `build` you have to wait for the entire repository to build, which can take a long time.
[Turborepo](https://turborepo.org/), however, caches the build output of packages, so when you change a package you get cache hits on the unchanged packages, and thus only build what you changed. This is not a new idea. Years ago Google built [bazel](https://bazel.build/), and a lot of people in C++ land have had remote builds. With Turborepo it seems the only official way to have remote caches is to use Vercel, or to host your own server. For many reasons, at [Quala](https://www.quala.io) I decided to opt for hosting our own server.
So to add turborepo to your monorepo, you need to add some [simple config](https://turborepo.org/docs/features/caching) to the root of your workspace, and your root `package.json` needs to replace its build command with `turborepo build`. In the case of remote caches you need to add
`--api="https://yourCacheServer.dev" --token="token" --team="team"`
Notice, the api flag does not contain a `/` at the end. Now according to the docs you don't need to pass a team, but I was unable to get the caches to register without it 🤷‍♀️
## The API
According to the [docs](https://turborepo.org/docs/features/remote-caching)
> You can self-host your own Remote Cache or use other remote caching service providers as long as they comply with Turborepo's Remote Caching Server API.

I opted to write the server in Go, and [I used Go Fiber](https://github.com/gofiber/fiber). At first I figured I could copy their structs into my project, but honestly the API is so simple there is no advantage to this.
To get a list of the APIs you need, you are linked to some [code written in Go](https://github.com/vercel/turborepo/blob/main/cli/internal/client/client.go). I reverse engineered this code a bit, and came up with 4 APIs and an auth token:
```
Authorization: Bearer ${token}
PUT: /v8/artifacts/:hash
GET: /v8/artifacts/:hash
GET: /v2/user
GET: /v2/teams
```
### Authorization
When turborepo sends requests it appends the `Authorization` header, which will contain our token. Ideally you would add to your server a way to auth a user and give them this token. In the example below we have a single token that comes from an environment variable. You really should have per-user auth.
```go
app.Use(func(c *fiber.Ctx) error {
authHeader := c.Get("Authorization")
if authHeader != "Bearer "+token {
c.Status(401).SendString("Unauthorized")
return nil
}
return c.Next()
})
```
### Handling Requests
The API pretty much breaks down like this.
`PUT: /v8/artifacts/:hash` will send a file that you must write somewhere. Some people opt for sending it to S3; I decided to use a persistent disk and save the artifacts on disk, since I wanted the fastest responses for the caches. Heck, if I'm going to remote-cache something that would still be kinda quick on an M1, it better perform.
```go
app.Put("/v8/artifacts/:hash", func(c *fiber.Ctx) error {
fmt.Println(string(c.Request().URI().QueryString()))
return os.WriteFile("./cache/"+c.Params("hash"), c.Request().Body(), 0644)
})
```
The same URL on a GET is simple: retrieve the file and serve it up, or return a 404.
```go
app.Get("/v8/artifacts/:hash", func(c *fiber.Ctx) error {
fmt.Println(string(c.Request().URI().QueryString()))
return c.SendFile("./cache/" + c.Params("hash"))
})
```
The last two, honestly, you don't need for things to work. You can just return a 200.
```go
app.Get("/v2/teams", func(c *fiber.Ctx) error {
return c.SendStatus(fiber.StatusOK)
})
app.Get("/v2/user", func(c *fiber.Ctx) error {
return c.SendStatus(fiber.StatusOK)
})
```
The `/v2/user` API is supposed to return information about the current user in the following shape. I'm pretty sure (not positive) `CreatedAt` is an [epoch](https://en.wikipedia.org/wiki/Unix_time) timestamp of when the user was created. I'm guessing it's largely used by Vercel.
```
{
ID string
Username string
Email string
Name string
CreatedAt int
}
```
The teams API is supposed to return something like the following.
```
{
Pagination {
Count: int,
Next: int,
Prev: int
}
Teams [
Team {
ID: string,
Slug: string,
Name: string,
CreatedAt: int,
Created: string
}
]
}
```
> What about the --team flag?
So when requests are made with `--team`, a query string `?slug=team` is added to the request. You can use this to ensure a particular user is in the given team, and you can fragment your caches by team. I omitted that code from the above example, but the easiest way would be a `./cache/${team}/${hash}` directory structure for the caches on disk. Note: on the GET requests you should auth the token against the team ID, and return a 404 if the user is not in the team. **I would not opt to return an Unauthorized status**, as bad actors can use it to cycle through tokens and learn which ones work to cause harm.
## The Result
An extremely minimal server [is in this github repo](https://github.com/TerribleDev/turbogo) (although you probably shouldn't use it without building it out more).

View File

@@ -0,0 +1,219 @@
title: Building attractive CLIs in TypeScript
date: 2022-07-08 05:18
tags:
- javascript
- typescript
- node
- cli
- tutorials
---
So you've come to a point where you want to build nice CLIs. There are a few different options for building them. My two favorites are [oclif](https://oclif.io/) and [commander.js](https://github.com/tj/commander.js/). I lean toward commander unless I know I'm building a super big app, and I've really enjoyed building smaller CLIs with commander recently.
<!-- more -->
> tl;dr? You can [view this repo](https://github.com/TerribleDev/example-ts-cli)
![a video of the CLI](cli.gif)
## Commander.js Lingo
So commander has a few different nouns.
* `Program` - The root of the CLI. Handles running the core app.
* `Command` - A command that can be run. These must be registered into `Program`
* `Option` - I would also call these `flags` they're the `--something` part of the CLI.
* `Arguments` - These are named positioned arguments. For example `npm install commander` the `commander` string in this case is an argument. `--save` would be an option.
## Initial Setup
First, do an npm init, and install commander, types for node, typescript, esbuild, and optionally ora.
```bash
npm init -y
npm install --save commander typescript @types/node esbuild ora
```
Next we have to configure a build command in the package.json. This one runs typescript to check for types and then esbuild to compile the app for node.
```json
"scripts": {
"build": "tsc --noEmit ./index.ts && esbuild index.ts --bundle --platform=node --format=cjs --outfile=dist/index.js",
}
```
We now need to add a bin property in the package.json. This tells the package manager that we have an executable. The key should be the name of your CLI
```json
"bin": {
"<yourclinamehere>": "./dist/index.js"
}
```
Make a file called index.ts, and place this string on the first line. This is called a shebang, and it tells your shell to use node when the file is run.
`#!/usr/bin/env node`
## Getting started
Hopefully you have done the above. Now in index.ts you can make a very basic program. Try `npm run build` and then run the CLI with `--help`. Hopefully you'll get some output.
```ts
#!/usr/bin/env node
import { Command } from 'commander'
import { spinnerError, stopSpinner } from './spinner';
const program = new Command();
program.description('Our New CLI');
program.version('0.0.1');
async function main() {
await program.parseAsync();
}
console.log() // log a new line so there is a nice space
main();
```
### Setting up the spinner
So, I really like loading spinners; I think they give a CLI a more polished feel. I added a spinner using ora, in a file called `spinner.ts`, which wraps the spinner and handles its spinning and stopped states.
```ts
import ora from 'ora';
const spinner = ora({ // make a singleton so we don't ever have 2 spinners
spinner: 'dots',
})
export const updateSpinnerText = (message: string) => {
if(spinner.isSpinning) {
spinner.text = message
return;
}
spinner.start(message)
}
export const stopSpinner = () => {
if(spinner.isSpinning) {
spinner.stop()
}
}
export const spinnerError = (message?: string) => {
if(spinner.isSpinning) {
spinner.fail(message)
}
}
export const spinnerSuccess = (message?: string) => {
if(spinner.isSpinning) {
spinner.succeed(message)
}
}
export const spinnerInfo = (message: string) => {
spinner.info(message)
}
```
### Writing a command
So I like to separate my commands out into sub-commands. In this case we're making `widgets` a sub-command. Make a new file; I call it widgets.ts. I create a new `Command` called `widgets`. Commands can have their own commands, making them sub-commands, so we can make sub-commands called `list` and `get`. **List** will list all the widgets we have, and **get** will retrieve a widget by id. I added a promise to emulate some delay so we can see the spinner in action.
```ts
import { Command } from "commander";
import { spinnerError, spinnerInfo, spinnerSuccess, updateSpinnerText } from "./spinner";
export const widgets = new Command("widgets");
widgets.command("list").action(async () => {
updateSpinnerText("Processing ");
// do work
await new Promise(resolve => setTimeout(resolve, 1000)); // emulate work
spinnerSuccess()
console.table([{ id: 1, name: "Tommy" }, { id: 2, name: "Bob" }]);
})
widgets.command("get")
.argument("<id>", "the id of the widget")
.option("-f, --format <format>", "the format of the widget") // an optional flag, this will be in options.f
.action(async (id, options) => {
updateSpinnerText("Getting widget " + id);
await new Promise(resolve => setTimeout(resolve, 3000));
spinnerSuccess()
console.table({ id: 1, name: "Tommy" })
})
```
Now let's register this command into our program (see the last line).
```ts
#!/usr/bin/env node
import { Command } from 'commander'
import { spinnerError, stopSpinner } from './spinner';
import { widgets } from './widgets';
const program = new Command();
program.description('Our New CLI');
program.version('0.0.1');
program.addCommand(widgets);
```
Do a build! Hopefully you can type `<yourcli> widgets list` and you'll see the spinner. When you call `spinnerSuccess` without any parameters, the previous spinner text will stop and become a green check. You can pass a message instead to print that to the console. You can also call `spinnerError` to make the spinner a red `x` and print the message.
### Handle unhandled errors
Back in index.ts we need to add a hook to capture unhandled errors. Add a verbose flag to the program so we can see more details about the error, but by default let's hide the errors.
```ts
const program = new Command('Our New CLI');
program.option('-v, --verbose', 'verbose logging');
```
Now we need to listen for the node unhandled promise rejection event and process it.
```ts
process.on('unhandledRejection', function (err: Error) { // listen for unhandled promise rejections
const debug = program.opts().verbose; // is the --verbose flag set?
if(debug) {
console.error(err.stack); // print the stack trace if we're in verbose mode
}
spinnerError() // show an error spinner
stopSpinner() // stop the spinner
program.error('', { exitCode: 1 }); // exit with error code 1
})
```
#### Testing our error handling
Let's make a widget action called `unhandled-error`. Do a build, and then run this action. You should see the error is swallowed. Now try again with `<yourcli> --verbose widgets unhandled-error` and you should see the error stack trace.
```ts
widgets.command("unhandled-error").action(async () => {
updateSpinnerText("Processing an unhandled failure ");
await new Promise(resolve => setTimeout(resolve, 3000));
throw new Error("Unhandled error");
})
```
## Organizing the folders
Ok, so you have the basics all set up. Now, how do you organize the folders? I like to have the top-level commands in their own directories. That way the folder structure mirrors the CLI. This is an idea I saw in oclif.
```
- index.ts
- /commands/widgets/index.ts
- /commands/widgets/list.ts
- /commands/widgets/get.ts
```
## So why not OCLIF?
A few simple reasons. OCLIF's getting-started template comes with an extremely opinionated typescript configuration. For large projects, I've found it to be incredible. However, for smaller-ish things, conforming to it means turning down the linter a lot. Overall, they're both great tools. Why not both?

View File

@@ -0,0 +1,97 @@
title: Dynamically changing the site-theme meta tag
date: 2022-04-12 11:05
thumbnailImage: 1.jpg
tags:
- javascript
- js
- react
---
So, in case you are unfamiliar, there is a meta tag called `<meta name="theme-color" content="...">` that is used to change the color of the navbar in desktop Safari, mobile Safari, and mobile Chrome. If you don't set a value, these browsers tend to find a color that matches the site to the best of their ability. However, sometimes even setting the value can leave the site looking ugly.
<!-- more -->
So, I've recently been working on an NFT project called [Squiggle Squatches](http://squigglesquatches.io/). NFT projects are essentially digital art projects for sale. Our website really needs to reflect our look and feel as much as it can. When I first loaded our page, I noticed this **huge** white bar at the top of Safari.
![An un-themed page with a big white bar at the top of the page](1.jpg)
> So I set out to change this. I knew there was a `<meta name="theme-color" content="...">` tag that can add the theme.
I first made the theme be the color of the top section, and this looked great!
![A theme where the top bar color matches the top section](2.jpg)
However after scrolling, I noticed this looked super ugly.
![A theme where the top bar color clashes on scroll](3.jpg)
So I decided to write some code to fix this problem.
## Listening to scroll events
So, I started by decorating certain tags with a `data-scroll-theme` attribute that signals our code to look at that element when computing the theme color. This looks like `<section data-scroll-theme class="blue/red/etc">content</section>`
I then ended up crafting this JS code. Basically: make a throttle function so we only fire our handler every 100ms, grab the default color, then on scroll figure out if any decorated boxes are at the top of the page, and if so set the meta tag to that box's background color.
```js
// a function to only call the wrapped functions every x milliseconds so the scroll event doesn't make our function run all the time
function throttle(func, timeFrame) {
var lastTime = 0;
return function(...args) {
var now = new Date().getTime();
if (now - lastTime >= timeFrame) {
func(...args);
lastTime = now;
}
};
}
// get the theme color on load so we can revert to this
const ogColor = document.querySelector('meta[name="theme-color"]')?.getAttribute('content');
// handle scroll event
const handleScroll = throttle(() => {
// find all tags that have the `data-scroll-theme` attribute
const targets = document.querySelectorAll('[data-scroll-theme]')
// are any targets at the top of the window?
const isTop = Array.from(targets).map((target) => {
const rect = target.getBoundingClientRect();
if (rect.y > 1) {
return null;
}
return { target, rect }
}).filter(Boolean).sort((a, b) => b.rect.y - a.rect.y)[0]
// if we found an element at the top of the document then
if (isTop) {
// set theme color meta tag to the background color of div
const color = window.getComputedStyle(isTop.target).getPropertyValue('background-color')
if (color) {
// find the theme color meta tag and set the attribute to it
document.querySelector('meta[name="theme-color"]')?.setAttribute('content', color);
}
} else if (ogColor) {
// set theme color meta tag to original
document.querySelector('meta[name="theme-color"]')?.setAttribute('content', ogColor);
}
// run every 100ms
}, 100)
document.addEventListener('scroll', handleScroll, { passive: true })
```
## End result
The end result is that the top bar of Safari changes as you scroll between blocks. This has made [Squiggle Squatches](http://squigglesquatches.io/) look way better on mobile.
<iframe width="662" height="1176" src="https://www.youtube.com/embed/iLksuqZP4L8" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
<!-- ![video](https://youtu.be/iLksuqZP4L8) -->
You can see a simpler example on [replit](https://replit.com/@TerribleDev/ScrollableTheme)
<iframe frameborder="0" width="100%" height="500px" src="https://replit.com/@TerribleDev/ScrollableTheme?embed=true#script.js"></iframe>

View File

@@ -0,0 +1,139 @@
title: Hosting Craft CMS on Heroku
date: 2022-02-24 07:19
tags:
- craftcms
- cms
- heroku
---
So, like most early startups, [Quala](https://www.quala.io) (where I currently work) bought into a Wordpress site to sell our product, probably before the product really existed. Flash forward: we have customers, and we're on a path to building a platform that changes the game on customer management. The Wordpress site was terrible for performance and [core web vitals](https://web.dev/vitals/). None of us know Wordpress, and we barely know any php. We had a huge drive to rebrand ourselves, but to do that we needed to edit the Wordpress theme 😬 or use something else.
<!-- more -->
*tl;dr you can use this sweet [deploy to heroku button](https://github.com/oof-bar/craft-heroku) that [oof.studio](https://oof.studio/) made. Most of this post is inspired by their implementation*
## Why Craft?
I was introduced to [CraftCMS](https://craftcms.com/) 2 years ago. Back then my first instinct was *eww php*, which might still be my primary reaction 🤣. At that time, and still today, I love the headless CMS ([Contentful](https://www.contentful.com/), [Sanity](https://www.sanity.io/)) + [Gatsby](https://www.gatsbyjs.com/) strategy. However, we are a startup, and for us every dollar counts. The license for Craft is $300/year; most of the other GraphQL CMSs we looked at were more expensive. We have a developer who's used craft, and I know some other [big brain craft people](https://www.johnlamb.me/).
## Craft + Heroku
So, Heroku is a platform for hosting webapps. They have good postgres support, and we've used them in the past. Apps on Heroku need to be [12 factor apps](https://12factor.net/): Heroku has an ephemeral file system, scales horizontally, and logs via stdout/stderr streams.
Craft is based on the Yii php framework. You'll need to use the official `php` buildpack for craft to work, and any libraries for Yii will work with Craft. When we started looking into this, I found a [deploy to heroku button](https://github.com/oof-bar/craft-heroku) that [oof.studio](https://oof.studio/) built. We had to fork this and update it. However, since then they've updated it (almost exactly how we did), so you may want to use their deploy button to get started. I didn't have much experience with craft, so much of this writing you can attribute to me reverse engineering their configs and updating them to the newest version of craft.
## Configuring Craft
Craft configuration sits in an `app.php` file. This file needs to add redis for sessions and for the cache (the store behind Craft's cache tags), and uses [codemix's logstream](https://github.com/codemix/yii2-streamlog) to pipe logs to stderr.
```php
'production' => [
'components' => [
'redis' => [
'class' => yii\redis\Connection::class,
'hostname' => parse_url(App::env('REDIS_URL'), PHP_URL_HOST),
'port' => parse_url(App::env('REDIS_URL'), PHP_URL_PORT),
'password' => parse_url(App::env('REDIS_URL'), PHP_URL_PASS)
],
'session' => [
'class' => yii\redis\Session::class,
'as session' => [
'class' => \craft\behaviors\SessionBehavior::class
]
],
'cache' => [
'class' => yii\redis\Cache::class,
'defaultDuration' => 86400
],
'log' => [
'targets' => [
[
'class' => codemix\streamlog\Target::class,
'url' => 'php://stderr',
'levels' => ['error', 'warning'],
'logVars' => []
]
]
]
]
]
```
There is also a `db.php` file for the db configuration, which uses heroku's `DATABASE_URL` environment variable in prod and [nitro's](https://craftcms.com/docs/nitro/2.x/) set of environment variables locally. You'll also need a `bootstrap.php` file like the following to set up the environment properly (including license keys).
```php
<?php
define('CRAFT_BASE_PATH', __DIR__);
define('CRAFT_VENDOR_PATH', CRAFT_BASE_PATH . '/vendor');
require_once CRAFT_VENDOR_PATH . '/autoload.php';
// Load dotenv?
if (class_exists('Dotenv\Dotenv')) {
Dotenv\Dotenv::createUnsafeImmutable(CRAFT_BASE_PATH)->safeLoad();
}
define('CRAFT_ENVIRONMENT', getenv('ENVIRONMENT') ?: 'production');
define('CRAFT_LICENSE_KEY', getenv('CRAFT_LICENSE_KEY'));
define('CRAFT_STORAGE_PATH', getenv('CRAFT_STORAGE_PATH') ?: '../storage');
define('CRAFT_STREAM_LOG', true);
```
## S3
In our case, the button didn't provide any support for uploaded files, so we went for S3. I added the Craft S3 plugin, configured it to read the api keys and bucket names from environment variables, then registered those variables in heroku.
## Other important files
Heroku requires a Procfile to launch apps.
```shell
web: vendor/bin/heroku-php-nginx -C nginx_app.conf web
worker: ./craft queue/listen --verbose
release: ./bin/release.sh
```
`release.sh` will run a db migration
```shell
if /usr/bin/env php /app/craft install/check
then
/usr/bin/env php /app/craft up --interactive=0
fi
```
An `nginx_app.conf` nginx config for heroku's php buildpack:
```nginx
if ($http_x_forwarded_proto != "https") {
return 301 https://$host$request_uri;
}
if ($host ~ ^www\.(.+)) {
return 301 https://$1$request_uri;
}
location / {
# try to serve file directly, fallback to rewrite
try_files $uri @rewriteapp;
}
location @rewriteapp {
# rewrite all to index.php
rewrite ^(.*)$ /index.php?p=$1 last;
}
location ~ ^/(index)\.php(/|$) {
fastcgi_pass heroku-fcgi;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param HTTPS on;
}
# Global Config
client_max_body_size 20M;
```
## Anything else?
Nope, not really. You do need to be aware that craft's configuration must be treated as entirely immutable. Any changes to configuration, such as plugins, twig templates, etc., will need to be made in dev and pushed to Heroku. Nothing can be mutated in production other than the authoring of the site. Even file uploads!

View File

@@ -0,0 +1,240 @@
title: Hosting dotnet core on Heroku
date: 2021-07-19 00:01
tags:
- dotnet core
- cloud
- Heroku
- Postgres
---
I've been getting back into building scrappy little web apps for my friends. On top of this, I recently joined [a startup](https://quala.io), and getting away from Enterprise-class software has forced a huge mind-shift. In the recent past when I wanted to build apps I was thinking Kubernetes, Helm charts, etc. However, in small-app and startup land, reducing the barriers to shipping is very important.
<!-- more -->
In case you are not familiar, [Heroku](https://www.heroku.com) is a platform for hosting webapps. They host a free version of Postgres and Redis that is directly connected to your app with environment variables. Heroku has support for many languages, but one I saw missing from the list was dotnet.
To host apps on Heroku, you must know the basic *rules of Heroku*
1. Your app must listen on `$PORT` or `%PORT%` if you come from windows. Basically, any http listeners must listen to the port defined as an environment variable.
2. Postgres is free (to a point), redis is free, most other things cost money.
3. Logs must go to `stdout` which works well for us since that's the default behavior of asp.net core!
4. In dotnet core, authentication cookies are encrypted and the key is usually placed in your home directory, but on Heroku your app can be moved to another machine at any moment. The filesystem must be treated as stateless.
5. Heroku gives you your Postgres connection string as `postgres://<username>:<password>@<host>:<port>/<database>`
## Listening on $PORT
Traditionally dotnet core apps listen on an environment variable called `ASPNETCORE_URLS`, but in this case we need to override that behavior. In your `Program.cs` file you can make the following modification, which detects whether `$PORT` is defined and, if it is, listens on that port on all interfaces.
```csharp
public static IHostBuilder CreateHostBuilder(string[] args) =>
Host.CreateDefaultBuilder(args)
.ConfigureWebHostDefaults(webBuilder =>
{
var port = Environment.GetEnvironmentVariable("PORT");
if(!string.IsNullOrEmpty(port))
{
webBuilder.UseUrls($"http://*:{port}");
}
webBuilder.UseStartup<Startup>();
});
```
## Using Postgres with Entity Framework
On a `dotnet new mvc --auth individual` you are presented with the following block of code in `Startup.cs`
```csharp
services.AddDbContext<ApplicationDbContext>(options =>
options.UseSqlite(
Configuration.GetConnectionString("DefaultConnection")));
```
This configures your app to use SQLite as a DB; we need to switch this. Luckily the Npgsql project maintains an awesome Postgres integration for Entity Framework. Run the following command to add their package to your project:
`dotnet add package Npgsql.EntityFrameworkCore.PostgreSQL`
Then simply swap the previous code block for the following, which parses the database URL from Heroku and sets up a Postgres connection. You can use the docker-compose file and `appsettings.Development.json` below for local development.
```csharp
var databaseUrl = Configuration.GetValue<string>("DATABASE_URL");
var databaseUri = new Uri(databaseUrl);
var userInfo = databaseUri.UserInfo.Split(':');
var builder = new NpgsqlConnectionStringBuilder
{
Host = databaseUri.Host,
Port = databaseUri.Port,
Username = userInfo[0],
Password = userInfo[1],
Database = databaseUri.LocalPath.TrimStart('/'),
TrustServerCertificate = true
};
services.AddDbContext<ApplicationDbContext>(options =>
options.UseNpgsql(builder.ToString()));
```
*docker-compose.yml*
```yml
version: '3'
services:
postgres:
image: 'postgres:13'
ports:
- '6666:5432'
environment:
POSTGRES_PASSWORD: 'password'
POSTGRES_USER: 'admin'
```
*appsettings.Development.json*
```json
{
"DATABASE_URL": "postgres://admin:password@localhost:6666/main"
}
```
## Encryption keys
Ok, so you've got the basics running, but you need to store your encryption keys somewhere durable. We can store them in the database using Entity Framework! Add this to the `ConfigureServices` method in your `Startup.cs`, and make sure you `dotnet add package Microsoft.AspNetCore.DataProtection.EntityFrameworkCore`. You'll also need your dbContext to implement `IDataProtectionKeyContext`, as sketched after the snippet below.
```cs
services.AddDataProtection().PersistKeysToDbContext<ApplicationDbContext>();
```
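For reference, a minimal sketch of that dbContext shape (the class and base-class names here are the `dotnet new mvc --auth individual` template defaults; adjust to your project):
```csharp
using Microsoft.AspNetCore.DataProtection.EntityFrameworkCore;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;

public class ApplicationDbContext : IdentityDbContext, IDataProtectionKeyContext
{
    public ApplicationDbContext(DbContextOptions<ApplicationDbContext> options)
        : base(options) { }

    // IDataProtectionKeyContext requires this DbSet; PersistKeysToDbContext
    // reads and writes the encryption keys through it.
    public DbSet<DataProtectionKey> DataProtectionKeys { get; set; }
}
```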
## Database Migrations
There are several ways to handle database migrations. For simple web apps you can have your app run migrations on startup. More complex apps should shell out to the `dotnet ef` CLI from the release phase of [Heroku's Procfile](https://devcenter.heroku.com/articles/release-phase); see the sketch after the code block below.
```csharp
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
    using(var scope = app.ApplicationServices.GetRequiredService<IServiceScopeFactory>().CreateScope())
    using(var ctx = scope.ServiceProvider.GetRequiredService<ApplicationDbContext>())
    {
        // Migrate() creates the database if it doesn't exist and applies any
        // pending migrations. Don't pair it with EnsureCreated(), which builds
        // the schema without a migrations history and breaks future migrations.
        ctx.Database.Migrate();
    }
}
```
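If you go the release-phase route instead, one option (a sketch, assuming EF Core 6+; the bundle has to be produced at build time and shipped alongside the app) is a self-contained migrations bundle:
```shell
# at build time, produce a self-contained migrations bundle
dotnet ef migrations bundle --self-contained -r linux-x64 -o efbundle
# then the Procfile's release phase just runs it:
#   release: ./efbundle
```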
## Forwarded protocol
Heroku sends an `X-Forwarded-Proto` header to tell your app what protocol a user is using. You'll want to add this to your `Configure` block before all other middleware:
```csharp
app.UseForwardedHeaders(new ForwardedHeadersOptions
{
ForwardedHeaders = ForwardedHeaders.XForwardedProto
});
```
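One caveat worth knowing: by default this middleware only trusts loopback proxies, so headers from Heroku's router may be ignored. A common fix, appropriate only when everything in front of your app is a trusted proxy, is to clear the allow-lists:
```csharp
var forwardedOptions = new ForwardedHeadersOptions
{
    ForwardedHeaders = ForwardedHeaders.XForwardedProto
};
// Heroku's router isn't in the default KnownNetworks/KnownProxies lists,
// so clear them to accept X-Forwarded-Proto from it.
forwardedOptions.KnownNetworks.Clear();
forwardedOptions.KnownProxies.Clear();
app.UseForwardedHeaders(forwardedOptions);
```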
## Getting your app in Heroku with containers
There are two basic methods for getting your app live on Heroku: push a Docker container, or use a buildpack to have Heroku build your app for you. I opted for the Docker container.
I stole this sample dockerfile from the aspnet core docker docs:
```dockerfile
FROM mcr.microsoft.com/dotnet/core/aspnet:3.1 AS base
WORKDIR /app
EXPOSE 80
EXPOSE 443
FROM mcr.microsoft.com/dotnet/core/sdk:3.1 AS build
WORKDIR /src
COPY ["./MyApp.csproj", "."]
RUN dotnet restore "MyApp.csproj"
COPY . .
WORKDIR "/src"
RUN dotnet build "MyApp.csproj" -c Release -o /app
FROM build AS publish
RUN dotnet publish "MyApp.csproj" -c Release -o /app
FROM base AS final
WORKDIR /app
COPY --from=publish /app .
ENTRYPOINT ["dotnet", "MyApp.dll"]
```
I then found someone had made a *build a docker image and push to Heroku* GitHub action. All I had to do was create this as `.github/workflows/deployContainerToHeroku.yml`, turn on GitHub Actions, and register my Heroku API key as a secret in GitHub:
```yml
name: Deploy to Heroku.
# Run workflow on every push to master branch.
on:
push:
branches: [master]
# Your workflows jobs.
jobs:
build:
runs-on: ubuntu-latest
steps:
# Check-out your repository.
- name: Checkout
uses: actions/checkout@v2
### ⬇ IMPORTANT PART ⬇ ###
- name: Build, Push and Release a Docker container to Heroku. # Your custom step name
uses: gonuit/heroku-docker-deploy@v1.3.3 # GitHub action name (leave it as it is).
with:
# Below you must provide variables for your Heroku app.
# The email address associated with your Heroku account.
# If you don't want to use repository secrets (which is recommended) you can do:
# email: my.email@example.com
email: ${{ secrets.HEROKU_EMAIL }}
# Heroku API key associated with provided user's email.
# Api Key is available under your Heroku account settings.
heroku_api_key: ${{ secrets.HEROKU_API_KEY }}
# Name of the Heroku application to which the build is to be sent.
heroku_app_name: ${{ secrets.HEROKU_APP_NAME }}
# (Optional, default: "./")
# Dockerfile directory.
# For example, this project keeps its Dockerfile in ./src/MyApp:
dockerfile_directory: ./src/MyApp
# (Optional, default: "Dockerfile")
# Dockerfile name.
dockerfile_name: Dockerfile
# (Optional, default: "")
# Additional options of docker build command.
docker_options: "--no-cache"
# (Optional, default: "web")
# Select the process type for which you want the docker container to be uploaded.
# By default, this argument is set to "web".
# For more information look at https://devcenter.heroku.com/articles/process-model
process_type: web
```
## Getting your app in Heroku with buildpacks
Heroku has long had a system called *buildpacks*, which let you script the creation of your app's hosting environment. Someone has done the dirty work and [built a dotnet core buildpack](https://elements.heroku.com/buildpacks/jincod/dotnetcore-buildpack) which can be used to deploy dotnet core apps to Heroku. To use it, create an app in Heroku and set your [buildpack to the dotnet core buildpack](https://elements.heroku.com/buildpacks/jincod/dotnetcore-buildpack) in settings. Connect your GitHub repo and Heroku will do the hard work for you!
## Finish
I hope you liked this. Keep on hacking away!

View File

@@ -0,0 +1,34 @@
title: How to host a javascript monorepo on Heroku
date: 2022-03-01 10:35
tags:
- javascript
- Heroku
---
So I've been using monorepos for some time, and recently I've gotten a lot of questions about how to host them on Heroku. I figured I'd give you the simple guide. There are two basic scenarios: the root of your git repo holds your yarn/npm workspace, or you have a folder inside of a git repo you wish to deploy.
<!-- more -->
## Scenario 1: yarn/npm workspace
In this case, create a Heroku app with the official nodejs buildpack. Add `"heroku-postbuild": "YourBuildCommand"` to the scripts section of the root package.json (a sketch follows below). This runs after the npm install and can run any build commands you need (such as compiling TypeScript). Then use [the multi-procfile buildpack](https://github.com/heroku/heroku-buildpack-multi-procfile), which grabs a Procfile from any directory and copies it to the root to boot your app. That way your monorepo can have a `server/package.json` package that contains your web app, and alongside it the procfile `server/Procfile`.
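A minimal sketch of such a root package.json (the `server` workspace and the build command are assumptions matching the example above; adapt them to your repo):
```json
{
  "private": true,
  "workspaces": ["server"],
  "scripts": {
    "heroku-postbuild": "yarn workspace server run build"
  }
}
```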
Your buildpacks should have this order:
```
heroku/nodejs
heroku-buildpack-multi-procfile
```
The multi-procfile buildpack requires an environment variable called `PROCFILE` containing the path to the Procfile to use, for example `/server/Procfile`. Usually my Procfile contains a workspace command to start the server:
```
web: yarn workspace server run start
```
## Scenario 2: Folder inside of Git Repo
So this is a strategy where you make a Heroku app from a nested directory, without using a yarn workspace. In this case you can use the [monorepo buildpack](https://github.com/lstoll/heroku-buildpack-monorepo) to copy a subdirectory to the root before the build happens. After that buildpack, include the `heroku/nodejs` buildpack, which runs the npm/yarn/etc. install commands and then uses the `Procfile` in that directory to start your app. A sketch of the setup follows below.
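A sketch of wiring that up with the Heroku CLI (per that buildpack's README, `APP_BASE` names the subdirectory to promote; `server` here is an assumed folder name):
```shell
heroku buildpacks:add -i 1 https://github.com/lstoll/heroku-buildpack-monorepo
heroku buildpacks:add -i 2 heroku/nodejs
# tell the monorepo buildpack which folder is the app root
heroku config:set APP_BASE=server
```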

View File

@@ -15,7 +15,7 @@ Getting Started:
Ok, so the Alexa .NET SDK is for the full framework only, and it's built for WebAPI. The best way to get going is in Visual Studio: `file -> new project -> ASP.NET Web Application .net framework`. A dialog comes up, and I picked `Azure API App`.
![dialog picker](dialog.png)
![dialog picker](dialog.PNG)
Now you have an empty webapi project. We don't need swashbuckle/swagger, so let's get rid of that.

View File

@@ -80,7 +80,7 @@ So the major feature I was blown away by with NDepend was how clean, and organiz
The code quality rules use NDepend's querying engine to inspect your code. When you click on a rule, the LINQ query used is displayed in a separate window. You can use this window to create your own rules with the same querying engine. The following is a query to find code that should not be declared public.
<pre>
//<Name>Avoid public methods not publicly visible</Name>
//Avoid public methods not publicly visible
// Matched methods are declared public but are not publicly visible by assemblies consumers.
// Their visibility level must be decreased.

View File

@@ -0,0 +1,89 @@
title: Optimizing heroku's node_module cache for JS monorepos
tags:
- JS
- javascript
- heroku
- cloud
- devops
- node.js
date: 2021-10-12 00:00
---
For many of us a JS workspace is the simplest way to structure code for future growth while keeping iterations quick. In case you are unfamiliar, several technologies exist, such as `yarn workspaces`, `lerna`, `npm workspaces`, etc., that can seamlessly stitch together npm packages on disk as though they were published to a private NPM registry. This allows for fast iteration inside a single git repo, while leaving open a future where these dependencies could be split out.
<!-- more -->
The file system looks something like the following
```
root/
packages/
server
workers
data
utils
```
In my quick example we can pretend that an express app is in server, and some background workers are in workers; both apps need to share code. One strategy would be to version the `data` and `utils` packages and ship them to a private NPM registry, or we could use these monorepo technologies so that `import utils from 'utils'` just works without a remote package store. When installing node modules into a JS workspace, the following can occur:
```
root/
node_modules
packages/
server/node_modules
data
utils
worker/node_modules
```
In the above scenario node modules are resolved into the root package but also several layers deep. On Heroku you can cache your `node_modules` to improve build speed; however, the paths to these directories **must be declared prior to the build**. This becomes an issue when big monorepos litter `node_modules` everywhere.
I decided to write the following JS script to walk the directories where `node_modules` could be placed and rewrite the root `package.json` file so those directories are explicitly declared:
```js
const glob = require('glob');
const fs = require('fs');
const path = require('path');
// do not run this in the heroku build
// we treat this a bit more like a yarn lockfile
if(process.env.NODE_ENV !== 'production') {
glob("./packages/*/node_modules", {}, function (er, result) {
const packageJson = require('./package.json');
// include the root node_modules
let cacheDirectories = ['node_modules'];
cacheDirectories = cacheDirectories.concat(result)
packageJson.cacheDirectories = cacheDirectories.filter(i => {
// only keep node_modules directories that sit next to a package.json file
return fs.existsSync(path.resolve(i, '../package.json'));
});
// write out the changes to the root package.json
fs.writeFileSync('./package.json', JSON.stringify(packageJson, null, 2));
})
}
```
I wired the script into the postinstall step of the install lifecycle by adding the following to the root `package.json` file:
```json
{
"scripts": {
"postinstall": "node ./computeCacheDirectories.js",
}
}
```
Now every time a developer runs `yarn install`, the cache directories are recomputed. The result is a mutation to `package.json` that looks like the following:
```json
{
"cacheDirectories": [
"node_modules",
"./packages/server/node_modules",
"./packages/worker/node_modules"
]
}
```
When we push changes to prod we get much better cache hits across our yarn workspace.

View File

@@ -339,4 +339,105 @@ Environment.Exit(result);
Here is the full source as a [gist](https://gist.github.com/TerribleDev/06abb67350745a58f9fab080bee74be1#file-program-cs):
<script src="https://gist.github.com/TerribleDev/06abb67350745a58f9fab080bee74be1.js"></script>
```csharp
public static void Main(string[] args)
{
var app = new Microsoft.Extensions.CommandLineUtils.CommandLineApplication();
var catapult = app.Command("catapult", config => {
config.OnExecute(()=>{
config.ShowHelp(); //show help for catapult
return 1; //return error since we didn't do anything
});
config.HelpOption("-? | -h | --help"); //show help on --help
});
catapult.Command("help", config => {
config.Description = "get help!";
config.OnExecute(()=>{
catapult.ShowHelp("catapult");
return 1;
});
});
catapult.Command("list", config => {
config.Description = "list catapults";
config.HelpOption("-? | -h | --help");
config.OnExecute(()=>{
Console.WriteLine("a");
Console.WriteLine("b");
return 0;
});
});
catapult.Command("add", config => {
config.Description = "Add a catapult";
config.HelpOption("-? | -h | --help");
var arg = config.Argument("name", "name of the catapult", false);
config.OnExecute(()=>{
if(!string.IsNullOrWhiteSpace(arg.Value))
{
//add snowballs somehow
Console.WriteLine($"added {arg.Value}");
return 0;
}
return 1;
});
});
catapult.Command("fling", config =>{
config.Description = "fling snow";
config.HelpOption("-? | -h | --help");
var ball = config.Argument("snowballId", "snowball id", false);
var cata = config.Argument("catapultId", "id of catapult to use", false);
config.OnExecute(()=>{
//actually do something
Console.WriteLine($"threw snowball: {ball.Value} with {cata.Value}");
return 0;
});
});
var snowball = app.Command("snowball", config => {
config.OnExecute(()=>{
config.ShowHelp(); //show help for snowball
return 1; //return error since we didn't do anything
});
config.HelpOption("-? | -h | --help"); //show help on --help
});
snowball.Command("help", config => {
config.Description = "get help!";
config.OnExecute(()=>{
catapult.ShowHelp("snowball");
return 1;
});
});
snowball.Command("list", config => {
config.HelpOption("-? | -h | --help");
config.Description = "list snowballs";
config.OnExecute(()=>{
Console.WriteLine("1");
Console.WriteLine("2");
return 0;
});
});
snowball.Command("add", config => {
config.Description = "Add a snowball";
config.HelpOption("-? | -h | --help");
var arg = config.Argument("name", "name of the snowball", false);
config.OnExecute(()=>{
if(!string.IsNullOrWhiteSpace(arg.Value))
{
//add snowballs somehow
Console.WriteLine($"added {arg.Value}");
return 0;
}
return 1; //return error since nothing was added
});
});
//give people help with --help
app.HelpOption("-? | -h | --help");
var result = app.Execute(args);
Environment.Exit(result);
}
```

View File

@@ -34,5 +34,3 @@ Eventually we bit the bullet and decided to sign our requests to the cluster. Un
This project totally saved my bacon. Brandon's library plugged right into the .NET SDK, and auth'd our requests to AWS without us having to figure out all that crypto. Within moments of finding it I filed an [issue](https://github.com/bcuff/elasticsearch-net-aws/issues/1) thanking Brandon, as it really helped me out.
The Elasticsearch service offering by Amazon is pretty awesome. Like any platform it's less flexible than hosting the instances yourself. You have to live with the plugins they ship, but on the plus side you get a full cluster, with monitoring, and a knob to turn up instances or storage space without having to worry about the details.
<script src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>

View File

@@ -0,0 +1,256 @@
title: Serving AMP Pages with Dotnet Core
date: 2022-03-10 06:00
tags:
- dotnet
- dotnetcore
- AMP
---
I remember when AMP (Accelerated Mobile Pages) first came out, and it was very restrictive and weird. I think this ultimately hurt the *AMP brand*. Beyond this, several companies have built AMP experiences that haven't always been great. I do, however, think AMP pages always load extremely fast; a lot of that is just the constraints of AMP. Last night I put my blog posts on AMP for a laugh, and it was much easier than I thought it would be.
<!-- more -->
## Step 0
Download the [AMP chrome extension](https://chrome.google.com/webstore/detail/amp-validator/nmoffdblmcmgeicmolmhobpoocbbmknc?hl=en) and read what your violations are on an existing page you want to serve as an amp page.
## AMP Requirements
So these days AMP is a webpage with several restrictions:
* No JavaScript, or rather, very restrictive JS. JS is possible, but not without work; for the sake of this tutorial I decided to skip the JS.
* Inline-only CSS
* No `picture` tags
* A few extra tags AMP requires.
## Razor
First things first, we need to figure out how to adjust our layout for AMP. The easiest way for a layout to get a variable from any controller or razor page is the `ViewData` dictionary. I added the following at the top of my layout page; it lets me read whether we are on an amp page.
```csharp
@{
var amp = ViewData["amp"] as bool? ?? false;
var htmlTag = amp ? "amp" : "";
}
```
Ok, so let's dive into the required HTML markup. AMP pages require...
* an `<html>` tag with an `amp` attribute.
* a `<head>` tag with a `<style amp-boilerplate>` tag that contains some boilerplate CSS.
* The AMP JS runtime
* `<link>` tags to point the non-amp page at the amp page.
The HTML tag is an easy start. The code block above has an `htmlTag` variable that is used on the tag:
```cshtml
<html lang="en" @htmlTag>
```
The head tag containing the boilerplate CSS is easy. Note that the boilerplate CSS contains `@` signs, which need to be written as `@@` in razor to escape them.
```cshtml
@if(amp)
{
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
}
```
Finally, the JS runtime. This also needs to go in the head tag; you can include it with the boilerplate code.
```cshtml
@if(amp)
{
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
<script async src="https://cdn.ampproject.org/v0.js"></script>
}
```
### Inline CSS
AMP pages must have inlined CSS. To accomplish this I wrote this tag helper, which loads a comma-separated list of files into memory and then inlines them. The `<style>` tag your CSS ends up in must have the `amp-custom` attribute.
```csharp
[HtmlTargetElement("inline-style")]
public class InlineStyleTagHelper : TagHelper
{
[HtmlAttributeName("href")]
public string Href { get; set; }
private IWebHostEnvironment HostingEnvironment { get; }
private IMemoryCache Cache { get; }
public InlineStyleTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
{
HostingEnvironment = hostingEnvironment;
Cache = cache;
}
public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
{
var paths = Href.Split(',');
// Get the value from the cache, or compute the value and add it to the cache
var fileContent = await Cache.GetOrCreateAsync("InlineStyleTagHelper-" + paths, async entry =>
{
var fileProvider = HostingEnvironment.WebRootFileProvider;
var result = paths.Select(async path => {
if(HostingEnvironment.IsDevelopment())
{
var changeToken = fileProvider.Watch(path);
entry.AddExpirationToken(changeToken);
}
entry.SetPriority(CacheItemPriority.NeverRemove);
var file = fileProvider.GetFileInfo(path);
if (file == null || !file.Exists)
return null;
return await ReadFileContent(file);
});
var allFinished = await Task.WhenAll(result);
return string.Join("\n", allFinished);
});
if (fileContent == null)
{
output.SuppressOutput();
return;
}
output.TagName = "style";
output.Attributes.RemoveAll("href");
output.Content.AppendHtml(fileContent);
}
private static async Task<string> ReadFileContent(IFileInfo file)
{
using (var stream = file.CreateReadStream())
using (var textReader = new StreamReader(stream))
{
return await textReader.ReadToEndAsync();
}
}
}
```
```cshtml
@if(amp)
{
<inline-style amp-custom href="css/site.css,css/site.desktop.css,css/site.mobile.css"></inline-style>
}
else
{
<link asp-append-version="true" rel="stylesheet" href="~/css/site.css" />
<link asp-append-version="true" rel="stylesheet" href="~/css/site.mobile.css" />
}
```
### Javascript
AMP [does allow for using JS](https://amp.dev/documentation/components/amp-script/) in a web worker. This has a lot of caveats, and for my use case (this blog) it was better to just skip rendering any JS. I guarded the `RenderSection` call for the scripts section behind an `if(!amp)` statement:
```cshtml
@if(!amp)
{
@RenderSection("Scripts", required: false)
<script asp-append-version="true" src="~/your/script.js" async></script>
}
```
### Link tags
On pages that render AMP, you'll need to generate two link tags. The first is a canonical tag that tells Google what the page's canonical URL is. The second tells Google where the amp version of that URL lives. This is data you typically want to pass to the model of the view you are rendering. Add these tags to the head of the layout through a section:
```cshtml
@section Head {
<link rel="canonical" href="@Model.Post.CanonicalUrl" />
<link rel="amphtml" href="@Model.Post.AMPUrl">
}
```
## Routes
In my implementation I added `/amp` to the end of my URLs for amp. Then in the controller you can set `this.ViewData["amp"] = amp == "amp";` to mark whether the page renders as amp or not. If you prefer, you can pass the boolean through a view model instead; that works just as well.
```csharp
[Route("{postUrl}/{amp?}")]
public IActionResult Post(string postUrl, string amp = "")
{
if(!String.IsNullOrEmpty(amp) && amp != "amp")
{
// handle 404s
return NotFound();
}
ViewDictionary["amp"] = amp == "amp";
return new View(model: new ViewModel ());
}
```
## Google Analytics
There is a snippet of code that makes GA work in an AMP page. I made the following partial view, which I call from the layout page:
```cshtml
@{
Layout = null;
var amp = ViewData["amp"] as bool? ?? false;
}
@if(!amp)
{
<script>
window.dataLayer = window.dataLayer || [];
function gtag() { dataLayer.push(arguments); }
gtag('js', new Date());
gtag('config', 'GTAG_ID');
document.addEventListener('DOMContentLoaded', function () {
var script = document.createElement('script');
script.src = 'https://www.googletagmanager.com/gtag/js?id=GTAG_ID';
script.async = true
document.body.appendChild(script);
});
</script>
}
else
{
<amp-analytics type="gtag" data-credentials="include">
<script type="application/json">
{
"vars" : {
"gtag_id": "GTAG_ID",
"config" : {
"GTAG_ID": { "GTAG_ID": "default" }
}
}
}
</script>
</amp-analytics>
}
```
## So what's next?
Go through your pages and look at the violations in the Chrome extension. Once you push the pages live and register them in your sitemap, errors with amp pages will appear in [the Google Search Console](https://search.google.com/search-console/about) as Google indexes your AMP pages.
## I need more help!
You can look at [my implementation](https://github.com/TerribleDev/blog.terrible.dev/commit/83eb1bc565dfb4bdb38d3c5f0cbfbc21b05ad4b2).

View File

@@ -0,0 +1,50 @@
title: Speeding up CraftCMS on Heroku
date: 2022-04-13 07:55
tags:
- nginx
- craftcms
- craft
---
So, I previously [blogged about how we hosted CraftCMS](/Hosting-craft-on-heroku/) on Heroku. When we built the marketing site for [Quala](https://www.quala.io), the twig templates were built for maximum authoring flexibility at the cost of some TTFB problems. We knew this going into the project. In an ideal world we would use GatsbyJS to build the frontend, but we were very limited on time. When we went live, we saw a dramatic improvement to First Contentful Paint, but a huge regression in Time To First Byte, averaging 1.3 seconds.
<!-- more -->
The initial thinking that was bounced around was *we just need caching*, as our previous WordPress site had cached all renderings in memory. However, we wanted to start rendering CSRF tokens to the browser and collecting form data. Furthermore, I struggled to accept caching as the whole solution. Simply put, I'm not a fan of PHP, and the Yii framework is known to be slow even within the PHP community, but I couldn't believe it should be *that* slow. We did sprinkle some cache tags around our twig templates, and it did improve things, but not enough to brag about. So I started digging into the docs for Heroku, Nginx, and FastCGI.
## Heroku's buildpack
So [Heroku's buildpack docs](https://devcenter.heroku.com/articles/php-support#php-fpm-configuration) contain a lot of very good information. Props to them for the docs! I ran into this one quote:
> PHP-FPM is set up to automatically spawn a suitable number of worker processes depending on dyno size and the configured PHP memory_limit
This made me go look at another article of theirs on [php concurrency](https://devcenter.heroku.com/articles/php-concurrency). It boils down to this: different dynos have different memory limits. Heroku assumes 128MB per PHP process and divides the dyno's total memory by that figure to determine how many worker processes to run on a single dyno. They also look for a `.user.ini` file if you want to override the memory defaults. So first I realized our `.user.ini` file specified `memory_limit = 256M`, which was giving us half as many processes per dyno, so I set it back to 128M. Ok great, this improved things a little. I then read that you could override the default concurrency by setting the environment variable `WEB_CONCURRENCY` to whatever you wanted. This did come with a warning:
> When setting WEB_CONCURRENCY manually, make sure that its value multiplied by your memory_limit does not exceed the amount of RAM available on your dyno type.
Now I started doing some load testing of my own, and while it would oversubscribe the dyno, I set us to 10 processes on a 2x dyno. Theoretically that could cause us to OOM, but under basic load testing it didn't seem likely to happen. This gave us some boost, but not as much as we hoped. I was still very stuck, and I had a suspicion there was some problem between PHP and Nginx slowing things down. I used the Craft diagnostic tools, and I couldn't find more than 400ms being spent in SQL queries, which didn't account for the almost 1-second page loads I still had.
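For reference, that override is just a config var on the app, e.g.:
```shell
# oversubscribes a 2x dyno on purpose; load test before trusting this
heroku config:set WEB_CONCURRENCY=10
```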
## Nginx configs
Ok, so I started looking around, and I found a [TON of great CraftCMS content by nystudio107](https://nystudio107.com/). I don't quite remember which article, but I stumbled across several that implied I needed better fastcgi settings in Nginx. So, I [forked the heroku buildpack](https://github.com/qualaio/heroku-buildpack-php) and got to work. I ended up with these settings:
```nginx
# bigger buffers so PHP responses aren't spooled to temp files
fastcgi_buffers 256 16k;
fastcgi_buffer_size 128k;
# fail fast on connect, but give slow pages time to render
fastcgi_connect_timeout 10s;
fastcgi_send_timeout 120s;
fastcgi_read_timeout 120s;
fastcgi_busy_buffers_size 256k;
fastcgi_temp_file_write_size 256k;
# free memory held by timed-out client connections
reset_timedout_connection on;
```
## Brotli
While I was in the config, I decided *what the hell, let's get brotli working*. [Brotli](https://github.com/google/brotli) is a compression format that is more compact than gzip; over the wire, assets are usually 5-10% smaller than their gzipped equivalents. So sending brotli when the browser supports it is a big win. Turns out there is an [issue filed in 2019 with heroku](https://github.com/heroku/heroku-buildpack-php/issues/356) to add it, but it's not gone anywhere. Ultimately, I found someone else had [figured out how to add it](https://github.com/seyallin/heroku-brotli-nginx). I made some changes and added it to our fork. You can view all of our changes in [github's compare view](https://github.com/heroku/heroku-buildpack-php/compare/main...qualaio:main#diff-ff7b43f722c67a80d4c82bf656918b3bf96f553a5ad1f62ef185dff16582f033R24-R31).
## Results
The result was a **huge** drop in TTFB, which improved our overall Lighthouse score by 30 points. The other great thing is that we're moderately fast without caches, which means caching can only improve the situation further.
![A graph showing a drop in response time from over 1 second to less than one](1.png)

View File

@@ -0,0 +1,49 @@
title: Must have vscode plugins for front-end devs
date: 2019-02-06
tags:
- visual studio
- javascript
- css
- front-end
---
I've had a lot of people ask me about my choice of editors and plugins. A while back I switched to vscode for all my programming work, for both front and back end. In the past I've blogged about [the best plugins for visual studio](/VS-2017-best-extensions-on-launch/) as a backend dev, but I thought I'd give you a more front-end angle.
<!-- more -->
## Document this
My first one, and in my opinion the most underrated, is [document this](https://marketplace.visualstudio.com/items?itemName=joelday.docthis). If you have ever had to write [jsdoc](http://usejsdoc.org/) comments you know how tedious it gets, and if you haven't, trust me, you should. VSCode and most other editors can read [jsdoc](http://usejsdoc.org/) comments above functions and class declarations to improve intellisense and type completion. Simply put your cursor over a function, invoke document this, and you will quickly be given jsdoc comments for your code.
![Animated gif showing off document this](document-this.gif)
## Import Cost
Another extension I find vital to my every day is [import cost](https://marketplace.visualstudio.com/items?itemName=wix.vscode-import-cost). This is a package that leaves you little notes beside every import telling you how big it will be. It will even highlight the size text in red for large imports, which you can configure. What I love about this package is that it tells me when the package I'm about to use is going to be very expensive size-wise. That way I find out long before I commit the code and my pages get slow.
![a static image showing off import cost](import-cost.png)
## ESlint and Prettier
Hopefully neither of these will be new to you. ESLint is a linting tool that looks for potential errors in your code; Prettier is an opinionated style enforcer. The [eslint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) and [prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) extensions for vscode can automatically show you problems in your code as you type, and can even fix your code on save. What I love about both of these tools is that together they make a great force for improving your code base: Prettier eliminates many debates over code style between team members, and eslint prevents you from shipping many bugs to production. Calling out problems as you type shortens feedback loops and increases your productivity.
## Filesize
As a web developer I spend a lot of my time looking at file sizes. File sizes are ever inflating and causing pain for bandwidth-constrained devices. I often download bundles and inspect their compiled source, or just have to look at how big a file is on the filesystem. A big tool in my belt is [filesize](https://marketplace.visualstudio.com/items?itemName=mkxml.vscode-filesize). This is a crazy simple extension, but one that brings me joy every day. The premise is simple: print the size of the current file in the status bar at the bottom. Click on it, and you get a nice output of its gzipped size and mime type. Dirt simple, but it saves me a ton of time every day!
![a picture of the filesize plugin in action](filesize2.jpg)
## Runner ups
Here is a list of additional extensions I certainly couldn't live without
* [path intellisense](https://marketplace.visualstudio.com/items?itemName=christian-kohler.path-intellisense) - autocomplete file paths in various files (including html)
* [npm intellisense](https://marketplace.visualstudio.com/items?itemName=christian-kohler.npm-intellisense) - autocomplete npm packages in imports
* [html 5 boilerplate](https://marketplace.visualstudio.com/items?itemName=sidthesloth.html5-boilerplate) - dirt simple html boilerplate snippets
* [icon fonts](https://marketplace.visualstudio.com/items?itemName=idleberg.icon-fonts) - Autocomplete for various icon fonts such as font awesome
* [git lens](https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens) - Show git history inline, along with other information from git

View File

@@ -26,6 +26,46 @@ So what I personally like to do is find orange bars that often make up the bulk
So digging into other blog posts, I found posts showing how to [visualize your redux actions](https://medium.com/@vcarl/performance-profiling-a-redux-app-c85e67bf84ae) using the same performance API mechanisms react uses. That blog post uses redux middleware to add timings to actions. This narrowed down our performance problems, but did not point out the exact selector that was slow. Clearly we had an action that was triggering an expensive state update, but the time was still spent in `anonymous function`. That's when I had the idea to wrap reselect selector functions in a function that can append the timings. [This gist is what I came up with](https://gist.github.com/TerribleDev/db48b2c8e143f9364292161346877f93)
```js
import {createSelector} from 'reselect';
const hasPerformanceApi =
window &&
window.performance &&
window.performance.measure &&
window.performance.mark;
const createFuncWithMark = (name, callback) => (...args) => {
const startMark = `${name}-Startmark`;
const endMark = `${name}-EndMark`;
window.performance.mark(startMark);
const result = callback(...args);
window.performance.mark(endMark);
window.performance.measure('♻️ ' + `${name}-Selector`, startMark, endMark);
window.performance.clearMarks(startMark);
window.performance.clearMarks(endMark);
window.performance.clearMeasures(startMark);
window.performance.clearMeasures(endMark);
return result;
};
export const createMarkedSelector = (name, ...args) => {
if (!hasPerformanceApi) {
return createSelector(...args);
}
if (!name || typeof name !== 'string') {
throw new Error('marked selectors must have names');
}
const callback = args.pop();
const funcWithMark = createFuncWithMark(name, callback);
args.push(funcWithMark);
return createSelector(...args);
};
```
So how does this work exactly? Well, it's a wrapper around the function you pass to reselect that adds performance marks to the window, telling you how long each selector takes to run. Combined with the previously mentioned blog post, you can now get selector timings in Chrome's performance tool! You can also combine this with the [redux middleware](https://medium.com/@vcarl/performance-profiling-a-redux-app-c85e67bf84ae) I previously mentioned to get deeper insight into how your app is performing.
![a preview of selectors reporting their performance](2.png)

View File

@@ -0,0 +1,14 @@
title: About
date: 2022-03-08 01:03
isLanding: true
permalink: about
---
I am a software engineer. I currently work at [Quala](https://www.quala.io). I have worked on all areas of the stack: sysadmin, network engineer, backend developer, and frontend developer. I've helped build some extremely large-scale websites such as [Vistaprint](https://www.vistaprint.com) and [CarGurus](https://www.cargurus.com). I have a passion for high-performing software, devops, and front end. I am a huge fan of [JavaScript](https://en.wikipedia.org/wiki/JavaScript), [C#](https://en.wikipedia.org/wiki/C_Sharp), [Golang](https://en.wikipedia.org/wiki/Go_(programming_language)), and [Rust](https://en.wikipedia.org/wiki/Rust_(programming_language)).
I blog about my general pains building software.
## Why Terrible Dev?
Honestly, I was a network engineer, and I worked with many developers. They'd often blame bugs on the network or the database. I heard a lot of *it works on my machine*. I started the [TerribleDev](https://www.twitter.com/terribledev) twitter handle, where I posted some things developers said. Then when I became a developer, I figured I'd just make it my handle. These days, I'm the one blaming the network 🤣.

View File

@@ -43,7 +43,7 @@ Essentially I add the routing package to the container, and then have have the a
foreach(var route in Routes.RoutesDictionary)
{
a.MapGet("docker101", handler: async b=>{
b.Response.Redirect("https://blog.terribledev.io/Getting-started-with-docker-containers/", true);
b.Response.Redirect("https://blog.terrible.dev/Getting-started-with-docker-containers/", true);
});
}
});

View File

@@ -17,12 +17,18 @@ namespace TerribleDev.Blog.Web
CreateWebHostBuilder(args).Build().Run();
}
public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
WebHost.CreateDefaultBuilder(args)
public static IWebHostBuilder CreateWebHostBuilder(string[] args) {
var builder = WebHost.CreateDefaultBuilder(args)
.UseStartup<Startup>()
.ConfigureKestrel(a =>
{
a.AddServerHeader = false;
});
var port = Environment.GetEnvironmentVariable("PORT");
if(!String.IsNullOrWhiteSpace(port)) {
builder.UseUrls("http://*:" + port);
}
return builder;
}
}
}

View File

@@ -1,77 +1,111 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.HttpsPolicy;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Rewrite;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.FileProviders;
using Microsoft.Net.Http.Headers;
using HardHat.Middlewares;
using HardHat;
using TerribleDev.Blog.Web.Models;
using TerribleDev.Blog.Web.Factories;
using Microsoft.Extensions.Hosting;
using WebMarkupMin.AspNetCore7;
using Microsoft.Extensions.Logging;
using TerribleDev.Blog.Web.Filters;
namespace TerribleDev.Blog.Web
{
public class Startup
{
public Startup(IConfiguration configuration, IHostingEnvironment env)
public Startup(IConfiguration configuration, IWebHostEnvironment env)
{
Configuration = configuration;
Env = env;
}
public IConfiguration Configuration { get; }
public IHostingEnvironment Env { get; }
public IWebHostEnvironment Env { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
Func<BlogConfiguration> getBlog = () => Configuration.GetSection("Blog").Get<BlogConfiguration>();
{
var blogConfiguration = new BlogConfiguration() {
Link = "https://blog.terrible.dev",
Title = "The Ramblings of TerribleDev"
};
// Func<BlogConfiguration> getBlog = () => Configuration.GetSection("Blog").Get<BlogConfiguration>();
if (Env.IsDevelopment())
{
services.AddTransient(a => getBlog());
services.AddTransient(a => blogConfiguration);
}
else
{
services.AddSingleton(getBlog());
services.AddSingleton(blogConfiguration);
}
services.AddResponseCompression(a =>
// enable logging
services.AddLogging();
services.AddSingleton((i) => {
var posts = new BlogFactory().GetAllPostsAsync(Env.IsDevelopment() ? "https://localhost:5001": "https://blog.terrible.dev").Result;
var postCache = BlogCacheFactory.ProjectPostCache(posts);
if(Env.IsProduction()) {
foreach(var post in postCache.PostsAsLists)
{
// if we are in production turn off lazy loading
var value = post.Content;
}
}
return postCache;
});
var controllerBuilder = services.AddControllersWithViews(a => {
a.Filters.Add(new StaticETag());
});
#if DEBUG
if (Env.IsDevelopment())
{
controllerBuilder.AddRazorRuntimeCompilation();
}
#endif
services
.AddResponseCompression(a =>
{
a.EnableForHttps = true;
})
.AddMemoryCache()
.AddMvcCore()
.AddCacheTagHelper()
.AddRazorViewEngine()
.SetCompatibilityVersion(CompatibilityVersion.Version_2_2);
services.AddOutputCaching();
.AddResponseCaching()
.AddMemoryCache();
// if(Env.IsProduction())
// {
// }
services.AddOutputCache(a =>{
a.AddBasePolicy(b => {
b.Cache();
});
});
services.AddWebMarkupMin(a => {
a.AllowMinificationInDevelopmentEnvironment = true;
a.DisablePoweredByHttpHeaders = true;
})
.AddHtmlMinification()
.AddXmlMinification();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
app.UseExceptionHandler("/Error");
}
Console.WriteLine("ETag Detected As: " + StaticETag.staticEtag);
app.UseHttpsRedirection();
if (env.IsProduction())
{
app.UseOutputCache();
app.UseResponseCaching();
}
app.UseResponseCompression();
var cacheTime = env.IsDevelopment() ? 0 : 31536000;
var cacheTime = env.IsDevelopment() ? 1 : 31536000;
app.UseStaticFiles(new StaticFileOptions
{
OnPrepareResponse = ctx =>
@@ -89,6 +123,16 @@ namespace TerribleDev.Blog.Web
"public,max-age=" + cacheTime;
}
});
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
app.UseExceptionHandler("/Error");
}
app.UseRewriter(new Microsoft.AspNetCore.Rewrite.RewriteOptions().AddRedirect("(.*[^/|.xml|.html])$", "$1/", 301));
app.UseIENoOpen();
app.UseNoMimeSniff();
@@ -111,8 +155,12 @@ namespace TerribleDev.Blog.Web
// },
UpgradeInsecureRequests = true
});
app.UseOutputCaching();
app.UseMvc();
app.UseWebMarkupMin();
app.UseRouting();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllers();
});
}
}
}
}

View File

@@ -0,0 +1,45 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.AspNetCore.Razor.TagHelpers;
using System;
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
namespace TerribleDev.Blog.Web.Taghelpers
{
public abstract class AbstractPlatformTagHelper : TagHelper
{
static Regex MobileCheck = new Regex(@"(?:phone|windows\s+phone|ipod|blackberry|(?:android|bb\d+|meego|silk|googlebot) .+? mobile|palm|windows\s+ce|opera\ mini|avantgo|mobilesafari|docomo|ipad)", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.ECMAScript);
static ConcurrentDictionary<string, Platform> CachedChecks = new ConcurrentDictionary<string, Platform>(); // cache of user agent -> platform
protected HttpRequest Request => ViewContext.HttpContext.Request;
protected HttpResponse Response => ViewContext.HttpContext.Response;
[ViewContext]
public ViewContext ViewContext { get; set; }
protected abstract bool ShouldRender();
public Platform GetPlatform()
{
var userAgent = this.Request.Headers.UserAgent;
if (string.IsNullOrEmpty(userAgent))
{
return Platform.Desktop; // desktop is the default
}
if(CachedChecks.TryGetValue(userAgent, out var cacheResult))
{
return cacheResult;
}
var isMobile = AbstractPlatformTagHelper.MobileCheck.IsMatch(userAgent);
var result = isMobile ? Platform.Mobile : Platform.Desktop;
// remember the verdict so the regex only runs once per user agent
CachedChecks.TryAdd(userAgent, result);
return result;
}
public override void Process(TagHelperContext context, TagHelperOutput output)
{
output.TagName = null;
if(!this.ShouldRender())
{
output.SuppressOutput();
return;
}
}
}
}

View File

@@ -0,0 +1,16 @@
using Microsoft.AspNetCore.Razor.TagHelpers;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace TerribleDev.Blog.Web.Taghelpers
{
[HtmlTargetElement("desktop", TagStructure = TagStructure.NormalOrSelfClosing)]
public class DesktopTagHelper : AbstractPlatformTagHelper
{
protected override bool ShouldRender() => this.GetPlatform() == Platform.Desktop;
}
}

View File

@@ -0,0 +1,62 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Encodings.Web;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Razor.Infrastructure;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.Routing;
using Microsoft.AspNetCore.Mvc.TagHelpers;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.AspNetCore.Razor.TagHelpers;
namespace TerribleDev.Blog.Web.Taghelpers
{
public record PushUrl(string Url, string asProperty);
[HtmlTargetElement("link", Attributes = "[rel=stylesheet],href,push")]
[HtmlTargetElement("img", Attributes = "src,push")]
[HtmlTargetElement("script", Attributes = "src,push")]
public class HttpPush : LinkTagHelper
{
[HtmlAttributeNotBound]
public bool Http2PushEnabled { get; set; } = true;
public static readonly string Key = "http2push-link";
public HttpPush(IWebHostEnvironment hostingEnvironment, TagHelperMemoryCacheProvider cacheProvider, IFileVersionProvider fileVersionProvider, HtmlEncoder htmlEncoder, JavaScriptEncoder javaScriptEncoder, IUrlHelperFactory urlHelperFactory) : base(hostingEnvironment, cacheProvider, fileVersionProvider, htmlEncoder, javaScriptEncoder, urlHelperFactory)
{
}
private (string Url, string AsProperty) GetTagInfo(string tag) =>
tag switch
{
"link" => ("href", "link"),
"img" => ("src", "image"),
"script" => ("src", "script"),
_ => (null, null)
};
public override void Process(TagHelperContext context, TagHelperOutput output)
{
if(!this.Http2PushEnabled)
{
return;
}
var (urlAttribute, asProperty) = GetTagInfo(output.TagName);
var url = base.TryResolveUrl(output.Attributes[urlAttribute].Value.ToString(), out string resolvedUrl) ? resolvedUrl : output.Attributes[urlAttribute].Value.ToString();
var linkList = ViewContext.HttpContext.Items.TryGetValue(Key, out var links) ? links as List<PushUrl> : null;
if(linkList == null)
{
linkList = new List<PushUrl>() { new PushUrl(url, asProperty) };
ViewContext.HttpContext.Items.Add(HttpPush.Key, linkList);
}
else
{
linkList.Add(new PushUrl(url, asProperty));
}
output.Attributes.Remove(output.Attributes["push"]);
}
}
}

View File

@@ -3,7 +3,10 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Razor.TagHelpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Hosting;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace TerribleDev.Blog.Web.Taghelpers
@@ -14,39 +17,43 @@ namespace TerribleDev.Blog.Web.Taghelpers
[HtmlAttributeName("href")]
public string Href { get; set; }
private IHostingEnvironment HostingEnvironment { get; }
private IWebHostEnvironment HostingEnvironment { get; }
private IMemoryCache Cache { get; }
public InlineStyleTagHelper(IHostingEnvironment hostingEnvironment, IMemoryCache cache)
public InlineStyleTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
{
HostingEnvironment = hostingEnvironment;
Cache = cache;
}
public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
{
var path = Href;
var paths = Href.Split(',');
// Get the value from the cache, or compute the value and add it to the cache
var fileContent = await Cache.GetOrCreateAsync("InlineStyleTagHelper-" + path, async entry =>
var fileContent = await Cache.GetOrCreateAsync("InlineStyleTagHelper-" + Href, async entry =>
{
var fileProvider = HostingEnvironment.WebRootFileProvider;
if(HostingEnvironment.IsDevelopment())
{
var changeToken = fileProvider.Watch(path);
entry.AddExpirationToken(changeToken);
}
var result = paths.Select(async path => {
if(HostingEnvironment.IsDevelopment())
{
var changeToken = fileProvider.Watch(path);
entry.AddExpirationToken(changeToken);
}
entry.SetPriority(CacheItemPriority.NeverRemove);
entry.SetPriority(CacheItemPriority.NeverRemove);
var file = fileProvider.GetFileInfo(path);
if (file == null || !file.Exists)
return null;
var file = fileProvider.GetFileInfo(path);
if (file == null || !file.Exists)
return null;
return await ReadFileContent(file);
return await ReadFileContent(file);
});
var allFinished = await Task.WhenAll(result);
return string.Join("\n", allFinished);
});
if (fileContent == null)
@@ -69,4 +76,4 @@ namespace TerribleDev.Blog.Web.Taghelpers
}
}
}
}
}

View File

@@ -0,0 +1,79 @@
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Razor.TagHelpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Hosting;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace TerribleDev.Blog.Web.Taghelpers
{
[HtmlTargetElement("inline-script")]
public class InlineScriptTagHelper : TagHelper
{
[HtmlAttributeName("src")]
public string Src { get; set; }
private IWebHostEnvironment HostingEnvironment { get; }
private IMemoryCache Cache { get; }
public InlineScriptTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
{
HostingEnvironment = hostingEnvironment;
Cache = cache;
}
public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
{
var paths = Src.Split(',');
// Get the value from the cache, or compute the value and add it to the cache
var fileContent = await Cache.GetOrCreateAsync("InlineScriptTagHelper-" + paths, async entry =>
{
var fileProvider = HostingEnvironment.WebRootFileProvider;
var result = paths.Select(async path => {
if(HostingEnvironment.IsDevelopment())
{
var changeToken = fileProvider.Watch(path);
entry.AddExpirationToken(changeToken);
}
entry.SetPriority(CacheItemPriority.NeverRemove);
var file = fileProvider.GetFileInfo(path);
if (file == null || !file.Exists)
return null;
return await ReadFileContent(file);
});
var allFinished = await Task.WhenAll(result);
return string.Join("\n", allFinished);
});
if (fileContent == null)
{
output.SuppressOutput();
return;
}
output.TagName = "script";
output.Attributes.RemoveAll("href");
output.Content.AppendHtml(fileContent);
}
private static async Task<string> ReadFileContent(IFileInfo file)
{
using (var stream = file.CreateReadStream())
using (var textReader = new StreamReader(stream))
{
return await textReader.ReadToEndAsync();
}
}
}
}

View File

@@ -8,34 +8,9 @@ using System.Threading.Tasks;
namespace TerribleDev.Blog.Web.Taghelpers
{
[HtmlTargetElement("desktopOnly", TagStructure = TagStructure.NormalOrSelfClosing)]
public class DesktopTagHelper : TagHelper
[HtmlTargetElement("mobile", TagStructure = TagStructure.NormalOrSelfClosing)]
public class MobileTagHelper : AbstractPlatformTagHelper
{
static Regex MobileCheck = new Regex(@"(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino", RegexOptions.IgnoreCase | RegexOptions.Multiline | RegexOptions.Compiled);
static ConcurrentDictionary<string, bool> CachedChecks = new ConcurrentDictionary<string, bool>();
public string UserAgent { get; set; }
public override void Process(TagHelperContext context, TagHelperOutput output)
{
output.TagName = null;
if (string.IsNullOrEmpty(UserAgent))
{
return;
}
var shouldRender = true;
if(CachedChecks.TryGetValue(UserAgent, out var cacheResult))
{
shouldRender = cacheResult;
}
else
{
var isMobile = MobileCheck.IsMatch(UserAgent);
shouldRender = !isMobile;
CachedChecks.TryAdd(UserAgent, !isMobile);
}
if(!shouldRender)
{
output.SuppressOutput();
}
}
protected override bool ShouldRender() => this.GetPlatform() == Platform.Mobile;
}
}

View File

@@ -0,0 +1,8 @@
namespace TerribleDev.Blog.Web.Taghelpers
{
public enum Platform
{
Desktop,
Mobile,
}
}

View File

@@ -1,10 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>netcoreapp2.2</TargetFramework>
<TargetFramework>net7.0</TargetFramework>
<AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<UserSecretsId>9a1f51b6-f4d9-4df7-a0af-e345176e9927</UserSecretsId>
<RuntimeIdentifiers>linux-musl-x64</RuntimeIdentifiers>
</PropertyGroup>
<ItemGroup>
@@ -21,19 +21,18 @@
<ItemGroup>
<PackageReference Include="BuildBundlerMinifier" Version="2.8.391" />
<PackageReference Include="Markdig" Version="0.15.7" />
<PackageReference Include="Microsoft.AspNetCore.App" />
<PackageReference Include="Microsoft.AspNetCore.Razor.Design" Version="2.2.0" PrivateAssets="All" />
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.0.2105168" />
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="2.2.0" />
<PackageReference Include="Schema.NET" Version="11.0.1" />
<PackageReference Include="UriBuilder.Fluent" Version="1.5.2" />
<PackageReference Include="WebMarkupMin.AspNetCore7" Version="2.13.0-rc1" />
<PackageReference Include="YamlDotNet" Version="5.3.0" />
<PackageReference Include="HardHat" Version="2.1.1" />
<PackageReference Include="Microsoft.SyndicationFeed.ReaderWriter" Version="1.0.2" />
<PackageReference Include="WebEssentials.AspNetCore.OutputCaching" Version="1.0.16" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Razor.RuntimeCompilation" Version="7.0.0" Condition="'$(Configuration)' == 'Debug'" />
</ItemGroup>
<ItemGroup>
<Content Include="Posts\*.md" CopyToOutputDirectory="Always" />
<Watch Include="Posts\*.md" />
</ItemGroup>
</Project>

View File

@@ -1,4 +1,4 @@
@inject Microsoft.AspNetCore.Hosting.IHostingEnvironment env
@inject Microsoft.AspNetCore.Hosting.IWebHostEnvironment env
@{
ViewData["Title"] = "Debug";
}

View File

@@ -24,4 +24,11 @@
@section Head {
<partial name="StockMeta" />
<script type="application/ld+json">
@Html.Raw(Model.BlogLDString)
</script>
<script type="application/ld+json">
@Html.Raw(Model.SiteLDString)
</script>
<link rel="canonical" href="https://blog.terrible.dev/">
}
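
BlogLDString and SiteLDString are raw JSON-LD payloads. Given the Schema.NET package referenced in the csproj above, they are presumably produced by serializing Schema.NET objects; a hedged sketch (the real construction of these strings is not shown here):

using System;
using Schema.NET;

// Schema.NET types render themselves as JSON-LD via ToString().
var site = new WebSite
{
    Name = "The Ramblings of TerribleDev",
    Url = new Uri("https://blog.terrible.dev/")
};
string siteLdString = site.ToString();
// -> {"@context":"https://schema.org","@type":"WebSite","name":...,"url":...}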

View File

@@ -1,33 +1,40 @@
@inject BlogConfiguration config
@model IPost
@model PostViewModel
@{
ViewData["Title"] = "Post";
ViewData["HideNav"] = true;
ViewData["Title"] = Model.Post.Title;
}
<cache vary-by-route="postUrl">
@Html.DisplayForModel()
<cache vary-by-route="postUrl,amp">
<partial name="SharedPost" for="Post" />
</cache>
@section Head {
<meta name="description" content="@Model.SummaryPlainShort" />
<inline-style href="css/prism.css"></inline-style>
<meta name="description" content="@Model.Post.Content.SummaryPlainShort" />
<meta property="og:type" content="blog">
<meta property="og:title" content="@Model.Title">
<meta property="og:url" content="https://blog.terribledev.io/@Model.Url/">
<meta property="og:title" content="@Model.Post.Title">
<meta property="og:url" content="@Model.Post.CanonicalUrl">
<meta property="og:site_name" content="@config.Title">
<meta property="og:description" content="@Model.SummaryPlainShort">
<meta property="og:updated_time" content="@Model.PublishDate.ToString("O")">
<meta property="og:description" content="@Model.Post.Content.SummaryPlainShort">
<meta property="og:updated_time" content="@Model.Post.PublishDate.ToString("O")">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="@Model.Title">
<meta name="twitter:description" content="@Model.SummaryPlainShort">
<meta name="twitter:title" content="@Model.Post.Title">
<meta name="twitter:description" content="@Model.Post.Content.SummaryPlainShort">
<meta name="twitter:site" content="@@TerribleDev">
<meta name="twitter:creator" content="@@TerribleDev">
<meta property="og:image" content="https://www.gravatar.com/avatar/333e3cea32cd17ff2007d131df336061?s=640" />
@foreach(var image in Model.Images.Take(6))
<link rel="canonical" href="@Model.Post.CanonicalUrl" />
@if(!string.IsNullOrEmpty(Model.Post.ThumbnailImage))
{
<meta property="og:image" content="https://blog.terribledev.io@(image)">
}
@if(Model.Images.Count > 0)
{
<meta name="twitter:image" content="https://blog.terribledev.io@(Model.Images[0])">
<meta name="twitter:image" content="@(Model.Post.ThumbnailImage)">
<meta property="og:image" content="@(Model.Post.ThumbnailImage)" />
}
<script type="application/ld+json">
@Html.Raw(Model.Post.Content.JsonLDString)
</script>
<script type="application/ld+json">
@Html.Raw(Model.Post.Content.JsonLDBreadcrumbString)
</script>
}
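
The view's model changes from IPost to a PostViewModel that wraps the post. The members the template touches imply roughly this shape; a hypothetical, partial reconstruction (the real types live elsewhere in the repo):

using System;

public class PostViewModel
{
    public Post Post { get; set; }
}

// Partial: only the members this view touches.
public class Post
{
    public string Title { get; set; }
    public DateTime PublishDate { get; set; }
    public string CanonicalUrl { get; set; }   // absolute URL for og:url and rel=canonical
    public string ThumbnailImage { get; set; } // optional; drives twitter:image/og:image
    public PostContent Content { get; set; }
}

public class PostContent
{
    public string SummaryPlainShort { get; set; }
    public string JsonLDString { get; set; }
    public string JsonLDBreadcrumbString { get; set; }
}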

View File

@@ -0,0 +1,17 @@
@model SearchViewModel
@{
ViewData["Title"] = $"Search ${Model.SearchTerm}";
}
@if(Model.Posts.Count == 0) {
<div>
No posts found.
</div>
}
else
{
@foreach (var post in Model.Posts)
{
<partial name="PostSummary" model="post" />
}
}
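
The new search view needs a SearchViewModel carrying the term and its hits. A hedged sketch of the model and a controller action that could back it, reusing the Post sketch above (everything beyond SearchViewModel, SearchTerm, and Posts is hypothetical):

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Mvc;

public class SearchViewModel
{
    public string SearchTerm { get; set; }
    public IList<Post> Posts { get; set; } = new List<Post>();
}

public class SearchController : Controller
{
    readonly IReadOnlyList<Post> _allPosts; // assume an injected, pre-built post cache

    public SearchController(IReadOnlyList<Post> allPosts) => _allPosts = allPosts;

    public IActionResult Index(string q) =>
        View("Search", new SearchViewModel
        {
            SearchTerm = q,
            Posts = _allPosts
                .Where(p => p.Title.Contains(q ?? "", StringComparison.OrdinalIgnoreCase))
                .ToList()
        });
}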

View File

@@ -0,0 +1,10 @@
@model LandingPage
@{
var amp = ViewData["amp"] as bool? ?? false;
}
<article>
<h1 itemprop="headline" class="headline">@Model.Title</h1>
<time class="headlineSubtext" itemprop="datePublished" content="@Model.PublishDate.ToString()">@Model.PublishDate.ToString("D")</time>
@Model.Content.Content
</article>

View File

@@ -1,9 +1,9 @@
@model IPost
@model Post
<article itemprop="blogPost">
<h1 itemprop="headline" class="headline">@Model.Title</h1>
<time class="headlineSubtext" itemprop="datePublished" content="@Model.PublishDate.ToString()">@Model.PublishDate.ToString("D")</time>
@Model.Content
@Model.Content.Content
@if (Model.tags.Count > 0)
{
<div>

View File

@@ -1,15 +1,14 @@
@{
Layout = null;
}
<link rel="preconnect" href="https://www.google-analytics.com">
<link rel="preconnect" href="https://stats.g.doubleclick.net">
<link rel="preconnect" href="https://www.googletagmanager.com">
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-48128396-1"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'UA-48128396-1');
</script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag() { dataLayer.push(arguments); }
gtag('js', new Date());
gtag('config', 'UA-48128396-1');
document.addEventListener('DOMContentLoaded', function () {
setTimeout(() => {
var script = document.createElement('script');
script.src = 'https://www.googletagmanager.com/gtag/js?id=UA-48128396-1';
script.async = true;
document.body.appendChild(script);
}, 4000)
});
</script>

View File

@@ -1,16 +1,20 @@
@{
var hideNav = ViewData["HideNav"] != null ? "" : "withBody";
}
<nav class="navBar hide @hideNav" id="navBar">
<img src="" alt="An image of TerribleDev" data-src="/content/tommyAvatar4.jpg" class="lazy round" />
<span>Tommy "Terrible Dev" Parnell</span>
<ul class="sidebarBtns">
<li><a href="/" class="link-unstyled">Home</a></li>
<li><a href="/all-tags" class="link-unstyled">Tags</a></li>
<li><a href="/rss.xml" class="link-unstyled">RSS Feed</a></li>
<li><a href="https://github.com/terribledev" rel="noopener" target="_blank" class="link-unstyled">Github</a></li>
<li><a href="https://twitter.com/terribledev" rel="noopener" target="_blank" class="link-unstyled">Twitter</a></li>
<li><a href="mailto:tommy@terribledev.io" class="link-unstyled">Email</a></li>
<li><span class="link-unstyled" id="closeNav">Close Navbar</span></li>
</ul>
</nav>
<nav class="navBar hide" id="navBar">
<div class="navContent">
<picture class="navHero">
<source srcset="/content/tommyAvatar4.jpg.webp" loading="lazy" type="image/webp" alt="An image of TerribleDev" class="round" />
<img src="/content/tommyAvatar4.jpg" loading="lazy" alt="An image of TerribleDev" class="round" />
</picture>
<span>Tommy "Terrible Dev" Parnell</span>
<ul class="sidebarBtns">
<li><a href="/" class="link-unstyled">Home</a></li>
<li><a href="/about" class="link-unstyled">About</a></li>
<li><a href="/all-tags/" class="link-unstyled">Tags</a></li>
<li><a href="/rss.xml" class="link-unstyled">RSS Feed</a></li>
<li><a href="https://github.com/terribledev" rel="noopener" target="_blank" class="link-unstyled">Github</a></li>
<li><a href="https://twitter.com/terribledev" rel="noopener" target="_blank" class="link-unstyled">Twitter</a></li>
<li><a href="mailto:tommy@terribledev.io" class="link-unstyled">Email</a></li>
<li><span onclick="toggleNav()" class="link-unstyled" id="closeNav">Close Navbar</span></li>
</ul>
</div>
</nav>

View File

@@ -1,10 +1,10 @@
@model IPost
<article class="btmRule">
<h3 itemprop="headline" class="headline"><a href="/@Model.Url/" class="link-unstyled">@Model.Title</a></h3>
<h3 itemprop="headline" class="headline"><a href="@Model.RelativeUrl" class="link-unstyled">@Model.Title</a></h3>
<time class="headlineSubtext" itemprop="datePublished" content="@Model.PublishDate.ToString()">@Model.PublishDate.ToString("D")</time>
<div itemprop="articleBody">
@Model.Summary
@Model.Content.Summary
</div>
<a href="/@Model.Url/">Continue Reading </a>
</article>
<a href="@Model.RelativeUrl">Continue Reading </a>
</article>

View File

@@ -0,0 +1,3 @@
@model IPost
@Html.DisplayForModel()

View File

@@ -2,7 +2,7 @@
<meta name="description" content="My name is Tommy Parnell. I usually go by TerribleDev on the internets. These are just some of my writings and rants about the software space." />
<meta property="og:type" content="blog">
<meta property="og:title" content="@config.Title">
<meta property="og:url" content="https://blog.terribledev.io/">
<meta property="og:url" content="https://blog.terrible.dev/">
<meta property="og:site_name" content="@config.Title">
<meta property="og:description" content="My name is Tommy Parnell. I usually go by TerribleDev on the internets. These are just some of my writings and rants about the software space.">
<meta name="twitter:card" content="summary">

View File

@@ -1,47 +1,62 @@
@inject BlogConfiguration config
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>@ViewData["Title"] | @config.Title</title>
<environment names="Development">
<inline-style href="css/site.css,css/site.mobile.css,css/site.desktop.css"></inline-style>
@* <desktop>
<inline-style href="css/site.css,css/site.desktop.css"></inline-style>
</desktop>
<mobile>
<inline-style href="css/site.css,css/site.mobile.css"></inline-style>
</mobile> *@
</environment>
<environment names="Production">
@* <desktop>
<inline-style href="css/site.min.css,css/site.desktop.min.css"></inline-style>
</desktop>
<mobile>
<inline-style href="css/site.min.css,css/site.mobile.min.css"></inline-style>
</mobile> *@
<inline-style href="css/site.min.css,css/site.mobile.min.css,css/site.desktop.min.css"></inline-style>
</environment>
<environment names="Production">
<partial name="Gtm" />
</environment>
<meta name="author" content="Tommy &quot;TerribleDev&quot; Parnell" />
<meta name="theme-color" content="#4A4A4A" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="alternate" type="application/atom+xml" async title="RSS" href="/rss.xml">
<link rel="manifest" href="~/manifest.json" async asp-append-version="true">
<link asp-append-version="true" rel="icon" async href="~/favicon.ico" />
<title>@ViewData["Title"] - @config.Title</title>
<environment names="Development">
<inline-style href="css/site.css"></inline-style>
</environment>
<environment names="Production">
<inline-style href="css/site.min.css"></inline-style>
</environment>
<link rel="alternate" type="application/atom+xml" title="RSS" href="/rss.xml">
<link rel="manifest" href="~/manifest.json" asp-append-version="true">
<meta name="author" content="Tommy &quot;TerribleDev&quot; Parnell" />
<link asp-append-version="true" rel="icon" href="~/favicon.ico" push />
@RenderSection("Head", false)
</head>
<body>
<partial name="Nav" />
<header class="header">
<svg aria-label="Open Menu" id="menuBtn" role="button" xmlns="http://www.w3.org/2000/svg" width="32" height="32"><path d="M4 10h24c1.104 0 2-.896 2-2s-.896-2-2-2H4c-1.104 0-2 .896-2 2s.896 2 2 2zm24 4H4c-1.104 0-2 .896-2 2s.896 2 2 2h24c1.104 0 2-.896 2-2s-.896-2-2-2zm0 8H4c-1.104 0-2 .896-2 2s.896 2 2 2h24c1.104 0 2-.896 2-2s-.896-2-2-2z" /></svg>
<div class="headerCallout"><a href="/" class="link-unstyled ">@config.Title</a></div>
</header>
@{
var bodyBump = ViewData["HideNav"] == null ? "bodyWithNav": "";
}
<main role="main" class="@bodyBump headerBump">
<div class="main-content-wrap">
@RenderBody()
</div>
</main>
@RenderSection("Scripts", required: false)
<environment names="Development">
<script asp-append-version="true" src="~/js/swi.js" async></script>
</environment>
<environment names="Production">
<script asp-append-version="true" src="~/js/site.min.js" async></script>
</environment>
<a class="skip-main" href="#main">Skip to main content</a>
<div class="rootbox">
<header class="header">
<svg aria-label="Open Menu" onclick="toggleNav()" id="menuBtn" role="button" xmlns="http://www.w3.org/2000/svg" width="32" height="32"><path d="M4 10h24c1.104 0 2-.896 2-2s-.896-2-2-2H4c-1.104 0-2 .896-2 2s.896 2 2 2zm24 4H4c-1.104 0-2 .896-2 2s.896 2 2 2h24c1.104 0 2-.896 2-2s-.896-2-2-2zm0 8H4c-1.104 0-2 .896-2 2s.896 2 2 2h24c1.104 0 2-.896 2-2s-.896-2-2-2z"/></svg>
<div class="headerCallout"><a href="/" class="link-unstyled ">@config.Title</a></div>
</header>
<partial name="Nav" />
<main class="headerBump main-content-wrap" id="main">
@RenderBody()
</main>
</div>
@RenderSection("Scripts", required: false)
<environment names="Development">
<script push asp-append-version="true" src="~/js/swi.js" async></script>
</environment>
<environment names="Production">
<script push asp-append-version="true" src="~/js/site.min.js" async></script>
</environment>
</body>
</html>
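
The push attribute on the favicon link and the script tags is not standard HTML; per the repo's "push filter" and "rename push header" history it is presumably consumed server-side and translated into a preload Link header (usable for HTTP/2 server push or 103 Early Hints). One hypothetical way to implement such an attribute as a tag helper (all names here are assumptions, not the repo's code):

using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.AspNetCore.Razor.TagHelpers;

[HtmlTargetElement("script", Attributes = "push")]
[HtmlTargetElement("link", Attributes = "push")]
public class PushTagHelper : TagHelper
{
    [ViewContext, HtmlAttributeNotBound]
    public ViewContext ViewContext { get; set; }

    public override void Process(TagHelperContext context, TagHelperOutput output)
    {
        output.Attributes.RemoveAll("push"); // keep the rendered HTML valid

        var url = (context.AllAttributes["src"] ?? context.AllAttributes["href"])?.Value?.ToString();
        if (string.IsNullOrEmpty(url)) return;

        // A fuller implementation would map rel=stylesheet to as=style, rel=icon to as=image, etc.
        var asType = output.TagName == "script" ? "script" : "image";
        ViewContext.HttpContext.Response.Headers.Append(
            "Link", $"<{url}>; rel=preload; as={asType}");
    }
}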

View File

@@ -1,4 +1,4 @@
@model Dictionary<string, List<IPost>>
@model IDictionary<string, IList<Post>>
@{
ViewData["Title"] = "all-tags";
}
@@ -11,4 +11,5 @@
</cache>
@section Head {
<partial name="StockMeta" />
}
<link rel="canonical" href="https://blog.terrible.dev/all-tags/" />
}

View File

@@ -2,10 +2,20 @@
@model GetTagViewModel
@{
ViewData["Tag:" + Model.Tag] = "GetTag";
ViewData["Title"] = @Model.Title;
}
<cache vary-by-route="tagName">
@foreach (var post in Model.Posts)
{
<partial name="PostSummary" model="post" />
}
</cache>
@section Head {
@if(!String.IsNullOrEmpty(Model.CanonicalUrl)) {
<link rel="canonical" href="@Model.CanonicalUrl" />
}
<script type="application/ld+json">
@Html.Raw(Model.ldJson())
</script>
}

View File

@@ -1,9 +1,7 @@
{
"Logging": {
"LogLevel": {
"Default": "Debug",
"System": "Information",
"Microsoft": "Information"
"Default": "Warning"
}
}
}

View File

@@ -0,0 +1,2 @@
{
}

View File

@@ -2,16 +2,11 @@
"Logging": {
"LogLevel": {
"Default": "Warning"
},
"Console": {
"LogLevel": {
"Default": "None"
}
}
},
"AllowedHosts": "*",
"Blog": {
"title": "The Ramblings of TerribleDev",
"link": "https://blog.terribledev.io"
"link": "https://blog.terrible.dev"
}
}
}
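
The views inject BlogConfiguration directly (@inject BlogConfiguration config), so the "Blog" section above is presumably bound to a POCO and registered as a singleton. A minimal sketch of that wiring, assuming a Program.cs not shown in this diff (configuration binding is case-insensitive, so "title"/"link" map onto the properties below):

// Program.cs (sketch): bind the "Blog" section of appsettings to a POCO and
// register the instance, which is what lets views use "@inject BlogConfiguration config".
var builder = WebApplication.CreateBuilder(args);
var blog = builder.Configuration.GetSection("Blog").Get<BlogConfiguration>()
           ?? new BlogConfiguration();
builder.Services.AddSingleton(blog);
var app = builder.Build();
app.Run();

public class BlogConfiguration
{
    public string Title { get; set; }
    public string Link { get; set; }
}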

View File

@@ -5,6 +5,18 @@
"wwwroot/css/site.css"
]
},
{
"outputFileName": "wwwroot/css/site.desktop.min.css",
"inputFiles": [
"wwwroot/css/site.desktop.css"
]
},
{
"outputFileName": "wwwroot/css/site.mobile.min.css",
"inputFiles": [
"wwwroot/css/site.mobile.css"
]
},
{
"outputFileName": "wwwroot/js/site.min.js",
"inputFiles": [

Binary file not shown (added, 300 KiB).

Binary file not shown (added, 2.6 KiB).

Binary file not shown (added, 3.7 KiB).

Some files were not shown because too many files have changed in this diff.