Compare commits
237 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a74dec639a | ||
|
|
f93aea2696 | ||
|
|
63828b3003 | ||
|
|
1b372b3501 | ||
|
|
5edba65844 | ||
|
|
afcae26bc1 | ||
|
|
5a253f6ada | ||
|
|
e8c548ab20 | ||
|
|
0871e037d8 | ||
|
|
a5931f48c6 | ||
|
|
2cff53f5d3 | ||
|
|
e065871145 | ||
|
|
5cf4086872 | ||
|
|
d3638b10c0 | ||
|
|
bd790926b3 | ||
|
|
2753099f72 | ||
|
|
20dc7ad932 | ||
|
|
3892cb578e | ||
|
|
c6ee8f8193 | ||
|
|
6147b840f2 | ||
|
|
d032ffcf82 | ||
|
|
ac28c642f8 | ||
|
|
078da9731b | ||
|
|
e8e9a1caa7 | ||
|
|
cde154ee3b | ||
|
|
f97bc8d938 | ||
|
|
72824b70a0 | ||
|
|
87f50e1324 | ||
|
|
b316cc7e8e | ||
|
|
cc34f198a8 | ||
|
|
c1687cccf5 | ||
|
|
e6d7240996 | ||
|
|
6b9e0c8fe3 | ||
|
|
910a5fee16 | ||
|
|
6aec3294dc | ||
|
|
2e5161b949 | ||
|
|
2be516f73b | ||
|
|
f2198e2e06 | ||
|
|
3136961955 | ||
|
|
844edb8fc1 | ||
|
|
96d06da5e4 | ||
|
|
16572e3e6c | ||
|
|
cc0b7c506c | ||
|
|
172a33a27c | ||
|
|
27c2d2eaf4 | ||
|
|
b79070c855 | ||
|
|
a888216b61 | ||
|
|
fbab80753f | ||
|
|
1dca63e128 | ||
|
|
14d250ca9b | ||
|
|
603ea65456 | ||
|
|
c23158998c | ||
|
|
a146b39eef | ||
|
|
bcbe44f86a | ||
|
|
3d62a12bfc | ||
|
|
b3bd3492ee | ||
|
|
83a41395e7 | ||
|
|
3b9c978b92 | ||
|
|
e1919b5983 | ||
|
|
c51cf330f8 | ||
|
|
84fc8e09e8 | ||
|
|
fcc975bfbe | ||
|
|
e9a222afc2 | ||
|
|
51afa4389e | ||
|
|
80233f3870 | ||
|
|
6303af4640 | ||
|
|
1d49ce2ec5 | ||
|
|
bd7541f69e | ||
|
|
aae2a1d9e5 | ||
|
|
7e161d8d33 | ||
|
|
2a32bd1911 | ||
|
|
b99549c767 | ||
|
|
3dfb19673a | ||
|
|
7def19398b | ||
|
|
a04652aeb7 | ||
|
|
039262749a | ||
|
|
83eb1bc565 | ||
|
|
9952ef6aa7 | ||
|
|
4f28b9d10c | ||
|
|
1c10d8ff6d | ||
|
|
25f0c83300 | ||
|
|
3aa14c37e1 | ||
|
|
392e61fe18 | ||
|
|
4984f53efe | ||
|
|
6a2e91275a | ||
|
|
a3db1317f9 | ||
|
|
c230ff6fec | ||
|
|
71d8f2455d | ||
|
|
c938c73844 | ||
|
|
2924655fe2 | ||
|
|
df69206e71 | ||
|
|
0a6aeee439 | ||
|
|
65a24d00b8 | ||
|
|
dc5bbec95a | ||
|
|
7566baf19f | ||
|
|
c72f60c709 | ||
|
|
97bfd056bd | ||
|
|
70b745a2ef | ||
|
|
955cc7a4ad | ||
|
|
c754c4f8e8 | ||
|
|
8ce39bf4e5 | ||
|
|
beab60730b | ||
|
|
ac4e00b95c | ||
|
|
778d282640 | ||
|
|
1ec0bf9f82 | ||
|
|
386865aad2 | ||
|
|
323ab5ff1b | ||
|
|
9473478c2c | ||
|
|
b531e0979a | ||
|
|
94808c3c6a | ||
|
|
991f02a469 | ||
|
|
74d7e45e80 | ||
|
|
c8996d3ef5 | ||
|
|
422cae2ff3 | ||
|
|
81e627545f | ||
|
|
4fddff2922 | ||
|
|
f809cd2fde | ||
|
|
4795c49828 | ||
|
|
e248080246 | ||
|
|
629240eff4 | ||
|
|
1d5efc2d8d | ||
|
|
449cd6d6d1 | ||
|
|
f300df90c1 | ||
|
|
27151adf65 | ||
|
|
0f2b013b93 | ||
|
|
c050de459c | ||
|
|
88e0ae9b64 | ||
|
|
bbbac28221 | ||
|
|
2b9ed4093a | ||
|
|
99765d4267 | ||
|
|
84356eba1e | ||
|
|
d46c44dc97 | ||
|
|
0ddf58d2dc | ||
|
|
0983225d76 | ||
|
|
5b56e9cd57 | ||
|
|
8a1a71b46d | ||
|
|
07f4a06c49 | ||
|
|
9cdc25ff1c | ||
|
|
005ee858b2 | ||
|
|
354c766881 | ||
|
|
c312b49598 | ||
|
|
c6daaf2f03 | ||
|
|
f92df0b0ea | ||
|
|
f576961974 | ||
|
|
590a16dd77 | ||
|
|
a99d053f8d | ||
|
|
d3946d61d4 | ||
|
|
de0fafbc74 | ||
|
|
8fd4dbd4ec | ||
|
|
3795a95ed0 | ||
|
|
0dce62ab99 | ||
|
|
dbea3664d6 | ||
|
|
e26c47f91f | ||
|
|
58e34d8177 | ||
|
|
0e36180218 | ||
|
|
a245a21e02 | ||
|
|
792919cb70 | ||
|
|
934a762939 | ||
|
|
be76863dc2 | ||
|
|
da8a0d9a4f | ||
|
|
71cf945baf | ||
|
|
abacb42468 | ||
|
|
198f99f7f1 | ||
|
|
4fe77edfbc | ||
|
|
78e73f2a4c | ||
|
|
521b335f8a | ||
|
|
e2ad204571 | ||
|
|
755c03303b | ||
|
|
4c25db4039 | ||
|
|
c3a583a33b | ||
|
|
718b938a76 | ||
|
|
415b62e1e7 | ||
|
|
e367072f21 | ||
|
|
1327d87b96 | ||
|
|
39dfcfe70d | ||
|
|
86cb0cec23 | ||
|
|
f87bd029f4 | ||
|
|
69b2112e4b | ||
|
|
0b57031fc1 | ||
|
|
71929bb1ba | ||
|
|
b1959082dc | ||
|
|
be5c4cc806 | ||
|
|
ca0344c902 | ||
|
|
5a844f34f9 | ||
|
|
57a129cf8d | ||
|
|
a31b9d4fa9 | ||
|
|
7013e61c2f | ||
|
|
6ce47adb8a | ||
|
|
b9b9e81213 | ||
|
|
53b8b448da | ||
|
|
aa6ed52d93 | ||
|
|
16c10c9ca1 | ||
|
|
c3cb61619b | ||
|
|
7ff61450f9 | ||
|
|
f3faede79e | ||
|
|
38f82061e9 | ||
|
|
57a8bba66a | ||
|
|
43d6e33638 | ||
|
|
d875ca6fea | ||
|
|
d846a538a0 | ||
|
|
00b711aef4 | ||
|
|
dbb6ae208b | ||
|
|
de62e6275d | ||
|
|
d873be97d8 | ||
|
|
f3080faae0 | ||
|
|
6ed0ef4205 | ||
|
|
ab9250b968 | ||
|
|
c24684fa8b | ||
|
|
6ebf9a6574 | ||
|
|
7cf143c078 | ||
|
|
f7984258a5 | ||
|
|
712e92ff6b | ||
|
|
8bf5a55dcb | ||
|
|
04d5f29fee | ||
|
|
0aa95d9988 | ||
|
|
506188041a | ||
|
|
099f570e84 | ||
|
|
6813370179 | ||
|
|
f981ba3f39 | ||
|
|
365f1730f5 | ||
|
|
d1c2d60c5a | ||
|
|
7f28de0655 | ||
|
|
fb14bb735e | ||
|
|
fdbc9c6d6a | ||
|
|
a2e6c43e56 | ||
|
|
4acdfb3d4c | ||
|
|
45d0f3361e | ||
|
|
9044e8679f | ||
|
|
7852083a8c | ||
|
|
c353269c52 | ||
|
|
091abd4561 | ||
|
|
a2dd0c0d2a | ||
|
|
3143f1c76b | ||
|
|
fd5c668820 | ||
|
|
031c5c2598 | ||
|
|
a46631b5e6 | ||
|
|
4c752803c0 |
@@ -4,6 +4,6 @@
|
||||
.gitignore
|
||||
.vs
|
||||
.vscode
|
||||
*/bin
|
||||
*/obj
|
||||
**/bin
|
||||
**/obj
|
||||
**/.toolstarget
|
||||
42
.github/workflows/blogcontainergroup-AutoDeployTrigger-ab8fcfc6-eced-47ac-8584-4f5a983b4ee2.yml
vendored
Normal file
42
.github/workflows/blogcontainergroup-AutoDeployTrigger-ab8fcfc6-eced-47ac-8584-4f5a983b4ee2.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
name: Trigger auto deployment for blogcontainergroup
|
||||
|
||||
# When this action will be executed
|
||||
on:
|
||||
# Automatically trigger it when detected changes in repo
|
||||
push:
|
||||
branches:
|
||||
[ master ]
|
||||
paths:
|
||||
- '**'
|
||||
- '.github/workflows/blogcontainergroup-AutoDeployTrigger-ab8fcfc6-eced-47ac-8584-4f5a983b4ee2.yml'
|
||||
|
||||
# Allow mannually trigger
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout to the branch
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Azure Login
|
||||
uses: azure/login@v1
|
||||
with:
|
||||
creds: ${{ secrets.BLOGCONTAINERGROUP_AZURE_CREDENTIALS }}
|
||||
|
||||
- name: Build and push container image to registry
|
||||
uses: azure/container-apps-deploy-action@v1
|
||||
with:
|
||||
appSourcePath: ${{ github.workspace }}
|
||||
registryUrl: terribledevreg.azurecr.io
|
||||
registryUsername: ${{ secrets.BLOGCONTAINERGROUP_REGISTRY_USERNAME }}
|
||||
registryPassword: ${{ secrets.BLOGCONTAINERGROUP_REGISTRY_PASSWORD }}
|
||||
containerAppName: blogcontainergroup
|
||||
resourceGroup: ContainerGroup
|
||||
imageToBuild: terribledevreg.azurecr.io/blogcontainergroup:${{ github.sha }}
|
||||
|
||||
|
||||
|
||||
|
||||
14
.github/workflows/buildContainer.yml
vendored
Normal file
14
.github/workflows/buildContainer.yml
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: 'Checkout Github Action'
|
||||
uses: actions/checkout@master
|
||||
- name: Build image 🔧
|
||||
run: |
|
||||
docker build "$GITHUB_WORKSPACE/src/TerribleDev.Blog.Web" -f "src/TerribleDev.Blog.Web/Dockerfile"
|
||||
13
.vscode/extensions.json
vendored
Normal file
13
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
// See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
|
||||
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
|
||||
|
||||
// List of extensions which should be recommended for users of this workspace.
|
||||
"recommendations": [
|
||||
"ban.spellright"
|
||||
],
|
||||
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
|
||||
"unwantedRecommendations": [
|
||||
|
||||
]
|
||||
}
|
||||
19
.vscode/launch.json
vendored
19
.vscode/launch.json
vendored
@@ -1,18 +1,17 @@
|
||||
{
|
||||
// Use IntelliSense to find out which attributes exist for C# debugging
|
||||
// Use hover for the description of the existing attributes
|
||||
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": ".NET Core Launch (web)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
// If you have changed target frameworks, make sure to update the program path.
|
||||
"program": "${workspaceFolder}/TerribleDev.Blog.Web/bin/Debug/netcoreapp2.2/TerribleDev.Blog.Web.dll",
|
||||
"program": "${workspaceFolder}/src/TerribleDev.Blog.Web/bin/Debug/net7.0/TerribleDev.Blog.Web.dll",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}/TerribleDev.Blog.Web",
|
||||
"cwd": "${workspaceFolder}/src/TerribleDev.Blog.Web",
|
||||
"stopAtEntry": false,
|
||||
"internalConsoleOptions": "openOnSessionStart",
|
||||
"launchBrowser": {
|
||||
@@ -42,5 +41,5 @@
|
||||
"request": "attach",
|
||||
"processId": "${command:pickProcess}"
|
||||
}
|
||||
,]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
19
.vscode/settings.json
vendored
Normal file
19
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"files.eol": "\n",
|
||||
"spellchecker.language": "en_US",
|
||||
"spellchecker.ignoreWordsList": [
|
||||
"dotnet",
|
||||
"csproj's",
|
||||
"VS2017",
|
||||
"vs2017",
|
||||
"refactor"
|
||||
],
|
||||
"spellchecker.documentTypes": [
|
||||
"markdown",
|
||||
"latex",
|
||||
"plaintext"
|
||||
],
|
||||
"spellchecker.ignoreRegExp": [],
|
||||
"spellchecker.ignoreFileExtensions": [],
|
||||
"spellchecker.checkInterval": 5000
|
||||
}
|
||||
7
.vscode/spellright.dict
vendored
Normal file
7
.vscode/spellright.dict
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
intellisense
|
||||
docker
|
||||
env
|
||||
mydocklinting
|
||||
eslint
|
||||
webapps
|
||||
wordpress
|
||||
2
.vscode/tasks.json
vendored
2
.vscode/tasks.json
vendored
@@ -7,7 +7,7 @@
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/TerribleDev.Blog.Web/TerribleDev.Blog.Web.csproj"
|
||||
"${workspaceFolder}/src/TerribleDev.Blog.Web/TerribleDev.Blog.Web.csproj"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
|
||||
26
Dockerfile
Normal file
26
Dockerfile
Normal file
@@ -0,0 +1,26 @@
|
||||
# https://hub.docker.com/_/microsoft-dotnet
|
||||
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
|
||||
WORKDIR /source
|
||||
|
||||
# copy csproj and restore as distinct layers
|
||||
COPY ./src/TerribleDev.Blog.Web/*.csproj .
|
||||
RUN dotnet restore -r linux-musl-x64 /p:PublishReadyToRunComposite=true
|
||||
|
||||
# copy everything else and build app
|
||||
COPY ./src/TerribleDev.Blog.Web/ .
|
||||
RUN dotnet publish -c release -o /app -r linux-musl-x64 --self-contained true --no-restore /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
|
||||
RUN date +%s > /app/buildtime.txt
|
||||
# final stage/image
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:7.0-alpine-amd64
|
||||
WORKDIR /app
|
||||
COPY --from=build /app ./
|
||||
|
||||
# See: https://github.com/dotnet/announcements/issues/20
|
||||
# Uncomment to enable globalization APIs (or delete)
|
||||
# ENV \
|
||||
# DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false \
|
||||
# LC_ALL=en_US.UTF-8 \
|
||||
# LANG=en_US.UTF-8
|
||||
# RUN apk add --no-cache icu-libs
|
||||
|
||||
ENTRYPOINT ["./TerribleDev.Blog.Web"]
|
||||
12
Dockerfile.old
Normal file
12
Dockerfile.old
Normal file
@@ -0,0 +1,12 @@
|
||||
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
|
||||
WORKDIR /app
|
||||
|
||||
# Copy everything else and build
|
||||
COPY /src/TerribleDev.Blog.Web .
|
||||
RUN dotnet publish -c release -o /out -r linux-musl-x64 --self-contained true /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
|
||||
RUN date +%s > /out/buildtime.txt
|
||||
# Build runtime image
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-alpine-amd64
|
||||
WORKDIR /app
|
||||
COPY --from=build /app/out .
|
||||
ENTRYPOINT ["./TerribleDev.Blog.Web"]
|
||||
34
Readme.md
Normal file
34
Readme.md
Normal file
@@ -0,0 +1,34 @@
|
||||
The code for [blog.terrible.dev](https://blog.terrible.dev). My blog powered by a bespoke blog engine, at one time I thought I'd try to make an open source product for dotnet core, but then got lazy. Probably one of the fastest blog site's you will ever see. Prior to this my blog was built with hugo, and thus authoring was made backward compatible with hugo's posts (3+ years ago). I honestly built many versions including an F#, OCAML, and typescript until I just landed on C#. The implementation here was evolved from the F# version but I switched to C# when I realized there was almost no text editor support for razor files in an F# project. At that time as well dotnet was really kicking off on the tech empower benchmarks. This uses an active server, and not a static site build, because HTML can be emitted conditionally on desktop/mobile headers to minimize html size over the wire.
|
||||
|
||||
## Technology
|
||||
|
||||
Almost everything was made based on page speed, and nothing more. Some of the code, and decisions seem weird until you realize it's all about page speed and nothing else.
|
||||
|
||||
- dotnet 6
|
||||
- Markdown
|
||||
- [Markdig](https://github.com/xoofx/markdig) for parsing markdown
|
||||
- This also has some extensions to ensure all relative links end in `/` and external links have `rel="noopener"` and `target="_blank"`, also to include a picture tag for webp images
|
||||
- YML for configuring a blog's posts *front matter* aka configuration
|
||||
- PWA/Serviceworkers
|
||||
- Used to precache pages which could be read offline, and store a 404 page for offline
|
||||
- Also used to give an almost SPA like experience (html is swapped from the server, no page reloads occur)
|
||||
- CSS variables used for theming
|
||||
- Vanilla JS for handling the hamburger menu.
|
||||
- No framework for minimum size
|
||||
|
||||
|
||||
|
||||
## Authoring
|
||||
|
||||
Authoring is done in markdown. Configuration for a post must be at the top of the document and placed before `---` (three dashes). Posts include a `<!-- more -->` tag to indicate where the post should be cut off for the summary.
|
||||
|
||||
## Convert images to webp (TODO: make this automatic)
|
||||
|
||||
find . -iname '*.png' -exec cwebp -lossless '{}' -o '{}'.webp \;
|
||||
find . -iname '*.jpg' -exec cwebp '{}' -o '{}'.webp \;
|
||||
find . -iname '*.gif' -exec gif2webp -mixed '{}' -o '{}'.webp \;
|
||||
|
||||
|
||||
## resize image to max width (TODO: Also make this automatic 🤣)
|
||||
|
||||
find . -iname '*' -exec convert '{}' -resize 750 '{}' \;
|
||||
@@ -7,6 +7,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{E6C01762-AEB
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TerribleDev.Blog.Web", "src\TerribleDev.Blog.Web\TerribleDev.Blog.Web.csproj", "{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TerribleDev.Blog.CLI", "src\TerribleDev.Blog.CLI\TerribleDev.Blog.CLI.csproj", "{BF5B7187-1B1C-44B3-949A-38B96EEBD625}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -29,12 +31,25 @@ Global
|
||||
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x64.Build.0 = Release|Any CPU
|
||||
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2}.Release|x86.Build.0 = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x64.Build.0 = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{BAA8662D-6D38-4811-A6FF-7A61D0C633D2} = {E6C01762-AEBF-47C4-8D95-383504D8BC70}
|
||||
{BF5B7187-1B1C-44B3-949A-38B96EEBD625} = {E6C01762-AEBF-47C4-8D95-383504D8BC70}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {CFA796F1-4389-452F-B224-E64C72E907C4}
|
||||
|
||||
9
docker-compose.yml
Normal file
9
docker-compose.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
version: '3'
|
||||
services:
|
||||
webapp:
|
||||
build: ./src/TerribleDev.Blog.Web
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
environment:
|
||||
- ASPNETCORE_ENVIRONMENT=development
|
||||
38
fly.toml
Normal file
38
fly.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
# fly.toml file generated for dry-meadow-9911 on 2022-11-09T12:09:05-05:00
|
||||
|
||||
app = "dry-meadow-9911"
|
||||
kill_signal = "SIGINT"
|
||||
kill_timeout = 5
|
||||
processes = []
|
||||
|
||||
[env]
|
||||
|
||||
[experimental]
|
||||
allowed_public_ports = []
|
||||
auto_rollback = true
|
||||
|
||||
[[services]]
|
||||
http_checks = []
|
||||
internal_port = 80
|
||||
processes = ["app"]
|
||||
protocol = "tcp"
|
||||
script_checks = []
|
||||
[services.concurrency]
|
||||
hard_limit = 25
|
||||
soft_limit = 20
|
||||
type = "connections"
|
||||
|
||||
[[services.ports]]
|
||||
force_https = true
|
||||
handlers = ["http"]
|
||||
port = 80
|
||||
|
||||
# [[services.ports]]
|
||||
# handlers = ["tls", "http"]
|
||||
# port = 443
|
||||
|
||||
[[services.tcp_checks]]
|
||||
grace_period = "2s"
|
||||
interval = "3s"
|
||||
restart_limit = 0
|
||||
timeout = "2s"
|
||||
346
src/TerribleDev.Blog.CLI/.gitignore
vendored
Normal file
346
src/TerribleDev.Blog.CLI/.gitignore
vendored
Normal file
@@ -0,0 +1,346 @@
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
##
|
||||
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
|
||||
|
||||
# User-specific files
|
||||
*.rsuser
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
x64/
|
||||
x86/
|
||||
[Aa][Rr][Mm]/
|
||||
[Aa][Rr][Mm]64/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
[Ll]og/
|
||||
|
||||
# Visual Studio 2015/2017 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# Visual Studio 2017 auto generated files
|
||||
Generated\ Files/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUNIT
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# Benchmark Results
|
||||
BenchmarkDotNet.Artifacts/
|
||||
|
||||
# .NET Core
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
|
||||
# StyleCop
|
||||
StyleCopReport.xml
|
||||
|
||||
# Files built by Visual Studio
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_h.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.iobj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.ipdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*_wpftmp.csproj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# Visual Studio Trace Files
|
||||
*.e2e
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# JustCode is a .NET coding add-in
|
||||
.JustCode
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# AxoCover is a Code Coverage Tool
|
||||
.axoCover/*
|
||||
!.axoCover/settings.json
|
||||
|
||||
# Visual Studio code coverage results
|
||||
*.coverage
|
||||
*.coveragexml
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# Note: Comment the next line if you want to checkin your web deploy settings,
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/[Pp]ackages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/[Pp]ackages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/[Pp]ackages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignorable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
*.appx
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!?*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
orleans.codegen.cs
|
||||
|
||||
# Including strong name files can present a security risk
|
||||
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
|
||||
#*.snk
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
# ASP.NET Core default setup: bower directory is configured as wwwroot/lib/ and bower restore is true
|
||||
**/wwwroot/lib/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
ServiceFabricBackup/
|
||||
*.rptproj.bak
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
*.ndf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
*.rptproj.rsuser
|
||||
*- Backup*.rdl
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
node_modules/
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||
*.vbw
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# JetBrains Rider
|
||||
.idea/
|
||||
*.sln.iml
|
||||
|
||||
# CodeRush personal settings
|
||||
.cr/personal
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
# tools/**
|
||||
# !tools/packages.config
|
||||
|
||||
# Tabs Studio
|
||||
*.tss
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# BizTalk build output
|
||||
*.btp.cs
|
||||
*.btm.cs
|
||||
*.odx.cs
|
||||
*.xsd.cs
|
||||
|
||||
# OpenCover UI analysis results
|
||||
OpenCover/
|
||||
|
||||
# Azure Stream Analytics local run output
|
||||
ASALocalRun/
|
||||
|
||||
# MSBuild Binary and Structured Log
|
||||
*.binlog
|
||||
|
||||
# NVidia Nsight GPU debugger configuration file
|
||||
*.nvuser
|
||||
|
||||
# MFractors (Xamarin productivity tool) working folder
|
||||
.mfractor/
|
||||
|
||||
# Local History for Visual Studio
|
||||
.localhistory/
|
||||
|
||||
# BeatPulse healthcheck temp database
|
||||
healthchecksdb
|
||||
|
||||
# Backup folder for Package Reference Convert tool in Visual Studio 2017
|
||||
MigrationBackup/
|
||||
nupkg
|
||||
26
src/TerribleDev.Blog.CLI/PostBuilder.cs
Normal file
26
src/TerribleDev.Blog.CLI/PostBuilder.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Drawing;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using Pastel;
|
||||
namespace TerribleDev.Blog.CLI
|
||||
{
|
||||
public class PostBuilder
|
||||
{
|
||||
public static string Build(string title)
|
||||
{
|
||||
return $@"title: {title}{Environment.NewLine}date: {DateTime.Now.ToString("yyyy-MM-dd hh:mm")}{Environment.NewLine}tags:{Environment.NewLine}---";
|
||||
}
|
||||
public static List<string> ListPosts()
|
||||
{
|
||||
if (!Directory.Exists("Posts"))
|
||||
{
|
||||
Console.Error.WriteLine($"Cannot find post directory, are you sure you are in the blog directory?");
|
||||
Environment.Exit(1);
|
||||
}
|
||||
var posts = Directory.GetFiles("Posts", "*.md");
|
||||
return posts.Select(x => (Path.GetFileNameWithoutExtension(x).Replace('-', ' ').Pastel(Color.LightBlue))).ToList();
|
||||
}
|
||||
}
|
||||
}
|
||||
80
src/TerribleDev.Blog.CLI/Program.cs
Normal file
80
src/TerribleDev.Blog.CLI/Program.cs
Normal file
@@ -0,0 +1,80 @@
|
||||
using System;
|
||||
using McMaster.Extensions.CommandLineUtils;
|
||||
using System.IO;
|
||||
using Pastel;
|
||||
using System.Drawing;
|
||||
using System.Linq;
|
||||
|
||||
namespace TerribleDev.Blog.CLI
|
||||
{
|
||||
class Program
|
||||
{
|
||||
public static int Main(string[] args)
|
||||
{
|
||||
var app = new CommandLineApplication() {
|
||||
Name = "Tempo",
|
||||
Description = "A simple blog generator"
|
||||
};
|
||||
app.MakeSuggestionsInErrorMessage = true;
|
||||
app.HelpOption(inherited: true);
|
||||
app.OnExecute(() => {
|
||||
app.ShowHelp();
|
||||
return 1;
|
||||
});
|
||||
app.Command("post", b =>
|
||||
{
|
||||
b.MakeSuggestionsInErrorMessage = true;
|
||||
b.OnExecute(() => {
|
||||
b.ShowHelp();
|
||||
Environment.Exit(1);
|
||||
});
|
||||
b.Command("list", a => {
|
||||
a.OnExecute(() => {
|
||||
PostBuilder.ListPosts().ForEach(Console.WriteLine);
|
||||
return 0;
|
||||
});
|
||||
});
|
||||
b.Command("new", a =>
|
||||
{
|
||||
var title = a.Argument("Title", "The title of the post");
|
||||
a.OnExecute(() =>
|
||||
{
|
||||
var titleValue = title.Value;
|
||||
var fileName = $"{titleValue.Replace(" ", "-")}.md";
|
||||
var targetDir = Path.Combine("Posts", fileName);
|
||||
var assetPathName = fileName.Replace(".md", "");
|
||||
var assetPath = Path.Combine("wwwroot", "img", assetPathName);
|
||||
if (!Directory.Exists("Posts"))
|
||||
{
|
||||
Console.Error.WriteLine(($"Cannot find post directory, are you sure you are in the blog directory?").Pastel("#ff3c2e"));
|
||||
return 1;
|
||||
}
|
||||
Console.WriteLine(("Building file 🚀").Pastel("#80ff40"));
|
||||
File.WriteAllText(Path.Combine("Posts", fileName), PostBuilder.Build(titleValue));
|
||||
Console.WriteLine(("Creating wwwroot directory 🛠").Pastel("#80ff40"));
|
||||
Directory.CreateDirectory(assetPath);
|
||||
Console.WriteLine(("Adding keep files 📝").Pastel("#80ff40"));
|
||||
File.Create(Path.Combine(assetPath, ".keep"));
|
||||
Console.WriteLine(("Done! 🎉").Pastel("#80ff40"));
|
||||
return 0;
|
||||
});
|
||||
});
|
||||
});
|
||||
try {
|
||||
return app.Execute(args);
|
||||
}
|
||||
catch (UnrecognizedCommandParsingException e) {
|
||||
Console.WriteLine();
|
||||
Console.Error.WriteLine(e.Message.Pastel("#ff3c2e"));
|
||||
Console.WriteLine();
|
||||
Console.Error.WriteLine($"The most similar command is {Environment.NewLine} {e.NearestMatches.FirstOrDefault()}");
|
||||
Console.WriteLine();
|
||||
return 1;
|
||||
}
|
||||
catch (CommandParsingException e) {
|
||||
Console.Error.WriteLine(e.Message.Pastel("#ff3c2e"));
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
18
src/TerribleDev.Blog.CLI/TerribleDev.Blog.CLI.csproj
Normal file
18
src/TerribleDev.Blog.CLI/TerribleDev.Blog.CLI.csproj
Normal file
@@ -0,0 +1,18 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>netcoreapp7.0</TargetFramework>
|
||||
<IsPackable>true</IsPackable>
|
||||
<PackAsTool>true</PackAsTool>
|
||||
<ToolCommandName>tempo</ToolCommandName>
|
||||
<PackageOutputPath>./nupkg</PackageOutputPath>
|
||||
<AssemblyName>Tempo</AssemblyName>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="McMaster.Extensions.CommandLineUtils" Version="4.0.1" />
|
||||
<PackageReference Include="pastel" Version="3.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
5
src/TerribleDev.Blog.CLI/install.sh
Executable file
5
src/TerribleDev.Blog.CLI/install.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
dotnet restore
|
||||
dotnet pack /p:Version=1.2.0 /p:Configuration=Release
|
||||
dotnet tool uninstall --global tempo
|
||||
dotnet tool install --global --add-source ./nupkg Tempo
|
||||
2
src/TerribleDev.Blog.Web/.dockerignore
Normal file
2
src/TerribleDev.Blog.Web/.dockerignore
Normal file
@@ -0,0 +1,2 @@
|
||||
bin
|
||||
obj
|
||||
23
src/TerribleDev.Blog.Web/Controllers/ApiController.cs
Normal file
23
src/TerribleDev.Blog.Web/Controllers/ApiController.cs
Normal file
@@ -0,0 +1,23 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Controllers
|
||||
{
|
||||
[ApiController]
|
||||
[Route("[controller]")]
|
||||
public class ApiController : ControllerBase
|
||||
{
|
||||
private readonly PostCache postCache;
|
||||
public ApiController(PostCache postCache)
|
||||
{
|
||||
this.postCache = postCache;
|
||||
}
|
||||
[Route("all")]
|
||||
public ActionResult<IEnumerable<IPost>> PostCount(string key)
|
||||
{
|
||||
return this.Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,53 +7,49 @@ using Microsoft.AspNetCore.Mvc;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
using System.IO;
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using TerribleDev.Blog.Web.Filters;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.AspNetCore.OutputCaching;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Controllers
|
||||
{
|
||||
[Http2PushFilter]
|
||||
public class HomeController : Controller
|
||||
{
|
||||
public static List<IPost> postsAsList = new BlogFactory().GetAllPosts().OrderByDescending(a=>a.PublishDate).ToList();
|
||||
public static Dictionary<string, List<IPost>> tagToPost = postsAsList.Where(a=>a.tags != null)
|
||||
.Aggregate(
|
||||
new Dictionary<string, List<IPost>>(),
|
||||
(accum, item) => {
|
||||
foreach(var tag in item.tags)
|
||||
{
|
||||
if(accum.TryGetValue(tag, out var list))
|
||||
{
|
||||
list.Add(item);
|
||||
}
|
||||
else
|
||||
{
|
||||
accum[tag] = new List<IPost>() { item };
|
||||
}
|
||||
}
|
||||
return accum;
|
||||
});
|
||||
public static IDictionary<string, IPost> posts = postsAsList.ToDictionary(a=>a.Url);
|
||||
public static IDictionary<int, List<IPost>> postsByPage = postsAsList.Aggregate(new Dictionary<int, List<IPost>>() { [1] = new List<IPost>() }, (accum, item) =>
|
||||
{
|
||||
var highestPage = accum.Keys.Max();
|
||||
var current = accum[highestPage].Count;
|
||||
if (current >= 10)
|
||||
{
|
||||
accum[highestPage + 1] = new List<IPost>() { item };
|
||||
return accum;
|
||||
}
|
||||
accum[highestPage].Add(item);
|
||||
return accum;
|
||||
});
|
||||
private readonly ILogger<HomeController> logger;
|
||||
private readonly PostCache postCache;
|
||||
|
||||
public HomeController(PostCache postCache, ILogger<HomeController> logger)
|
||||
{
|
||||
this.logger = logger;
|
||||
this.postCache = postCache;
|
||||
}
|
||||
|
||||
|
||||
|
||||
[Route("/index.html", Order = 2)]
|
||||
[Route("/")]
|
||||
[Route("/index.html")]
|
||||
[Route("/page/{pageNumber?}" )]
|
||||
[Route("/page/{pageNumber:required:int:min(1)}")]
|
||||
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] { "pageNumber" })]
|
||||
[ResponseCache(Duration = 900)]
|
||||
public IActionResult Index(int pageNumber = 1)
|
||||
{
|
||||
if(!postsByPage.TryGetValue(pageNumber, out var result))
|
||||
this.logger.LogWarning("Viewing page", pageNumber);
|
||||
if (!postCache.PostsByPage.TryGetValue(pageNumber, out var result))
|
||||
{
|
||||
return NotFound();
|
||||
return Redirect($"/404/?from=/page/{pageNumber}/");
|
||||
}
|
||||
return View(new HomeViewModel() { Posts = result, Page = pageNumber, HasNext = postsByPage.ContainsKey(pageNumber + 1), HasPrevious = postsByPage.ContainsKey(pageNumber - 1) });
|
||||
return View(new HomeViewModel()
|
||||
{
|
||||
Posts = result,
|
||||
Page = pageNumber,
|
||||
HasNext = postCache.PostsByPage.ContainsKey(pageNumber + 1),
|
||||
HasPrevious = postCache.PostsByPage.ContainsKey(pageNumber - 1),
|
||||
BlogLD = postCache.BlogLD,
|
||||
SiteLD = postCache.SiteLD,
|
||||
BlogLDString = postCache.BlogLDString,
|
||||
SiteLDString = postCache.SiteLDString
|
||||
});
|
||||
}
|
||||
[Route("/theme/{postName?}")]
|
||||
public IActionResult Theme(string postName)
|
||||
@@ -61,6 +57,7 @@ namespace TerribleDev.Blog.Web.Controllers
|
||||
return View(model: postName);
|
||||
}
|
||||
[Route("/offline")]
|
||||
[Route("/offline.html")]
|
||||
[ResponseCache(Duration = 3600)]
|
||||
public IActionResult Offline()
|
||||
{
|
||||
@@ -73,28 +70,61 @@ namespace TerribleDev.Blog.Web.Controllers
|
||||
return View();
|
||||
}
|
||||
|
||||
[Route("{postUrl}")]
|
||||
[OutputCache(Duration = 31536000, VaryByParam = "postUrl")]
|
||||
[ResponseCache(Duration = 180)]
|
||||
public IActionResult Post(string postUrl)
|
||||
[Route("{postUrl}/{amp?}")]
|
||||
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] { "postUrl", "amp" })]
|
||||
[ResponseCache(Duration = 900)]
|
||||
public IActionResult Post(string postUrl, string amp = "")
|
||||
{
|
||||
if(!posts.TryGetValue(postUrl, out var currentPost))
|
||||
if (!String.IsNullOrEmpty(amp) && amp != "amp")
|
||||
{
|
||||
return NotFound();
|
||||
return Redirect($"/404/?from=/{postUrl}/{amp}/");
|
||||
}
|
||||
return View(model: currentPost);
|
||||
var isAmp = amp == "amp";
|
||||
if (isAmp)
|
||||
{
|
||||
return this.RedirectPermanent($"/{postUrl}");
|
||||
}
|
||||
// case sensitive lookup
|
||||
if (postCache.UrlToPost.TryGetValue(postUrl, out var currentPost))
|
||||
{
|
||||
return View("Post", model: new PostViewModel() { Post = currentPost });
|
||||
}
|
||||
// case insensitive lookup on post
|
||||
if (postCache.CaseInsensitiveUrlToPost.TryGetValue(postUrl, out var caseInsensitivePost))
|
||||
{
|
||||
return View("Post", model: new PostViewModel() { Post = caseInsensitivePost });
|
||||
}
|
||||
if (postCache.LandingPagesUrl.TryGetValue(postUrl, out var landingPage))
|
||||
{
|
||||
return View("Post", model: new PostViewModel() { Post = landingPage });
|
||||
}
|
||||
|
||||
this.StatusCode(404);
|
||||
return View(nameof(FourOhFour));
|
||||
}
|
||||
|
||||
[Route("/Error")]
|
||||
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
|
||||
public IActionResult Error()
|
||||
{
|
||||
this.Response.StatusCode = 500;
|
||||
return View(new ErrorViewModel { RequestId = Activity.Current?.Id ?? HttpContext.TraceIdentifier });
|
||||
}
|
||||
|
||||
[Route("/404")]
|
||||
[Route("{*url}", Order = 999)]
|
||||
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
|
||||
public IActionResult FourOhFour()
|
||||
public IActionResult FourOhFour(string from = null)
|
||||
{
|
||||
return View();
|
||||
this.Response.StatusCode = 404;
|
||||
return View(viewName: nameof(FourOhFour));
|
||||
}
|
||||
[Route("/404.html")]
|
||||
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
|
||||
public IActionResult FourOhFourCachePage()
|
||||
{
|
||||
//make a route so the service worker can cache a 404 page, but get a valid status code
|
||||
return View(viewName: nameof(FourOhFour));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
41
src/TerribleDev.Blog.Web/Controllers/SearchController.cs
Normal file
41
src/TerribleDev.Blog.Web/Controllers/SearchController.cs
Normal file
@@ -0,0 +1,41 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Linq;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using TerribleDev.Blog.Web.Filters;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Controllers
|
||||
{
|
||||
[Http2PushFilter]
|
||||
public class SearchController : Controller
|
||||
{
|
||||
private readonly BlogConfiguration configuration;
|
||||
private readonly PostCache postCache;
|
||||
|
||||
public SearchController(BlogConfiguration configuration, PostCache postCache)
|
||||
{
|
||||
this.configuration = configuration;
|
||||
this.postCache = postCache;
|
||||
}
|
||||
[Route("/search")]
|
||||
public IActionResult Index([Required] [MinLength(1)] [MaxLength(500)] string q)
|
||||
{
|
||||
if (string.IsNullOrEmpty(q))
|
||||
{
|
||||
return Redirect("/404/?from=/search/");
|
||||
}
|
||||
if(!ModelState.IsValid)
|
||||
{
|
||||
return Redirect("/404/?from=/search/");
|
||||
}
|
||||
var queries = q.Split(" ");
|
||||
var posts = postCache
|
||||
.PostsAsLists
|
||||
.Where(p =>
|
||||
queries.Any(query => p.Title.Contains(query, System.StringComparison.InvariantCultureIgnoreCase) )
|
||||
|| queries.Any(query => p.Content.ContentPlain.Contains(query, System.StringComparison.InvariantCultureIgnoreCase))).ToList();
|
||||
return View(new SearchViewModel { SearchTerm = q, Posts = posts });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@ using System.Threading.Tasks;
|
||||
using System.Xml;
|
||||
using System.Xml.Serialization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.OutputCaching;
|
||||
using Microsoft.SyndicationFeed;
|
||||
using Microsoft.SyndicationFeed.Rss;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
@@ -15,12 +16,19 @@ namespace TerribleDev.Blog.Web.Controllers
|
||||
{
|
||||
public class SeoController : Controller
|
||||
{
|
||||
public static DateTimeOffset publishDate = DateTimeOffset.UtcNow; // keep publish date in memory so we just return when the server was kicked
|
||||
public static IEnumerable<SyndicationItem> postsToSyndication = HomeController.postsAsList.Select(a => a.ToSyndicationItem()).ToList();
|
||||
private readonly BlogConfiguration configuration;
|
||||
private readonly PostCache postCache;
|
||||
|
||||
public SeoController(BlogConfiguration configuration, PostCache postCache)
|
||||
{
|
||||
this.configuration = configuration;
|
||||
this.postCache = postCache;
|
||||
}
|
||||
public static DateTimeOffset publishDate = DateTimeOffset.UtcNow; // keep publish date in memory so we just return when the server was kicked
|
||||
[Route("/rss")]
|
||||
[Route("/rss.xml")]
|
||||
[ResponseCache(Duration = 7200)]
|
||||
[OutputCache(Duration = 86400)]
|
||||
[OutputCache(Duration = 31536000)]
|
||||
public async Task Rss()
|
||||
{
|
||||
Response.StatusCode = 200;
|
||||
@@ -28,11 +36,11 @@ namespace TerribleDev.Blog.Web.Controllers
|
||||
using (XmlWriter xmlWriter = XmlWriter.Create(this.Response.Body, new XmlWriterSettings() { Async = true, Indent = false, Encoding = Encoding.UTF8 }))
|
||||
{
|
||||
var writer = new RssFeedWriter(xmlWriter);
|
||||
await writer.WriteTitle("The Ramblings of TerribleDev");
|
||||
await writer.WriteValue("link", "https://blog.terribledev.io");
|
||||
await writer.WriteTitle(configuration.Title);
|
||||
await writer.WriteValue("link", configuration.Link);
|
||||
await writer.WriteDescription("My name is Tommy Parnell. I usually go by TerribleDev on the internets. These are just some of my writings and rants about the software space.");
|
||||
|
||||
foreach (var item in postsToSyndication)
|
||||
foreach (var item in postCache.PostsAsSyndication)
|
||||
{
|
||||
await writer.Write(item);
|
||||
}
|
||||
@@ -43,22 +51,23 @@ namespace TerribleDev.Blog.Web.Controllers
|
||||
}
|
||||
[Route("/sitemap.xml")]
|
||||
[ResponseCache(Duration = 7200)]
|
||||
[OutputCache(Duration = 86400)]
|
||||
[OutputCache(Duration = 31536000)]
|
||||
public void SiteMap()
|
||||
{
|
||||
Response.StatusCode = 200;
|
||||
Response.ContentType = "text/xml";
|
||||
var sitewideLinks = new List<SiteMapItem>()
|
||||
var sitewideLinks = new List<SiteMapItem>(postCache.TagsToPosts.Keys.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terrible.dev/tag/{a}/" }))
|
||||
{
|
||||
new SiteMapItem() { LastModified = DateTime.UtcNow, Location="https://blog.terribledev.io/all-tags/" }
|
||||
new SiteMapItem() { LastModified = DateTime.UtcNow, Location="https://blog.terrible.dev/all-tags/" }
|
||||
};
|
||||
var ser = new XmlSerializer(typeof(SiteMapRoot));
|
||||
var sitemap = new SiteMapRoot()
|
||||
{
|
||||
Urls = HomeController.postsAsList.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terribledev.io/{a.Url}" }).ToList()
|
||||
Urls = postCache.PostsAsLists.Select(a => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = a.CanonicalUrl }).ToList()
|
||||
};
|
||||
sitemap.Urls.AddRange(postCache.TagsToPosts.Keys.Select(i => new SiteMapItem() { LastModified = DateTime.UtcNow, Location = $"https://blog.terrible.dev/search?q={i}" }));
|
||||
sitemap.Urls.AddRange(sitewideLinks);
|
||||
ser.Serialize(this.Response.Body, sitemap);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,26 +3,48 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.OutputCaching;
|
||||
using TerribleDev.Blog.Web.Filters;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Controllers
|
||||
{
|
||||
[Http2PushFilter]
|
||||
public class TagsController : Controller
|
||||
{
|
||||
private readonly PostCache postCache;
|
||||
|
||||
public TagsController(PostCache postCache)
|
||||
{
|
||||
this.postCache = postCache;
|
||||
}
|
||||
[Route("/all-tags")]
|
||||
[OutputCache(Duration = 31536000)]
|
||||
public IActionResult AllTags()
|
||||
{
|
||||
return View(HomeController.tagToPost);
|
||||
return View(postCache.TagsToPosts);
|
||||
}
|
||||
[Route("/tags/{tagName}")]
|
||||
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[]{"tagName"})]
|
||||
public IActionResult TagPluralRedirect(string tagName)
|
||||
{
|
||||
if(string.IsNullOrEmpty(tagName))
|
||||
{
|
||||
return Redirect($"/404/?from=/tags/emptyString/");
|
||||
}
|
||||
return Redirect($"/tag/{tagName}/");
|
||||
}
|
||||
[Route("/tag/{tagName}")]
|
||||
[OutputCache(Duration = 31536000, VaryByRouteValueNames = new string[] {"tagName"})]
|
||||
public IActionResult GetTag(string tagName)
|
||||
{
|
||||
if(!HomeController.tagToPost.TryGetValue(tagName, out var models))
|
||||
if(!postCache.TagsToPosts.TryGetValue(tagName.ToLower(), out var models))
|
||||
{
|
||||
return NotFound();
|
||||
return Redirect($"/404/?from=/tag/{tagName}/");
|
||||
}
|
||||
{
|
||||
return View(new Models.GetTagViewModel { Tag = tagName, Posts = models });
|
||||
return View(new Models.GetTagViewModel { Tag = tagName, Posts = models, CanonicalUrl = $"https://blog.terrible.dev/tag/{tagName.ToLower()}/" });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +1,26 @@
|
||||
FROM microsoft/dotnet:2.2-aspnetcore-runtime AS base
|
||||
WORKDIR /app
|
||||
EXPOSE 80
|
||||
EXPOSE 443
|
||||
# https://hub.docker.com/_/microsoft-dotnet
|
||||
FROM mcr.microsoft.com/dotnet/sdk:7.0-alpine AS build
|
||||
WORKDIR /source
|
||||
|
||||
FROM microsoft/dotnet:2.2-sdk AS build
|
||||
WORKDIR /src
|
||||
COPY ["./TerribleDev.Blog.Web.csproj", "."]
|
||||
RUN dotnet restore "TerribleDev.Blog.Web.csproj"
|
||||
# copy csproj and restore as distinct layers
|
||||
COPY *.csproj .
|
||||
RUN dotnet restore -r linux-musl-x64 /p:PublishReadyToRunComposite=true
|
||||
|
||||
# copy everything else and build app
|
||||
COPY . .
|
||||
WORKDIR "/src"
|
||||
RUN dotnet build "TerribleDev.Blog.Web.csproj" -c Release -o /app
|
||||
RUN dotnet publish -c release -o /app -r linux-musl-x64 --self-contained true --no-restore /p:PublishTrimmed=true /p:PublishReadyToRunComposite=true /p:PublishSingleFile=true
|
||||
|
||||
FROM build AS publish
|
||||
RUN dotnet publish "TerribleDev.Blog.Web.csproj" -c Release -o /app
|
||||
|
||||
FROM base AS final
|
||||
# final stage/image
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:7.0-alpine-amd64
|
||||
WORKDIR /app
|
||||
COPY --from=publish /app .
|
||||
ENTRYPOINT ["dotnet", "TerribleDev.Blog.Web.dll"]
|
||||
COPY --from=build /app ./
|
||||
|
||||
# See: https://github.com/dotnet/announcements/issues/20
|
||||
# Uncomment to enable globalization APIs (or delete)
|
||||
# ENV \
|
||||
# DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false \
|
||||
# LC_ALL=en_US.UTF-8 \
|
||||
# LANG=en_US.UTF-8
|
||||
# RUN apk add --no-cache icu-libs
|
||||
|
||||
ENTRYPOINT ["./TerribleDev.Blog.Web"]
|
||||
@@ -0,0 +1,20 @@
|
||||
title: Hosting your blog on the cheap
|
||||
date: 2019-08-17 04:49:46
|
||||
tags:
|
||||
- cloud
|
||||
|
||||
---
|
||||
|
||||
A load of people have been asking me lately how I host my blog. Incase its not apparent, I make 0 dollars on this blog. I refuse to place ads on the page, just to gain pennies of revenue. I do this, not because I don't feel like I shouldn't get paid, but simply because I find ads to be disruptive to the reader. At the end of the day, blogs should have a high signal to noise ratio.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
Since I make no money, on this my strategy is about cutting costs. My grandfather use to say "take care of the pounds, let the pennies take care of themselves." Now since my grandfather is in England, and their dollar is known as the pound, he was telling me to focus on the bigger picture.
|
||||
|
||||
The first big decision for blogs is what "engine" you are going to use, or if you are going to make your own. These usually fall into 2 categories. Static sites, which are usually when blogs are written in text files, and are compiled into static html, or server rendered blogs such as wordpress. When a request is made to blog that has server rendering, the html is dynamically built in time and delivered to the consumer. Static sites, on the other hand are precomputed and thus are just delivered to the browser.
|
||||
|
||||
I won't go into the details on what is better for different scenarios. If you are being cheap, then you will want to use static sites. Static sites are precomputed, which essentially means you just need to serve files to the user. There is no dynamic server to host, you won't need a database, etc. There are a few I like, but my favorite is [gatsbyjs](https://www.gatsbyjs.org/).
|
||||
|
||||
|
||||
So I know what you are thinking, static sites are just 'better' for page load time. While this is true, they can lack dynamic features that might be important to you, such as adding new blog posts on a schedule, or limiting ip addresses, or even some kind of login/subscription model.
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
title: Hosting your webapp on the cheap
|
||||
date: 2018-08-22 05:11:20
|
||||
tags:
|
||||
- cloud
|
||||
---
|
||||
|
||||
|
||||
So many people have asked me how I've hosted apps in the past. There is a bit of an art at the moment to making your apps extremely cheap in the cloud. I've heard of hosting costs cut from thousands to pennies.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Hosting
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
title: 'I used ask.com for 30 days, and this is what I learned'
|
||||
tags:
|
||||
---
|
||||
@@ -0,0 +1,3 @@
|
||||
title: Migrating from azure web app to containers
|
||||
tags:
|
||||
---
|
||||
@@ -0,0 +1,3 @@
|
||||
title: Precompiling razor views in dotnet core
|
||||
tags:
|
||||
---
|
||||
@@ -0,0 +1,3 @@
|
||||
title: Securing your dotnet core apps with hardhat
|
||||
tags:
|
||||
---
|
||||
@@ -0,0 +1,16 @@
|
||||
title: The ultimate chaos monkey. When your cloud provider goes down!
|
||||
date: 2017-03-13 15:20:14
|
||||
tags:
|
||||
- amazon
|
||||
- aws
|
||||
- cloud
|
||||
- DevOps
|
||||
---
|
||||
|
||||
A few weeks ago, the internet delt with the fallout that was [the aws outage](https://techcrunch.com/2017/02/28/amazon-aws-s3-outage-is-breaking-things-for-a-lot-of-websites-and-apps/). AWS, or Amazon Web Services is amazon's cloud platform, and the most popular one to use. There are other platforms similar in scope such as Microsoft's Azure. Amazon had an S3 outage, that ultimately caused other services to fail in the most popular, and oldest region they own. The region dubbed `us-east-1` which is in Virgina.
|
||||
|
||||
This was one of the largest cloud outages we have seen, and users of the cloud found out first hand that the cloud is imperfect. In short when you are using the cloud, you are using services, and infrastructure developed by human beings. However most people turn to tools such as cloud vendors, since the scope of their applications do not, and should not include management of large infrastructure.
|
||||
|
||||
The Netflix, and amazon's of the world are large. Really large, and total avalibility is not just a prefered option, but a basic requirement. Companies that are huge users of the cloud, have started to think about region level depenencies. In short, for huge companies, being in one region is perilous, and frought with danger.
|
||||
|
||||
Infact this isn't the first time we have heard such things. In 2013 Netflix published [an article](http://techblog.netflix.com/2013/05/denominating-multi-region-sites.html) describing how they run in multiple regions. There is an obvious cost in making something work multi-region. This is pretty much for the large companies, however if you are a multi billion dollar organization, working multi-region would probably be an awesome idea.
|
||||
0
src/TerribleDev.Blog.Web/Drafts/keep.md
Normal file
0
src/TerribleDev.Blog.Web/Drafts/keep.md
Normal file
12
src/TerribleDev.Blog.Web/Extensions/ArrayExtensions.cs
Normal file
12
src/TerribleDev.Blog.Web/Extensions/ArrayExtensions.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using System;
|
||||
|
||||
namespace TerribleDev.Blog.Web
|
||||
{
|
||||
public static class ArrayExtensions
|
||||
{
|
||||
public static string ToHexString(this byte[] bytes)
|
||||
{
|
||||
return Convert.ToHexString(bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,15 +9,26 @@ namespace TerribleDev.Blog.Web
|
||||
{
|
||||
public static class IPostExtensions
|
||||
{
|
||||
public static SyndicationItem ToSyndicationItem(this IPost x)
|
||||
public static SyndicationItem ToSyndicationItem(this Post x)
|
||||
{
|
||||
return new SyndicationItem()
|
||||
Uri.TryCreate(x.CanonicalUrl, UriKind.Absolute, out var url);
|
||||
var syn = new SyndicationItem()
|
||||
{
|
||||
Title = x.Title,
|
||||
Description = x.ContentPlain,
|
||||
Id = $"https://blog.terribledev.io/{x.Url}",
|
||||
Published = x.PublishDate
|
||||
Description = x.Content.Content.ToString(),
|
||||
Id = url.ToString(),
|
||||
Published = x.PublishDate,
|
||||
};
|
||||
syn.AddLink(new SyndicationLink(url));
|
||||
return syn;
|
||||
}
|
||||
public static ISet<string> ToNormalizedTagList(this Post x)
|
||||
{
|
||||
if(x.tags == null)
|
||||
{
|
||||
return new HashSet<string>();
|
||||
}
|
||||
return new HashSet<string>(x.tags.Where(a => !string.IsNullOrWhiteSpace(a)).Select(a => a.ToLower()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
132
src/TerribleDev.Blog.Web/Factories/BlogCacheFactory.cs
Normal file
132
src/TerribleDev.Blog.Web/Factories/BlogCacheFactory.cs
Normal file
@@ -0,0 +1,132 @@
|
||||
using System.Collections.Generic;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
using System.Linq;
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System;
|
||||
using Microsoft.SyndicationFeed;
|
||||
using Schema.NET;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Factories
|
||||
{
|
||||
public static class BlogCacheFactory
|
||||
{
|
||||
public static int PAGE_LIMIT = 10;
|
||||
|
||||
public static PostCache ProjectPostCache(IEnumerable<IPost> rawPosts)
|
||||
{
|
||||
var orderedPosts = rawPosts.OrderByDescending(a => a.PublishDate);
|
||||
var posts = new List<IPost>(orderedPosts);
|
||||
var urlToPosts = new Dictionary<string, IPost>();
|
||||
var caseInsensitiveUrlToPost = new Dictionary<string, IPost>(StringComparer.OrdinalIgnoreCase);
|
||||
var tagsToPost = new Dictionary<string, IList<Post>>();
|
||||
var postsByPage = new Dictionary<int, IList<Post>>();
|
||||
var syndicationPosts = new List<SyndicationItem>();
|
||||
var landingPagesUrl = new Dictionary<string, LandingPage>();
|
||||
var blogPostsLD = new List<Schema.NET.IBlogPosting>();
|
||||
foreach (var post in orderedPosts)
|
||||
{
|
||||
|
||||
if (post is Post)
|
||||
{
|
||||
var castedPost = post as Post;
|
||||
urlToPosts.Add(post.UrlWithoutPath, castedPost);
|
||||
caseInsensitiveUrlToPost.Add(post.UrlWithoutPath.ToLower(), castedPost);
|
||||
syndicationPosts.Add(castedPost.ToSyndicationItem());
|
||||
blogPostsLD.Add(post.Content.JsonLD);
|
||||
foreach (var tag in castedPost.ToNormalizedTagList())
|
||||
{
|
||||
if (tagsToPost.TryGetValue(tag, out var list))
|
||||
{
|
||||
list.Add(castedPost);
|
||||
}
|
||||
else
|
||||
{
|
||||
tagsToPost.Add(tag, new List<Post>() { castedPost });
|
||||
}
|
||||
}
|
||||
if (postsByPage.Keys.Count < 1)
|
||||
{
|
||||
postsByPage.Add(1, new List<Post>() { castedPost });
|
||||
}
|
||||
else
|
||||
{
|
||||
var highestPageKey = postsByPage.Keys.Max();
|
||||
var highestPage = postsByPage[highestPageKey];
|
||||
if (highestPage.Count < BlogCacheFactory.PAGE_LIMIT)
|
||||
{
|
||||
highestPage.Add(castedPost);
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
postsByPage.Add(highestPageKey + 1, new List<Post>() { castedPost });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (post is LandingPage)
|
||||
{
|
||||
var castedPost = post as LandingPage;
|
||||
landingPagesUrl.Add(castedPost.UrlWithoutPath, castedPost);
|
||||
}
|
||||
}
|
||||
var ld = new Schema.NET.Blog()
|
||||
{
|
||||
Name = "TerribleDev Blog",
|
||||
Description = "The blog of Tommy Parnell",
|
||||
Author = new Schema.NET.Person() { Name = "TerribleDev", Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")) },
|
||||
Image = new Schema.NET.ImageObject() { Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/content/tommyAvatar4.jpg")) },
|
||||
Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")),
|
||||
SameAs = new Schema.NET.OneOrMany<Uri>(new Uri("https://twitter.com/terribledev")),
|
||||
};
|
||||
var website = new Schema.NET.WebSite()
|
||||
{
|
||||
Name = "TerribleDev Blog",
|
||||
Description = "The blog of Tommy Parnell",
|
||||
Author = new Schema.NET.Person() { Name = "TerribleDev", Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/about")) },
|
||||
Image = new Schema.NET.ImageObject() { Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/content/tommyAvatar4.jpg")) },
|
||||
Url = new Schema.NET.OneOrMany<Uri>(new Uri("https://blog.terrible.dev/")),
|
||||
SameAs = new Schema.NET.OneOrMany<Uri>(new Uri("https://twitter.com/terribledev")),
|
||||
PotentialAction = new Schema.NET.OneOrMany<Schema.NET.IAction>(
|
||||
// search action
|
||||
new List<Schema.NET.SearchAction>()
|
||||
{
|
||||
new Schema.NET.SearchAction()
|
||||
{
|
||||
Target = new Schema.NET.EntryPoint()
|
||||
{
|
||||
UrlTemplate = new Schema.NET.OneOrMany<string>(@"https://blog.terrible.dev/search?q={search-term}")
|
||||
},
|
||||
QueryInput = new Schema.NET.Values<string, Schema.NET.PropertyValueSpecification>(
|
||||
new OneOrMany<PropertyValueSpecification>(
|
||||
new PropertyValueSpecification()
|
||||
{
|
||||
ValueName = "search-term",
|
||||
ValueRequired = true,
|
||||
ValueMinLength = 1,
|
||||
ValueMaxLength = 500,
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
};
|
||||
return new PostCache()
|
||||
{
|
||||
LandingPagesUrl = landingPagesUrl,
|
||||
PostsAsLists = posts,
|
||||
TagsToPosts = tagsToPost,
|
||||
UrlToPost = urlToPosts,
|
||||
PostsByPage = postsByPage,
|
||||
PostsAsSyndication = syndicationPosts,
|
||||
BlogLD = ld,
|
||||
SiteLD = website,
|
||||
BlogLDString = ld.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
|
||||
SiteLDString = website.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
|
||||
CaseInsensitiveUrlToPost = caseInsensitiveUrlToPost
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,23 +7,41 @@ using TerribleDev.Blog.Web.Models;
|
||||
using YamlDotNet.Serialization;
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using Markdig;
|
||||
using TerribleDev.Blog.Web.MarkExtension.TerribleDev.Blog.Web.ExternalLinkParser;
|
||||
using TerribleDev.Blog.Web.MarkExtension;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using System.Diagnostics;
|
||||
using System.Collections.Concurrent;
|
||||
using Schema.NET;
|
||||
using System.Text.RegularExpressions;
|
||||
using TerribleDev.Blog.Web.Factories;
|
||||
using System.Text;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace TerribleDev.Blog.Web
|
||||
{
|
||||
public class BlogFactory
|
||||
{
|
||||
public List<IPost> GetAllPosts()
|
||||
private CodeFactory _codeFactory = new CodeFactory();
|
||||
public async Task<IEnumerable<IPost>> GetAllPostsAsync(string domain)
|
||||
{
|
||||
// why didn't I use f# I'd have a pipe operator by now
|
||||
var posts = GetPosts();
|
||||
var allPosts = posts.AsParallel().Select(a =>
|
||||
|
||||
return await Task.WhenAll(posts.Select(async (post) =>
|
||||
{
|
||||
var fileInfo = new FileInfo(a);
|
||||
var fileText = File.ReadAllText(fileInfo.FullName);
|
||||
return ParsePost(fileText, fileInfo.Name);
|
||||
});
|
||||
return allPosts.ToList();
|
||||
var (text, fileInfo) = await GetFileText(post);
|
||||
return await ParsePost(text, fileInfo.Name, domain);
|
||||
}));
|
||||
}
|
||||
|
||||
private static async Task<(string text, FileInfo fileInfo)> GetFileText(string filePath)
|
||||
{
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
var text = await File.ReadAllTextAsync(fileInfo.FullName);
|
||||
return (text, fileInfo);
|
||||
}
|
||||
|
||||
public IEnumerable<string> GetPosts() => Directory.EnumerateFiles(Path.Combine(Directory.GetCurrentDirectory(), "Posts"), "*.md", SearchOption.TopDirectoryOnly);
|
||||
|
||||
public PostSettings ParseYaml(string ymlText)
|
||||
@@ -32,30 +50,143 @@ namespace TerribleDev.Blog.Web
|
||||
return serializer.Deserialize<PostSettings>(ymlText);
|
||||
|
||||
}
|
||||
public IPost ParsePost(string postText, string fileName)
|
||||
public async Task<(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, Boolean hasCode)> ResolveContentForPost(string markdownText, string fileName, string resolvedUrl, string domain)
|
||||
{
|
||||
List<string> postImages = new List<string>();
|
||||
var pipeline = new MarkdownPipelineBuilder()
|
||||
.Use(new AbsoluteLinkConverter(resolvedUrl, domain))
|
||||
.Use<ImageRecorder>(new ImageRecorder(ref postImages))
|
||||
.Use<TargetLinkExtension>()
|
||||
.UseMediaLinks()
|
||||
.Use<PictureInline>()
|
||||
.UseEmojiAndSmiley()
|
||||
.Build();
|
||||
var (replacedText, hasCode) = await _codeFactory.ReplaceFencedCode(markdownText);
|
||||
var postContent = Markdown.ToHtml(replacedText, pipeline);
|
||||
var postContentPlain = String.Join("", Markdown.ToPlainText(replacedText, pipeline).Split("<!-- more -->"));
|
||||
var summary = postContent.Split("<!-- more -->")[0];
|
||||
var postSummaryPlain = postContentPlain.Split("<!-- more -->")[0];
|
||||
return (postContent, postContentPlain, summary, postSummaryPlain, postImages, hasCode);
|
||||
}
|
||||
public async Task<IPost> ParsePost(string postText, string fileName, string domain)
|
||||
{
|
||||
var splitFile = postText.Split("---");
|
||||
var ymlRaw = splitFile[0];
|
||||
var markdownText = string.Join("", splitFile.Skip(1));
|
||||
var pipeline = new MarkdownPipelineBuilder().UseEmojiAndSmiley().Build();
|
||||
var postContent = Markdown.ToHtml(markdownText, pipeline);
|
||||
var postContentPlain = String.Join("", Markdown.ToPlainText(markdownText, pipeline).Split("<!-- more -->"));
|
||||
var postSettings = ParseYaml(ymlRaw);
|
||||
var resolvedUrl = !string.IsNullOrWhiteSpace(postSettings.permalink) ? postSettings.permalink : fileName.Split('.')[0].Replace(' ', '-').WithoutSpecialCharacters();
|
||||
var summary = postContent.Split("<!-- more -->")[0];
|
||||
var postSummaryPlain = postContentPlain.Split("<!-- more -->")[0];
|
||||
return new Post()
|
||||
{
|
||||
PublishDate = postSettings.date,
|
||||
tags = postSettings.tags?.Select(a=>a.Replace(' ', '-').WithoutSpecialCharacters().ToLower()).ToList() ?? new List<string>(),
|
||||
Title = postSettings.title,
|
||||
Url = resolvedUrl,
|
||||
Content = new HtmlString(postContent),
|
||||
Summary = new HtmlString(summary),
|
||||
SummaryPlain = postSummaryPlain,
|
||||
ContentPlain = postContentPlain
|
||||
};
|
||||
var canonicalUrl = $"https://blog.terrible.dev/{resolvedUrl}/";
|
||||
return postSettings.isLanding ? await BuildLandingPage(fileName, domain, markdownText, postSettings, resolvedUrl, canonicalUrl) : await BuildPost(fileName, domain, markdownText, postSettings, resolvedUrl, canonicalUrl);
|
||||
}
|
||||
|
||||
private async Task<Post> BuildPost(string fileName, string domain, string markdownText, PostSettings postSettings, string resolvedUrl, string canonicalUrl)
|
||||
{
|
||||
|
||||
(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, bool hasCode) = await ResolveContentForPost(markdownText, fileName, resolvedUrl, domain);
|
||||
var ld = new Schema.NET.BlogPosting()
|
||||
{
|
||||
Headline = postSettings.title,
|
||||
DatePublished = postSettings.date,
|
||||
DateModified = postSettings.updated ?? postSettings.date,
|
||||
WordCount = postContentPlain.Split(' ').Length,
|
||||
ArticleBody = new Schema.NET.OneOrMany<string>(new HtmlString(postContentPlain).Value),
|
||||
Author = new Schema.NET.Person() { Name = "Tommy Parnell", AdditionalName = "TerribleDev", Url = new Uri("https://blog.terrible.dev/about") },
|
||||
Url = new Uri(canonicalUrl)
|
||||
};
|
||||
var breadcrumb = new Schema.NET.BreadcrumbList()
|
||||
{
|
||||
ItemListElement = new List<IListItem>() // Required
|
||||
{
|
||||
new ListItem() // Required
|
||||
{
|
||||
Position = 1, // Required
|
||||
Url = new Uri("https://blog.terrible.dev/") // Required
|
||||
},
|
||||
new ListItem()
|
||||
{
|
||||
Position = 2,
|
||||
Name = postSettings.title,
|
||||
},
|
||||
},
|
||||
};
|
||||
// regex remove picture and source tags but not the child elements
|
||||
var postContentClean = Regex.Replace(postContent, "<picture.*?>|</picture>|<source.*?>|</source>", "", RegexOptions.Singleline);
|
||||
var content = new PostContent()
|
||||
{
|
||||
Content = new HtmlString(postContent),
|
||||
Images = postImages,
|
||||
ContentPlain = postContentPlain,
|
||||
Summary = new HtmlString(summary),
|
||||
SummaryPlain = postSummaryPlain,
|
||||
SummaryPlainShort = (postContentPlain.Length <= 147 ? postContentPlain : postContentPlain.Substring(0, 146)) + "...",
|
||||
JsonLD = ld,
|
||||
JsonLDString = ld.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
|
||||
JsonLDBreadcrumb = breadcrumb,
|
||||
JsonLDBreadcrumbString = breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
|
||||
HasCode = hasCode,
|
||||
MarkdownMD5 = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(markdownText)).ToHexString()
|
||||
};
|
||||
var thumbNailUrl = string.IsNullOrWhiteSpace(postSettings.thumbnailImage) ?
|
||||
postImages?.FirstOrDefault() ?? "https://www.gravatar.com/avatar/333e3cea32cd17ff2007d131df336061?s=640" :
|
||||
$"{canonicalUrl}/{postSettings.thumbnailImage}";
|
||||
return new Post()
|
||||
{
|
||||
PublishDate = postSettings.date.ToUniversalTime(),
|
||||
UpdatedDate = postSettings.updated?.ToUniversalTime() ?? null,
|
||||
tags = postSettings.tags?.Select(a => a.Replace(' ', '-').WithoutSpecialCharacters().ToLower()).ToList() ?? new List<string>(),
|
||||
Title = postSettings.title,
|
||||
RelativeUrl = $"/{resolvedUrl}/",
|
||||
CanonicalUrl = canonicalUrl,
|
||||
UrlWithoutPath = resolvedUrl,
|
||||
isLanding = postSettings.isLanding,
|
||||
Content = content,
|
||||
ThumbnailImage = thumbNailUrl,
|
||||
};
|
||||
}
|
||||
private async Task<LandingPage> BuildLandingPage(string fileName, string domain, string markdownText, PostSettings postSettings, string resolvedUrl, string canonicalUrl)
|
||||
{
|
||||
(string postContent, string postContentPlain, string summary, string postSummaryPlain, IList<string> postImages, bool hasCode) = await ResolveContentForPost(markdownText, fileName, resolvedUrl, domain);
|
||||
var breadcrumb = new Schema.NET.BreadcrumbList()
|
||||
{
|
||||
ItemListElement = new List<IListItem>() // Required
|
||||
{
|
||||
new ListItem() // Required
|
||||
{
|
||||
Position = 1, // Required
|
||||
Url = new Uri("https://blog.terrible.dev/") // Required
|
||||
},
|
||||
new ListItem()
|
||||
{
|
||||
Position = 2,
|
||||
Name = postSettings.title,
|
||||
},
|
||||
},
|
||||
};
|
||||
// regex remove picture and source tags but not the child elements
|
||||
var content = new PostContent()
|
||||
{
|
||||
Content = new HtmlString(postContent),
|
||||
Images = postImages,
|
||||
ContentPlain = postContentPlain,
|
||||
Summary = new HtmlString(summary),
|
||||
SummaryPlain = postSummaryPlain,
|
||||
SummaryPlainShort = (postContentPlain.Length <= 147 ? postContentPlain : postContentPlain.Substring(0, 146)) + "...",
|
||||
JsonLDBreadcrumb = breadcrumb,
|
||||
JsonLDBreadcrumbString = breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true"),
|
||||
HasCode = hasCode,
|
||||
MarkdownMD5 = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(markdownText)).ToHexString()
|
||||
};
|
||||
return new LandingPage()
|
||||
{
|
||||
PublishDate = postSettings.date.ToUniversalTime(),
|
||||
UpdatedDate = postSettings.updated?.ToUniversalTime() ?? null,
|
||||
Title = postSettings.title,
|
||||
RelativeUrl = $"/{resolvedUrl}/",
|
||||
CanonicalUrl = canonicalUrl,
|
||||
UrlWithoutPath = resolvedUrl,
|
||||
isLanding = postSettings.isLanding,
|
||||
Content = content,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
41
src/TerribleDev.Blog.Web/Factories/CodeFactory.cs
Normal file
41
src/TerribleDev.Blog.Web/Factories/CodeFactory.cs
Normal file
@@ -0,0 +1,41 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Factories
|
||||
{
|
||||
public class CodeFactory
|
||||
{
|
||||
private HttpClient httpClient = new HttpClient();
|
||||
private static Boolean IsDisabled = !String.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("DISABLE_PRISMA"));
|
||||
public async Task<(string result, bool hasCode)> ReplaceFencedCode(string markdown)
|
||||
{
|
||||
if(CodeFactory.IsDisabled)
|
||||
{
|
||||
return (markdown, false);
|
||||
}
|
||||
|
||||
// regex grab all text between backticks
|
||||
var regex = new Regex(@"```(.*?)```", RegexOptions.Singleline);
|
||||
var matches = regex.Matches(markdown);
|
||||
var result = await Task.WhenAll(matches.Select(async match =>
|
||||
{
|
||||
var code = match.Value;
|
||||
var codeContent = await httpClient.PostAsync("https://prismasaservice.azurewebsites.net/api/HttpTrigger", new StringContent(code));
|
||||
if(!codeContent.IsSuccessStatusCode)
|
||||
{
|
||||
Console.Error.WriteLine("Error posting code to prisma");
|
||||
}
|
||||
return (code, await codeContent.Content.ReadAsStringAsync());
|
||||
}));
|
||||
foreach(var (match, newValue) in result)
|
||||
{
|
||||
markdown = markdown.Replace(match, newValue);
|
||||
}
|
||||
return (markdown, matches.Count > 0);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
43
src/TerribleDev.Blog.Web/Filters/ETagFilter.cs
Normal file
43
src/TerribleDev.Blog.Web/Filters/ETagFilter.cs
Normal file
@@ -0,0 +1,43 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Mvc.Filters;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Filters
|
||||
{
|
||||
public class StaticETag: ActionFilterAttribute
|
||||
{
|
||||
static StaticETag()
|
||||
{
|
||||
string etagString;
|
||||
if(File.Exists("buildtime.txt"))
|
||||
{
|
||||
Console.WriteLine("buildtime.txt found");
|
||||
etagString = File.ReadAllText("buildtime.txt");
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine("buildtime.txt not found");
|
||||
Console.WriteLine("Directory list");
|
||||
Console.WriteLine(Directory.GetFiles(".", "*", SearchOption.AllDirectories).Aggregate((a, b) => a + "\n" + b));
|
||||
var unixTime = DateTimeOffset.Now.ToUnixTimeMilliseconds().ToString();
|
||||
Console.WriteLine("Using Unix Time for Etag: " + unixTime);
|
||||
etagString = unixTime;
|
||||
}
|
||||
StaticETag.staticEtag = "\"" + MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(etagString)).ToHexString().Substring(0,8) + "\"";
|
||||
}
|
||||
public static string staticEtag;
|
||||
public static ConcurrentDictionary<string, string> cache = new ConcurrentDictionary<string, string>();
|
||||
public override void OnActionExecuted(ActionExecutedContext context)
|
||||
{
|
||||
if(context.HttpContext.Response.StatusCode >= 200 && context.HttpContext.Response.StatusCode < 300 && context.HttpContext.Response.Headers.ETag.Count == 0)
|
||||
{
|
||||
context.HttpContext.Response.Headers.Add("ETag", staticEtag);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
44
src/TerribleDev.Blog.Web/Filters/Http2PushFilter.cs
Normal file
44
src/TerribleDev.Blog.Web/Filters/Http2PushFilter.cs
Normal file
@@ -0,0 +1,44 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
using Microsoft.AspNetCore.Mvc.Filters;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using TerribleDev.Blog.Web.Taghelpers;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Filters
|
||||
{
|
||||
public class Http2PushFilter : ActionFilterAttribute
|
||||
{
|
||||
private static bool IsHttp2PushDisabled = String.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("DISABLE_HTTP2_PUSH"));
|
||||
public override void OnActionExecuted(ActionExecutedContext context)
|
||||
{
|
||||
if(IsHttp2PushDisabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
var logger = context.HttpContext.RequestServices.GetService(typeof(ILogger<Http2PushFilter>)) as ILogger<Http2PushFilter>;
|
||||
logger.LogDebug("Http2PushFilter.OnActionExecuted");
|
||||
if(!context.HttpContext.Items.TryGetValue(HttpPush.Key, out var links))
|
||||
{
|
||||
logger.LogDebug("Did not find any links to push");
|
||||
return;
|
||||
}
|
||||
var linkData = links as System.Collections.Generic.List<PushUrl>;
|
||||
if(linkData == null || linkData.Count == 0) {
|
||||
logger.LogDebug("Http2PushFilter.OnActionExecuted: No links");
|
||||
return;
|
||||
}
|
||||
var headerBuilder = new StringBuilder();
|
||||
for(var i = 0; i < linkData.Count; i++) {
|
||||
var (url, AsProperty) = linkData[i];
|
||||
var resolvedUrl = url.StartsWith("~") ? context.HttpContext.Request.PathBase.ToString() + url.Substring(1) : url;
|
||||
headerBuilder.Append($"<{resolvedUrl}>; rel=preload; as={AsProperty}");
|
||||
if(i < linkData.Count - 1) {
|
||||
headerBuilder.Append(", ");
|
||||
}
|
||||
}
|
||||
logger.LogDebug("Http2PushFilter.OnActionExecuted: " + headerBuilder.ToString());
|
||||
context.HttpContext.Response.Headers.Add("Link", headerBuilder.ToString());
|
||||
base.OnActionExecuted(context);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,61 @@
|
||||
using System;
|
||||
using Markdig;
|
||||
using Markdig.Renderers;
|
||||
using Markdig.Renderers.Html.Inlines;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.MarkExtension
|
||||
{
|
||||
public class AbsoluteLinkConverter : IMarkdownExtension
|
||||
{
|
||||
public string BaseUrl { get; }
|
||||
public string Domain { get; }
|
||||
|
||||
public AbsoluteLinkConverter(string baseUrl, string domain)
|
||||
{
|
||||
BaseUrl = baseUrl;
|
||||
Domain = domain;
|
||||
}
|
||||
|
||||
public void Setup(MarkdownPipelineBuilder pipeline)
|
||||
{
|
||||
}
|
||||
|
||||
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
|
||||
{
|
||||
var htmlRenderer = renderer as HtmlRenderer;
|
||||
if (htmlRenderer != null)
|
||||
{
|
||||
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
|
||||
inlineRenderer.TryWriters.Add(TryLinkAbsoluteUrlWriter);
|
||||
}
|
||||
}
|
||||
private bool TryLinkAbsoluteUrlWriter(HtmlRenderer renderer, LinkInline linkInline)
|
||||
{
|
||||
var prevDynamic = linkInline.GetDynamicUrl;
|
||||
linkInline.GetDynamicUrl = () => {
|
||||
var escapeUrl = prevDynamic != null ? prevDynamic() ?? linkInline.Url : linkInline.Url;
|
||||
if(!System.Uri.TryCreate(escapeUrl, UriKind.RelativeOrAbsolute, out var parsedResult))
|
||||
{
|
||||
throw new Exception($"Error making link for {escapeUrl} @ {BaseUrl}");
|
||||
}
|
||||
if(parsedResult.IsAbsoluteUri)
|
||||
{
|
||||
return escapeUrl;
|
||||
}
|
||||
var uriBuilder = new UriBuilder(Domain);
|
||||
if(!escapeUrl.StartsWith("/"))
|
||||
{
|
||||
uriBuilder = uriBuilder.WithPathSegment($"/{BaseUrl}/{escapeUrl}");
|
||||
}
|
||||
else
|
||||
{
|
||||
uriBuilder = uriBuilder.WithPathSegment(parsedResult.ToString());
|
||||
}
|
||||
return uriBuilder.Uri.ToString();
|
||||
};
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
71
src/TerribleDev.Blog.Web/MarkExtension/ExternalLinkParser.cs
Normal file
71
src/TerribleDev.Blog.Web/MarkExtension/ExternalLinkParser.cs
Normal file
@@ -0,0 +1,71 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.MarkExtension
|
||||
{
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Markdig;
|
||||
using Markdig.Renderers;
|
||||
using Markdig.Renderers.Html;
|
||||
using Markdig.Renderers.Html.Inlines;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.ExternalLinkParser
|
||||
{
|
||||
/// <summary>
|
||||
/// Extension for extending image Markdown links in case a video or an audio file is linked and output proper link.
|
||||
/// </summary>
|
||||
/// <seealso cref="Markdig.IMarkdownExtension" />
|
||||
public class TargetLinkExtension : IMarkdownExtension
|
||||
{
|
||||
|
||||
public void Setup(MarkdownPipelineBuilder pipeline)
|
||||
{
|
||||
}
|
||||
|
||||
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
|
||||
{
|
||||
var htmlRenderer = renderer as HtmlRenderer;
|
||||
if (htmlRenderer != null)
|
||||
{
|
||||
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
|
||||
if (inlineRenderer != null)
|
||||
{
|
||||
inlineRenderer.TryWriters.Remove(TryLinkInlineRenderer);
|
||||
inlineRenderer.TryWriters.Add(TryLinkInlineRenderer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private bool TryLinkInlineRenderer(HtmlRenderer renderer, LinkInline linkInline)
|
||||
{
|
||||
if (linkInline.Url == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
Uri uri;
|
||||
// Only process absolute Uri
|
||||
if (!Uri.TryCreate(linkInline.Url, UriKind.RelativeOrAbsolute, out uri) || !uri.IsAbsoluteUri)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
RenderTargetAttribute(uri, renderer, linkInline);
|
||||
return false;
|
||||
}
|
||||
|
||||
private void RenderTargetAttribute(Uri uri, HtmlRenderer renderer, LinkInline linkInline)
|
||||
{
|
||||
|
||||
linkInline.SetAttributes(new HtmlAttributes() { Properties = new List<KeyValuePair<string, string>>() { new KeyValuePair<string, string>("target", "_blank"), new KeyValuePair<string, string>("rel", "noopener"), } });
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
57
src/TerribleDev.Blog.Web/MarkExtension/ImageRecorder.cs
Normal file
57
src/TerribleDev.Blog.Web/MarkExtension/ImageRecorder.cs
Normal file
@@ -0,0 +1,57 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.MarkExtension
|
||||
{
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Markdig;
|
||||
using Markdig.Renderers;
|
||||
using Markdig.Renderers.Html;
|
||||
using Markdig.Renderers.Html.Inlines;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.ExternalLinkParser
|
||||
{
|
||||
public class ImageRecorder : IMarkdownExtension
|
||||
{
|
||||
private List<string> images = null;
|
||||
public ImageRecorder(ref List<string> images)
|
||||
{
|
||||
this.images = images;
|
||||
}
|
||||
public void Setup(MarkdownPipelineBuilder pipeline)
|
||||
{
|
||||
}
|
||||
|
||||
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
|
||||
{
|
||||
var htmlRenderer = renderer as HtmlRenderer;
|
||||
if (htmlRenderer != null)
|
||||
{
|
||||
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
|
||||
if (inlineRenderer != null)
|
||||
{
|
||||
inlineRenderer.TryWriters.Add(TryLinkInlineRenderer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private bool TryLinkInlineRenderer(HtmlRenderer renderer, LinkInline linkInline)
|
||||
{
|
||||
if (linkInline.Url == null || !linkInline.IsImage)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
var url = linkInline.GetDynamicUrl != null ? linkInline.GetDynamicUrl(): linkInline.Url;
|
||||
this.images.Add(url);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
72
src/TerribleDev.Blog.Web/MarkExtension/PictureInline.cs
Normal file
72
src/TerribleDev.Blog.Web/MarkExtension/PictureInline.cs
Normal file
@@ -0,0 +1,72 @@
|
||||
using System;
|
||||
using Markdig;
|
||||
using Markdig.Renderers;
|
||||
using Markdig.Renderers.Html.Inlines;
|
||||
using Markdig.Syntax.Inlines;
|
||||
|
||||
namespace TerribleDev.Blog.Web.MarkExtension
|
||||
{
|
||||
public class PictureInline : IMarkdownExtension
|
||||
{
|
||||
public void Setup(MarkdownPipelineBuilder pipeline)
|
||||
{
|
||||
}
|
||||
|
||||
public void Setup(MarkdownPipeline pipeline, IMarkdownRenderer renderer)
|
||||
{
|
||||
var htmlRenderer = renderer as HtmlRenderer;
|
||||
if (htmlRenderer != null)
|
||||
{
|
||||
var inlineRenderer = htmlRenderer.ObjectRenderers.FindExact<LinkInlineRenderer>();
|
||||
inlineRenderer.TryWriters.Add(TryLinkInlineRenderer);
|
||||
}
|
||||
}
|
||||
private bool TryLinkInlineRenderer(HtmlRenderer renderer, LinkInline linkInline)
|
||||
{
|
||||
if (linkInline == null || !linkInline.IsImage)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if(linkInline.Url.EndsWith(".gif"))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
renderer.Write("<picture>");
|
||||
WriteImageTag(renderer, linkInline, ".webp", "image/webp");
|
||||
WriteImageTag(renderer, linkInline, string.Empty);
|
||||
renderer.Write("</picture>");
|
||||
return true;
|
||||
|
||||
}
|
||||
private void WriteImageTag(HtmlRenderer renderer, LinkInline link, string suffix, string type = null)
|
||||
{
|
||||
|
||||
|
||||
renderer.Write(string.IsNullOrWhiteSpace(type) ? $"<img loading=\"lazy\" src=\"" : $"<source type=\"{type}\" srcset=\"");
|
||||
var escapeUrl = link.GetDynamicUrl != null ? link.GetDynamicUrl() ?? link.Url : link.Url;
|
||||
|
||||
renderer.WriteEscapeUrl($"{escapeUrl}{suffix}");
|
||||
renderer.Write("\"");
|
||||
renderer.WriteAttributes(link);
|
||||
if (renderer.EnableHtmlForInline)
|
||||
{
|
||||
renderer.Write(" alt=\"");
|
||||
}
|
||||
var wasEnableHtmlForInline = renderer.EnableHtmlForInline;
|
||||
renderer.EnableHtmlForInline = false;
|
||||
renderer.WriteChildren(link);
|
||||
renderer.EnableHtmlForInline = wasEnableHtmlForInline;
|
||||
if (renderer.EnableHtmlForInline)
|
||||
{
|
||||
renderer.Write("\"");
|
||||
}
|
||||
|
||||
|
||||
if (renderer.EnableHtmlForInline)
|
||||
{
|
||||
renderer.Write(" />");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
8
src/TerribleDev.Blog.Web/Models/BlogConfiguration.cs
Normal file
8
src/TerribleDev.Blog.Web/Models/BlogConfiguration.cs
Normal file
@@ -0,0 +1,8 @@
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class BlogConfiguration
|
||||
{
|
||||
public string Title { get; set; }
|
||||
public string Link { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -2,12 +2,36 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Schema.NET;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class GetTagViewModel
|
||||
{
|
||||
public IEnumerable<IPost> Posts { get; set; }
|
||||
public string Title { get => $"Tag: {Tag}"; }
|
||||
public string Tag { get; set; }
|
||||
public string CanonicalUrl { get; set; }
|
||||
|
||||
public string ldJson ()
|
||||
{
|
||||
var breadcrumb = new Schema.NET.BreadcrumbList()
|
||||
{
|
||||
ItemListElement = new List<IListItem>() // Required
|
||||
{
|
||||
new ListItem() // Required
|
||||
{
|
||||
Position = 1, // Required
|
||||
Url = new Uri("https://blog.terrible.dev/") // Required
|
||||
},
|
||||
new ListItem()
|
||||
{
|
||||
Position = 2,
|
||||
Name = Tag,
|
||||
},
|
||||
},
|
||||
};
|
||||
return breadcrumb.ToHtmlEscapedString().Replace("https://schema.org", "https://schema.org/true");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,5 +10,10 @@ namespace TerribleDev.Blog.Web.Models
|
||||
public string PreviousUrl { get; set; }
|
||||
public bool HasNext { get; set; }
|
||||
public bool HasPrevious { get; set; }
|
||||
|
||||
public Schema.NET.Blog BlogLD { get; set; }
|
||||
public Schema.NET.WebSite SiteLD { get; set; }
|
||||
public string BlogLDString { get; set; }
|
||||
public string SiteLDString { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -9,13 +9,15 @@ namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public interface IPost
|
||||
{
|
||||
string Url { get; set; }
|
||||
string CanonicalUrl { get; set; }
|
||||
string UrlWithoutPath { get; set; }
|
||||
string RelativeUrl { get; set; }
|
||||
string Title { get; set; }
|
||||
HtmlString Summary { get; set; }
|
||||
DateTime PublishDate { get; set; }
|
||||
HtmlString Content { get; set; }
|
||||
string ContentPlain { get; set; }
|
||||
string SummaryPlain { get; set; }
|
||||
IList<string> tags { get; set; }
|
||||
DateTime? UpdatedDate { get; set; }
|
||||
IPostContent Content { get; set; }
|
||||
bool isLanding { get; set; }
|
||||
string ThumbnailImage { get; }
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
26
src/TerribleDev.Blog.Web/Models/IPostContent.cs
Normal file
26
src/TerribleDev.Blog.Web/Models/IPostContent.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using Schema.NET;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public interface IPostContent
|
||||
{
|
||||
HtmlString Content { get; set; }
|
||||
HtmlString Summary { get; set; }
|
||||
string ContentPlain { get; set; }
|
||||
string SummaryPlain { get; set; }
|
||||
string SummaryPlainShort { get; set; }
|
||||
IList<string> Images { get; set; }
|
||||
|
||||
BlogPosting JsonLD { get; set; }
|
||||
|
||||
bool HasCode { get; set; }
|
||||
|
||||
public string JsonLDString { get; set; }
|
||||
BreadcrumbList JsonLDBreadcrumb { get; set; }
|
||||
string JsonLDBreadcrumbString { get; set; }
|
||||
string MarkdownMD5 { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -12,5 +12,6 @@ namespace TerribleDev.Blog.Web.Models
|
||||
string thumbnailImage { get; set; }
|
||||
DateTimeOffset date { get; set; }
|
||||
DateTimeOffset updated { get; set; }
|
||||
bool isLanding { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
23
src/TerribleDev.Blog.Web/Models/LandingPage.cs
Normal file
23
src/TerribleDev.Blog.Web/Models/LandingPage.cs
Normal file
@@ -0,0 +1,23 @@
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
[DebuggerDisplay("{Title}")]
|
||||
public class LandingPage : IPost
|
||||
{
|
||||
public string CanonicalUrl { get; set; }
|
||||
public string UrlWithoutPath { get; set; }
|
||||
public string RelativeUrl { get; set; }
|
||||
public string Title { get; set; }
|
||||
public DateTime PublishDate { get; set; }
|
||||
public DateTime? UpdatedDate { get; set; }
|
||||
public IPostContent Content { get; set; }
|
||||
|
||||
public bool isLanding { get; set; } = false;
|
||||
public string ThumbnailImage { get => "https://www.gravatar.com/avatar/333e3cea32cd17ff2007d131df336061?s=640"; }
|
||||
}
|
||||
}
|
||||
@@ -1,18 +1,25 @@
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
[DebuggerDisplay("{Title}")]
|
||||
public class Post : IPost
|
||||
{
|
||||
public string Url { get; set; }
|
||||
public string CanonicalUrl { get; set; }
|
||||
public string UrlWithoutPath { get; set; }
|
||||
public string RelativeUrl { get; set; }
|
||||
public string Title { get; set; }
|
||||
public DateTime PublishDate { get; set; }
|
||||
public HtmlString Content { get; set; }
|
||||
public HtmlString Summary { get; set; }
|
||||
public string ContentPlain { get; set; }
|
||||
public string SummaryPlain { get; set; }
|
||||
public DateTime? UpdatedDate { get; set; }
|
||||
public IList<string> tags { get; set; }
|
||||
public IPostContent Content { get; set; }
|
||||
|
||||
public bool isLanding { get; set; } = false;
|
||||
|
||||
public string ThumbnailImage { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
23
src/TerribleDev.Blog.Web/Models/PostCache.cs
Normal file
23
src/TerribleDev.Blog.Web/Models/PostCache.cs
Normal file
@@ -0,0 +1,23 @@
|
||||
using System.Collections.Generic;
|
||||
using Microsoft.SyndicationFeed;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class PostCache
|
||||
{
|
||||
public IList<IPost> PostsAsLists { get; set;}
|
||||
public IDictionary<string, IList<Post>> TagsToPosts { get; set; }
|
||||
public IDictionary<string, IPost> UrlToPost { get; set; }
|
||||
public IDictionary<string, IPost> CaseInsensitiveUrlToPost { get; set; }
|
||||
public IDictionary<int, IList<Post>> PostsByPage { get; set; }
|
||||
public IList<SyndicationItem> PostsAsSyndication { get; set; }
|
||||
|
||||
public Schema.NET.Blog BlogLD { get; set; }
|
||||
public Schema.NET.WebSite SiteLD { get; set; }
|
||||
public string BlogLDString { get; set; }
|
||||
public string SiteLDString { get; set; }
|
||||
|
||||
public Dictionary<string, LandingPage> LandingPagesUrl { get; set; }
|
||||
|
||||
}
|
||||
}
|
||||
20
src/TerribleDev.Blog.Web/Models/PostComparer.cs
Normal file
20
src/TerribleDev.Blog.Web/Models/PostComparer.cs
Normal file
@@ -0,0 +1,20 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class PostComparer
|
||||
{
|
||||
public static PostComparisonByDateInternal PostComparisonByDate = new PostComparisonByDateInternal();
|
||||
|
||||
public class PostComparisonByDateInternal : IComparer<IPost>
|
||||
{
|
||||
public int Compare([AllowNull] IPost x, [AllowNull] IPost y)
|
||||
{
|
||||
return DateTime.Compare(x.PublishDate, y.PublishDate);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
24
src/TerribleDev.Blog.Web/Models/PostContent.cs
Normal file
24
src/TerribleDev.Blog.Web/Models/PostContent.cs
Normal file
@@ -0,0 +1,24 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using Microsoft.AspNetCore.Html;
|
||||
using Schema.NET;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
|
||||
public class PostContent : IPostContent
|
||||
{
|
||||
public HtmlString Content { get; set; }
|
||||
public HtmlString Summary { get; set; }
|
||||
public string ContentPlain { get; set; }
|
||||
public string SummaryPlain { get; set; }
|
||||
public string SummaryPlainShort { get; set; }
|
||||
public IList<string> Images { get; set; }
|
||||
public BlogPosting JsonLD { get; set; }
|
||||
public string JsonLDString { get; set; }
|
||||
public BreadcrumbList JsonLDBreadcrumb { get; set; }
|
||||
public string JsonLDBreadcrumbString { get; set; }
|
||||
public bool HasCode { get; set; }
|
||||
public string MarkdownMD5 { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -9,11 +9,14 @@ namespace TerribleDev.Blog.Web.Models
|
||||
public string title { get; set; }
|
||||
public string permalink { get; set; }
|
||||
public DateTime date { get; set; }
|
||||
public DateTime updated { get; set; }
|
||||
public DateTime? updated { get; set; }
|
||||
public string id { get; set; }
|
||||
public string thumbnail_image { get; set; }
|
||||
public string thumbnailImage { get; set; }
|
||||
public string thumbnail_image_position { get; set; }
|
||||
public string layout { get; set; }
|
||||
|
||||
public bool isLanding { get; set; } = false;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
7
src/TerribleDev.Blog.Web/Models/PostViewModel.cs
Normal file
7
src/TerribleDev.Blog.Web/Models/PostViewModel.cs
Normal file
@@ -0,0 +1,7 @@
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class PostViewModel
|
||||
{
|
||||
public IPost Post { get; set; }
|
||||
}
|
||||
}
|
||||
12
src/TerribleDev.Blog.Web/Models/SearchViewModel.cs
Normal file
12
src/TerribleDev.Blog.Web/Models/SearchViewModel.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
public class SearchViewModel
|
||||
{
|
||||
public string SearchTerm { get; set; }
|
||||
public IList<IPost> Posts { get; set; }
|
||||
|
||||
}
|
||||
}
|
||||
@@ -6,7 +6,7 @@ using System.Xml.Serialization;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Models
|
||||
{
|
||||
[XmlRoot("urlset")]
|
||||
[XmlRoot("urlset", Namespace="http://www.sitemaps.org/schemas/sitemap/0.9")]
|
||||
public class SiteMapRoot
|
||||
{
|
||||
[XmlElement("url")]
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
title: 5 web perf tips for 2019
|
||||
date: 2019-02-23 01:32
|
||||
tags:
|
||||
- web
|
||||
- performance
|
||||
- javascript
|
||||
- battle of the bulge
|
||||
---
|
||||
|
||||
As more and more of the world is getting online, a larger part of the internet community is using the internet on lower powered devices. Making websites fast is becoming paramount. Here are 5 tips to improving your web page's performance.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Brotli and gzip
|
||||
|
||||
So in case you didn't know, when your browser makes a request to the server it sends along a header called `Accept-Encoding`. This is a comma-separated list of compression types your server can use to compress the data to the user. The common ones in the past have been `gzip` and `deflate`. [Brotli](https://en.wikipedia.org/wiki/Brotli) is a compression
|
||||
algorithm invented by Google to be more efficient for the web. This has about a 35% effectiveness over gzip based on my own testing. This means your content will be almost 1/3rd smaller over the wire. Most browsers [support this already](https://caniuse.com/#feat=brotli). You can use cloudflare to serve Brotli (br) to your users, and most web servers support this today. Make sure your server is serving br, and at minimum gzip.
|
||||
|
||||
|
||||
## Webp, JPEG 2000
|
||||
|
||||
Images are among one of the largest types of files on the internet today, and picking the right file type is as important as getting your data structures right. In the past we told everyone to keep photography in `jpeg`, logos and screen shots in `png`. However google has come out with a new file format. One that is massively smaller than either `jpeg` or `png`, and that is `webp`. Webp is only supported on [chrome, edge and firefox](https://caniuse.com/#search=webp), but don't worry for IOS Safari you can use `JPEG 2000`. Sizing images is also a key concern, you can use srcset to size images appropriately, and you can use the picture element to select the right image given browser support.
|
||||
|
||||
```html
|
||||
|
||||
<picture>
|
||||
<source type="image/webp" srcset="3.webp" alt="an image showing the tiny png results">
|
||||
<source type="image/jp2" srcset="3.jp2" alt="an image showing the tiny png results">
|
||||
<img src="3.png" alt="an image showing the tiny png results">
|
||||
</picture>
|
||||
|
||||
```
|
||||
|
||||
|
||||
## Lighthouse
|
||||
|
||||
Ok so this is less of a trick to implement and more of a tool to use. Man I keep mentioning google, but they keep making amazing web stuff so here we are. Google has made this awesome performance tool called [lighthouse](https://developers.google.com/web/tools/lighthouse/). A version of this tool is built into chrome. Open the developer tools, and click the `audits` tab. That tool is lighthouse. You can install newer versions with `npm install -g lighthouse` or `yarn global add lighthouse`. Then just run `lighthouse --view <url>` so this blog would be `lighthouse --view https://blog.terrible.dev`. You should be hit with a pretty in depth report as to how you can fix and improve your web pages. You can also have your CI system run lighthouse on every build. You can fail PR's if they reduce performance, or just track your accessibility over time.
|
||||
|
||||
## HTTP/2
|
||||
|
||||
HTTP version 2 is a newer version of the http spec. Supported [by all major browsers](https://caniuse.com/#feat=http2) this protocol offers compression of http headers, a [push feature](https://en.wikipedia.org/wiki/HTTP/2_Server_Push) that lets you push files down to the browser before they are requested, [http pipelining](https://en.wikipedia.org/wiki/HTTP_pipelining), and multiplexing multiple requests over a single TCP connection. You can easily get http2 working if you let [cloudflare](https://www.cloudflare.com/) front your http traffic, but you will still want to implement http2 in your server eventually.
|
||||
|
||||
|
||||
## Service workers
|
||||
|
||||
My last and probably favorite feature. [Service Workers](https://developers.google.com/web/fundamentals/primers/service-workers/) are a worker that can stand in between your server and web page in the browser. They are mostly a proxy that let you do things like cache your content, and support offline capabilities. They are easy to implement, you need to have a `manifest.json` file which you can generate from Microsoft's [PWA Builder](https://www.pwabuilder.com/), and just serve traffic over https only. PWA Builder even has [pre-made service workers](https://www.pwabuilder.com/serviceworker) for most scenarios so you don't even need to write your own. I use this for my blog to cache static content, preload blog posts, and provide offline support.
|
||||
@@ -0,0 +1,103 @@
|
||||
title: Accessibility Driven Development
|
||||
date: 2020-08-07 05:27:00
|
||||
tags:
|
||||
- a11y
|
||||
- accessibility
|
||||
---
|
||||
|
||||
|
||||
I've been working at [CarGurus.com](https://www.cargurus.com) for the last 2 years or so. One of the biggest journeys we've been undertaking is to take accessibility far more seriously. However with an engineering team way into the triple digits it gets harder and harder to scale accessibility knowledge.
|
||||
<!-- more -->
|
||||
Knowledge gaps aside, CarGurus has a multitude of technologies UIs are built with. The two major ones are [Freemarker](https://freemarker.apache.org/) and [React](https://reactjs.org/). I manage one of our infrastructure teams, we build the tools and technologies to create the site with. This includes our component library, our build systems, linting tools, authentication systems, and core utilities for product development. When we first started really taking accessibility seriously we went to several teams in the business. Many of them did not have anyone with accessibility expertise.
|
||||
|
||||
> Our first approach was to teach accessibility. At the same time we worked with our brand marketing team to ensure our color palette would be accessible from the start.
|
||||
|
||||
|
||||
After identifying advocates on every team we set out to streamline identifying accessibility issues. One approach I decided to take was to show borders around failing elements during development. I first heard of this idea years ago when GitHub released something it called [accessibilityjs](https://github.com/github/accessibilityjs). This script Github included in its pages and put a giant ugly red border around failing elements. I thought this was a really slick idea to point out issues during development.
|
||||
|
||||
> I was going to use accessibility JS until I found axe-core
|
||||
|
||||
So [axe](https://www.deque.com/axe/) is a technology built by deque to identify accessibility issues. This is a highly configurable piece of technology that includes libraries for developers, browser extensions, and bots you can scan sites with. Deque has open sourced the core technology of axe which is a JavaScript called [axe-core](https://github.com/dequelabs/axe-core).
|
||||
|
||||
> I first started out by writing a script to use axe-core and to add a 10px red border around elements, but I quickly ran into trouble
|
||||
|
||||
First problem, I need to re-run axe every time the browser changes. If we click to open a nav-bar we'll need to rescan the page. Second problem, every-time we change the DOM the script would crash react apps, and finally axe-core is quite slow on large HTML documents.
|
||||
|
||||
## Mutation Observers
|
||||
|
||||
So the first problem was easily solvable. The browser has an API called [Mutation Observer](https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver). This is an API that lets you listen to changes to certain elements and fire a function when those elements change. In our case we wanted to listen to any changes to the `<body>` tag and all of its descendants.
|
||||
|
||||
```js
|
||||
function scanForAccesibilityIssues() { /* scan for issues */}
|
||||
const observer = new MutationObserver(scanForAccesibilityIssues);
|
||||
observer.observe(document.querySelector('body'), { childList: true, subtree: true });
|
||||
```
|
||||
|
||||
## Shadow DOM
|
||||
|
||||
Several UI frameworks such as React keep an in memory representation of the HTML document. The reason for this is when you want to change the UI in React. React will diff its current in-memory DOM with the next DOM and determine the most efficient way to actually apply the changes to the browser. Any application such as a browser extension, or our accessibility detector that edits the DOM outside of React's in-memory DOM will cause React to freak out and either crash or apply a change in an unexpected way. Luckily in recent years browsers have added a [Shadow DOM](https://developer.mozilla.org/en-US/docs/Web/Web_Components/Using_shadow_DOM). This is essentially a DOM that is used to apply visual changes to a user, but sits outside the light DOM (or the regular DOM). However, not all HTML elements support The Shadow DOM. For us to apply the red border we need to use the shadow DOM, and if any elements do not support shadow then we have to apply the border to the parent element. I wrote a [recursive function](https://en.wikipedia.org/wiki/Recursion_(computer_science)#Tail-recursive_functions) called `resolveClosestShadowRoot` which will walk up the DOM document and find the closest parent a target element has that supports shadow. You can tell if a node supports shadow because it will have a `.attachShadow` method. So we can simply access this variable and see if it's defined or not.
|
||||
|
||||
|
||||
```js
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {HTMLElement} node
|
||||
* @returns
|
||||
*/
|
||||
function resolveClosestShadowRoot(node) {
|
||||
if (!node) {
|
||||
return null;
|
||||
}
|
||||
if (node.attachShadow) {
|
||||
return node;
|
||||
}
|
||||
return resolveClosestShadowRoot(node.parentElement);
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
After we identify which element to style we just have to apply the border. The code below is doing that by calling the attach shadow function and setting its innerHTML.
|
||||
|
||||
```js
|
||||
const resolvedNode = resolveClosestShadowRoot(node);
|
||||
const shadowRoot = resolvedNode.attachShadow({ mode: 'open' });
|
||||
shadowRoot.innerHTML = '<style>:host { outline: red solid 1rem; }</style><slot></slot>';
|
||||
```
|
||||
|
||||
The `<slot></slot>` element is rendering the content of the light DOM. We still have to show the existing content, and the `:host` pseudo-class selector is selecting the host of the shadow DOM.
|
||||
|
||||
## Debounce 🎉
|
||||
|
||||
In web development we often use what's known as a "debounce" to delay doing something. The simple example is sometimes people click on a button multiple times, often on accident, sometimes intentionally. Before taking any action or taking multiple actions you might wait a moment before they stop clicking to do something. You wouldn't want to take the same action multiple times for each click. This is where debounce comes into play.
|
||||
|
||||
```js
|
||||
|
||||
function debounce(fn, wait) {
|
||||
let timeout = null;
|
||||
return function (...args) {
|
||||
const next = () => fn.apply(this, args);
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(next, wait);
|
||||
};
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
A debounce function accepts a function and a "wait time" or delay before being called to actually executing your function. To debounce a buttons onclick function you would pass its standard onclick function into the debounce function
|
||||
|
||||
```js
|
||||
const onclick = () => { };
|
||||
const debouncedClick = debounce(onclick, 500); // 500 milliseconds before the function is actually fired
|
||||
```
|
||||
|
||||
```html
|
||||
<button onclick="debouncedClick()" ></button>
|
||||
```
|
||||
## The result
|
||||
|
||||
So the result of all this is a function that listens to changes in the HTML document, waits 1 second for all the changes to finish applying, then scans the page for failing elements and uses The Shadow DOM to apply a red border around those elements. You can see a basic version of the code at [this Github Gist](https://gist.github.com/TerribleDev/51049146e00b36b0d8643f5e09d21ea8).
|
||||
|
||||
We log the Deque error object to the console which includes links to the failing elements. The result is whenever anyone develops new UI at CarGurus a giant ugly red border surrounds elements they don't write as accessible. This provides **immediate** feedback during the development process and prevents huge categories of accessibility issues from reaching production.
|
||||
|
||||

|
||||
@@ -3,6 +3,7 @@ permalink: anti-forgery-tokens-in-nancyfx-with-razor
|
||||
id: 33
|
||||
updated: '2014-06-11 20:00:34'
|
||||
date: 2014-06-11 19:34:13
|
||||
tags:
|
||||
---
|
||||
|
||||
Getting started with anti-forgery tokens in NancyFX with razor views is pretty simple.
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
title: "Building a remote cache server for Turborepo"
|
||||
date: 2022-02-12 09:52
|
||||
tags:
|
||||
- Go
|
||||
- Javascript
|
||||
- Turborepo
|
||||
- Devops
|
||||
- Build
|
||||
- node.js
|
||||
---
|
||||
|
||||
|
||||
[Turborepo](https://turborepo.org/) is a tool that came across my virtual desk recently. Monorepo development has been around for a long time. This is a strategy where all of your code remains in one repository regardless of services. A lot of people use monorepos even for microservices. The huge upside is to keep everything in one place, which allows for development efficiency, such as grepping an entire codebase for specific keywords. A quick example would be a top level directory which has child directories that each contain an npm package, unlike publishing these packages, you access them locally as though they were published.
|
||||
<!-- more -->
|
||||
|
||||
There are many tools in the Javascript ecosystem to manage monorepos. [Yarn](https://classic.yarnpkg.com/lang/en/docs/cli/workspaces/), and [npm](https://docs.npmjs.com/cli/v7/using-npm/workspaces) both have their own workspaces. [Lerna](https://lerna.js.org/) is a tool that people use to run commands across these packages. I've been a huge fan of monorepos for years. One of the big problems with this setup is build times. At [Quala](https://www.quala.io) we have around 38 packages, and some of my previous employers have had over 100. When you have these large repos sometimes you can make a change in a single package, but when you run `build` you have to wait to build the entire repository which can take a long time.
|
||||
|
||||
[Turborepo](https://turborepo.org/), however caches the build output of packages, so when you change a package it will get cache hits on particular packages, and thus you only build the changes you make. This is not a new idea. Years ago, google built [bazel](https://bazel.build/), A lot of people in C++ land have had remote builds. With Turborepo it seems the only official way to have remote caches is to use Vercel, or host your own server. For many reasons at [Quala](https://www.quala.io) I decided to opt for hosting our own server.
|
||||
|
||||
So to add turborepo to your monorepo, you need to add some [simple config](https://turborepo.org/docs/features/caching) to the root of your workspace, and your root `package.json` needs to replace its build command with `turborepo build`. In the case of remote caches you need to add
|
||||
|
||||
`--api="https://yourCacheServer.dev" --token="token" --team="team"`
|
||||
|
||||
Notice, the api flag does not contain a `/` at the end. Now according to the docs you don't need to pass a team, but I was unable to get the caches to register without it 🤷♀️
|
||||
|
||||
## The API
|
||||
|
||||
According to the [docs](https://turborepo.org/docs/features/remote-caching)
|
||||
|
||||
> You can self-host your own Remote Cache or use other remote caching service providers as long as they comply with Turborepo's Remote Caching Server API. I opted to write the server in go, and [I used Go Fiber](https://github.com/gofiber/fiber). At first I figured I could copy their structs to my project but honestly the API is so simple, there is no advantage to this.
|
||||
|
||||
To get a list of the API's you need, you are linked to some [code written in Go](https://github.com/vercel/turborepo/blob/main/cli/internal/client/client.go). I reverse engineered this code a bit, and came up with 4 APIs, and an AUTH token
|
||||
|
||||
```
|
||||
Authorization: Bearer ${token}
|
||||
PUT: /v8/artifacts/:hash
|
||||
GET: /v8/artifacts/:hash
|
||||
GET: /v2/user
|
||||
GET: /v2/teams
|
||||
```
|
||||
|
||||
### Authorization
|
||||
|
||||
When turborepo sends requests it appends the `Authorization` header which will contain our token. Ideally you would add to your server a way to auth a user and give them this token. In the below example we have a single token that comes from an environment variable. You really should have per user auth.
|
||||
|
||||
|
||||
```go
|
||||
app.Use(func(c *fiber.Ctx) error {
|
||||
authHeader := c.Get("Authorization")
|
||||
if authHeader != "Bearer "+token {
|
||||
c.Status(401).SendString("Unauthorized")
|
||||
return nil
|
||||
}
|
||||
return c.Next()
|
||||
})
|
||||
```
|
||||
|
||||
### Handling Requests
|
||||
|
||||
The API pretty much breaks down like this.
|
||||
|
||||
`PUT: /v8/artifacts/:hash` will send a file that you must write somewhere. Some people opt for sending it to S3, I decided to use a persistent disk, and save on the disk. I wanted the fastest responses for the caches. Heck if I'm going to remote cache something that would still be kinda quick on an M1, it better perform.
|
||||
|
||||
```go
|
||||
app.Put("/v8/artifacts/:hash", func(c *fiber.Ctx) error {
|
||||
fmt.Println(string(c.Request().URI().QueryString()))
|
||||
return os.WriteFile("./cache/"+c.Params("hash"), c.Request().Body(), 0644)
|
||||
})
|
||||
```
|
||||
|
||||
The same URL but on a get is simple. Retrieve a file and serve it up, or return a 404
|
||||
|
||||
```go
|
||||
app.Get("/v8/artifacts/:hash", func(c *fiber.Ctx) error {
|
||||
fmt.Println(string(c.Request().URI().QueryString()))
|
||||
return c.SendFile("./cache/" + c.Params("hash"))
|
||||
})
|
||||
```
|
||||
|
||||
The last two, honestly, you don't need to make things work. You can just return a 200
|
||||
|
||||
```go
|
||||
app.Get("/v2/teams", func(c *fiber.Ctx) error {
|
||||
return c.SendStatus(fiber.StatusOK)
|
||||
})
|
||||
|
||||
app.Get("/v2/user", func(c *fiber.Ctx) error {
|
||||
return c.SendStatus(fiber.StatusOK)
|
||||
})
|
||||
```
|
||||
|
||||
The `/v2/user` API is supposed to return information about the current user in the following shape. I'm pretty sure (not positive) created at is an [epoch](https://en.wikipedia.org/wiki/Unix_time) of the time the user was created. I'm guessing its largely used for Vercel.
|
||||
|
||||
```
|
||||
{
|
||||
ID string
|
||||
Username string
|
||||
Email string
|
||||
Name string
|
||||
CreatedAt int
|
||||
}
|
||||
```
|
||||
|
||||
The team api is supposed to look something like the following.
|
||||
|
||||
```
|
||||
{
|
||||
Pagination {
|
||||
Count: int,
|
||||
Next: int,
|
||||
Prev: int
|
||||
}
|
||||
Teams [
|
||||
Team {
|
||||
ID: string,
|
||||
Slug: string,
|
||||
Name: string,
|
||||
CreatedAt: int,
|
||||
Created: string
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
> What about the --team flag?
|
||||
|
||||
So when requests are made with `--team` a query string `?slug=team` is added to the request. You can use this to ensure a particular user is in the given team, and you can fragment your caches by team. I omitted that code from the above example, but the easiest way would be to have `./cache/${team}/${hash}` directory structure for the caches on disk. Note, on the GET requests you should auth the token against the team ID, and return a 404 if the user is not in the team. **I would not opt to return an Unauthorized header**, as that can be used by bad actors to cycle through tokens to know which one will work to cause harm.
|
||||
|
||||
## The Result
|
||||
|
||||
An extremely minimal server [is in this github repo](https://github.com/TerribleDev/turbogo) (although you shouldn't probably use it without building it out more).
|
||||
@@ -0,0 +1,219 @@
|
||||
title: Building attractive CLIs in TypeScript
|
||||
date: 2022-07-08 05:18
|
||||
tags:
|
||||
- javascript
|
||||
- typescript
|
||||
- node
|
||||
- cli
|
||||
- tutorials
|
||||
---
|
||||
|
||||
So you've come to a point where you want to build nice CLIs. There's a few different options for building CLI's. My two favorites are [oclif](https://oclif.io/) and [commander.js](https://github.com/tj/commander.js/). I tend toward leaning to commander, unless I know I'm building a super big app. However, I've really enjoyed building smaller CLIs with commander recently.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
> tl;dr? You can [view this repo](https://github.com/TerribleDev/example-ts-cli)
|
||||
|
||||

|
||||
|
||||
## Commander.js Lingo
|
||||
|
||||
So commander has a few different nouns.
|
||||
|
||||
* `Program` - The root of the CLI. Handles running the core app.
|
||||
* `Command` - A command that can be run. These must be registered into `Program`
|
||||
* `Option` - I would also call these `flags` they're the `--something` part of the CLI.
|
||||
* `Arguments` - These are named positioned arguments. For example `npm install commander` the `commander` string in this case is an argument. `--save` would be an option.
|
||||
|
||||
|
||||
|
||||
## Initial Setup
|
||||
|
||||
First, do an npm init, and install commander, types for node, typescript, esbuild, and optionally ora.
|
||||
|
||||
```bash
|
||||
npm init -y
|
||||
npm install --save commander typescript @types/node ora
|
||||
```
|
||||
|
||||
Next we have to configure a build command in the package.json. This one runs typescript to check for types and then esbuild to compile the app for node.
|
||||
|
||||
```json
|
||||
"scripts": {
|
||||
"build": "tsc --noEmit ./index.ts && esbuild index.ts --bundle --platform=node --format=cjs --outfile=dist/index.js",
|
||||
}
|
||||
```
|
||||
|
||||
We now need to add a bin property in the package.json. This tells the package manager that we have an executable. The key should be the name of your CLI
|
||||
|
||||
```json
|
||||
"bin": {
|
||||
"<yourclinamehere>": "./dist/index.js"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
Make a file called index.ts, and place this string on the first line. This is called a shebang and it tells your shell to use node when the file is run.
|
||||
|
||||
`#!/usr/bin/env node`
|
||||
|
||||
## Getting started
|
||||
|
||||
Hopefully you have done the above. Now in index.ts you can make a very basic program. Try npm build and then run the CLI with --help. Hopefully you'll get some output.
|
||||
|
||||
```ts
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Command } from 'commander'
|
||||
import { spinnerError, stopSpinner } from './spinner';
|
||||
const program = new Command();
|
||||
program.description('Our New CLI');
|
||||
program.version('0.0.1');
|
||||
|
||||
async function main() {
|
||||
await program.parseAsync();
|
||||
|
||||
}
|
||||
console.log() // log a new line so there is a nice space
|
||||
main();
|
||||
```
|
||||
|
||||
### Setting up the spinner
|
||||
|
||||
So, I really like loading spinners. I think it gives the CLI a more polished feel. So I added a spinner using ora. I made a file called `spinner.ts` which is a wrapper to handle states of spinning or stopped.
|
||||
|
||||
```ts
|
||||
import ora from 'ora';
|
||||
|
||||
const spinner = ora({ // make a singleton so we don't ever have 2 spinners
|
||||
spinner: 'dots',
|
||||
})
|
||||
|
||||
export const updateSpinnerText = (message: string) => {
|
||||
if(spinner.isSpinning) {
|
||||
spinner.text = message
|
||||
return;
|
||||
}
|
||||
spinner.start(message)
|
||||
}
|
||||
|
||||
export const stopSpinner = () => {
|
||||
if(spinner.isSpinning) {
|
||||
spinner.stop()
|
||||
}
|
||||
}
|
||||
export const spinnerError = (message?: string) => {
|
||||
if(spinner.isSpinning) {
|
||||
spinner.fail(message)
|
||||
}
|
||||
}
|
||||
export const spinnerSuccess = (message?: string) => {
|
||||
if(spinner.isSpinning) {
|
||||
spinner.succeed(message)
|
||||
}
|
||||
}
|
||||
export const spinnerInfo = (message: string) => {
|
||||
spinner.info(message)
|
||||
}
|
||||
```
|
||||
|
||||
### Writing a command
|
||||
|
||||
So I like to separate my commands out into sub-commands. In this case we're making `widgets` a sub-command. Make a new file, I call it widgets.ts. I create a new `Command` called `widgets`. Commands can have commands making them sub-commands. So we can make a sub-command called `list` and `get`. **List** will list all the widgets we have, and **get** will retrieve a widget by id. I added a promise to emulate some delay so we can see the spinner in action.
|
||||
|
||||
|
||||
```ts
|
||||
import { Command } from "commander";
|
||||
import { spinnerError, spinnerInfo, spinnerSuccess, updateSpinnerText } from "./spinner";
|
||||
|
||||
export const widgets = new Command("widgets");
|
||||
|
||||
widgets.command("list").action(async () => {
|
||||
updateSpinnerText("Processing ");
|
||||
// do work
|
||||
await new Promise(resolve => setTimeout(resolve, 1000)); // emulate work
|
||||
spinnerSuccess()
|
||||
console.table([{ id: 1, name: "Tommy" }, { id: 2, name: "Bob" }]);
|
||||
})
|
||||
|
||||
widgets.command("get")
|
||||
.argument("<id>", "the id of the widget")
|
||||
.option("-f, --format <format>", "the format of the widget") // an optional flag, this will be in options.f
|
||||
.action(async (id, options) => {
|
||||
updateSpinnerText("Getting widget " + id);
|
||||
await new Promise(resolve => setTimeout(resolve, 3000));
|
||||
spinnerSuccess()
|
||||
console.table({ id: 1, name: "Tommy" })
|
||||
})
|
||||
|
||||
```
|
||||
|
||||
Now lets register this command into our program. (see the last line)
|
||||
|
||||
```ts
|
||||
#!/usr/bin/env node
|
||||
import { Command } from 'commander'
|
||||
import { spinnerError, stopSpinner } from './spinner';
|
||||
import { widgets } from './widgets';
|
||||
const program = new Command();
|
||||
program.description('Our New CLI');
|
||||
program.version('0.0.1');
|
||||
program.addCommand(widgets);
|
||||
```
|
||||
|
||||
|
||||
Do a build! Hopefully you can type `<yourcli> widgets list` and you'll see the spinner. When you call `spinnerSuccess` without any parameters the previous spinner text will stop and become a green check. You can pass a message instead to print that to the console. You can also call `spinnerError` to make the spinner a red `x` and print the message.
|
||||
|
||||
|
||||
### Handle unhandled errors
|
||||
|
||||
Back in index.ts we need to add a hook to capture unhandled errors. Add a verbose flag to the program so we can see more details about the error, but by default lets hide the errors.
|
||||
|
||||
```ts
|
||||
const program = new Command('Our New CLI');
|
||||
program.option('-v, --verbose', 'verbose logging');
|
||||
```
|
||||
|
||||
Now we need to listen for the node unhandled promise rejection event and process it.
|
||||
|
||||
|
||||
```ts
|
||||
process.on('unhandledRejection', function (err: Error) { // listen for unhandled promise rejections
|
||||
const debug = program.opts().verbose; // is the --verbose flag set?
|
||||
if(debug) {
|
||||
console.error(err.stack); // print the stack trace if we're in verbose mode
|
||||
}
|
||||
spinnerError() // show an error spinner
|
||||
stopSpinner() // stop the spinner
|
||||
program.error('', { exitCode: 1 }); // exit with error code 1
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Testing our error handling
|
||||
|
||||
Lets make a widget action called `unhandled-error`. Do a build, and then run this action. You should see the error is swallowed. Now try again but use `<yourcli> --verbose widgets unhandled-error` and you should see the error stack trace.
|
||||
|
||||
```ts
|
||||
widgets.command("unhandled-error").action(async () => {
|
||||
updateSpinnerText("Processing an unhandled failure ");
|
||||
await new Promise(resolve => setTimeout(resolve, 3000));
|
||||
throw new Error("Unhandled error");
|
||||
})
|
||||
```
|
||||
|
||||
## Organizing the folders
|
||||
|
||||
Ok, so you have the basics all setup. Now, how do you organize the folders. I like to have the top level commands in their own directories. That way the folder structure emulates the CLI. This is an idea I saw in oclif.
|
||||
|
||||
```
|
||||
- index.ts
|
||||
- /commands/widgets/index.ts
|
||||
- /commands/widgets/list.ts
|
||||
- /commands/widgets/get.ts
|
||||
|
||||
```
|
||||
|
||||
## So why not OCLIF?
|
||||
|
||||
A few simple reasons. OCLIF's getting started template comes with an extremely opinionated typescript configuration. For large projects, I've found it to be incredible. However, for smaller-ish things, I've found conforming to it, a trial of turning down the linter a lot. Overall, they're both great tools. Why not both?
|
||||
@@ -7,7 +7,7 @@ tags:
|
||||
- docker
|
||||
---
|
||||
|
||||
Here we are, its 2017 dotnet core is out, and finally dotnet has a proper cli. In a previous post [we explored the new cli](http://blog.terribledev.io/Exploring-the-dotnet-cli/). In short you can use the dotnet cli to build, test, package, and publish projects. However sometimes just using the cli is not enough. Sometimes, you land in a place where you have many projects to compile, test, and package.
|
||||
Here we are, its 2017 dotnet core is out, and finally dotnet has a proper cli. In a previous post [we explored the new cli](/Exploring-the-dotnet-cli/). In short you can use the dotnet cli to build, test, package, and publish projects. However sometimes just using the cli is not enough. Sometimes, you land in a place where you have many projects to compile, test, and package.
|
||||
<!-- more -->
|
||||
|
||||
You sometimes need a more complex tool to help you manage your versions, and set the right properties as part of your builds. This is where a tasking system like [gulp](http://gulpjs.com/) can help. Now gulp is not the only task engine. There are Rake, Cake, MSBuild, etc. Plenty to pick from. I personally use gulp a lot, because I'm a web developer. I need a JS based system, to help me run the [babels](https://babeljs.io), and [webpacks](https://webpack.github.io/docs/) of the world.
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
title: Compressing images with tinypng's CLI
|
||||
date: 2019-01-23 10:50
|
||||
tags:
|
||||
- javascript
|
||||
- tools
|
||||
---
|
||||
|
||||
Ok so I'm really lazy, and I honestly think that has helped me a lot in this industry. I always try to work smarter, not harder. I take many screen shots for this blog, and I need to optimize them. In case you didn't know, many images are often larger than they need to be, slowing the download time. However, I don't ever want to load them into photoshop. Too much time and effort!
|
||||
|
||||
|
||||
<!-- more -->
|
||||
|
||||
At first I tried to compress images locally, but it took too long to run through all the images I had. So recently I started using a service called [tiny png](https://tinypng.com/) to compress images. Now the website seems to indicate that you upload images, and you will get back optimized versions. However to me this takes too much time. I don't want the hassle of zipping my images uploading them, downloading the results. Again, lazy!
|
||||
|
||||
So I figured out they have a cli in npm. Easy to install, just use npm to globally install it. `npm install -g tinypng-cli`.
|
||||
|
||||
Now you have to call the cli, this is the flags I use `tinypng . -r -k YourKeyHere`. The period tells tinypng to look in the current directory for images, `-r` tells it to look recursively, or essentially to look through child directories as well, and the `-k YourKeyHere` is the key you get by logging in. On the free plan you get 500 compressions a month. Hopefully you will fall into the pit of success like I did!
|
||||
|
||||

|
||||
@@ -0,0 +1,97 @@
|
||||
title: Dynamically changing the site-theme meta tag
|
||||
date: 2022-04-12 11:05
|
||||
thumbnailImage: 1.jpg
|
||||
tags:
|
||||
- javascript
|
||||
- js
|
||||
- react
|
||||
---
|
||||
|
||||
So, in case you are unfamiliar, there is a meta tag called `<meta name="theme-color" content="...">` that is used to change the color of the navbar on desktop safari, mobile safari, and mobile chrome. If you don't set a value these browsers tend to find a color that matches the site to the best of their ability. However, sometimes even setting the value can cause the site to look ugly.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
So, I've been recently working on an NFT project called [Squiggle Squatches](http://squigglesquatches.io/). NFT projects are essentially digital art projects for sale. Our website, really needs to reflect our look and feel as much as we can. When I first loaded our page, I noticed this **huge** white bar on the top of Safari.
|
||||
|
||||

|
||||
|
||||
|
||||
> So I set out to change this. I knew there was a `<meta name="theme-color" content="...">` tag that can add the theme.
|
||||
|
||||
I first made the theme be the color of the top section, and this looked great!
|
||||
|
||||

|
||||
|
||||
However after scrolling, I noticed this looked super ugly.
|
||||
|
||||

|
||||
|
||||
So I decided to write some code to fix this problem.
|
||||
|
||||
## Listening to scroll events
|
||||
|
||||
So, I started with decorating certain tags with a `data-scroll-theme` attribute that signaled our code to look at this div to manipulate the theme color. This looks like `<section data-scroll-theme class="blue/red/etc">content</section>`
|
||||
|
||||
I then ended up crafting this JS code. Basically, make a throttle function so we only fire our event every 100ms. Grab the default color. Then on scroll figure out if any boxes are at the top of the page, and if so set the meta tag to that color.
|
||||
|
||||
```js
|
||||
// a function to only call the wrapped functions every x milliseconds so the scroll event doesn't make our function run all the time
|
||||
function throttle(func, timeFrame) {
|
||||
var lastTime = 0;
|
||||
return function(...args) {
|
||||
var now = new Date().getTime();
|
||||
if (now - lastTime >= timeFrame) {
|
||||
func(...args);
|
||||
lastTime = now;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// get the theme color on load so we can revert to this
|
||||
const ogColor = document.querySelector('meta[name="theme-color"]')?.getAttribute('content');
|
||||
|
||||
// handle scroll event
|
||||
const handleScroll = throttle(() => {
|
||||
// find all tags that have `data-scroll as a property`
|
||||
const targets = document.querySelectorAll('[data-scroll-theme]')
|
||||
// are any targets at the top of the window?
|
||||
const isTop = Array.from(targets).map((target) => {
|
||||
const rect = target.getBoundingClientRect();
|
||||
if (rect.y > 1) {
|
||||
return null;
|
||||
}
|
||||
return { target, rect }
|
||||
}).filter(Boolean).sort((a, b) => b.rect.y - a.rect.y)[0]
|
||||
// if we found an element at the top of the document then
|
||||
if (isTop) {
|
||||
|
||||
// set theme color meta tag to the background color of div
|
||||
const color = window.getComputedStyle(isTop.target).getPropertyValue('background-color')
|
||||
if (color) {
|
||||
// find the theme color meta tag and set the attribute to it
|
||||
document.querySelector('meta[name="theme-color"]')?.setAttribute('content', color);
|
||||
}
|
||||
} else if (ogColor) {
|
||||
// set theme color meta tag to original
|
||||
document.querySelector('meta[name="theme-color"]')?.setAttribute('content', ogColor);
|
||||
}
|
||||
// run every 100ms
|
||||
}, 100)
|
||||
|
||||
document.addEventListener('scroll', handleScroll, { passive: true })
|
||||
|
||||
```
|
||||
|
||||
## End result
|
||||
|
||||
The end result is the top bar of safari changes as you scroll between blocks. This has made [Squiggle Squatches](http://squigglesquatches.io/) look way better on mobile.
|
||||
|
||||
<iframe width="662" height="1176" src="https://www.youtube.com/embed/iLksuqZP4L8" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
|
||||
|
||||
<!--  -->
|
||||
|
||||
You can see a simpler example on [replit](https://replit.com/@TerribleDev/ScrollableTheme)
|
||||
|
||||
|
||||
|
||||
<iframe frameborder="0" width="100%" height="500px" src="https://replit.com/@TerribleDev/ScrollableTheme?embed=true#script.js"></iframe>
|
||||
@@ -3,6 +3,7 @@ permalink: fixing-could-not-load-file-or-assembly-microsoft-dnx-host-clr-2
|
||||
id: 53
|
||||
updated: '2015-09-09 17:34:41'
|
||||
date: 2015-09-09 10:08:18
|
||||
tags:
|
||||
---
|
||||
|
||||
So I recently ran into this error where the latest bits could not load Microsoft.Dnx.Host.Clr here is what I did to fix it.
|
||||
|
||||
@@ -8,5 +8,4 @@ I put together [some materials](https://github.com/TerribleDev/intro-to-docker)
|
||||
<br />
|
||||
|
||||
<!--more-->
|
||||
|
||||
{% youtube 6EGyhDlr8rs %}
|
||||

|
||||
|
||||
139
src/TerribleDev.Blog.Web/Posts/Hosting-craft-on-heroku.md
Normal file
139
src/TerribleDev.Blog.Web/Posts/Hosting-craft-on-heroku.md
Normal file
@@ -0,0 +1,139 @@
|
||||
title: Hosting Craft CMS on Heroku
|
||||
date: 2022-02-24 07:19
|
||||
tags:
|
||||
- craftcms
|
||||
- cms
|
||||
- heroku
|
||||
---
|
||||
|
||||
So, like most early startups, [Quala](https://www.quala.io) (where I currently work) bought into a Wordpress site to sell our product, probably before it really existed. Flash forward, we have customers, and we're on a path to building a platform to change the game on customer management. The Wordpress site was terrible for performance, and [core web vitals](https://web.dev/vitals/). None of us know Wordpress, and barely know any php. We had a huge drive to rebrand ourselves, but to do that we needed to edit the Wordpress theme 😬 or use something else.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
*tl;dr you can use this sweet [deploy to heroku button](https://github.com/oof-bar/craft-heroku) that [oof.studio](https://oof.studio/) made. Most of this post is inspired by their implementation*
|
||||
|
||||
## Why Craft?
|
||||
|
||||
I was introduced to [CraftCMS](https://craftcms.com/) 2 years ago. Back then my first instinct was *eww php*, might also still be my primary reaction 🤣. At that time, and still today, I love the headless CMS ([Contentful](https://www.contentful.com/), [Sanity](https://www.sanity.io/)) + [Gatsby](https://www.gatsbyjs.com/) strategy. However, we are a startup. For us, every dollar counts. The license for Craft is $300/year. Most of the other GraphQL CMS' we looked at were more expensive. We have a developer that's used craft, and I know some other [big brain craft people](https://www.johnlamb.me/).
|
||||
|
||||
## Craft + Heroku
|
||||
|
||||
So, Heroku is a Platform to host webapps. They have good postgres support, and we've used them in the past. Apps on Heroku need to be [12 factor apps](https://12factor.net/). Heroku has an ephemeral file system, scales horizontally, and logs stdout/stderr streams.
|
||||
|
||||
Craft is based on the yii php framework. You'll need to use the official `php` buildpack for craft to work, and any libraries for yii will work with Craft. When we started looking into this, I found a [deploy to heroku button](https://github.com/oof-bar/craft-heroku) that [oof.studio](https://oof.studio/) built. We had to fork this, and update it. However, since then they've updated it (almost exactly how we did), so you may want to use their deploy button to get started. I didn't have much experience with craft, so much of this writing you can attribute to me reverse engineering their configs and updating it to the newest version of craft.
|
||||
|
||||
## Configuring Craft
|
||||
|
||||
Craft configurations sit in an `app.php` file. This file will need to add redis for sessions, and cache (the cache for the cache tags). Also, using [codemix's logstream](https://github.com/codemix/yii2-streamlog), piping the stream to stdout.
|
||||
|
||||
```php
|
||||
'production' => [
|
||||
'components' => [
|
||||
'redis' => [
|
||||
'class' => yii\redis\Connection::class,
|
||||
'hostname' => parse_url(App::env('REDIS_URL'), PHP_URL_HOST),
|
||||
'port' => parse_url(App::env('REDIS_URL'), PHP_URL_PORT),
|
||||
'password' => parse_url(App::env('REDIS_URL'), PHP_URL_PASS)
|
||||
],
|
||||
'session' => [
|
||||
'class' => yii\redis\Session::class,
|
||||
'as session' => [
|
||||
'class' => \craft\behaviors\SessionBehavior::class
|
||||
]
|
||||
],
|
||||
'cache' => [
|
||||
'class' => yii\redis\Cache::class,
|
||||
'defaultDuration' => 86400
|
||||
],
|
||||
'log' => [
|
||||
'targets' => [
|
||||
[
|
||||
'class' => codemix\streamlog\Target::class,
|
||||
'url' => 'php://stderr',
|
||||
'levels' => ['error', 'warning'],
|
||||
'logVars' => []
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
There is also a file to set the db configuration in `db.php`. That must have the following, which will use heroku's `DATABASE_URL` environment variable in prod, and [nitro's](https://craftcms.com/docs/nitro/2.x/) set of environment variables locally. You'll need a `bootstrap.php` file to setup the environment properly (including license keys).
|
||||
|
||||
```php
|
||||
|
||||
<?php
|
||||
define('CRAFT_BASE_PATH', __DIR__);
|
||||
define('CRAFT_VENDOR_PATH', CRAFT_BASE_PATH . '/vendor');
|
||||
|
||||
require_once CRAFT_VENDOR_PATH . '/autoload.php';
|
||||
|
||||
// Load dotenv?
|
||||
if (class_exists('Dotenv\Dotenv')) {
|
||||
Dotenv\Dotenv::createUnsafeImmutable(CRAFT_BASE_PATH)->safeLoad();
|
||||
}
|
||||
|
||||
define('CRAFT_ENVIRONMENT', getenv('ENVIRONMENT') ?: 'production');
|
||||
define('CRAFT_LICENSE_KEY', getenv('CRAFT_LICENSE_KEY'));
|
||||
define('CRAFT_STORAGE_PATH', getenv('CRAFT_STORAGE_PATH') ?: '../storage');
|
||||
define('CRAFT_STREAM_LOG', true);
|
||||
```
|
||||
|
||||
## S3
|
||||
|
||||
In our case, the button didn't provide any support for uploaded files. We went for S3. I added the Craft s3 plugin, and configured it to read the apikeys, and bucket names from environment variables. Then I registered those variables in the environment variables in heroku.
|
||||
|
||||
## Other important files
|
||||
|
||||
Heroku requires a Procfile to launch apps.
|
||||
|
||||
```shell
|
||||
web: vendor/bin/heroku-php-nginx -C nginx_app.conf web
|
||||
worker: ./craft queue/listen --verbose
|
||||
release: ./bin/release.sh
|
||||
```
|
||||
`release.sh` will run a db migration
|
||||
```shell
|
||||
if /usr/bin/env php /app/craft install/check
|
||||
then
|
||||
/usr/bin/env php /app/craft up --interactive=0
|
||||
fi
|
||||
```
|
||||
|
||||
A `nginx_app.conf` nginx config for heroku's php buildpack.
|
||||
|
||||
```nginx
|
||||
if ($http_x_forwarded_proto != "https") {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
|
||||
if ($host ~ ^www\.(.+)) {
|
||||
return 301 https://$1$request_uri;
|
||||
}
|
||||
|
||||
location / {
|
||||
# try to serve file directly, fallback to rewrite
|
||||
try_files $uri @rewriteapp;
|
||||
}
|
||||
|
||||
location @rewriteapp {
|
||||
# rewrite all to index.php
|
||||
rewrite ^(.*)$ /index.php?p=$1 last;
|
||||
}
|
||||
|
||||
location ~ ^/(index)\.php(/|$) {
|
||||
fastcgi_pass heroku-fcgi;
|
||||
fastcgi_split_path_info ^(.+\.php)(/.*)$;
|
||||
include fastcgi_params;
|
||||
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
|
||||
fastcgi_param HTTPS on;
|
||||
}
|
||||
|
||||
# Global Config
|
||||
client_max_body_size 20M;
|
||||
```
|
||||
|
||||
## Anything else?
|
||||
|
||||
Nope, not really. You need to be aware that you need to treat Craft's configuration as entirely immutable. Any changes to configuration, such as plugins, twig templates, etc., will need to be made in dev and pushed to Heroku. Nothing can be mutated in production, other than the authoring of the site. Even file uploads!
|
||||
240
src/TerribleDev.Blog.Web/Posts/Hosting-dotnet-core-on-heroku.md
Normal file
240
src/TerribleDev.Blog.Web/Posts/Hosting-dotnet-core-on-heroku.md
Normal file
@@ -0,0 +1,240 @@
|
||||
title: Hosting dotnet core on Heroku
|
||||
date: 2021-07-19 00:01
|
||||
tags:
|
||||
- dotnet core
|
||||
- cloud
|
||||
- Heroku
|
||||
- Postgres
|
||||
---
|
||||
|
||||
|
||||
I've been getting back into building scrappy little web apps for my friends. On top of this, I recently joined [a startup](https://quala.io) and getting away from Enterprise class software has made me make a huge mind-shift. In the recent past when I wanted to build apps I was thinking Kubernetes, Helm Charts, etc. However, in small app, and startup land reducing the barriers to ship is very important.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
In case you are not familiar, [Heroku](https://www.heroku.com) is a platform to host webapps. They host a free version of Postgres DB, and Redis that is directly connected to your app with environment variables. Heroku has support for many languages, but one I saw missing from the list was dotnet.
|
||||
|
||||
To host apps on Heroku, you must know the basic *rules of Heroku*
|
||||
|
||||
1. Your app must listen on `$PORT` or `%PORT%` if you come from windows. Basically, any http listeners must listen to the port defined as an environment variable.
|
||||
2. Postgres is free (to a point), redis is free, most other things cost money.
|
||||
3. Logs must go to `stdout` which works well for us since that's the default behavior of asp.net core!
|
||||
4. In dotnet core authentication cookies are encrypted and the key is usually placed in your home directory, but in Heroku your app could be moved to any machine at any moment. The filesystem needs to be stateless
|
||||
5. Heroku gives you your Postgres connection string as `postgres://<username>:<password>@<host>:<port>/<database>`
|
||||
|
||||
|
||||
|
||||
## Listening on $PORT
|
||||
|
||||
Traditionally dotnet core apps listen for an environment variable called `ASPNETCORE_URLS` but in this case we need to override this behavior. In your `Program.cs` file you can make the following modification, which detects if `$PORT` is defined, and if it is to listen to all requests on that port.
|
||||
|
||||
```csharp
|
||||
public static IHostBuilder CreateHostBuilder(string[] args) =>
|
||||
Host.CreateDefaultBuilder(args)
|
||||
.ConfigureWebHostDefaults(webBuilder =>
|
||||
{
|
||||
var port = Environment.GetEnvironmentVariable("PORT");
|
||||
if(!string.IsNullOrEmpty(port))
|
||||
{
|
||||
webBuilder.UseUrls($"http://*:{port}");
|
||||
}
|
||||
webBuilder.UseStartup<Startup>();
|
||||
});
|
||||
```
|
||||
|
||||
## Using Postgres with Entity Framework
|
||||
|
||||
On a `dotnet new mvc --auth individual` you are presented with the following block of code in `Startup.cs`
|
||||
|
||||
```csharp
|
||||
services.AddDbContext<ApplicationDbContext>(options =>
|
||||
options.UseSqlite(
|
||||
Configuration.GetConnectionString("DefaultConnection")));
|
||||
|
||||
```
|
||||
|
||||
This configures your app to use SQLite as a DB; we need to switch this. Luckily the Postgres team has an awesome integration with entity framework. Run the following command to add their package to your project
|
||||
|
||||
`dotnet add package Npgsql.EntityFrameworkCore.PostgreSQL`
|
||||
|
||||
Then simply swap the previous code block for the following, which will parse the database url from Heroku and setup a Postgres connection. You can use the following docker-compose file and `appsettings.Development.json` for local development.
|
||||
|
||||
```csharp
|
||||
var databaseUrl = Configuration.GetValue<string>("DATABASE_URL");
|
||||
var databaseUri = new Uri(databaseUrl);
|
||||
var userInfo = databaseUri.UserInfo.Split(':');
|
||||
|
||||
var builder = new NpgsqlConnectionStringBuilder
|
||||
{
|
||||
Host = databaseUri.Host,
|
||||
Port = databaseUri.Port,
|
||||
Username = userInfo[0],
|
||||
Password = userInfo[1],
|
||||
Database = databaseUri.LocalPath.TrimStart('/'),
|
||||
TrustServerCertificate = true
|
||||
};
|
||||
services.AddDbContext<ApplicationDbContext>(options =>
|
||||
options.UseNpgsql(builder.ToString()));
|
||||
```
|
||||
|
||||
*docker-compose.yml*
|
||||
|
||||
```yml
|
||||
version: '3'
|
||||
services:
|
||||
postgres:
|
||||
image: 'postgres:13'
|
||||
ports:
|
||||
- '6666:5432'
|
||||
environment:
|
||||
POSTGRES_PASSWORD: 'password'
|
||||
POSTGRES_USER: 'admin'
|
||||
```
|
||||
|
||||
*appsettings.Development.json*
|
||||
|
||||
```json
|
||||
{
|
||||
"DATABASE_URL": "postgres://admin:password@localhost:6666/main"
|
||||
}
|
||||
```
|
||||
|
||||
## Encryption keys
|
||||
|
||||
Ok so you've got the basics running, but you need to store your encryption keys. We can store them in the database using entity framework! Add this to your `startup.cs` `ConfigureServices` Method. Make sure you `dotnet add package Microsoft.AspNetCore.DataProtection.EntityFrameworkCore`. You'll also need to make sure your dbContext implements `IDataProtectionKeyContext`
|
||||
|
||||
```cs
|
||||
|
||||
services.AddDataProtection().PersistKeysToDbContext<ApplicationDbContext>();
|
||||
|
||||
```
|
||||
|
||||
## Database Migrations
|
||||
|
||||
There are several ways to handle database migrations. For simple webapps you can configure your app to do a migration on startup. More complex apps should shell into the `ef` command line using [Heroku's procfile](https://devcenter.heroku.com/articles/release-phase)
|
||||
|
||||
```csharp
|
||||
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
|
||||
{
|
||||
using(var scope = app.ApplicationServices.GetRequiredService<IServiceScopeFactory>().CreateScope())
|
||||
using(var ctx = scope.ServiceProvider.GetRequiredService<ApplicationDbContext>())
|
||||
{
|
||||
ctx.Database.EnsureCreated();
|
||||
ctx.Database.Migrate();
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## Forwarded protocol
|
||||
|
||||
Heroku sends an `X-Forwarded-Proto` header to tell your app what protocol a user is using. You'll want to add this to your `Configure` block before all other middleware
|
||||
|
||||
```csharp
|
||||
app.UseForwardedHeaders(new ForwardedHeadersOptions
|
||||
{
|
||||
ForwardedHeaders = ForwardedHeaders.XForwardedProto
|
||||
});
|
||||
```
|
||||
|
||||
## Getting your app in Heroku with containers
|
||||
|
||||
There are 2 basic methods to getting your app live in Heroku. One is to push a docker container to Heroku, or use a Heroku buildpack to have Heroku build your app for you. I opted for the docker container.
|
||||
|
||||
I stole this sample dockerfile from the aspnet core docker docs.
|
||||
|
||||
```dockerfile
|
||||
FROM mcr.microsoft.com/dotnet/core/aspnet:3.1 AS base
|
||||
WORKDIR /app
|
||||
EXPOSE 80
|
||||
EXPOSE 443
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/core/sdk:3.1 AS build
|
||||
WORKDIR /src
|
||||
COPY ["./MyApp.csproj", "."]
|
||||
RUN dotnet restore "MyApp.csproj"
|
||||
COPY . .
|
||||
WORKDIR "/src"
|
||||
RUN dotnet build "MyApp.csproj" -c Release -o /app
|
||||
|
||||
FROM build AS publish
|
||||
RUN dotnet publish "MyApp.csproj" -c Release -o /app
|
||||
|
||||
FROM base AS final
|
||||
WORKDIR /app
|
||||
COPY --from=publish /app .
|
||||
ENTRYPOINT ["dotnet", "MyApp.dll"]
|
||||
|
||||
|
||||
```
|
||||
|
||||
I then found someone had made a *build a docker image and push to Heroku* GitHub action. All I had to do is make this a file in `.github/deployContainerToHeroku.yml`, turn on Github actions, and register my Heroku API key as a secret in GitHub
|
||||
|
||||
|
||||
```yml
|
||||
name: Deploy to Heroku.
|
||||
|
||||
# Run workflow on every push to master branch.
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
# Your workflows jobs.
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Check-out your repository.
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
|
||||
### ⬇ IMPORTANT PART ⬇ ###
|
||||
|
||||
- name: Build, Push and Release a Docker container to Heroku. # Your custom step name
|
||||
uses: gonuit/Heroku-docker-deploy@v1.3.3 # GitHub action name (leave it as it is).
|
||||
with:
|
||||
# Below you must provide variables for your Heroku app.
|
||||
|
||||
# The email address associated with your Heroku account.
|
||||
# If you don't want to use repository secrets (which is recommended) you can do:
|
||||
# email: my.email@example.com
|
||||
email: ${{ secrets.HEROKU_EMAIL }}
|
||||
|
||||
# Heroku API key associated with provided user's email.
|
||||
# Api Key is available under your Heroku account settings.
|
||||
Heroku_api_key: ${{ secrets.HEROKU_API_KEY }}
|
||||
|
||||
# Name of the Heroku application to which the build is to be sent.
|
||||
Heroku_app_name: ${{ secrets.HEROKU_APP_NAME }}
|
||||
|
||||
# (Optional, default: "./")
|
||||
# Dockerfile directory.
|
||||
# For example, if you have a Dockerfile in the root of your project, leave it as follows:
|
||||
dockerfile_directory: ./src/MyApp
|
||||
|
||||
# (Optional, default: "Dockerfile")
|
||||
# Dockerfile name.
|
||||
dockerfile_name: Dockerfile
|
||||
|
||||
# (Optional, default: "")
|
||||
# Additional options of docker build command.
|
||||
docker_options: "--no-cache"
|
||||
|
||||
# (Optional, default: "web")
|
||||
# Select the process type for which you want the docker container to be uploaded.
|
||||
# By default, this argument is set to "web".
|
||||
# For more information look at https://devcenter.Heroku.com/articles/process-model
|
||||
process_type: web
|
||||
|
||||
|
||||
```
|
||||
|
||||
## Getting your app in Heroku with buildpacks
|
||||
|
||||
Heroku has had this system called *buildpacks* which allow you to script the creation of the hosting environment of your app. Someone has done the dirty work and [built a dotnet core buildpack](https://elements.Heroku.com/buildpacks/jincod/dotnetcore-buildpack) which can be used to deploy dotnet core apps to Heroku. To use this, create an app in Heroku, set your [buildpack to the dotnet core buildpack](https://elements.Heroku.com/buildpacks/jincod/dotnetcore-buildpack) in settings. Connect your GitHub repo and Heroku will do the hard work for you!
|
||||
|
||||
|
||||
## Finish
|
||||
|
||||
I hope you liked this. Keep on hacking away!
|
||||
@@ -0,0 +1,34 @@
|
||||
title: How to host a javascript monorepo on Heroku
|
||||
date: 2022-03-01 10:35
|
||||
tags:
|
||||
- javascript
|
||||
- Heroku
|
||||
---
|
||||
|
||||
So I've been using monorepos for some time, and recently I've gotten a lot of questions about how to host them on Heroku. I figured I'd give you the simple guide. There are two basic scenarios. The root of your git repo has your yarn/npm workspace, or you have a folder inside of a git repo you wish to use.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Scenario 1: yarn/npm workspace
|
||||
|
||||
In this case, create a Heroku app with the official nodejs buildpack. Add `heroku-postbuild: "YourBuildCommand"` to your scripts section of the root package.json. This will run after the npm install, and can be used to run any build commands you need (such as compiling typescript). Then use [the multi-procfile buildpack](https://github.com/heroku/heroku-buildpack-multi-procfile) which will grab a procfile from any directory and copy it to the root to boot your app. That way your monorepo can have a `server/package.json` package that contains your web app and in there you can have the procfile `server/Procfile`.
|
||||
|
||||
Your buildpacks should have this order:
|
||||
|
||||
```
|
||||
heroku/nodejs
|
||||
heroku-buildpack-multi-procfile
|
||||
```
|
||||
|
||||
The multi-procfile requires an Environment variable called `PROCFILE` which has the path to the procfile to use. For example it can be `/server/Procfile`. Usually my procfile contains a workspace command to start the server.
|
||||
|
||||
```
|
||||
web: yarn workspace server run start
|
||||
|
||||
```
|
||||
|
||||
## Scenario 2: Folder inside of Git Repo
|
||||
|
||||
So this is a strategy where you make a heroku app in a nested directory. Not using a yarn workspace. In this case you can use the [monorepo buildpack](https://github.com/lstoll/heroku-buildpack-monorepo) to copy a subdirectory to the root directory before the build happens. After that buildpack include the `heroku/nodejs` buildpack which will run the npm/yarn/etc. install commands and then use the `Procfile` in that directory to start your app.
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ Getting Started:
|
||||
|
||||
Ok, so the alexa .net sdk is for the full framework only, and its built for webapi. The best way to get going is in visual studio `file -> new project -> ASP.NET Web Application .net framework` A dialog comes up, and I picked `Azure API App`.
|
||||
|
||||

|
||||

|
||||
|
||||
Now you have an empty webapi project. We don't need swashbuckle/swagger so lets get rid of that
|
||||
|
||||
|
||||
@@ -80,7 +80,7 @@ So the major feature I was blown away by with NDepend was how clean, and organiz
|
||||
The code quality rules, uses the NDepends querying engine to get your code. When you click on a rule the Linq query used will be displayed in a separate window. You can use this window to create your own rules, using the same querying engine. The following is a query to find code that should not be declared public.
|
||||
|
||||
<pre>
|
||||
//<Name>Avoid public methods not publicly visible</Name>
|
||||
//Avoid public methods not publicly visible
|
||||
// Matched methods are declared public but are not publicly visible by assemblies consumers.
|
||||
// Their visibility level must be decreased.
|
||||
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
title: Optimizing heroku's node_module cache for JS monorepos
|
||||
tags:
|
||||
- JS
|
||||
- javascript
|
||||
- heroku
|
||||
- cloud
|
||||
- devops
|
||||
- node.js
|
||||
date: 2021-10-12 00:00
|
||||
---
|
||||
|
||||
For many of us a JS workspace is the simplest way to structure code for future growth while providing very quick iterations. In case you are unfamiliar, several technologies exist, such as `yarn workspaces`, `lerna`, `npm workspaces`, etc., that can seamlessly stitch npm packages on disk as though they were published to a private NPM registry. This allows for fast iteration inside of a single git repo, while allowing a future where these dependencies could be abstracted.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
The file system looks something like the following
|
||||
|
||||
```
|
||||
root/
|
||||
packages/
|
||||
server
|
||||
workers
|
||||
data
|
||||
utils
|
||||
```
|
||||
|
||||
In my quick example we can pretend that an express app is in server, and some background workers are in workers. However, both apps need to share code. One strategy would be to version the `data` and `utils` packages and ship them to a private NPM registry, or we could use these mono-repo technologies so that `import utils from 'utils'` just works without the need for a remote package store. When installing node modules into a JS workspace the following can occur
|
||||
|
||||
|
||||
```
|
||||
root/
|
||||
node_modules
|
||||
packages/
|
||||
server/node_modules
|
||||
data
|
||||
utils
|
||||
worker/node_modules
|
||||
```
|
||||
|
||||
In the above scenario node modules are both resolved into the root package but also several layers deep. In heroku you can cache your `node_modules` to improve build speed. However the paths to these directories **must be declared prior to the build**. This becomes an issue when big mono-repos litter `node_modules` everywhere.
|
||||
|
||||
I decided to write the following JS script to walk over the directories where `node_modules` could be placed and rewrite the root `package.json` file so those directories are explicitly declared.
|
||||
|
||||
|
||||
```js
|
||||
const glob = require('glob');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
// do not run this in the heroku build
|
||||
// we treat this a bit more like a yarn lockfile
|
||||
if(process.env.NODE_ENV !== 'production') {
|
||||
glob("./packages/*/node_modules", {}, function (er, result) {
|
||||
const packageJson = require('./package.json');
|
||||
// include the root node_modules
|
||||
let cacheDirectories = ['node_modules'];
|
||||
cacheDirectories = cacheDirectories.concat(result)
|
||||
packageJson.cacheDirectories = cacheDirectories.filter(i => {
|
||||
// ensure the directories where node_modules are found contain a package.json file
|
||||
return fs.existsSync(path.resolve(i, '../package.json'));
|
||||
});
|
||||
// write out the changes to the root package.json
|
||||
fs.writeFileSync('./package.json', JSON.stringify(packageJson, null, 2));
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
I wired up the script on the post install process of the install lifecycle. Basically adding the following to the root `package.json` file.
|
||||
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"postinstall": "node ./computeCacheDirectories.js",
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Now every time a developer runs `yarn install` they will compute the cache directories. The result is a mutation to the `package.json` that looks like the following.
|
||||
|
||||
```json
|
||||
{
|
||||
"cacheDirectories": [
|
||||
"node_modules",
|
||||
"./packages/server/node_modules",
|
||||
"./packages/worker/node_modules"
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
When we push changes to prod we get much better cache hits across our yarn workspace.
|
||||
@@ -339,4 +339,105 @@ Environment.Exit(result);
|
||||
|
||||
Here is the full source as a [gist](https://gist.github.com/TerribleDev/06abb67350745a58f9fab080bee74be1#file-program-cs):
|
||||
|
||||
<script src="https://gist.github.com/TerribleDev/06abb67350745a58f9fab080bee74be1.js"></script>
|
||||
```csharp
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
var app = new Microsoft.Extensions.CommandLineUtils.CommandLineApplication();
|
||||
var catapult = app.Command("catapult", config => {
|
||||
config.OnExecute(()=>{
|
||||
config.ShowHelp(); //show help for catapult
|
||||
return 1; //return error since we didn't do anything
|
||||
});
|
||||
config.HelpOption("-? | -h | --help"); //show help on --help
|
||||
});
|
||||
catapult.Command("help", config => {
|
||||
config.Description = "get help!";
|
||||
config.OnExecute(()=>{
|
||||
catapult.ShowHelp("catapult");
|
||||
return 1;
|
||||
});
|
||||
});
|
||||
catapult.Command("list", config => {
|
||||
config.Description = "list catapults";
|
||||
config.HelpOption("-? | -h | --help");
|
||||
config.OnExecute(()=>{
|
||||
|
||||
Console.WriteLine("a");
|
||||
Console.WriteLine("b");
|
||||
return 0;
|
||||
});
|
||||
});
|
||||
catapult.Command("add", config => {
|
||||
config.Description = "Add a catapult";
|
||||
config.HelpOption("-? | -h | --help");
|
||||
var arg = config.Argument("name", "name of the catapult", false);
|
||||
config.OnExecute(()=>{
|
||||
if(!string.IsNullOrWhiteSpace(arg.Value))
|
||||
{
|
||||
//add snowballs somehow
|
||||
Console.WriteLine($"added {arg.Value}");
|
||||
return 0;
|
||||
}
|
||||
return 1;
|
||||
|
||||
|
||||
});
|
||||
});
|
||||
catapult.Command("fling", config =>{
|
||||
config.Description = "fling snow";
|
||||
config.HelpOption("-? | -h | --help");
|
||||
var ball = config.Argument("snowballId", "snowball id", false);
|
||||
var cata = config.Argument("catapultId", "id of catapult to use", false);
|
||||
config.OnExecute(()=>{
|
||||
|
||||
//actually do something
|
||||
Console.WriteLine($"threw snowball: {ball.Value} with {cata.Value}");
|
||||
return 0;
|
||||
});
|
||||
});
|
||||
var snowball = app.Command("snowball", config => {
|
||||
config.OnExecute(()=>{
|
||||
config.ShowHelp(); //show help for catapult
|
||||
return 1; //return error since we didn't do anything
|
||||
});
|
||||
config.HelpOption("-? | -h | --help"); //show help on --help
|
||||
});
|
||||
snowball.Command("help", config => {
|
||||
config.Description = "get help!";
|
||||
config.OnExecute(()=>{
|
||||
snowball.ShowHelp("snowball");
|
||||
return 1;
|
||||
});
|
||||
});
|
||||
snowball.Command("list", config => {
|
||||
config.HelpOption("-? | -h | --help");
|
||||
config.Description = "list snowballs";
|
||||
config.OnExecute(()=>{
|
||||
|
||||
Console.WriteLine("1");
|
||||
Console.WriteLine("2");
|
||||
return 0;
|
||||
});
|
||||
});
|
||||
snowball.Command("add", config => {
|
||||
config.Description = "Add a snowball";
|
||||
config.HelpOption("-? | -h | --help");
|
||||
var arg = config.Argument("name", "name of the snowball", false);
|
||||
config.OnExecute(()=>{
|
||||
if(!string.IsNullOrWhiteSpace(arg.Value))
|
||||
{
|
||||
//add snowballs somehow
|
||||
Console.WriteLine($"added {arg.Value}");
|
||||
return 0;
|
||||
}
|
||||
return 1;
|
||||
|
||||
|
||||
});
|
||||
});
|
||||
//give people help with --help
|
||||
app.HelpOption("-? | -h | --help");
|
||||
var result = app.Execute(args);
|
||||
Environment.Exit(result);
|
||||
}
|
||||
```
|
||||
|
||||
@@ -0,0 +1,83 @@
|
||||
title: Rebuilding this blog for performance
|
||||
date: 2019-01-21 17:56:34
|
||||
tags:
|
||||
- performance
|
||||
- battle of the bulge
|
||||
- javascript
|
||||
- dotnet
|
||||
---
|
||||
|
||||
So many people know me as a very performance focused engineer, and as someone that cares about perf I've always been a bit embarrassed about this blog. In actual fact this blog as it sits now is **fast** by most people's standards. I got a new job in July, and well I work with an [absolute mad lad](https://twitter.com/markuskobler) that is making me feel pretty embarrassed with his 900ms page load times. So I've decided to build my own blog engine, and compete against him.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Approach
|
||||
|
||||
Ok, so I want a really fast blog, but one that does not sacrifice design. I plan to pre-compute the HTML into memory, but I am not going to serve static files. In this case, I'll need an application server. I'm going to have my own CSS styles, but I'm hoping to be in the (almost) no-JS camp. Not that I dislike JS, but I want to do as much pre-computing as possible, and I don't want to slow the page down with compute in the client.
|
||||
|
||||
## Features
|
||||
|
||||
This blog has a view to read a post. A home page with links to the last 10 blog posts and a pager to go back further in time. A page listing blogs by tags and links for each tag to posts.
|
||||
|
||||
## Picking Technologies
|
||||
|
||||
So in the past my big philosophy has been that most programming languages and technologies really don't matter for most applications. In fact this use-case *could* and probably should be one of them, but when you go to the extremes that I go to, you want to look at benchmarks. [Tech empower](https://www.techempower.com/benchmarks/) does benchmarks of top programming languages and frameworks. For my blog, since it will mostly be bytes in, bytes out, precomputed, we should look at the plain text benchmark. The top 10 webservers include go, java, rust, c++, and C#. Now I know rust, go and C# pretty well. Since the rust and go webservers listed in the benchmark were mostly things no one really uses, I decided to use dotnet. This is also for a bit of a laugh, because my competition hates dotnet, and I also have deep dotnet expertise I can leverage.
|
||||
|
||||
|
||||
## Server-side approach
|
||||
|
||||
So as previously mentioned we'll be precomputing blog posts. I plan to compute the posts and hand them down to the views. If we use completely immutable data structures we'll prevent any locking that could slow down our app.
|
||||
|
||||
## ASPNET/Dotnet Gotchas
|
||||
|
||||
So dotnet is a managed language with a runtime. Microsoft has some [performance best practices](https://docs.microsoft.com/en-us/aspnet/core/performance/performance-best-practices?view=aspnetcore-2.2), but here are some of my thoughts.
|
||||
|
||||
* There is a tool called [cross gen](https://github.com/dotnet/coreclr/blob/master/Documentation/building/crossgen.md) which compiles dll's to native code.
|
||||
* Dotnet's garbage collector is really good, but it struggles to collect long living objects. Our objects will need to either be ephemeral, or pinned in memory forever.
|
||||
* The garbage collector struggles with large objects, especially large strings. We'll have to avoid large string allocations when possible.
|
||||
* dotnet has reference types such as objects, classes, and strings, while most other things are value types. [Value types are allocated](/c-strings/) on the stack, which is far cheaper than the heap
|
||||
* Exceptions are expensive when thrown in dotnet. I'm going to always avoid hitting them.
|
||||
* Cache all the things!
|
||||
|
||||
In the past we had to pre-compile razor views, but in 2.x of dotnet core, that is now built in. So that is one thing I don't have to worry about.
|
||||
|
||||
|
||||
## Client side page architecture and design
|
||||
|
||||
So here are my thoughts on the client side of things.
|
||||
|
||||
* Minify all the content
|
||||
* Fingerprint all css/js content and set cache headers to maximum time
|
||||
* Deliver everything with brotli compression
|
||||
* Zopfli and gzip for fallbacks
|
||||
* Always use `Woff2` for fonts
|
||||
* Avoid expensive css selectors
|
||||
* `:nth child`
|
||||
* `fixed`
|
||||
* partial matching `[class^="wrap"]`
|
||||
* Use HTTP/2 for **all requests**
|
||||
* Images
|
||||
* Use SVG's when possible
|
||||
* Recompile all images in the build to `jpeg 2000, jpeg xr, and webp`
|
||||
* Serve `jpeg 2000` to ios
|
||||
* `jpeg XR` to ie11 and edge
|
||||
* Send `webp` to everyone else
|
||||
* PWA
|
||||
* Use a service worker to cache assets
|
||||
* Also use a service worker to prefetch blog posts
|
||||
* Offline support
|
||||
* CDN
|
||||
* Use Cloudflare to deliver assets faster
|
||||
* Cloudflare's argo improves geo-routing and latency issues
|
||||
* Throw any expected 301's inside cloudflares own datacenters with workers
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
These are the list of tools I'm using to measure performance.
|
||||
|
||||
* `lighthouse` - Built into chrome (its in the audit tab in the devtools), this displays a lot of performance and PWA improvements.
|
||||
* [Web Hint](https://webhint.io/) is like a linter for your web pages. The tool provides a ton of improvements from accessibility to performance
|
||||
* I really like [pingdom's](https://tools.pingdom.com/) page load time tool.
|
||||
* Good ol' [web page test is also great](https://www.webpagetest.org/)
|
||||
* The chrome devtools can also give you a breakdown as to what unused css you have on the page
|
||||
@@ -34,5 +34,3 @@ Eventually we bit the bullet and decided to sign our requests to the cluster. Un
|
||||
This project totally saved my bacon. Brandon's library plugged right into the .NET sdk, and auth'd our requests to aws without us having to figure out all that crypo. Within moments of finding it I filed an [issue](https://github.com/bcuff/elasticsearch-net-aws/issues/1) thanking Brandon as it really helped me out.
|
||||
|
||||
The Elasticsearch service offering by Amazon is pretty awesome. Like any platform its less flexible then hosting the instances yourself. You have to live with the plugins they ship, but on the plus side you get a full cluster, with monitoring, and a knob to turn up instances, or storage space without having to worry about the details.
|
||||
|
||||
<script src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
|
||||
|
||||
@@ -0,0 +1,256 @@
|
||||
title: Serving AMP Pages with Dotnet Core
|
||||
date: 2022-03-10 06:00
|
||||
tags:
|
||||
- dotnet
|
||||
- dotnetcore
|
||||
- AMP
|
||||
---
|
||||
|
||||
I remember when AMP (Accelerated Mobile Pages) first came out, and it was very restrictive and weird. I think this ultimately hurt the *AMP Brand*. Beyond this, several companies have built AMP experiences which haven't always been the best experience. I do however think AMP pages always load extremely fast. A lot of that is just the constraints of AMP. Last night I put my blog posts on AMP for a laugh, and it was much easier than I thought it would be.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Step 0
|
||||
|
||||
Download the [AMP chrome extension](https://chrome.google.com/webstore/detail/amp-validator/nmoffdblmcmgeicmolmhobpoocbbmknc?hl=en) and read what your violations are on an existing page you want to serve as an amp page.
|
||||
|
||||
|
||||
|
||||
## AMP Requirements
|
||||
|
||||
So these days AMP is a webpage with several restrictions.
|
||||
|
||||
* No JavaScript, or well very restrictive JS.
|
||||
* JS is possible, but not without work. For the sake of this tutorial I decided to skip the JS.
|
||||
* Inlined only css
|
||||
* No `picture` tags
|
||||
* A few other tags you need for AMP.
|
||||
|
||||
|
||||
## Razor
|
||||
|
||||
First things first, we need to figure out how we will adjust our layout for AMP. The easiest way for a layout to get a variable either from any controller or any razor page is using the `ViewData` dictionary. I added the following at the top of my layout page. This lets me read if we are in an amp page.
|
||||
|
||||
```csharp
|
||||
@{
|
||||
var amp = ViewData["amp"] as bool? ?? false;
|
||||
var htmlTag = amp ? "amp" : "";
|
||||
}
|
||||
```
|
||||
|
||||
Ok, so lets dive into the required HTML markup. AMP pages require a...
|
||||
|
||||
* `<html>` tag with an `amp` attribute.
|
||||
* a `<head>` tag with an `<amp-boilerplate>` tag that contains some boilerplate CSS.
|
||||
* The amp JS runtime
|
||||
* `<link>` tags to point the non-amp page at the amp page.
|
||||
|
||||
|
||||
The HTML tag is an easy start. The code block above defines an `htmlTag` variable that is rendered as the `amp` attribute on the tag.
|
||||
|
||||
```cshtml
|
||||
<html lang="en" @htmlTag>
|
||||
```
|
||||
|
||||
The head tag containing the boilerplate CSS is easy. Note that the boilerplate has `@` signs for CSS which need to be `@@` in razor, to escape the `@` sign.
|
||||
|
||||
```cshtml
|
||||
@if(amp)
|
||||
{
|
||||
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
|
||||
}
|
||||
```
|
||||
|
||||
Finally, the JS runtime. This needs to also go in the head tag. You can include this with the boilerplate code.
|
||||
|
||||
```cshtml
|
||||
@if(amp)
|
||||
{
|
||||
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
|
||||
|
||||
<script async src="https://cdn.ampproject.org/v0.js"></script>
|
||||
}
|
||||
```
|
||||
|
||||
### Inline CSS
|
||||
|
||||
|
||||
AMP Pages must have inlined CSS. To accomplish this I wrote this tag helper which loads a comma separated list of files into memory and then inlines them. The `<style>` tag your CSS ends up in has to have the `amp-custom` attribute.
|
||||
|
||||
```csharp
|
||||
[HtmlTargetElement("inline-style")]
|
||||
public class InlineStyleTagHelper : TagHelper
|
||||
{
|
||||
[HtmlAttributeName("href")]
|
||||
public string Href { get; set; }
|
||||
|
||||
private IWebHostEnvironment HostingEnvironment { get; }
|
||||
private IMemoryCache Cache { get; }
|
||||
|
||||
|
||||
|
||||
public InlineStyleTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
|
||||
{
|
||||
HostingEnvironment = hostingEnvironment;
|
||||
Cache = cache;
|
||||
}
|
||||
|
||||
|
||||
public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
|
||||
{
|
||||
var paths = Href.Split(',');
|
||||
|
||||
// Get the value from the cache, or compute the value and add it to the cache
|
||||
var fileContent = await Cache.GetOrCreateAsync("InlineStyleTagHelper-" + paths, async entry =>
|
||||
{
|
||||
var fileProvider = HostingEnvironment.WebRootFileProvider;
|
||||
var result = paths.Select(async path => {
|
||||
if(HostingEnvironment.IsDevelopment())
|
||||
{
|
||||
var changeToken = fileProvider.Watch(path);
|
||||
entry.AddExpirationToken(changeToken);
|
||||
}
|
||||
|
||||
entry.SetPriority(CacheItemPriority.NeverRemove);
|
||||
|
||||
var file = fileProvider.GetFileInfo(path);
|
||||
if (file == null || !file.Exists)
|
||||
return null;
|
||||
|
||||
return await ReadFileContent(file);
|
||||
});
|
||||
var allFinished = await Task.WhenAll(result);
|
||||
return string.Join("\n", allFinished);
|
||||
});
|
||||
|
||||
if (fileContent == null)
|
||||
{
|
||||
output.SuppressOutput();
|
||||
return;
|
||||
}
|
||||
|
||||
output.TagName = "style";
|
||||
output.Attributes.RemoveAll("href");
|
||||
output.Content.AppendHtml(fileContent);
|
||||
}
|
||||
|
||||
private static async Task<string> ReadFileContent(IFileInfo file)
|
||||
{
|
||||
using (var stream = file.CreateReadStream())
|
||||
using (var textReader = new StreamReader(stream))
|
||||
{
|
||||
return await textReader.ReadToEndAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```cshtml
|
||||
@if(amp)
|
||||
{
|
||||
<inline-style amp-custom href="css/site.css,css/site.desktop.css,css/site.mobile.css"></inline-style>
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
<link asp-append-version="true" rel="stylesheet" href="~/css/site.css" />
|
||||
<link asp-append-version="true" rel="stylesheet" href="~/css/site.mobile.css" />
|
||||
}
|
||||
```
|
||||
|
||||
### Javascript
|
||||
|
||||
AMP, [does allow for using JS](https://amp.dev/documentation/components/amp-script/) in a web worker. This has a lot of caveats, and for my use-case (this blog) it was better to just skip rendering any JS. I guarded the `RenderSection` call for the scripts section behind an `if(amp)` statement.
|
||||
|
||||
```cshtml
|
||||
@if(!amp)
|
||||
{
|
||||
@RenderSection("Scripts", required: false)
|
||||
|
||||
<script asp-append-version="true" src="~/your/script.js" async></script>
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Link tags
|
||||
|
||||
On pages that render AMP, you'll need to generate 2 link tags. The first is a canonical tag that tells Google what the canonical URL of the page is. The second tells Google where the AMP version of that URL lives. This is data you typically want to pass to the Model of the view you are rendering. Add these tags to the head of the layout through a section.
|
||||
|
||||
```cs
|
||||
@section Head {
|
||||
<link rel="canonical" href="@Model.Post.CanonicalUrl" />
|
||||
<link rel="amphtml" href="@Model.Post.AMPUrl">
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Routes
|
||||
|
||||
In my implementation I added `/amp` to the end of my URLs for amp. Then in the controller you can set `this.ViewData["amp"] = amp == "amp";` the view data for the page to be an amp page or not. If you would prefer, you can set the boolean with a view model, it would also work very well.
|
||||
|
||||
|
||||
```csharp
|
||||
[Route("{postUrl}/{amp?}")]
|
||||
public IActionResult Post(string postUrl, string amp = "")
|
||||
{
|
||||
if(!String.IsNullOrEmpty(amp) && amp != "amp")
|
||||
{
|
||||
// handle 404s
|
||||
return NotFound();
|
||||
}
|
||||
ViewData["amp"] = amp == "amp";
|
||||
return View(new ViewModel());
|
||||
}
|
||||
```
|
||||
|
||||
## Google Analytics
|
||||
|
||||
There is a snippet of code that makes GA work in an AMP page. I made the following partial view that I call from the layout page.
|
||||
|
||||
```cshtml
|
||||
@{
|
||||
Layout = null;
|
||||
var amp = ViewData["amp"] as bool? ?? false;
|
||||
}
|
||||
|
||||
@if(!amp)
|
||||
{
|
||||
<script>
|
||||
window.dataLayer = window.dataLayer || [];
|
||||
function gtag() { dataLayer.push(arguments); }
|
||||
gtag('js', new Date());
|
||||
gtag('config', 'GTAG_ID');
|
||||
document.addEventListener('DOMContentLoaded', function () {
|
||||
var script = document.createElement('script');
|
||||
script.src = 'https://www.googletagmanager.com/gtag/js?id=GTAG_ID';
|
||||
script.async = true
|
||||
document.body.appendChild(script);
|
||||
});
|
||||
</script>
|
||||
}
|
||||
else
|
||||
{
|
||||
<amp-analytics type="gtag" data-credentials="include">
|
||||
<script type="application/json">
|
||||
{
|
||||
"vars" : {
|
||||
"gtag_id": "GTAG_ID",
|
||||
"config" : {
|
||||
"GTAG_ID": { "GTAG_ID": "default" }
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</amp-analytics>
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## So what's next?
|
||||
|
||||
Go through your pages and look at the violations in the Chrome Extension. If you push the pages live and register them in your sitemap, errors with AMP pages will appear in [the Google Search Console](https://search.google.com/search-console/about) as Google indexes your AMP pages.
|
||||
|
||||
## I need more help!
|
||||
|
||||
You can look at [my implementation](https://github.com/TerribleDev/blog.terrible.dev/commit/83eb1bc565dfb4bdb38d3c5f0cbfbc21b05ad4b2).
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
title: Speeding up CraftCMS on Heroku
|
||||
date: 2022-04-13 07:55
|
||||
tags:
|
||||
- nginx
|
||||
- craftcms
|
||||
- craft
|
||||
---
|
||||
|
||||
So, I previously [blogged about how we hosted CraftCMS](/Hosting-craft-on-heroku/) on Heroku. When we built the marketing site for [Quala](https://www.quala.io) the twig templates were built for maximum authoring flexibility at the cost of some TTFB problems. We knew this going into the project. In an ideal world we would use GatsbyJS to build the frontend, but we were very limited in time. When we went live, we saw a dramatic improvement to First Contentful Paint, but a huge regression in Time To First Byte, averaging 1.3 seconds.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
The initial thinking that was bounced around was *we just need caching* as our previous wordpress site had cached all renderings in memory. However, we wanted to start rendering CSRF tokens to the browser, and collecting form data. Furthermore, I struggled to come to terms with this being a solution. Simply put, I'm not a fan of PHP, and I know that the Yii framework is known to be slow even in the PHP community, but I couldn't believe that it should be *that* slow. We did sprinkle some cache tags around our twig templates, and it did improve things, but not enough to brag about. So I started digging into the docs of Heroku, Nginx, and FastCGI.
|
||||
|
||||
## Heroku's buildpack
|
||||
|
||||
So [Heroku's buildpack docs](https://devcenter.heroku.com/articles/php-support#php-fpm-configuration) contains a lot of very good information. Props to them for docs! I ran into this one quote.
|
||||
|
||||
> PHP-FPM is set up to automatically spawn a suitable number of worker processes depending on dyno size and the configured PHP memory_limit
|
||||
|
||||
This made me go look at another article I found by them regarding [php concurrency](https://devcenter.heroku.com/articles/php-concurrency). This article boils down to, different dynos have different memory limits. They allocate 128mb to a php process and divide that by the total memory on a machine and that is used to determine how many threads to have on a single dyno. They also look for a `.user.ini` file if you want to override the memory defaults. So first I realized our `.user.ini` file had specified `memory_limit = 256M` which was causing us to have half as many processes per dyno, so I set this back to 128. Ok great, this did improve things a little. I then read that you could override the concurrency default behavior by setting the environment variable `WEB_CONCURRENCY` to be whatever you wanted. This did come with a warning.
|
||||
|
||||
> When setting WEB_CONCURRENCY manually, make sure that its value multiplied by your memory_limit does not exceed the amount of RAM available on your dyno type.
|
||||
|
||||
Now I started doing some load testing of my own, and while it would oversubscribe the dyno, I set us to 10 processes on a 2x dyno. Theoretically this could cause us to OOM, but with some basic load testing it didn't seem like that would happen. This gave us some boost, but not as much as we hoped. I was still very stuck, and I had a suspicion that maybe there was some problem between PHP and Nginx which was slowing things down. I used the craft diagnostic tools, and I couldn't find more than 400ms being spent in sql queries, which didn't account for the almost 1 second page load I still had.
|
||||
|
||||
## Nginx configs
|
||||
|
||||
Ok, so I started looking around, and I found a [TON of great CraftCMS content by nystudio107](https://nystudio107.com/). I don't quite remember which article, but I stumbled across several that implied I needed better fastcgi settings in Nginx. So, I [forked the heroku buildpack](https://github.com/qualaio/heroku-buildpack-php) and got to work. I ended up making these settings.
|
||||
|
||||
```nginx
|
||||
fastcgi_buffers 256 16k;
|
||||
fastcgi_buffer_size 128k;
|
||||
fastcgi_connect_timeout 10s;
|
||||
fastcgi_send_timeout 120s;
|
||||
fastcgi_read_timeout 120s;
|
||||
fastcgi_busy_buffers_size 256k;
|
||||
fastcgi_temp_file_write_size 256k;
|
||||
reset_timedout_connection on;
|
||||
```
|
||||
|
||||
## Brotli
|
||||
|
||||
While I was in the config, I decided *what the hell, let's get brotli working*. [Brotli](https://github.com/google/brotli) is a compression format that is more compact than gzip. Over the wire, assets are usually 5-10% smaller than gzipped. So, sending brotli if the browser supports it is a big win. Turns out there is an [issue filed in 2019 with heroku](https://github.com/heroku/heroku-buildpack-php/issues/356) to add it, but it's not gone anywhere. Ultimately, I found someone else [figured out how to add it](https://github.com/seyallin/heroku-brotli-nginx). I made some changes and added it to our fork. You can view all of our changes in [github's compare view](https://github.com/heroku/heroku-buildpack-php/compare/main...qualaio:main#diff-ff7b43f722c67a80d4c82bf656918b3bf96f553a5ad1f62ef185dff16582f033R24-R31).
|
||||
|
||||
## Results
|
||||
|
||||
So the result was a **huge** drop in TTFB, which overall improved our lighthouse score by 30 points. The other thing that's great is we're moderately fast without caches, which means caches can only improve the situation further.
|
||||
|
||||

|
||||
@@ -16,8 +16,7 @@ Today marks the release of Visual Studio 2017, and with it the final release of
|
||||
|
||||
So I bet you are wondering, how is VS2017 improved. When you first boot the vs2017 installer you are immediately hit with a very sleek UI for the installer. The installer actually has reasonable install sizes for scenarios like nodejs only.
|
||||
|
||||
|
||||
{% image "fancybox" vs.PNG "vs 2017 installer" %}
|
||||

|
||||
|
||||
VS2017 can understand which lines of code are linked to your unit tests. As you alter, or refactor code VS can run the tests. This can allow the editor to show checkmarks or red `x`'s This is huge as it can seemingly provide constant feedback to developers during development.
|
||||
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
title: Must have vscode plugins for front-end devs
|
||||
date: 2019-02-06
|
||||
tags:
|
||||
- visual studio
|
||||
- javascript
|
||||
- css
|
||||
- front-end
|
||||
---
|
||||
|
||||
I've had a lot of people ask me about my choice of editors, and plugins. A while back I switched to vscode for all my programming work, for both front and back end. In the past I've blogged about [the best plugins for visual studio](/VS-2017-best-extensions-on-launch/) as a backend dev, but I thought I'd give you a more front-end angle
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Document this
|
||||
|
||||
My first one, and in my opinion the most underrated, is [document this](https://marketplace.visualstudio.com/items?itemName=joelday.docthis). So if you have ever had to write [jsdoc](http://usejsdoc.org/) comments you know how tedious it gets, and if you haven't, trust me you should. VSCode and most other editors can read [jsdoc](http://usejsdoc.org/) comments above functions and class declarations to improve the intellisense and type completion statements. Simply have your cursor over a function, invoke document this, and you will quickly be given jsdoc comments for your code.
|
||||
|
||||

|
||||
|
||||
|
||||
## Import Cost
|
||||
|
||||
Another extension I find vital to my every day is [import cost](https://marketplace.visualstudio.com/items?itemName=wix.vscode-import-cost). This is a package, that leaves you little notes on the side of any import you have as to how big it will be. This package will even highlight the size text in red for large imports which you can configure. What I love about this package, is it tells me if the package I'm about to use is going to be very expensive size wise. That way I find out long before I commit the code, and my pages get slow.
|
||||
|
||||

|
||||
|
||||
## ESlint and Prettier
|
||||
|
||||
Hopefully both of these will not be new to you. ESLint is a linting tool that looks for potential errors in your code. Prettier is an opinionated style enforcer for your code. The [eslint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) and [prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) extensions for vscode can automatically show you problems in your code as you type, and can even fix your code on save. What I love about both of these tools, is together they make a great force for improving your code base. Prettier eliminates many debates over code style between team members, and eslint prevents you from shipping many bugs to production. These extensions can call out problems as you type, which decreases the feedback loops, and increases your productivity.
|
||||
|
||||
|
||||
|
||||
|
||||
## Filesize
|
||||
|
||||
As a web developer I spend a lot of my time looking at file size. Right now file sizes are ever inflating, and are causing pain for bandwidth constrained devices. I often download bundles, and inspect their compiled source, or just have to look at how big a file is on the filesystem. A big tool I have in my belt is [filesize](https://marketplace.visualstudio.com/items?itemName=mkxml.vscode-filesize). This is a crazy simple extension, but one that brings me joy everyday. The premise is simple, print the file size of the current file in the status bar at the bottom. Click on it, and you get a nice output of what its like gzipped, and the mime type. Dirt simple, but saved me a ton of time everyday!
|
||||
|
||||

|
||||
|
||||
|
||||
## Runner ups
|
||||
|
||||
Here is a list of additional extensions I certainly couldn't live without
|
||||
|
||||
* [path intellisense](https://marketplace.visualstudio.com/items?itemName=christian-kohler.path-intellisense) - autocomplete file paths in various files (including html)
|
||||
* [npm intellisense](https://marketplace.visualstudio.com/items?itemName=christian-kohler.npm-intellisense) - autocomplete npm pages in imports
|
||||
* [html 5 boilerplate](https://marketplace.visualstudio.com/items?itemName=sidthesloth.html5-boilerplate) - dirt simple html boilerplate snippets
|
||||
* [icon fonts](https://marketplace.visualstudio.com/items?itemName=idleberg.icon-fonts) - Autocomplete for various icon fonts such as font awesome
|
||||
* [git lens](https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens) - Show git history inline, along with other information from git
|
||||
@@ -1,21 +1,24 @@
|
||||
title: The battle of the buldge. Visualizing your javascript bundle
|
||||
title: The battle of the bulge. Visualizing your javascript bundle
|
||||
date: 2018-10-17 13:19:18
|
||||
tags:
|
||||
- javascript
|
||||
- battle of the bulge
|
||||
- performance
|
||||
---
|
||||
|
||||
So in case you haven't been following me, I joined CarGurus in July. At CarGurus we're currently working on our mobile web experience written in react, redux and reselect. As our implementation grew, so did our time to first paint.
|
||||
|
||||
<!-- more -->
|
||||
|
||||
So I've been spending a lot of time working on our performance. One tool I have found invaluable in the quest for page perf mecca is [source-map-explorer](https://www.npmjs.com/package/source-map-explorer). This is a tool that dives into a bundled file, and its map. Then visualizes the bundle in a tree view. This view lets you easily understand exactly what is taking up space in the bundle. What I love about this tool is that it works with any type of bundled javascript file, and is completely seperate of the build. So any bugs in webpack where you have duplicate files in a bundle will appear here.
|
||||
So I've been spending a lot of time working on our performance. One tool I have found invaluable in the quest for page perf mecca is [source-map-explorer](https://www.npmjs.com/package/source-map-explorer). This is a tool that dives into a bundled file, and its map. Then visualizes the bundle in a tree view. This view lets you easily understand exactly what is taking up space in the bundle. What I love about this tool is that it works with any type of bundled javascript file, and is completely de-void of any builds. So any bugs in your webpack config leading to duplicate files in a bundle will show up here.
|
||||
|
||||
|
||||
## Getting started
|
||||
|
||||
You get started by `npm install -g source-map-explorer` then just download your bundles, and sourcemaps. In the command line run `source-map-explorer ./yourbundle.js ./yourbundlemap.js` Your browser should then open with a great tree view of what is inside your bundle. From here you can look to see what dependencies you have, and their sizes. Obviously, you can then decide to keep or throw them away.
|
||||
You get started by `npm install -g source-map-explorer` then just download your bundles, and sourcemaps. You can do this from production if you have them. Otherwise build bundles locally. **Note** You should always use this on minified code where any tree shaking and dead code elimination has occurred. In the command line run `source-map-explorer ./yourbundle.js ./yourbundle.js.map` Your browser should then open with a great tree view of what is inside your bundle. From here you can look to see what dependencies you have, and their sizes. Obviously, you can then decide to keep or throw them away.
|
||||
|
||||

|
||||
|
||||
Here is a great youtube video explaining it in detail!
|
||||
|
||||
|
||||
{% youtube 7aY9BoMEpG8 %}
|
||||

|
||||
@@ -0,0 +1,94 @@
|
||||
title: 'Measuring, Visualizing and Debugging your React Redux Reselect performance bottlenecks'
|
||||
date: 2019-01-14 22:04:56
|
||||
tags:
|
||||
- battle of the bulge
|
||||
- javascript
|
||||
- performance
|
||||
---
|
||||
|
||||
In the battle of performance one tool constantly reigns supreme, the all powerful profiler! In javascript land chrome has a pretty awesome profiler, but every time I looked into our react perf issues I was always hit by a slow function called `anonymous function`
|
||||
|
||||
<!-- more -->
|
||||
|
||||
## Using the chrome profiler
|
||||
|
||||
So if you open the chrome devtools, you will see a tab called `performance`. Click on that tab. If you are looking into CPU bound workloads click the CPU dropdown and set yourself to 6x slowdown, which will emulate a device that is much slower.
|
||||
|
||||

|
||||
|
||||
Press the record button, click around on your page, then click the record button again. You are now hit with a timeline of your app, and what scripts were run during this time.
|
||||
|
||||
So what I personally like to do is find orange bars that often make up the bulk of the time. However I've often noticed the bulk of bigger redux apps are taken up by `anonymous functions` or functions that essentially have no name. They often look like this `() => {}`. This is largely because they are inside of [reselect selectors](https://github.com/reduxjs/reselect). In case you are unfamiliar, selectors are functions that cache computations off the redux store. Back to the chrome profiler. One thing you can do is use the `window.performance` namespace to measure and record performance metrics into the browser. If you expand the `user timings section` in the chrome profiler you may find that react in dev mode has included some visualizations for how long components take to render.
|
||||
|
||||

|
||||
|
||||
## Adding your own visualizations
|
||||
|
||||
So digging into other blog posts, I found posts showing how to [visualize your redux actions](https://medium.com/@vcarl/performance-profiling-a-redux-app-c85e67bf84ae) using the same performance API mechanisms react uses. That blog post uses redux middleware to add timings to actions. This narrowed down on our performance problems, but did not point out the exact selector that was slow. Clearly we had an action that was triggering an expensive state update, but the time was still spent in `anonymous function`. Thats when I had the idea to wrap reselect selector functions in a function that can append the timings. [This gist is what I came up with](https://gist.github.com/TerribleDev/db48b2c8e143f9364292161346877f93)
|
||||
|
||||
```js
|
||||
|
||||
import {createSelector} from 'reselect';
|
||||
|
||||
// True when the pieces of the User Timing API we rely on (mark + measure)
// are available. The `typeof` guard avoids a ReferenceError in non-browser
// environments (SSR, unit tests) where `window` is not defined at all —
// a bare `window &&` would throw at module load time there.
const hasPerformanceApi =
  typeof window !== 'undefined' &&
  window.performance &&
  window.performance.measure &&
  window.performance.mark;
|
||||
|
||||
// Wraps `callback` so each invocation is timed with the User Timing API.
// The resulting measure ("♻️ <name>-Selector") shows up in the chrome
// profiler's User Timings lane. Marks and the measure are cleared after
// each call so repeated invocations don't grow the performance buffer.
const createFuncWithMark = (name, callback) => (...args) => {
  const startMark = `${name}-Startmark`;
  const endMark = `${name}-EndMark`;
  const measureName = '♻️ ' + `${name}-Selector`;
  window.performance.mark(startMark);
  const result = callback(...args);
  window.performance.mark(endMark);
  window.performance.measure(measureName, startMark, endMark);
  window.performance.clearMarks(startMark);
  window.performance.clearMarks(endMark);
  // BUG FIX: the measure is recorded under `measureName`, not under the
  // mark names, so the previous clearMeasures(startMark)/clearMeasures(endMark)
  // calls were no-ops and measure entries accumulated forever.
  window.performance.clearMeasures(measureName);
  return result;
};
|
||||
|
||||
// Drop-in replacement for reselect's createSelector that also records a
// User Timing measure for every invocation of the result function.
// Usage: createMarkedSelector('mySelector', input1, input2, resultFunc)
export const createMarkedSelector = (name, ...args) => {
  // Validate the name up front — previously this check only ran when the
  // Performance API existed, so a missing name went unnoticed in any
  // environment without performance.mark/measure.
  if (!name || typeof name !== 'string') {
    throw new Error('marked selectors must have names');
  }
  if (!hasPerformanceApi) {
    // No timing support: behave exactly like a plain createSelector.
    return createSelector(...args);
  }
  // The last argument is the result function; wrap it with the timing
  // instrumentation and hand everything back to reselect unchanged.
  const resultFunc = args.pop();
  args.push(createFuncWithMark(name, resultFunc));
  return createSelector(...args);
};
|
||||
|
||||
```
|
||||
|
||||
|
||||
So how does this work exactly? Well its a library that wraps the function you pass to reselect that adds markers to the window to tell you how long reselect selectors take to run. Combined with the previously mentioned blog post, you can now get timings in chrome's performance tool with selectors! You can also combine this with the [redux middleware](https://medium.com/@vcarl/performance-profiling-a-redux-app-c85e67bf84ae) I previously mentioned to get a deeper insight into how your app is performing.
|
||||
|
||||

|
||||
|
||||
## So how do I use your gist?
|
||||
|
||||
You can copy the code into a file of your own. If you use reselect you probably have code that looks like the following.
|
||||
|
||||
```js
|
||||
export const computeSomething = createSelector([getState], (state) => { /* compute projection */ });
|
||||
```
|
||||
|
||||
You just need to replace the above with the following
|
||||
|
||||
```js
|
||||
export const computeSomething = createMarkedSelector('computeSomething', [getState], (state) => { /* compute projection */ });
|
||||
```
|
||||
|
||||
its pretty simple, it just requires you to pass a string in the first argument slot. That string will be the name used to write to the performance API, and will show up in the chrome profiler. Inside vscode you can even do a regex find and replace to add this string.
|
||||
|
||||
|
||||
```
|
||||
find: const(\s?)(\w*)(\s?)=(\s)createSelector\(
|
||||
|
||||
replace: const$1$2$3=$4createMarkedSelector('$2',
|
||||
```
|
||||
14
src/TerribleDev.Blog.Web/Posts/about.md
Normal file
14
src/TerribleDev.Blog.Web/Posts/about.md
Normal file
@@ -0,0 +1,14 @@
|
||||
title: About
|
||||
date: 2022-03-08 01:03
|
||||
isLanding: true
|
||||
permalink: about
|
||||
---
|
||||
|
||||
|
||||
I am a software engineer. I currently work at [Quala](https://www.quala.io). I have worked on all areas of the stack: as a sysadmin, network engineer, backend developer, and frontend developer. I've helped build some extremely large scale websites such as [Vistaprint](https://www.vistaprint.com) and [CarGurus](https://www.cargurus.com). I have a passion for high performing software, devops, and front end. I am a huge fan of [JavaScript](https://en.wikipedia.org/wiki/JavaScript), [C#](https://en.wikipedia.org/wiki/C_Sharp), [Golang](https://en.wikipedia.org/wiki/Go_(programming_language)), and [Rust](https://en.wikipedia.org/wiki/Rust_(programming_language)).
|
||||
|
||||
I blog about my general pains building software.
|
||||
|
||||
## Why Terrible Dev?
|
||||
|
||||
Honestly, I was a network engineer, and I worked with many developers. They'd often blame bugs on the network, or the database. I heard a lot of *it works on my machine*. I started the [TerribleDev](https://www.twitter.com/terribledev) twitter handle where I posted some things developers said. Then when I became a developer, I figured I'd just make it my handle. These days, I'm now blaming the network 🤣.
|
||||
@@ -43,7 +43,7 @@ Essentially I add the routing package to the container, and then have have the a
|
||||
foreach(var route in Routes.RoutesDictionary)
|
||||
{
|
||||
a.MapGet("docker101", handler: async b=>{
|
||||
b.Response.Redirect("https://blog.terribledev.io/Getting-started-with-docker-containers/", true);
|
||||
b.Response.Redirect("https://blog.terrible.dev/Getting-started-with-docker-containers/", true);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@@ -17,12 +17,18 @@ namespace TerribleDev.Blog.Web
|
||||
CreateWebHostBuilder(args).Build().Run();
|
||||
}
|
||||
|
||||
public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
|
||||
WebHost.CreateDefaultBuilder(args)
|
||||
public static IWebHostBuilder CreateWebHostBuilder(string[] args) {
|
||||
var builder = WebHost.CreateDefaultBuilder(args)
|
||||
.UseStartup<Startup>()
|
||||
.ConfigureKestrel(a =>
|
||||
{
|
||||
a.AddServerHeader = false;
|
||||
});
|
||||
var port = Environment.GetEnvironmentVariable("PORT");
|
||||
if(!String.IsNullOrWhiteSpace(port)) {
|
||||
builder.UseUrls("http://*:" + port);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,50 +1,128 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.HttpsPolicy;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Rewrite;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.FileProviders;
|
||||
using Microsoft.Net.Http.Headers;
|
||||
using HardHat.Middlewares;
|
||||
using HardHat;
|
||||
using TerribleDev.Blog.Web.Models;
|
||||
using TerribleDev.Blog.Web.Factories;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using WebMarkupMin.AspNetCore7;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using TerribleDev.Blog.Web.Filters;
|
||||
|
||||
namespace TerribleDev.Blog.Web
|
||||
{
|
||||
public class Startup
|
||||
{
|
||||
public Startup(IConfiguration configuration)
|
||||
public Startup(IConfiguration configuration, IWebHostEnvironment env)
|
||||
{
|
||||
Configuration = configuration;
|
||||
Env = env;
|
||||
}
|
||||
|
||||
public IConfiguration Configuration { get; }
|
||||
public IWebHostEnvironment Env { get; }
|
||||
|
||||
// This method gets called by the runtime. Use this method to add services to the container.
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddResponseCompression(a =>
|
||||
{
|
||||
var blogConfiguration = new BlogConfiguration() {
|
||||
Link = "https://blog.terrible.dev",
|
||||
Title = "The Ramblings of TerribleDev"
|
||||
};
|
||||
// Func<BlogConfiguration> getBlog = () => Configuration.GetSection("Blog").Get<BlogConfiguration>();
|
||||
if (Env.IsDevelopment())
|
||||
{
|
||||
services.AddTransient(a => blogConfiguration);
|
||||
}
|
||||
else
|
||||
{
|
||||
services.AddSingleton(blogConfiguration);
|
||||
}
|
||||
// enable logging
|
||||
services.AddLogging();
|
||||
services.AddSingleton((i) => {
|
||||
var posts = new BlogFactory().GetAllPostsAsync(Env.IsDevelopment() ? "https://localhost:5001": "https://blog.terrible.dev").Result;
|
||||
var postCache = BlogCacheFactory.ProjectPostCache(posts);
|
||||
if(Env.IsProduction()) {
|
||||
foreach(var post in postCache.PostsAsLists)
|
||||
{
|
||||
// if we are in production turn off lazy loading
|
||||
var value = post.Content;
|
||||
}
|
||||
}
|
||||
return postCache;
|
||||
});
|
||||
var controllerBuilder = services.AddControllersWithViews(a => {
|
||||
a.Filters.Add(new StaticETag());
|
||||
});
|
||||
#if DEBUG
|
||||
if (Env.IsDevelopment())
|
||||
{
|
||||
controllerBuilder.AddRazorRuntimeCompilation();
|
||||
}
|
||||
#endif
|
||||
services
|
||||
.AddResponseCompression(a =>
|
||||
{
|
||||
a.EnableForHttps = true;
|
||||
|
||||
})
|
||||
.AddMemoryCache()
|
||||
.AddMvcCore()
|
||||
.AddCacheTagHelper()
|
||||
.AddRazorViewEngine()
|
||||
.SetCompatibilityVersion(CompatibilityVersion.Version_2_2);
|
||||
services.AddOutputCaching();
|
||||
.AddResponseCaching()
|
||||
.AddMemoryCache();
|
||||
// if(Env.IsProduction())
|
||||
// {
|
||||
|
||||
// }
|
||||
services.AddOutputCache(a =>{
|
||||
a.AddBasePolicy(b => {
|
||||
b.Cache();
|
||||
});
|
||||
});
|
||||
services.AddWebMarkupMin(a => {
|
||||
a.AllowMinificationInDevelopmentEnvironment = true;
|
||||
a.DisablePoweredByHttpHeaders = true;
|
||||
})
|
||||
.AddHtmlMinification()
|
||||
.AddXmlMinification();
|
||||
}
|
||||
|
||||
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
|
||||
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
|
||||
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
|
||||
{
|
||||
Console.WriteLine("ETag Detected As: " + StaticETag.staticEtag);
|
||||
|
||||
app.UseHttpsRedirection();
|
||||
if (env.IsProduction())
|
||||
{
|
||||
app.UseOutputCache();
|
||||
app.UseResponseCaching();
|
||||
}
|
||||
app.UseResponseCompression();
|
||||
var cacheTime = env.IsDevelopment() ? 1 : 31536000;
|
||||
app.UseStaticFiles(new StaticFileOptions
|
||||
{
|
||||
OnPrepareResponse = ctx =>
|
||||
{
|
||||
ctx.Context.Response.Headers[HeaderNames.CacheControl] =
|
||||
"public,max-age=" + cacheTime;
|
||||
}
|
||||
});
|
||||
app.UseStaticFiles(new StaticFileOptions
|
||||
{
|
||||
FileProvider = new PhysicalFileProvider(Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "img")),
|
||||
OnPrepareResponse = ctx =>
|
||||
{
|
||||
ctx.Context.Response.Headers[HeaderNames.CacheControl] =
|
||||
"public,max-age=" + cacheTime;
|
||||
}
|
||||
});
|
||||
if (env.IsDevelopment())
|
||||
{
|
||||
app.UseDeveloperExceptionPage();
|
||||
@@ -52,38 +130,37 @@ namespace TerribleDev.Blog.Web
|
||||
else
|
||||
{
|
||||
app.UseExceptionHandler("/Error");
|
||||
// The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts.
|
||||
app.UseHsts(TimeSpan.FromDays(30), false, preload: true);
|
||||
|
||||
}
|
||||
|
||||
app.UseRewriter(new Microsoft.AspNetCore.Rewrite.RewriteOptions().AddRedirect("(.*[^/|.xml|.html])$", "$1/", 301));
|
||||
app.UseIENoOpen();
|
||||
app.UseNoMimeSniff();
|
||||
app.UseCrossSiteScriptingFilters();
|
||||
app.UseFrameGuard(new FrameGuardOptions(FrameGuardOptions.FrameGuard.SAMEORIGIN));
|
||||
app.UseHttpsRedirection();
|
||||
app.UseResponseCompression();
|
||||
|
||||
var cacheTime = env.IsDevelopment() ? 0 : 31536000;
|
||||
app.UseStaticFiles(new StaticFileOptions
|
||||
app.UseHsts(TimeSpan.FromDays(365), false, preload: true);
|
||||
app.UseContentSecurityPolicy(
|
||||
new ContentSecurityPolicy()
|
||||
{
|
||||
// DefaultSrc = new HashSet<string>() {
|
||||
// CSPConstants.Self, "https://www.google-analytics.com", "https://www.googletagmanager.com", "https://stats.g.doubleclick.net"
|
||||
// },
|
||||
// ScriptSrc = new HashSet<string>()
|
||||
// {
|
||||
// CSPConstants.Self, CSPConstants.UnsafeInline, "https://www.google-analytics.com", "https://www.googletagmanager.com", "https://stats.g.doubleclick.net"
|
||||
// },
|
||||
// StyleSrc = new HashSet<string>()
|
||||
// {
|
||||
// CSPConstants.Self, CSPConstants.UnsafeInline
|
||||
// },
|
||||
UpgradeInsecureRequests = true
|
||||
});
|
||||
app.UseWebMarkupMin();
|
||||
app.UseRouting();
|
||||
app.UseEndpoints(endpoints =>
|
||||
{
|
||||
OnPrepareResponse = ctx =>
|
||||
{
|
||||
ctx.Context.Response.Headers[HeaderNames.CacheControl] =
|
||||
"public,max-age=" + cacheTime;
|
||||
}
|
||||
endpoints.MapControllers();
|
||||
});
|
||||
app.UseStaticFiles(new StaticFileOptions
|
||||
{
|
||||
FileProvider = new PhysicalFileProvider(Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "img")),
|
||||
OnPrepareResponse = ctx =>
|
||||
{
|
||||
ctx.Context.Response.Headers[HeaderNames.CacheControl] =
|
||||
"public,max-age=" + cacheTime;
|
||||
}
|
||||
});
|
||||
app.UseRewriter(new Microsoft.AspNetCore.Rewrite.RewriteOptions().AddRedirect("(.*[^/|.xml|.html])$", "$1/", 301));
|
||||
app.UseOutputCaching();
|
||||
app.UseMvc();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc.Rendering;
|
||||
using Microsoft.AspNetCore.Mvc.ViewFeatures;
|
||||
using Microsoft.AspNetCore.Razor.TagHelpers;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    /// <summary>
    /// Base class for tag helpers that render their children only on a
    /// particular platform (mobile vs. desktop), detected from the request's
    /// User-Agent header. The wrapper element itself is never emitted.
    /// </summary>
    public abstract class AbstractPlatformTagHelper : TagHelper
    {
        static Regex MobileCheck = new Regex(@"(?:phone|windows\s+phone|ipod|blackberry|(?:android|bb\d+|meego|silk|googlebot) .+? mobile|palm|windows\s+ce|opera\ mini|avantgo|mobilesafari|docomo|ipad)", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.ECMAScript);
        // Cache of user agent -> detected platform so the regex runs at most
        // once per distinct User-Agent string.
        static ConcurrentDictionary<string, Platform> CachedChecks = new ConcurrentDictionary<string, Platform>();
        protected HttpRequest Request => ViewContext.HttpContext.Request;
        protected HttpResponse Response => ViewContext.HttpContext.Response;

        [ViewContext]
        public ViewContext ViewContext { get; set; }

        /// <summary>Decides whether this helper's children should render.</summary>
        protected abstract bool ShouldRender();

        /// <summary>
        /// Classifies the requesting platform from the User-Agent header.
        /// Returns Desktop when no User-Agent is present.
        /// </summary>
        public Platform GetPlatform()
        {
            var userAgent = this.Request.Headers.UserAgent;
            if (string.IsNullOrEmpty(userAgent))
            {
                return Platform.Desktop; // desktop is the default when we can't tell
            }
            if (CachedChecks.TryGetValue(userAgent, out var cacheResult))
            {
                return cacheResult;
            }
            var platform = AbstractPlatformTagHelper.MobileCheck.IsMatch(userAgent) ? Platform.Mobile : Platform.Desktop;
            // BUG FIX: the cache was consulted but never populated, so every
            // request re-ran the regex. Remember the result for next time.
            CachedChecks.TryAdd(userAgent, platform);
            return platform;
        }

        public override void Process(TagHelperContext context, TagHelperOutput output)
        {
            // Drop the wrapper element itself; only children are rendered.
            output.TagName = null;
            if (!this.ShouldRender())
            {
                output.SuppressOutput();
                return;
            }
        }
    }
}
|
||||
16
src/TerribleDev.Blog.Web/Taghelpers/Desktop.cs
Normal file
16
src/TerribleDev.Blog.Web/Taghelpers/Desktop.cs
Normal file
@@ -0,0 +1,16 @@
|
||||
using Microsoft.AspNetCore.Razor.TagHelpers;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    /// <summary>
    /// &lt;desktop&gt; element: renders its children only for requests whose
    /// User-Agent is classified as desktop by
    /// <see cref="AbstractPlatformTagHelper.GetPlatform"/>. The wrapper tag
    /// itself is removed from the output by the base class.
    /// </summary>
    [HtmlTargetElement("desktop", TagStructure = TagStructure.NormalOrSelfClosing)]
    public class DesktopTagHelper : AbstractPlatformTagHelper
    {
        // Render only when the detected platform is Desktop.
        protected override bool ShouldRender() => this.GetPlatform() == Platform.Desktop;
    }
}
|
||||
62
src/TerribleDev.Blog.Web/Taghelpers/Http2Push.cs
Normal file
62
src/TerribleDev.Blog.Web/Taghelpers/Http2Push.cs
Normal file
@@ -0,0 +1,62 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
using System.Text.Encodings.Web;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc.Razor.Infrastructure;
|
||||
using Microsoft.AspNetCore.Mvc.Rendering;
|
||||
using Microsoft.AspNetCore.Mvc.Routing;
|
||||
using Microsoft.AspNetCore.Mvc.TagHelpers;
|
||||
using Microsoft.AspNetCore.Mvc.ViewFeatures;
|
||||
using Microsoft.AspNetCore.Razor.TagHelpers;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    // Url plus the value intended for a preload "as" destination.
    public record PushUrl(string Url, string asProperty);
    /// <summary>
    /// Collects the URLs of link/img/script elements marked with a "push"
    /// attribute into HttpContext.Items (under <see cref="Key"/>) so that
    /// something later in the pipeline can act on them (e.g. emit Link
    /// headers). NOTE(review): the consumer of Items[Key] is not in this
    /// file — confirm how asProperty is used downstream.
    /// </summary>
    [HtmlTargetElement("link", Attributes = "[rel=stylesheet],href,push")]
    [HtmlTargetElement("img", Attributes = "src,push")]
    [HtmlTargetElement("script", Attributes = "src,push")]
    public class HttpPush : LinkTagHelper
    {
        // Programmatic kill switch; not settable from markup.
        [HtmlAttributeNotBound]
        public bool Http2PushEnabled { get; set; } = true;

        // HttpContext.Items key under which the List<PushUrl> is stored.
        public static readonly string Key = "http2push-link";

        public HttpPush(IWebHostEnvironment hostingEnvironment, TagHelperMemoryCacheProvider cacheProvider, IFileVersionProvider fileVersionProvider, HtmlEncoder htmlEncoder, JavaScriptEncoder javaScriptEncoder, IUrlHelperFactory urlHelperFactory) : base(hostingEnvironment, cacheProvider, fileVersionProvider, htmlEncoder, javaScriptEncoder, urlHelperFactory)
        {
        }

        // Maps a tag name to (attribute holding the URL, preload destination).
        // NOTE(review): for stylesheets the preload spec's "as" value is
        // "style", not "link" — verify against whatever consumes PushUrl.
        private (string Url, string AsProperty) GetTagInfo(string tag) =>
            tag switch
            {
                "link" => ("href", "link"),
                "img" => ("src", "image"),
                "script" => ("src", "script"),
                _ => (null, null)
            };

        public override void Process(TagHelperContext context, TagHelperOutput output)
        {
            if(!this.Http2PushEnabled)
            {
                return;
            }
            // The HtmlTargetElement filters above guarantee the URL attribute
            // exists, so indexing output.Attributes[urlAttribute] is safe here.
            var (urlAttribute, asProperty) = GetTagInfo(output.TagName);
            // Resolve app-relative URLs (~/...) where possible; otherwise use
            // the attribute value as written.
            var url = base.TryResolveUrl(output.Attributes[urlAttribute].Value.ToString(), out string resolvedUrl) ? resolvedUrl : output.Attributes[urlAttribute].Value.ToString();
            var linkList = ViewContext.HttpContext.Items.TryGetValue(Key, out var links) ? links as List<PushUrl> : null;

            if(linkList == null)
            {
                // First pushed asset on this request: create the list.
                linkList = new List<PushUrl>() { new PushUrl(url, asProperty) };
                ViewContext.HttpContext.Items.Add(HttpPush.Key, linkList);
            }
            else
            {
                linkList.Add(new PushUrl(url, asProperty));
            }
            // Strip the marker attribute so it doesn't reach the browser.
            output.Attributes.Remove(output.Attributes["push"]);
        }
    }
}
|
||||
79
src/TerribleDev.Blog.Web/Taghelpers/InlineCss.cs
Normal file
79
src/TerribleDev.Blog.Web/Taghelpers/InlineCss.cs
Normal file
@@ -0,0 +1,79 @@
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Razor.TagHelpers;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.FileProviders;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    /// <summary>
    /// Replaces an &lt;inline-style href="a.css,b.css" /&gt; element with a
    /// single &lt;style&gt; element containing the listed files' contents,
    /// read from wwwroot and cached in memory under the full Href string.
    /// </summary>
    [HtmlTargetElement("inline-style")]
    public class InlineStyleTagHelper : TagHelper
    {
        // Comma-separated list of wwwroot-relative stylesheet paths to inline.
        [HtmlAttributeName("href")]
        public string Href { get; set; }

        private IWebHostEnvironment HostingEnvironment { get; }
        private IMemoryCache Cache { get; }



        public InlineStyleTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
        {
            HostingEnvironment = hostingEnvironment;
            Cache = cache;
        }


        public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
        {
            var paths = Href.Split(',');

            // Get the value from the cache, or compute the value and add it to the cache
            var fileContent = await Cache.GetOrCreateAsync("InlineStyleTagHelper-" + Href, async entry =>
            {
                var fileProvider = HostingEnvironment.WebRootFileProvider;
                var result = paths.Select(async path => {
                    if(HostingEnvironment.IsDevelopment())
                    {
                        // In development, expire the cache entry when the
                        // underlying file changes on disk.
                        var changeToken = fileProvider.Watch(path);
                        entry.AddExpirationToken(changeToken);
                    }

                    entry.SetPriority(CacheItemPriority.NeverRemove);

                    var file = fileProvider.GetFileInfo(path);
                    if (file == null || !file.Exists)
                        return null;
                    // NOTE(review): a missing file yields a null element here,
                    // which string.Join below renders as an empty segment; the
                    // null check after the cache call only fires if the cached
                    // value itself is null — confirm this is intended.
                    return await ReadFileContent(file);
                });
                var allFinished = await Task.WhenAll(result);
                return string.Join("\n", allFinished);
            });

            if (fileContent == null)
            {
                output.SuppressOutput();
                return;
            }

            // Swap the custom element for a real <style> tag and drop the
            // href attribute so it doesn't leak into the rendered HTML.
            output.TagName = "style";
            output.Attributes.RemoveAll("href");
            output.Content.AppendHtml(fileContent);
        }

        // Reads a file's entire contents as text, disposing the stream promptly.
        private static async Task<string> ReadFileContent(IFileInfo file)
        {
            using (var stream = file.CreateReadStream())
            using (var textReader = new StreamReader(stream))
            {
                return await textReader.ReadToEndAsync();
            }
        }
    }
}
|
||||
79
src/TerribleDev.Blog.Web/Taghelpers/InlineJS.cs
Normal file
79
src/TerribleDev.Blog.Web/Taghelpers/InlineJS.cs
Normal file
@@ -0,0 +1,79 @@
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Razor.TagHelpers;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.FileProviders;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    /// <summary>
    /// Replaces an &lt;inline-script src="a.js,b.js" /&gt; element with a
    /// single &lt;script&gt; element containing the listed files' contents,
    /// read from wwwroot and cached in memory.
    /// </summary>
    [HtmlTargetElement("inline-script")]
    public class InlineScriptTagHelper : TagHelper
    {
        // Comma-separated list of wwwroot-relative script paths to inline.
        [HtmlAttributeName("src")]
        public string Src { get; set; }

        private IWebHostEnvironment HostingEnvironment { get; }
        private IMemoryCache Cache { get; }

        public InlineScriptTagHelper(IWebHostEnvironment hostingEnvironment, IMemoryCache cache)
        {
            HostingEnvironment = hostingEnvironment;
            Cache = cache;
        }

        public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
        {
            var paths = Src.Split(',');

            // BUG FIX: the cache key previously concatenated the string[]
            // itself, producing "InlineScriptTagHelper-System.String[]" for
            // every tag — so all inline-script elements on the site shared a
            // single cache entry. Key on the raw Src string instead.
            var fileContent = await Cache.GetOrCreateAsync("InlineScriptTagHelper-" + Src, async entry =>
            {
                var fileProvider = HostingEnvironment.WebRootFileProvider;
                var result = paths.Select(async path => {
                    if(HostingEnvironment.IsDevelopment())
                    {
                        // In development, expire the cache entry when the
                        // underlying file changes on disk.
                        var changeToken = fileProvider.Watch(path);
                        entry.AddExpirationToken(changeToken);
                    }

                    entry.SetPriority(CacheItemPriority.NeverRemove);

                    var file = fileProvider.GetFileInfo(path);
                    if (file == null || !file.Exists)
                        return null;

                    return await ReadFileContent(file);
                });
                var allFinished = await Task.WhenAll(result);
                return string.Join("\n", allFinished);
            });

            if (fileContent == null)
            {
                output.SuppressOutput();
                return;
            }

            output.TagName = "script";
            // BUG FIX: this helper's attribute is "src"; removing "href"
            // (copy/paste from the style helper) left the src attribute on
            // the rendered <script> element.
            output.Attributes.RemoveAll("src");
            output.Content.AppendHtml(fileContent);
        }

        // Reads a file's entire contents as text, disposing the stream promptly.
        private static async Task<string> ReadFileContent(IFileInfo file)
        {
            using (var stream = file.CreateReadStream())
            using (var textReader = new StreamReader(stream))
            {
                return await textReader.ReadToEndAsync();
            }
        }
    }
}
|
||||
@@ -8,34 +8,9 @@ using System.Threading.Tasks;
|
||||
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
|
||||
{
|
||||
[HtmlTargetElement("desktopOnly", TagStructure = TagStructure.NormalOrSelfClosing)]
|
||||
public class DesktopTagHelper : TagHelper
|
||||
[HtmlTargetElement("mobile", TagStructure = TagStructure.NormalOrSelfClosing)]
|
||||
public class MobileTagHelper : AbstractPlatformTagHelper
|
||||
{
|
||||
static Regex MobileCheck = new Regex(@"(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino", RegexOptions.IgnoreCase | RegexOptions.Multiline | RegexOptions.Compiled);
|
||||
static ConcurrentDictionary<string, bool> CachedChecks = new ConcurrentDictionary<string, bool>();
|
||||
public string UserAgent { get; set; }
|
||||
public override void Process(TagHelperContext context, TagHelperOutput output)
|
||||
{
|
||||
output.TagName = null;
|
||||
if (string.IsNullOrEmpty(UserAgent))
|
||||
{
|
||||
return;
|
||||
}
|
||||
var shouldRender = true;
|
||||
if(CachedChecks.TryGetValue(UserAgent, out var cacheResult))
|
||||
{
|
||||
shouldRender = cacheResult;
|
||||
}
|
||||
else
|
||||
{
|
||||
var isMobile = MobileCheck.IsMatch(UserAgent);
|
||||
shouldRender = !isMobile;
|
||||
CachedChecks.TryAdd(UserAgent, !isMobile);
|
||||
}
|
||||
if(!shouldRender)
|
||||
{
|
||||
output.SuppressOutput();
|
||||
}
|
||||
}
|
||||
protected override bool ShouldRender() => this.GetPlatform() == Platform.Mobile;
|
||||
}
|
||||
}
|
||||
|
||||
8
src/TerribleDev.Blog.Web/Taghelpers/Platforms.cs
Normal file
8
src/TerribleDev.Blog.Web/Taghelpers/Platforms.cs
Normal file
@@ -0,0 +1,8 @@
|
||||
namespace TerribleDev.Blog.Web.Taghelpers
{
    /// <summary>
    /// Platform buckets a request can be classified into, derived from the
    /// User-Agent header by <see cref="AbstractPlatformTagHelper.GetPlatform"/>.
    /// </summary>
    public enum Platform
    {
        Desktop,
        Mobile,
    }
}
|
||||
@@ -1,10 +1,10 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp2.2</TargetFramework>
|
||||
<TargetFramework>net7.0</TargetFramework>
|
||||
<AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel>
|
||||
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
|
||||
<UserSecretsId>9a1f51b6-f4d9-4df7-a0af-e345176e9927</UserSecretsId>
|
||||
<RuntimeIdentifiers>linux-musl-x64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
@@ -21,19 +21,18 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BuildBundlerMinifier" Version="2.8.391" />
|
||||
<PackageReference Include="Markdig" Version="0.15.7" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.App" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Razor.Design" Version="2.2.0" PrivateAssets="All" />
|
||||
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.0.2105168" />
|
||||
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="2.2.0" />
|
||||
<PackageReference Include="Schema.NET" Version="11.0.1" />
|
||||
<PackageReference Include="UriBuilder.Fluent" Version="1.5.2" />
|
||||
<PackageReference Include="WebMarkupMin.AspNetCore7" Version="2.13.0-rc1" />
|
||||
<PackageReference Include="YamlDotNet" Version="5.3.0" />
|
||||
<PackageReference Include="HardHat" Version="2.0.0" />
|
||||
<PackageReference Include="HardHat" Version="2.1.1" />
|
||||
<PackageReference Include="Microsoft.SyndicationFeed.ReaderWriter" Version="1.0.2" />
|
||||
<PackageReference Include="WebEssentials.AspNetCore.OutputCaching" Version="1.0.16" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Razor.RuntimeCompilation" Version="7.0.0" Condition="'$(Configuration)' == 'Debug'" />
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="Posts\*.md" CopyToOutputDirectory="Always" />
|
||||
<Watch Include="Posts\*.md" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@inject Microsoft.AspNetCore.Hosting.IHostingEnvironment env
|
||||
@inject Microsoft.AspNetCore.Hosting.IWebHostEnvironment env
|
||||
@{
|
||||
ViewData["Title"] = "Debug";
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
|
||||
@{
|
||||
ViewData["Title"] = "FourOhFour";
|
||||
ViewData["DisableHeader"] = true;
|
||||
}
|
||||
|
||||
<h1>Ruh Oh!</h1>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user