Mirror of https://github.com/RGBCube/Site, synced 2025-08-01 13:37:49 +00:00
Compare commits
12 commits: eedc332bbe ... 7827c47756

7827c47756
ccf7a1c422
d0d834836c
74f71612a1
e3f64ceae5
26e215f57e
1f0c8ba344
12f04bb8d5
93d82f7717
7f08bc8f86
fad1a04631
1e2d057c9e
14 changed files with 327 additions and 11 deletions
site.ts (28 changed lines)

@@ -50,7 +50,7 @@ site.preprocess([".html"], (pages) =>
 site.process([".html"], (pages) =>
   pages.forEach((page) => {
-    const document = page.document;
+    const { document } = page;

     document.querySelectorAll("table").forEach((element) => {
       const wrapper = document.createElement("div");

@@ -145,7 +145,6 @@ site.process([".html"], (pages) =>
     if (addFooter) {
       const hr = document.createElement("hr");
-      hr.classList.add("my-1.5");

       const li = document.createElement("li");
       li.id = footnoteId;

@@ -177,6 +176,31 @@ site.use(codeHighlight({
   },
 }));

+site.process([".html"], (pages) =>
+  pages.forEach((page) => {
+    const { document } = page;
+
+    document.querySelectorAll("pre code").forEach((code) => {
+      const matches = code.innerHTML.match(/\{\[\([^\)]+\)\]\}/g);
+      if (!matches) return;
+      console.log(matches);
+
+      let newHTML = code.innerHTML;
+
+      matches.forEach((match) => {
+        console.log(
+          `<span class="callout">${match.replaceAll(/[^\d]/g, "")}</span>`,
+        );
+        newHTML = newHTML.replace(
+          match,
+          `<span class="callout">${match.replaceAll(/[^\d]/g, "")}</span>`,
+        );
+      });
+
+      code.innerHTML = newHTML;
+    });
+  }));
+
 site.use(resolveUrls());
 site.use(slugifyUrls({
   extensions: "*",
@@ -24,7 +24,7 @@
 class="
   absolute inset-0
   p-4

   bg-[repeating-linear-gradient(-45deg,transparent_0rem,transparent_1rem,#ddd_1rem,#ddd_2rem)]
   dark:bg-[repeating-linear-gradient(-45deg,transparent_0rem,transparent_1rem,#222_1rem,#222_2rem)]
 "
@@ -21,8 +21,8 @@ title: A B O U T

 Hey.

-I'm a systems programmer that loves functional programming and is trying to
-apply ideals of it in certain areas. Currently I'm working on the
+I'm a systems programmer who loves functional programming and is trying to apply
+ideals of it in certain areas. Currently I'm working on the
 [Cab contextful-expression language](https://github.com/cull-os/carcass/tree/master/cab).

 I plan on creating a build system that uses Cab to build anything and
@@ -136,7 +136,7 @@ body {
 * {
   @apply wrap-break-word text-pretty;

-  &:not(ol, ul) {
+  &:not(ul) {
     @apply space-y-3;
   }
 }

@@ -249,6 +249,10 @@ body {
 ol {
   & li {
     counter-increment: item;
+
+    > * {
+      @apply inline;
+    }
   }

   & li::before {

@@ -280,6 +284,17 @@ body {
     dark:bg-[#111] dark:shadow-[4px_4px_#bbb];
 }

+.callout {
+  @apply px-2.5 py-1 select-none text-white bg-black dark:text-black
+    dark:bg-white;
+}
+
+.token-addition {
+  @apply text-[green] dark:text-[mediumspringgreen];
+}
+.token-deletion {
+  @apply text-[maroon] dark:text-[crimson];
+}
 .token-attr {
   @apply text-[darkblue] dark:text-[lightblue];
 }

@@ -292,6 +307,9 @@ body {
 .token-keyword {
   @apply text-[darkred] dark:text-[firebrick];
 }
+.token-meta {
+  @apply text-[darkmagenta] dark:text-[orchid];
+}
 .token-number {
   @apply text-[darkslateblue] dark:text-[mediumslateblue];
 }
BIN  site/assets/images/nginix-morbius.webp  (new file; binary not shown; 14 KiB)
BIN  site/assets/images/nixpkgs-nginx-etag.webp  (new file; binary not shown; 31 KiB)
@@ -18,7 +18,7 @@ Blog Articles

 <style>ul * { overflow-wrap:anywhere !important; }</style>
 <ul>
-{{ for entry of search.pages("url^=/blog/ unlisted!=true", "order=asc date=desc")}}
+{{ for entry of search.pages("url^=/blog/ url!=/blog/ unlisted!=true", "order=asc date=desc")}}
   <li class="flex">
     <a id="matrix" class="text-right font-mono" style="margin-right:calc(var(--spacing)*2)" href="{{ entry.url }}">
       {{ entry.date.toISOString().slice(2, 10).replaceAll("-", " ") }}
@@ -4,7 +4,7 @@ description: How the absolutely cursed HTMNIX project works.

 tags:
   - html
-  - nix
+  - nix/os
 ---

 So, you may have seen the [HTMNIX](https://github.com/RGBCube/HTMNIX) project
@@ -5,7 +5,7 @@ description: And revealing how cursed Nix is.
 unlisted: true

 tags:
-  - nix
+  - nix/os
 ---

 Everyone who has ever interacted with Nix tooling knows that it keeps some
site/blog/2025-06-09-nginix.md (new file, 266 added lines)

@@ -0,0 +1,266 @@
---
title: NgiNix
description: How the Nixpkgs Nginx differs and why it's better.

color: "#009639"
thumbnail: /assets/images/nginix-morbius.webp

tags:
  - nginx
  - nix/os
---

[Nginx](https://nginx.org) automatically creates
[ETag](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/ETag)
headers for static files, but it does so in a way that is suboptimal.

I'll explain why that is, and how the
[Nixpkgs distribution of Nginx](https://github.com/NixOS/nixpkgs/blob/8c742e834fd39a9b912a9237b5a6453cb4ec222b/pkgs/servers/http/nginx/generic.nix)
does it better.

First, let's take a look at how
[Nginx sets ETags](https://github.com/nginx/nginx/blob/5b8a5c08ce28639e788734b2528faad70baa113c/src/http/ngx_http_core_module.c#L1681-L1716):

```c
// in ngx_http_set_etag @ src/http/ngx_http_core_module.c:1681

{[(1)]}
etag = ngx_list_push(&r->headers_out.headers);
if (etag == NULL) {
    return NGX_ERROR;
}

{[(2)]}
etag->hash = 1;
etag->next = NULL;
ngx_str_set(&etag->key, "ETag");

{[(3)]}
etag->value.data = ngx_pnalloc(r->pool, NGX_OFF_T_LEN + NGX_TIME_T_LEN + 3);
if (etag->value.data == NULL) {
    etag->hash = 0;
    return NGX_ERROR;
}

{[(4)]}
etag->value.len = ngx_sprintf(etag->value.data, "\"%xT-%xO\"",
                              r->headers_out.last_modified_time,
                              r->headers_out.content_length_n)
                  - etag->value.data;
```

1. Here we create an empty header entry in the response's outgoing headers.

2. We initialize it and set its key to "ETag".

3. We then allocate memory for the ETag value from the current request's arena.
   The size is calculated to be large enough to hold the timestamp
   (`NGX_TIME_T_LEN`), the content length (`NGX_OFF_T_LEN`), and three extra
   characters (two quotes `"` and a hyphen `-`).

4. And then we fill the header's value with a string like `"5f7e1b2a-1c8f"`,
   built from the last modified time of the file being served and its content
   length. The odd-looking `- etag->value.data` is required because
   `ngx_sprintf` returns a pointer to the end of the written string, so
   subtracting the start pointer gives us the length. (A small standalone sketch
   of this format follows after this list.)

Result: the response now carries an `ETag` header that is based on the file's
length and last modified date.
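
To make the format in step 4 concrete, here is a minimal sketch in plain C. It
is not nginx's API: `snprintf` with `%lx` stands in for `ngx_sprintf`'s
nginx-specific `%xT`/`%xO` conversions, and the sample timestamp and length are
just the ones from the example value above.

```c
#include <stdio.h>

// Hex-format a last-modified timestamp and a content length into a value
// shaped like the ETag nginx produces, e.g. "5f7e1b2a-1c8f".
// Returning the character count mirrors how nginx derives value.len from
// "end pointer minus start pointer".
static int make_etag(char *buf, size_t cap,
                     unsigned long mtime, unsigned long length) {
    return snprintf(buf, cap, "\"%lx-%lx\"", mtime, length);
}

int main(void) {
    char etag[64];
    int len = make_etag(etag, sizeof etag, 0x5f7e1b2aUL, 0x1c8fUL);
    printf("%.*s\n", len, etag); // "5f7e1b2a-1c8f", quotes included
    return 0;
}
```

The same two inputs always produce the same ETag, which is exactly the property
that breaks down in the next section.
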
## But wait...

What if we use a tool that aims to produce
[reproducible build artifacts](https://reproducible-builds.org/) to build our
website, and we just so happen to modify a file in such a way that doesn't
change the size of its contents? (More common than you think.)

> For example:
> Nix[^[A hermetic, declarative, reproducible and input(and soon content)-addressed build system.](https://nix.dev/)],
> which _always_ sets the last-modified timestamps for files under the Nix
> store[^The place Nix stores all build results, _usually_ `/nix/store`. Every
> "Nix store path" is represented like so:
> `/nix/store/6bxcxc6xvg5xv70z55adcwhgik5m41a0-package-1.0.0`, where the hash is
> derived from its inputs] to `1`, which corresponds to January 1st, 1970,
> 00:00:01 UTC.
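
(Not part of the patch, just a quick sanity check in plain C that a Unix
timestamp of `1` really is the date quoted above.)

```c
#include <stdio.h>
#include <time.h>

int main(void) {
    // Nix resets the last-modified time of store files to epoch + 1 second.
    time_t t = 1;
    char buf[32];
    strftime(buf, sizeof buf, "%Y-%m-%d %H:%M:%S UTC", gmtime(&t));
    puts(buf); // 1970-01-01 00:00:01 UTC
    return 0;
}
```
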
In that case, neither the last modified date nor the content length would
change. So the `ETag` header would stay **constant, which would make the client
assume the asset has not changed**.

## Silently Broken

The caching on your website being silently broken is a monumental issue, and it
has to be fixed. The way Nixpkgs fixes it is through
[this patch](https://github.com/NixOS/nixpkgs/blob/8c742e834fd39a9b912a9237b5a6453cb4ec222b/pkgs/servers/http/nginx/nix-etag-1.15.4.patch):

```patch
This patch makes it possible to serve static content from Nix store paths, by
using the hash of the store path for the ETag header.

diff --git a/src/http/ngx_http_core_module.c b/src/http/ngx_http_core_module.c
index 97a91aee2..2d07d71e6 100644
--- a/src/http/ngx_http_core_module.c
+++ b/src/http/ngx_http_core_module.c
@@ -1676,6 +1676,8 @@ ngx_http_set_etag(ngx_http_request_t *r)
 {
     ngx_table_elt_t *etag;
     ngx_http_core_loc_conf_t *clcf;
{[(2)]}
+    u_char *real, *ptr1, *ptr2;
+    ngx_err_t err;

     clcf = ngx_http_get_module_loc_conf(r, ngx_http_core_module);

@@ -1692,16 +1694,82 @@ ngx_http_set_etag(ngx_http_request_t *r)
     etag->next = NULL;
     ngx_str_set(&etag->key, "ETag");

{[(1)]}
-    etag->value.data = ngx_pnalloc(r->pool, NGX_OFF_T_LEN + NGX_TIME_T_LEN + 3);
-    if (etag->value.data == NULL) {
-        etag->hash = 0;
-        return NGX_ERROR;
+    // Upstream nginx uses file mod timestamp and content-length for Etag, but
+    // files in the Nix store have their timestamps reset, so that doesn't work.
+    // Instead, when serving from the Nix store, we use the hash from the store
+    // path and content-length.
+    //
+    // Every file in under the given store path will share the same store path
+    // hash. It is fine to serve different resources with the same Etag, but
+    // different representations of the same resource (eg the same file, but
+    // gzip-compressed) should have different Etags. Thus, we also append
+    // content-length, which should be different when the response is compressed
+
+    err = ngx_errno; {[(3)]}
+    real = ngx_realpath(clcf->root.data, NULL); {[(4)]}
+    ngx_set_errno(err); {[(5)]}
+
+    #define NIX_STORE_DIR "@nixStoreDir@" {[(6)]}
+    #define NIX_STORE_LEN @nixStoreDirLen@
+
+    if (r->headers_out.last_modified_time == 1 {[(7)]}
+        && real != NULL
+        && !ngx_strncmp(real, NIX_STORE_DIR, NIX_STORE_LEN)
+        && real[NIX_STORE_LEN] == '/'
+        && real[NIX_STORE_LEN + 1] != '\0')
+    {
+        // extract the hash from a path formatted like
+        // /nix/store/hashhere1234-pname-1.0.0
+        // +1 to skip the leading /
+        ptr1 = real + NIX_STORE_LEN + 1; {[(8)]}
+
+        ptr2 = (u_char *) ngx_strchr(ptr1, '-');
+
+        if (ptr2 == NULL) {
+            ngx_free(real);
+            etag->hash = 0;
+            return NGX_ERROR;
+        }
+
+        *ptr2 = '\0'; {[(9)]}
+
+        // hash + content-length + quotes and hyphen. Note that the
+        // content-length part of the string can vary in length.
+        etag->value.data = ngx_pnalloc(r->pool, ngx_strlen(ptr1) + NGX_OFF_T_LEN + 3);
+
+        if (etag->value.data == NULL) {
+            ngx_free(real);
+            etag->hash = 0;
+            return NGX_ERROR;
+        }
+
+
+        // set value.data content to "{hash}-{content-length}" (including quote
+        // marks), and set value.len to the length of the resulting string
+        etag->value.len = ngx_sprintf(etag->value.data, "\"%s-%xO\"",
+                                      ptr1,
+                                      r->headers_out.content_length_n)
+                          - etag->value.data;
+
+        ngx_http_clear_last_modified(r);
+    } else { {[(10)]}
+        // outside of Nix store, use the upstream Nginx logic for etags
+
+        etag->value.data = ngx_pnalloc(r->pool, NGX_OFF_T_LEN + NGX_TIME_T_LEN + 3);
+
+        if (etag->value.data == NULL) {
+            ngx_free(real);
+            etag->hash = 0;
+            return NGX_ERROR;
+        }
+
+        etag->value.len = ngx_sprintf(etag->value.data, "\"%xT-%xO\"",
+                                      r->headers_out.last_modified_time,
+                                      r->headers_out.content_length_n)
+                          - etag->value.data;
     }

-    etag->value.len = ngx_sprintf(etag->value.data, "\"%xT-%xO\"",
-                                  r->headers_out.last_modified_time,
-                                  r->headers_out.content_length_n)
-                      - etag->value.data;
+    ngx_free(real);

     r->headers_out.etag = etag;
```

The way this works is as follows:

1. The old logic, explained above, is deleted.

2. We create 4 new variables: 3 used for path manipulation and 1 for storing
   `errno`.

3. Save the current value of `errno` (ugh) to a local.

4. Then we try to resolve `clcf->root.data`. `clcf->root.data` is the path
   specified in the root directive of the Nginx configuration and could be a
   symbolic link (e.g., it could be `/var/www`, which might be a symlink to a
   Nix store path[^Please don't do this.]). `ngx_realpath` resolves all symbolic
   links and returns the canonical, absolute path. This is necessary to reliably
   check whether the path is inside the Nix store.

5. Restore the value of `errno`, as the previous function call might have
   changed it. We handle the error case of `ngx_realpath` later.

6. These are **Nix placeholders**, i.e. they get replaced by the path of the Nix
   store at build time. The reason we do not hardcode `/nix/store` is that
   [Nix's store path, when on the local filesystem, is arbitrary](https://nix.dev/manual/nix/2.29/store/store-path.html#store-directory).
   This makes our patch portable.

7. **The main condition:** We check whether `r->headers_out.last_modified_time`
   is `1`, which is what Nix sets files' last-modified times to. We then check
   whether the canonical path resolution succeeded (`real != NULL`), whether the
   canonical path starts with the Nix store path (`NIX_STORE_DIR`), and whether
   the store path has anything after it
   (`real[NIX_STORE_LEN] == '/' && real[NIX_STORE_LEN + 1] != '\0'`).

8. (If the condition is true) We then extract the hash. A path like
   `/nix/store/6bxcxc6xvg5xv70z55adcwhgik5m41a0-package-1.0.0/bin/package` gets
   turned into `6bxcxc6xvg5xv70z55adcwhgik5m41a0`.

9. (If the condition is true) _SIDENOTE:_ This is a nice hack around one of the
   many design failures in C, this one being NULL-terminated strings. We just
   place a `0` byte at the index where we want the string to end, instead of
   creating a copy. (A small standalone sketch of steps 7 through 9 follows
   after this list.)

10. (If the condition is false) We continue on with the old logic, as explained
    at the top.
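
Here is the standalone sketch referenced in step 9: a minimal, non-nginx
rendition of steps 7 through 9 in plain C. The canonical path is hardcoded as if
it had already come back from `ngx_realpath`, `/nix/store` is hardcoded where
the real patch substitutes `@nixStoreDir@` at build time, and the
`last_modified_time == 1` check is left out because there is no request here.

```c
#include <stdio.h>
#include <string.h>

#define NIX_STORE_DIR "/nix/store"
#define NIX_STORE_LEN (sizeof NIX_STORE_DIR - 1)

int main(void) {
    // Pretend this is what ngx_realpath(clcf->root.data, NULL) returned.
    char real[] =
        "/nix/store/6bxcxc6xvg5xv70z55adcwhgik5m41a0-package-1.0.0/share/www";

    // Step 7: is the canonical path inside the store, with something after it?
    if (strncmp(real, NIX_STORE_DIR, NIX_STORE_LEN) == 0
        && real[NIX_STORE_LEN] == '/'
        && real[NIX_STORE_LEN + 1] != '\0')
    {
        char *ptr1 = real + NIX_STORE_LEN + 1; // step 8: start of the hash
        char *ptr2 = strchr(ptr1, '-');        // first '-' ends the hash

        if (ptr2 != NULL) {
            *ptr2 = '\0';               // step 9: truncate in place, no copy
            printf("hash: %s\n", ptr1); // 6bxcxc6xvg5xv70z55adcwhgik5m41a0
        }
    }

    return 0;
}
```
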
We then set the value of the `ETag` header to `"{hash}-{content-length}"`; the
reason for including the content length is explained in the comment in the
patch:

> It is fine to serve different resources with the same Etag, but different
> representations of the same resource (eg the same file, but gzip-compressed)
> should have different Etags. Thus, we also append content-length, which should
> be different when the response is compressed
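
As a tiny illustration of the resulting format (again plain C rather than
nginx's API, and the gzipped length below is a made-up value): the hash part is
the same for every file under the store path, while the hex content length is
what distinguishes, say, a plain response from a gzip-compressed one.

```c
#include <stdio.h>

int main(void) {
    const char *hash = "6bxcxc6xvg5xv70z55adcwhgik5m41a0";
    unsigned long plain_length = 0x1c8fUL; // length of the file as stored
    unsigned long gzip_length  = 0x09a2UL; // hypothetical compressed length

    printf("\"%s-%lx\"\n", hash, plain_length); // ...41a0-1c8f
    printf("\"%s-%lx\"\n", hash, gzip_length);  // ...41a0-9a2
    return 0;
}
```
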
This way, by depending on the Nix store path instead of the last modification
date, we can serve files more reliably and with fewer[^No, not none. You can
still cause unreproducible builds with Nix and have the store path not change,
at least with input-addressed derivations (derivation = the smallest Nix build
unit).] issues.

I hope you learned something from this blog post, and perhaps a new way to
fingerprint Nginx installs :).


@@ -8,6 +8,6 @@ mapAnchor: true

 <h1>Info and Thought Dumps</h1>

-<p>Here is where I store all my random thoughts, grievences, random bits of information and perhaps the nice stuff I encounter.</p>
+<p>Here is where I store all my random thoughts, grievances, random bits of information and perhaps the nice stuff I encounter.</p>

 <p>Nothing here is blog-post tier good, so do not expect very high quality.</p>
@@ -7,7 +7,7 @@ Just had to deal with Go iterators and I can say that they fit the language
 well.

 They're push iterators. It's as if you're just appending items to an array.
-Actually much easier to explain when you are teaching the language to begginers
+Actually much easier to explain when you are teaching the language to beginners
 while not expecting them to use it for much other than surface level utilities.

 They compile down to nonlocal returns to try to keep it fast, but of course it's
@@ -1,3 +1,7 @@
+---
+date: 2025-06-09
+---
+
 This site and other websites under this domain run
 [Plausible](https://plausible.io/) analytics on a self-hosted instance at
 [shekels.rgbcu.be](https://shekels.rgbcu.be/).
@@ -1,3 +1,7 @@
+---
+date: 2025-06-09
+---
+
 The website of RGBCube is up.

 This webpage is **absolutely never** wrong.