Added basic usage tracking for posts.
This commit is contained in:
parent
c224ad55b9
commit
7063fd5604
|
@ -0,0 +1,173 @@
|
||||||
|
// NOTE(asaf): The built-in base64 capabilities can't handle non-ascii characters.
|
||||||
|
// This code is copied from MDN at https://developer.mozilla.org/en-US/docs/Glossary/Base64
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// var sMyInput = "Base 64 \u2014 Mozilla Developer Network";
|
||||||
|
// var aMyUTF8Input = strToUTF8Arr(sMyInput);
|
||||||
|
// var sMyBase64 = base64EncArr(aMyUTF8Input);
|
||||||
|
// alert(sMyBase64);
|
||||||
|
//
|
||||||
|
// var aMyUTF8Output = base64DecToArr(sMyBase64);
|
||||||
|
// var sMyOutput = UTF8ArrToStr(aMyUTF8Output);
|
||||||
|
// alert(sMyOutput);
|
||||||
|
|
||||||
|
// Decode one base64 digit (given as a character code) into its 6-bit value.
// 'A'-'Z' -> 0-25, 'a'-'z' -> 26-51, '0'-'9' -> 52-61, '+' -> 62, '/' -> 63.
// Any other character decodes to 0, matching the MDN reference implementation.
function b64ToUint6 (nChr) {
  if (nChr > 64 && nChr < 91) {
    return nChr - 65;   // 'A'..'Z'
  }
  if (nChr > 96 && nChr < 123) {
    return nChr - 71;   // 'a'..'z'
  }
  if (nChr > 47 && nChr < 58) {
    return nChr + 4;    // '0'..'9'
  }
  if (nChr === 43) {
    return 62;          // '+'
  }
  if (nChr === 47) {
    return 63;          // '/'
  }
  return 0;
}
|
||||||
|
|
||||||
|
// Decode a base64 string into a Uint8Array of raw bytes.
// Characters outside the base64 alphabet ('=' padding, whitespace, CRLF line
// breaks) are stripped before decoding.
// nBlocksSize (optional): round the output buffer length up to a multiple of
// this value; extra slots are left zero-filled.
function base64DecToArr (sBase64, nBlocksSize) {

  var
    sB64Enc = sBase64.replace(/[^A-Za-z0-9\+\/]/g, ""), nInLen = sB64Enc.length,
    nOutLen = nBlocksSize ? Math.ceil((nInLen * 3 + 1 >> 2) / nBlocksSize) * nBlocksSize : nInLen * 3 + 1 >> 2, taBytes = new Uint8Array(nOutLen);

  for (var nMod3, nMod4, nUint24 = 0, nOutIdx = 0, nInIdx = 0; nInIdx < nInLen; nInIdx++) {
    nMod4 = nInIdx & 3;
    // Accumulate four 6-bit digits (most significant first) into a 24-bit group.
    nUint24 |= b64ToUint6(sB64Enc.charCodeAt(nInIdx)) << 6 * (3 - nMod4);
    if (nMod4 === 3 || nInLen - nInIdx === 1) {
      // A group is complete (or input ended): emit up to three bytes,
      // taking the high, middle, then low byte of the 24-bit group.
      for (nMod3 = 0; nMod3 < 3 && nOutIdx < nOutLen; nMod3++, nOutIdx++) {
        taBytes[nOutIdx] = nUint24 >>> (16 >>> nMod3 & 24) & 255;
      }
      nUint24 = 0;
    }
  }

  return taBytes;
}
|
||||||
|
|
||||||
|
/* Base64 string to array encoding */
|
||||||
|
|
||||||
|
// Encode a 6-bit value (0-63) as the character code of its base64 digit.
// 0-25 -> 'A'-'Z', 26-51 -> 'a'-'z', 52-61 -> '0'-'9', 62 -> '+', 63 -> '/'.
// Out-of-range values fall back to 'A' (65), matching the MDN original.
function uint6ToB64 (nUint6) {
  if (nUint6 < 26) {
    return nUint6 + 65;   // 'A'..'Z'
  }
  if (nUint6 < 52) {
    return nUint6 + 71;   // 'a'..'z'
  }
  if (nUint6 < 62) {
    return nUint6 - 4;    // '0'..'9'
  }
  if (nUint6 === 62) {
    return 43;            // '+'
  }
  if (nUint6 === 63) {
    return 47;            // '/'
  }
  return 65;              // fallback, same as the MDN reference code
}
|
||||||
|
|
||||||
|
// Encode an array of bytes as a base64 string, inserting "\r\n" line breaks
// every 76 output characters (MIME style) and '='-padding the final group.
function base64EncArr (aBytes) {

  // nMod3 starts at 2 so an empty input produces no stray padding below.
  var nMod3 = 2, sB64Enc = "";

  for (var nLen = aBytes.length, nUint24 = 0, nIdx = 0; nIdx < nLen; nIdx++) {
    nMod3 = nIdx % 3;
    // MIME-style line wrapping at 76 characters of output.
    if (nIdx > 0 && (nIdx * 4 / 3) % 76 === 0) { sB64Enc += "\r\n"; }
    // Pack this byte into the 24-bit group, high byte first.
    nUint24 |= aBytes[nIdx] << (16 >>> nMod3 & 24);
    if (nMod3 === 2 || aBytes.length - nIdx === 1) {
      // Group complete (or input ended): emit four base64 digits.
      sB64Enc += String.fromCharCode(uint6ToB64(nUint24 >>> 18 & 63), uint6ToB64(nUint24 >>> 12 & 63), uint6ToB64(nUint24 >>> 6 & 63), uint6ToB64(nUint24 & 63));
      nUint24 = 0;
    }
  }

  // A partial final group emitted surplus 'A' digits above; trim them off
  // and append the corresponding '=' padding instead.
  return sB64Enc.substr(0, sB64Enc.length - 2 + nMod3) + (nMod3 === 2 ? '' : nMod3 === 1 ? '=' : '==');
}
|
||||||
|
|
||||||
|
/* UTF-8 array to DOMString and vice versa */
|
||||||
|
|
||||||
|
// Decode an array of UTF-8 bytes into a DOMString.
// Each lead byte selects a 1-6 byte sequence; the ++nIdx expressions below
// consume the continuation bytes in order, so evaluation order is significant.
// NOTE(review): the decoded code point is passed to String.fromCharCode, which
// truncates values above 0xFFFF — code points beyond the BMP presumably rely on
// the matching CESU-8-style encoder in this file; verify before reusing alone.
function UTF8ArrToStr (aBytes) {

  var sView = "";

  for (var nPart, nLen = aBytes.length, nIdx = 0; nIdx < nLen; nIdx++) {
    nPart = aBytes[nIdx];
    sView += String.fromCharCode(
      nPart > 251 && nPart < 254 && nIdx + 5 < nLen ? /* six bytes */
        /* (nPart - 252 << 30) may be not so safe in ECMAScript! So...: */
        (nPart - 252) * 1073741824 + (aBytes[++nIdx] - 128 << 24) + (aBytes[++nIdx] - 128 << 18) + (aBytes[++nIdx] - 128 << 12) + (aBytes[++nIdx] - 128 << 6) + aBytes[++nIdx] - 128
      : nPart > 247 && nPart < 252 && nIdx + 4 < nLen ? /* five bytes */
        (nPart - 248 << 24) + (aBytes[++nIdx] - 128 << 18) + (aBytes[++nIdx] - 128 << 12) + (aBytes[++nIdx] - 128 << 6) + aBytes[++nIdx] - 128
      : nPart > 239 && nPart < 248 && nIdx + 3 < nLen ? /* four bytes */
        (nPart - 240 << 18) + (aBytes[++nIdx] - 128 << 12) + (aBytes[++nIdx] - 128 << 6) + aBytes[++nIdx] - 128
      : nPart > 223 && nPart < 240 && nIdx + 2 < nLen ? /* three bytes */
        (nPart - 224 << 12) + (aBytes[++nIdx] - 128 << 6) + aBytes[++nIdx] - 128
      : nPart > 191 && nPart < 224 && nIdx + 1 < nLen ? /* two bytes */
        (nPart - 192 << 6) + aBytes[++nIdx] - 128
      : /* nPart < 127 ? */ /* one byte */
        nPart
    );
  }

  return sView;
}
|
||||||
|
|
||||||
|
// Encode a DOMString into a Uint8Array of UTF-8 bytes.
// Note: works on UTF-16 code units via charCodeAt, so astral characters are
// seen as surrogate pairs and each half is encoded separately (CESU-8 style),
// exactly as in the MDN reference implementation.
function strToUTF8Arr (sDOMStr) {

  var nStrLen = sDOMStr.length;

  /* First pass: measure how many bytes the encoded form needs. */
  var nArrLen = 0;
  for (var nMapIdx = 0; nMapIdx < nStrLen; nMapIdx++) {
    var nCode = sDOMStr.charCodeAt(nMapIdx);
    if (nCode < 0x80) {
      nArrLen += 1;
    } else if (nCode < 0x800) {
      nArrLen += 2;
    } else if (nCode < 0x10000) {
      nArrLen += 3;
    } else if (nCode < 0x200000) {
      nArrLen += 4;
    } else if (nCode < 0x4000000) {
      nArrLen += 5;
    } else {
      nArrLen += 6;
    }
  }

  var aBytes = new Uint8Array(nArrLen);

  /* Second pass: write each character's UTF-8 byte sequence. */
  var nIdx = 0;
  for (var nChrIdx = 0; nIdx < nArrLen; nChrIdx++) {
    var nChr = sDOMStr.charCodeAt(nChrIdx);
    if (nChr < 128) {
      /* one byte */
      aBytes[nIdx++] = nChr;
    } else if (nChr < 0x800) {
      /* two bytes */
      aBytes[nIdx++] = 192 + (nChr >>> 6);
      aBytes[nIdx++] = 128 + (nChr & 63);
    } else if (nChr < 0x10000) {
      /* three bytes */
      aBytes[nIdx++] = 224 + (nChr >>> 12);
      aBytes[nIdx++] = 128 + (nChr >>> 6 & 63);
      aBytes[nIdx++] = 128 + (nChr & 63);
    } else if (nChr < 0x200000) {
      /* four bytes */
      aBytes[nIdx++] = 240 + (nChr >>> 18);
      aBytes[nIdx++] = 128 + (nChr >>> 12 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 6 & 63);
      aBytes[nIdx++] = 128 + (nChr & 63);
    } else if (nChr < 0x4000000) {
      /* five bytes */
      aBytes[nIdx++] = 248 + (nChr >>> 24);
      aBytes[nIdx++] = 128 + (nChr >>> 18 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 12 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 6 & 63);
      aBytes[nIdx++] = 128 + (nChr & 63);
    } else /* if (nChr <= 0x7fffffff) */ {
      /* six bytes */
      aBytes[nIdx++] = 252 + (nChr >>> 30);
      aBytes[nIdx++] = 128 + (nChr >>> 24 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 18 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 12 & 63);
      aBytes[nIdx++] = 128 + (nChr >>> 6 & 63);
      aBytes[nIdx++] = 128 + (nChr & 63);
    }
  }

  return aBytes;
}
|
|
@ -63,7 +63,7 @@ func SanitizeFilename(filename string) string {
|
||||||
if filename == "" {
|
if filename == "" {
|
||||||
return "unnamed"
|
return "unnamed"
|
||||||
}
|
}
|
||||||
return REIllegalFilenameChars.ReplaceAllString(filename, "")
|
return REIllegalFilenameChars.ReplaceAllString(filename, "_")
|
||||||
}
|
}
|
||||||
|
|
||||||
func AssetKey(id, filename string) string {
|
func AssetKey(id, filename string) string {
|
||||||
|
|
|
@ -3,6 +3,7 @@ package hmnurl
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"regexp"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/config"
|
"git.handmade.network/hmn/hmn/src/config"
|
||||||
|
@ -54,6 +55,7 @@ func SetCacheBust(newCacheBust string) {
|
||||||
|
|
||||||
// SetS3BaseUrl sets the base URL used for S3-hosted assets and rebuilds
// RegexS3Asset so that it matches asset URLs under the new base, capturing
// the object key in the named group "key".
func SetS3BaseUrl(base string) {
	S3BaseUrl = base
	// QuoteMeta escapes any regex metacharacters in the base URL itself.
	RegexS3Asset = regexp.MustCompile(fmt.Sprintf("%s(?P<key>[\\w\\-./]+)", regexp.QuoteMeta(S3BaseUrl)))
}
|
||||||
|
|
||||||
func GetBaseHost() string {
|
func GetBaseHost() string {
|
||||||
|
|
|
@ -313,6 +313,10 @@ func TestForumMarkRead(t *testing.T) {
|
||||||
AssertSubdomain(t, BuildForumMarkRead("hero", 5), "hero")
|
AssertSubdomain(t, BuildForumMarkRead("hero", 5), "hero")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TestS3Asset verifies that a built S3 asset URL round-trips through
// RegexS3Asset, with the full key captured by the "key" group.
func TestS3Asset(t *testing.T) {
	AssertRegexMatchFull(t, BuildS3Asset("hello"), RegexS3Asset, map[string]string{"key": "hello"})
}
|
||||||
|
|
||||||
func AssertSubdomain(t *testing.T, fullUrl string, expectedSubdomain string) {
|
func AssertSubdomain(t *testing.T, fullUrl string, expectedSubdomain string) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
|
@ -343,8 +347,15 @@ func AssertRegexMatch(t *testing.T, fullUrl string, regex *regexp.Regexp, params
|
||||||
if len(requestPath) == 0 {
|
if len(requestPath) == 0 {
|
||||||
requestPath = "/"
|
requestPath = "/"
|
||||||
}
|
}
|
||||||
match := regex.FindStringSubmatch(requestPath)
|
|
||||||
assert.NotNilf(t, match, "Url did not match regex: [%s] vs [%s]", requestPath, regex.String())
|
AssertRegexMatchFull(t, requestPath, regex, paramsToVerify)
|
||||||
|
}
|
||||||
|
|
||||||
|
func AssertRegexMatchFull(t *testing.T, fullUrl string, regex *regexp.Regexp, paramsToVerify map[string]string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
match := regex.FindStringSubmatch(fullUrl)
|
||||||
|
assert.NotNilf(t, match, "Url did not match regex: [%s] vs [%s]", fullUrl, regex.String())
|
||||||
|
|
||||||
if paramsToVerify != nil {
|
if paramsToVerify != nil {
|
||||||
subexpNames := regex.SubexpNames()
|
subexpNames := regex.SubexpNames()
|
||||||
|
@ -386,5 +397,4 @@ func AssertRegexNoMatch(t *testing.T, fullUrl string, regex *regexp.Regexp) {
|
||||||
func TestThingsThatDontNeedCoverage(t *testing.T) {
|
func TestThingsThatDontNeedCoverage(t *testing.T) {
|
||||||
// look the other way ಠ_ಠ
|
// look the other way ಠ_ಠ
|
||||||
BuildPodcastEpisodeFile("foo", "bar")
|
BuildPodcastEpisodeFile("foo", "bar")
|
||||||
BuildS3Asset("ha ha")
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -638,10 +638,12 @@ func BuildEditorPreviewsJS() string {
|
||||||
return Url("/assets/editorpreviews.js", nil)
|
return Url("/assets/editorpreviews.js", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// NOTE(asaf): No Regex or tests for remote assets, since we don't parse it ourselves
|
var RegexS3Asset *regexp.Regexp
|
||||||
|
|
||||||
func BuildS3Asset(s3key string) string {
|
func BuildS3Asset(s3key string) string {
|
||||||
defer CatchPanic()
|
defer CatchPanic()
|
||||||
return fmt.Sprintf("%s%s", S3BaseUrl, s3key)
|
res := fmt.Sprintf("%s%s", S3BaseUrl, s3key)
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
var RegexPublic = regexp.MustCompile("^/public/.+$")
|
var RegexPublic = regexp.MustCompile("^/public/.+$")
|
||||||
|
|
|
@ -0,0 +1,64 @@
|
||||||
|
package migrations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"git.handmade.network/hmn/hmn/src/migration/types"
|
||||||
|
"git.handmade.network/hmn/hmn/src/oops"
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// init registers this migration with the package-level migration registry
// at program startup.
func init() {
	registerMigration(AddHandmadePostAssetUsage{})
}
|
||||||
|
|
||||||
|
type AddHandmadePostAssetUsage struct{}
|
||||||
|
|
||||||
|
// Version returns this migration's unique, timestamp-based version, which
// determines its order relative to other migrations.
func (m AddHandmadePostAssetUsage) Version() types.MigrationVersion {
	return types.MigrationVersion(time.Date(2021, 9, 22, 18, 27, 18, 0, time.UTC))
}
|
||||||
|
|
||||||
|
// Name returns the human-readable identifier for this migration.
func (m AddHandmadePostAssetUsage) Name() string {
	return "AddHandmadePostAssetUsage"
}
|
||||||
|
|
||||||
|
// Description returns a one-line summary of what this migration does.
func (m AddHandmadePostAssetUsage) Description() string {
	return "Add table for tracking asset usage in posts, and a unique index on handmade_asset.s3_key"
}
|
||||||
|
|
||||||
|
// Up creates the handmade_post_asset_usage join table linking posts to the
// assets they reference, indexes both foreign-key columns so usages can be
// looked up from either side, and adds a unique constraint on
// handmade_asset.s3_key so assets can be resolved by their S3 key.
// Runs inside the supplied transaction; returns a wrapped error on failure.
func (m AddHandmadePostAssetUsage) Up(ctx context.Context, tx pgx.Tx) error {
	_, err := tx.Exec(ctx,
		`
		CREATE TABLE handmade_post_asset_usage (
			post_id INTEGER NOT NULL REFERENCES handmade_post(id) ON DELETE CASCADE,
			asset_id UUID NOT NULL REFERENCES handmade_asset(id) ON DELETE CASCADE,
			CONSTRAINT handmade_post_asset_usage_unique UNIQUE(post_id, asset_id)
		);

		CREATE INDEX handmade_post_asset_usage_post_id ON handmade_post_asset_usage(post_id);
		CREATE INDEX handmade_post_asset_usage_asset_id ON handmade_post_asset_usage(asset_id);

		ALTER TABLE handmade_asset
			ADD CONSTRAINT handmade_asset_s3_key UNIQUE(s3_key);
		`,
	)
	if err != nil {
		return oops.New(err, "failed to add table and indexes")
	}
	return nil
}
|
||||||
|
|
||||||
|
func (m AddHandmadePostAssetUsage) Down(ctx context.Context, tx pgx.Tx) error {
|
||||||
|
_, err := tx.Exec(ctx,
|
||||||
|
`
|
||||||
|
DROP INDEX handmade_post_asset_usage_post_id;
|
||||||
|
DROP INDEX handmade_post_asset_usage_asset_id;
|
||||||
|
DROP TABLE handmade_post_asset_usage;
|
||||||
|
`,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return oops.New(err, "failed to drop table and indexes")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
|
@ -2,6 +2,7 @@
|
||||||
|
|
||||||
{{ define "extrahead" }}
|
{{ define "extrahead" }}
|
||||||
<script src="{{ static "go_wasm_exec.js" }}"></script>
|
<script src="{{ static "go_wasm_exec.js" }}"></script>
|
||||||
|
<script src="{{ static "js/base64.js" }}"></script>
|
||||||
<script>
|
<script>
|
||||||
const previewWorker = new Worker('/assets/editorpreviews.js');
|
const previewWorker = new Worker('/assets/editorpreviews.js');
|
||||||
</script>
|
</script>
|
||||||
|
@ -298,7 +299,7 @@
|
||||||
if (items[i].file) {
|
if (items[i].file) {
|
||||||
fileCounter++;
|
fileCounter++;
|
||||||
toInsert += makeUploadString(fileCounter, items[i].file.name) + "\n";
|
toInsert += makeUploadString(fileCounter, items[i].file.name) + "\n";
|
||||||
startUpload(fileCounter, items[i].file);
|
queueUpload(fileCounter, items[i].file);
|
||||||
} else {
|
} else {
|
||||||
toInsert += `${items[i].error}\n`;
|
toInsert += `${items[i].error}\n`;
|
||||||
}
|
}
|
||||||
|
@ -306,6 +307,7 @@
|
||||||
|
|
||||||
textField.value = textField.value.substring(0, cursorStart) + toInsert + textField.value.substring(cursorEnd, textField.value.length);
|
textField.value = textField.value.substring(0, cursorStart) + toInsert + textField.value.substring(cursorEnd, textField.value.length);
|
||||||
doMarkdown();
|
doMarkdown();
|
||||||
|
uploadNext();
|
||||||
}
|
}
|
||||||
|
|
||||||
function replaceUploadString(upload, newString) {
|
function replaceUploadString(upload, newString) {
|
||||||
|
@ -327,7 +329,11 @@
|
||||||
doMarkdown();
|
doMarkdown();
|
||||||
}
|
}
|
||||||
|
|
||||||
function startUpload(uploadNumber, file) {
|
function replaceUploadStringError(upload) {
|
||||||
|
replaceUploadString(upload, `There was a problem uploading your file \`${upload.file.name}\`.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
function queueUpload(uploadNumber, file) {
|
||||||
uploadQueue.push({
|
uploadQueue.push({
|
||||||
uploadNumber: uploadNumber,
|
uploadNumber: uploadNumber,
|
||||||
file: file
|
file: file
|
||||||
|
@ -335,7 +341,6 @@
|
||||||
|
|
||||||
currentBatchSize++;
|
currentBatchSize++;
|
||||||
uploadProgressText.textContent = `Uploading files ${currentBatchDone+1}/${currentBatchSize}`;
|
uploadProgressText.textContent = `Uploading files ${currentBatchDone+1}/${currentBatchSize}`;
|
||||||
uploadNext();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function uploadDone(ev) {
|
function uploadDone(ev) {
|
||||||
|
@ -352,14 +357,14 @@
|
||||||
} else if (currentXhr.response.error) {
|
} else if (currentXhr.response.error) {
|
||||||
replaceUploadString(currentUpload, `Upload failed for \`${currentUpload.file.name}\`: ${currentXhr.response.error}.`);
|
replaceUploadString(currentUpload, `Upload failed for \`${currentUpload.file.name}\`: ${currentXhr.response.error}.`);
|
||||||
} else {
|
} else {
|
||||||
replaceUploadString(currentUpload, `There was a problem uploading your file \`${currentUpload.file.name}\`.`);
|
replaceUploadStringError(currentUpload);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
replaceUploadString(currentUpload, `There was a problem uploading your file \`${currentUpload.file.name}\`.`);
|
replaceUploadStringError(currentUpload);
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
replaceUploadString(currentUpload, `There was a problem uploading your file \`${currentUpload.file.name}\`.`);
|
|
||||||
console.error(err);
|
console.error(err);
|
||||||
|
replaceUploadStringError(currentUpload);
|
||||||
}
|
}
|
||||||
currentUpload = null;
|
currentUpload = null;
|
||||||
currentXhr = null;
|
currentXhr = null;
|
||||||
|
@ -384,15 +389,23 @@
|
||||||
submitButton.disabled = true;
|
submitButton.disabled = true;
|
||||||
submitButton.value = "Uploading files...";
|
submitButton.value = "Uploading files...";
|
||||||
|
|
||||||
|
try {
|
||||||
|
let utf8Filename = strToUTF8Arr(next.file.name);
|
||||||
|
let base64Filename = base64EncArr(utf8Filename);
|
||||||
// NOTE(asaf): We use XHR because fetch can't do upload progress reports. Womp womp. https://youtu.be/Pubd-spHN-0?t=2
|
// NOTE(asaf): We use XHR because fetch can't do upload progress reports. Womp womp. https://youtu.be/Pubd-spHN-0?t=2
|
||||||
currentXhr = new XMLHttpRequest();
|
currentXhr = new XMLHttpRequest();
|
||||||
currentXhr.upload.addEventListener("progress", updateUploadProgress);
|
currentXhr.upload.addEventListener("progress", updateUploadProgress);
|
||||||
currentXhr.open("POST", uploadUrl, true);
|
currentXhr.open("POST", uploadUrl, true);
|
||||||
currentXhr.setRequestHeader("Hmn-Upload-Filename", next.file.name);
|
currentXhr.setRequestHeader("Hmn-Upload-Filename", base64Filename);
|
||||||
currentXhr.responseType = "json";
|
currentXhr.responseType = "json";
|
||||||
currentXhr.addEventListener("loadend", uploadDone);
|
currentXhr.addEventListener("loadend", uploadDone);
|
||||||
currentXhr.send(next.file);
|
currentXhr.send(next.file);
|
||||||
currentUpload = next;
|
currentUpload = next;
|
||||||
|
} catch (err) {
|
||||||
|
replaceUploadStringError(next);
|
||||||
|
console.error(err);
|
||||||
|
uploadNext();
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
submitButton.disabled = false;
|
submitButton.disabled = false;
|
||||||
submitButton.value = submitText;
|
submitButton.value = submitText;
|
||||||
|
|
|
@ -2,6 +2,7 @@ package website
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"encoding/base64"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"image"
|
"image"
|
||||||
|
@ -51,9 +52,12 @@ func AssetUpload(c *RequestContext) ResponseData {
|
||||||
}
|
}
|
||||||
|
|
||||||
filenameHeader, hasFilename := c.Req.Header["Hmn-Upload-Filename"]
|
filenameHeader, hasFilename := c.Req.Header["Hmn-Upload-Filename"]
|
||||||
originalFilename := ""
|
originalFilename := "upload"
|
||||||
if hasFilename {
|
if hasFilename {
|
||||||
originalFilename = strings.ReplaceAll(filenameHeader[0], " ", "_")
|
decodedFilename, err := base64.StdEncoding.DecodeString(filenameHeader[0])
|
||||||
|
if err == nil {
|
||||||
|
originalFilename = string(decodedFilename)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bodyReader := http.MaxBytesReader(c.Res, c.Req.Body, int64(maxFilesize))
|
bodyReader := http.MaxBytesReader(c.Res, c.Req.Body, int64(maxFilesize))
|
||||||
|
|
|
@ -10,10 +10,12 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/db"
|
"git.handmade.network/hmn/hmn/src/db"
|
||||||
|
"git.handmade.network/hmn/hmn/src/hmnurl"
|
||||||
"git.handmade.network/hmn/hmn/src/logging"
|
"git.handmade.network/hmn/hmn/src/logging"
|
||||||
"git.handmade.network/hmn/hmn/src/models"
|
"git.handmade.network/hmn/hmn/src/models"
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
"git.handmade.network/hmn/hmn/src/oops"
|
||||||
"git.handmade.network/hmn/hmn/src/parsing"
|
"git.handmade.network/hmn/hmn/src/parsing"
|
||||||
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -429,6 +431,51 @@ func CreatePostVersion(ctx context.Context, tx pgx.Tx, postId int, unparsedConte
|
||||||
panic(oops.New(err, "failed to set current post version and preview"))
|
panic(oops.New(err, "failed to set current post version and preview"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update asset usage
|
||||||
|
|
||||||
|
_, err = tx.Exec(ctx,
|
||||||
|
`
|
||||||
|
DELETE FROM handmade_post_asset_usage
|
||||||
|
WHERE post_id = $1
|
||||||
|
`,
|
||||||
|
postId,
|
||||||
|
)
|
||||||
|
|
||||||
|
matches := hmnurl.RegexS3Asset.FindAllStringSubmatch(unparsedContent, -1)
|
||||||
|
keyIdx := hmnurl.RegexS3Asset.SubexpIndex("key")
|
||||||
|
|
||||||
|
var keys []string
|
||||||
|
for _, match := range matches {
|
||||||
|
key := match[keyIdx]
|
||||||
|
keys = append(keys, key)
|
||||||
|
}
|
||||||
|
|
||||||
|
type assetId struct {
|
||||||
|
AssetID uuid.UUID `db:"id"`
|
||||||
|
}
|
||||||
|
assetResult, err := db.Query(ctx, tx, assetId{},
|
||||||
|
`
|
||||||
|
SELECT $columns
|
||||||
|
FROM handmade_asset
|
||||||
|
WHERE s3_key = ANY($1)
|
||||||
|
`,
|
||||||
|
keys,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
panic(oops.New(err, "failed to get assets matching keys"))
|
||||||
|
}
|
||||||
|
|
||||||
|
var values [][]interface{}
|
||||||
|
|
||||||
|
for _, asset := range assetResult.ToSlice() {
|
||||||
|
values = append(values, []interface{}{postId, asset.(*assetId).AssetID})
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.CopyFrom(ctx, pgx.Identifier{"handmade_post_asset_usage"}, []string{"post_id", "asset_id"}, pgx.CopyFromRows(values))
|
||||||
|
if err != nil {
|
||||||
|
panic(oops.New(err, "failed to insert post asset usage"))
|
||||||
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue