Compare commits
2 Commits
Author | SHA1 | Date |
---|---|---|
Ben Visness | bd6c95203c | |
Ben Visness | e74b3273cb |
41
go.mod
41
go.mod
|
@ -1,10 +1,8 @@
|
||||||
module git.handmade.network/hmn/hmn
|
module git.handmade.network/hmn/hmn
|
||||||
|
|
||||||
go 1.16
|
go 1.18
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
|
||||||
github.com/Masterminds/semver v1.5.0 // indirect
|
|
||||||
github.com/Masterminds/sprig v2.22.0+incompatible
|
github.com/Masterminds/sprig v2.22.0+incompatible
|
||||||
github.com/alecthomas/chroma v0.9.2
|
github.com/alecthomas/chroma v0.9.2
|
||||||
github.com/aws/aws-sdk-go-v2 v1.8.1
|
github.com/aws/aws-sdk-go-v2 v1.8.1
|
||||||
|
@ -16,13 +14,10 @@ require (
|
||||||
github.com/go-stack/stack v1.8.0
|
github.com/go-stack/stack v1.8.0
|
||||||
github.com/google/uuid v1.2.0
|
github.com/google/uuid v1.2.0
|
||||||
github.com/gorilla/websocket v1.4.2
|
github.com/gorilla/websocket v1.4.2
|
||||||
github.com/huandu/xstrings v1.3.2 // indirect
|
|
||||||
github.com/imdario/mergo v0.3.12 // indirect
|
|
||||||
github.com/jackc/pgconn v1.8.0
|
github.com/jackc/pgconn v1.8.0
|
||||||
github.com/jackc/pgtype v1.6.2
|
github.com/jackc/pgtype v1.6.2
|
||||||
github.com/jackc/pgx/v4 v4.10.1
|
github.com/jackc/pgx/v4 v4.10.1
|
||||||
github.com/jpillora/backoff v1.0.0
|
github.com/jpillora/backoff v1.0.0
|
||||||
github.com/mitchellh/copystructure v1.1.1 // indirect
|
|
||||||
github.com/rs/zerolog v1.21.0
|
github.com/rs/zerolog v1.21.0
|
||||||
github.com/spf13/cobra v1.1.3
|
github.com/spf13/cobra v1.1.3
|
||||||
github.com/stretchr/testify v1.7.0
|
github.com/stretchr/testify v1.7.0
|
||||||
|
@ -32,9 +27,43 @@ require (
|
||||||
github.com/yuin/goldmark v1.4.1
|
github.com/yuin/goldmark v1.4.1
|
||||||
github.com/yuin/goldmark-highlighting v0.0.0-20210516132338-9216f9c5aa01
|
github.com/yuin/goldmark-highlighting v0.0.0-20210516132338-9216f9c5aa01
|
||||||
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2
|
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2
|
||||||
|
golang.org/x/exp v0.0.0-20220314205449-43aec2f8a4e7
|
||||||
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d
|
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d
|
||||||
)
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||||
|
github.com/Masterminds/semver v1.5.0 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.1 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/internal/ini v1.2.1 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.2.2 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.3 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.3 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/sso v1.3.3 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/sts v1.6.2 // indirect
|
||||||
|
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/dlclark/regexp2 v1.4.0 // indirect
|
||||||
|
github.com/huandu/xstrings v1.3.2 // indirect
|
||||||
|
github.com/imdario/mergo v0.3.12 // indirect
|
||||||
|
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||||
|
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||||
|
github.com/jackc/pgio v1.0.0 // indirect
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
|
github.com/jackc/pgproto3/v2 v2.0.6 // indirect
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||||
|
github.com/jackc/puddle v1.1.3 // indirect
|
||||||
|
github.com/mitchellh/copystructure v1.1.1 // indirect
|
||||||
|
github.com/mitchellh/reflectwalk v1.0.1 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 // indirect
|
||||||
|
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 // indirect
|
||||||
|
golang.org/x/text v0.3.6 // indirect
|
||||||
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
|
||||||
|
)
|
||||||
|
|
||||||
replace (
|
replace (
|
||||||
github.com/frustra/bbcode v0.0.0-20201127003707-6ef347fbe1c8 => github.com/HandmadeNetwork/bbcode v0.0.0-20210623031351-ec0e2e2e39d9
|
github.com/frustra/bbcode v0.0.0-20201127003707-6ef347fbe1c8 => github.com/HandmadeNetwork/bbcode v0.0.0-20210623031351-ec0e2e2e39d9
|
||||||
github.com/yuin/goldmark v1.4.1 => github.com/HandmadeNetwork/goldmark v1.4.1-0.20210707024600-f7e596e26b5e
|
github.com/yuin/goldmark v1.4.1 => github.com/HandmadeNetwork/goldmark v1.4.1-0.20210707024600-f7e596e26b5e
|
||||||
|
|
7
go.sum
7
go.sum
|
@ -159,7 +159,6 @@ github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
|
||||||
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||||
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||||
github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0=
|
|
||||||
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
|
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
|
||||||
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
||||||
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
|
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
|
||||||
|
@ -178,7 +177,6 @@ github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye47
|
||||||
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
|
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
|
||||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||||
github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A=
|
|
||||||
github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
|
github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
|
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
|
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
|
||||||
|
@ -372,6 +370,8 @@ golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL
|
||||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||||
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
||||||
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
||||||
|
golang.org/x/exp v0.0.0-20220314205449-43aec2f8a4e7 h1:jynE66seADJbyWMUdeOyVTvPtBZt7L6LJHupGwxPZRM=
|
||||||
|
golang.org/x/exp v0.0.0-20220314205449-43aec2f8a4e7/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE=
|
||||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||||
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d h1:RNPAfi2nHY7C2srAV8A49jpsYr0ADedCk1wq6fTMTvs=
|
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d h1:RNPAfi2nHY7C2srAV8A49jpsYr0ADedCk1wq6fTMTvs=
|
||||||
|
@ -439,8 +439,9 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k=
|
|
||||||
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0=
|
||||||
|
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
const TimelineMediaTypes = {
|
const TimelineMediaTypes = {
|
||||||
UNKNOWN: 0,
|
|
||||||
IMAGE: 1,
|
IMAGE: 1,
|
||||||
VIDEO: 2,
|
VIDEO: 2,
|
||||||
AUDIO: 3,
|
AUDIO: 3,
|
||||||
|
|
|
@ -140,7 +140,7 @@ func Create(ctx context.Context, dbConn db.ConnOrTx, in CreateInput) (*models.As
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fetch and return the new record
|
// Fetch and return the new record
|
||||||
iasset, err := db.QueryOne(ctx, dbConn, models.Asset{},
|
asset, err := db.QueryOne[models.Asset](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_asset
|
FROM handmade_asset
|
||||||
|
@ -152,5 +152,5 @@ func Create(ctx context.Context, dbConn db.ConnOrTx, in CreateInput) (*models.As
|
||||||
return nil, oops.New(err, "failed to fetch newly-created asset")
|
return nil, oops.New(err, "failed to fetch newly-created asset")
|
||||||
}
|
}
|
||||||
|
|
||||||
return iasset.(*models.Asset), nil
|
return asset, nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -45,7 +45,7 @@ func makeCSRFToken() string {
|
||||||
var ErrNoSession = errors.New("no session found")
|
var ErrNoSession = errors.New("no session found")
|
||||||
|
|
||||||
func GetSession(ctx context.Context, conn *pgxpool.Pool, id string) (*models.Session, error) {
|
func GetSession(ctx context.Context, conn *pgxpool.Pool, id string) (*models.Session, error) {
|
||||||
row, err := db.QueryOne(ctx, conn, models.Session{}, "SELECT $columns FROM sessions WHERE id = $1", id)
|
sess, err := db.QueryOne[models.Session](ctx, conn, "SELECT $columns FROM sessions WHERE id = $1", id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if errors.Is(err, db.NotFound) {
|
if errors.Is(err, db.NotFound) {
|
||||||
return nil, ErrNoSession
|
return nil, ErrNoSession
|
||||||
|
@ -53,7 +53,6 @@ func GetSession(ctx context.Context, conn *pgxpool.Pool, id string) (*models.Ses
|
||||||
return nil, oops.New(err, "failed to get session")
|
return nil, oops.New(err, "failed to get session")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sess := row.(*models.Session)
|
|
||||||
|
|
||||||
return sess, nil
|
return sess, nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -57,14 +57,6 @@ var Config = HMNConfig{
|
||||||
MemberRoleID: "",
|
MemberRoleID: "",
|
||||||
ShowcaseChannelID: "",
|
ShowcaseChannelID: "",
|
||||||
LibraryChannelID: "",
|
LibraryChannelID: "",
|
||||||
StreamsChannelID: "",
|
|
||||||
},
|
|
||||||
Twitch: TwitchConfig{
|
|
||||||
ClientID: "",
|
|
||||||
ClientSecret: "",
|
|
||||||
EventSubSecret: "",
|
|
||||||
BaseUrl: "https://api.twitch.tv/helix",
|
|
||||||
BaseIDUrl: "https://id.twitch.tv/oauth2",
|
|
||||||
},
|
},
|
||||||
EpisodeGuide: EpisodeGuide{
|
EpisodeGuide: EpisodeGuide{
|
||||||
CineraOutputPath: "./annotations/",
|
CineraOutputPath: "./annotations/",
|
||||||
|
|
|
@ -27,7 +27,6 @@ type HMNConfig struct {
|
||||||
Email EmailConfig
|
Email EmailConfig
|
||||||
DigitalOcean DigitalOceanConfig
|
DigitalOcean DigitalOceanConfig
|
||||||
Discord DiscordConfig
|
Discord DiscordConfig
|
||||||
Twitch TwitchConfig
|
|
||||||
EpisodeGuide EpisodeGuide
|
EpisodeGuide EpisodeGuide
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -77,18 +76,9 @@ type DiscordConfig struct {
|
||||||
MemberRoleID string
|
MemberRoleID string
|
||||||
ShowcaseChannelID string
|
ShowcaseChannelID string
|
||||||
LibraryChannelID string
|
LibraryChannelID string
|
||||||
StreamsChannelID string
|
|
||||||
JamShowcaseChannelID string
|
JamShowcaseChannelID string
|
||||||
}
|
}
|
||||||
|
|
||||||
type TwitchConfig struct {
|
|
||||||
ClientID string
|
|
||||||
ClientSecret string
|
|
||||||
EventSubSecret string // NOTE(asaf): Between 10-100 chars long. Anything will do.
|
|
||||||
BaseUrl string
|
|
||||||
BaseIDUrl string
|
|
||||||
}
|
|
||||||
|
|
||||||
type EpisodeGuide struct {
|
type EpisodeGuide struct {
|
||||||
CineraOutputPath string
|
CineraOutputPath string
|
||||||
Projects map[string]string // NOTE(asaf): Maps from slugs to default episode guide topic
|
Projects map[string]string // NOTE(asaf): Maps from slugs to default episode guide topic
|
||||||
|
|
128
src/db/db.go
128
src/db/db.go
|
@ -5,6 +5,7 @@ import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/config"
|
"git.handmade.network/hmn/hmn/src/config"
|
||||||
|
@ -95,14 +96,20 @@ func NewConnPool(minConns, maxConns int32) *pgxpool.Pool {
|
||||||
return conn
|
return conn
|
||||||
}
|
}
|
||||||
|
|
||||||
type StructQueryIterator struct {
|
type columnName []string
|
||||||
fieldPaths [][]int
|
|
||||||
|
// A path to a particular field in query's destination type. Each index in the slice
|
||||||
|
// corresponds to a field index for use with Field on a reflect.Type or reflect.Value.
|
||||||
|
type fieldPath []int
|
||||||
|
|
||||||
|
type StructQueryIterator[T any] struct {
|
||||||
|
fieldPaths []fieldPath
|
||||||
rows pgx.Rows
|
rows pgx.Rows
|
||||||
destType reflect.Type
|
destType reflect.Type
|
||||||
closed chan struct{}
|
closed chan struct{}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (it *StructQueryIterator) Next() (interface{}, bool) {
|
func (it *StructQueryIterator[T]) Next() (*T, bool) {
|
||||||
hasNext := it.rows.Next()
|
hasNext := it.rows.Next()
|
||||||
if !hasNext {
|
if !hasNext {
|
||||||
it.Close()
|
it.Close()
|
||||||
|
@ -172,10 +179,10 @@ func (it *StructQueryIterator) Next() (interface{}, bool) {
|
||||||
currentValue = reflect.Value{}
|
currentValue = reflect.Value{}
|
||||||
}
|
}
|
||||||
|
|
||||||
return result.Interface(), true
|
return result.Interface().(*T), true
|
||||||
}
|
}
|
||||||
|
|
||||||
func (it *StructQueryIterator) Close() {
|
func (it *StructQueryIterator[any]) Close() {
|
||||||
it.rows.Close()
|
it.rows.Close()
|
||||||
select {
|
select {
|
||||||
case it.closed <- struct{}{}:
|
case it.closed <- struct{}{}:
|
||||||
|
@ -183,9 +190,9 @@ func (it *StructQueryIterator) Close() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (it *StructQueryIterator) ToSlice() []interface{} {
|
func (it *StructQueryIterator[T]) ToSlice() []*T {
|
||||||
defer it.Close()
|
defer it.Close()
|
||||||
var result []interface{}
|
var result []*T
|
||||||
for {
|
for {
|
||||||
row, ok := it.Next()
|
row, ok := it.Next()
|
||||||
if !ok {
|
if !ok {
|
||||||
|
@ -231,8 +238,8 @@ func followPathThroughStructs(structPtrVal reflect.Value, path []int) (reflect.V
|
||||||
return val, field
|
return val, field
|
||||||
}
|
}
|
||||||
|
|
||||||
func Query(ctx context.Context, conn ConnOrTx, destExample interface{}, query string, args ...interface{}) ([]interface{}, error) {
|
func Query[T any](ctx context.Context, conn ConnOrTx, query string, args ...interface{}) ([]*T, error) {
|
||||||
it, err := QueryIterator(ctx, conn, destExample, query, args...)
|
it, err := QueryIterator[T](ctx, conn, query, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
} else {
|
} else {
|
||||||
|
@ -240,27 +247,13 @@ func Query(ctx context.Context, conn ConnOrTx, destExample interface{}, query st
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func QueryIterator(ctx context.Context, conn ConnOrTx, destExample interface{}, query string, args ...interface{}) (*StructQueryIterator, error) {
|
func QueryIterator[T any](ctx context.Context, conn ConnOrTx, query string, args ...interface{}) (*StructQueryIterator[T], error) {
|
||||||
|
var destExample T
|
||||||
destType := reflect.TypeOf(destExample)
|
destType := reflect.TypeOf(destExample)
|
||||||
columnNames, fieldPaths, err := getColumnNamesAndPaths(destType, nil, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to generate column names")
|
|
||||||
}
|
|
||||||
|
|
||||||
columns := make([]string, 0, len(columnNames))
|
compiled := compileQuery(query, destType)
|
||||||
for _, strSlice := range columnNames {
|
|
||||||
tableName := strings.Join(strSlice[0:len(strSlice)-1], "_")
|
|
||||||
fullName := strSlice[len(strSlice)-1]
|
|
||||||
if tableName != "" {
|
|
||||||
fullName = tableName + "." + fullName
|
|
||||||
}
|
|
||||||
columns = append(columns, fullName)
|
|
||||||
}
|
|
||||||
|
|
||||||
columnNamesString := strings.Join(columns, ", ")
|
rows, err := conn.Query(ctx, compiled.query, args...)
|
||||||
query = strings.Replace(query, "$columns", columnNamesString, -1)
|
|
||||||
|
|
||||||
rows, err := conn.Query(ctx, query, args...)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if errors.Is(err, context.DeadlineExceeded) {
|
if errors.Is(err, context.DeadlineExceeded) {
|
||||||
panic("query exceeded its deadline")
|
panic("query exceeded its deadline")
|
||||||
|
@ -268,10 +261,10 @@ func QueryIterator(ctx context.Context, conn ConnOrTx, destExample interface{},
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
it := &StructQueryIterator{
|
it := &StructQueryIterator[T]{
|
||||||
fieldPaths: fieldPaths,
|
fieldPaths: compiled.fieldPaths,
|
||||||
rows: rows,
|
rows: rows,
|
||||||
destType: destType,
|
destType: compiled.destType,
|
||||||
closed: make(chan struct{}, 1),
|
closed: make(chan struct{}, 1),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -292,16 +285,70 @@ func QueryIterator(ctx context.Context, conn ConnOrTx, destExample interface{},
|
||||||
return it, nil
|
return it, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func getColumnNamesAndPaths(destType reflect.Type, pathSoFar []int, prefix []string) (names [][]string, paths [][]int, err error) {
|
type compiledQuery struct {
|
||||||
var columnNames [][]string
|
query string
|
||||||
var fieldPaths [][]int
|
destType reflect.Type
|
||||||
|
fieldPaths []fieldPath
|
||||||
|
}
|
||||||
|
|
||||||
|
var reColumnsPlaceholder = regexp.MustCompile(`\$columns({(.*?)})?`)
|
||||||
|
|
||||||
|
func compileQuery(query string, destType reflect.Type) compiledQuery {
|
||||||
|
columnsMatch := reColumnsPlaceholder.FindStringSubmatch(query)
|
||||||
|
hasColumnsPlaceholder := columnsMatch != nil
|
||||||
|
|
||||||
|
if hasColumnsPlaceholder {
|
||||||
|
// The presence of the $columns placeholder means that the destination type
|
||||||
|
// must be a struct, and we will plonk that struct's fields into the query.
|
||||||
|
|
||||||
|
if destType.Kind() != reflect.Struct {
|
||||||
|
panic("$columns can only be used when querying into some kind of struct")
|
||||||
|
}
|
||||||
|
|
||||||
|
var prefix []string
|
||||||
|
prefixText := columnsMatch[2]
|
||||||
|
if prefixText != "" {
|
||||||
|
prefix = []string{prefixText}
|
||||||
|
}
|
||||||
|
|
||||||
|
columnNames, fieldPaths := getColumnNamesAndPaths(destType, nil, prefix)
|
||||||
|
|
||||||
|
columns := make([]string, 0, len(columnNames))
|
||||||
|
for _, strSlice := range columnNames {
|
||||||
|
tableName := strings.Join(strSlice[0:len(strSlice)-1], "_")
|
||||||
|
fullName := strSlice[len(strSlice)-1]
|
||||||
|
if tableName != "" {
|
||||||
|
fullName = tableName + "." + fullName
|
||||||
|
}
|
||||||
|
columns = append(columns, fullName)
|
||||||
|
}
|
||||||
|
|
||||||
|
columnNamesString := strings.Join(columns, ", ")
|
||||||
|
query = reColumnsPlaceholder.ReplaceAllString(query, columnNamesString)
|
||||||
|
|
||||||
|
return compiledQuery{
|
||||||
|
query: query,
|
||||||
|
destType: destType,
|
||||||
|
fieldPaths: fieldPaths,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return compiledQuery{
|
||||||
|
query: query,
|
||||||
|
destType: destType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getColumnNamesAndPaths(destType reflect.Type, pathSoFar []int, prefix []string) (names []columnName, paths []fieldPath) {
|
||||||
|
var columnNames []columnName
|
||||||
|
var fieldPaths []fieldPath
|
||||||
|
|
||||||
if destType.Kind() == reflect.Ptr {
|
if destType.Kind() == reflect.Ptr {
|
||||||
destType = destType.Elem()
|
destType = destType.Elem()
|
||||||
}
|
}
|
||||||
|
|
||||||
if destType.Kind() != reflect.Struct {
|
if destType.Kind() != reflect.Struct {
|
||||||
return nil, nil, oops.New(nil, "can only get column names and paths from a struct, got type '%v' (at prefix '%v')", destType.Name(), prefix)
|
panic(fmt.Errorf("can only get column names and paths from a struct, got type '%v' (at prefix '%v')", destType.Name(), prefix))
|
||||||
}
|
}
|
||||||
|
|
||||||
type AnonPrefix struct {
|
type AnonPrefix struct {
|
||||||
|
@ -348,19 +395,16 @@ func getColumnNamesAndPaths(destType reflect.Type, pathSoFar []int, prefix []str
|
||||||
columnNames = append(columnNames, fieldColumnNames)
|
columnNames = append(columnNames, fieldColumnNames)
|
||||||
fieldPaths = append(fieldPaths, path)
|
fieldPaths = append(fieldPaths, path)
|
||||||
} else if fieldType.Kind() == reflect.Struct {
|
} else if fieldType.Kind() == reflect.Struct {
|
||||||
subCols, subPaths, err := getColumnNamesAndPaths(fieldType, path, fieldColumnNames)
|
subCols, subPaths := getColumnNamesAndPaths(fieldType, path, fieldColumnNames)
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
columnNames = append(columnNames, subCols...)
|
columnNames = append(columnNames, subCols...)
|
||||||
fieldPaths = append(fieldPaths, subPaths...)
|
fieldPaths = append(fieldPaths, subPaths...)
|
||||||
} else {
|
} else {
|
||||||
return nil, nil, oops.New(nil, "field '%s' in type %s has invalid type '%s'", field.Name, destType, field.Type)
|
panic(fmt.Errorf("field '%s' in type %s has invalid type '%s'", field.Name, destType, field.Type))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return columnNames, fieldPaths, nil
|
return columnNames, fieldPaths
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -370,8 +414,8 @@ result but find nothing.
|
||||||
*/
|
*/
|
||||||
var NotFound = errors.New("not found")
|
var NotFound = errors.New("not found")
|
||||||
|
|
||||||
func QueryOne(ctx context.Context, conn ConnOrTx, destExample interface{}, query string, args ...interface{}) (interface{}, error) {
|
func QueryOne[T any](ctx context.Context, conn ConnOrTx, query string, args ...interface{}) (*T, error) {
|
||||||
rows, err := QueryIterator(ctx, conn, destExample, query, args...)
|
rows, err := QueryIterator[T](ctx, conn, query, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,13 +10,90 @@ import (
|
||||||
|
|
||||||
func TestPaths(t *testing.T) {
|
func TestPaths(t *testing.T) {
|
||||||
type CustomInt int
|
type CustomInt int
|
||||||
|
type S2 struct {
|
||||||
|
B bool `db:"B"` // field 0
|
||||||
|
PB *bool `db:"PB"` // field 1
|
||||||
|
|
||||||
|
NoTag string // field 2
|
||||||
|
}
|
||||||
|
type S struct {
|
||||||
|
I int `db:"I"` // field 0
|
||||||
|
PI *int `db:"PI"` // field 1
|
||||||
|
CI CustomInt `db:"CI"` // field 2
|
||||||
|
PCI *CustomInt `db:"PCI"` // field 3
|
||||||
|
S2 `db:"S2"` // field 4 (embedded!)
|
||||||
|
PS2 *S2 `db:"PS2"` // field 5
|
||||||
|
|
||||||
|
NoTag int // field 6
|
||||||
|
}
|
||||||
|
type Nested struct {
|
||||||
|
S S `db:"S"` // field 0
|
||||||
|
PS *S `db:"PS"` // field 1
|
||||||
|
|
||||||
|
NoTag S // field 2
|
||||||
|
}
|
||||||
|
type Embedded struct {
|
||||||
|
NoTag S // field 0
|
||||||
|
Nested // field 1
|
||||||
|
}
|
||||||
|
|
||||||
|
names, paths := getColumnNamesAndPaths(reflect.TypeOf(Embedded{}), nil, nil)
|
||||||
|
assert.Equal(t, []columnName{
|
||||||
|
{"S", "I"}, {"S", "PI"},
|
||||||
|
{"S", "CI"}, {"S", "PCI"},
|
||||||
|
{"S", "S2", "B"}, {"S", "S2", "PB"},
|
||||||
|
{"S", "PS2", "B"}, {"S", "PS2", "PB"},
|
||||||
|
{"PS", "I"}, {"PS", "PI"},
|
||||||
|
{"PS", "CI"}, {"PS", "PCI"},
|
||||||
|
{"PS", "S2", "B"}, {"PS", "S2", "PB"},
|
||||||
|
{"PS", "PS2", "B"}, {"PS", "PS2", "PB"},
|
||||||
|
}, names)
|
||||||
|
assert.Equal(t, []fieldPath{
|
||||||
|
{1, 0, 0}, {1, 0, 1}, // Nested.S.I, Nested.S.PI
|
||||||
|
{1, 0, 2}, {1, 0, 3}, // Nested.S.CI, Nested.S.PCI
|
||||||
|
{1, 0, 4, 0}, {1, 0, 4, 1}, // Nested.S.S2.B, Nested.S.S2.PB
|
||||||
|
{1, 0, 5, 0}, {1, 0, 5, 1}, // Nested.S.PS2.B, Nested.S.PS2.PB
|
||||||
|
{1, 1, 0}, {1, 1, 1}, // Nested.PS.I, Nested.PS.PI
|
||||||
|
{1, 1, 2}, {1, 1, 3}, // Nested.PS.CI, Nested.PS.PCI
|
||||||
|
{1, 1, 4, 0}, {1, 1, 4, 1}, // Nested.PS.S2.B, Nested.PS.S2.PB
|
||||||
|
{1, 1, 5, 0}, {1, 1, 5, 1}, // Nested.PS.PS2.B, Nested.PS.PS2.PB
|
||||||
|
}, paths)
|
||||||
|
assert.True(t, len(names) == len(paths))
|
||||||
|
|
||||||
|
testStruct := Embedded{}
|
||||||
|
for i, path := range paths {
|
||||||
|
val, field := followPathThroughStructs(reflect.ValueOf(&testStruct), path)
|
||||||
|
assert.True(t, val.IsValid())
|
||||||
|
assert.True(t, strings.Contains(names[i][len(names[i])-1], field.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCompileQuery(t *testing.T) {
|
||||||
|
t.Run("simple struct", func(t *testing.T) {
|
||||||
|
type Dest struct {
|
||||||
|
Foo int `db:"foo"`
|
||||||
|
Bar bool `db:"bar"`
|
||||||
|
Nope string // no tag
|
||||||
|
}
|
||||||
|
|
||||||
|
compiled := compileQuery("SELECT $columns FROM greeblies", reflect.TypeOf(Dest{}))
|
||||||
|
assert.Equal(t, "SELECT foo, bar FROM greeblies", compiled.query)
|
||||||
|
})
|
||||||
|
t.Run("complex structs", func(t *testing.T) {
|
||||||
|
type CustomInt int
|
||||||
|
type S2 struct {
|
||||||
|
B bool `db:"B"`
|
||||||
|
PB *bool `db:"PB"`
|
||||||
|
|
||||||
|
NoTag string
|
||||||
|
}
|
||||||
type S struct {
|
type S struct {
|
||||||
I int `db:"I"`
|
I int `db:"I"`
|
||||||
PI *int `db:"PI"`
|
PI *int `db:"PI"`
|
||||||
CI CustomInt `db:"CI"`
|
CI CustomInt `db:"CI"`
|
||||||
PCI *CustomInt `db:"PCI"`
|
PCI *CustomInt `db:"PCI"`
|
||||||
B bool `db:"B"`
|
S2 `db:"S2"` // embedded!
|
||||||
PB *bool `db:"PB"`
|
PS2 *S2 `db:"PS2"`
|
||||||
|
|
||||||
NoTag int
|
NoTag int
|
||||||
}
|
}
|
||||||
|
@ -26,34 +103,48 @@ func TestPaths(t *testing.T) {
|
||||||
|
|
||||||
NoTag S
|
NoTag S
|
||||||
}
|
}
|
||||||
type Embedded struct {
|
type Dest struct {
|
||||||
NoTag S
|
NoTag S
|
||||||
Nested
|
Nested
|
||||||
}
|
}
|
||||||
|
|
||||||
names, paths, err := getColumnNamesAndPaths(reflect.TypeOf(Embedded{}), nil, "")
|
compiled := compileQuery("SELECT $columns FROM greeblies", reflect.TypeOf(Dest{}))
|
||||||
if assert.Nil(t, err) {
|
assert.Equal(t, "SELECT S.I, S.PI, S.CI, S.PCI, S_S2.B, S_S2.PB, S_PS2.B, S_PS2.PB, PS.I, PS.PI, PS.CI, PS.PCI, PS_S2.B, PS_S2.PB, PS_PS2.B, PS_PS2.PB FROM greeblies", compiled.query)
|
||||||
assert.Equal(t, []string{
|
})
|
||||||
"S.I", "S.PI",
|
t.Run("int", func(t *testing.T) {
|
||||||
"S.CI", "S.PCI",
|
type Dest int
|
||||||
"S.B", "S.PB",
|
|
||||||
"PS.I", "PS.PI",
|
// There should be no error here because we do not need to extract columns from
|
||||||
"PS.CI", "PS.PCI",
|
// the destination type. There may be errors down the line in value iteration, but
|
||||||
"PS.B", "PS.PB",
|
// that is always the case if the Go types don't match the query.
|
||||||
}, names)
|
compiled := compileQuery("SELECT id FROM greeblies", reflect.TypeOf(Dest(0)))
|
||||||
assert.Equal(t, [][]int{
|
assert.Equal(t, "SELECT id FROM greeblies", compiled.query)
|
||||||
{1, 0, 0}, {1, 0, 1}, {1, 0, 2}, {1, 0, 3}, {1, 0, 4}, {1, 0, 5},
|
})
|
||||||
{1, 1, 0}, {1, 1, 1}, {1, 1, 2}, {1, 1, 3}, {1, 1, 4}, {1, 1, 5},
|
t.Run("just one table", func(t *testing.T) {
|
||||||
}, paths)
|
type Dest struct {
|
||||||
assert.True(t, len(names) == len(paths))
|
Foo int `db:"foo"`
|
||||||
|
Bar bool `db:"bar"`
|
||||||
|
Nope string // no tag
|
||||||
}
|
}
|
||||||
|
|
||||||
testStruct := Embedded{}
|
// The prefix is necessary because otherwise we would have to provide a struct with
|
||||||
for i, path := range paths {
|
// a db tag in order to provide the query with the `greeblies.` prefix in the
|
||||||
val, field := followPathThroughStructs(reflect.ValueOf(&testStruct), path)
|
// final query. This comes up a lot when we do a JOIN to help with a condition, but
|
||||||
assert.True(t, val.IsValid())
|
// don't actually care about any of the data we joined to.
|
||||||
assert.True(t, strings.Contains(names[i], field.Name))
|
compiled := compileQuery(
|
||||||
}
|
"SELECT $columns{greeblies} FROM greeblies NATURAL JOIN props",
|
||||||
|
reflect.TypeOf(Dest{}),
|
||||||
|
)
|
||||||
|
assert.Equal(t, "SELECT greeblies.foo, greeblies.bar FROM greeblies NATURAL JOIN props", compiled.query)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("using $columns without a struct is not allowed", func(t *testing.T) {
|
||||||
|
type Dest int
|
||||||
|
|
||||||
|
assert.Panics(t, func() {
|
||||||
|
compileQuery("SELECT $columns FROM greeblies", reflect.TypeOf(Dest(0)))
|
||||||
|
})
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryBuilder(t *testing.T) {
|
func TestQueryBuilder(t *testing.T) {
|
||||||
|
|
|
@ -93,7 +93,7 @@ func (bot *botInstance) handleProfileCommand(ctx context.Context, i *Interaction
|
||||||
type profileResult struct {
|
type profileResult struct {
|
||||||
HMNUser models.User `db:"auth_user"`
|
HMNUser models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
ires, err := db.QueryOne(ctx, bot.dbConn, profileResult{},
|
res, err := db.QueryOne[profileResult](ctx, bot.dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -122,7 +122,6 @@ func (bot *botInstance) handleProfileCommand(ctx context.Context, i *Interaction
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
res := ires.(*profileResult)
|
|
||||||
|
|
||||||
projectsAndStuff, err := hmndata.FetchProjects(ctx, bot.dbConn, nil, hmndata.ProjectsQuery{
|
projectsAndStuff, err := hmndata.FetchProjects(ctx, bot.dbConn, nil, hmndata.ProjectsQuery{
|
||||||
OwnerIDs: []int{res.HMNUser.ID},
|
OwnerIDs: []int{res.HMNUser.ID},
|
||||||
|
|
|
@ -250,7 +250,7 @@ func (bot *botInstance) connect(ctx context.Context) error {
|
||||||
// an old one or starting a new one.
|
// an old one or starting a new one.
|
||||||
|
|
||||||
shouldResume := true
|
shouldResume := true
|
||||||
isession, err := db.QueryOne(ctx, bot.dbConn, models.DiscordSession{}, `SELECT $columns FROM discord_session`)
|
session, err := db.QueryOne[models.DiscordSession](ctx, bot.dbConn, `SELECT $columns FROM discord_session`)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if errors.Is(err, db.NotFound) {
|
if errors.Is(err, db.NotFound) {
|
||||||
// No session yet! Just identify and get on with it
|
// No session yet! Just identify and get on with it
|
||||||
|
@ -262,8 +262,6 @@ func (bot *botInstance) connect(ctx context.Context) error {
|
||||||
|
|
||||||
if shouldResume {
|
if shouldResume {
|
||||||
// Reconnect to the previous session
|
// Reconnect to the previous session
|
||||||
session := isession.(*models.DiscordSession)
|
|
||||||
|
|
||||||
err := bot.sendGatewayMessage(ctx, GatewayMessage{
|
err := bot.sendGatewayMessage(ctx, GatewayMessage{
|
||||||
Opcode: OpcodeResume,
|
Opcode: OpcodeResume,
|
||||||
Data: Resume{
|
Data: Resume{
|
||||||
|
@ -408,7 +406,7 @@ func (bot *botInstance) doSender(ctx context.Context) {
|
||||||
}
|
}
|
||||||
defer tx.Rollback(ctx)
|
defer tx.Rollback(ctx)
|
||||||
|
|
||||||
msgs, err := db.Query(ctx, tx, models.DiscordOutgoingMessage{}, `
|
msgs, err := db.Query[models.DiscordOutgoingMessage](ctx, tx, `
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM discord_outgoingmessages
|
FROM discord_outgoingmessages
|
||||||
ORDER BY id ASC
|
ORDER BY id ASC
|
||||||
|
@ -418,8 +416,7 @@ func (bot *botInstance) doSender(ctx context.Context) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, imsg := range msgs {
|
for _, msg := range msgs {
|
||||||
msg := imsg.(*models.DiscordOutgoingMessage)
|
|
||||||
if time.Now().After(msg.ExpiresAt) {
|
if time.Now().After(msg.ExpiresAt) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
|
@ -76,7 +76,7 @@ func fetchMissingContent(ctx context.Context, dbConn *pgxpool.Pool) {
|
||||||
type query struct {
|
type query struct {
|
||||||
Message models.DiscordMessage `db:"msg"`
|
Message models.DiscordMessage `db:"msg"`
|
||||||
}
|
}
|
||||||
imessagesWithoutContent, err := db.Query(ctx, dbConn, query{},
|
messagesWithoutContent, err := db.Query[query](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -95,10 +95,10 @@ func fetchMissingContent(ctx context.Context, dbConn *pgxpool.Pool) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(imessagesWithoutContent) > 0 {
|
if len(messagesWithoutContent) > 0 {
|
||||||
log.Info().Msgf("There are %d Discord messages without content, fetching their content now...", len(imessagesWithoutContent))
|
log.Info().Msgf("There are %d Discord messages without content, fetching their content now...", len(messagesWithoutContent))
|
||||||
msgloop:
|
msgloop:
|
||||||
for _, imsg := range imessagesWithoutContent {
|
for _, msgRow := range messagesWithoutContent {
|
||||||
select {
|
select {
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
log.Info().Msg("Scrape was canceled")
|
log.Info().Msg("Scrape was canceled")
|
||||||
|
@ -106,7 +106,7 @@ func fetchMissingContent(ctx context.Context, dbConn *pgxpool.Pool) {
|
||||||
default:
|
default:
|
||||||
}
|
}
|
||||||
|
|
||||||
msg := imsg.(*query).Message
|
msg := msgRow.Message
|
||||||
|
|
||||||
discordMsg, err := GetChannelMessage(ctx, msg.ChannelID, msg.ID)
|
discordMsg, err := GetChannelMessage(ctx, msg.ChannelID, msg.ID)
|
||||||
if errors.Is(err, NotFound) {
|
if errors.Is(err, NotFound) {
|
||||||
|
|
|
@ -165,7 +165,7 @@ func InternMessage(
|
||||||
dbConn db.ConnOrTx,
|
dbConn db.ConnOrTx,
|
||||||
msg *Message,
|
msg *Message,
|
||||||
) error {
|
) error {
|
||||||
_, err := db.QueryOne(ctx, dbConn, models.DiscordMessage{},
|
_, err := db.QueryOne[models.DiscordMessage](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessage
|
FROM handmade_discordmessage
|
||||||
|
@ -219,7 +219,7 @@ type InternedMessage struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func FetchInternedMessage(ctx context.Context, dbConn db.ConnOrTx, msgId string) (*InternedMessage, error) {
|
func FetchInternedMessage(ctx context.Context, dbConn db.ConnOrTx, msgId string) (*InternedMessage, error) {
|
||||||
result, err := db.QueryOne(ctx, dbConn, InternedMessage{},
|
interned, err := db.QueryOne[InternedMessage](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -236,7 +236,6 @@ func FetchInternedMessage(ctx context.Context, dbConn db.ConnOrTx, msgId string)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
interned := result.(*InternedMessage)
|
|
||||||
return interned, nil
|
return interned, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -283,7 +282,7 @@ func HandleInternedMessage(ctx context.Context, dbConn db.ConnOrTx, msg *Message
|
||||||
}
|
}
|
||||||
|
|
||||||
func DeleteInternedMessage(ctx context.Context, dbConn db.ConnOrTx, interned *InternedMessage) error {
|
func DeleteInternedMessage(ctx context.Context, dbConn db.ConnOrTx, interned *InternedMessage) error {
|
||||||
isnippet, err := db.QueryOne(ctx, dbConn, models.Snippet{},
|
snippet, err := db.QueryOne[models.Snippet](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_snippet
|
FROM handmade_snippet
|
||||||
|
@ -294,10 +293,6 @@ func DeleteInternedMessage(ctx context.Context, dbConn db.ConnOrTx, interned *In
|
||||||
if err != nil && !errors.Is(err, db.NotFound) {
|
if err != nil && !errors.Is(err, db.NotFound) {
|
||||||
return oops.New(err, "failed to fetch snippet for discord message")
|
return oops.New(err, "failed to fetch snippet for discord message")
|
||||||
}
|
}
|
||||||
var snippet *models.Snippet
|
|
||||||
if !errors.Is(err, db.NotFound) {
|
|
||||||
snippet = isnippet.(*models.Snippet)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE(asaf): Also deletes the following through a db cascade:
|
// NOTE(asaf): Also deletes the following through a db cascade:
|
||||||
// * handmade_discordmessageattachment
|
// * handmade_discordmessageattachment
|
||||||
|
@ -367,7 +362,7 @@ func SaveMessageContents(
|
||||||
return oops.New(err, "failed to create or update message contents")
|
return oops.New(err, "failed to create or update message contents")
|
||||||
}
|
}
|
||||||
|
|
||||||
icontent, err := db.QueryOne(ctx, dbConn, models.DiscordMessageContent{},
|
content, err := db.QueryOne[models.DiscordMessageContent](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -380,7 +375,7 @@ func SaveMessageContents(
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return oops.New(err, "failed to fetch message contents")
|
return oops.New(err, "failed to fetch message contents")
|
||||||
}
|
}
|
||||||
interned.MessageContent = icontent.(*models.DiscordMessageContent)
|
interned.MessageContent = content
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save attachments
|
// Save attachments
|
||||||
|
@ -472,7 +467,7 @@ func saveAttachment(
|
||||||
hmnUserID int,
|
hmnUserID int,
|
||||||
discordMessageID string,
|
discordMessageID string,
|
||||||
) (*models.DiscordMessageAttachment, error) {
|
) (*models.DiscordMessageAttachment, error) {
|
||||||
iexisting, err := db.QueryOne(ctx, tx, models.DiscordMessageAttachment{},
|
existing, err := db.QueryOne[models.DiscordMessageAttachment](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessageattachment
|
FROM handmade_discordmessageattachment
|
||||||
|
@ -481,7 +476,7 @@ func saveAttachment(
|
||||||
attachment.ID,
|
attachment.ID,
|
||||||
)
|
)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
return iexisting.(*models.DiscordMessageAttachment), nil
|
return existing, nil
|
||||||
} else if errors.Is(err, db.NotFound) {
|
} else if errors.Is(err, db.NotFound) {
|
||||||
// this is fine, just create it
|
// this is fine, just create it
|
||||||
} else {
|
} else {
|
||||||
|
@ -534,7 +529,7 @@ func saveAttachment(
|
||||||
return nil, oops.New(err, "failed to save Discord attachment data")
|
return nil, oops.New(err, "failed to save Discord attachment data")
|
||||||
}
|
}
|
||||||
|
|
||||||
iDiscordAttachment, err := db.QueryOne(ctx, tx, models.DiscordMessageAttachment{},
|
discordAttachment, err := db.QueryOne[models.DiscordMessageAttachment](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessageattachment
|
FROM handmade_discordmessageattachment
|
||||||
|
@ -546,7 +541,7 @@ func saveAttachment(
|
||||||
return nil, oops.New(err, "failed to fetch new Discord attachment data")
|
return nil, oops.New(err, "failed to fetch new Discord attachment data")
|
||||||
}
|
}
|
||||||
|
|
||||||
return iDiscordAttachment.(*models.DiscordMessageAttachment), nil
|
return discordAttachment, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Saves an embed from Discord. NOTE: This is _not_ idempotent, so only call it
|
// Saves an embed from Discord. NOTE: This is _not_ idempotent, so only call it
|
||||||
|
@ -636,7 +631,7 @@ func saveEmbed(
|
||||||
return nil, oops.New(err, "failed to insert new embed")
|
return nil, oops.New(err, "failed to insert new embed")
|
||||||
}
|
}
|
||||||
|
|
||||||
iDiscordEmbed, err := db.QueryOne(ctx, tx, models.DiscordMessageEmbed{},
|
discordEmbed, err := db.QueryOne[models.DiscordMessageEmbed](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessageembed
|
FROM handmade_discordmessageembed
|
||||||
|
@ -648,11 +643,11 @@ func saveEmbed(
|
||||||
return nil, oops.New(err, "failed to fetch new Discord embed data")
|
return nil, oops.New(err, "failed to fetch new Discord embed data")
|
||||||
}
|
}
|
||||||
|
|
||||||
return iDiscordEmbed.(*models.DiscordMessageEmbed), nil
|
return discordEmbed, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func FetchSnippetForMessage(ctx context.Context, dbConn db.ConnOrTx, msgID string) (*models.Snippet, error) {
|
func FetchSnippetForMessage(ctx context.Context, dbConn db.ConnOrTx, msgID string) (*models.Snippet, error) {
|
||||||
iresult, err := db.QueryOne(ctx, dbConn, models.Snippet{},
|
snippet, err := db.QueryOne[models.Snippet](ctx, dbConn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_snippet
|
FROM handmade_snippet
|
||||||
|
@ -669,7 +664,7 @@ func FetchSnippetForMessage(ctx context.Context, dbConn db.ConnOrTx, msgID strin
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return iresult.(*models.Snippet), nil
|
return snippet, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -808,7 +803,7 @@ func HandleSnippetForInternedMessage(ctx context.Context, dbConn db.ConnOrTx, in
|
||||||
type tagsRow struct {
|
type tagsRow struct {
|
||||||
Tag models.Tag `db:"tags"`
|
Tag models.Tag `db:"tags"`
|
||||||
}
|
}
|
||||||
iUserTags, err := db.Query(ctx, tx, tagsRow{},
|
userTags, err := db.Query[tagsRow](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -823,8 +818,8 @@ func HandleSnippetForInternedMessage(ctx context.Context, dbConn db.ConnOrTx, in
|
||||||
return oops.New(err, "failed to fetch tags for user projects")
|
return oops.New(err, "failed to fetch tags for user projects")
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, itag := range iUserTags {
|
for _, userTag := range userTags {
|
||||||
tag := itag.(*tagsRow).Tag
|
tag := userTag.Tag
|
||||||
allTags = append(allTags, tag.ID)
|
allTags = append(allTags, tag.ID)
|
||||||
for _, messageTag := range messageTags {
|
for _, messageTag := range messageTags {
|
||||||
if strings.EqualFold(tag.Text, messageTag) {
|
if strings.EqualFold(tag.Text, messageTag) {
|
||||||
|
@ -890,7 +885,7 @@ var RESnippetableUrl = regexp.MustCompile(`^https?://(youtu\.be|(www\.)?youtube\
|
||||||
|
|
||||||
func getSnippetAssetOrUrl(ctx context.Context, tx db.ConnOrTx, msg *models.DiscordMessage) (*uuid.UUID, *string, error) {
|
func getSnippetAssetOrUrl(ctx context.Context, tx db.ConnOrTx, msg *models.DiscordMessage) (*uuid.UUID, *string, error) {
|
||||||
// Check attachments
|
// Check attachments
|
||||||
attachments, err := db.Query(ctx, tx, models.DiscordMessageAttachment{},
|
attachments, err := db.Query[models.DiscordMessageAttachment](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessageattachment
|
FROM handmade_discordmessageattachment
|
||||||
|
@ -901,13 +896,12 @@ func getSnippetAssetOrUrl(ctx context.Context, tx db.ConnOrTx, msg *models.Disco
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, oops.New(err, "failed to fetch message attachments")
|
return nil, nil, oops.New(err, "failed to fetch message attachments")
|
||||||
}
|
}
|
||||||
for _, iattachment := range attachments {
|
for _, attachment := range attachments {
|
||||||
attachment := iattachment.(*models.DiscordMessageAttachment)
|
|
||||||
return &attachment.AssetID, nil, nil
|
return &attachment.AssetID, nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check embeds
|
// Check embeds
|
||||||
embeds, err := db.Query(ctx, tx, models.DiscordMessageEmbed{},
|
embeds, err := db.Query[models.DiscordMessageEmbed](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_discordmessageembed
|
FROM handmade_discordmessageembed
|
||||||
|
@ -918,8 +912,7 @@ func getSnippetAssetOrUrl(ctx context.Context, tx db.ConnOrTx, msg *models.Disco
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, oops.New(err, "failed to fetch discord embeds")
|
return nil, nil, oops.New(err, "failed to fetch discord embeds")
|
||||||
}
|
}
|
||||||
for _, iembed := range embeds {
|
for _, embed := range embeds {
|
||||||
embed := iembed.(*models.DiscordMessageEmbed)
|
|
||||||
if embed.VideoID != nil {
|
if embed.VideoID != nil {
|
||||||
return embed.VideoID, nil, nil
|
return embed.VideoID, nil, nil
|
||||||
} else if embed.ImageID != nil {
|
} else if embed.ImageID != nil {
|
||||||
|
|
|
@ -140,15 +140,15 @@ func FetchProjects(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Do the query
|
// Do the query
|
||||||
iprojects, err := db.Query(ctx, dbConn, projectRow{}, qb.String(), qb.Args()...)
|
projects, err := db.Query[projectRow](ctx, dbConn, qb.String(), qb.Args()...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch projects")
|
return nil, oops.New(err, "failed to fetch projects")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fetch project owners to do permission checks
|
// Fetch project owners to do permission checks
|
||||||
projectIds := make([]int, len(iprojects))
|
projectIds := make([]int, len(projects))
|
||||||
for i, iproject := range iprojects {
|
for i, projectRow := range projects {
|
||||||
projectIds[i] = iproject.(*projectRow).Project.ID
|
projectIds[i] = projectRow.Project.ID
|
||||||
}
|
}
|
||||||
projectOwners, err := FetchMultipleProjectsOwners(ctx, tx, projectIds)
|
projectOwners, err := FetchMultipleProjectsOwners(ctx, tx, projectIds)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -156,8 +156,7 @@ func FetchProjects(
|
||||||
}
|
}
|
||||||
|
|
||||||
var res []ProjectAndStuff
|
var res []ProjectAndStuff
|
||||||
for i, iproject := range iprojects {
|
for i, row := range projects {
|
||||||
row := iproject.(*projectRow)
|
|
||||||
owners := projectOwners[i].Owners
|
owners := projectOwners[i].Owners
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -334,7 +333,7 @@ func FetchMultipleProjectsOwners(
|
||||||
UserID int `db:"user_id"`
|
UserID int `db:"user_id"`
|
||||||
ProjectID int `db:"project_id"`
|
ProjectID int `db:"project_id"`
|
||||||
}
|
}
|
||||||
iuserprojects, err := db.Query(ctx, tx, userProject{},
|
userProjects, err := db.Query[userProject](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_user_projects
|
FROM handmade_user_projects
|
||||||
|
@ -348,9 +347,7 @@ func FetchMultipleProjectsOwners(
|
||||||
|
|
||||||
// Get the unique user IDs from this set and fetch the users from the db
|
// Get the unique user IDs from this set and fetch the users from the db
|
||||||
var userIds []int
|
var userIds []int
|
||||||
for _, iuserproject := range iuserprojects {
|
for _, userProject := range userProjects {
|
||||||
userProject := iuserproject.(*userProject)
|
|
||||||
|
|
||||||
addUserId := true
|
addUserId := true
|
||||||
for _, uid := range userIds {
|
for _, uid := range userIds {
|
||||||
if uid == userProject.UserID {
|
if uid == userProject.UserID {
|
||||||
|
@ -364,7 +361,7 @@ func FetchMultipleProjectsOwners(
|
||||||
type userQuery struct {
|
type userQuery struct {
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
iusers, err := db.Query(ctx, tx, userQuery{},
|
projectUsers, err := db.Query[userQuery](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM auth_user
|
FROM auth_user
|
||||||
|
@ -383,9 +380,7 @@ func FetchMultipleProjectsOwners(
|
||||||
for i, pid := range projectIds {
|
for i, pid := range projectIds {
|
||||||
res[i] = ProjectOwners{ProjectID: pid}
|
res[i] = ProjectOwners{ProjectID: pid}
|
||||||
}
|
}
|
||||||
for _, iuserproject := range iuserprojects {
|
for _, userProject := range userProjects {
|
||||||
userProject := iuserproject.(*userProject)
|
|
||||||
|
|
||||||
// Get a pointer to the existing record in the result
|
// Get a pointer to the existing record in the result
|
||||||
var projectOwners *ProjectOwners
|
var projectOwners *ProjectOwners
|
||||||
for i := range res {
|
for i := range res {
|
||||||
|
@ -396,8 +391,8 @@ func FetchMultipleProjectsOwners(
|
||||||
|
|
||||||
// Get the full user record we fetched
|
// Get the full user record we fetched
|
||||||
var user *models.User
|
var user *models.User
|
||||||
for _, iuser := range iusers {
|
for _, projectUser := range projectUsers {
|
||||||
u := iuser.(*userQuery).User
|
u := projectUser.User
|
||||||
if u.ID == userProject.UserID {
|
if u.ID == userProject.UserID {
|
||||||
user = &u
|
user = &u
|
||||||
}
|
}
|
||||||
|
@ -473,7 +468,7 @@ func SetProjectTag(
|
||||||
resultTag = p.Tag
|
resultTag = p.Tag
|
||||||
} else if p.Project.TagID == nil {
|
} else if p.Project.TagID == nil {
|
||||||
// Create a tag
|
// Create a tag
|
||||||
itag, err := db.QueryOne(ctx, tx, models.Tag{},
|
tag, err := db.QueryOne[models.Tag](ctx, tx,
|
||||||
`
|
`
|
||||||
INSERT INTO tags (text) VALUES ($1)
|
INSERT INTO tags (text) VALUES ($1)
|
||||||
RETURNING $columns
|
RETURNING $columns
|
||||||
|
@ -483,7 +478,7 @@ func SetProjectTag(
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to create new tag for project")
|
return nil, oops.New(err, "failed to create new tag for project")
|
||||||
}
|
}
|
||||||
resultTag = itag.(*models.Tag)
|
resultTag = tag
|
||||||
|
|
||||||
// Attach it to the project
|
// Attach it to the project
|
||||||
_, err = tx.Exec(ctx,
|
_, err = tx.Exec(ctx,
|
||||||
|
@ -499,7 +494,7 @@ func SetProjectTag(
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Update the text of an existing one
|
// Update the text of an existing one
|
||||||
itag, err := db.QueryOne(ctx, tx, models.Tag{},
|
tag, err := db.QueryOne[models.Tag](ctx, tx,
|
||||||
`
|
`
|
||||||
UPDATE tags
|
UPDATE tags
|
||||||
SET text = $1
|
SET text = $1
|
||||||
|
@ -511,7 +506,7 @@ func SetProjectTag(
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to update existing tag")
|
return nil, oops.New(err, "failed to update existing tag")
|
||||||
}
|
}
|
||||||
resultTag = itag.(*models.Tag)
|
resultTag = tag
|
||||||
}
|
}
|
||||||
|
|
||||||
err = tx.Commit(ctx)
|
err = tx.Commit(ctx)
|
||||||
|
|
|
@ -47,7 +47,7 @@ func FetchSnippets(
|
||||||
type snippetIDRow struct {
|
type snippetIDRow struct {
|
||||||
SnippetID int `db:"snippet_id"`
|
SnippetID int `db:"snippet_id"`
|
||||||
}
|
}
|
||||||
iSnippetIDs, err := db.Query(ctx, tx, snippetIDRow{},
|
snippetIDs, err := db.Query[snippetIDRow](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT DISTINCT snippet_id
|
SELECT DISTINCT snippet_id
|
||||||
FROM
|
FROM
|
||||||
|
@ -63,13 +63,13 @@ func FetchSnippets(
|
||||||
}
|
}
|
||||||
|
|
||||||
// special early-out: no snippets found for these tags at all
|
// special early-out: no snippets found for these tags at all
|
||||||
if len(iSnippetIDs) == 0 {
|
if len(snippetIDs) == 0 {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
q.IDs = make([]int, len(iSnippetIDs))
|
q.IDs = make([]int, len(snippetIDs))
|
||||||
for i := range iSnippetIDs {
|
for i := range snippetIDs {
|
||||||
q.IDs[i] = iSnippetIDs[i].(*snippetIDRow).SnippetID
|
q.IDs[i] = snippetIDs[i].SnippetID
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -125,16 +125,14 @@ func FetchSnippets(
|
||||||
DiscordMessage *models.DiscordMessage `db:"discord_message"`
|
DiscordMessage *models.DiscordMessage `db:"discord_message"`
|
||||||
}
|
}
|
||||||
|
|
||||||
iresults, err := db.Query(ctx, tx, resultRow{}, qb.String(), qb.Args()...)
|
rows, err := db.Query[resultRow](ctx, tx, qb.String(), qb.Args()...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch threads")
|
return nil, oops.New(err, "failed to fetch threads")
|
||||||
}
|
}
|
||||||
|
|
||||||
result := make([]SnippetAndStuff, len(iresults)) // allocate extra space because why not
|
result := make([]SnippetAndStuff, len(rows)) // allocate extra space because why not
|
||||||
snippetIDs := make([]int, len(iresults))
|
snippetIDs := make([]int, len(rows))
|
||||||
for i, iresult := range iresults {
|
for i, row := range rows {
|
||||||
row := *iresult.(*resultRow)
|
|
||||||
|
|
||||||
result[i] = SnippetAndStuff{
|
result[i] = SnippetAndStuff{
|
||||||
Snippet: row.Snippet,
|
Snippet: row.Snippet,
|
||||||
Owner: row.Owner,
|
Owner: row.Owner,
|
||||||
|
@ -150,7 +148,7 @@ func FetchSnippets(
|
||||||
SnippetID int `db:"snippet_tags.snippet_id"`
|
SnippetID int `db:"snippet_tags.snippet_id"`
|
||||||
Tag *models.Tag `db:"tags"`
|
Tag *models.Tag `db:"tags"`
|
||||||
}
|
}
|
||||||
iSnippetTags, err := db.Query(ctx, tx, snippetTagRow{},
|
snippetTags, err := db.Query[snippetTagRow](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -170,8 +168,7 @@ func FetchSnippets(
|
||||||
for i := range result {
|
for i := range result {
|
||||||
resultBySnippetId[result[i].Snippet.ID] = &result[i]
|
resultBySnippetId[result[i].Snippet.ID] = &result[i]
|
||||||
}
|
}
|
||||||
for _, iSnippetTag := range iSnippetTags {
|
for _, snippetTag := range snippetTags {
|
||||||
snippetTag := iSnippetTag.(*snippetTagRow)
|
|
||||||
item := resultBySnippetId[snippetTag.SnippetID]
|
item := resultBySnippetId[snippetTag.SnippetID]
|
||||||
item.Tags = append(item.Tags, snippetTag.Tag)
|
item.Tags = append(item.Tags, snippetTag.Tag)
|
||||||
}
|
}
|
||||||
|
|
|
@ -40,18 +40,12 @@ func FetchTags(ctx context.Context, dbConn db.ConnOrTx, q TagQuery) ([]*models.T
|
||||||
qb.Add(`LIMIT $? OFFSET $?`, q.Limit, q.Offset)
|
qb.Add(`LIMIT $? OFFSET $?`, q.Limit, q.Offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
itags, err := db.Query(ctx, dbConn, models.Tag{}, qb.String(), qb.Args()...)
|
tags, err := db.Query[models.Tag](ctx, dbConn, qb.String(), qb.Args()...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch tags")
|
return nil, oops.New(err, "failed to fetch tags")
|
||||||
}
|
}
|
||||||
|
|
||||||
res := make([]*models.Tag, len(itags))
|
return tags, nil
|
||||||
for i, itag := range itags {
|
|
||||||
tag := itag.(*models.Tag)
|
|
||||||
res[i] = tag
|
|
||||||
}
|
|
||||||
|
|
||||||
return res, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func FetchTag(ctx context.Context, dbConn db.ConnOrTx, q TagQuery) (*models.Tag, error) {
|
func FetchTag(ctx context.Context, dbConn db.ConnOrTx, q TagQuery) (*models.Tag, error) {
|
||||||
|
|
|
@ -145,15 +145,13 @@ func FetchThreads(
|
||||||
ForumLastReadTime *time.Time `db:"slri.lastread"`
|
ForumLastReadTime *time.Time `db:"slri.lastread"`
|
||||||
}
|
}
|
||||||
|
|
||||||
iresults, err := db.Query(ctx, dbConn, resultRow{}, qb.String(), qb.Args()...)
|
results, err := db.Query[resultRow](ctx, dbConn, qb.String(), qb.Args()...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch threads")
|
return nil, oops.New(err, "failed to fetch threads")
|
||||||
}
|
}
|
||||||
|
|
||||||
result := make([]ThreadAndStuff, len(iresults))
|
result := make([]ThreadAndStuff, len(results))
|
||||||
for i, iresult := range iresults {
|
for i, row := range results {
|
||||||
row := *iresult.(*resultRow)
|
|
||||||
|
|
||||||
hasRead := false
|
hasRead := false
|
||||||
if currentUser != nil && currentUser.MarkedAllReadAt.After(row.LastPost.PostDate) {
|
if currentUser != nil && currentUser.MarkedAllReadAt.After(row.LastPost.PostDate) {
|
||||||
hasRead = true
|
hasRead = true
|
||||||
|
@ -405,15 +403,13 @@ func FetchPosts(
|
||||||
qb.Add(`LIMIT $? OFFSET $?`, q.Limit, q.Offset)
|
qb.Add(`LIMIT $? OFFSET $?`, q.Limit, q.Offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
iresults, err := db.Query(ctx, dbConn, resultRow{}, qb.String(), qb.Args()...)
|
results, err := db.Query[resultRow](ctx, dbConn, qb.String(), qb.Args()...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch posts")
|
return nil, oops.New(err, "failed to fetch posts")
|
||||||
}
|
}
|
||||||
|
|
||||||
result := make([]PostAndStuff, len(iresults))
|
result := make([]PostAndStuff, len(results))
|
||||||
for i, iresult := range iresults {
|
for i, row := range results {
|
||||||
row := *iresult.(*resultRow)
|
|
||||||
|
|
||||||
hasRead := false
|
hasRead := false
|
||||||
if currentUser != nil && currentUser.MarkedAllReadAt.After(row.Post.PostDate) {
|
if currentUser != nil && currentUser.MarkedAllReadAt.After(row.Post.PostDate) {
|
||||||
hasRead = true
|
hasRead = true
|
||||||
|
@ -611,7 +607,7 @@ func UserCanEditPost(ctx context.Context, connOrTx db.ConnOrTx, user models.User
|
||||||
type postResult struct {
|
type postResult struct {
|
||||||
AuthorID *int `db:"post.author_id"`
|
AuthorID *int `db:"post.author_id"`
|
||||||
}
|
}
|
||||||
iresult, err := db.QueryOne(ctx, connOrTx, postResult{},
|
result, err := db.QueryOne[postResult](ctx, connOrTx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -629,7 +625,6 @@ func UserCanEditPost(ctx context.Context, connOrTx db.ConnOrTx, user models.User
|
||||||
panic(oops.New(err, "failed to get author of post when checking permissions"))
|
panic(oops.New(err, "failed to get author of post when checking permissions"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
result := iresult.(*postResult)
|
|
||||||
|
|
||||||
return result.AuthorID != nil && *result.AuthorID == user.ID
|
return result.AuthorID != nil && *result.AuthorID == user.ID
|
||||||
}
|
}
|
||||||
|
@ -709,7 +704,7 @@ func DeletePost(
|
||||||
FirstPostID int `db:"first_id"`
|
FirstPostID int `db:"first_id"`
|
||||||
Deleted bool `db:"deleted"`
|
Deleted bool `db:"deleted"`
|
||||||
}
|
}
|
||||||
ti, err := db.QueryOne(ctx, tx, threadInfo{},
|
info, err := db.QueryOne[threadInfo](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -722,7 +717,6 @@ func DeletePost(
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(oops.New(err, "failed to fetch thread info"))
|
panic(oops.New(err, "failed to fetch thread info"))
|
||||||
}
|
}
|
||||||
info := ti.(*threadInfo)
|
|
||||||
if info.Deleted {
|
if info.Deleted {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
@ -851,7 +845,7 @@ func CreatePostVersion(ctx context.Context, tx pgx.Tx, postId int, unparsedConte
|
||||||
type assetId struct {
|
type assetId struct {
|
||||||
AssetID uuid.UUID `db:"id"`
|
AssetID uuid.UUID `db:"id"`
|
||||||
}
|
}
|
||||||
assetResult, err := db.Query(ctx, tx, assetId{},
|
assets, err := db.Query[assetId](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_asset
|
FROM handmade_asset
|
||||||
|
@ -865,8 +859,8 @@ func CreatePostVersion(ctx context.Context, tx pgx.Tx, postId int, unparsedConte
|
||||||
|
|
||||||
var values [][]interface{}
|
var values [][]interface{}
|
||||||
|
|
||||||
for _, asset := range assetResult {
|
for _, asset := range assets {
|
||||||
values = append(values, []interface{}{postId, asset.(*assetId).AssetID})
|
values = append(values, []interface{}{postId, asset.AssetID})
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = tx.CopyFrom(ctx, pgx.Identifier{"handmade_post_asset_usage"}, []string{"post_id", "asset_id"}, pgx.CopyFromRows(values))
|
_, err = tx.CopyFrom(ctx, pgx.Identifier{"handmade_post_asset_usage"}, []string{"post_id", "asset_id"}, pgx.CopyFromRows(values))
|
||||||
|
@ -886,7 +880,7 @@ Returns errThreadEmpty if the thread contains no visible posts any more.
|
||||||
You should probably mark the thread as deleted in this case.
|
You should probably mark the thread as deleted in this case.
|
||||||
*/
|
*/
|
||||||
func FixThreadPostIds(ctx context.Context, tx pgx.Tx, threadId int) error {
|
func FixThreadPostIds(ctx context.Context, tx pgx.Tx, threadId int) error {
|
||||||
postsIter, err := db.Query(ctx, tx, models.Post{},
|
posts, err := db.Query[models.Post](ctx, tx,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_post
|
FROM handmade_post
|
||||||
|
@ -901,9 +895,7 @@ func FixThreadPostIds(ctx context.Context, tx pgx.Tx, threadId int) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
var firstPost, lastPost *models.Post
|
var firstPost, lastPost *models.Post
|
||||||
for _, ipost := range postsIter {
|
for _, post := range posts {
|
||||||
post := ipost.(*models.Post)
|
|
||||||
|
|
||||||
if firstPost == nil || post.PostDate.Before(firstPost.PostDate) {
|
if firstPost == nil || post.PostDate.Before(firstPost.PostDate) {
|
||||||
firstPost = post
|
firstPost = post
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,111 +0,0 @@
|
||||||
package hmndata
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/db"
|
|
||||||
"git.handmade.network/hmn/hmn/src/models"
|
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
|
||||||
)
|
|
||||||
|
|
||||||
const InvalidUserTwitchID = "INVALID_USER"
|
|
||||||
|
|
||||||
type TwitchStreamer struct {
|
|
||||||
TwitchID string
|
|
||||||
TwitchLogin string
|
|
||||||
UserID *int
|
|
||||||
ProjectID *int
|
|
||||||
}
|
|
||||||
|
|
||||||
var twitchRegex = regexp.MustCompile(`twitch\.tv/(?P<login>[^/]+)$`)
|
|
||||||
|
|
||||||
func FetchTwitchStreamers(ctx context.Context, dbConn db.ConnOrTx) ([]TwitchStreamer, error) {
|
|
||||||
type linkResult struct {
|
|
||||||
Link models.Link `db:"link"`
|
|
||||||
}
|
|
||||||
streamers, err := db.Query(ctx, dbConn, linkResult{},
|
|
||||||
`
|
|
||||||
SELECT $columns
|
|
||||||
FROM
|
|
||||||
handmade_links AS link
|
|
||||||
LEFT JOIN auth_user AS link_owner ON link_owner.id = link.user_id
|
|
||||||
WHERE
|
|
||||||
url ~* 'twitch\.tv/([^/]+)$' AND
|
|
||||||
((link.user_id IS NOT NULL AND link_owner.status = $1) OR (link.project_id IS NOT NULL AND
|
|
||||||
(SELECT COUNT(*)
|
|
||||||
FROM
|
|
||||||
handmade_user_projects AS hup
|
|
||||||
JOIN auth_user AS project_owner ON project_owner.id = hup.user_id
|
|
||||||
WHERE
|
|
||||||
hup.project_id = link.project_id AND
|
|
||||||
project_owner.status != $1
|
|
||||||
) = 0))
|
|
||||||
`,
|
|
||||||
models.UserStatusApproved,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to fetch twitch links")
|
|
||||||
}
|
|
||||||
|
|
||||||
result := make([]TwitchStreamer, 0, len(streamers))
|
|
||||||
for _, s := range streamers {
|
|
||||||
dbStreamer := s.(*linkResult).Link
|
|
||||||
|
|
||||||
streamer := TwitchStreamer{
|
|
||||||
UserID: dbStreamer.UserID,
|
|
||||||
ProjectID: dbStreamer.ProjectID,
|
|
||||||
}
|
|
||||||
|
|
||||||
match := twitchRegex.FindStringSubmatch(dbStreamer.URL)
|
|
||||||
if match != nil {
|
|
||||||
login := strings.ToLower(match[twitchRegex.SubexpIndex("login")])
|
|
||||||
streamer.TwitchLogin = login
|
|
||||||
}
|
|
||||||
if len(streamer.TwitchLogin) > 0 {
|
|
||||||
duplicate := false
|
|
||||||
for _, r := range result {
|
|
||||||
if r.TwitchLogin == streamer.TwitchLogin {
|
|
||||||
duplicate = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !duplicate {
|
|
||||||
result = append(result, streamer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func FetchTwitchLoginsForUserOrProject(ctx context.Context, dbConn db.ConnOrTx, userId *int, projectId *int) ([]string, error) {
|
|
||||||
links, err := db.Query(ctx, dbConn, models.Link{},
|
|
||||||
`
|
|
||||||
SELECT $columns
|
|
||||||
FROM
|
|
||||||
handmade_links AS link
|
|
||||||
WHERE
|
|
||||||
url ~* 'twitch\.tv/([^/]+)$'
|
|
||||||
AND ((user_id = $1 AND project_id IS NULL) OR (user_id IS NULL AND project_id = $2))
|
|
||||||
ORDER BY url ASC
|
|
||||||
`,
|
|
||||||
userId,
|
|
||||||
projectId,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to fetch twitch links")
|
|
||||||
}
|
|
||||||
result := make([]string, 0, len(links))
|
|
||||||
|
|
||||||
for _, l := range links {
|
|
||||||
url := l.(*models.Link).URL
|
|
||||||
match := twitchRegex.FindStringSubmatch(url)
|
|
||||||
if match != nil {
|
|
||||||
login := strings.ToLower(match[twitchRegex.SubexpIndex("login")])
|
|
||||||
result = append(result, login)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result, nil
|
|
||||||
}
|
|
|
@ -687,18 +687,6 @@ func BuildAPICheckUsername() string {
|
||||||
return Url("/api/check_username", nil)
|
return Url("/api/check_username", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* Twitch stuff
|
|
||||||
*/
|
|
||||||
|
|
||||||
var RegexTwitchEventSubCallback = regexp.MustCompile("^/twitch_eventsub$")
|
|
||||||
|
|
||||||
func BuildTwitchEventSubCallback() string {
|
|
||||||
return Url("/twitch_eventsub", nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
var RegexTwitchDebugPage = regexp.MustCompile("^/twitch_debug$")
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* User assets
|
* User assets
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -1,60 +0,0 @@
|
||||||
package migrations
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/migration/types"
|
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
|
||||||
"github.com/jackc/pgx/v4"
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
registerMigration(TwitchTables{})
|
|
||||||
}
|
|
||||||
|
|
||||||
type TwitchTables struct{}
|
|
||||||
|
|
||||||
func (m TwitchTables) Version() types.MigrationVersion {
|
|
||||||
return types.MigrationVersion(time.Date(2022, 3, 15, 1, 21, 44, 0, time.UTC))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m TwitchTables) Name() string {
|
|
||||||
return "TwitchTables"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m TwitchTables) Description() string {
|
|
||||||
return "Create tables for live twitch streams and twitch ID cache"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m TwitchTables) Up(ctx context.Context, tx pgx.Tx) error {
|
|
||||||
_, err := tx.Exec(ctx,
|
|
||||||
`
|
|
||||||
CREATE TABLE twitch_streams (
|
|
||||||
twitch_id VARCHAR(255) NOT NULL,
|
|
||||||
twitch_login VARCHAR(255) NOT NULL,
|
|
||||||
title VARCHAR(255) NOT NULL,
|
|
||||||
started_at TIMESTAMP WITH TIME ZONE
|
|
||||||
);
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create twitch tables")
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m TwitchTables) Down(ctx context.Context, tx pgx.Tx) error {
|
|
||||||
_, err := tx.Exec(ctx,
|
|
||||||
`
|
|
||||||
DROP TABLE twitch_ids;
|
|
||||||
DROP TABLE twitch_streams;
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create twitch tables")
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
|
@ -1,45 +0,0 @@
|
||||||
package migrations
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/migration/types"
|
|
||||||
"github.com/jackc/pgx/v4"
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
registerMigration(AddIndexOnTwitchStreams{})
|
|
||||||
}
|
|
||||||
|
|
||||||
type AddIndexOnTwitchStreams struct{}
|
|
||||||
|
|
||||||
func (m AddIndexOnTwitchStreams) Version() types.MigrationVersion {
|
|
||||||
return types.MigrationVersion(time.Date(2022, 3, 15, 6, 35, 6, 0, time.UTC))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m AddIndexOnTwitchStreams) Name() string {
|
|
||||||
return "AddIndexOnTwitchStreams"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m AddIndexOnTwitchStreams) Description() string {
|
|
||||||
return "Add unique index on twitch streams"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m AddIndexOnTwitchStreams) Up(ctx context.Context, tx pgx.Tx) error {
|
|
||||||
_, err := tx.Exec(ctx,
|
|
||||||
`
|
|
||||||
CREATE UNIQUE INDEX twitch_streams_twitch_id ON twitch_streams (twitch_id);
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m AddIndexOnTwitchStreams) Down(ctx context.Context, tx pgx.Tx) error {
|
|
||||||
_, err := tx.Exec(ctx,
|
|
||||||
`
|
|
||||||
DROP INDEX twitch_streams_twitch_id;
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
return err
|
|
||||||
}
|
|
|
@ -47,7 +47,7 @@ func GetFullSubforumTree(ctx context.Context, conn *pgxpool.Pool) SubforumTree {
|
||||||
type subforumRow struct {
|
type subforumRow struct {
|
||||||
Subforum Subforum `db:"sf"`
|
Subforum Subforum `db:"sf"`
|
||||||
}
|
}
|
||||||
rowsSlice, err := db.Query(ctx, conn, subforumRow{},
|
rowsSlice, err := db.Query[subforumRow](ctx, conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -61,7 +61,7 @@ func GetFullSubforumTree(ctx context.Context, conn *pgxpool.Pool) SubforumTree {
|
||||||
|
|
||||||
sfTreeMap := make(map[int]*SubforumTreeNode, len(rowsSlice))
|
sfTreeMap := make(map[int]*SubforumTreeNode, len(rowsSlice))
|
||||||
for _, row := range rowsSlice {
|
for _, row := range rowsSlice {
|
||||||
sf := row.(*subforumRow).Subforum
|
sf := row.Subforum
|
||||||
sfTreeMap[sf.ID] = &SubforumTreeNode{Subforum: sf}
|
sfTreeMap[sf.ID] = &SubforumTreeNode{Subforum: sf}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -73,7 +73,7 @@ func GetFullSubforumTree(ctx context.Context, conn *pgxpool.Pool) SubforumTree {
|
||||||
|
|
||||||
for _, row := range rowsSlice {
|
for _, row := range rowsSlice {
|
||||||
// NOTE(asaf): Doing this in a separate loop over rowsSlice to ensure that Children are in db order.
|
// NOTE(asaf): Doing this in a separate loop over rowsSlice to ensure that Children are in db order.
|
||||||
cat := row.(*subforumRow).Subforum
|
cat := row.Subforum
|
||||||
node := sfTreeMap[cat.ID]
|
node := sfTreeMap[cat.ID]
|
||||||
if node.Parent != nil {
|
if node.Parent != nil {
|
||||||
node.Parent.Children = append(node.Parent.Children, node)
|
node.Parent.Children = append(node.Parent.Children, node)
|
||||||
|
|
|
@ -1,15 +0,0 @@
|
||||||
package models
|
|
||||||
|
|
||||||
import "time"
|
|
||||||
|
|
||||||
type TwitchID struct {
|
|
||||||
ID string `db:"id"`
|
|
||||||
Login string `db:"login"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type TwitchStream struct {
|
|
||||||
ID string `db:"twitch_id"`
|
|
||||||
Login string `db:"twitch_login"`
|
|
||||||
Title string `db:"title"`
|
|
||||||
StartedAt time.Time `db:"started_at"`
|
|
||||||
}
|
|
|
@ -56,10 +56,6 @@
|
||||||
<audio src="{{ .AssetUrl }}" controls>
|
<audio src="{{ .AssetUrl }}" controls>
|
||||||
{{ else if eq .Type mediaembed }}
|
{{ else if eq .Type mediaembed }}
|
||||||
{{ .EmbedHTML }}
|
{{ .EmbedHTML }}
|
||||||
{{ else }}
|
|
||||||
<div class="project-card br2 pv1 ph2">
|
|
||||||
<a href="{{ .AssetUrl }}" target="_blank">{{ .Filename }} ({{ filesize .FileSize }})</a>
|
|
||||||
</div>
|
|
||||||
{{ end }}
|
{{ end }}
|
||||||
</div>
|
</div>
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
|
|
@ -121,7 +121,6 @@
|
||||||
#welcome-logo svg {
|
#welcome-logo svg {
|
||||||
height: 100%;
|
height: 100%;
|
||||||
fill: currentColor;
|
fill: currentColor;
|
||||||
opacity: 0.9;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#welcome-content a {
|
#welcome-content a {
|
||||||
|
|
|
@ -2,21 +2,22 @@
|
||||||
|
|
||||||
{{ define "content" }}
|
{{ define "content" }}
|
||||||
<div class="center-layout mw7 mv3 ph3 ph0-ns post-content">
|
<div class="center-layout mw7 mv3 ph3 ph0-ns post-content">
|
||||||
<h2>Computers are amazing.</h2>
|
<p>Modern computer hardware is amazing. Manufacturers have orchestrated billions of pieces of silicon into terrifyingly complex and efficient structures that sweep electrons through innumerable tangled paths, branchings, and reunions with the sole purpose of performing computations at more than a billion times per second. This awe-inspiring piece of computational wizardry has at its disposal multiple billions of uniquely addressible silicon plates where it can store the results of millions of computations in an array of several vanishingly small chips. All of this hardware, though each component often sits no further than 7 or 8 centimeters away from the others, cycles so fast that the speed of light, a physical law of the universe, limits the rate at which they communicate with each other.</p>
|
||||||
<p>Computers have changed our lives for the better. They allow us to learn, connect with each other, and express ourselves in amazing new ways. And every year computers get more powerful, less expensive, and more accessible - computers today can do things we hardly dreamed of twenty years ago.</p>
|
<h2>So why is software still slow?</h2>
|
||||||
<h2>So why is software so terrible?</h2>
|
<p>Why does it take your operating system 10 seconds, 30 seconds, a minute to boot up? Why does your word processor freeze when you save a document on the cloud? Why does your web browser take 3, 4, 10 seconds to load a web page? Why does your phone struggle to keep more than a few apps open at a time? And why does each update somehow make the problem worse?</p>
|
||||||
<p>Why do web pages take ten seconds to load? Why do apps mess up scrolling? Why does your phone battery still die so quickly? And why does each update somehow make the problem worse?</p>
|
<h2>We made it slow.</h2>
|
||||||
<p>And why do we all use huge frameworks that no one understands? Why do our projects take several minutes to compile? Why do we have to restart our language servers every twenty minutes? And why does everyone think this is fine?</p>
|
<p>Not necessarily you, not necessarily me, not necessarily any single person in particular. But we, the software development community, made it slow by ignoring the fundamental reality of our occupation. We write code, code that runs on computers. Real computers, with central processing units and random access memory and hard disk drives and display buffers. Real computers, with integer and bitwise math and floating point units and L2 caches, with threads and cores and a tenuous little network connection to a million billion other computers. Real computers not built for ease of human understanding but for blindingly, incomprehensibly fast speed.</p>
|
||||||
<h2>We made it terrible.</h2>
|
<h2>A lot of us have forgotten that.</h2>
|
||||||
<p>Not necessarily you or me, not necessarily anyone in particular. But we, the software development community, made it terrible through our thoughtless behavior. We ignored the hardware. We glued together libraries so we didn't have to learn. We built layers on top of layers, until no one knew how anything worked.</p>
|
<p>In our haste to get our products, our projects, the works of our hands and minds, to as many people as possible, we take shortcuts. We make assumptions. We generalize, and abstract, and assume that just because these problems have been solved before that they never need to be solved again. We build abstraction layers, then forget we built them and build more on top.</p>
|
||||||
<p>But worst of all: we put our own desires above the user's.</p>
|
<p>And it's true that many of us think we do not have the time, the money, the mental bandwidth to always consider these things in detail. The deadline is approaching or the rent is due or we have taxes to fill out and a manager on our back and someone asking us why we always spend so much time at the office, and we just have to stick the library or virtual machine or garbage collector in there to cover up the places we can't think through right now.</p>
|
||||||
<p>You may have learned that programming is about classes, monads, or type systems. You may have been taught to keep your code clean and pretty, abstract and future-proof. None of that matters when the end result is garbage.</p>
|
<p>Others of us were never taught to think about the computer itself. We learned about objects and classes and templates and how to make our code clean and pretty. We learned how to write code to make the client or the manager or the teacher happy, but made the processor churn. And because we did, that amazing speed we'd been granted was wasted, by us, in a death by a thousand abstraction layers.</p>
|
||||||
<h2>But there is another way.</h2>
|
<h2>But some of us aren't satisfied with that.</h2>
|
||||||
<p>Some of us aren't satisfied with the current state of software. We think that wheels need to be reinvented. We like looking under the hood, understanding what others take for granted. We remember how software used to be, and know how much potential there is to make it better. We fight against the status quo, because we know how things <em>could</em> be.</p>
|
<p>Some of us take a few extra steps into the covered territory, the wheels sitting, motionless, in a pile behind us, examine their designs and decide there is a better way. The more experienced among us remember how software used to be, the potential that we know exists for computer programs to be useful, general, and efficient. Others of us got fed up with the tools we were expected to use without complaint, but which failed us time and time again. Some of us are just curious and don't know what's good for us. Don't trust what we've been told is good for us.</p>
|
||||||
<p>This is what Handmade means. It's not a technique or a language or a management strategy. It's not a library or a framework or a paradigm. It's an idea. The idea that we can build software that works with the computer, not against it. The idea that the user matters more than the programmer. The idea that sometimes a small group can do more than an army of software engineers, and <em>do it better</em>.</p>
|
<p>We sat down and looked at our hardware, and examined our data, and thought about how to use the one to transform the other. We tinkered, and measured, and read, and compared, and wrote, and refined, and modified, and measured again, over and over, until we found we had built the same thing, but 10 times faster and incomparably more useful to the people we designed it for. And we had built it by hand.</p>
|
||||||
<p>You don't need a degree, a dissertation, or a decade of experience. You don't need an expensive computer or a certificate. All you need is an open mind and a sense of curiosity. We'll help you with the rest.</p>
|
<p>That is what Handmade means. It's not a technique or a language or a management strategy, it isn't a formula or a library or an abstraction. It's an idea. The idea that we can build software that works with the computer, not against it. The idea that sometimes an individual programmer can be more productive than a large team, that a small group can do more than an army of software engineers and *do it better*. The idea that programming is about transforming data and we wield the code, the tool we use to bend that data to our will.</p>
|
||||||
|
<p>It doesn't require a degree, or a dissertation, or a decade of experience. You don't need an expensive computer or a certificate or even prior knowledge. All you need is an open mind and a sense of curiosity. We'll help you with the rest.</p>
|
||||||
<h2>Will you join us?</h2>
|
<h2>Will you join us?</h2>
|
||||||
<p>Will you build your software by hand?</p>
|
<p>Will you build your software by hand?</p>
|
||||||
<p class="c--dim i">Written by Ben Visness and the Handmade community. Original by Andrew Chronister.</p>
|
<p class="c--dim i">Written by Andrew Chronister</p>
|
||||||
</div>
|
</div>
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
|
|
@ -21,10 +21,6 @@
|
||||||
<div class="mb3 aspect-ratio aspect-ratio--16x9">
|
<div class="mb3 aspect-ratio aspect-ratio--16x9">
|
||||||
{{ .EmbedHTML }}
|
{{ .EmbedHTML }}
|
||||||
</div>
|
</div>
|
||||||
{{ else }}
|
|
||||||
<div class="project-card br2 pv1 ph2">
|
|
||||||
<a href="{{ .AssetUrl }}" target="_blank">{{ .Filename }} ({{ filesize .FileSize }})</a>
|
|
||||||
</div>
|
|
||||||
{{ end }}
|
{{ end }}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -1,83 +1,102 @@
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||||
<svg width="100%" height="100%" viewBox="0 0 2000 629" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
|
<svg viewBox="0 0 3706 1082" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
|
||||||
<g id="Wires" transform="matrix(26.3259,0,0,26.3259,-5070.03,-5340.32)">
|
<g id="Banner-Logo" serif:id="Banner Logo" transform="matrix(48.7805,0,0,48.7805,-9370.23,-9931.81)">
|
||||||
|
<g transform="matrix(1,0,0,1,-0.497771,-0.118656)">
|
||||||
<g transform="matrix(1,0,0,-1,-0.808418,430.006)">
|
<g transform="matrix(1,0,0,-1,-0.808418,430.006)">
|
||||||
<path d="M194.555,216.234C193.915,216.234 193.396,215.714 193.396,215.074C193.396,214.434 193.915,213.915 194.555,213.915C195.195,213.915 195.715,214.434 195.715,215.074C195.715,215.714 195.195,216.234 194.555,216.234ZM194.555,215.461C194.768,215.461 194.942,215.288 194.942,215.074C194.942,214.861 194.768,214.688 194.555,214.688C194.342,214.688 194.169,214.861 194.169,215.074C194.169,215.288 194.342,215.461 194.555,215.461Z"/>
|
<path d="M193.396,215.074L201.366,215.074" style="fill:none;"/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,-0.808418,430.006)">
|
<g transform="matrix(1,0,0,-1,-0.808418,430.006)">
|
||||||
<path d="M195.328,214.688L201.366,214.688C201.579,214.688 201.753,214.861 201.753,215.074C201.753,215.288 201.579,215.461 201.366,215.461L195.328,215.461C195.115,215.461 194.942,215.288 194.942,215.074C194.942,214.861 195.115,214.688 195.328,214.688Z"/>
|
<path d="M194.555,216.234C193.915,216.234 193.396,215.714 193.396,215.074C193.396,214.434 193.915,213.915 194.555,213.915C195.195,213.915 195.715,214.434 195.715,215.074C195.715,215.714 195.195,216.234 194.555,216.234ZM194.555,215.461C194.768,215.461 194.942,215.288 194.942,215.074C194.942,214.861 194.768,214.688 194.555,214.688C194.342,214.688 194.169,214.861 194.169,215.074C194.169,215.288 194.342,215.461 194.555,215.461Z" style=""/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(1,0,0,-1,-0.808418,430.006)">
|
||||||
|
<path d="M195.328,214.688L201.366,214.688C201.579,214.688 201.753,214.861 201.753,215.074C201.753,215.288 201.579,215.461 201.366,215.461L195.328,215.461C195.115,215.461 194.942,215.288 194.942,215.074C194.942,214.861 195.115,214.688 195.328,214.688Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,-1,461.953,430.006)">
|
<g transform="matrix(-1,0,0,-1,461.953,430.006)">
|
||||||
<path d="M194.555,213.915C193.915,213.915 193.396,214.434 193.396,215.074C193.396,215.714 193.915,216.234 194.555,216.234C195.195,216.234 195.715,215.714 195.715,215.074C195.715,214.434 195.195,213.915 194.555,213.915ZM194.555,214.688C194.768,214.688 194.942,214.861 194.942,215.074C194.942,215.288 194.768,215.461 194.555,215.461C194.342,215.461 194.169,215.288 194.169,215.074C194.169,214.861 194.342,214.688 194.555,214.688Z"/>
|
<path d="M193.396,215.074L201.366,215.074" style="fill:none;"/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,-1,461.953,430.006)">
|
<g transform="matrix(-1,0,0,-1,461.953,430.006)">
|
||||||
<path d="M195.328,215.461L201.366,215.461C201.579,215.461 201.753,215.288 201.753,215.074C201.753,214.861 201.579,214.688 201.366,214.688L195.328,214.688C195.115,214.688 194.942,214.861 194.942,215.074C194.942,215.288 195.115,215.461 195.328,215.461Z"/>
|
<path d="M194.555,213.915C193.915,213.915 193.396,214.434 193.396,215.074C193.396,215.714 193.915,216.234 194.555,216.234C195.195,216.234 195.715,215.714 195.715,215.074C195.715,214.434 195.195,213.915 194.555,213.915ZM194.555,214.688C194.768,214.688 194.942,214.861 194.942,215.074C194.942,215.288 194.768,215.461 194.555,215.461C194.342,215.461 194.169,215.288 194.169,215.074C194.169,214.861 194.342,214.688 194.555,214.688Z" style=""/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(-1,0,0,-1,461.953,430.006)">
|
||||||
|
<path d="M195.328,215.461L201.366,215.461C201.579,215.461 201.753,215.288 201.753,215.074C201.753,214.861 201.579,214.688 201.366,214.688L195.328,214.688C195.115,214.688 194.942,214.861 194.942,215.074C194.942,215.288 195.115,215.461 195.328,215.461Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,0.0431238,430.006)">
|
<g transform="matrix(1,0,0,-1,0.0431238,430.006)">
|
||||||
<path d="M200.241,214.801L201.383,213.66L201.929,214.206L200.788,215.348L200.241,214.801Z"/>
|
<path d="M200.515,215.074L201.656,213.933" style="fill:none;"/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(1,0,0,-1,0.0431238,430.006)">
|
||||||
|
<path d="M200.241,214.801L201.383,213.66L201.929,214.206L200.788,215.348L200.241,214.801Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,58.9314,428.865)">
|
<g transform="matrix(1,0,0,-1,58.9314,428.865)">
|
||||||
<path d="M200.241,214.801L201.383,213.66L201.929,214.206L200.788,215.348L200.241,214.801Z"/>
|
<path d="M200.515,215.074L201.656,213.933" style="fill:none;"/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(1,0,0,-1,58.9314,428.865)">
|
||||||
|
<path d="M200.241,214.801L201.383,213.66L201.929,214.206L200.788,215.348L200.241,214.801Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
||||||
<path d="M205.56,209.482L207.365,207.677L207.912,208.223L206.107,210.029L205.56,209.482Z"/>
|
<path d="M205.833,209.755L207.639,207.95" style="fill:none;"/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
||||||
<path d="M229.737,206.754C229.436,207.055 228.946,207.055 228.644,206.754C228.342,206.452 228.342,205.962 228.644,205.66C228.946,205.359 229.436,205.359 229.737,205.66C230.039,205.962 230.039,206.452 229.737,206.754ZM229.192,206.208L229.191,206.207L229.192,206.208Z"/>
|
<path d="M205.56,209.482L207.365,207.677L207.912,208.223L206.107,210.029L205.56,209.482Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
||||||
<path d="M207.639,207.564L227.288,207.564C227.288,207.564 228.644,206.207 228.644,206.207C228.795,206.056 229.04,206.056 229.191,206.207C229.342,206.358 229.342,206.603 229.191,206.754C228.534,207.411 227.721,208.223 227.721,208.223C227.648,208.296 227.55,208.337 227.448,208.337L207.639,208.337C207.425,208.337 207.252,208.163 207.252,207.95C207.252,207.737 207.425,207.564 207.639,207.564Z"/>
|
<path d="M229.737,206.754C229.436,207.055 228.946,207.055 228.644,206.754C228.342,206.452 228.342,205.962 228.644,205.66C228.946,205.359 229.436,205.359 229.737,205.66C230.039,205.962 230.039,206.452 229.737,206.754ZM229.191,206.207C229.191,206.207 229.191,206.207 229.191,206.207Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
||||||
<path d="M230.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 230.737,208.337 230.737,208.337L230.737,207.564Z"/>
|
<path d="M207.639,207.564L227.288,207.564C227.288,207.564 228.644,206.207 228.644,206.207C228.795,206.056 229.04,206.056 229.191,206.207C229.342,206.358 229.342,206.603 229.191,206.754C228.534,207.411 227.721,208.223 227.721,208.223C227.648,208.296 227.55,208.337 227.448,208.337L207.639,208.337C207.425,208.337 207.252,208.163 207.252,207.95C207.252,207.737 207.425,207.564 207.639,207.564Z" style=""/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(1,0,0,-1,1.04312,431.006)">
|
||||||
|
<path d="M230.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 230.737,208.337 230.737,208.337L230.737,207.564Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
||||||
<path d="M205.56,209.482L207.365,207.677L207.912,208.223L206.107,210.029L205.56,209.482Z"/>
|
<path d="M205.833,209.755L207.639,207.95" style="fill:none;"/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
||||||
<path d="M229.737,206.754C229.436,207.055 228.946,207.055 228.644,206.754C228.342,206.452 228.342,205.962 228.644,205.66C228.946,205.359 229.436,205.359 229.737,205.66C230.039,205.962 230.039,206.452 229.737,206.754L229.737,206.754Z"/>
|
<path d="M205.56,209.482L207.365,207.677L207.912,208.223L206.107,210.029L205.56,209.482Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
||||||
<path d="M207.639,207.564L227.288,207.564C227.288,207.564 228.644,206.207 228.644,206.207C228.795,206.056 229.04,206.056 229.191,206.207C229.342,206.358 229.342,206.603 229.191,206.754C228.534,207.411 227.721,208.223 227.721,208.223C227.648,208.296 227.55,208.337 227.448,208.337L207.639,208.337C207.425,208.337 207.252,208.163 207.252,207.95C207.252,207.737 207.425,207.564 207.639,207.564Z"/>
|
<path d="M229.737,206.754C229.436,207.055 228.946,207.055 228.644,206.754C228.342,206.452 228.342,205.962 228.644,205.66C228.946,205.359 229.436,205.359 229.737,205.66C230.039,205.962 230.039,206.452 229.737,206.754Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
||||||
<path d="M230.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 230.737,208.337 230.737,208.337L230.737,207.564Z"/>
|
<path d="M207.639,207.564L227.288,207.564C227.288,207.564 228.644,206.207 228.644,206.207C228.795,206.056 229.04,206.056 229.191,206.207C229.342,206.358 229.342,206.603 229.191,206.754C228.534,207.411 227.721,208.223 227.721,208.223C227.648,208.296 227.55,208.337 227.448,208.337L207.639,208.337C207.425,208.337 207.252,208.163 207.252,207.95C207.252,207.737 207.425,207.564 207.639,207.564Z" style=""/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(-1,0,0,1,459.812,-1.40971)">
|
||||||
|
<path d="M230.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 230.737,208.337 230.737,208.337L230.737,207.564Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,-1,469.812,412.749)">
|
<g transform="matrix(-1,0,0,-1,469.812,412.749)">
|
||||||
<path d="M240.737,208.337L250.97,208.337C250.97,208.337 251.662,209.029 251.662,209.029L252.209,208.482L251.403,207.677C251.331,207.604 251.233,207.564 251.13,207.564L240.737,207.564L240.737,208.337Z"/>
|
<path d="M240.737,208.337L250.97,208.337C250.97,208.337 251.662,209.029 251.662,209.029L252.209,208.482L251.403,207.677C251.331,207.604 251.233,207.564 251.13,207.564L240.737,207.564L240.737,208.337Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,-36.8607,412.749)">
|
<g transform="matrix(1,0,0,-1,-36.8607,412.749)">
|
||||||
<path d="M243.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L252.209,208.482L251.662,209.029L250.97,208.337C250.97,208.337 243.737,208.337 243.737,208.337L243.737,207.564Z"/>
|
<path d="M243.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L252.209,208.482L251.662,209.029L250.97,208.337C250.97,208.337 243.737,208.337 243.737,208.337L243.737,207.564Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,-1,-31.3696,432.749)">
|
<g transform="matrix(1,0,0,-1,-31.3696,432.749)">
|
||||||
<path d="M242.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 242.737,208.337 242.737,208.337L242.737,207.564Z"/>
|
<path d="M242.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C250.97,208.337 242.737,208.337 242.737,208.337L242.737,207.564Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,-1,487.321,432.821)">
|
<g transform="matrix(-1,0,0,-1,487.321,432.821)">
|
||||||
<path d="M242.737,208.337L250.97,208.337C250.97,208.337 252.662,210.029 252.662,210.029L253.209,209.482L251.403,207.677C251.331,207.604 251.233,207.564 251.13,207.564L242.737,207.564L242.737,208.337Z"/>
|
<path d="M242.737,208.337L250.97,208.337C250.97,208.337 252.662,210.029 252.662,210.029L253.209,209.482L251.403,207.677C251.331,207.604 251.233,207.564 251.13,207.564L242.737,207.564L242.737,208.337Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(-1,0,0,1,492.867,-3.15102)">
|
<g transform="matrix(-1,0,0,1,492.867,-3.15102)">
|
||||||
<path d="M242.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C249.81,208.337 242.737,208.337 242.737,208.337L242.737,207.564Z"/>
|
<path d="M242.737,207.564L251.13,207.564C251.233,207.564 251.331,207.604 251.403,207.677L253.209,209.482L252.662,210.029L250.97,208.337C249.81,208.337 242.737,208.337 242.737,208.337L242.737,207.564Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,1,-9.29379,0)">
|
<g transform="matrix(1,0,0,1,-9.29379,0)">
|
||||||
<path d="M216.233,225.892L217.326,224.799L216.233,223.706L215.139,224.799L216.233,225.892Z"/>
|
<path d="M217.976,223.056L215.686,225.346" style="fill:none;"/>
|
||||||
</g>
|
</g>
|
||||||
<g transform="matrix(1,0,0,1,-9.29379,0)">
|
<g transform="matrix(1,0,0,1,-9.29379,0)">
|
||||||
<path d="M217.702,222.783C217.702,222.783 216.89,223.595 216.233,224.252L216.779,224.799C217.436,224.142 218.249,223.329 218.249,223.329L217.702,222.783Z"/>
|
<path d="M216.233,225.892L217.326,224.799L216.233,223.706L215.139,224.799L216.233,225.892Z" style=""/>
|
||||||
|
</g>
|
||||||
|
<g transform="matrix(1,0,0,1,-9.29379,0)">
|
||||||
|
<path d="M217.702,222.783C217.702,222.783 216.89,223.595 216.233,224.252L216.779,224.799C217.436,224.142 218.249,223.329 218.249,223.329L217.702,222.783Z" style=""/>
|
||||||
</g>
|
</g>
|
||||||
</g>
|
</g>
|
||||||
<g id="Name" transform="matrix(26.3259,0,0,26.3259,-5177.33,-4754.86)">
|
<g id="Name-3" serif:id="Name 3" transform="matrix(1,0,0,1,-4.57363,22.1201)">
|
||||||
<path d="M213.538,196.869L213.538,188.32L214.496,188.32L214.496,192.663L216.491,192.663L216.491,188.32L217.448,188.32L217.448,196.869L216.491,196.869L216.491,193.62L214.496,193.62L214.496,196.869L213.538,196.869Z" style="fill-rule:nonzero;"/>
|
<path d="M213.538,196.869L213.538,188.32L214.496,188.32L214.496,192.663L216.491,192.663L216.491,188.32L217.448,188.32L217.448,196.869L216.491,196.869L216.491,193.62L214.496,193.62L214.496,196.869L213.538,196.869Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M221.267,188.32L223.079,196.869L222.087,196.869L221.757,195.216L219.751,195.216L219.409,196.869L218.417,196.869L220.241,188.32L221.267,188.32ZM219.922,194.258L221.586,194.258L220.754,190.03L219.922,194.258Z" style="fill-rule:nonzero;"/>
|
<path d="M221.267,188.32L223.079,196.869L222.087,196.869L221.757,195.216L219.751,195.216L219.409,196.869L218.417,196.869L220.241,188.32L221.267,188.32ZM219.922,194.258L221.586,194.258L220.754,190.03L219.922,194.258Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M227.331,188.32L228.242,188.32L228.242,196.869L227.433,196.869L225.131,190.816L225.131,196.869L224.219,196.869L224.219,188.32L225.028,188.32L227.331,194.372L227.331,188.32Z" style="fill-rule:nonzero;"/>
|
<path d="M227.331,188.32L228.242,188.32L228.242,196.869L227.433,196.869L225.131,190.816L225.131,196.869L224.219,196.869L224.219,188.32L225.028,188.32L227.331,194.372L227.331,188.32Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M229.496,196.869L229.496,188.32L231.457,188.32C232.072,188.32 232.566,188.514 232.939,188.901C233.311,189.289 233.497,189.798 233.497,190.429L233.497,194.76C233.497,195.391 233.311,195.9 232.939,196.287C232.566,196.675 232.072,196.869 231.457,196.869L229.496,196.869ZM230.454,195.911L231.457,195.911C231.784,195.911 232.046,195.807 232.243,195.598C232.441,195.389 232.54,195.11 232.54,194.76L232.54,190.429C232.54,190.079 232.441,189.8 232.243,189.591C232.046,189.382 231.784,189.277 231.457,189.277L230.454,189.277L230.454,195.911Z" style="fill-rule:nonzero;"/>
|
<path d="M229.496,196.869L229.496,188.32L231.457,188.32C232.072,188.32 232.566,188.514 232.939,188.901C233.311,189.289 233.497,189.798 233.497,190.429L233.497,194.76C233.497,195.391 233.311,195.9 232.939,196.287C232.566,196.675 232.072,196.869 231.457,196.869L229.496,196.869ZM230.454,195.911L231.457,195.911C231.784,195.911 232.046,195.807 232.243,195.598C232.441,195.389 232.54,195.11 232.54,194.76L232.54,190.429C232.54,190.079 232.441,189.8 232.243,189.591C232.046,189.382 231.784,189.277 231.457,189.277L230.454,189.277L230.454,195.911Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<g transform="matrix(1,0,0,1,0,0.0379854)">
|
<path d="M239.265,188.32L240.222,188.32L240.222,196.869L239.31,196.869L239.31,191.306L237.84,196.869L237.122,196.869L235.663,191.306L235.663,196.869L234.751,196.869L234.751,188.32L235.708,188.32L237.487,195.113L239.265,188.32Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M239.265,188.32L240.222,188.32L240.222,196.869L239.31,196.869L239.31,191.306L237.84,196.869L237.122,196.869L235.663,191.306L235.663,196.869L234.751,196.869L234.751,188.32L235.708,188.32L237.487,195.113L239.265,188.32Z" style="fill-rule:nonzero;"/>
|
<path d="M244.212,188.32L246.024,196.869L245.032,196.869L244.702,195.216L242.696,195.216L242.354,196.869L241.362,196.869L243.186,188.32L244.212,188.32ZM242.867,194.258L244.531,194.258L243.699,190.03L242.867,194.258Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
</g>
|
<path d="M247.05,196.869L247.05,188.32L249.01,188.32C249.626,188.32 250.12,188.514 250.492,188.901C250.864,189.289 251.051,189.798 251.051,190.429L251.051,194.76C251.051,195.391 250.864,195.9 250.492,196.287C250.12,196.675 249.626,196.869 249.01,196.869L247.05,196.869ZM248.007,195.911L249.01,195.911C249.337,195.911 249.599,195.807 249.797,195.598C249.994,195.389 250.093,195.11 250.093,194.76L250.093,190.429C250.093,190.079 249.994,189.8 249.797,189.591C249.599,189.382 249.337,189.277 249.01,189.277L248.007,189.277L248.007,195.911Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M244.212,188.32L246.024,196.869L245.032,196.869L244.702,195.216L242.696,195.216L242.354,196.869L241.362,196.869L243.186,188.32L244.212,188.32ZM242.867,194.258L244.531,194.258L243.699,190.03L242.867,194.258Z" style="fill-rule:nonzero;"/>
|
<path d="M255.758,188.32L255.758,189.277L253.262,189.277L253.262,192.081L255.45,192.081L255.45,193.039L253.262,193.039L253.262,195.911L255.758,195.911L255.758,196.869L252.304,196.869L252.304,188.32L255.758,188.32Z" style="fill-opacity:0.85;fill-rule:nonzero;"/>
|
||||||
<path d="M247.05,196.869L247.05,188.32L249.01,188.32C249.626,188.32 250.12,188.514 250.492,188.901C250.864,189.289 251.051,189.798 251.051,190.429L251.051,194.76C251.051,195.391 250.864,195.9 250.492,196.287C250.12,196.675 249.626,196.869 249.01,196.869L247.05,196.869ZM248.007,195.911L249.01,195.911C249.337,195.911 249.599,195.807 249.797,195.598C249.994,195.389 250.093,195.11 250.093,194.76L250.093,190.429C250.093,190.079 249.994,189.8 249.797,189.591C249.599,189.382 249.337,189.277 249.01,189.277L248.007,189.277L248.007,195.911Z" style="fill-rule:nonzero;"/>
|
|
||||||
<g transform="matrix(1,0,0,1,0,0.0379854)">
|
|
||||||
<path d="M255.758,188.32L255.758,189.277L253.262,189.277L253.262,192.081L255.45,192.081L255.45,193.039L253.262,193.039L253.262,195.911L255.758,195.911L255.758,196.869L252.304,196.869L252.304,188.32L255.758,188.32Z" style="fill-rule:nonzero;"/>
|
|
||||||
</g>
|
</g>
|
||||||
</g>
|
</g>
|
||||||
</svg>
|
</svg>
|
||||||
|
|
Before Width: | Height: | Size: 9.6 KiB After Width: | Height: | Size: 11 KiB |
|
@ -193,25 +193,6 @@ var HMNTemplateFuncs = template.FuncMap{
|
||||||
"noescape": func(str string) template.HTML {
|
"noescape": func(str string) template.HTML {
|
||||||
return template.HTML(str)
|
return template.HTML(str)
|
||||||
},
|
},
|
||||||
"filesize": func(numBytes int) string {
|
|
||||||
scales := []string{
|
|
||||||
" bytes",
|
|
||||||
"kb",
|
|
||||||
"mb",
|
|
||||||
"gb",
|
|
||||||
}
|
|
||||||
num := float64(numBytes)
|
|
||||||
scale := 0
|
|
||||||
for num > 1024 && scale < len(scales)-1 {
|
|
||||||
num /= 1024
|
|
||||||
scale += 1
|
|
||||||
}
|
|
||||||
precision := 0
|
|
||||||
if scale > 0 {
|
|
||||||
precision = 2
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%.*f%s", precision, num, scales[scale])
|
|
||||||
},
|
|
||||||
|
|
||||||
// NOTE(asaf): Template specific functions:
|
// NOTE(asaf): Template specific functions:
|
||||||
"projectcarddata": func(project Project, classes string) ProjectCardData {
|
"projectcarddata": func(project Project, classes string) ProjectCardData {
|
||||||
|
|
|
@ -305,8 +305,7 @@ type TimelineItem struct {
|
||||||
type TimelineItemMediaType int
|
type TimelineItemMediaType int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
TimelineItemMediaTypeUnknown TimelineItemMediaType = iota
|
TimelineItemMediaTypeImage TimelineItemMediaType = iota + 1
|
||||||
TimelineItemMediaTypeImage
|
|
||||||
TimelineItemMediaTypeVideo
|
TimelineItemMediaTypeVideo
|
||||||
TimelineItemMediaTypeAudio
|
TimelineItemMediaTypeAudio
|
||||||
TimelineItemMediaTypeEmbed
|
TimelineItemMediaTypeEmbed
|
||||||
|
@ -319,8 +318,6 @@ type TimelineItemMedia struct {
|
||||||
ThumbnailUrl string
|
ThumbnailUrl string
|
||||||
MimeType string
|
MimeType string
|
||||||
Width, Height int
|
Width, Height int
|
||||||
Filename string
|
|
||||||
FileSize int
|
|
||||||
ExtraOpenGraphItems []OpenGraphItem
|
ExtraOpenGraphItems []OpenGraphItem
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,450 +0,0 @@
|
||||||
package twitch
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/config"
|
|
||||||
"git.handmade.network/hmn/hmn/src/hmnurl"
|
|
||||||
"git.handmade.network/hmn/hmn/src/logging"
|
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
|
||||||
"git.handmade.network/hmn/hmn/src/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
var twitchAPIBaseUrl = config.Config.Twitch.BaseUrl
|
|
||||||
|
|
||||||
var HitRateLimit = errors.New("hit rate limit")
|
|
||||||
var MaxRetries = errors.New("hit max retries")
|
|
||||||
|
|
||||||
var httpClient = &http.Client{}
|
|
||||||
|
|
||||||
// NOTE(asaf): Access token is not thread-safe right now.
|
|
||||||
// All twitch requests are made through the goroutine in MonitorTwitchSubscriptions.
|
|
||||||
var activeAccessToken string
|
|
||||||
var rateLimitReset time.Time
|
|
||||||
|
|
||||||
type twitchUser struct {
|
|
||||||
TwitchID string
|
|
||||||
TwitchLogin string
|
|
||||||
}
|
|
||||||
|
|
||||||
func getTwitchUsersByLogin(ctx context.Context, logins []string) ([]twitchUser, error) {
|
|
||||||
result := make([]twitchUser, 0, len(logins))
|
|
||||||
numChunks := len(logins)/100 + 1
|
|
||||||
for i := 0; i < numChunks; i++ {
|
|
||||||
query := url.Values{}
|
|
||||||
query.Add("first", "100")
|
|
||||||
for _, login := range logins[i*100 : utils.IntMin((i+1)*100, len(logins))] {
|
|
||||||
query.Add("login", login)
|
|
||||||
}
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "GET", buildUrl("/users", query.Encode()), nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to create requset")
|
|
||||||
}
|
|
||||||
res, err := doRequest(ctx, true, req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to fetch twitch users")
|
|
||||||
}
|
|
||||||
|
|
||||||
type user struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
Login string `json:"login"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchResponse struct {
|
|
||||||
Data []user `json:"data"`
|
|
||||||
}
|
|
||||||
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to read response body while fetching twitch users")
|
|
||||||
}
|
|
||||||
|
|
||||||
var userResponse twitchResponse
|
|
||||||
err = json.Unmarshal(body, &userResponse)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to parse twitch response while fetching twitch users")
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, u := range userResponse.Data {
|
|
||||||
result = append(result, twitchUser{
|
|
||||||
TwitchID: u.ID,
|
|
||||||
TwitchLogin: u.Login,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type streamStatus struct {
|
|
||||||
TwitchID string
|
|
||||||
TwitchLogin string
|
|
||||||
Live bool
|
|
||||||
Title string
|
|
||||||
StartedAt time.Time
|
|
||||||
Category string
|
|
||||||
Tags []string
|
|
||||||
}
|
|
||||||
|
|
||||||
func getStreamStatus(ctx context.Context, twitchIDs []string) ([]streamStatus, error) {
|
|
||||||
result := make([]streamStatus, 0, len(twitchIDs))
|
|
||||||
numChunks := len(twitchIDs)/100 + 1
|
|
||||||
for i := 0; i < numChunks; i++ {
|
|
||||||
query := url.Values{}
|
|
||||||
query.Add("first", "100")
|
|
||||||
for _, tid := range twitchIDs[i*100 : utils.IntMin((i+1)*100, len(twitchIDs))] {
|
|
||||||
query.Add("user_id", tid)
|
|
||||||
}
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "GET", buildUrl("/streams", query.Encode()), nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to create request")
|
|
||||||
}
|
|
||||||
res, err := doRequest(ctx, true, req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to fetch stream statuses")
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchStatus struct {
|
|
||||||
TwitchID string `json:"user_id"`
|
|
||||||
TwitchLogin string `json:"user_login"`
|
|
||||||
GameID string `json:"game_id"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
Title string `json:"title"`
|
|
||||||
StartedAt string `json:"started_at"`
|
|
||||||
Thumbnail string `json:"thumbnail_url"`
|
|
||||||
Tags []string `json:"tag_ids"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchResponse struct {
|
|
||||||
Data []twitchStatus `json:"data"`
|
|
||||||
}
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to read response body while processing stream statuses")
|
|
||||||
}
|
|
||||||
|
|
||||||
var streamResponse twitchResponse
|
|
||||||
err = json.Unmarshal(body, &streamResponse)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to parse twitch response while processing stream statuses")
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, d := range streamResponse.Data {
|
|
||||||
started, err := time.Parse(time.RFC3339, d.StartedAt)
|
|
||||||
if err != nil {
|
|
||||||
logging.ExtractLogger(ctx).Warn().Str("Time string", d.StartedAt).Msg("Failed to parse twitch timestamp")
|
|
||||||
started = time.Now()
|
|
||||||
}
|
|
||||||
status := streamStatus{
|
|
||||||
TwitchID: d.TwitchID,
|
|
||||||
TwitchLogin: d.TwitchLogin,
|
|
||||||
Live: d.Type == "live",
|
|
||||||
Title: d.Title,
|
|
||||||
StartedAt: started,
|
|
||||||
Category: d.GameID,
|
|
||||||
Tags: d.Tags,
|
|
||||||
}
|
|
||||||
result = append(result, status)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchEventSub struct {
|
|
||||||
EventID string
|
|
||||||
TwitchID string
|
|
||||||
Type string
|
|
||||||
GoodStatus bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func getEventSubscriptions(ctx context.Context) ([]twitchEventSub, error) {
|
|
||||||
result := make([]twitchEventSub, 0)
|
|
||||||
after := ""
|
|
||||||
for {
|
|
||||||
query := url.Values{}
|
|
||||||
if len(after) > 0 {
|
|
||||||
query.Add("after", after)
|
|
||||||
}
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "GET", buildUrl("/eventsub/subscriptions", query.Encode()), nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to create request")
|
|
||||||
}
|
|
||||||
res, err := doRequest(ctx, true, req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to fetch twitch event subscriptions")
|
|
||||||
}
|
|
||||||
|
|
||||||
type eventSub struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
Status string `json:"status"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
Condition struct {
|
|
||||||
TwitchID string `json:"broadcaster_user_id"`
|
|
||||||
} `json:"condition"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchResponse struct {
|
|
||||||
Data []eventSub `json:"data"`
|
|
||||||
Pagination *struct {
|
|
||||||
After string `json:"cursor"`
|
|
||||||
} `json:"pagination"`
|
|
||||||
}
|
|
||||||
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to read response body while fetching twitch eventsubs")
|
|
||||||
}
|
|
||||||
|
|
||||||
var eventSubResponse twitchResponse
|
|
||||||
err = json.Unmarshal(body, &eventSubResponse)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to parse twitch response while fetching twitch eventsubs")
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, es := range eventSubResponse.Data {
|
|
||||||
result = append(result, twitchEventSub{
|
|
||||||
EventID: es.ID,
|
|
||||||
TwitchID: es.Condition.TwitchID,
|
|
||||||
Type: es.Type,
|
|
||||||
GoodStatus: es.Status == "enabled" || es.Status == "webhook_callback_verification_pending",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if eventSubResponse.Pagination == nil || eventSubResponse.Pagination.After == "" {
|
|
||||||
return result, nil
|
|
||||||
} else {
|
|
||||||
after = eventSubResponse.Pagination.After
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func subscribeToEvent(ctx context.Context, eventType string, twitchID string) error {
|
|
||||||
type eventBody struct {
|
|
||||||
Type string `json:"type"`
|
|
||||||
Version string `json:"version"`
|
|
||||||
Condition struct {
|
|
||||||
TwitchID string `json:"broadcaster_user_id"`
|
|
||||||
} `json:"condition"`
|
|
||||||
Transport struct {
|
|
||||||
Method string `json:"method"`
|
|
||||||
Callback string `json:"callback"`
|
|
||||||
Secret string `json:"secret"`
|
|
||||||
} `json:"transport"`
|
|
||||||
}
|
|
||||||
|
|
||||||
ev := eventBody{
|
|
||||||
Type: eventType,
|
|
||||||
Version: "1",
|
|
||||||
}
|
|
||||||
ev.Condition.TwitchID = twitchID
|
|
||||||
ev.Transport.Method = "webhook"
|
|
||||||
// NOTE(asaf): Twitch has special treatment for localhost. We can keep this around for live/beta because it just won't replace anything.
|
|
||||||
ev.Transport.Callback = strings.ReplaceAll(hmnurl.BuildTwitchEventSubCallback(), "handmade.local:9001", "localhost")
|
|
||||||
ev.Transport.Secret = config.Config.Twitch.EventSubSecret
|
|
||||||
|
|
||||||
evJson, err := json.Marshal(ev)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to marshal event sub data")
|
|
||||||
}
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "POST", buildUrl("/eventsub/subscriptions", ""), bytes.NewReader(evJson))
|
|
||||||
req.Header.Set("Content-Type", "application/json")
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create request")
|
|
||||||
}
|
|
||||||
res, err := doRequest(ctx, true, req)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create new event subscription")
|
|
||||||
}
|
|
||||||
defer readAndClose(res)
|
|
||||||
|
|
||||||
if res.StatusCode >= 300 {
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to read response body while creating twitch eventsubs")
|
|
||||||
}
|
|
||||||
logging.ExtractLogger(ctx).Error().Interface("Headers", res.Header).Int("Status code", res.StatusCode).Str("Body", string(body[:])).Msg("Failed to create twitch event sub")
|
|
||||||
return oops.New(nil, "failed to create new event subscription")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func unsubscribeFromEvent(ctx context.Context, eventID string) error {
|
|
||||||
query := url.Values{}
|
|
||||||
query.Add("id", eventID)
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "DELETE", buildUrl("/eventsub/subscriptions", query.Encode()), nil)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create request")
|
|
||||||
}
|
|
||||||
res, err := doRequest(ctx, true, req)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to delete new event subscription")
|
|
||||||
}
|
|
||||||
defer readAndClose(res)
|
|
||||||
|
|
||||||
if res.StatusCode > 300 {
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to read response body while deleting twitch eventsubs")
|
|
||||||
}
|
|
||||||
logging.ExtractLogger(ctx).Error().Interface("Headers", res.Header).Int("Status code", res.StatusCode).Str("Body", string(body[:])).Msg("Failed to delete twitch event sub")
|
|
||||||
return oops.New(nil, "failed to delete new event subscription")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func doRequest(ctx context.Context, waitOnRateLimit bool, req *http.Request) (*http.Response, error) {
|
|
||||||
serviceUnavailable := false
|
|
||||||
numRetries := 5
|
|
||||||
|
|
||||||
for {
|
|
||||||
if numRetries == 0 {
|
|
||||||
return nil, MaxRetries
|
|
||||||
}
|
|
||||||
numRetries -= 1
|
|
||||||
|
|
||||||
now := time.Now()
|
|
||||||
if rateLimitReset.After(now) {
|
|
||||||
if waitOnRateLimit {
|
|
||||||
timer := time.NewTimer(rateLimitReset.Sub(now))
|
|
||||||
select {
|
|
||||||
case <-timer.C:
|
|
||||||
case <-ctx.Done():
|
|
||||||
return nil, errors.New("request interrupted during rate limiting")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return nil, HitRateLimit
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", activeAccessToken))
|
|
||||||
req.Header.Set("Client-Id", config.Config.Twitch.ClientID)
|
|
||||||
res, err := httpClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "twitch request failed")
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.StatusCode != 503 {
|
|
||||||
serviceUnavailable = false
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.StatusCode >= 200 && res.StatusCode < 300 {
|
|
||||||
return res, nil
|
|
||||||
} else if res.StatusCode == 503 {
|
|
||||||
readAndClose(res)
|
|
||||||
if serviceUnavailable {
|
|
||||||
// NOTE(asaf): The docs say we should retry once if we receive 503
|
|
||||||
return nil, oops.New(nil, "got 503 Service Unavailable twice in a row")
|
|
||||||
} else {
|
|
||||||
serviceUnavailable = true
|
|
||||||
}
|
|
||||||
} else if res.StatusCode == 429 {
|
|
||||||
logging.ExtractLogger(ctx).Warn().Interface("Headers", res.Header).Msg("Hit Twitch rate limit")
|
|
||||||
err = updateRateLimitReset(res)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
} else if res.StatusCode == 401 {
|
|
||||||
logging.ExtractLogger(ctx).Warn().Msg("Twitch refresh token is invalid. Renewing...")
|
|
||||||
readAndClose(res)
|
|
||||||
err = refreshAccessToken(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, oops.New(err, "failed to read response body")
|
|
||||||
}
|
|
||||||
logging.ExtractLogger(ctx).Warn().Interface("Headers", res.Header).Int("Status code", res.StatusCode).Str("Body", string(body[:])).Msg("Unexpected status code from twitch")
|
|
||||||
res.Body.Close()
|
|
||||||
return res, oops.New(nil, "got an unexpected status code from twitch")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func updateRateLimitReset(res *http.Response) error {
|
|
||||||
defer readAndClose(res)
|
|
||||||
|
|
||||||
resetStr := res.Header.Get("Ratelimit-Reset")
|
|
||||||
if len(resetStr) == 0 {
|
|
||||||
return oops.New(nil, "no ratelimit data on response")
|
|
||||||
}
|
|
||||||
|
|
||||||
resetUnix, err := strconv.Atoi(resetStr)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to parse reset time")
|
|
||||||
}
|
|
||||||
|
|
||||||
rateLimitReset = time.Unix(int64(resetUnix), 0)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type AccessTokenResponse struct {
|
|
||||||
AccessToken string `json:"access_token"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func refreshAccessToken(ctx context.Context) error {
|
|
||||||
logging.ExtractLogger(ctx).Info().Msg("Refreshing twitch token")
|
|
||||||
query := url.Values{}
|
|
||||||
query.Add("client_id", config.Config.Twitch.ClientID)
|
|
||||||
query.Add("client_secret", config.Config.Twitch.ClientSecret)
|
|
||||||
query.Add("grant_type", "client_credentials")
|
|
||||||
url := fmt.Sprintf("%s/token?%s", config.Config.Twitch.BaseIDUrl, query.Encode())
|
|
||||||
req, err := http.NewRequestWithContext(ctx, "POST", url, nil)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to create request")
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := httpClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to request new access token")
|
|
||||||
}
|
|
||||||
defer readAndClose(res)
|
|
||||||
|
|
||||||
if res.StatusCode >= 400 {
|
|
||||||
// NOTE(asaf): The docs don't specify the error cases for this call.
|
|
||||||
// NOTE(asaf): According to the docs rate limiting is per-token, and we don't use a token for this call,
|
|
||||||
// so who knows how rate limiting works here.
|
|
||||||
body, _ := io.ReadAll(res.Body)
|
|
||||||
logging.ExtractLogger(ctx).Error().Interface("Headers", res.Header).Int("Status code", res.StatusCode).Str("body", string(body[:])).Msg("Got bad status code from twitch access token refresh")
|
|
||||||
return oops.New(nil, "received unexpected status code from twitch access token refresh")
|
|
||||||
}
|
|
||||||
|
|
||||||
body, err := io.ReadAll(res.Body)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to read response body")
|
|
||||||
}
|
|
||||||
var accessTokenResponse AccessTokenResponse
|
|
||||||
err = json.Unmarshal(body, &accessTokenResponse)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to unmarshal access token response")
|
|
||||||
}
|
|
||||||
|
|
||||||
activeAccessToken = accessTokenResponse.AccessToken
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func readAndClose(res *http.Response) {
|
|
||||||
io.ReadAll(res.Body)
|
|
||||||
res.Body.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildUrl(path string, queryParams string) string {
|
|
||||||
return fmt.Sprintf("%s%s?%s", config.Config.Twitch.BaseUrl, path, queryParams)
|
|
||||||
}
|
|
|
@ -1,514 +0,0 @@
|
||||||
package twitch
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/config"
|
|
||||||
"git.handmade.network/hmn/hmn/src/db"
|
|
||||||
"git.handmade.network/hmn/hmn/src/discord"
|
|
||||||
"git.handmade.network/hmn/hmn/src/hmndata"
|
|
||||||
"git.handmade.network/hmn/hmn/src/logging"
|
|
||||||
"git.handmade.network/hmn/hmn/src/models"
|
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
|
||||||
"git.handmade.network/hmn/hmn/src/perf"
|
|
||||||
"github.com/jackc/pgx/v4/pgxpool"
|
|
||||||
)
|
|
||||||
|
|
||||||
type twitchNotification struct {
|
|
||||||
TwitchID string
|
|
||||||
Type twitchNotificationType
|
|
||||||
}
|
|
||||||
|
|
||||||
var twitchNotificationChannel chan twitchNotification
|
|
||||||
var linksChangedChannel chan struct{}
|
|
||||||
|
|
||||||
func MonitorTwitchSubscriptions(ctx context.Context, dbConn *pgxpool.Pool) <-chan struct{} {
|
|
||||||
log := logging.ExtractLogger(ctx).With().Str("twitch goroutine", "stream monitor").Logger()
|
|
||||||
ctx = logging.AttachLoggerToContext(&log, ctx)
|
|
||||||
|
|
||||||
if config.Config.Twitch.ClientID == "" {
|
|
||||||
log.Warn().Msg("No twitch config provided.")
|
|
||||||
done := make(chan struct{}, 1)
|
|
||||||
done <- struct{}{}
|
|
||||||
return done
|
|
||||||
}
|
|
||||||
|
|
||||||
twitchNotificationChannel = make(chan twitchNotification, 100)
|
|
||||||
linksChangedChannel = make(chan struct{}, 10)
|
|
||||||
done := make(chan struct{})
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer func() {
|
|
||||||
log.Info().Msg("Shutting down twitch monitor")
|
|
||||||
done <- struct{}{}
|
|
||||||
}()
|
|
||||||
log.Info().Msg("Running twitch monitor...")
|
|
||||||
|
|
||||||
err := refreshAccessToken(ctx)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Failed to fetch refresh token on start")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
monitorTicker := time.NewTicker(2 * time.Hour)
|
|
||||||
firstRunChannel := make(chan struct{}, 1)
|
|
||||||
firstRunChannel <- struct{}{}
|
|
||||||
|
|
||||||
timers := make([]*time.Timer, 0)
|
|
||||||
expiredTimers := make(chan *time.Timer, 10)
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-ctx.Done():
|
|
||||||
for _, timer := range timers {
|
|
||||||
timer.Stop()
|
|
||||||
}
|
|
||||||
return
|
|
||||||
case expired := <-expiredTimers:
|
|
||||||
for idx, timer := range timers {
|
|
||||||
if timer == expired {
|
|
||||||
timers = append(timers[:idx], timers[idx+1:]...)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
case <-firstRunChannel:
|
|
||||||
syncWithTwitch(ctx, dbConn, true)
|
|
||||||
case <-monitorTicker.C:
|
|
||||||
syncWithTwitch(ctx, dbConn, true)
|
|
||||||
case <-linksChangedChannel:
|
|
||||||
// NOTE(asaf): Since we update links inside transactions for users/projects
|
|
||||||
// we won't see the updated list of links until the transaction is committed.
|
|
||||||
// Waiting 5 seconds is just a quick workaround for that. It's not
|
|
||||||
// convenient to only trigger this after the transaction is committed.
|
|
||||||
var timer *time.Timer
|
|
||||||
t := time.AfterFunc(5*time.Second, func() {
|
|
||||||
expiredTimers <- timer
|
|
||||||
syncWithTwitch(ctx, dbConn, false)
|
|
||||||
})
|
|
||||||
timer = t
|
|
||||||
timers = append(timers, t)
|
|
||||||
case notification := <-twitchNotificationChannel:
|
|
||||||
if notification.Type == notificationTypeRevocation {
|
|
||||||
syncWithTwitch(ctx, dbConn, false)
|
|
||||||
} else {
|
|
||||||
if notification.Type == notificationTypeChannelUpdate {
|
|
||||||
// NOTE(asaf): The twitch API (getStreamStatus) lags behind the notification and
|
|
||||||
// would return old data if we called it immediately, so we have to
|
|
||||||
// wait a bit before we process the notification. We can get the
|
|
||||||
// category from the notification, but not the tags (or the up-to-date title),
|
|
||||||
// so we can't really skip this.
|
|
||||||
var timer *time.Timer
|
|
||||||
t := time.AfterFunc(3*time.Minute, func() {
|
|
||||||
expiredTimers <- timer
|
|
||||||
processEventSubNotification(ctx, dbConn, ¬ification)
|
|
||||||
})
|
|
||||||
timer = t
|
|
||||||
timers = append(timers, t)
|
|
||||||
} else {
|
|
||||||
processEventSubNotification(ctx, dbConn, ¬ification)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
return done
|
|
||||||
}
|
|
||||||
|
|
||||||
type twitchNotificationType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
notificationTypeNone twitchNotificationType = 0
|
|
||||||
notificationTypeOnline = 1
|
|
||||||
notificationTypeOffline = 2
|
|
||||||
notificationTypeChannelUpdate = 3
|
|
||||||
|
|
||||||
notificationTypeRevocation = 4
|
|
||||||
)
|
|
||||||
|
|
||||||
func QueueTwitchNotification(messageType string, body []byte) error {
|
|
||||||
var notification twitchNotification
|
|
||||||
if messageType == "notification" {
|
|
||||||
type notificationJson struct {
|
|
||||||
Subscription struct {
|
|
||||||
Type string `json:"type"`
|
|
||||||
} `json:"subscription"`
|
|
||||||
Event struct {
|
|
||||||
BroadcasterUserID string `json:"broadcaster_user_id"`
|
|
||||||
BroadcasterUserLogin string `json:"broadcaster_user_login"`
|
|
||||||
} `json:"event"`
|
|
||||||
}
|
|
||||||
var incoming notificationJson
|
|
||||||
err := json.Unmarshal(body, &incoming)
|
|
||||||
if err != nil {
|
|
||||||
return oops.New(err, "failed to parse notification body")
|
|
||||||
}
|
|
||||||
|
|
||||||
notification.TwitchID = incoming.Event.BroadcasterUserID
|
|
||||||
switch incoming.Subscription.Type {
|
|
||||||
case "stream.online":
|
|
||||||
notification.Type = notificationTypeOnline
|
|
||||||
case "stream.offline":
|
|
||||||
notification.Type = notificationTypeOffline
|
|
||||||
case "channel.update":
|
|
||||||
notification.Type = notificationTypeChannelUpdate
|
|
||||||
default:
|
|
||||||
return oops.New(nil, "unknown subscription type received")
|
|
||||||
}
|
|
||||||
} else if messageType == "revocation" {
|
|
||||||
notification.Type = notificationTypeRevocation
|
|
||||||
}
|
|
||||||
|
|
||||||
if twitchNotificationChannel != nil && notification.Type != notificationTypeNone {
|
|
||||||
select {
|
|
||||||
case twitchNotificationChannel <- notification:
|
|
||||||
default:
|
|
||||||
return oops.New(nil, "twitch notification channel is full")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func UserOrProjectLinksUpdated(twitchLoginsPreChange, twitchLoginsPostChange []string) {
|
|
||||||
if linksChangedChannel != nil {
|
|
||||||
twitchChanged := (len(twitchLoginsPreChange) != len(twitchLoginsPostChange))
|
|
||||||
if !twitchChanged {
|
|
||||||
for idx, _ := range twitchLoginsPreChange {
|
|
||||||
if twitchLoginsPreChange[idx] != twitchLoginsPostChange[idx] {
|
|
||||||
twitchChanged = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
select {
|
|
||||||
case linksChangedChannel <- struct{}{}:
|
|
||||||
default:
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func syncWithTwitch(ctx context.Context, dbConn *pgxpool.Pool, updateAll bool) {
|
|
||||||
log := logging.ExtractLogger(ctx)
|
|
||||||
log.Info().Msg("Running twitch sync")
|
|
||||||
p := perf.MakeNewRequestPerf("Background job", "", "syncWithTwitch")
|
|
||||||
defer func() {
|
|
||||||
p.EndRequest()
|
|
||||||
perf.LogPerf(p, log.Info())
|
|
||||||
}()
|
|
||||||
|
|
||||||
type twitchSyncStats struct {
|
|
||||||
NumSubbed int
|
|
||||||
NumUnsubbed int
|
|
||||||
NumStreamsChecked int
|
|
||||||
}
|
|
||||||
var stats twitchSyncStats
|
|
||||||
|
|
||||||
p.StartBlock("SQL", "Fetch list of streamers")
|
|
||||||
streamers, err := hmndata.FetchTwitchStreamers(ctx, dbConn)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Error while monitoring twitch")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
|
|
||||||
needID := make([]string, 0)
|
|
||||||
streamerMap := make(map[string]*hmndata.TwitchStreamer)
|
|
||||||
for idx, streamer := range streamers {
|
|
||||||
needID = append(needID, streamer.TwitchLogin)
|
|
||||||
streamerMap[streamer.TwitchLogin] = &streamers[idx]
|
|
||||||
}
|
|
||||||
|
|
||||||
p.StartBlock("TwitchAPI", "Fetch twitch user info")
|
|
||||||
twitchUsers, err := getTwitchUsersByLogin(ctx, needID)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Error while monitoring twitch")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
|
|
||||||
for _, tu := range twitchUsers {
|
|
||||||
streamerMap[tu.TwitchLogin].TwitchID = tu.TwitchID
|
|
||||||
}
|
|
||||||
|
|
||||||
validStreamers := make([]hmndata.TwitchStreamer, 0, len(streamers))
|
|
||||||
for _, streamer := range streamers {
|
|
||||||
if len(streamer.TwitchID) > 0 {
|
|
||||||
validStreamers = append(validStreamers, streamer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
p.StartBlock("TwitchAPI", "Fetch event subscriptions")
|
|
||||||
subscriptions, err := getEventSubscriptions(ctx)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Error while monitoring twitch")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
|
|
||||||
const (
|
|
||||||
EventSubNone = 0 // No event of this type found
|
|
||||||
EventSubRefresh = 1 // Event found, but bad status. Need to unsubscribe and resubscribe.
|
|
||||||
EventSubGood = 2 // All is well.
|
|
||||||
)
|
|
||||||
|
|
||||||
type isSubbedByType map[string]bool
|
|
||||||
|
|
||||||
streamerEventSubs := make(map[string]isSubbedByType)
|
|
||||||
for _, streamer := range validStreamers {
|
|
||||||
streamerEventSubs[streamer.TwitchID] = make(isSubbedByType)
|
|
||||||
streamerEventSubs[streamer.TwitchID]["channel.update"] = false
|
|
||||||
streamerEventSubs[streamer.TwitchID]["stream.online"] = false
|
|
||||||
streamerEventSubs[streamer.TwitchID]["stream.offline"] = false
|
|
||||||
}
|
|
||||||
|
|
||||||
type unsubEvent struct {
|
|
||||||
TwitchID string
|
|
||||||
EventID string
|
|
||||||
}
|
|
||||||
|
|
||||||
toUnsub := make([]unsubEvent, 0)
|
|
||||||
|
|
||||||
for _, sub := range subscriptions {
|
|
||||||
handled := false
|
|
||||||
if eventSubs, ok := streamerEventSubs[sub.TwitchID]; ok {
|
|
||||||
if _, ok := eventSubs[sub.Type]; ok { // Make sure it's a known type
|
|
||||||
if !sub.GoodStatus {
|
|
||||||
log.Debug().Str("TwitchID", sub.TwitchID).Str("Event Type", sub.Type).Msg("Twitch doesn't like our sub")
|
|
||||||
toUnsub = append(toUnsub, unsubEvent{TwitchID: sub.TwitchID, EventID: sub.EventID})
|
|
||||||
} else {
|
|
||||||
streamerEventSubs[sub.TwitchID][sub.Type] = true
|
|
||||||
}
|
|
||||||
handled = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !handled {
|
|
||||||
// NOTE(asaf): Found an unknown type or an event subscription that we don't have a matching user for.
|
|
||||||
// Make sure we unsubscribe.
|
|
||||||
toUnsub = append(toUnsub, unsubEvent{TwitchID: sub.TwitchID, EventID: sub.EventID})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if config.Config.Env != config.Dev { // NOTE(asaf): Can't subscribe to events from dev. We need a non-localhost callback url.
|
|
||||||
p.StartBlock("TwitchAPI", "Sync subscriptions with twitch")
|
|
||||||
for _, ev := range toUnsub {
|
|
||||||
err = unsubscribeFromEvent(ctx, ev.EventID)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Error while unsubscribing events")
|
|
||||||
// NOTE(asaf): Soft error. Don't care if it fails.
|
|
||||||
}
|
|
||||||
stats.NumUnsubbed += 1
|
|
||||||
}
|
|
||||||
|
|
||||||
for twitchID, evStatuses := range streamerEventSubs {
|
|
||||||
for evType, isSubbed := range evStatuses {
|
|
||||||
if !isSubbed {
|
|
||||||
err = subscribeToEvent(ctx, evType, twitchID)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Error while monitoring twitch")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
stats.NumSubbed += 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
tx, err := dbConn.Begin(ctx)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to start transaction")
|
|
||||||
}
|
|
||||||
defer tx.Rollback(ctx)
|
|
||||||
|
|
||||||
allIDs := make([]string, 0, len(validStreamers))
|
|
||||||
for _, streamer := range validStreamers {
|
|
||||||
allIDs = append(allIDs, streamer.TwitchID)
|
|
||||||
}
|
|
||||||
p.StartBlock("SQL", "Remove untracked streamers")
|
|
||||||
_, err = tx.Exec(ctx,
|
|
||||||
`DELETE FROM twitch_streams WHERE twitch_id != ANY($1)`,
|
|
||||||
allIDs,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("Failed to remove untracked twitch ids from streamer list in db")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
|
|
||||||
usersToUpdate := make([]string, 0)
|
|
||||||
if updateAll {
|
|
||||||
usersToUpdate = allIDs
|
|
||||||
} else {
|
|
||||||
// NOTE(asaf): Twitch can revoke our subscriptions, so we need to
|
|
||||||
// update users whose subs were revoked or missing since last time we checked.
|
|
||||||
for twitchID, evStatuses := range streamerEventSubs {
|
|
||||||
for _, isSubbed := range evStatuses {
|
|
||||||
if !isSubbed {
|
|
||||||
usersToUpdate = append(usersToUpdate, twitchID)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
p.StartBlock("TwitchAPI", "Fetch twitch stream statuses")
|
|
||||||
statuses, err := getStreamStatus(ctx, usersToUpdate)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to fetch stream statuses")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
p.StartBlock("SQL", "Update stream statuses in db")
|
|
||||||
for _, status := range statuses {
|
|
||||||
log.Debug().Interface("Status", status).Msg("Got streamer")
|
|
||||||
_, err = updateStreamStatusInDB(ctx, tx, &status)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to update twitch stream status")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
p.EndBlock()
|
|
||||||
err = tx.Commit(ctx)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to commit transaction")
|
|
||||||
}
|
|
||||||
stats.NumStreamsChecked += len(usersToUpdate)
|
|
||||||
log.Info().Interface("Stats", stats).Msg("Twitch sync done")
|
|
||||||
}
|
|
||||||
|
|
||||||
func notifyDiscordOfLiveStream(ctx context.Context, dbConn db.ConnOrTx, twitchLogin string, title string) error {
|
|
||||||
var err error
|
|
||||||
if config.Config.Discord.StreamsChannelID != "" {
|
|
||||||
err = discord.SendMessages(ctx, dbConn, discord.MessageToSend{
|
|
||||||
ChannelID: config.Config.Discord.StreamsChannelID,
|
|
||||||
Req: discord.CreateMessageRequest{
|
|
||||||
Content: fmt.Sprintf("%s is live: https://twitch.tv/%s\n> %s", twitchLogin, twitchLogin, title),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func processEventSubNotification(ctx context.Context, dbConn db.ConnOrTx, notification *twitchNotification) {
|
|
||||||
log := logging.ExtractLogger(ctx)
|
|
||||||
log.Debug().Interface("Notification", notification).Msg("Processing twitch notification")
|
|
||||||
if notification.Type == notificationTypeNone {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
status := streamStatus{
|
|
||||||
TwitchID: notification.TwitchID,
|
|
||||||
Live: false,
|
|
||||||
}
|
|
||||||
var err error
|
|
||||||
if notification.Type == notificationTypeChannelUpdate || notification.Type == notificationTypeOnline {
|
|
||||||
result, err := getStreamStatus(ctx, []string{notification.TwitchID})
|
|
||||||
if err != nil || len(result) == 0 {
|
|
||||||
log.Error().Str("TwitchID", notification.TwitchID).Err(err).Msg("failed to fetch stream status")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
allStreamers, err := hmndata.FetchTwitchStreamers(ctx, dbConn)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to fetch hmn streamers")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
for _, streamer := range allStreamers {
|
|
||||||
if streamer.TwitchLogin == result[0].TwitchLogin {
|
|
||||||
status = result[0]
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Interface("Status", status).Msg("Updating status")
|
|
||||||
inserted, err := updateStreamStatusInDB(ctx, dbConn, &status)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to update twitch stream status")
|
|
||||||
}
|
|
||||||
if inserted {
|
|
||||||
log.Debug().Msg("Notifying discord")
|
|
||||||
err = notifyDiscordOfLiveStream(ctx, dbConn, status.TwitchLogin, status.Title)
|
|
||||||
if err != nil {
|
|
||||||
log.Error().Err(err).Msg("failed to notify discord")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func updateStreamStatusInDB(ctx context.Context, conn db.ConnOrTx, status *streamStatus) (bool, error) {
|
|
||||||
log := logging.ExtractLogger(ctx)
|
|
||||||
inserted := false
|
|
||||||
if isStatusRelevant(status) {
|
|
||||||
log.Debug().Msg("Status relevant")
|
|
||||||
_, err := db.QueryOne(ctx, conn, models.TwitchStream{},
|
|
||||||
`
|
|
||||||
SELECT $columns
|
|
||||||
FROM twitch_streams
|
|
||||||
WHERE twitch_id = $1
|
|
||||||
`,
|
|
||||||
status.TwitchID,
|
|
||||||
)
|
|
||||||
if err == db.NotFound {
|
|
||||||
log.Debug().Msg("Inserting new stream")
|
|
||||||
inserted = true
|
|
||||||
} else if err != nil {
|
|
||||||
return false, oops.New(err, "failed to query existing stream")
|
|
||||||
}
|
|
||||||
_, err = conn.Exec(ctx,
|
|
||||||
`
|
|
||||||
INSERT INTO twitch_streams (twitch_id, twitch_login, title, started_at)
|
|
||||||
VALUES ($1, $2, $3, $4)
|
|
||||||
ON CONFLICT (twitch_id) DO UPDATE SET
|
|
||||||
title = EXCLUDED.title,
|
|
||||||
started_at = EXCLUDED.started_at
|
|
||||||
`,
|
|
||||||
status.TwitchID,
|
|
||||||
status.TwitchLogin,
|
|
||||||
status.Title,
|
|
||||||
status.StartedAt,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return false, oops.New(err, "failed to insert twitch streamer into db")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
log.Debug().Msg("Stream not relevant")
|
|
||||||
_, err := conn.Exec(ctx,
|
|
||||||
`
|
|
||||||
DELETE FROM twitch_streams WHERE twitch_id = $1
|
|
||||||
`,
|
|
||||||
status.TwitchID,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return false, oops.New(err, "failed to remove twitch streamer from db")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return inserted, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RelevantCategories lists the Twitch category (game) IDs that make a live
// stream worth tracking; compared against streamStatus.Category in
// isStatusRelevant.
var RelevantCategories = []string{
	"1469308723", // Software and Game Development
}
|
|
||||||
|
|
||||||
// RelevantTags lists the Twitch tag IDs that make a live stream worth
// tracking; compared against streamStatus.Tags in isStatusRelevant.
var RelevantTags = []string{
	"a59f1e4e-257b-4bd0-90c7-189c3efbf917", // Programming
	"6f86127d-6051-4a38-94bb-f7b475dde109", // Software Development
}
|
|
||||||
|
|
||||||
func isStatusRelevant(status *streamStatus) bool {
|
|
||||||
if status.Live {
|
|
||||||
for _, cat := range RelevantCategories {
|
|
||||||
if status.Category == cat {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tag := range RelevantTags {
|
|
||||||
for _, streamTag := range status.Tags {
|
|
||||||
if tag == streamTag {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
|
@ -207,7 +207,7 @@ func AdminApprovalQueue(c *RequestContext) ResponseData {
|
||||||
userIds = append(userIds, u.User.ID)
|
userIds = append(userIds, u.User.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
userLinks, err := db.Query(c.Context(), c.Conn, models.Link{},
|
userLinks, err := db.Query[models.Link](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -222,8 +222,7 @@ func AdminApprovalQueue(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user links"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user links"))
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, ul := range userLinks {
|
for _, link := range userLinks {
|
||||||
link := ul.(*models.Link)
|
|
||||||
userData := unapprovedUsers[userIDToDataIdx[*link.UserID]]
|
userData := unapprovedUsers[userIDToDataIdx[*link.UserID]]
|
||||||
userData.UserLinks = append(userData.UserLinks, templates.LinkToTemplate(link))
|
userData.UserLinks = append(userData.UserLinks, templates.LinkToTemplate(link))
|
||||||
}
|
}
|
||||||
|
@ -260,10 +259,11 @@ func AdminApprovalQueueSubmit(c *RequestContext) ResponseData {
|
||||||
type userQuery struct {
|
type userQuery struct {
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
u, err := db.QueryOne(c.Context(), c.Conn, userQuery{},
|
u, err := db.QueryOne[userQuery](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM auth_user
|
FROM
|
||||||
|
auth_user
|
||||||
LEFT JOIN handmade_asset AS auth_user_avatar ON auth_user_avatar.id = auth_user.avatar_asset_id
|
LEFT JOIN handmade_asset AS auth_user_avatar ON auth_user_avatar.id = auth_user.avatar_asset_id
|
||||||
WHERE auth_user.id = $1
|
WHERE auth_user.id = $1
|
||||||
`,
|
`,
|
||||||
|
@ -276,7 +276,7 @@ func AdminApprovalQueueSubmit(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
user := u.(*userQuery).User
|
user := u.User
|
||||||
|
|
||||||
whatHappened := ""
|
whatHappened := ""
|
||||||
if action == ApprovalQueueActionApprove {
|
if action == ApprovalQueueActionApprove {
|
||||||
|
@ -337,7 +337,7 @@ type UnapprovedPost struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchUnapprovedPosts(c *RequestContext) ([]*UnapprovedPost, error) {
|
func fetchUnapprovedPosts(c *RequestContext) ([]*UnapprovedPost, error) {
|
||||||
it, err := db.Query(c.Context(), c.Conn, UnapprovedPost{},
|
res, err := db.Query[UnapprovedPost](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -358,10 +358,6 @@ func fetchUnapprovedPosts(c *RequestContext) ([]*UnapprovedPost, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch unapproved posts")
|
return nil, oops.New(err, "failed to fetch unapproved posts")
|
||||||
}
|
}
|
||||||
var res []*UnapprovedPost
|
|
||||||
for _, iresult := range it {
|
|
||||||
res = append(res, iresult.(*UnapprovedPost))
|
|
||||||
}
|
|
||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -375,7 +371,7 @@ func fetchUnapprovedProjects(c *RequestContext) ([]UnapprovedProject, error) {
|
||||||
type unapprovedUser struct {
|
type unapprovedUser struct {
|
||||||
ID int `db:"id"`
|
ID int `db:"id"`
|
||||||
}
|
}
|
||||||
it, err := db.Query(c.Context(), c.Conn, unapprovedUser{},
|
uids, err := db.Query[unapprovedUser](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -388,9 +384,9 @@ func fetchUnapprovedProjects(c *RequestContext) ([]UnapprovedProject, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, oops.New(err, "failed to fetch unapproved users")
|
return nil, oops.New(err, "failed to fetch unapproved users")
|
||||||
}
|
}
|
||||||
ownerIDs := make([]int, 0, len(it))
|
ownerIDs := make([]int, 0, len(uids))
|
||||||
for _, uid := range it {
|
for _, uid := range uids {
|
||||||
ownerIDs = append(ownerIDs, uid.(*unapprovedUser).ID)
|
ownerIDs = append(ownerIDs, uid.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
projects, err := hmndata.FetchProjects(c.Context(), c.Conn, c.CurrentUser, hmndata.ProjectsQuery{
|
projects, err := hmndata.FetchProjects(c.Context(), c.Conn, c.CurrentUser, hmndata.ProjectsQuery{
|
||||||
|
|
|
@ -22,7 +22,7 @@ func APICheckUsername(c *RequestContext) ResponseData {
|
||||||
type userQuery struct {
|
type userQuery struct {
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
userResult, err := db.QueryOne(c.Context(), c.Conn, userQuery{},
|
userResult, err := db.QueryOne[userQuery](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM
|
FROM
|
||||||
|
@ -43,7 +43,7 @@ func APICheckUsername(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user: %s", requestedUsername))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch user: %s", requestedUsername))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
canonicalUsername = userResult.(*userQuery).User.Username
|
canonicalUsername = userResult.User.Username
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -78,10 +78,11 @@ func Login(c *RequestContext) ResponseData {
|
||||||
type userQuery struct {
|
type userQuery struct {
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
userRow, err := db.QueryOne(c.Context(), c.Conn, userQuery{},
|
userRow, err := db.QueryOne[userQuery](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM auth_user
|
FROM
|
||||||
|
auth_user
|
||||||
LEFT JOIN handmade_asset AS auth_user_avatar ON auth_user_avatar.id = auth_user.avatar_asset_id
|
LEFT JOIN handmade_asset AS auth_user_avatar ON auth_user_avatar.id = auth_user.avatar_asset_id
|
||||||
WHERE LOWER(username) = LOWER($1)
|
WHERE LOWER(username) = LOWER($1)
|
||||||
`,
|
`,
|
||||||
|
@ -94,7 +95,7 @@ func Login(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to look up user by username"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to look up user by username"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
user := &userRow.(*userQuery).User
|
user := &userRow.User
|
||||||
|
|
||||||
success, err := tryLogin(c, user, password)
|
success, err := tryLogin(c, user, password)
|
||||||
|
|
||||||
|
@ -460,7 +461,7 @@ func RequestPasswordResetSubmit(c *RequestContext) ResponseData {
|
||||||
type userQuery struct {
|
type userQuery struct {
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
}
|
}
|
||||||
userRow, err := db.QueryOne(c.Context(), c.Conn, userQuery{},
|
userRow, err := db.QueryOne[userQuery](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM auth_user
|
FROM auth_user
|
||||||
|
@ -479,12 +480,12 @@ func RequestPasswordResetSubmit(c *RequestContext) ResponseData {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if userRow != nil {
|
if userRow != nil {
|
||||||
user = &userRow.(*userQuery).User
|
user = &userRow.User
|
||||||
}
|
}
|
||||||
|
|
||||||
if user != nil {
|
if user != nil {
|
||||||
c.Perf.StartBlock("SQL", "Fetching existing token")
|
c.Perf.StartBlock("SQL", "Fetching existing token")
|
||||||
tokenRow, err := db.QueryOne(c.Context(), c.Conn, models.OneTimeToken{},
|
resetToken, err := db.QueryOne[models.OneTimeToken](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM handmade_onetimetoken
|
FROM handmade_onetimetoken
|
||||||
|
@ -501,10 +502,6 @@ func RequestPasswordResetSubmit(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch onetimetoken for user"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch onetimetoken for user"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
var resetToken *models.OneTimeToken
|
|
||||||
if tokenRow != nil {
|
|
||||||
resetToken = tokenRow.(*models.OneTimeToken)
|
|
||||||
}
|
|
||||||
now := time.Now()
|
now := time.Now()
|
||||||
|
|
||||||
if resetToken != nil {
|
if resetToken != nil {
|
||||||
|
@ -527,7 +524,7 @@ func RequestPasswordResetSubmit(c *RequestContext) ResponseData {
|
||||||
|
|
||||||
if resetToken == nil {
|
if resetToken == nil {
|
||||||
c.Perf.StartBlock("SQL", "Creating new token")
|
c.Perf.StartBlock("SQL", "Creating new token")
|
||||||
tokenRow, err := db.QueryOne(c.Context(), c.Conn, models.OneTimeToken{},
|
resetToken, err := db.QueryOne[models.OneTimeToken](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
INSERT INTO handmade_onetimetoken (token_type, created, expires, token_content, owner_id)
|
INSERT INTO handmade_onetimetoken (token_type, created, expires, token_content, owner_id)
|
||||||
VALUES ($1, $2, $3, $4, $5)
|
VALUES ($1, $2, $3, $4, $5)
|
||||||
|
@ -543,7 +540,6 @@ func RequestPasswordResetSubmit(c *RequestContext) ResponseData {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to create onetimetoken"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to create onetimetoken"))
|
||||||
}
|
}
|
||||||
resetToken = tokenRow.(*models.OneTimeToken)
|
|
||||||
|
|
||||||
err = email.SendPasswordReset(user.Email, user.BestName(), user.Username, resetToken.Content, resetToken.Expires, c.Perf)
|
err = email.SendPasswordReset(user.Email, user.BestName(), user.Username, resetToken.Content, resetToken.Expires, c.Perf)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -787,7 +783,7 @@ func validateUsernameAndToken(c *RequestContext, username string, token string,
|
||||||
User models.User `db:"auth_user"`
|
User models.User `db:"auth_user"`
|
||||||
OneTimeToken *models.OneTimeToken `db:"onetimetoken"`
|
OneTimeToken *models.OneTimeToken `db:"onetimetoken"`
|
||||||
}
|
}
|
||||||
row, err := db.QueryOne(c.Context(), c.Conn, userAndTokenQuery{},
|
data, err := db.QueryOne[userAndTokenQuery](c.Context(), c.Conn,
|
||||||
`
|
`
|
||||||
SELECT $columns
|
SELECT $columns
|
||||||
FROM auth_user
|
FROM auth_user
|
||||||
|
@ -807,8 +803,7 @@ func validateUsernameAndToken(c *RequestContext, username string, token string,
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if row != nil {
|
if data != nil {
|
||||||
data := row.(*userAndTokenQuery)
|
|
||||||
result.User = &data.User
|
result.User = &data.User
|
||||||
result.OneTimeToken = data.OneTimeToken
|
result.OneTimeToken = data.OneTimeToken
|
||||||
if result.OneTimeToken != nil {
|
if result.OneTimeToken != nil {
|
||||||
|
|
|
@ -78,22 +78,6 @@ func DiscordOAuthCallback(c *RequestContext) ResponseData {
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to save new Discord user info"))
|
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to save new Discord user info"))
|
||||||
}
|
}
|
||||||
|
|
||||||
if c.CurrentUser.Status == models.UserStatusConfirmed {
|
|
||||||
_, err = c.Conn.Exec(c.Context(),
|
|
||||||
`
|
|
||||||
UPDATE auth_user
|
|
||||||
SET status = $1
|
|
||||||
WHERE id = $2
|
|
||||||
`,
|
|
||||||
models.UserStatusApproved,
|
|
||||||
c.CurrentUser.ID,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error().Err(err).Msg("failed to set user status to approved after linking discord account")
|
|
||||||
// NOTE(asaf): It's not worth failing the request over this, so we're not returning an error to the user.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.Redirect(hmnurl.BuildUserSettings("discord"), http.StatusSeeOther)
|
return c.Redirect(hmnurl.BuildUserSettings("discord"), http.StatusSeeOther)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -3,8 +3,6 @@ package website
|
||||||
import (
|
import (
|
||||||
"math"
|
"math"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func getPageInfo(
|
func getPageInfo(
|
||||||
|
@ -16,7 +14,7 @@ func getPageInfo(
|
||||||
totalPages int,
|
totalPages int,
|
||||||
ok bool,
|
ok bool,
|
||||||
) {
|
) {
|
||||||
totalPages = utils.IntMax(1, int(math.Ceil(float64(totalItems)/float64(itemsPerPage))))
|
totalPages = int(math.Ceil(float64(totalItems) / float64(itemsPerPage)))
|
||||||
ok = true
|
ok = true
|
||||||
|
|
||||||
page = 1
|
page = 1
|
||||||
|
|
|
@ -1,36 +0,0 @@
|
||||||
package website
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestGetPageInfo(t *testing.T) {
|
|
||||||
items := []struct {
|
|
||||||
name string
|
|
||||||
pageParam string
|
|
||||||
totalItems, perPage int
|
|
||||||
page, totalPages int
|
|
||||||
ok bool
|
|
||||||
}{
|
|
||||||
{"good, no param", "", 85, 10, 1, 9, true},
|
|
||||||
{"good", "2", 85, 10, 2, 9, true},
|
|
||||||
{"too big", "10", 85, 10, 0, 0, false},
|
|
||||||
{"too small", "0", 85, 10, 0, 0, false},
|
|
||||||
{"pizza", "pizza", 85, 10, 0, 0, false},
|
|
||||||
{"zero items, no param", "", 0, 10, 1, 1, true}, // should go to page 1
|
|
||||||
{"zero items, page 1", "1", 0, 10, 1, 1, true},
|
|
||||||
{"zero items, too big", "2", 0, 10, 0, 0, false},
|
|
||||||
{"zero items, too small", "0", 0, 10, 0, 0, false},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, item := range items {
|
|
||||||
t.Run(item.name, func(t *testing.T) {
|
|
||||||
page, totalPages, ok := getPageInfo(item.pageParam, item.totalItems, item.perPage)
|
|
||||||
assert.Equal(t, item.page, page)
|
|
||||||
assert.Equal(t, item.totalPages, totalPages)
|
|
||||||
assert.Equal(t, item.ok, ok)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -22,7 +22,6 @@ import (
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
"git.handmade.network/hmn/hmn/src/oops"
|
||||||
"git.handmade.network/hmn/hmn/src/parsing"
|
"git.handmade.network/hmn/hmn/src/parsing"
|
||||||
"git.handmade.network/hmn/hmn/src/templates"
|
"git.handmade.network/hmn/hmn/src/templates"
|
||||||
"git.handmade.network/hmn/hmn/src/twitch"
|
|
||||||
"git.handmade.network/hmn/hmn/src/utils"
|
"git.handmade.network/hmn/hmn/src/utils"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
|
@ -865,7 +864,6 @@ func updateProject(ctx context.Context, tx pgx.Tx, user *models.User, payload *P
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
twitchLoginsPreChange, preErr := hmndata.FetchTwitchLoginsForUserOrProject(ctx, tx, nil, &payload.ProjectID)
|
|
||||||
_, err = tx.Exec(ctx, `DELETE FROM handmade_links WHERE project_id = $1`, payload.ProjectID)
|
_, err = tx.Exec(ctx, `DELETE FROM handmade_links WHERE project_id = $1`, payload.ProjectID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return oops.New(err, "Failed to delete project links")
|
return oops.New(err, "Failed to delete project links")
|
||||||
|
@ -885,10 +883,6 @@ func updateProject(ctx context.Context, tx pgx.Tx, user *models.User, payload *P
|
||||||
return oops.New(err, "Failed to insert new project link")
|
return oops.New(err, "Failed to insert new project link")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
twitchLoginsPostChange, postErr := hmndata.FetchTwitchLoginsForUserOrProject(ctx, tx, nil, &payload.ProjectID)
|
|
||||||
if preErr == nil && postErr == nil {
|
|
||||||
twitch.UserOrProjectLinksUpdated(twitchLoginsPreChange, twitchLoginsPostChange)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -205,9 +205,6 @@ func NewWebsiteRoutes(longRequestContext context.Context, conn *pgxpool.Pool) ht
|
||||||
hmnOnly.POST(hmnurl.RegexDiscordUnlink, authMiddleware(csrfMiddleware(DiscordUnlink)))
|
hmnOnly.POST(hmnurl.RegexDiscordUnlink, authMiddleware(csrfMiddleware(DiscordUnlink)))
|
||||||
hmnOnly.POST(hmnurl.RegexDiscordShowcaseBacklog, authMiddleware(csrfMiddleware(DiscordShowcaseBacklog)))
|
hmnOnly.POST(hmnurl.RegexDiscordShowcaseBacklog, authMiddleware(csrfMiddleware(DiscordShowcaseBacklog)))
|
||||||
|
|
||||||
hmnOnly.POST(hmnurl.RegexTwitchEventSubCallback, TwitchEventSubCallback)
|
|
||||||
hmnOnly.GET(hmnurl.RegexTwitchDebugPage, TwitchDebugPage)
|
|
||||||
|
|
||||||
hmnOnly.GET(hmnurl.RegexUserProfile, UserProfile)
|
hmnOnly.GET(hmnurl.RegexUserProfile, UserProfile)
|
||||||
hmnOnly.GET(hmnurl.RegexUserSettings, authMiddleware(UserSettings))
|
hmnOnly.GET(hmnurl.RegexUserSettings, authMiddleware(UserSettings))
|
||||||
hmnOnly.POST(hmnurl.RegexUserSettings, authMiddleware(csrfMiddleware(UserSettingsSave)))
|
hmnOnly.POST(hmnurl.RegexUserSettings, authMiddleware(csrfMiddleware(UserSettingsSave)))
|
||||||
|
|
|
@ -6,7 +6,7 @@ func Manifesto(c *RequestContext) ResponseData {
|
||||||
baseData := getBaseDataAutocrumb(c, "Handmade Manifesto")
|
baseData := getBaseDataAutocrumb(c, "Handmade Manifesto")
|
||||||
baseData.OpenGraphItems = append(baseData.OpenGraphItems, templates.OpenGraphItem{
|
baseData.OpenGraphItems = append(baseData.OpenGraphItems, templates.OpenGraphItem{
|
||||||
Property: "og:description",
|
Property: "og:description",
|
||||||
Value: "Computers are amazing. So why is software so terrible?",
|
Value: "Modern computer hardware is amazing. Manufacturers have orchestrated billions of pieces of silicon into terrifyingly complex and efficient structures…",
|
||||||
})
|
})
|
||||||
|
|
||||||
var res ResponseData
|
var res ResponseData
|
||||||
|
|
|
@ -89,8 +89,6 @@ func SnippetToTimelineItem(
|
||||||
item.EmbedMedia = append(item.EmbedMedia, videoMediaItem(asset))
|
item.EmbedMedia = append(item.EmbedMedia, videoMediaItem(asset))
|
||||||
} else if strings.HasPrefix(asset.MimeType, "audio/") {
|
} else if strings.HasPrefix(asset.MimeType, "audio/") {
|
||||||
item.EmbedMedia = append(item.EmbedMedia, audioMediaItem(asset))
|
item.EmbedMedia = append(item.EmbedMedia, audioMediaItem(asset))
|
||||||
} else {
|
|
||||||
item.EmbedMedia = append(item.EmbedMedia, unknownMediaItem(asset))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -102,8 +100,7 @@ func SnippetToTimelineItem(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(item.EmbedMedia) == 0 ||
|
if len(item.EmbedMedia) > 0 && (item.EmbedMedia[0].Width == 0 || item.EmbedMedia[0].Height == 0) {
|
||||||
(len(item.EmbedMedia) > 0 && (item.EmbedMedia[0].Width == 0 || item.EmbedMedia[0].Height == 0)) {
|
|
||||||
item.CanShowcase = false
|
item.CanShowcase = false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -188,15 +185,3 @@ func youtubeMediaItem(videoId string) templates.TimelineItemMedia {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func unknownMediaItem(asset *models.Asset) templates.TimelineItemMedia {
|
|
||||||
assetUrl := hmnurl.BuildS3Asset(asset.S3Key)
|
|
||||||
|
|
||||||
return templates.TimelineItemMedia{
|
|
||||||
Type: templates.TimelineItemMediaTypeUnknown,
|
|
||||||
AssetUrl: assetUrl,
|
|
||||||
MimeType: asset.MimeType,
|
|
||||||
Filename: asset.Filename,
|
|
||||||
FileSize: asset.Size,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
@ -1,94 +0,0 @@
|
||||||
package website
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/hmac"
|
|
||||||
"crypto/sha256"
|
|
||||||
"encoding/hex"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"git.handmade.network/hmn/hmn/src/config"
|
|
||||||
"git.handmade.network/hmn/hmn/src/db"
|
|
||||||
"git.handmade.network/hmn/hmn/src/models"
|
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
|
||||||
"git.handmade.network/hmn/hmn/src/twitch"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TwitchEventSubCallback(c *RequestContext) ResponseData {
|
|
||||||
secret := config.Config.Twitch.EventSubSecret
|
|
||||||
messageId := c.Req.Header.Get("Twitch-Eventsub-Message-Id")
|
|
||||||
timestamp := c.Req.Header.Get("Twitch-Eventsub-Message-Timestamp")
|
|
||||||
signature := c.Req.Header.Get("Twitch-Eventsub-Message-Signature")
|
|
||||||
messageType := c.Req.Header.Get("Twitch-Eventsub-Message-Type")
|
|
||||||
|
|
||||||
body, err := io.ReadAll(c.Req.Body)
|
|
||||||
if err != nil {
|
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to read request body"))
|
|
||||||
}
|
|
||||||
|
|
||||||
hmacMessage := fmt.Sprintf("%s%s%s", messageId, timestamp, string(body[:]))
|
|
||||||
hmac := hmac.New(sha256.New, []byte(secret))
|
|
||||||
hmac.Write([]byte(hmacMessage))
|
|
||||||
hash := hmac.Sum(nil)
|
|
||||||
hmacStr := "sha256=" + hex.EncodeToString(hash)
|
|
||||||
|
|
||||||
if hmacStr != signature {
|
|
||||||
var res ResponseData
|
|
||||||
res.StatusCode = 403
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Logger.Debug().Str("Body", string(body[:])).Str("Type", messageType).Msg("Got twitch webhook")
|
|
||||||
|
|
||||||
if messageType == "webhook_callback_verification" {
|
|
||||||
type challengeReq struct {
|
|
||||||
Challenge string `json:"challenge"`
|
|
||||||
}
|
|
||||||
var data challengeReq
|
|
||||||
err = json.Unmarshal(body, &data)
|
|
||||||
if err != nil {
|
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to unmarshal twitch verification"))
|
|
||||||
}
|
|
||||||
var res ResponseData
|
|
||||||
res.StatusCode = 200
|
|
||||||
|
|
||||||
res.Header().Set("Content-Type", "text/plain") // NOTE(asaf): No idea why, but the twitch-cli fails when we don't set this.
|
|
||||||
res.Write([]byte(data.Challenge))
|
|
||||||
return res
|
|
||||||
} else {
|
|
||||||
err := twitch.QueueTwitchNotification(messageType, body)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error().Err(err).Msg("Failed to process twitch callback")
|
|
||||||
// NOTE(asaf): Returning 200 either way here
|
|
||||||
}
|
|
||||||
var res ResponseData
|
|
||||||
res.StatusCode = 200
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TwitchDebugPage(c *RequestContext) ResponseData {
|
|
||||||
streams, err := db.Query(c.Context(), c.Conn, models.TwitchStream{},
|
|
||||||
`
|
|
||||||
SELECT $columns
|
|
||||||
FROM
|
|
||||||
twitch_streams
|
|
||||||
ORDER BY started_at DESC
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return c.ErrorResponse(http.StatusInternalServerError, oops.New(err, "failed to fetch twitch streams"))
|
|
||||||
}
|
|
||||||
|
|
||||||
html := ""
|
|
||||||
for _, stream := range streams {
|
|
||||||
s := stream.(*models.TwitchStream)
|
|
||||||
html += fmt.Sprintf(`<a href="https://twitch.tv/%s">%s</a>%s<br />`, s.Login, s.Login, s.Title)
|
|
||||||
}
|
|
||||||
var res ResponseData
|
|
||||||
res.StatusCode = 200
|
|
||||||
res.Write([]byte(html))
|
|
||||||
return res
|
|
||||||
}
|
|
|
@ -18,7 +18,6 @@ import (
|
||||||
"git.handmade.network/hmn/hmn/src/models"
|
"git.handmade.network/hmn/hmn/src/models"
|
||||||
"git.handmade.network/hmn/hmn/src/oops"
|
"git.handmade.network/hmn/hmn/src/oops"
|
||||||
"git.handmade.network/hmn/hmn/src/templates"
|
"git.handmade.network/hmn/hmn/src/templates"
|
||||||
"git.handmade.network/hmn/hmn/src/twitch"
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
)
|
)
|
||||||
|
@ -379,7 +378,6 @@ func UserSettingsSave(c *RequestContext) ResponseData {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Process links
|
// Process links
|
||||||
twitchLoginsPreChange, preErr := hmndata.FetchTwitchLoginsForUserOrProject(c.Context(), tx, &c.CurrentUser.ID, nil)
|
|
||||||
linksText := form.Get("links")
|
linksText := form.Get("links")
|
||||||
links := ParseLinks(linksText)
|
links := ParseLinks(linksText)
|
||||||
_, err = tx.Exec(c.Context(), `DELETE FROM handmade_links WHERE user_id = $1`, c.CurrentUser.ID)
|
_, err = tx.Exec(c.Context(), `DELETE FROM handmade_links WHERE user_id = $1`, c.CurrentUser.ID)
|
||||||
|
@ -403,10 +401,6 @@ func UserSettingsSave(c *RequestContext) ResponseData {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
twitchLoginsPostChange, postErr := hmndata.FetchTwitchLoginsForUserOrProject(c.Context(), tx, &c.CurrentUser.ID, nil)
|
|
||||||
if preErr == nil && postErr == nil {
|
|
||||||
twitch.UserOrProjectLinksUpdated(twitchLoginsPreChange, twitchLoginsPostChange)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update password
|
// Update password
|
||||||
oldPassword := form.Get("old_password")
|
oldPassword := form.Get("old_password")
|
||||||
|
|
|
@ -17,7 +17,6 @@ import (
|
||||||
"git.handmade.network/hmn/hmn/src/logging"
|
"git.handmade.network/hmn/hmn/src/logging"
|
||||||
"git.handmade.network/hmn/hmn/src/perf"
|
"git.handmade.network/hmn/hmn/src/perf"
|
||||||
"git.handmade.network/hmn/hmn/src/templates"
|
"git.handmade.network/hmn/hmn/src/templates"
|
||||||
"git.handmade.network/hmn/hmn/src/twitch"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -47,7 +46,6 @@ var WebsiteCommand = &cobra.Command{
|
||||||
perfCollector.Done,
|
perfCollector.Done,
|
||||||
discord.RunDiscordBot(backgroundJobContext, conn),
|
discord.RunDiscordBot(backgroundJobContext, conn),
|
||||||
discord.RunHistoryWatcher(backgroundJobContext, conn),
|
discord.RunHistoryWatcher(backgroundJobContext, conn),
|
||||||
twitch.MonitorTwitchSubscriptions(backgroundJobContext, conn),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
signals := make(chan os.Signal, 1)
|
signals := make(chan os.Signal, 1)
|
||||||
|
|
Loading…
Reference in New Issue